Compare commits
scanner-ra...master (764 commits)
Commits (SHA1):

```
6739742fb6 d251226a3c ae8184b8b9 470ecf8996 0cdb6bef7b cd603fa981 b12009a746 9f9a0bc410
5d290a2a75 5808350bfe e64e74d5ed 1a88b26a10 6c05cdfb0c aefae11c0d 630fa52717 eea85f4eff
cd6672701b f4ca3d95ca 17e3c7a5c5 dd60d5cff0 f1288ea5c9 47ae060966 a1893b4420 999e041d49
0d4d854809 93d793d249 82486d4b0a 5d9605a91e 5293dd683e 62effdf128 8e4f21aba0 5208299bbb
ba7350c7ee f96d518ebf d5b82fbbab a7d8c0cbb2 24007727d4 6aa63a7213 eacb95c85d 6967640571
4ac2d5d276 642b56d9af 0574fcf256 98de2bc1a8 cd4b6e2ef9 8caecbc13f 1b654ae35d 3bd44afcaa
8b8199775f 744e556f7e 8a3dcc6155 b0afbf3762 974be3bb30 d861e2bcd9 b9c2d1dc89 8ca17d1bb1
3182efeccc bec7c3272b e6bfed33ee 053b264502 a8f25fa441 f450ce4f6e 3ff8edf9e8 6b6040961c
888f57657d be8fe690d8 c0b1710f8a 7d3feeae9a 3f85f65e3f df8b62fc50 14b4708018 dcef0cc0ee
c1a0f48781 f6d17fdb04 829733a35e d64b863030 882aa867eb de92a9b4c9 5880df47e2 e92a7803eb
0d656de98b b095968dff d592b16ac0 320c0865e9 60635e0729 120f74723e 02508d5570 42e7e9c3e7
55b9a25c84 877782a8a4 0e1f715ef1 f3012a999d 3072d35ed5 57e3a7b2ca 0df2916920 61c21aa408
7eb23d9f3c db2d221ae9 67cb3cb881 12a31536e1 7657cc9d35 13ff3935ac 361287fb56 13d4db8bb4
419a5a7305 c7b5f89392 d546e28abf 86e2fd2337 ff255a2354 fe67521b3d f02d7e7e33 6a8676f335
944386d25f ef03a3f8fe 18a5243933 8444cc3deb 097c2d4f05 b8f52210f9 ecc787e221 6188010f53
70cde4a110 77363a65c2 605e580063 a2f2b16acb 87d778a1c6 e344837e35 bdee2c2dd3 da5926d6f5
b3bc7701cd 262f1782cc 00d172bf9f ae54350c76 3355825a68 7d0e029e37 0f5ccc4aba 0cf6e7c507
1dc4804b6e c5b22a1dc6 92efd26380 24c8feba3e 122493b717 4edcca9850 be0c44f871 35b1356e96
443acf080a 00e394f0f1 341665824c bd02be25d5 12a6400c63 d86e1b4f5e 422866a437 5f7806f99e
a9bce7c18a 335bfabc60 e1b424c191 ea9c318afb 9d66dbc28f 8c22426223 90ee433c9b f26bd44a43
021d9c447d ce56465197 b0cdab85fe 47c9256976 cf89840460 e3294c3faf 95ab17e444 9b914885f1
92678f0fc5 a1640e4fe4 1be51c2129 6214f95e7e c89e40f008 8873c1aeff d543e2e50b e5c11d9efc
d13657c40c 804ef22075 fa28b430af 311585d304 60c3bed6a4 6dfa79013f 79ef484392 0ca8fe8c12
552ab537e8 0cf217179c 48a5077035 a69367f739 1a0868c487 9be3e2bdd8 074e991280 f09dc3cf46
f222db57ce d29132512b 22553b3372 46ea65c89b 6a28a62369 db0d05fab3 317e2e74c2 04cfee5664
57c6105897 339bad2de4 31ff62445b c54bc441ba 070b91628f 9593737871 0269357c5a fd68c02072
39a67eec61 67f50b85f5 eedbec8f24 7ba7c4a8ce b6f45b0a2e 3d26b8e500 46f7f860e6 2ae677162f
4dbfb5b49a 3a911d578c 63f48afaeb ac39aed7c5 c4d02a5254 69c42450c3 b863b16454 0c35511aea
06741d0d5d ca8b944b53 580cd9541a d60ef9ad0a 821cf797f2 917895e6a3 6e53dcc8e1 56325d2a3b
d1160cb820 5528cfee17 937dcf5fd1 57e2f41f42 3c1f02a7f9 907c7bc80b 40a8678989 6451d2f65d
e4e643086b e9e4316569 0719bd6ffb 8d6d19de1b 16502e19dd 6b2ed39df6 d517af4c1a 27e5147a5f
e659dddad1 643f532a70 86d86628cb 17854168d9 d287acfcc0 595299a3c2 3eabba637c cd12e66e67
1d0ebd1065 5fd818babe 968d39328d 310c0b86a7 cc5463ad44 f95a52df4a 3b0273fc61 6cb9486b28
f44cbd407f 0b836b2de0 151130a5df 4559ec51f7 5cd6e747a0 5263cd0706 4535ea6aaa 6e8ad7e5cc
22d658518b f0cfaffd5e 94996b26e5 f2e71ec95c da61d7cac5 9b23cd5394 b75196bb81 dbe88f8bbb
f9a331a505 47beafb836 a12a79b366 62f93e221d 07986471b3 dba8446d9e 0c5cdcb161 8b3e023ef0
a0fd1ded97 5272b6b908 a866eb5dd0 90bdd63a71 ed2abf8609 d188bf6352 4db3edadf4 ca27fb5d43
e6fe91e2e7 b57b7213a9 ed91767663 489ad07e8b 88e323ca43 721b0e8b11 ac171eb280 a1211d3fbd
0be215e152 c12b4a1565 34ef1157a6 e0edfe1cb3 8387101a61 9fdf7213d4 b7f36a13ba 66ea1a6dda
298b6775c6 2e4b7d26b1 dab84a1b10 340df02655 99d8b58868 1152bf4c9d 107bd800b0 e67f9f8f7a
79177a1cd5 0a7274678a 0d914c860a eb5ad7eb26 4602e60c1b 1c640ac2c3 10f5a42fd7 fee50ad0ce
be888a5fef 86b9f81ad6 88e0b4cea4 7bc8f76667 0bb43f7afb 99c121bfe8 c60264b87f f9f7ae0850
d45642c345 790dcc115f 79a5e6a671 3a47bc4435 3294b04436 2c02f0767b 6f7f9a3869 1704c604bf
21b38004da aed8b8b32c 1562b719d2 e749a97f9f 3d9f4d1bd3 9f2dd2f377 d87921bb9c f7838748df
7e7e376046 bbec252c51 9d619d6fdc 49ae48f7fe 46a0e94de7 2368af52ea d000698847 d641249f85
346080aad2 acfeed006a 5e1daf0c41 7d0b94c008 d3c2fed4b3 32c88194f5 9ced6172de 07b4c8d05d
b787f31481 6850df969d 62c53197c4 cd2cb661a4 8e90799e27 d810217e63 8676eda663 8c61bbdb73
99988b7081 436162ae7c c3012a7d8a f0165dee92 dff828cdbe c18d019db0 25c601bd2f 85ab2929e9
86b2c939c7 e3b85fd0d5 0f79c61188 618b9dd66e ccc707152a aeab755033 912167a9cd 42e2bba8d6
56c2fe59cb c7f877de96 aa11c198af f8f93c1ec1 f04fab9b7a 854f527f6e c740f244ba 5ed2c77b59
ad0f953c21 3c56f53105 d2e06bf130 36d93aeff3 1e7d77c517 81849352fc 01d8a39c0b 1a3b0375fa
59bcffe83b 45318922d8 1a2167b155 8938309f4b 51a800b7df a6cd6abcfb b09a15eb54 a9818e4b17
be8b3e282a 259e34435f eaa10b279f 8a15b881fd f475182c7d 4c51f27b0a 64760ffa76 02fff92b91
876d5a96bf e8d3d01598 889015f03b a2c98b4b5f 46309a1f95 e1f6e38b57 9772270868 0fdf569571
a6e530b33d 6cabd9e67f ac13c86675 50eaf5befd 0220d9d93d 88ac3abaa1 ad133ecb38 6ba73fd888
8bd923ab9e 50622f71f8 2ab9c9b590 52b719f8fb 135eeded02 0bdf698673 e7f9160867 ca64399f9f
19148eaa0d b8ba76b1ae f91255a201 06537fda83 299f8a9fb9 4339b0fe05 08bb6f44a4 31b9717ca3
52a792384f b1d2b7cfb8 cc634236b1 91274f47e4 bfc5d1180c 21c658a12c e057299b0d 3056dc5be4
0191e94226 dcdd5bc372 d0e0e0322c ca7ff033db d4d8ed32b3 635c49909c 70cf2a7a22 74e6641afc
c7475e4bf3 eee41925aa 69d553c82a 043e0c65ec 4dffb818e2 4514751803 27fa1088b9 853ca46899
21390af2dd 45a281c962 c7c1614278 c085a772cf 9fdf685dd5 733d7513af b341073192 92c5d3b8e2
abc5c6bc50 52d2865365 89b04babfb 755f77231c 3d4d13ea1e 4ec8aacaec 0f949168ef 71941d8bda
521da2b0a7 37a9ecd5b7 ee8d529552 dac6300558 d05e4ae7ff e3db212b0b f0e7ac2f18 e08fe3b993
a380e1a259 17471bdfcc 5985690d45 07a1b7fc00 12aff698b9 276accc210 cc3994928c efd212ee46
ec3a889758 1850762118 0b28226615 a00fab7dc4 11071ed682 066fd77d39 8138dba800 b749c02eb5
1fdd1d250c 998fb34d15 cb30ec5b17 ab7eb70a3c 3b67861def b26b7f8d62 2bd400dcee dedcc5255a
14b8ead412 f0571b1e33 8e79929cb8 0a33135483 a40265cbeb 74d7ca8582 1a80a1f413 f95e0e3a56
14647b2a38 5311904619 60b76c7834 2bf04d1f04 f5afe475de f20d4b0897 05d443a019 eed662df98
9ad096ef22 ac8a4ba80e c2221f2732 16aaed78ae a115e515ff 060e69e671 40dfe6bd51 dc64bb5395
51acdf9723 1b3a9a4563 304e56850f 9d62f17bc4 032af925fc f5e9680b75 9515be4fc1 2cf18f5ac2
56a6df0c57 67619dab5c 5546862bb7 959f096a89 016dd7afbf 67cc0a9f58 4940d2c29b 4b6723192e
8f3ceb471a 36f5f7918f c01bf6ea4a 5cfeba9c0d 3074c0adf2 46f8d1267c 4820d50336 bde94aed4d
6dbcfdf282 4ab78ae00d a70b2fee4b aa900bf6ed cda634a1c4 ca087d2c07 8bb1448a6f f0222107b8
9003b883f1 dc8a9cc4f2 e1eb2fdbca b5894fac95 6c3eb760a9 a76934dda0 a18ae03e0b e901716ebd
e874d2d8f8 d60c6f163d e3ac504e86 58662417a6 3e7721e554 3e72969ce4 79244b5815 f6a943a1ad
692332ed1c 0dba35c30f 1f66d156b5 09cb4c5729 a4b20c1c56 10e6ecf162 be7716dfa7 b1e39d2dba
25e6de4a0a 11410b5a8e 169d7ad57f 31ceb99603 2cae67892e 07a86b1729 6941497c7a 6e88672dac
9dbe165296 c8353a52af 48059b72a8 f23a52f410 27bc78698d 9365586cc3 f3259288b3 0e226561b1
5c776a4e62 40eb26e580 a633a06bb4 1c9a2fa455 a9dbb7257c 7bf51ae08a 29e6717c31 a2f8daf38d
3a85d4d5f3 23e0891cd5 24f51518d1 344a88c4fb 810d99d972 b26adf4265 f941277a9d 3414bbd48e
e389d54868 5de314833f ac8bb1b777 26d4b19006 52ef2992aa 9d9c76e693 d65a74a667 ea9c85fb94
dcfc95e563 cd6e766365 95fd37ecf8 f47319212b 1695e454a7 5a2c5ed865 8ab351ba32 7668192a49
7953aba070 329dcc92a1 17017da7a6 0195bbf1b4 e58b1faac1 d38aa596e1 4d6740980c b75ca0b957
21d74b8482 c8bd78a29c 2c6f70cc57 207ef9796e cbf960ff20 5a854e6030 68e707eb4f 4170f71dbc
ba19fe31be 2814c00faa 45fa028201 dcdd6ce2d2 aea3a4720a 28d5272e71 767b5486c6 867433afd7
5a825a0930 ef39298342 86b507a842 19482834bd 07c08432ca 619d347f95 efc51a596c 5f379da544
a7e6d01144 f8e77aa99d a2f6b96683 16be945cdd 55fda55b9b 88d2f010f5 6bad1bc6c5 b3183363a2
cc449ad965 3456330fe9 2c064039c7 6ef4432718 2304109eab 0a58175e46 af6f668659 77f74a05c7
cd3d967f3c c712276676 490f79bca2 22f67e2b67 d87d4592e0 5d99014bb8 d2db7eb8ef 4f9869142f
48fe030bdd 9228a31f94 e3b2545ab7 b70843a033 5bd0d11982 bd6aa2be2b 87c4df3ff3 dda45cfbb6
694d636322 f3d50f273b 7ba0f297e5 e9b95078c7 f6c325df0b 72807e187e 432f4fd9b5 03b7760275
28f7c6bdb2 043969ef18 201b41cf11 2a63077cac faf97b896a b747261929 5d9870ebee 8744a4e3f2
00674e3162 f7def174cd b68c64b332 90666c951d c132f1928f 47a2f042d5 1b1e99bf1a 7d3dbc062d
214b3dc2e6 2374bca62a 6c4a50a9c7 07aaf2322e 9e1a2a701b e2d1e40a4d c053b63be8 f279d10aa2
8de3a2ee13 8368f9994d 495fe2a6c5 cd94dbd57f 530e0571c9 ea9aa018b3 69d977d736 c7b218838d
e445532a1f 473f0a1a4d a08c4b58ab 8eb4407200
```
483 changed files with 43,786 additions and 21,996 deletions

```diff
@@ -10,6 +10,9 @@ insert_final_newline = true
 [*.rs]
 indent_size = 4
 
+[*.{zig,zon}]
+indent_size = 4
+
 [Makefile]
 indent_style = tab
 indent_size = 8
```
`.envrc` (new file, 1 addition)

```diff
@@ -0,0 +1 @@
+use flake
```
`.gitattributes` (vendored, 1 deletion)

```diff
@@ -3,5 +3,4 @@
 /lib/src/unicode/*.h linguist-vendored
 /lib/src/unicode/LICENSE linguist-vendored
 
-/cli/src/generate/prepare_grammar/*.json -diff
 Cargo.lock -diff
```
`.github/FUNDING.yml` (vendored, new file, 15 additions)

```diff
@@ -0,0 +1,15 @@
+# These are supported funding model platforms
+
+github: tree-sitter
+patreon: # Replace with a single Patreon username
+open_collective: tree-sitter # Replace with a single Open Collective username
+ko_fi: amaanq
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
+polar: # Replace with a single Polar username
+buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
+thanks_dev: # Replace with a single thanks.dev username
+custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
```
`.github/ISSUE_TEMPLATE/bug_report.yml` (vendored, 2 changed lines)

```diff
@@ -1,6 +1,6 @@
 name: Bug Report
 description: Report a problem
-labels: [bug]
+type: Bug
 body:
 - type: textarea
 attributes:
```
`.github/ISSUE_TEMPLATE/feature_request.yml` (vendored, 2 changed lines)

```diff
@@ -1,6 +1,6 @@
 name: Feature request
 description: Request an enhancement
-labels: [enhancement]
+type: Feature
 body:
 - type: markdown
 attributes:
```
`.github/actions/cache/action.yml` (vendored, 6 changed lines)

```diff
@@ -17,7 +17,9 @@ runs:
 test/fixtures/grammars
 target/release/tree-sitter-*.wasm
 key: fixtures-${{ join(matrix.*, '_') }}-${{ hashFiles(
-'cli/generate/src/**',
-'xtask/src/*',
+'crates/generate/src/**',
+'lib/src/parser.h',
+'lib/src/array.h',
+'lib/src/alloc.h',
 'test/fixtures/grammars/*/**/src/*.c',
 '.github/actions/cache/action.yml') }}
```
`.github/cliff.toml` (vendored, 4 changed lines)

```diff
@@ -16,13 +16,13 @@ body = """
 {% for commit in commits%}\
 {% if not commit.scope %}\
 - {{ commit.message | upper_first }}\
-{% if commit.github.pr_number %} (<https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}/pull/{{ commit.github.pr_number }}>){%- endif %}
+{% if commit.remote.pr_number %} (<https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}/pull/{{ commit.remote.pr_number }}>){%- endif %}
 {% endif %}\
 {% endfor %}\
 {% for group, commits in commits | group_by(attribute="scope") %}\
 {% for commit in commits %}\
 - **{{commit.scope}}**: {{ commit.message | upper_first }}\
-{% if commit.github.pr_number %} (<https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}/pull/{{ commit.github.pr_number }}>){%- endif %}
+{% if commit.remote.pr_number %} (<https://github.com/{{ remote.github.owner }}/{{ remote.github.repo }}/pull/{{ commit.remote.pr_number }}>){%- endif %}
 {% endfor %}\
 {% endfor %}
 {% endfor %}
```
`.github/dependabot.yml` (vendored, 27 changed lines)

```diff
@@ -4,6 +4,8 @@ updates:
 directory: "/"
 schedule:
 interval: "weekly"
+cooldown:
+default-days: 3
 commit-message:
 prefix: "build(deps)"
 labels:
@@ -12,10 +14,16 @@ updates:
 groups:
 cargo:
 patterns: ["*"]
+ignore:
+- dependency-name: "*"
+update-types: ["version-update:semver-major", "version-update:semver-minor"]
+
 - package-ecosystem: "github-actions"
 directory: "/"
 schedule:
 interval: "weekly"
+cooldown:
+default-days: 3
 commit-message:
 prefix: "ci"
 labels:
@@ -24,3 +32,22 @@ updates:
 groups:
 actions:
 patterns: ["*"]
+
+- package-ecosystem: "npm"
+versioning-strategy: increase
+directories:
+- "/crates/npm"
+- "/crates/eslint"
+- "/lib/binding_web"
+schedule:
+interval: "weekly"
+cooldown:
+default-days: 3
+commit-message:
+prefix: "build(deps)"
+labels:
+- "dependencies"
+- "npm"
+groups:
+npm:
+patterns: ["*"]
```
`.github/scripts/close_spam.js` (vendored, new file, 29 additions)

```diff
@@ -0,0 +1,29 @@
+module.exports = async ({ github, context }) => {
+let target = context.payload.issue;
+if (target) {
+await github.rest.issues.update({
+...context.repo,
+issue_number: target.number,
+state: "closed",
+state_reason: "not_planned",
+title: "[spam]",
+body: "",
+type: null,
+});
+} else {
+target = context.payload.pull_request;
+await github.rest.pulls.update({
+...context.repo,
+pull_number: target.number,
+state: "closed",
+title: "[spam]",
+body: "",
+});
+}
+
+await github.rest.issues.lock({
+...context.repo,
+issue_number: target.number,
+lock_reason: "spam",
+});
+};
```
`.github/scripts/cross.sh` (vendored, deleted, 3 deletions)

```diff
@@ -1,3 +0,0 @@
-#!/bin/bash -eu
-
-exec docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
```
`.github/scripts/make.sh` (vendored, deleted, 9 deletions)

```diff
@@ -1,9 +0,0 @@
-#!/bin/bash -eu
-
-tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
-
-if [[ $BUILD_CMD == cross ]]; then
-cross.sh make CC="$CC" AR="$AR" "$@"
-else
-exec make "$@"
-fi
```
`.github/scripts/tree-sitter.sh` (vendored, deleted, 9 deletions)

```diff
@@ -1,9 +0,0 @@
-#!/bin/bash -eu
-
-tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
-
-if [[ $BUILD_CMD == cross ]]; then
-cross.sh "$CROSS_RUNNER" "$tree_sitter" "$@"
-else
-exec "$tree_sitter" "$@"
-fi
```
`.github/scripts/wasm_stdlib.js` (vendored, new file, 25 additions)

```diff
@@ -0,0 +1,25 @@
+module.exports = async ({ github, context, core }) => {
+if (context.eventName !== 'pull_request') return;
+
+const prNumber = context.payload.pull_request.number;
+const owner = context.repo.owner;
+const repo = context.repo.repo;
+
+const { data: files } = await github.rest.pulls.listFiles({
+owner,
+repo,
+pull_number: prNumber
+});
+
+const changedFiles = files.map(file => file.filename);
+
+const wasmStdLibSrc = 'crates/language/wasm/';
+const dirChanged = changedFiles.some(file => file.startsWith(wasmStdLibSrc));
+
+if (!dirChanged) return;
+
+const wasmStdLibHeader = 'lib/src/wasm/wasm-stdlib.h';
+const requiredChanged = changedFiles.includes(wasmStdLibHeader);
+
+if (!requiredChanged) core.setFailed(`Changes detected in ${wasmStdLibSrc} but ${wasmStdLibHeader} was not modified.`);
+};
```
`.github/workflows/backport.yml` (vendored, 6 changed lines)

```diff
@@ -14,17 +14,17 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6
 
 - name: Create app token
-uses: actions/create-github-app-token@v1
+uses: actions/create-github-app-token@v2
 id: app-token
 with:
 app-id: ${{ vars.BACKPORT_APP }}
 private-key: ${{ secrets.BACKPORT_KEY }}
 
 - name: Create backport PR
-uses: korthout/backport-action@v3
+uses: korthout/backport-action@v4
 with:
 pull_title: "${pull_title}"
 label_pattern: "^ci:backport ([^ ]+)$"
```
`.github/workflows/bindgen.yml` (vendored, 8 changed lines)

```diff
@@ -2,15 +2,21 @@ name: Check Bindgen Output
 
 on:
 pull_request:
+paths:
+- lib/include/tree_sitter/api.h
+- lib/binding_rust/bindings.rs
 push:
 branches: [master]
+paths:
+- lib/include/tree_sitter/api.h
+- lib/binding_rust/bindings.rs
 
 jobs:
 check-bindgen:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6
 
 - name: Set up stable Rust toolchain
 uses: actions-rust-lang/setup-rust-toolchain@v1
```
`.github/workflows/build.yml` (vendored, 303 changed lines)

```diff
@@ -1,10 +1,5 @@
 name: Build & Test
 
-env:
-CARGO_TERM_COLOR: always
-RUSTFLAGS: "-D warnings"
-CROSS_DEBUG: 1
-
 on:
 workflow_call:
 inputs:
@@ -31,38 +26,41 @@ jobs:
 - windows-x86
 - macos-arm64
 - macos-x64
+- wasm32
 
 include:
 # When adding a new `target`:
 # 1. Define a new platform alias above
-# 2. Add a new record to the matrix map in `cli/npm/install.js`
-- { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
-- { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
-- { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-20.04 , features: wasm } # See #2272
-- { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
-- { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
-- { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
-- { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , features: wasm }
-- { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
-- { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-14 , features: wasm }
-- { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-13 , features: wasm }
+# 2. Add a new record to the matrix map in `crates/cli/npm/install.js`
+- { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-24.04-arm }
+- { platform: linux-arm , target: armv7-unknown-linux-gnueabihf , os: ubuntu-24.04-arm }
+- { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-24.04 }
+- { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-24.04 }
+- { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-24.04 }
+- { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-11-arm }
+- { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-2025 }
+- { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-2025 }
+- { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-15 }
+- { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-15-intel }
+- { platform: wasm32 , target: wasm32-unknown-unknown , os: ubuntu-24.04 }
 
-# Cross compilers for C library
-- { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar }
-- { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar }
-- { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar }
-- { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar }
+# Extra features
+- { platform: linux-arm64 , features: wasm }
+- { platform: linux-x64 , features: wasm }
+- { platform: macos-arm64 , features: wasm }
+- { platform: macos-x64 , features: wasm }
 
-# Prevent race condition (see #2041)
-- { platform: windows-x64 , rust-test-threads: 1 }
-- { platform: windows-x86 , rust-test-threads: 1 }
+# Cross-compilation
+- { platform: linux-arm , cross: true }
+- { platform: linux-x86 , cross: true }
+- { platform: linux-powerpc64 , cross: true }
 
-# Can't natively run CLI on Github runner's host
-- { platform: windows-arm64 , no-run: true }
+# Compile-only
+- { platform: wasm32 , no-run: true }
 
 env:
-BUILD_CMD: cargo
-SUFFIX: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}
+CARGO_TERM_COLOR: always
+RUSTFLAGS: -D warnings
 
 defaults:
 run:
@@ -70,13 +68,28 @@
 
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6
 
-- name: Read Emscripten version
-run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<cli/loader/emscripten-version)" >> $GITHUB_ENV
+- name: Set up cross-compilation
+if: matrix.cross
+run: |
+for target in armv7-unknown-linux-gnueabihf i686-unknown-linux-gnu powerpc64-unknown-linux-gnu; do
+camel_target=${target//-/_}; target_cc=${target/-unknown/}
+printf 'CC_%s=%s\n' "$camel_target" "${target_cc/v7/}-gcc"
+printf 'AR_%s=%s\n' "$camel_target" "${target_cc/v7/}-ar"
+printf 'CARGO_TARGET_%s_LINKER=%s\n' "${camel_target^^}" "${target_cc/v7/}-gcc"
+done >> $GITHUB_ENV
+{
+printf 'CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER=qemu-arm -L /usr/arm-linux-gnueabihf\n'
+printf 'CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER=qemu-ppc64 -L /usr/powerpc64-linux-gnu\n'
+} >> $GITHUB_ENV
+
+- name: Get emscripten version
+if: contains(matrix.features, 'wasm')
+run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<crates/loader/emscripten-version)" >> $GITHUB_ENV
 
 - name: Install Emscripten
-if: ${{ !matrix.no-run && !matrix.use-cross }}
+if: contains(matrix.features, 'wasm')
 uses: mymindstorm/setup-emsdk@v14
 with:
 version: ${{ env.EMSCRIPTEN_VERSION }}
@@ -86,58 +99,84 @@
 with:
 target: ${{ matrix.target }}
 
-- name: Install cross
-if: ${{ matrix.use-cross }}
-run: cargo install cross --git https://github.com/cross-rs/cross
-
-- name: Configure cross
-if: ${{ matrix.use-cross }}
+- name: Install cross-compilation toolchain
+if: matrix.cross
 run: |
-printf '%s\n' > Cross.toml \
-'[target.${{ matrix.target }}]' \
-'image = "ghcr.io/cross-rs/${{ matrix.target }}:edge"' \
-'[build]' \
-'pre-build = [' \
-' "dpkg --add-architecture $CROSS_DEB_ARCH",' \
-' "curl -fsSL https://deb.nodesource.com/setup_22.x | bash -",' \
-' "apt-get update && apt-get -y install libssl-dev nodejs"' \
-']'
-cat - Cross.toml <<< 'Cross.toml:'
-printf '%s\n' >> $GITHUB_ENV \
-"CROSS_CONFIG=$PWD/Cross.toml" \
-"CROSS_IMAGE=ghcr.io/cross-rs/${{ matrix.target }}:edge"
-
-- name: Set up environment
-env:
-RUST_TEST_THREADS: ${{ matrix.rust-test-threads }}
-USE_CROSS: ${{ matrix.use-cross }}
-TARGET: ${{ matrix.target }}
-CC: ${{ matrix.cc }}
-AR: ${{ matrix.ar }}
-run: |
-PATH="$PWD/.github/scripts:$PATH"
-printf '%s/.github/scripts\n' "$PWD" >> $GITHUB_PATH
-
-printf '%s\n' >> $GITHUB_ENV \
-'TREE_SITTER=tree-sitter.sh' \
-"TARGET=$TARGET" \
-"ROOT=$PWD"
-
-[[ -n $RUST_TEST_THREADS ]] && \
-printf 'RUST_TEST_THREADS=%s\n' "$RUST_TEST_THREADS" >> $GITHUB_ENV
-
-[[ -n $CC ]] && printf 'CC=%s\n' "$CC" >> $GITHUB_ENV
-[[ -n $AR ]] && printf 'AR=%s\n' "$AR" >> $GITHUB_ENV
-
-if [[ $USE_CROSS == true ]]; then
-printf 'BUILD_CMD=cross\n' >> $GITHUB_ENV
-runner=$(cross.sh bash -c "env | sed -n 's/^CARGO_TARGET_.*_RUNNER=//p'")
-[[ -n $runner ]] && printf 'CROSS_RUNNER=%s\n' "$runner" >> $GITHUB_ENV
+sudo apt-get update -qy
+if [[ $PLATFORM == linux-arm ]]; then
+sudo apt-get install -qy {binutils,gcc}-arm-linux-gnueabihf qemu-user
+elif [[ $PLATFORM == linux-x86 ]]; then
+sudo apt-get install -qy {binutils,gcc}-i686-linux-gnu
+elif [[ $PLATFORM == linux-powerpc64 ]]; then
+sudo apt-get install -qy {binutils,gcc}-powerpc64-linux-gnu qemu-user
 fi
+env:
+PLATFORM: ${{ matrix.platform }}
 
-- name: Build wasmtime library
-if: ${{ !matrix.use-cross && contains(matrix.features, 'wasm') }}
+- name: Install MinGW and Clang (Windows x64 MSYS2)
+if: matrix.platform == 'windows-x64'
+uses: msys2/setup-msys2@v2
+with:
+update: true
+install: |
+mingw-w64-x86_64-toolchain
+mingw-w64-x86_64-clang
+mingw-w64-x86_64-make
+mingw-w64-x86_64-cmake
+
+# TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
+# the `mismatched-lifetime-syntaxes` lint
+- name: Build wasmtime library (Windows x64 MSYS2)
+if: contains(matrix.features, 'wasm') && matrix.platform == 'windows-x64'
 run: |
+mkdir -p target
+WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
+jq -r '.packages[] | select(.name == "wasmtime-c-api-impl") | .version')
+curl -LSs "$WASMTIME_REPO/archive/refs/tags/v${WASMTIME_VERSION}.tar.gz" | tar xzf - -C target
+cd target/wasmtime-${WASMTIME_VERSION}
+cmake -S crates/c-api -B target/c-api \
+-DCMAKE_INSTALL_PREFIX="$PWD/artifacts" \
+-DWASMTIME_DISABLE_ALL_FEATURES=ON \
+-DWASMTIME_FEATURE_CRANELIFT=ON \
+-DWASMTIME_TARGET='x86_64-pc-windows-gnu'
+cmake --build target/c-api && cmake --install target/c-api
+printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
+env:
+WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
+RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow
+
+- name: Build C library (Windows x64 MSYS2 CMake)
+if: matrix.platform == 'windows-x64'
+shell: msys2 {0}
+run: |
+cmake -G Ninja -S . -B build/static \
+-DBUILD_SHARED_LIBS=OFF \
+-DCMAKE_BUILD_TYPE=Debug \
+-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
+-DTREE_SITTER_FEATURE_WASM=$WASM \
+-DCMAKE_C_COMPILER=clang
+cmake --build build/static
+
+cmake -G Ninja -S . -B build/shared \
+-DBUILD_SHARED_LIBS=ON \
+-DCMAKE_BUILD_TYPE=Debug \
+-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
+-DTREE_SITTER_FEATURE_WASM=$WASM \
+-DCMAKE_C_COMPILER=clang
+cmake --build build/shared
+rm -rf \
+build/{static,shared} \
+"${CMAKE_PREFIX_PATH}/artifacts" \
+target/wasmtime-${WASMTIME_VERSION}
+env:
+WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
+
+# TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
+# the `mismatched-lifetime-syntaxes` lint
+- name: Build wasmtime library
+if: contains(matrix.features, 'wasm')
+run: |
+mkdir -p target
 WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
 jq -r '.packages[] | select(.name == "wasmtime-c-api-impl") | .version')
 curl -LSs "$WASMTIME_REPO/archive/refs/tags/v${WASMTIME_VERSION}.tar.gz" | tar xzf - -C target
@@ -151,86 +190,122 @@
 printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
 env:
 WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
+RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow
 
 - name: Build C library (make)
-if: ${{ runner.os != 'Windows' }}
-run: make.sh -j CFLAGS="$CFLAGS"
+if: runner.os != 'Windows'
+run: |
+if [[ $PLATFORM == linux-arm ]]; then
+CC=arm-linux-gnueabihf-gcc; AR=arm-linux-gnueabihf-ar
+elif [[ $PLATFORM == linux-x86 ]]; then
+CC=i686-linux-gnu-gcc; AR=i686-linux-gnu-ar
+elif [[ $PLATFORM == linux-powerpc64 ]]; then
+CC=powerpc64-linux-gnu-gcc; AR=powerpc64-linux-gnu-ar
+else
+CC=gcc; AR=ar
+fi
+make -j CFLAGS="$CFLAGS" CC=$CC AR=$AR
 env:
+PLATFORM: ${{ matrix.platform }}
 CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 
 - name: Build C library (CMake)
-if: ${{ !matrix.use-cross }}
+if: "!matrix.cross"
 run: |
-cmake -S lib -B build/static \
+cmake -S . -B build/static \
 -DBUILD_SHARED_LIBS=OFF \
 -DCMAKE_BUILD_TYPE=Debug \
 -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
 -DTREE_SITTER_FEATURE_WASM=$WASM
 cmake --build build/static --verbose
 
-cmake -S lib -B build/shared \
+cmake -S . -B build/shared \
 -DBUILD_SHARED_LIBS=ON \
 -DCMAKE_BUILD_TYPE=Debug \
 -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
 -DTREE_SITTER_FEATURE_WASM=$WASM
 cmake --build build/shared --verbose
 env:
-CC: ${{ contains(matrix.target, 'linux') && 'clang' || '' }}
+CC: ${{ contains(matrix.platform, 'linux') && 'clang' || '' }}
 WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
 
-- name: Build wasm library
-# No reason to build on the same Github runner hosts many times
-if: ${{ !matrix.no-run && !matrix.use-cross }}
-run: $BUILD_CMD run -p xtask -- build-wasm
+- name: Build Wasm library
+if: contains(matrix.features, 'wasm')
+shell: bash
+run: |
+cd lib/binding_web
+npm ci
+CJS=true npm run build
+CJS=true npm run build:debug
+npm run build
+npm run build:debug
+
+- name: Check no_std builds
+if: inputs.run-test && !matrix.no-run
+working-directory: lib
+shell: bash
+run: cargo check --no-default-features --target='${{ matrix.target }}'
 
 - name: Build target
-run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.features }}
+run: cargo build --release --target='${{ matrix.target }}' --features='${{ matrix.features }}' $PACKAGE
+env:
+PACKAGE: ${{ matrix.platform == 'wasm32' && '-p tree-sitter' || '' }}
 
 - name: Cache fixtures
 id: cache
-if: ${{ !matrix.no-run && inputs.run-test }}
+if: inputs.run-test && !matrix.no-run
 uses: ./.github/actions/cache
 
 - name: Fetch fixtures
-if: ${{ !matrix.no-run && inputs.run-test }}
-run: $BUILD_CMD run -p xtask -- fetch-fixtures
+if: inputs.run-test && !matrix.no-run
+run: cargo run -p xtask --target='${{ matrix.target }}' -- fetch-fixtures
 
 - name: Generate fixtures
-if: ${{ !matrix.no-run && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
-run: $BUILD_CMD run -p xtask -- generate-fixtures
+if: inputs.run-test && !matrix.no-run && steps.cache.outputs.cache-hit != 'true'
+run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures
 
 - name: Generate Wasm fixtures
-if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
-run: $BUILD_CMD run -p xtask -- generate-fixtures --wasm
+if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm') && steps.cache.outputs.cache-hit != 'true'
+run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures --wasm
 
 - name: Run main tests
-if: ${{ !matrix.no-run && inputs.run-test }}
-run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.features }}
+if: inputs.run-test && !matrix.no-run
+run: cargo test --target='${{ matrix.target }}' --features='${{ matrix.features }}'
 
-- name: Run wasm tests
-if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
-run: $BUILD_CMD run -p xtask -- test-wasm
-
-- name: Run benchmarks
-# Cross-compiled benchmarks are pointless
-if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
-run: $BUILD_CMD bench benchmark -p tree-sitter-cli --target=${{ matrix.target }}
+- name: Run Wasm tests
+if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm')
+run: cargo run -p xtask --target='${{ matrix.target }}' -- test-wasm
 
 - name: Upload CLI artifact
-uses: actions/upload-artifact@v4
+if: "!matrix.no-run"
+uses: actions/upload-artifact@v6
 with:
 name: tree-sitter.${{ matrix.platform }}
-path: target/${{ matrix.target }}/release/tree-sitter${{ env.SUFFIX }}
+path: target/${{ matrix.target }}/release/tree-sitter${{ contains(matrix.target, 'windows') && '.exe' || '' }}
 if-no-files-found: error
 retention-days: 7
 
 - name: Upload Wasm artifacts
-if: ${{ matrix.platform == 'linux-x64' }}
-uses: actions/upload-artifact@v4
+if: matrix.platform == 'linux-x64'
+uses: actions/upload-artifact@v6
 with:
 name: tree-sitter.wasm
 path: |
-lib/binding_web/tree-sitter.js
-lib/binding_web/tree-sitter.wasm
+lib/binding_web/web-tree-sitter.js
+lib/binding_web/web-tree-sitter.js.map
+lib/binding_web/web-tree-sitter.cjs
+lib/binding_web/web-tree-sitter.cjs.map
+lib/binding_web/web-tree-sitter.wasm
+lib/binding_web/web-tree-sitter.wasm.map
+lib/binding_web/debug/web-tree-sitter.cjs
+lib/binding_web/debug/web-tree-sitter.cjs.map
+lib/binding_web/debug/web-tree-sitter.js
+lib/binding_web/debug/web-tree-sitter.js.map
+lib/binding_web/debug/web-tree-sitter.wasm
+lib/binding_web/debug/web-tree-sitter.wasm.map
+lib/binding_web/lib/*.c
+lib/binding_web/lib/*.h
+lib/binding_web/lib/*.ts
+lib/binding_web/src/*.ts
 if-no-files-found: error
 retention-days: 7
```
`.github/workflows/ci.yml` (vendored, 26 changed lines)

```diff
@@ -2,8 +2,20 @@ name: CI
 
 on:
 pull_request:
+paths-ignore:
+- docs/**
+- "**/README.md"
+- CONTRIBUTING.md
+- LICENSE
+- cli/src/templates
 push:
 branches: [master]
+paths-ignore:
+- docs/**
+- "**/README.md"
+- CONTRIBUTING.md
+- LICENSE
+- cli/src/templates
 
 concurrency:
 group: ${{ github.workflow }}-${{ github.ref }}
@@ -14,24 +26,24 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6
 
 - name: Set up stable Rust toolchain
 uses: actions-rust-lang/setup-rust-toolchain@v1
 with:
 toolchain: stable
-
-- name: Set up nightly Rust toolchain
-uses: actions-rust-lang/setup-rust-toolchain@v1
-with:
-toolchain: nightly
 components: clippy, rustfmt
 
 - name: Lint files
-run: make lint
+run: |
+make lint
+make lint-web
 
 sanitize:
 uses: ./.github/workflows/sanitize.yml
 
 build:
 uses: ./.github/workflows/build.yml
+
+check-wasm-stdlib:
+uses: ./.github/workflows/wasm_stdlib.yml
```
`.github/workflows/docs.yml` (vendored, new file, 50 additions)

```diff
@@ -0,0 +1,50 @@
+name: Deploy Docs
+on:
+push:
+branches: [master]
+paths: [docs/**]
+workflow_dispatch:
+
+jobs:
+deploy-docs:
+runs-on: ubuntu-latest
+
+permissions:
+contents: write
+pages: write
+id-token: write
+
+steps:
+- name: Checkout repository
+uses: actions/checkout@v6
+
+- name: Set up Rust
+uses: actions-rust-lang/setup-rust-toolchain@v1
+
+- name: Install mdbook
+env:
+GH_TOKEN: ${{ github.token }}
+run: |
+jq_expr='.assets[] | select(.name | contains("x86_64-unknown-linux-gnu")) | .browser_download_url'
+url=$(gh api repos/rust-lang/mdbook/releases/tags/v0.4.52 --jq "$jq_expr")
+mkdir mdbook
+curl -sSL "$url" | tar -xz -C mdbook
+printf '%s/mdbook\n' "$PWD" >> "$GITHUB_PATH"
+
+- name: Install mdbook-admonish
+run: cargo install mdbook-admonish
+
+- name: Build Book
+run: mdbook build docs
+
+- name: Setup Pages
+uses: actions/configure-pages@v5
+
+- name: Upload artifact
+uses: actions/upload-pages-artifact@v4
+with:
+path: docs/book
+
+- name: Deploy to GitHub Pages
+id: deployment
+uses: actions/deploy-pages@v4
```
`.github/workflows/nvim_ts.yml` (vendored, 13 changed lines)

```diff
@@ -3,7 +3,10 @@ name: nvim-treesitter parser tests
 on:
 pull_request:
 paths:
-- 'cli/**'
+- 'crates/cli/**'
+- 'crates/config/**'
+- 'crates/generate/**'
+- 'crates/loader/**'
 - '.github/workflows/nvim_ts.yml'
 workflow_dispatch:
 
@@ -13,7 +16,7 @@ concurrency:
 
 jobs:
 check_compilation:
-timeout-minutes: 20
+timeout-minutes: 30
 strategy:
 fail-fast: false
 matrix:
@@ -25,9 +28,9 @@
 NVIM: ${{ matrix.os == 'windows-latest' && 'nvim-win64\\bin\\nvim.exe' || 'nvim' }}
 NVIM_TS_DIR: nvim-treesitter
 steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@v6
 
-- uses: actions/checkout@v4
+- uses: actions/checkout@v6
 with:
 repository: nvim-treesitter/nvim-treesitter
 path: ${{ env.NVIM_TS_DIR }}
@@ -55,7 +58,7 @@
 
 - if: matrix.type == 'build'
 name: Compile parsers
-run: $NVIM -l ./scripts/install-parsers.lua
+run: $NVIM -l ./scripts/install-parsers.lua --max-jobs=10
 working-directory: ${{ env.NVIM_TS_DIR }}
 shell: bash
```
63
.github/workflows/release.yml
vendored
63
.github/workflows/release.yml
vendored
|
|
@ -17,13 +17,15 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: build
|
needs: build
|
||||||
permissions:
|
permissions:
|
||||||
|
id-token: write
|
||||||
|
+      attestations: write
       contents: write
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Download build artifacts
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v7
         with:
           path: artifacts

@@ -33,9 +35,13 @@ jobs:

       - name: Prepare release artifacts
         run: |
-          mkdir -p target
-          mv artifacts/tree-sitter.wasm/* target/
+          mkdir -p target web
+          mv artifacts/tree-sitter.wasm/* web/

+          tar -czf target/web-tree-sitter.tar.gz -C web .

           rm -r artifacts/tree-sitter.wasm

           for platform in $(cd artifacts; ls | sed 's/^tree-sitter\.//'); do
             exe=$(ls artifacts/tree-sitter.$platform/tree-sitter*)
             gzip --stdout --name $exe > target/tree-sitter-$platform.gz

@@ -43,47 +49,65 @@ jobs:
           rm -rf artifacts
           ls -l target/

+      - name: Generate attestations
+        uses: actions/attest-build-provenance@v3
+        with:
+          subject-path: |
+            target/tree-sitter-*.gz
+            target/web-tree-sitter.tar.gz

       - name: Create release
         run: |-
-          gh release create \
+          gh release create $GITHUB_REF_NAME \
             target/tree-sitter-*.gz \
-            target/tree-sitter.wasm \
-            target/tree-sitter.js
+            target/web-tree-sitter.tar.gz
         env:
           GH_TOKEN: ${{ github.token }}

   crates_io:
     name: Publish packages to Crates.io
     runs-on: ubuntu-latest
+    environment: crates
+    permissions:
+      id-token: write
+      contents: read
     needs: release
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Set up Rust
         uses: actions-rust-lang/setup-rust-toolchain@v1

+      - name: Set up registry token
+        id: auth
+        uses: rust-lang/crates-io-auth-action@v1

       - name: Publish crates to Crates.io
         uses: katyo/publish-crates@v2
         with:
-          registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
+          registry-token: ${{ steps.auth.outputs.token }}

   npm:
     name: Publish packages to npmjs.com
     runs-on: ubuntu-latest
+    environment: npm
+    permissions:
+      id-token: write
+      contents: read
     needs: release
     strategy:
       fail-fast: false
       matrix:
-        directory: [cli/npm, lib/binding_web]
+        directory: [crates/cli/npm, lib/binding_web]
     steps:
-      - name: CHeckout repository
-        uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v6

       - name: Set up Node
-        uses: actions/setup-node@v4
+        uses: actions/setup-node@v6
         with:
-          node-version: 20
+          node-version: 24
           registry-url: https://registry.npmjs.org

       - name: Set up Rust

@@ -91,10 +115,15 @@ jobs:

       - name: Build wasm
         if: matrix.directory == 'lib/binding_web'
-        run: cargo xtask build-wasm
+        run: |
+          cd ${{ matrix.directory }}
+          npm ci
+          npm run build
+          npm run build:debug
+          CJS=true npm run build
+          CJS=true npm run build:debug
+          npm run build:dts

       - name: Publish to npmjs.com
         working-directory: ${{ matrix.directory }}
         run: npm publish
-        env:
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
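The release job above now bundles the web binding as a tarball and attaches build provenance to every uploaded asset. As a hedged sketch of how a consumer could check that provenance with the GitHub CLI (the asset name is illustrative, not taken from this diff):

    # Download one release asset and verify its provenance attestation.
    # tree-sitter-linux-x64.gz is an assumed example name; any tree-sitter-<platform>.gz applies.
    gh release download v0.27.0 --repo tree-sitter/tree-sitter --pattern 'tree-sitter-linux-x64.gz'
    gh attestation verify tree-sitter-linux-x64.gz --repo tree-sitter/tree-sitter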
.github/workflows/response.yml (8 changes, vendored)

@@ -17,13 +17,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           sparse-checkout: .github/scripts/close_unresponsive.js
           sparse-checkout-cone-mode: false

       - name: Run script
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             const script = require('./.github/scripts/close_unresponsive.js')

@@ -35,13 +35,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           sparse-checkout: .github/scripts/remove_response_label.js
           sparse-checkout-cone-mode: false

       - name: Run script
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             const script = require('./.github/scripts/remove_response_label.js')
.github/workflows/reviewers_remove.yml (4 changes, vendored)

@@ -12,13 +12,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6
         with:
           sparse-checkout: .github/scripts/reviewers_remove.js
           sparse-checkout-cone-mode: false

       - name: Run script
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             const script = require('./.github/scripts/reviewers_remove.js')
.github/workflows/sanitize.yml (2 changes, vendored)

@@ -15,7 +15,7 @@ jobs:
       TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Install UBSAN library
         run: sudo apt-get update -y && sudo apt-get install -y libubsan1
.github/workflows/spam.yml (29 changes, vendored, new file)

@@ -0,0 +1,29 @@
+name: Close as spam
+
+on:
+  issues:
+    types: [labeled]
+  pull_request_target:
+    types: [labeled]
+
+permissions:
+  issues: write
+  pull-requests: write
+
+jobs:
+  spam:
+    runs-on: ubuntu-latest
+    if: github.event.label.name == 'spam'
+    steps:
+      - name: Checkout script
+        uses: actions/checkout@v6
+        with:
+          sparse-checkout: .github/scripts/close_spam.js
+          sparse-checkout-cone-mode: false
+
+      - name: Run script
+        uses: actions/github-script@v8
+        with:
+          script: |
+            const script = require('./.github/scripts/close_spam.js')
+            await script({github, context})
.github/workflows/wasm_exports.yml (41 changes, vendored, new file)

@@ -0,0 +1,41 @@
+name: Check Wasm Exports
+
+on:
+  pull_request:
+    paths:
+      - lib/include/tree_sitter/api.h
+      - lib/binding_web/**
+      - xtask/src/**
+  push:
+    branches: [master]
+    paths:
+      - lib/include/tree_sitter/api.h
+      - lib/binding_rust/bindings.rs
+      - CMakeLists.txt
+
+jobs:
+  check-wasm-exports:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v6
+
+      - name: Set up stable Rust toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          toolchain: stable
+
+      - name: Install wasm-objdump
+        run: sudo apt-get update -y && sudo apt-get install -y wabt
+
+      - name: Build C library (make)
+        run: make -j CFLAGS="$CFLAGS"
+        env:
+          CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
+
+      - name: Build Wasm Library
+        working-directory: lib/binding_web
+        run: npm ci && npm run build:debug
+
+      - name: Check Wasm exports
+        run: cargo xtask check-wasm-exports
.github/workflows/wasm_stdlib.yml (19 changes, vendored, new file)

@@ -0,0 +1,19 @@
+name: Check Wasm Stdlib build
+
+on:
+  workflow_call:
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v6
+
+      - name: Check directory changes
+        uses: actions/github-script@v8
+        with:
+          script: |
+            const scriptPath = `${process.env.GITHUB_WORKSPACE}/.github/scripts/wasm_stdlib.js`;
+            const script = require(scriptPath);
+            return script({ github, context, core });
.gitignore (6 changes, vendored)

@@ -1,10 +1,12 @@
 log*.html
+.direnv

 .idea
 *.xcodeproj
 .vscode
 .cache
 .zig-cache
+.direnv

 profile*
 fuzz-results

@@ -12,7 +14,6 @@ test/fuzz/out
 test/fixtures/grammars/*
 !test/fixtures/grammars/.gitkeep

-package-lock.json
 node_modules

 docs/assets/js/tree-sitter.js

@@ -25,6 +26,7 @@ docs/assets/js/tree-sitter.js
 *.dylib
 *.so
 *.so.[0-9]*
+*.dll
 *.o
 *.obj
 *.exp

@@ -34,3 +36,5 @@ docs/assets/js/tree-sitter.js
 .build
 build
 zig-*
+
+/result
.zed/settings.json (11 changes, new file)

@@ -0,0 +1,11 @@
+{
+  "lsp": {
+    "rust-analyzer": {
+      "initialization_options": {
+        "cargo": {
+          "features": "all"
+        }
+      }
+    }
+  }
+}
CMakeLists.txt

@@ -1,7 +1,7 @@
 cmake_minimum_required(VERSION 3.13)

 project(tree-sitter
-        VERSION "0.25.0"
+        VERSION "0.27.0"
         DESCRIPTION "An incremental parsing system for programming tools"
         HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/"
         LANGUAGES C)

@@ -11,15 +11,15 @@ option(TREE_SITTER_FEATURE_WASM "Enable the Wasm feature" OFF)
 option(AMALGAMATED "Build using an amalgamated source" OFF)

 if(AMALGAMATED)
-    set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
+    set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/lib/src/lib.c")
 else()
-    file(GLOB TS_SOURCE_FILES src/*.c)
-    list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
+    file(GLOB TS_SOURCE_FILES lib/src/*.c)
+    list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/lib/src/lib.c")
 endif()

 add_library(tree-sitter ${TS_SOURCE_FILES})

-target_include_directories(tree-sitter PRIVATE src src/wasm include)
+target_include_directories(tree-sitter PRIVATE lib/src lib/src/wasm PUBLIC lib/include)

 if(MSVC)
     target_compile_options(tree-sitter PRIVATE

@@ -81,15 +81,15 @@ set_target_properties(tree-sitter
     SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}"
     DEFINE_SYMBOL "")

-target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE)
+target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE _BSD_SOURCE _DARWIN_C_SOURCE)

-configure_file(tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)

 include(GNUInstallDirs)

-install(FILES include/tree_sitter/api.h
+configure_file(lib/tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)
+
+install(FILES lib/include/tree_sitter/api.h
         DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/tree_sitter")
 install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc"
-        DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig")
+        DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
 install(TARGETS tree-sitter
         LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
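The CMake build above now resolves sources under lib/ from the repository root and installs the pkg-config file to the library directory. A minimal usage sketch with the options defined in this CMakeLists.txt (prefix and option values are illustrative):

    # Configure, build, and install using the options shown in the diff.
    cmake -B build -DAMALGAMATED=OFF -DTREE_SITTER_FEATURE_WASM=OFF
    cmake --build build
    cmake --install build --prefix /usr/local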
CONTRIBUTING.md

@@ -1 +1 @@
-See [section-6-contributing.md](./docs/section-6-contributing.md)
+See [docs/src/6-contributing.md](./docs/src/6-contributing.md)
Cargo.lock (1994 changes, generated): file diff suppressed because it is too large.
Cargo.toml (118 changes)

@@ -1,22 +1,26 @@
 [workspace]
-default-members = ["cli"]
+default-members = ["crates/cli"]
 members = [
-  "cli",
-  "cli/config",
-  "cli/loader",
+  "crates/cli",
+  "crates/config",
+  "crates/generate",
+  "crates/highlight",
+  "crates/loader",
+  "crates/tags",
+  "crates/xtask",
+  "crates/language",
   "lib",
-  "lib/language",
-  "tags",
-  "highlight",
-  "xtask",
 ]
 resolver = "2"

 [workspace.package]
-version = "0.25.0"
+version = "0.27.0"
-authors = ["Max Brunsfeld <maxbrunsfeld@gmail.com>"]
+authors = [
+  "Max Brunsfeld <maxbrunsfeld@gmail.com>",
+  "Amaan Qureshi <amaanq12@gmail.com>",
+]
 edition = "2021"
-rust-version = "1.74.1"
+rust-version = "1.85"
 homepage = "https://tree-sitter.github.io/tree-sitter"
 repository = "https://github.com/tree-sitter/tree-sitter"
 license = "MIT"

@@ -56,6 +60,8 @@ missing_errors_doc = "allow"
 missing_panics_doc = "allow"
 module_name_repetitions = "allow"
 multiple_crate_versions = "allow"
+needless_for_each = "allow"
+obfuscated_if_else = "allow"
 option_if_let_else = "allow"
 or_fun_call = "allow"
 range_plus_one = "allow"

@@ -72,6 +78,9 @@ unnecessary_wraps = "allow"
 unused_self = "allow"
 used_underscore_items = "allow"

+[workspace.lints.rust]
+mismatched_lifetime_syntaxes = "allow"
+
 [profile.optimize]
 inherits = "release"
 strip = true # Automatically strip symbols from the binary.

@@ -93,61 +102,62 @@ incremental = true
 codegen-units = 256

 [workspace.dependencies]
-anstyle = "1.0.8"
-anyhow = "1.0.89"
-bstr = "1.11.0"
-cc = "1.2.1"
-clap = { version = "4.5.21", features = [
+ansi_colours = "1.2.3"
+anstyle = "1.0.13"
+anyhow = "1.0.100"
+bstr = "1.12.0"
+cc = "1.2.53"
+clap = { version = "4.5.54", features = [
   "cargo",
   "derive",
   "env",
   "help",
+  "string",
   "unstable-styles",
 ] }
-clap_complete = "4.5.38"
-ctor = "0.2.8"
-ctrlc = { version = "3.4.5", features = ["termination"] }
+clap_complete = "4.5.65"
+clap_complete_nushell = "4.5.10"
+crc32fast = "1.5.0"
+ctor = "0.2.9"
+ctrlc = { version = "3.5.0", features = ["termination"] }
 dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
-dirs = "5.0.1"
-filetime = "0.2.25"
-fs4 = "0.9.1"
-git2 = "0.19.0"
-glob = "0.3.1"
+etcetera = "0.11.0"
+fs4 = "0.12.0"
+glob = "0.3.3"
 heck = "0.5.0"
 html-escape = "0.2.13"
-indexmap = "2.5.0"
-indoc = "2.0.5"
-lazy_static = "1.5.0"
-libloading = "0.8.5"
-log = { version = "0.4.22", features = ["std"] }
-memchr = "2.7.4"
-once_cell = "1.19.0"
-path-slash = "0.2.1"
+indexmap = "2.12.1"
+indoc = "2.0.6"
+libloading = "0.9.0"
+log = { version = "0.4.28", features = ["std"] }
+memchr = "2.7.6"
+once_cell = "1.21.3"
 pretty_assertions = "1.4.1"
 rand = "0.8.5"
-regex = "1.10.6"
-regex-syntax = "0.8.4"
-rustc-hash = "2.0.0"
-semver = { version = "1.0.23", features = ["serde"] }
-serde = { version = "1.0.215", features = ["derive"] }
-serde_derive = "1.0.210"
-serde_json = { version = "1.0.133", features = ["preserve_order"] }
-similar = "2.6.0"
-smallbitvec = "2.5.3"
+regex = "1.11.3"
+regex-syntax = "0.8.6"
+rustc-hash = "2.1.1"
+schemars = "1.0.5"
+semver = { version = "1.0.27", features = ["serde"] }
+serde = { version = "1.0.219", features = ["derive"] }
+serde_json = { version = "1.0.149", features = ["preserve_order"] }
+similar = "2.7.0"
+smallbitvec = "2.6.0"
 streaming-iterator = "0.1.9"
-tempfile = "3.14.0"
-thiserror = "1.0.69"
+tempfile = "3.23.0"
+thiserror = "2.0.17"
 tiny_http = "0.12.0"
-toml = "0.8.19"
-unindent = "0.2.3"
-url = { version = "2.5.2", features = ["serde"] }
+topological-sort = "0.2.2"
+unindent = "0.2.4"
 walkdir = "2.5.0"
-wasmparser = "0.218.0"
-webbrowser = "1.0.2"
+wasmparser = "0.243.0"
+webbrowser = "1.0.5"

-tree-sitter = { version = "0.25.0", path = "./lib" }
-tree-sitter-generate = { version = "0.25.0", path = "./cli/generate" }
-tree-sitter-loader = { version = "0.25.0", path = "./cli/loader" }
-tree-sitter-config = { version = "0.25.0", path = "./cli/config" }
-tree-sitter-highlight = { version = "0.25.0", path = "./highlight" }
-tree-sitter-tags = { version = "0.25.0", path = "./tags" }
+tree-sitter = { version = "0.27.0", path = "./lib" }
+tree-sitter-generate = { version = "0.27.0", path = "./crates/generate" }
+tree-sitter-loader = { version = "0.27.0", path = "./crates/loader" }
+tree-sitter-config = { version = "0.27.0", path = "./crates/config" }
+tree-sitter-highlight = { version = "0.27.0", path = "./crates/highlight" }
+tree-sitter-tags = { version = "0.27.0", path = "./crates/tags" }
+
+tree-sitter-language = { version = "0.1", path = "./crates/language" }
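With the workspace members relocated under crates/, individual packages build the same way at their new paths; a sketch (the package names below are assumptions inferred from the dependency table, not stated in this diff):

    # Build individual workspace members after the crates/ move.
    cargo build -p tree-sitter-cli        # assumed package name of crates/cli
    cargo build -p tree-sitter-generate   # crates/generate
    cargo build -p tree-sitter-loader     # crates/loader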
LICENSE (2 changes)

@@ -1,6 +1,6 @@
 The MIT License (MIT)

-Copyright (c) 2018-2024 Max Brunsfeld
+Copyright (c) 2018 Max Brunsfeld

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
Makefile (57 changes)

@@ -1,8 +1,4 @@
-ifeq ($(OS),Windows_NT)
-$(error Windows is not supported)
-endif
-
-VERSION := 0.25.0
+VERSION := 0.27.0
 DESCRIPTION := An incremental parsing system for programming tools
 HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/

@@ -10,6 +6,7 @@ HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
 PREFIX ?= /usr/local
 INCLUDEDIR ?= $(PREFIX)/include
 LIBDIR ?= $(PREFIX)/lib
+BINDIR ?= $(PREFIX)/bin
 PCLIBDIR ?= $(LIBDIR)/pkgconfig

 # collect sources

@@ -27,7 +24,7 @@ OBJ := $(SRC:.c=.o)
 ARFLAGS := rcs
 CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
-override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE
+override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_DARWIN_C_SOURCE
 override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include

 # ABI versioning

@@ -35,20 +32,25 @@ SONAME_MAJOR := $(word 1,$(subst ., ,$(VERSION)))
 SONAME_MINOR := $(word 2,$(subst ., ,$(VERSION)))

 # OS-specific bits
-ifneq ($(findstring darwin,$(shell $(CC) -dumpmachine)),)
+MACHINE := $(shell $(CC) -dumpmachine)
+
+ifneq ($(findstring darwin,$(MACHINE)),)
 SOEXT = dylib
 SOEXTVER_MAJOR = $(SONAME_MAJOR).$(SOEXT)
 SOEXTVER = $(SONAME_MAJOR).$(SONAME_MINOR).$(SOEXT)
 LINKSHARED += -dynamiclib -Wl,-install_name,$(LIBDIR)/libtree-sitter.$(SOEXTVER)
+else ifneq ($(findstring mingw32,$(MACHINE)),)
+SOEXT = dll
+LINKSHARED += -s -shared -Wl,--out-implib,libtree-sitter.dll.a
 else
 SOEXT = so
 SOEXTVER_MAJOR = $(SOEXT).$(SONAME_MAJOR)
 SOEXTVER = $(SOEXT).$(SONAME_MAJOR).$(SONAME_MINOR)
 LINKSHARED += -shared -Wl,-soname,libtree-sitter.$(SOEXTVER)
-endif
 ifneq ($(filter $(shell uname),FreeBSD NetBSD DragonFly),)
 PCLIBDIR := $(PREFIX)/libdata/pkgconfig
 endif
+endif

 all: libtree-sitter.a libtree-sitter.$(SOEXT) tree-sitter.pc

@@ -61,6 +63,10 @@ ifneq ($(STRIP),)
 	$(STRIP) $@
 endif

+ifneq ($(findstring mingw32,$(MACHINE)),)
+libtree-sitter.dll.a: libtree-sitter.$(SOEXT)
+endif
+
 tree-sitter.pc: lib/tree-sitter.pc.in
 	sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
 	    -e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \

@@ -69,17 +75,27 @@ tree-sitter.pc: lib/tree-sitter.pc.in
 	    -e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
 	    -e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@

+shared: libtree-sitter.$(SOEXT)
+
+static: libtree-sitter.a
+
 clean:
-	$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT)
+	$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT) libtree-stitter.dll.a

 install: all
 	install -d '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter '$(DESTDIR)$(PCLIBDIR)' '$(DESTDIR)$(LIBDIR)'
 	install -m644 lib/include/tree_sitter/api.h '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h
 	install -m644 tree-sitter.pc '$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
 	install -m644 libtree-sitter.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a
+ifneq ($(findstring mingw32,$(MACHINE)),)
+	install -d '$(DESTDIR)$(BINDIR)'
+	install -m755 libtree-sitter.dll '$(DESTDIR)$(BINDIR)'/libtree-sitter.dll
+	install -m755 libtree-sitter.dll.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.dll.a
+else
 	install -m755 libtree-sitter.$(SOEXT) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER)
-	ln -sf libtree-sitter.$(SOEXTVER) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER_MAJOR)
-	ln -sf libtree-sitter.$(SOEXTVER_MAJOR) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT)
+	cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER) libtree-sitter.$(SOEXTVER_MAJOR)
+	cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER_MAJOR) libtree-sitter.$(SOEXT)
+endif

 uninstall:
 	$(RM) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a \

@@ -88,8 +104,9 @@ uninstall:
 	      '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT) \
 	      '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h \
 	      '$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
+	rmdir '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter

-.PHONY: all install uninstall clean
+.PHONY: all shared static install uninstall clean

 ##### Dev targets #####

@@ -99,20 +116,24 @@ test:
 	cargo xtask generate-fixtures
 	cargo xtask test

-test_wasm:
-	cargo xtask generate-fixtures-wasm
+test-wasm:
+	cargo xtask generate-fixtures --wasm
 	cargo xtask test-wasm

 lint:
 	cargo update --workspace --locked --quiet
 	cargo check --workspace --all-targets
-	cargo +nightly fmt --all --check
-	cargo +nightly clippy --workspace --all-targets -- -D warnings
+	cargo fmt --all --check
+	cargo clippy --workspace --all-targets -- -D warnings

+lint-web:
+	npm --prefix lib/binding_web ci
+	npm --prefix lib/binding_web run lint
+
 format:
-	cargo +nightly fmt --all
+	cargo fmt --all

 changelog:
 	@git-cliff --config .github/cliff.toml --prepend CHANGELOG.md --latest --github-token $(shell gh auth token)

-.PHONY: test test_wasm lint format changelog
+.PHONY: test test-wasm lint format changelog
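The Makefile now exposes explicit shared and static targets alongside `all`, and supports MinGW installs. A brief usage sketch (the install prefix is illustrative):

    # Convenience targets introduced in the diff above.
    make static                           # builds libtree-sitter.a
    make shared                           # builds libtree-sitter.so/.dylib/.dll
    make PREFIX=/opt/tree-sitter install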
Package.swift

@@ -14,11 +14,21 @@ let package = Package(
     targets: [
         .target(name: "TreeSitter",
                 path: "lib",
-                sources: ["src/lib.c"],
+                exclude: [
+                    "src/unicode/ICU_SHA",
+                    "src/unicode/README.md",
+                    "src/unicode/LICENSE",
+                    "src/wasm/stdlib-symbols.txt",
+                    "src/lib.c",
+                ],
+                sources: ["src"],
+                publicHeadersPath: "include",
                 cSettings: [
                     .headerSearchPath("src"),
                     .define("_POSIX_C_SOURCE", to: "200112L"),
                     .define("_DEFAULT_SOURCE"),
+                    .define("_BSD_SOURCE"),
+                    .define("_DARWIN_C_SOURCE"),
                 ]),
     ],
     cLanguageStandard: .c11
README.md

@@ -14,8 +14,8 @@ Tree-sitter is a parser generator tool and an incremental parsing library. It ca
 ## Links
 - [Documentation](https://tree-sitter.github.io)
 - [Rust binding](lib/binding_rust/README.md)
-- [WASM binding](lib/binding_web/README.md)
-- [Command-line interface](cli/README.md)
+- [Wasm binding](lib/binding_web/README.md)
+- [Command-line interface](crates/cli/README.md)

 [discord]: https://img.shields.io/discord/1063097320771698699?logo=discord&label=discord
 [matrix]: https://img.shields.io/matrix/tree-sitter-chat%3Amatrix.org?logo=matrix&label=matrix
build.zig (218 changes)

@@ -1,116 +1,142 @@
 const std = @import("std");

 pub fn build(b: *std.Build) !void {
     const target = b.standardTargetOptions(.{});
     const optimize = b.standardOptimizeOption(.{});

     const wasm = b.option(bool, "enable-wasm", "Enable Wasm support") orelse false;
     const shared = b.option(bool, "build-shared", "Build a shared library") orelse false;
     const amalgamated = b.option(bool, "amalgamated", "Build using an amalgamated source") orelse false;

-    const lib: *std.Build.Step.Compile = if (!shared) b.addStaticLibrary(.{
-        .name = "tree-sitter",
-        .target = target,
-        .optimize = optimize,
-        .link_libc = true,
-    }) else b.addSharedLibrary(.{
-        .name = "tree-sitter",
-        .pic = true,
-        .target = target,
-        .optimize = optimize,
-        .link_libc = true,
-    });
+    const lib: *std.Build.Step.Compile = b.addLibrary(.{
+        .name = "tree-sitter",
+        .linkage = if (shared) .dynamic else .static,
+        .root_module = b.createModule(.{
+            .target = target,
+            .optimize = optimize,
+            .link_libc = true,
+            .pic = if (shared) true else null,
+        }),
+    });

     if (amalgamated) {
         lib.addCSourceFile(.{
             .file = b.path("lib/src/lib.c"),
             .flags = &.{"-std=c11"},
         });
     } else {
+        const files = try findSourceFiles(b);
+        defer b.allocator.free(files);
         lib.addCSourceFiles(.{
             .root = b.path("lib/src"),
-            .files = try findSourceFiles(b),
+            .files = files,
             .flags = &.{"-std=c11"},
         });
     }

     lib.addIncludePath(b.path("lib/include"));
     lib.addIncludePath(b.path("lib/src"));
     lib.addIncludePath(b.path("lib/src/wasm"));

     lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L");
     lib.root_module.addCMacro("_DEFAULT_SOURCE", "");
+    lib.root_module.addCMacro("_BSD_SOURCE", "");
+    lib.root_module.addCMacro("_DARWIN_C_SOURCE", "");

     if (wasm) {
         if (b.lazyDependency(wasmtimeDep(target.result), .{})) |wasmtime| {
             lib.root_module.addCMacro("TREE_SITTER_FEATURE_WASM", "");
             lib.addSystemIncludePath(wasmtime.path("include"));
             lib.addLibraryPath(wasmtime.path("lib"));
-            lib.linkSystemLibrary("wasmtime");
+            if (shared) lib.linkSystemLibrary("wasmtime");
         }
     }

     lib.installHeadersDirectory(b.path("lib/include"), ".", .{});

     b.installArtifact(lib);
 }

-fn wasmtimeDep(target: std.Target) []const u8 {
+/// Get the name of the wasmtime dependency for this target.
+pub fn wasmtimeDep(target: std.Target) []const u8 {
     const arch = target.cpu.arch;
     const os = target.os.tag;
     const abi = target.abi;
-    return switch (os) {
+    return @as(?[]const u8, switch (os) {
         .linux => switch (arch) {
             .x86_64 => switch (abi) {
                 .gnu => "wasmtime_c_api_x86_64_linux",
                 .musl => "wasmtime_c_api_x86_64_musl",
                 .android => "wasmtime_c_api_x86_64_android",
-                else => null
+                else => null,
             },
             .aarch64 => switch (abi) {
                 .gnu => "wasmtime_c_api_aarch64_linux",
+                .musl => "wasmtime_c_api_aarch64_musl",
                 .android => "wasmtime_c_api_aarch64_android",
-                else => null
+                else => null,
             },
-            .s390x => "wasmtime_c_api_s390x_linux",
-            .riscv64 => "wasmtime_c_api_riscv64gc_linux",
-            else => null
+            .x86 => switch (abi) {
+                .gnu => "wasmtime_c_api_i686_linux",
+                else => null,
+            },
+            .arm => switch (abi) {
+                .gnueabi => "wasmtime_c_api_armv7_linux",
+                else => null,
+            },
+            .s390x => switch (abi) {
+                .gnu => "wasmtime_c_api_s390x_linux",
+                else => null,
+            },
+            .riscv64 => switch (abi) {
+                .gnu => "wasmtime_c_api_riscv64gc_linux",
+                else => null,
+            },
+            else => null,
         },
         .windows => switch (arch) {
             .x86_64 => switch (abi) {
                 .gnu => "wasmtime_c_api_x86_64_mingw",
                 .msvc => "wasmtime_c_api_x86_64_windows",
-                else => null
+                else => null,
             },
-            else => null
+            .aarch64 => switch (abi) {
+                .msvc => "wasmtime_c_api_aarch64_windows",
+                else => null,
+            },
+            .x86 => switch (abi) {
+                .msvc => "wasmtime_c_api_i686_windows",
+                else => null,
+            },
+            else => null,
         },
         .macos => switch (arch) {
             .x86_64 => "wasmtime_c_api_x86_64_macos",
             .aarch64 => "wasmtime_c_api_aarch64_macos",
-            else => null
+            else => null,
         },
-        else => null
-    } orelse std.debug.panic(
+        else => null,
+    }) orelse std.debug.panic(
         "Unsupported target for wasmtime: {s}-{s}-{s}",
-        .{ @tagName(arch), @tagName(os), @tagName(abi) }
+        .{ @tagName(arch), @tagName(os), @tagName(abi) },
     );
 }

 fn findSourceFiles(b: *std.Build) ![]const []const u8 {
-    var sources = std.ArrayList([]const u8).init(b.allocator);
+    var sources: std.ArrayListUnmanaged([]const u8) = .empty;

     var dir = try b.build_root.handle.openDir("lib/src", .{ .iterate = true });
     var iter = dir.iterate();
     defer dir.close();

     while (try iter.next()) |entry| {
         if (entry.kind != .file) continue;
         const file = entry.name;
         const ext = std.fs.path.extension(file);
         if (std.mem.eql(u8, ext, ".c") and !std.mem.eql(u8, file, "lib.c")) {
-            try sources.append(b.dupe(file));
+            try sources.append(b.allocator, b.dupe(file));
         }
     }

-    return sources.items;
+    return sources.toOwnedSlice(b.allocator);
 }
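build.zig keeps the same user-facing options while moving to addLibrary and an explicit root module. A sketch of invoking them from the command line (the option names come from the b.option() calls in the diff above):

    # Build options declared in build.zig.
    zig build -Dbuild-shared=true -Denable-wasm=true
    zig build -Damalgamated=true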
build.zig.zon (157 changes)

@@ -1,69 +1,96 @@
 .{
-    .name = "tree-sitter",
-    .version = "0.25.0",
+    .name = .tree_sitter,
+    .fingerprint = 0x841224b447ac0d4f,
+    .version = "0.27.0",
+    .minimum_zig_version = "0.14.1",
     .paths = .{
         "build.zig",
         "build.zig.zon",
         "lib/src",
         "lib/include",
         "README.md",
         "LICENSE",
     },
     .dependencies = .{
         .wasmtime_c_api_aarch64_android = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-aarch64-android-c-api.tar.xz",
-            .hash = "12208b1c6fc26df81b3bf6b82ba38a2099bcbfb3eea21b93c9cca797d8f0067d891f",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-android-c-api.tar.xz",
+            .hash = "N-V-__8AAIfPIgdw2YnV3QyiFQ2NHdrxrXzzCdjYJyxJDOta",
             .lazy = true,
         },
         .wasmtime_c_api_aarch64_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-aarch64-linux-c-api.tar.xz",
-            .hash = "12209aaa1bd480ad8674b8d9cc89300e8b045f0fc626938b64158a09e87597705a45",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAIt97QZi7Pf7nNJ2mVY6uxA80Klyuvvtop3pLMRK",
             .lazy = true,
         },
-        .wasmtime_c_api_aarch64_macos = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-aarch64-macos-c-api.tar.xz",
-            .hash = "12206de8f3ce815b0cd9fd735fc61ac73f338e7601e973916b06ae050b4fa7118baf",
-            .lazy = true,
-        },
-        .wasmtime_c_api_riscv64gc_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-riscv64gc-linux-c-api.tar.xz",
-            .hash = "122005e52855c8be82f574b6f35c1e2f5bc6d74ec1e12f16852654e4edd6ac7e2fc1",
-            .lazy = true,
-        },
-        .wasmtime_c_api_s390x_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-s390x-linux-c-api.tar.xz",
-            .hash = "1220a4643445f5e67daffe6473c8e68267682aa92e4d612355b7ac6d46be41d8511e",
-            .lazy = true,
-        },
-        .wasmtime_c_api_x86_64_android = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-x86_64-android-c-api.tar.xz",
-            .hash = "122082a6f5db4787a639d8fa587087d3452aa53a92137fef701dfd2be4d62a70102f",
-            .lazy = true,
-        },
-        .wasmtime_c_api_x86_64_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-x86_64-linux-c-api.tar.xz",
-            .hash = "12201e8daa6057abd4ce5d25d29a053f4be66a81b695f32f65a14f999bf075ddc0f2",
-            .lazy = true,
-        },
-        .wasmtime_c_api_x86_64_macos = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-x86_64-macos-c-api.tar.xz",
-            .hash = "122063a6a6811cf6a3ae6838a61abb66ff4c348447c657a5ed2348c0d310efc2edbb",
-            .lazy = true,
-        },
-        .wasmtime_c_api_x86_64_mingw = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-x86_64-mingw-c-api.zip",
-            .hash = "1220bdd5c3711af386ca07795c7ee8917f58365b0bb6b95255424aa86e08a7fcb4fa",
-            .lazy = true,
-        },
-        .wasmtime_c_api_x86_64_musl = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-x86_64-musl-c-api.tar.xz",
-            .hash = "12200037419e1a5f8a529d42e0ec289919dc5baf06981bc98295e61df4976563566d",
-            .lazy = true,
-        },
-        .wasmtime_c_api_x86_64_windows = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v26.0.1/wasmtime-v26.0.1-x86_64-windows-c-api.zip",
-            .hash = "122069341103b7d16b1f47c3bb96101614af0845ba63a0664e5cc857e9feb369a772",
-            .lazy = true,
-        },
+        .wasmtime_c_api_aarch64_macos = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-macos-c-api.tar.xz",
+            .hash = "N-V-__8AAAO48QQf91w9RmmUDHTja8DrXZA1n6Bmc8waW3qe",
+            .lazy = true,
+        },
+        .wasmtime_c_api_aarch64_musl = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-musl-c-api.tar.xz",
+            .hash = "N-V-__8AAI196wa9pwADoA2RbCDp5F7bKQg1iOPq6gIh8-FH",
+            .lazy = true,
+        },
+        .wasmtime_c_api_aarch64_windows = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-windows-c-api.zip",
+            .hash = "N-V-__8AAC9u4wXfqd1Q6XyQaC8_DbQZClXux60Vu5743N05",
+            .lazy = true,
+        },
+        .wasmtime_c_api_armv7_linux = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-armv7-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAHXe8gWs3s83Cc5G6SIq0_jWxj8fGTT5xG4vb6-x",
+            .lazy = true,
+        },
+        .wasmtime_c_api_i686_linux = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAN2pzgUUfulRCYnipSfis9IIYHoTHVlieLRmKuct",
+            .lazy = true,
+        },
+        .wasmtime_c_api_i686_windows = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-windows-c-api.zip",
+            .hash = "N-V-__8AAJu0YAUUTFBLxFIOi-MSQVezA6MMkpoFtuaf2Quf",
+            .lazy = true,
+        },
+        .wasmtime_c_api_riscv64gc_linux = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-riscv64gc-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAG8m-gc3E3AIImtTZ3l1c7HC6HUWazQ9OH5KACX4",
+            .lazy = true,
+        },
+        .wasmtime_c_api_s390x_linux = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-s390x-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAH314gd-gE4IBp2uvAL3gHeuW1uUZjMiLLeUdXL_",
+            .lazy = true,
+        },
+        .wasmtime_c_api_x86_64_android = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-android-c-api.tar.xz",
+            .hash = "N-V-__8AAIPNRwfNkznebrcGb0IKUe7f35bkuZEYOjcx6q3f",
+            .lazy = true,
+        },
+        .wasmtime_c_api_x86_64_linux = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAI8EDwcyTtk_Afhk47SEaqfpoRqGkJeZpGs69ChF",
+            .lazy = true,
+        },
+        .wasmtime_c_api_x86_64_macos = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-macos-c-api.tar.xz",
+            .hash = "N-V-__8AAGtGNgVaOpHSxC22IjrampbRIy6lLwscdcAE8nG1",
+            .lazy = true,
+        },
+        .wasmtime_c_api_x86_64_mingw = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-mingw-c-api.zip",
+            .hash = "N-V-__8AAPS2PAbVix50L6lnddlgazCPTz3whLUFk1qnRtnZ",
+            .lazy = true,
+        },
+        .wasmtime_c_api_x86_64_musl = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-musl-c-api.tar.xz",
+            .hash = "N-V-__8AAF-WEQe0nzvi09PgusM5i46FIuCKJmIDWUleWgQ3",
+            .lazy = true,
+        },
+        .wasmtime_c_api_x86_64_windows = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-windows-c-api.zip",
+            .hash = "N-V-__8AAKGNXwbpJQsn0_6kwSIVDDWifSg8cBzf7T2RzsC9",
+            .lazy = true,
+        },
     },
 }
(deleted file; original path not shown in this view)

@@ -1,32 +0,0 @@
-use std::{env, path::PathBuf, process::Command};
-
-fn main() {
-    if let Some(git_sha) = read_git_sha() {
-        println!("cargo:rustc-env=BUILD_SHA={git_sha}");
-    }
-}
-
-// This is copied from the build.rs in parent directory. This should be updated if the
-// parent build.rs gets fixes.
-fn read_git_sha() -> Option<String> {
-    let crate_path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
-
-    if !crate_path
-        .parent()?
-        .parent()
-        .is_some_and(|p| p.join(".git").exists())
-    {
-        return None;
-    }
-
-    Command::new("git")
-        .args(["rev-parse", "HEAD"])
-        .current_dir(crate_path)
-        .output()
-        .map_or(None, |output| {
-            if !output.status.success() {
-                return None;
-            }
-            Some(String::from_utf8_lossy(&output.stdout).to_string())
-        })
-}
(deleted file; original path not shown in this view, single blank line removed)

@@ -1 +0,0 @@
-
|
|
@ -1,251 +0,0 @@
|
||||||
use std::{
|
|
||||||
env, fs,
|
|
||||||
io::Write,
|
|
||||||
path::{Path, PathBuf},
|
|
||||||
process::{Command, Stdio},
|
|
||||||
};
|
|
||||||
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
|
||||||
use build_tables::build_tables;
|
|
||||||
use grammars::InputGrammar;
|
|
||||||
use lazy_static::lazy_static;
|
|
||||||
use parse_grammar::parse_grammar;
|
|
||||||
use prepare_grammar::prepare_grammar;
|
|
||||||
use regex::{Regex, RegexBuilder};
|
|
||||||
use render::render_c_code;
|
|
||||||
use semver::Version;
|
|
||||||
|
|
||||||
mod build_tables;
|
|
||||||
mod dedup;
|
|
||||||
mod grammar_files;
|
|
||||||
mod grammars;
|
|
||||||
mod nfa;
|
|
||||||
mod node_types;
|
|
||||||
pub mod parse_grammar;
|
|
||||||
mod prepare_grammar;
|
|
||||||
mod render;
|
|
||||||
mod rules;
|
|
||||||
mod tables;
|
|
||||||
|
|
||||||
lazy_static! {
|
|
||||||
static ref JSON_COMMENT_REGEX: Regex = RegexBuilder::new("^\\s*//.*")
|
|
||||||
.multi_line(true)
|
|
||||||
.build()
|
|
||||||
.unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
struct GeneratedParser {
|
|
||||||
c_code: String,
|
|
||||||
node_types_json: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const ALLOC_HEADER: &str = include_str!("templates/alloc.h");
|
|
||||||
pub const ARRAY_HEADER: &str = include_str!("templates/array.h");
|
|
||||||
|
|
||||||
pub fn generate_parser_in_directory(
|
|
||||||
repo_path: &Path,
|
|
||||||
out_path: Option<&str>,
|
|
||||||
grammar_path: Option<&str>,
|
|
||||||
abi_version: usize,
|
|
||||||
report_symbol_name: Option<&str>,
|
|
||||||
js_runtime: Option<&str>,
|
|
||||||
) -> Result<()> {
|
|
||||||
let mut repo_path = repo_path.to_owned();
|
|
||||||
let mut grammar_path = grammar_path;
|
|
||||||
|
|
||||||
// Populate a new empty grammar directory.
|
|
||||||
if let Some(path) = grammar_path {
|
|
||||||
let path = PathBuf::from(path);
|
|
||||||
if !path
|
|
||||||
.try_exists()
|
|
||||||
.with_context(|| "Some error with specified path")?
|
|
||||||
{
|
|
||||||
fs::create_dir_all(&path)?;
|
|
||||||
grammar_path = None;
|
|
||||||
repo_path = path;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let grammar_path = grammar_path.map_or_else(|| repo_path.join("grammar.js"), PathBuf::from);
|
|
||||||
|
|
||||||
// Read the grammar file.
|
|
||||||
let grammar_json = load_grammar_file(&grammar_path, js_runtime)?;
|
|
||||||
|
|
||||||
let src_path = out_path.map_or_else(|| repo_path.join("src"), PathBuf::from);
|
|
||||||
let header_path = src_path.join("tree_sitter");
|
|
||||||
|
|
||||||
// Ensure that the output directories exist.
|
|
||||||
fs::create_dir_all(&src_path)?;
|
|
||||||
fs::create_dir_all(&header_path)?;
|
|
||||||
|
|
||||||
if grammar_path.file_name().unwrap() != "grammar.json" {
|
|
||||||
fs::write(src_path.join("grammar.json"), &grammar_json)
|
|
||||||
.with_context(|| format!("Failed to write grammar.json to {src_path:?}"))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Parse and preprocess the grammar.
|
|
||||||
let input_grammar = parse_grammar(&grammar_json)?;
|
|
||||||
|
|
||||||
// Generate the parser and related files.
|
|
||||||
let GeneratedParser {
|
|
||||||
c_code,
|
|
||||||
node_types_json,
|
|
||||||
} = generate_parser_for_grammar_with_opts(&input_grammar, abi_version, report_symbol_name)?;
|
|
||||||
|
|
||||||
write_file(&src_path.join("parser.c"), c_code)?;
|
|
||||||
write_file(&src_path.join("node-types.json"), node_types_json)?;
|
|
||||||
write_file(&header_path.join("alloc.h"), ALLOC_HEADER)?;
|
|
||||||
write_file(&header_path.join("array.h"), ARRAY_HEADER)?;
|
|
||||||
write_file(&header_path.join("parser.h"), tree_sitter::PARSER_HEADER)?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn generate_parser_for_grammar(grammar_json: &str) -> Result<(String, String)> {
|
|
||||||
let grammar_json = JSON_COMMENT_REGEX.replace_all(grammar_json, "\n");
|
|
||||||
let input_grammar = parse_grammar(&grammar_json)?;
|
|
||||||
let parser =
|
|
||||||
generate_parser_for_grammar_with_opts(&input_grammar, tree_sitter::LANGUAGE_VERSION, None)?;
|
|
||||||
Ok((input_grammar.name, parser.c_code))
|
|
||||||
}
|
|
||||||
|
|
||||||
fn generate_parser_for_grammar_with_opts(
|
|
||||||
input_grammar: &InputGrammar,
|
|
||||||
abi_version: usize,
|
|
||||||
report_symbol_name: Option<&str>,
|
|
||||||
) -> Result<GeneratedParser> {
|
|
||||||
let (syntax_grammar, lexical_grammar, inlines, simple_aliases) =
|
|
||||||
prepare_grammar(input_grammar)?;
|
|
||||||
let variable_info =
|
|
||||||
node_types::get_variable_info(&syntax_grammar, &lexical_grammar, &simple_aliases)?;
|
|
||||||
let node_types_json = node_types::generate_node_types_json(
|
|
||||||
&syntax_grammar,
|
|
||||||
&lexical_grammar,
|
|
||||||
&simple_aliases,
|
|
||||||
&variable_info,
|
|
||||||
);
|
|
||||||
let tables = build_tables(
|
|
||||||
&syntax_grammar,
|
|
||||||
&lexical_grammar,
|
|
||||||
&simple_aliases,
|
|
||||||
&variable_info,
|
|
||||||
&inlines,
|
|
||||||
        report_symbol_name,
    )?;

    let c_code = render_c_code(
        &input_grammar.name,
        tables,
        syntax_grammar,
        lexical_grammar,
        simple_aliases,
        abi_version,
    );

    Ok(GeneratedParser {
        c_code,
        node_types_json: serde_json::to_string_pretty(&node_types_json).unwrap(),
    })
}

pub fn load_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> Result<String> {
    if grammar_path.is_dir() {
        return Err(anyhow!(
            "Path to a grammar file with `.js` or `.json` extension is required"
        ));
    }
    match grammar_path.extension().and_then(|e| e.to_str()) {
        Some("js") => Ok(load_js_grammar_file(grammar_path, js_runtime)
            .with_context(|| "Failed to load grammar.js")?),
        Some("json") => {
            Ok(fs::read_to_string(grammar_path).with_context(|| "Failed to load grammar.json")?)
        }
        _ => Err(anyhow!("Unknown grammar file extension: {grammar_path:?}")),
    }
}

fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> Result<String> {
    let grammar_path = fs::canonicalize(grammar_path)?;

    #[cfg(windows)]
    let grammar_path = url::Url::from_file_path(grammar_path)
        .expect("Failed to convert path to URL")
        .to_string();

    let js_runtime = js_runtime.unwrap_or("node");

    let mut js_command = Command::new(js_runtime);
    match js_runtime {
        "node" => {
            js_command.args(["--input-type=module", "-"]);
        }
        "bun" => {
            js_command.arg("-");
        }
        "deno" => {
            js_command.args(["run", "--allow-all", "-"]);
        }
        _ => {}
    }

    let mut js_process = js_command
        .env("TREE_SITTER_GRAMMAR_PATH", grammar_path)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
        .with_context(|| format!("Failed to run `{js_runtime}`"))?;

    let mut js_stdin = js_process
        .stdin
        .take()
        .with_context(|| format!("Failed to open stdin for {js_runtime}"))?;
    let cli_version = Version::parse(env!("CARGO_PKG_VERSION"))
        .with_context(|| "Could not parse this package's version as semver.")?;
    write!(
        js_stdin,
        "globalThis.TREE_SITTER_CLI_VERSION_MAJOR = {};
         globalThis.TREE_SITTER_CLI_VERSION_MINOR = {};
         globalThis.TREE_SITTER_CLI_VERSION_PATCH = {};",
        cli_version.major, cli_version.minor, cli_version.patch,
    )
    .with_context(|| format!("Failed to write tree-sitter version to {js_runtime}'s stdin"))?;
    js_stdin
        .write(include_bytes!("./dsl.js"))
        .with_context(|| format!("Failed to write grammar dsl to {js_runtime}'s stdin"))?;
    drop(js_stdin);

    let output = js_process
        .wait_with_output()
        .with_context(|| format!("Failed to read output from {js_runtime}"))?;
    match output.status.code() {
        None => panic!("{js_runtime} process was killed"),
        Some(0) => {
            let stdout = String::from_utf8(output.stdout)
                .with_context(|| format!("Got invalid UTF8 from {js_runtime}"))?;

            let mut grammar_json = &stdout[..];

            if let Some(pos) = stdout.rfind('\n') {
                // If there's a newline, split the last line from the rest of the output
                let node_output = &stdout[..pos];
                grammar_json = &stdout[pos + 1..];

                let mut stdout = std::io::stdout().lock();
                stdout.write_all(node_output.as_bytes())?;
                stdout.write_all(b"\n")?;
                stdout.flush()?;
            }

            Ok(serde_json::to_string_pretty(
                &serde_json::from_str::<serde_json::Value>(grammar_json)
                    .with_context(|| "Failed to parse grammar JSON")?,
            )
            .with_context(|| "Failed to serialize grammar JSON")?
                + "\n")
        }
        Some(code) => Err(anyhow!("{js_runtime} process exited with status {code}")),
    }
}

pub fn write_file(path: &Path, body: impl AsRef<[u8]>) -> Result<()> {
    fs::write(path, body)
        .with_context(|| format!("Failed to write {:?}", path.file_name().unwrap()))
}
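
For orientation, a minimal sketch of how a caller might drive `load_grammar_file`; the `demo` wrapper, the file path, and the runtime choice below are hypothetical, not part of this diff:

use std::path::Path;
use anyhow::Result;

// A `grammar.js` is evaluated through the chosen JS runtime ("node", "bun",
// or "deno") and the resulting JSON is pretty-printed; a `grammar.json` is
// read verbatim.
fn demo() -> Result<()> {
    let json = load_grammar_file(Path::new("grammar.js"), Some("node"))?;
    println!("{json}");
    Ok(())
}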

@@ -1,343 +0,0 @@
use std::collections::HashSet;

use anyhow::{anyhow, Result};
use serde::Deserialize;
use serde_json::{Map, Value};

use super::{
    grammars::{InputGrammar, PrecedenceEntry, Variable, VariableType},
    rules::{Precedence, Rule},
};

#[derive(Deserialize)]
#[serde(tag = "type")]
#[allow(non_camel_case_types)]
#[allow(clippy::upper_case_acronyms)]
enum RuleJSON {
    ALIAS {
        content: Box<RuleJSON>,
        named: bool,
        value: String,
    },
    BLANK,
    STRING {
        value: String,
    },
    PATTERN {
        value: String,
        flags: Option<String>,
    },
    SYMBOL {
        name: String,
    },
    CHOICE {
        members: Vec<RuleJSON>,
    },
    FIELD {
        name: String,
        content: Box<RuleJSON>,
    },
    SEQ {
        members: Vec<RuleJSON>,
    },
    REPEAT {
        content: Box<RuleJSON>,
    },
    REPEAT1 {
        content: Box<RuleJSON>,
    },
    PREC_DYNAMIC {
        value: i32,
        content: Box<RuleJSON>,
    },
    PREC_LEFT {
        value: PrecedenceValueJSON,
        content: Box<RuleJSON>,
    },
    PREC_RIGHT {
        value: PrecedenceValueJSON,
        content: Box<RuleJSON>,
    },
    PREC {
        value: PrecedenceValueJSON,
        content: Box<RuleJSON>,
    },
    TOKEN {
        content: Box<RuleJSON>,
    },
    IMMEDIATE_TOKEN {
        content: Box<RuleJSON>,
    },
}

#[derive(Deserialize)]
#[serde(untagged)]
enum PrecedenceValueJSON {
    Integer(i32),
    Name(String),
}

#[derive(Deserialize)]
pub struct GrammarJSON {
    pub name: String,
    rules: Map<String, Value>,
    #[serde(default)]
    precedences: Vec<Vec<RuleJSON>>,
    #[serde(default)]
    conflicts: Vec<Vec<String>>,
    #[serde(default)]
    externals: Vec<RuleJSON>,
    #[serde(default)]
    extras: Vec<RuleJSON>,
    #[serde(default)]
    inline: Vec<String>,
    #[serde(default)]
    supertypes: Vec<String>,
    word: Option<String>,
}

fn rule_is_referenced(rule: &Rule, target: &str) -> bool {
    match rule {
        Rule::NamedSymbol(name) => name == target,
        Rule::Choice(rules) | Rule::Seq(rules) => {
            rules.iter().any(|r| rule_is_referenced(r, target))
        }
        Rule::Metadata { rule, .. } => rule_is_referenced(rule, target),
        Rule::Repeat(inner) => rule_is_referenced(inner, target),
        Rule::Blank | Rule::String(_) | Rule::Pattern(_, _) | Rule::Symbol(_) => false,
    }
}

fn variable_is_used(
    grammar_rules: &[(String, Rule)],
    other_rules: (&[Rule], &[Rule]),
    target_name: &str,
    in_progress: &mut HashSet<String>,
) -> bool {
    let root = &grammar_rules.first().unwrap().0;
    if target_name == root {
        return true;
    }

    if other_rules
        .0
        .iter()
        .chain(other_rules.1.iter())
        .any(|rule| rule_is_referenced(rule, target_name))
    {
        return true;
    }

    in_progress.insert(target_name.to_string());
    let result = grammar_rules
        .iter()
        .filter(|(key, _)| *key != target_name)
        .any(|(name, rule)| {
            if !rule_is_referenced(rule, target_name) || in_progress.contains(name) {
                return false;
            }
            variable_is_used(grammar_rules, other_rules, name, in_progress)
        });
    in_progress.remove(target_name);

    result
}
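
`variable_is_used` is a reachability check: a rule is kept if the root rule, an extra, or an external token can (transitively) reach it, with `in_progress` acting as a cycle guard so mutually recursive rules don't recurse forever. A toy standalone version of the same idea over a plain name graph, illustrative only and not from this diff:

use std::collections::{HashMap, HashSet};

// Does `target` appear on some path from `node` through `edges`?
fn reachable<'a>(
    edges: &HashMap<&'a str, Vec<&'a str>>,
    node: &'a str,
    target: &str,
    seen: &mut HashSet<&'a str>,
) -> bool {
    if node == target {
        return true;
    }
    if !seen.insert(node) {
        return false; // cycle guard, mirrors `in_progress`
    }
    edges
        .get(node)
        .is_some_and(|next| next.iter().any(|&n| reachable(edges, n, target, seen)))
}

fn demo() {
    let mut edges = HashMap::new();
    edges.insert("file", vec!["statement"]);
    edges.insert("statement", vec!["statement", "expr"]); // self-cycle is fine
    assert!(reachable(&edges, "file", "expr", &mut HashSet::new()));
}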

pub(crate) fn parse_grammar(input: &str) -> Result<InputGrammar> {
    let mut grammar_json = serde_json::from_str::<GrammarJSON>(input)?;

    let mut extra_symbols =
        grammar_json
            .extras
            .into_iter()
            .try_fold(Vec::new(), |mut acc, item| {
                let rule = parse_rule(item);
                if let Rule::String(ref value) = rule {
                    if value.is_empty() {
                        return Err(anyhow!(
                            "Rules in the `extras` array must not contain empty strings"
                        ));
                    }
                }
                acc.push(rule);
                Ok(acc)
            })?;

    let mut external_tokens = grammar_json
        .externals
        .into_iter()
        .map(parse_rule)
        .collect::<Vec<_>>();

    let mut precedence_orderings = Vec::with_capacity(grammar_json.precedences.len());
    for list in grammar_json.precedences {
        let mut ordering = Vec::with_capacity(list.len());
        for entry in list {
            ordering.push(match entry {
                RuleJSON::STRING { value } => PrecedenceEntry::Name(value),
                RuleJSON::SYMBOL { name } => PrecedenceEntry::Symbol(name),
                _ => {
                    return Err(anyhow!(
                        "Invalid rule in precedences array. Only strings and symbols are allowed"
                    ))
                }
            });
        }
        precedence_orderings.push(ordering);
    }

    let mut variables = Vec::with_capacity(grammar_json.rules.len());

    let rules = grammar_json
        .rules
        .into_iter()
        .map(|(n, r)| Ok((n, parse_rule(serde_json::from_value(r)?))))
        .collect::<Result<Vec<_>>>()?;

    let mut in_progress = HashSet::new();

    for (name, rule) in &rules {
        if !variable_is_used(
            &rules,
            (&extra_symbols, &external_tokens),
            name,
            &mut in_progress,
        ) && grammar_json.word.as_ref().is_some_and(|w| w != name)
        {
            grammar_json.conflicts.retain(|r| !r.contains(name));
            grammar_json.supertypes.retain(|r| r != name);
            grammar_json.inline.retain(|r| r != name);
            extra_symbols.retain(|r| !rule_is_referenced(r, name));
            external_tokens.retain(|r| !rule_is_referenced(r, name));
            precedence_orderings.retain(|r| {
                !r.iter().any(|e| {
                    let PrecedenceEntry::Symbol(s) = e else {
                        return false;
                    };
                    s == name
                })
            });
            continue;
        }
        variables.push(Variable {
            name: name.clone(),
            kind: VariableType::Named,
            rule: rule.clone(),
        });
    }

    Ok(InputGrammar {
        name: grammar_json.name,
        word_token: grammar_json.word,
        expected_conflicts: grammar_json.conflicts,
        supertype_symbols: grammar_json.supertypes,
        variables_to_inline: grammar_json.inline,
        precedence_orderings,
        variables,
        extra_symbols,
        external_tokens,
    })
}

fn parse_rule(json: RuleJSON) -> Rule {
    match json {
        RuleJSON::ALIAS {
            content,
            value,
            named,
        } => Rule::alias(parse_rule(*content), value, named),
        RuleJSON::BLANK => Rule::Blank,
        RuleJSON::STRING { value } => Rule::String(value),
        RuleJSON::PATTERN { value, flags } => Rule::Pattern(
            value,
            flags.map_or(String::new(), |f| {
                f.matches(|c| {
                    if c == 'i' {
                        true
                    } else {
                        // silently ignore unicode flags
                        if c != 'u' && c != 'v' {
                            eprintln!("Warning: unsupported flag {c}");
                        }
                        false
                    }
                })
                .collect()
            }),
        ),
        RuleJSON::SYMBOL { name } => Rule::NamedSymbol(name),
        RuleJSON::CHOICE { members } => Rule::choice(members.into_iter().map(parse_rule).collect()),
        RuleJSON::FIELD { content, name } => Rule::field(name, parse_rule(*content)),
        RuleJSON::SEQ { members } => Rule::seq(members.into_iter().map(parse_rule).collect()),
        RuleJSON::REPEAT1 { content } => Rule::repeat(parse_rule(*content)),
        RuleJSON::REPEAT { content } => {
            Rule::choice(vec![Rule::repeat(parse_rule(*content)), Rule::Blank])
        }
        RuleJSON::PREC { value, content } => Rule::prec(value.into(), parse_rule(*content)),
        RuleJSON::PREC_LEFT { value, content } => {
            Rule::prec_left(value.into(), parse_rule(*content))
        }
        RuleJSON::PREC_RIGHT { value, content } => {
            Rule::prec_right(value.into(), parse_rule(*content))
        }
        RuleJSON::PREC_DYNAMIC { value, content } => {
            Rule::prec_dynamic(value, parse_rule(*content))
        }
        RuleJSON::TOKEN { content } => Rule::token(parse_rule(*content)),
        RuleJSON::IMMEDIATE_TOKEN { content } => Rule::immediate_token(parse_rule(*content)),
    }
}
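
One detail worth noting in the match above: `REPEAT1` maps directly onto `Rule::repeat`, while `REPEAT` (zero or more) is desugared into a choice between a one-or-more repeat and `Blank`, so the rest of the pipeline only ever sees a single repeat construct. A hedged illustration (the `demo` wrapper is hypothetical; the types are the ones defined in this file):

fn demo() -> Result<()> {
    // {"type": "REPEAT", ...} parses to choice(repeat1(x), blank).
    let json: RuleJSON = serde_json::from_str(
        r#"{"type": "REPEAT", "content": {"type": "SYMBOL", "name": "x"}}"#,
    )?;
    assert_eq!(
        parse_rule(json),
        Rule::choice(vec![
            Rule::repeat(Rule::NamedSymbol("x".to_string())),
            Rule::Blank,
        ])
    );
    Ok(())
}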

impl From<PrecedenceValueJSON> for Precedence {
    fn from(val: PrecedenceValueJSON) -> Self {
        match val {
            PrecedenceValueJSON::Integer(i) => Self::Integer(i),
            PrecedenceValueJSON::Name(i) => Self::Name(i),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_grammar() {
        let grammar = parse_grammar(
            r#"{
                "name": "my_lang",
                "rules": {
                    "file": {
                        "type": "REPEAT1",
                        "content": {
                            "type": "SYMBOL",
                            "name": "statement"
                        }
                    },
                    "statement": {
                        "type": "STRING",
                        "value": "foo"
                    }
                }
            }"#,
        )
        .unwrap();

        assert_eq!(grammar.name, "my_lang");
        assert_eq!(
            grammar.variables,
            vec![
                Variable {
                    name: "file".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::repeat(Rule::NamedSymbol("statement".to_string()))
                },
                Variable {
                    name: "statement".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::String("foo".to_string())
                },
            ]
        );
    }
}

@@ -1 +0,0 @@
3.1.64

992 cli/src/init.rs
@@ -1,992 +0,0 @@
use std::{
    fs,
    path::{Path, PathBuf},
    str::{self, FromStr},
};

use anyhow::{anyhow, Context, Result};
use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
use regex::Regex;
use semver::Version;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use tree_sitter_generate::write_file;
use tree_sitter_loader::{
    Author, Bindings, Grammar, Links, Metadata, PackageJSON, PackageJSONAuthor,
    PackageJSONRepository, PathsJSON, TreeSitterJSON,
};
use url::Url;

const CLI_VERSION: &str = env!("CARGO_PKG_VERSION");
const CLI_VERSION_PLACEHOLDER: &str = "CLI_VERSION";

const ABI_VERSION_MAX: usize = tree_sitter::LANGUAGE_VERSION;
const ABI_VERSION_MAX_PLACEHOLDER: &str = "ABI_VERSION_MAX";

const PARSER_NAME_PLACEHOLDER: &str = "PARSER_NAME";
const CAMEL_PARSER_NAME_PLACEHOLDER: &str = "CAMEL_PARSER_NAME";
const UPPER_PARSER_NAME_PLACEHOLDER: &str = "UPPER_PARSER_NAME";
const LOWER_PARSER_NAME_PLACEHOLDER: &str = "LOWER_PARSER_NAME";

const PARSER_DESCRIPTION_PLACEHOLDER: &str = "PARSER_DESCRIPTION";
const PARSER_LICENSE_PLACEHOLDER: &str = "PARSER_LICENSE";
const PARSER_URL_PLACEHOLDER: &str = "PARSER_URL";
const PARSER_URL_STRIPPED_PLACEHOLDER: &str = "PARSER_URL_STRIPPED";
const PARSER_VERSION_PLACEHOLDER: &str = "PARSER_VERSION";

const AUTHOR_NAME_PLACEHOLDER: &str = "PARSER_AUTHOR_NAME";
const AUTHOR_EMAIL_PLACEHOLDER: &str = "PARSER_AUTHOR_EMAIL";
const AUTHOR_URL_PLACEHOLDER: &str = "PARSER_AUTHOR_URL";

const AUTHOR_BLOCK_JS: &str = "\n  \"author\": {";
const AUTHOR_NAME_PLACEHOLDER_JS: &str = "\n    \"name\": \"PARSER_AUTHOR_NAME\",";
const AUTHOR_EMAIL_PLACEHOLDER_JS: &str = ",\n    \"email\": \"PARSER_AUTHOR_EMAIL\"";
const AUTHOR_URL_PLACEHOLDER_JS: &str = ",\n    \"url\": \"PARSER_AUTHOR_URL\"";

const AUTHOR_BLOCK_PY: &str = "\nauthors = [{";
const AUTHOR_NAME_PLACEHOLDER_PY: &str = "name = \"PARSER_AUTHOR_NAME\"";
const AUTHOR_EMAIL_PLACEHOLDER_PY: &str = ", email = \"PARSER_AUTHOR_EMAIL\"";

const AUTHOR_BLOCK_RS: &str = "\nauthors = [";
const AUTHOR_NAME_PLACEHOLDER_RS: &str = "PARSER_AUTHOR_NAME";
const AUTHOR_EMAIL_PLACEHOLDER_RS: &str = " PARSER_AUTHOR_EMAIL";

const AUTHOR_BLOCK_GRAMMAR: &str = "\n * @author ";
const AUTHOR_NAME_PLACEHOLDER_GRAMMAR: &str = "PARSER_AUTHOR_NAME";
const AUTHOR_EMAIL_PLACEHOLDER_GRAMMAR: &str = " PARSER_AUTHOR_EMAIL";

const GRAMMAR_JS_TEMPLATE: &str = include_str!("./templates/grammar.js");
const PACKAGE_JSON_TEMPLATE: &str = include_str!("./templates/package.json");
const GITIGNORE_TEMPLATE: &str = include_str!("./templates/gitignore");
const GITATTRIBUTES_TEMPLATE: &str = include_str!("./templates/gitattributes");
const EDITORCONFIG_TEMPLATE: &str = include_str!("./templates/.editorconfig");

const RUST_BINDING_VERSION: &str = env!("CARGO_PKG_VERSION");
const RUST_BINDING_VERSION_PLACEHOLDER: &str = "RUST_BINDING_VERSION";

const LIB_RS_TEMPLATE: &str = include_str!("./templates/lib.rs");
const BUILD_RS_TEMPLATE: &str = include_str!("./templates/build.rs");
const CARGO_TOML_TEMPLATE: &str = include_str!("./templates/_cargo.toml");

const INDEX_JS_TEMPLATE: &str = include_str!("./templates/index.js");
const INDEX_D_TS_TEMPLATE: &str = include_str!("./templates/index.d.ts");
const JS_BINDING_CC_TEMPLATE: &str = include_str!("./templates/js-binding.cc");
const BINDING_GYP_TEMPLATE: &str = include_str!("./templates/binding.gyp");
const BINDING_TEST_JS_TEMPLATE: &str = include_str!("./templates/binding_test.js");

const MAKEFILE_TEMPLATE: &str = include_str!("./templates/makefile");
const CMAKELISTS_TXT_TEMPLATE: &str = include_str!("./templates/cmakelists.cmake");
const PARSER_NAME_H_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.h");
const PARSER_NAME_PC_IN_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.pc.in");

const GO_MOD_TEMPLATE: &str = include_str!("./templates/go.mod");
const BINDING_GO_TEMPLATE: &str = include_str!("./templates/binding.go");
const BINDING_TEST_GO_TEMPLATE: &str = include_str!("./templates/binding_test.go");

const SETUP_PY_TEMPLATE: &str = include_str!("./templates/setup.py");
const INIT_PY_TEMPLATE: &str = include_str!("./templates/__init__.py");
const INIT_PYI_TEMPLATE: &str = include_str!("./templates/__init__.pyi");
const PYPROJECT_TOML_TEMPLATE: &str = include_str!("./templates/pyproject.toml");
const PY_BINDING_C_TEMPLATE: &str = include_str!("./templates/py-binding.c");
const TEST_BINDING_PY_TEMPLATE: &str = include_str!("./templates/test_binding.py");

const PACKAGE_SWIFT_TEMPLATE: &str = include_str!("./templates/package.swift");
const TESTS_SWIFT_TEMPLATE: &str = include_str!("./templates/tests.swift");

const TREE_SITTER_JSON_SCHEMA: &str =
    "https://tree-sitter.github.io/tree-sitter/assets/schemas/config.schema.json";

#[must_use]
pub fn path_in_ignore(repo_path: &Path) -> bool {
    [
        "bindings",
        "build",
        "examples",
        "node_modules",
        "queries",
        "script",
        "src",
        "target",
        "test",
        "types",
    ]
    .iter()
    .any(|dir| repo_path.ends_with(dir))
}

#[derive(Serialize, Deserialize, Clone)]
pub struct JsonConfigOpts {
    pub name: String,
    pub camelcase: String,
    pub description: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub repository: Option<Url>,
    pub scope: String,
    pub file_types: Vec<String>,
    pub version: Version,
    pub license: String,
    pub author: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub email: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<Url>,
}

impl JsonConfigOpts {
    #[must_use]
    pub fn to_tree_sitter_json(self) -> TreeSitterJSON {
        TreeSitterJSON {
            schema: Some(TREE_SITTER_JSON_SCHEMA.to_string()),
            grammars: vec![Grammar {
                name: self.name.clone(),
                camelcase: Some(self.camelcase),
                scope: self.scope,
                path: None,
                external_files: PathsJSON::Empty,
                file_types: Some(self.file_types),
                highlights: PathsJSON::Empty,
                injections: PathsJSON::Empty,
                locals: PathsJSON::Empty,
                tags: PathsJSON::Empty,
                injection_regex: Some(format!("^{}$", self.name)),
                first_line_regex: None,
                content_regex: None,
            }],
            metadata: Metadata {
                version: self.version,
                license: Some(self.license),
                description: Some(self.description),
                authors: Some(vec![Author {
                    name: self.author,
                    email: self.email,
                    url: self.url.map(|url| url.to_string()),
                }]),
                links: Some(Links {
                    repository: self.repository.unwrap_or_else(|| {
                        Url::parse(&format!(
                            "https://github.com/tree-sitter/tree-sitter-{}",
                            self.name
                        ))
                        .expect("Failed to parse default repository URL")
                    }),
                    homepage: None,
                }),
                namespace: None,
            },
            bindings: Bindings::default(),
        }
    }
}
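
A hedged sketch of filling these options and converting them; the `demo` wrapper and every field value below are hypothetical:

fn demo() -> TreeSitterJSON {
    let opts = JsonConfigOpts {
        name: "mylang".to_string(),
        camelcase: "Mylang".to_string(),
        scope: "source.mylang".to_string(),
        file_types: vec!["ml".to_string()],
        ..Default::default()
    };
    // Consumes the options and fills in derived defaults, e.g. the
    // `^mylang$` injection regex and the default GitHub repository URL.
    opts.to_tree_sitter_json()
}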

impl Default for JsonConfigOpts {
    fn default() -> Self {
        Self {
            name: String::new(),
            camelcase: String::new(),
            description: String::new(),
            repository: None,
            scope: String::new(),
            file_types: vec![],
            version: Version::from_str("0.1.0").unwrap(),
            license: String::new(),
            author: String::new(),
            email: None,
            url: None,
        }
    }
}

struct GenerateOpts<'a> {
    author_name: Option<&'a str>,
    author_email: Option<&'a str>,
    author_url: Option<&'a str>,
    license: Option<&'a str>,
    description: Option<&'a str>,
    repository: Option<&'a str>,
    version: &'a Version,
    camel_parser_name: &'a str,
}

// TODO: remove in 0.25
// A return value of `true` means the migration succeeded; `false` means it did not.
pub fn migrate_package_json(repo_path: &Path) -> Result<bool> {
    let root_path =
        get_root_path(&repo_path.join("package.json")).unwrap_or_else(|_| repo_path.to_path_buf());
    let (package_json_path, tree_sitter_json_path) = (
        root_path.join("package.json"),
        root_path.join("tree-sitter.json"),
    );

    let old_config = serde_json::from_str::<PackageJSON>(
        &fs::read_to_string(&package_json_path)
            .with_context(|| format!("Failed to read package.json in {}", root_path.display()))?,
    )?;

    if old_config.tree_sitter.is_none() {
        eprintln!("Failed to find `tree-sitter` section in package.json, unable to migrate");
        return Ok(false);
    }

    let name = old_config.name.replace("tree-sitter-", "");

    let new_config = TreeSitterJSON {
        schema: Some(TREE_SITTER_JSON_SCHEMA.to_string()),
        grammars: old_config
            .tree_sitter
            .unwrap()
            .into_iter()
            .map(|l| Grammar {
                name: name.clone(),
                camelcase: Some(name.to_upper_camel_case()),
                scope: l.scope.unwrap_or_else(|| format!("source.{name}")),
                path: Some(l.path),
                external_files: l.external_files,
                file_types: l.file_types,
                highlights: l.highlights,
                injections: l.injections,
                locals: l.locals,
                tags: l.tags,
                injection_regex: l.injection_regex,
                first_line_regex: l.first_line_regex,
                content_regex: l.content_regex,
            })
            .collect(),
        metadata: Metadata {
            version: old_config.version,
            license: old_config
                .license
                .map_or_else(|| Some("MIT".to_string()), Some),
            description: old_config
                .description
                .map_or_else(|| Some(format!("{name} grammar for tree-sitter")), Some),
            authors: {
                let authors = old_config
                    .author
                    .map_or_else(|| vec![].into_iter(), |a| vec![a].into_iter())
                    .chain(old_config.maintainers.unwrap_or_default())
                    .filter_map(|a| match a {
                        PackageJSONAuthor::String(s) => {
                            let mut name = s.trim().to_string();
                            if name.is_empty() {
                                return None;
                            }

                            let mut email = None;
                            let mut url = None;

                            if let Some(url_start) = name.rfind('(') {
                                if let Some(url_end) = name.rfind(')') {
                                    url = Some(name[url_start + 1..url_end].trim().to_string());
                                    name = name[..url_start].trim().to_string();
                                }
                            }

                            if let Some(email_start) = name.rfind('<') {
                                if let Some(email_end) = name.rfind('>') {
                                    email =
                                        Some(name[email_start + 1..email_end].trim().to_string());
                                    name = name[..email_start].trim().to_string();
                                }
                            }

                            Some(Author { name, email, url })
                        }
                        PackageJSONAuthor::Object { name, email, url } => {
                            if name.is_empty() {
                                None
                            } else {
                                Some(Author { name, email, url })
                            }
                        }
                    })
                    .collect::<Vec<_>>();
                if authors.is_empty() {
                    None
                } else {
                    Some(authors)
                }
            },
            links: Some(Links {
                repository: old_config
                    .repository
                    .map(|r| match r {
                        PackageJSONRepository::String(s) => {
                            if let Some(stripped) = s.strip_prefix("github:") {
                                Url::parse(&format!("https://github.com/{stripped}"))
                            } else if Regex::new(r"^[\w.-]+/[\w.-]+$").unwrap().is_match(&s) {
                                Url::parse(&format!("https://github.com/{s}"))
                            } else if let Some(stripped) = s.strip_prefix("gitlab:") {
                                Url::parse(&format!("https://gitlab.com/{stripped}"))
                            } else if let Some(stripped) = s.strip_prefix("bitbucket:") {
                                Url::parse(&format!("https://bitbucket.org/{stripped}"))
                            } else {
                                Url::parse(&s)
                            }
                        }
                        PackageJSONRepository::Object { url, .. } => Url::parse(&url),
                    })
                    .transpose()?
                    .unwrap_or_else(|| {
                        Url::parse(&format!(
                            "https://github.com/tree-sitter/tree-sitter-{name}"
                        ))
                        .expect("Failed to parse default repository URL")
                    }),
                homepage: None,
            }),
            namespace: None,
        },
        bindings: Bindings::default(),
    };

    write_file(
        &tree_sitter_json_path,
        serde_json::to_string_pretty(&new_config)? + "\n",
    )?;

    // Remove the `tree-sitter` field in-place
    let mut package_json = serde_json::from_str::<Map<String, Value>>(
        &fs::read_to_string(&package_json_path)
            .with_context(|| format!("Failed to read package.json in {}", root_path.display()))?,
    )
    .unwrap();
    package_json.remove("tree-sitter");
    write_file(
        &root_path.join("package.json"),
        serde_json::to_string_pretty(&package_json)? + "\n",
    )?;

    println!("Warning: your package.json's `tree-sitter` field has been automatically migrated to the new `tree-sitter.json` config file");
    println!(
        "For more information, visit https://tree-sitter.github.io/tree-sitter/creating-parsers"
    );

    Ok(true)
}

pub fn generate_grammar_files(
    repo_path: &Path,
    language_name: &str,
    allow_update: bool,
    opts: Option<&JsonConfigOpts>,
) -> Result<()> {
    let dashed_language_name = language_name.to_kebab_case();

    let tree_sitter_config = missing_path_else(
        repo_path.join("tree-sitter.json"),
        true,
        |path| {
            // invariant: opts is always Some when `tree-sitter.json` doesn't exist
            let Some(opts) = opts else { unreachable!() };

            let tree_sitter_json = opts.clone().to_tree_sitter_json();
            write_file(path, serde_json::to_string_pretty(&tree_sitter_json)?)
        },
        |path| {
            // updating the config, if needed
            if let Some(opts) = opts {
                let tree_sitter_json = opts.clone().to_tree_sitter_json();
                write_file(path, serde_json::to_string_pretty(&tree_sitter_json)?)?;
            }
            Ok(())
        },
    )?;

    let tree_sitter_config = serde_json::from_str::<TreeSitterJSON>(
        &fs::read_to_string(tree_sitter_config.as_path())
            .with_context(|| "Failed to read tree-sitter.json")?,
    )?;

    let authors = tree_sitter_config.metadata.authors.as_ref();
    let camel_name = tree_sitter_config.grammars[0]
        .camelcase
        .clone()
        .unwrap_or_else(|| language_name.to_upper_camel_case());

    let generate_opts = GenerateOpts {
        author_name: authors
            .map(|a| a.first().map(|a| a.name.as_str()))
            .unwrap_or_default(),
        author_email: authors
            .map(|a| a.first().and_then(|a| a.email.as_deref()))
            .unwrap_or_default(),
        author_url: authors
            .map(|a| a.first().and_then(|a| a.url.as_deref()))
            .unwrap_or_default(),
        license: tree_sitter_config.metadata.license.as_deref(),
        description: tree_sitter_config.metadata.description.as_deref(),
        repository: tree_sitter_config
            .metadata
            .links
            .as_ref()
            .map(|l| l.repository.as_str()),
        version: &tree_sitter_config.metadata.version,
        camel_parser_name: &camel_name,
    };

    // Create package.json
    missing_path(repo_path.join("package.json"), |path| {
        generate_file(
            path,
            PACKAGE_JSON_TEMPLATE,
            dashed_language_name.as_str(),
            &generate_opts,
        )
    })?;

    // Do not create a grammar.js file in a repo with multiple language configs
    if !tree_sitter_config.has_multiple_language_configs() {
        missing_path(repo_path.join("grammar.js"), |path| {
            generate_file(path, GRAMMAR_JS_TEMPLATE, language_name, &generate_opts)
        })?;
    }

    // Write .gitignore file
    missing_path(repo_path.join(".gitignore"), |path| {
        generate_file(path, GITIGNORE_TEMPLATE, language_name, &generate_opts)
    })?;

    // Write .gitattributes file
    missing_path(repo_path.join(".gitattributes"), |path| {
        generate_file(path, GITATTRIBUTES_TEMPLATE, language_name, &generate_opts)
    })?;

    // Write .editorconfig file
    missing_path(repo_path.join(".editorconfig"), |path| {
        generate_file(path, EDITORCONFIG_TEMPLATE, language_name, &generate_opts)
    })?;

    let bindings_dir = repo_path.join("bindings");

    // Generate Rust bindings
    if tree_sitter_config.bindings.rust {
        missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
            missing_path(path.join("lib.rs"), |path| {
                generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(path.join("build.rs"), |path| {
                generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(repo_path.join("Cargo.toml"), |path| {
                generate_file(
                    path,
                    CARGO_TOML_TEMPLATE,
                    dashed_language_name.as_str(),
                    &generate_opts,
                )
            })?;

            Ok(())
        })?;
    }

    // Generate Node bindings
    if tree_sitter_config.bindings.node {
        missing_path(bindings_dir.join("node"), create_dir)?.apply(|path| {
            missing_path_else(
                path.join("index.js"),
                allow_update,
                |path| generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts),
                |path| {
                    let contents = fs::read_to_string(path)?;
                    if !contents.contains("bun") {
                        generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts)?;
                    }
                    Ok(())
                },
            )?;

            missing_path(path.join("index.d.ts"), |path| {
                generate_file(path, INDEX_D_TS_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(path.join("binding_test.js"), |path| {
                generate_file(
                    path,
                    BINDING_TEST_JS_TEMPLATE,
                    language_name,
                    &generate_opts,
                )
            })?;

            missing_path(path.join("binding.cc"), |path| {
                generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(repo_path.join("binding.gyp"), |path| {
                generate_file(path, BINDING_GYP_TEMPLATE, language_name, &generate_opts)
            })?;

            Ok(())
        })?;
    }

    // Generate C bindings
    if tree_sitter_config.bindings.c {
        missing_path(bindings_dir.join("c"), create_dir)?.apply(|path| {
            missing_path(
                path.join(format!("tree-sitter-{language_name}.h")),
                |path| generate_file(path, PARSER_NAME_H_TEMPLATE, language_name, &generate_opts),
            )?;

            missing_path(
                path.join(format!("tree-sitter-{language_name}.pc.in")),
                |path| {
                    generate_file(
                        path,
                        PARSER_NAME_PC_IN_TEMPLATE,
                        language_name,
                        &generate_opts,
                    )
                },
            )?;

            missing_path(repo_path.join("Makefile"), |path| {
                generate_file(path, MAKEFILE_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path_else(
                repo_path.join("CMakeLists.txt"),
                allow_update,
                |path| generate_file(path, CMAKELISTS_TXT_TEMPLATE, language_name, &generate_opts),
                |path| {
                    let contents = fs::read_to_string(path)?;
                    let old = "add_custom_target(test";
                    if contents.contains(old) {
                        write_file(path, contents.replace(old, "add_custom_target(ts-test"))
                    } else {
                        Ok(())
                    }
                },
            )?;

            Ok(())
        })?;
    }

    // Generate Go bindings
    if tree_sitter_config.bindings.go {
        missing_path(bindings_dir.join("go"), create_dir)?.apply(|path| {
            missing_path(path.join("binding.go"), |path| {
                generate_file(path, BINDING_GO_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(path.join("binding_test.go"), |path| {
                generate_file(
                    path,
                    BINDING_TEST_GO_TEMPLATE,
                    language_name,
                    &generate_opts,
                )
            })?;

            missing_path(repo_path.join("go.mod"), |path| {
                generate_file(path, GO_MOD_TEMPLATE, language_name, &generate_opts)
            })?;

            Ok(())
        })?;
    }

    // Generate Python bindings
    if tree_sitter_config.bindings.python {
        missing_path(bindings_dir.join("python"), create_dir)?.apply(|path| {
            let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
            missing_path(&lang_path, create_dir)?;

            missing_path(lang_path.join("binding.c"), |path| {
                generate_file(path, PY_BINDING_C_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(lang_path.join("__init__.py"), |path| {
                generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(lang_path.join("__init__.pyi"), |path| {
                generate_file(path, INIT_PYI_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(lang_path.join("py.typed"), |path| {
                generate_file(path, "", language_name, &generate_opts) // py.typed is empty
            })?;

            missing_path(path.join("tests"), create_dir)?.apply(|path| {
                missing_path(path.join("test_binding.py"), |path| {
                    generate_file(
                        path,
                        TEST_BINDING_PY_TEMPLATE,
                        language_name,
                        &generate_opts,
                    )
                })?;
                Ok(())
            })?;

            missing_path(repo_path.join("setup.py"), |path| {
                generate_file(path, SETUP_PY_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(repo_path.join("pyproject.toml"), |path| {
                generate_file(
                    path,
                    PYPROJECT_TOML_TEMPLATE,
                    dashed_language_name.as_str(),
                    &generate_opts,
                )
            })?;

            Ok(())
        })?;
    }

    // Generate Swift bindings
    if tree_sitter_config.bindings.swift {
        missing_path(bindings_dir.join("swift"), create_dir)?.apply(|path| {
            let lang_path = path.join(format!("TreeSitter{camel_name}"));
            missing_path(&lang_path, create_dir)?;

            missing_path(lang_path.join(format!("{language_name}.h")), |path| {
                generate_file(path, PARSER_NAME_H_TEMPLATE, language_name, &generate_opts)
            })?;

            missing_path(
                path.join(format!("TreeSitter{camel_name}Tests")),
                create_dir,
            )?
            .apply(|path| {
                missing_path(
                    path.join(format!("TreeSitter{camel_name}Tests.swift")),
                    |path| generate_file(path, TESTS_SWIFT_TEMPLATE, language_name, &generate_opts),
                )?;

                Ok(())
            })?;

            missing_path(repo_path.join("Package.swift"), |path| {
                generate_file(path, PACKAGE_SWIFT_TEMPLATE, language_name, &generate_opts)
            })?;

            Ok(())
        })?;
    }

    Ok(())
}

pub fn get_root_path(path: &Path) -> Result<PathBuf> {
    let mut pathbuf = path.to_owned();
    let filename = path.file_name().unwrap().to_str().unwrap();
    let is_package_json = filename == "package.json";
    loop {
        let json = pathbuf
            .exists()
            .then(|| {
                let contents = fs::read_to_string(pathbuf.as_path())
                    .with_context(|| format!("Failed to read {filename}"))?;
                if is_package_json {
                    serde_json::from_str::<Map<String, Value>>(&contents)
                        .context(format!("Failed to parse {filename}"))
                        .map(|v| v.contains_key("tree-sitter"))
                } else {
                    serde_json::from_str::<TreeSitterJSON>(&contents)
                        .context(format!("Failed to parse {filename}"))
                        .map(|_| true)
                }
            })
            .transpose()?;
        if json == Some(true) {
            return Ok(pathbuf.parent().unwrap().to_path_buf());
        }
        pathbuf.pop(); // filename
        if !pathbuf.pop() {
            return Err(anyhow!(format!(
                concat!(
                    "Failed to locate a {} file,",
                    " please ensure you have one, and if you don't then consult the docs",
                ),
                filename
            )));
        }
        pathbuf.push(filename);
    }
}
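
In effect, `get_root_path` walks upward from the given config-file path until it finds a directory whose `package.json` contains a `tree-sitter` key (or, for `tree-sitter.json`, whose config simply parses). A hypothetical call, with a made-up layout:

fn demo() -> Result<PathBuf> {
    // Returns /work/tree-sitter-mylang if that directory's package.json
    // carries a "tree-sitter" field; intermediate directories without one
    // are skipped over.
    get_root_path(Path::new("/work/tree-sitter-mylang/bindings/package.json"))
}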

fn generate_file(
    path: &Path,
    template: &str,
    language_name: &str,
    generate_opts: &GenerateOpts,
) -> Result<()> {
    let filename = path.file_name().unwrap().to_str().unwrap();

    let mut replacement = template
        .replace(
            CAMEL_PARSER_NAME_PLACEHOLDER,
            generate_opts.camel_parser_name,
        )
        .replace(
            UPPER_PARSER_NAME_PLACEHOLDER,
            &language_name.to_shouty_snake_case(),
        )
        .replace(
            LOWER_PARSER_NAME_PLACEHOLDER,
            &language_name.to_snake_case(),
        )
        .replace(PARSER_NAME_PLACEHOLDER, language_name)
        .replace(CLI_VERSION_PLACEHOLDER, CLI_VERSION)
        .replace(RUST_BINDING_VERSION_PLACEHOLDER, RUST_BINDING_VERSION)
        .replace(ABI_VERSION_MAX_PLACEHOLDER, &ABI_VERSION_MAX.to_string())
        .replace(
            PARSER_VERSION_PLACEHOLDER,
            &generate_opts.version.to_string(),
        );

    if let Some(name) = generate_opts.author_name {
        replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER, name);
    } else {
        match filename {
            "package.json" => {
                replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_JS, "");
            }
            "pyproject.toml" => {
                replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_PY, "");
            }
            "grammar.js" => {
                replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_GRAMMAR, "");
            }
            "Cargo.toml" => {
                replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_RS, "");
            }
            _ => {}
        }
    }

    if let Some(email) = generate_opts.author_email {
        replacement = match filename {
            "Cargo.toml" | "grammar.js" => {
                replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, &format!("<{email}>"))
            }
            _ => replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, email),
        }
    } else {
        match filename {
            "package.json" => {
                replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_JS, "");
            }
            "pyproject.toml" => {
                replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_PY, "");
            }
            "grammar.js" => {
                replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_GRAMMAR, "");
            }
            "Cargo.toml" => {
                replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_RS, "");
            }
            _ => {}
        }
    }

    if filename == "package.json" {
        if let Some(url) = generate_opts.author_url {
            replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER, url);
        } else {
            replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER_JS, "");
        }
    }

    if generate_opts.author_name.is_none()
        && generate_opts.author_email.is_none()
        && generate_opts.author_url.is_none()
        && filename == "package.json"
    {
        if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_JS) {
            if let Some(end_idx) = replacement[start_idx..]
                .find("},")
                .map(|i| i + start_idx + 2)
            {
                replacement.replace_range(start_idx..end_idx, "");
            }
        }
    } else if generate_opts.author_name.is_none() && generate_opts.author_email.is_none() {
        match filename {
            "pyproject.toml" => {
                if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_PY) {
                    if let Some(end_idx) = replacement[start_idx..]
                        .find("}]")
                        .map(|i| i + start_idx + 2)
                    {
                        replacement.replace_range(start_idx..end_idx, "");
                    } else {
                        println!("none 2");
                    }
                } else {
                    println!("none 1");
                }
            }
            "grammar.js" => {
                if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_GRAMMAR) {
                    if let Some(end_idx) = replacement[start_idx..]
                        .find(" \n")
                        .map(|i| i + start_idx + 1)
                    {
                        replacement.replace_range(start_idx..end_idx, "");
                    } else {
                        println!("none 2");
                    }
                } else {
                    println!("none 1");
                }
            }
            "Cargo.toml" => {
                if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_RS) {
                    if let Some(end_idx) = replacement[start_idx..]
                        .find("\"]")
                        .map(|i| i + start_idx + 2)
                    {
                        replacement.replace_range(start_idx..end_idx, "");
                    }
                }
            }
            _ => {}
        }
    }

    match generate_opts.license {
        Some(license) => replacement = replacement.replace(PARSER_LICENSE_PLACEHOLDER, license),
        _ => replacement = replacement.replace(PARSER_LICENSE_PLACEHOLDER, "MIT"),
    }

    match generate_opts.description {
        Some(description) => {
            replacement = replacement.replace(PARSER_DESCRIPTION_PLACEHOLDER, description);
        }
        _ => {
            replacement = replacement.replace(
                PARSER_DESCRIPTION_PLACEHOLDER,
                &format!(
                    "{} grammar for tree-sitter",
                    generate_opts.camel_parser_name,
                ),
            );
        }
    }

    match generate_opts.repository {
        Some(repository) => {
            replacement = replacement
                .replace(
                    PARSER_URL_STRIPPED_PLACEHOLDER,
                    &repository.replace("https://", "").to_lowercase(),
                )
                .replace(PARSER_URL_PLACEHOLDER, &repository.to_lowercase());
        }
        _ => {
            replacement = replacement
                .replace(
                    PARSER_URL_STRIPPED_PLACEHOLDER,
                    &format!(
                        "github.com/tree-sitter/tree-sitter-{}",
                        language_name.to_lowercase()
                    ),
                )
                .replace(
                    PARSER_URL_PLACEHOLDER,
                    &format!(
                        "https://github.com/tree-sitter/tree-sitter-{}",
                        language_name.to_lowercase()
                    ),
                );
        }
    }

    write_file(path, replacement)
}
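
`generate_file` is plain placeholder substitution over the embedded templates; a toy reduction of the same idea (the `demo` wrapper, template line, and name are made up):

fn demo() {
    let template = r#""name": "tree-sitter-PARSER_NAME","#;
    let out = template.replace(PARSER_NAME_PLACEHOLDER, "mylang");
    assert_eq!(out, r#""name": "tree-sitter-mylang","#);
}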

fn create_dir(path: &Path) -> Result<()> {
    fs::create_dir_all(path)
        .with_context(|| format!("Failed to create {:?}", path.to_string_lossy()))
}

#[derive(PartialEq, Eq, Debug)]
enum PathState<P>
where
    P: AsRef<Path>,
{
    Exists(P),
    Missing(P),
}

#[allow(dead_code)]
impl<P> PathState<P>
where
    P: AsRef<Path>,
{
    fn exists(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
        if let Self::Exists(path) = self {
            action(path.as_ref())?;
        }
        Ok(self)
    }

    fn missing(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
        if let Self::Missing(path) = self {
            action(path.as_ref())?;
        }
        Ok(self)
    }

    fn apply(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
        action(self.as_path())?;
        Ok(self)
    }

    fn apply_state(&self, mut action: impl FnMut(&Self) -> Result<()>) -> Result<&Self> {
        action(self)?;
        Ok(self)
    }

    fn as_path(&self) -> &Path {
        match self {
            Self::Exists(path) | Self::Missing(path) => path.as_ref(),
        }
    }
}

fn missing_path<P, F>(path: P, mut action: F) -> Result<PathState<P>>
where
    P: AsRef<Path>,
    F: FnMut(&Path) -> Result<()>,
{
    let path_ref = path.as_ref();
    if !path_ref.exists() {
        action(path_ref)?;
        Ok(PathState::Missing(path))
    } else {
        Ok(PathState::Exists(path))
    }
}

fn missing_path_else<P, T, F>(
    path: P,
    allow_update: bool,
    mut action: T,
    mut else_action: F,
) -> Result<PathState<P>>
where
    P: AsRef<Path>,
    T: FnMut(&Path) -> Result<()>,
    F: FnMut(&Path) -> Result<()>,
{
    let path_ref = path.as_ref();
    if !path_ref.exists() {
        action(path_ref)?;
        Ok(PathState::Missing(path))
    } else {
        if allow_update {
            else_action(path_ref)?;
        }
        Ok(PathState::Exists(path))
    }
}
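
A hedged sketch of the combinator pattern these helpers enable (the `demo` wrapper and paths are hypothetical): `missing_path` runs its action only when the path is absent, and the returned `PathState` lets follow-up actions be chained with `apply` regardless of whether the path already existed:

fn demo(repo_path: &Path) -> Result<()> {
    missing_path(repo_path.join("bindings"), create_dir)?.apply(|path| {
        // Runs whether or not `bindings/` already existed.
        missing_path(path.join("rust"), create_dir)?;
        Ok(())
    })?;
    Ok(())
}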

@@ -1,30 +0,0 @@
use log::{LevelFilter, Log, Metadata, Record};

#[allow(dead_code)]
struct Logger {
    pub filter: Option<String>,
}

impl Log for Logger {
    fn enabled(&self, _: &Metadata) -> bool {
        true
    }

    fn log(&self, record: &Record) {
        eprintln!(
            "[{}] {}",
            record
                .module_path()
                .unwrap_or_default()
                .trim_start_matches("rust_tree_sitter_cli::"),
            record.args()
        );
    }

    fn flush(&self) {}
}

pub fn init() {
    log::set_boxed_logger(Box::new(Logger { filter: None })).unwrap();
    log::set_max_level(LevelFilter::Info);
}

1461 cli/src/main.rs
File diff suppressed because it is too large

@@ -1,176 +0,0 @@
<head>
  <meta charset="utf-8">
  <title>tree-sitter THE_LANGUAGE_NAME</title>
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.45.0/codemirror.min.css">
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.18.0/clusterize.min.css">
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-32x32.png" sizes="32x32" />
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-16x16.png" sizes="16x16" />
</head>

<body>
  <div id="playground-container" style="visibility: hidden;">
    <header>
      <div class=header-item>
        <bold>THE_LANGUAGE_NAME</bold>
      </div>

      <div class=header-item>
        <label for="logging-checkbox">log</label>
        <input id="logging-checkbox" type="checkbox"></input>
      </div>

      <div class=header-item>
        <label for="query-checkbox">query</label>
        <input id="query-checkbox" type="checkbox"></input>
      </div>

      <div class=header-item>
        <label for="update-time">parse time: </label>
        <span id="update-time"></span>
      </div>

      <div class=header-item>
        <a href="https://tree-sitter.github.io/tree-sitter/playground#about">(?)</a>
      </div>

      <select id="language-select" style="display: none;">
        <option value="parser">Parser</option>
      </select>
    </header>

    <main>
      <div id="input-pane">
        <div id="code-container">
          <textarea id="code-input"></textarea>
        </div>

        <div id="query-container" style="visibility: hidden; position: absolute;">
          <textarea id="query-input"></textarea>
        </div>
      </div>

      <div id="output-container-scroll">
        <pre id="output-container" class="highlight"></pre>
      </div>
    </main>
  </div>

  <script
    src="https://code.jquery.com/jquery-3.3.1.min.js"
    crossorigin="anonymous">
  </script>

  <script src="https://cdnjs.cloudflare.com/ajax/libs/codemirror/5.45.0/codemirror.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.18.0/clusterize.min.js"></script>

  <script>LANGUAGE_BASE_URL = "";</script>
  <script src=tree-sitter.js></script>
  <script src=playground.js></script>

  <style>
    body {
      margin: 0;
      padding: 0;
    }

    #playground-container {
      width: 100%;
      height: 100%;
      display: flex;
      flex-direction: column;
    }

    header {
      box-sizing: border-box;
      display: flex;
      padding: 20px;
      height: 60px;
      border-bottom: 1px solid #aaa;
    }

    main {
      flex: 1;
      position: relative;
    }

    #input-pane {
      position: absolute;
      top: 0;
      left: 0;
      bottom: 0;
      right: 50%;
      display: flex;
      flex-direction: column;
    }

    #code-container, #query-container {
      flex: 1;
      position: relative;
      overflow: hidden;
      border-right: 1px solid #aaa;
      border-bottom: 1px solid #aaa;
    }

    #output-container-scroll {
      position: absolute;
      top: 0;
      left: 50%;
      bottom: 0;
      right: 0;
    }

    .header-item {
      margin-right: 30px;
    }

    #playground-container .CodeMirror {
      position: absolute;
      top: 0;
      bottom: 0;
      left: 0;
      right: 0;
      height: 100%;
    }

    #output-container-scroll {
      flex: 1;
      padding: 0;
      overflow: auto;
    }

    #output-container {
      padding: 0 10px;
      margin: 0;
    }

    #logging-checkbox {
      vertical-align: middle;
    }

    .CodeMirror div.CodeMirror-cursor {
      border-left: 3px solid red;
    }

    a {
      text-decoration: none;
      color: #040404;
      padding: 2px;
    }

    a:hover {
      text-decoration: underline;
    }

    a.highlighted {
      background-color: #d9d9d9;
      color: red;
      border-radius: 3px;
      text-decoration: underline;
    }

    .query-error {
      text-decoration: underline red dashed;
      -webkit-text-decoration: underline red dashed;
    }
  </style>
</body>

146 cli/src/query.rs
@@ -1,146 +0,0 @@
use std::{
    fs,
    io::{self, Write},
    ops::Range,
    path::Path,
    time::Instant,
};

use anstyle::AnsiColor;
use anyhow::{Context, Result};
use streaming_iterator::StreamingIterator;
use tree_sitter::{Language, Parser, Point, Query, QueryCursor};

use crate::{
    query_testing::{self, to_utf8_point},
    test::paint,
};

#[allow(clippy::too_many_arguments)]
pub fn query_files_at_paths(
    language: &Language,
    paths: Vec<String>,
    query_path: &Path,
    ordered_captures: bool,
    byte_range: Option<Range<usize>>,
    point_range: Option<Range<Point>>,
    should_test: bool,
    quiet: bool,
    print_time: bool,
) -> Result<()> {
    let stdout = io::stdout();
    let mut stdout = stdout.lock();

    let query_source = fs::read_to_string(query_path)
        .with_context(|| format!("Error reading query file {query_path:?}"))?;
    let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;

    let mut query_cursor = QueryCursor::new();
    if let Some(range) = byte_range {
        query_cursor.set_byte_range(range);
    }
    if let Some(range) = point_range {
        query_cursor.set_point_range(range);
    }

    let mut parser = Parser::new();
    parser.set_language(language)?;

    for path in paths {
        let mut results = Vec::new();

        if !should_test {
            writeln!(&mut stdout, "{path}")?;
        }

        let source_code =
            fs::read(&path).with_context(|| format!("Error reading source file {path:?}"))?;
        let tree = parser.parse(&source_code, None).unwrap();

        let start = Instant::now();
        if ordered_captures {
            let mut captures =
                query_cursor.captures(&query, tree.root_node(), source_code.as_slice());
            while let Some((mat, capture_index)) = captures.next() {
                let capture = mat.captures[*capture_index];
                let capture_name = &query.capture_names()[capture.index as usize];
                if !quiet && !should_test {
                    writeln!(
                        &mut stdout,
                        "    pattern: {:>2}, capture: {} - {capture_name}, start: {}, end: {}, text: `{}`",
                        mat.pattern_index,
                        capture.index,
                        capture.node.start_position(),
                        capture.node.end_position(),
                        capture.node.utf8_text(&source_code).unwrap_or("")
                    )?;
                }
                results.push(query_testing::CaptureInfo {
                    name: (*capture_name).to_string(),
                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
                    end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
                });
            }
        } else {
            let mut matches =
                query_cursor.matches(&query, tree.root_node(), source_code.as_slice());
            while let Some(m) = matches.next() {
                if !quiet && !should_test {
                    writeln!(&mut stdout, "  pattern: {}", m.pattern_index)?;
                }
                for capture in m.captures {
                    let start = capture.node.start_position();
                    let end = capture.node.end_position();
                    let capture_name = &query.capture_names()[capture.index as usize];
                    if !quiet && !should_test {
                        if end.row == start.row {
                            writeln!(
                                &mut stdout,
                                "    capture: {} - {capture_name}, start: {start}, end: {end}, text: `{}`",
                                capture.index,
                                capture.node.utf8_text(&source_code).unwrap_or("")
                            )?;
                        } else {
                            writeln!(
                                &mut stdout,
                                "    capture: {capture_name}, start: {start}, end: {end}",
                            )?;
                        }
                    }
                    results.push(query_testing::CaptureInfo {
                        name: (*capture_name).to_string(),
                        start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
                        end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
                    });
                }
            }
        }
        if query_cursor.did_exceed_match_limit() {
            writeln!(
                &mut stdout,
                "  WARNING: Query exceeded maximum number of in-progress captures!"
            )?;
        }
        if should_test {
            let path_name = Path::new(&path).file_name().unwrap().to_str().unwrap();
            match query_testing::assert_expected_captures(&results, &path, &mut parser, language) {
                Ok(assertion_count) => {
                    println!(
                        "  ✓ {} ({} assertions)",
|
|
||||||
paint(Some(AnsiColor::Green), path_name),
|
|
||||||
assertion_count
|
|
||||||
);
|
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
println!(" ✗ {}", paint(Some(AnsiColor::Red), path_name));
|
|
||||||
return Err(e);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if print_time {
|
|
||||||
writeln!(&mut stdout, "{:?}", start.elapsed())?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
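For context, the heart of the file above is tree-sitter's streaming query API. Here is a minimal, self-contained sketch of the same matches loop, assuming the `tree-sitter`, `tree-sitter-rust`, and `streaming-iterator` crates; the query string is illustrative:

```rust
use streaming_iterator::StreamingIterator;
use tree_sitter::{Parser, Query, QueryCursor};

fn main() {
    let language: tree_sitter::Language = tree_sitter_rust::LANGUAGE.into();
    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();

    let source = b"fn main() {}";
    let tree = parser.parse(&source[..], None).unwrap();

    // Capture the name of every function item.
    let query = Query::new(&language, "(function_item name: (identifier) @name)").unwrap();
    let mut cursor = QueryCursor::new();

    // `matches` returns a StreamingIterator, so it is advanced with `next()`
    // rather than a `for` loop, exactly as in the deleted file above.
    let mut matches = cursor.matches(&query, tree.root_node(), &source[..]);
    while let Some(m) = matches.next() {
        for capture in m.captures {
            println!(
                "{}: {}",
                query.capture_names()[capture.index as usize],
                capture.node.utf8_text(&source[..]).unwrap_or("")
            );
        }
    }
}
```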
@@ -1,98 +0,0 @@
use std::{
    fs,
    io::{self, Write},
    path::Path,
    str,
    time::Instant,
};

use anyhow::{anyhow, Result};
use tree_sitter_loader::{Config, Loader};
use tree_sitter_tags::TagsContext;

use super::util;

pub fn generate_tags(
    loader: &Loader,
    loader_config: &Config,
    scope: Option<&str>,
    paths: &[String],
    quiet: bool,
    time: bool,
) -> Result<()> {
    let mut lang = None;
    if let Some(scope) = scope {
        lang = loader.language_configuration_for_scope(scope)?;
        if lang.is_none() {
            return Err(anyhow!("Unknown scope '{scope}'"));
        }
    }

    let mut context = TagsContext::new();
    let cancellation_flag = util::cancel_on_signal();
    let stdout = io::stdout();
    let mut stdout = stdout.lock();

    for path in paths {
        let path = Path::new(&path);
        let (language, language_config) = match lang.clone() {
            Some(v) => v,
            None => {
                if let Some(v) = loader.language_configuration_for_file_name(path)? {
                    v
                } else {
                    eprintln!("{}", util::lang_not_found_for_path(path, loader_config));
                    continue;
                }
            }
        };

        if let Some(tags_config) = language_config.tags_config(language)? {
            let indent = if paths.len() > 1 {
                if !quiet {
                    writeln!(&mut stdout, "{}", path.to_string_lossy())?;
                }
                "\t"
            } else {
                ""
            };

            let source = fs::read(path)?;
            let t0 = Instant::now();
            for tag in context
                .generate_tags(tags_config, &source, Some(&cancellation_flag))?
                .0
            {
                let tag = tag?;
                if !quiet {
                    write!(
                        &mut stdout,
                        "{indent}{:<10}\t | {:<8}\t{} {} - {} `{}`",
                        str::from_utf8(&source[tag.name_range]).unwrap_or(""),
                        &tags_config.syntax_type_name(tag.syntax_type_id),
                        if tag.is_definition { "def" } else { "ref" },
                        tag.span.start,
                        tag.span.end,
                        str::from_utf8(&source[tag.line_range]).unwrap_or(""),
                    )?;
                    if let Some(docs) = tag.docs {
                        if docs.len() > 120 {
                            write!(&mut stdout, "\t{:?}...", docs.get(0..120).unwrap_or(""))?;
                        } else {
                            write!(&mut stdout, "\t{:?}", &docs)?;
                        }
                    }
                    writeln!(&mut stdout)?;
                }
            }

            if time {
                writeln!(&mut stdout, "{indent}time: {}ms", t0.elapsed().as_millis(),)?;
            }
        } else {
            eprintln!("No tags config found for path {path:?}");
        }
    }

    Ok(())
}
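The same `TagsContext` flow can be exercised without the loader. A minimal sketch, assuming the `tree-sitter-tags` and `tree-sitter-rust` crates; the one-pattern tags query is hypothetical (real grammars ship a `queries/tags.scm`):

```rust
use tree_sitter_tags::{TagsConfiguration, TagsContext};

fn main() {
    let language = tree_sitter_rust::LANGUAGE.into();
    // An illustrative tags query: a `@name` capture inside a `@definition.*` pattern.
    let tags_query = "(function_item name: (identifier) @name) @definition.function";
    let config = TagsConfiguration::new(language, tags_query, "").unwrap();

    let source: &[u8] = b"fn parse() {}";
    let mut context = TagsContext::new();
    // The second tuple element reports whether a parse error was encountered.
    let (tags, _had_error) = context.generate_tags(&config, source, None).unwrap();
    for tag in tags {
        let tag = tag.unwrap();
        println!(
            "{} {:?}",
            std::str::from_utf8(&source[tag.name_range]).unwrap(),
            tag.span,
        );
    }
}
```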
@@ -1,42 +0,0 @@
"""PARSER_DESCRIPTION"""

from importlib.resources import files as _files

from ._binding import language


def _get_query(name, file):
    query = _files(f"{__package__}.queries") / file
    globals()[name] = query.read_text()
    return globals()[name]


def __getattr__(name):
    # NOTE: uncomment these to include any queries that this grammar contains:

    # if name == "HIGHLIGHTS_QUERY":
    #     return _get_query("HIGHLIGHTS_QUERY", "highlights.scm")
    # if name == "INJECTIONS_QUERY":
    #     return _get_query("INJECTIONS_QUERY", "injections.scm")
    # if name == "LOCALS_QUERY":
    #     return _get_query("LOCALS_QUERY", "locals.scm")
    # if name == "TAGS_QUERY":
    #     return _get_query("TAGS_QUERY", "tags.scm")

    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


__all__ = [
    "language",
    # "HIGHLIGHTS_QUERY",
    # "INJECTIONS_QUERY",
    # "LOCALS_QUERY",
    # "TAGS_QUERY",
]


def __dir__():
    return sorted(__all__ + [
        "__all__", "__builtins__", "__cached__", "__doc__", "__file__",
        "__loader__", "__name__", "__package__", "__path__", "__spec__",
    ])
@@ -1,10 +0,0 @@
from typing import Final

# NOTE: uncomment these to include any queries that this grammar contains:

# HIGHLIGHTS_QUERY: Final[str]
# INJECTIONS_QUERY: Final[str]
# LOCALS_QUERY: Final[str]
# TAGS_QUERY: Final[str]

def language() -> object: ...
@@ -1,9 +0,0 @@
const assert = require("node:assert");
const { test } = require("node:test");

const Parser = require("tree-sitter");

test("can load grammar", () => {
  const parser = new Parser();
  assert.doesNotThrow(() => parser.setLanguage(require(".")));
});
@@ -1,21 +0,0 @@
fn main() {
    let src_dir = std::path::Path::new("src");

    let mut c_config = cc::Build::new();
    c_config.std("c11").include(src_dir);

    #[cfg(target_env = "msvc")]
    c_config.flag("-utf-8");

    let parser_path = src_dir.join("parser.c");
    c_config.file(&parser_path);
    println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());

    let scanner_path = src_dir.join("scanner.c");
    if scanner_path.exists() {
        c_config.file(&scanner_path);
        println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
    }

    c_config.compile("tree-sitter-PARSER_NAME");
}
@@ -1,58 +0,0 @@
cmake_minimum_required(VERSION 3.13)

project(tree-sitter-PARSER_NAME
        VERSION "PARSER_VERSION"
        DESCRIPTION "PARSER_DESCRIPTION"
        HOMEPAGE_URL "PARSER_URL"
        LANGUAGES C)

option(BUILD_SHARED_LIBS "Build using shared libraries" ON)
option(TREE_SITTER_REUSE_ALLOCATOR "Reuse the library allocator" OFF)

set(TREE_SITTER_ABI_VERSION ABI_VERSION_MAX CACHE STRING "Tree-sitter ABI version")
if(NOT ${TREE_SITTER_ABI_VERSION} MATCHES "^[0-9]+$")
    unset(TREE_SITTER_ABI_VERSION CACHE)
    message(FATAL_ERROR "TREE_SITTER_ABI_VERSION must be an integer")
endif()

find_program(TREE_SITTER_CLI tree-sitter DOC "Tree-sitter CLI")

add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
                   DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
                   COMMAND "${TREE_SITTER_CLI}" generate src/grammar.json
                           --abi=${TREE_SITTER_ABI_VERSION}
                   WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
                   COMMENT "Generating parser.c")

add_library(tree-sitter-PARSER_NAME src/parser.c)
if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/src/scanner.c)
    target_sources(tree-sitter-PARSER_NAME PRIVATE src/scanner.c)
endif()
target_include_directories(tree-sitter-PARSER_NAME PRIVATE src)

target_compile_definitions(tree-sitter-PARSER_NAME PRIVATE
                           $<$<BOOL:${TREE_SITTER_REUSE_ALLOCATOR}>:TREE_SITTER_REUSE_ALLOCATOR>
                           $<$<CONFIG:Debug>:TREE_SITTER_DEBUG>)

set_target_properties(tree-sitter-PARSER_NAME
                      PROPERTIES
                      C_STANDARD 11
                      POSITION_INDEPENDENT_CODE ON
                      SOVERSION "${TREE_SITTER_ABI_VERSION}.${PROJECT_VERSION_MAJOR}"
                      DEFINE_SYMBOL "")

configure_file(bindings/c/tree-sitter-PARSER_NAME.pc.in
               "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-PARSER_NAME.pc" @ONLY)

include(GNUInstallDirs)

install(FILES bindings/c/tree-sitter-PARSER_NAME.h
        DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/tree_sitter")
install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-PARSER_NAME.pc"
        DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig")
install(TARGETS tree-sitter-PARSER_NAME
        LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")

add_custom_target(ts-test "${TREE_SITTER_CLI}" test
                  WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
                  COMMENT "tree-sitter test")
27 cli/src/templates/index.d.ts (vendored)

@@ -1,27 +0,0 @@
type BaseNode = {
  type: string;
  named: boolean;
};

type ChildNode = {
  multiple: boolean;
  required: boolean;
  types: BaseNode[];
};

type NodeInfo =
  | (BaseNode & {
      subtypes: BaseNode[];
    })
  | (BaseNode & {
      fields: { [name: string]: ChildNode };
      children: ChildNode[];
    });

type Language = {
  language: unknown;
  nodeTypeInfo: NodeInfo[];
};

declare const language: Language;
export = language;
@@ -1,11 +0,0 @@
const root = require("path").join(__dirname, "..", "..");

module.exports =
  typeof process.versions.bun === "string"
    // Support `bun build --compile` by being statically analyzable enough to find the .node file at build-time
    ? require(`../../prebuilds/${process.platform}-${process.arch}/tree-sitter-PARSER_NAME.node`)
    : require("node-gyp-build")(root);

try {
  module.exports.nodeTypeInfo = require("../../src/node-types.json");
} catch (_) {}
@@ -1,53 +0,0 @@
//! This crate provides CAMEL_PARSER_NAME language support for the [tree-sitter][] parsing library.
//!
//! Typically, you will use the [LANGUAGE][] constant to add this language to a
//! tree-sitter [Parser][], and then use the parser to parse some code:
//!
//! ```
//! let code = r#"
//! "#;
//! let mut parser = tree_sitter::Parser::new();
//! let language = tree_sitter_PARSER_NAME::LANGUAGE;
//! parser
//!     .set_language(&language.into())
//!     .expect("Error loading CAMEL_PARSER_NAME parser");
//! let tree = parser.parse(code, None).unwrap();
//! assert!(!tree.root_node().has_error());
//! ```
//!
//! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html
//! [tree-sitter]: https://tree-sitter.github.io/

use tree_sitter_language::LanguageFn;

extern "C" {
    fn tree_sitter_PARSER_NAME() -> *const ();
}

/// The tree-sitter [`LanguageFn`][LanguageFn] for this grammar.
///
/// [LanguageFn]: https://docs.rs/tree-sitter-language/*/tree_sitter_language/struct.LanguageFn.html
pub const LANGUAGE: LanguageFn = unsafe { LanguageFn::from_raw(tree_sitter_PARSER_NAME) };

/// The content of the [`node-types.json`][] file for this grammar.
///
/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
pub const NODE_TYPES: &str = include_str!("../../src/node-types.json");

// NOTE: uncomment these to include any queries that this grammar contains:

// pub const HIGHLIGHTS_QUERY: &str = include_str!("../../queries/highlights.scm");
// pub const INJECTIONS_QUERY: &str = include_str!("../../queries/injections.scm");
// pub const LOCALS_QUERY: &str = include_str!("../../queries/locals.scm");
// pub const TAGS_QUERY: &str = include_str!("../../queries/tags.scm");

#[cfg(test)]
mod tests {
    #[test]
    fn test_can_load_grammar() {
        let mut parser = tree_sitter::Parser::new();
        parser
            .set_language(&super::LANGUAGE.into())
            .expect("Error loading CAMEL_PARSER_NAME parser");
    }
}
@@ -1,64 +0,0 @@
from os import path
from platform import system

from setuptools import Extension, find_packages, setup
from setuptools.command.build import build
from wheel.bdist_wheel import bdist_wheel

sources = [
    "bindings/python/tree_sitter_LOWER_PARSER_NAME/binding.c",
    "src/parser.c",
]
if path.exists("src/scanner.c"):
    # append, not extend: extend() on a string would add it one character at a time
    sources.append("src/scanner.c")

if system() != "Windows":
    cflags = ["-std=c11", "-fvisibility=hidden"]
else:
    cflags = ["/std:c11", "/utf-8"]


class Build(build):
    def run(self):
        if path.isdir("queries"):
            dest = path.join(self.build_lib, "tree_sitter_PARSER_NAME", "queries")
            self.copy_tree("queries", dest)
        super().run()


class BdistWheel(bdist_wheel):
    def get_tag(self):
        python, abi, platform = super().get_tag()
        if python.startswith("cp"):
            python, abi = "cp39", "abi3"
        return python, abi, platform


setup(
    packages=find_packages("bindings/python"),
    package_dir={"": "bindings/python"},
    package_data={
        "tree_sitter_LOWER_PARSER_NAME": ["*.pyi", "py.typed"],
        "tree_sitter_LOWER_PARSER_NAME.queries": ["*.scm"],
    },
    ext_package="tree_sitter_LOWER_PARSER_NAME",
    ext_modules=[
        Extension(
            name="_binding",
            sources=sources,
            extra_compile_args=cflags,
            define_macros=[
                ("Py_LIMITED_API", "0x03090000"),
                ("PY_SSIZE_T_CLEAN", None),
                ("TREE_SITTER_HIDE_SYMBOLS", None),
            ],
            include_dirs=["src"],
            py_limited_api=True,
        )
    ],
    cmdclass={
        "build": Build,
        "bdist_wheel": BdistWheel
    },
    zip_safe=False
)
@@ -1,11 +0,0 @@
from unittest import TestCase

import tree_sitter, tree_sitter_LOWER_PARSER_NAME


class TestLanguage(TestCase):
    def test_can_load_grammar(self):
        try:
            tree_sitter.Language(tree_sitter_LOWER_PARSER_NAME.language())
        except Exception:
            self.fail("Error loading CAMEL_PARSER_NAME grammar")
1512 cli/src/test.rs
File diff suppressed because it is too large
@@ -1,278 +0,0 @@
use std::{
    future::Future,
    pin::{pin, Pin},
    ptr,
    task::{self, Context, Poll, RawWaker, RawWakerVTable, Waker},
};

use tree_sitter::Parser;

use super::helpers::fixtures::get_language;

#[test]
fn test_node_in_fut() {
    let (ret, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("bash");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let root = tree.root_node();
        let root_ref = &root;

        let fut_val_fn = || async {
            yield_now().await;
            root.child(0).unwrap().kind()
        };

        yield_now().await;

        let fut_ref_fn = || async {
            yield_now().await;
            root_ref.child(0).unwrap().kind()
        };

        let f1 = fut_val_fn().await;
        let f2 = fut_ref_fn().await;
        assert_eq!(f1, f2);

        let fut_val = async {
            yield_now().await;
            root.child(0).unwrap().kind()
        };

        let fut_ref = async {
            yield_now().await;
            root_ref.child(0).unwrap().kind()
        };

        let f1 = fut_val.await;
        let f2 = fut_ref.await;
        assert_eq!(f1, f2);

        f1
    })
    .join();
    assert_eq!(ret, "comment");
    assert_eq!(pended, 5);
}

#[test]
fn test_node_and_cursor_ref_in_fut() {
    let ((), pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("c");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let root = tree.root_node();
        let root_ref = &root;

        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;

        cursor_ref.goto_first_child();

        let fut_val = async {
            yield_now().await;
            let _ = root.to_sexp();
        };

        yield_now().await;

        let fut_ref = async {
            yield_now().await;
            let _ = root_ref.to_sexp();
            cursor_ref.goto_first_child();
        };

        fut_val.await;
        fut_ref.await;

        cursor_ref.goto_first_child();
    })
    .join();
    assert_eq!(pended, 3);
}

#[test]
fn test_node_and_cursor_ref_in_fut_with_fut_fabrics() {
    let ((), pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("javascript");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let root = tree.root_node();
        let root_ref = &root;

        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;

        cursor_ref.goto_first_child();

        let fut_val = || async {
            yield_now().await;
            let _ = root.to_sexp();
        };

        yield_now().await;

        let fut_ref = || async move {
            yield_now().await;
            let _ = root_ref.to_sexp();
            cursor_ref.goto_first_child();
        };

        fut_val().await;
        fut_val().await;
        fut_ref().await;
    })
    .join();
    assert_eq!(pended, 4);
}

#[test]
fn test_node_and_cursor_ref_in_fut_with_inner_spawns() {
    let (ret, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("rust");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;

        cursor_ref.goto_first_child();

        let fut_val = || {
            let tree = tree.clone();
            async move {
                let root = tree.root_node();
                let mut cursor = tree.walk();
                let cursor_ref = &mut cursor;
                yield_now().await;
                let _ = root.to_sexp();
                cursor_ref.goto_first_child();
            }
        };

        yield_now().await;

        let fut_ref = || {
            let tree = tree.clone();
            async move {
                let root = tree.root_node();
                let root_ref = &root;
                let mut cursor = tree.walk();
                let cursor_ref = &mut cursor;
                yield_now().await;
                let _ = root_ref.to_sexp();
                cursor_ref.goto_first_child();
            }
        };

        let ((), p1) = tokio_like_spawn(fut_val()).await.unwrap();
        let ((), p2) = tokio_like_spawn(fut_ref()).await.unwrap();

        cursor_ref.goto_first_child();

        fut_val().await;
        fut_val().await;
        fut_ref().await;

        cursor_ref.goto_first_child();

        p1 + p2
    })
    .join();
    assert_eq!(pended, 4);
    assert_eq!(ret, 2);
}

fn tokio_like_spawn<T>(future: T) -> JoinHandle<(T::Output, usize)>
where
    T: Future + Send + 'static,
    T::Output: Send + 'static,
{
    // No runtime, just a noop waker

    let waker = noop_waker();
    let mut cx = task::Context::from_waker(&waker);

    let mut pending = 0;
    let mut future = pin!(future);
    let ret = loop {
        match future.as_mut().poll(&mut cx) {
            Poll::Pending => pending += 1,
            Poll::Ready(r) => {
                break r;
            }
        }
    };
    JoinHandle::new((ret, pending))
}

async fn yield_now() {
    struct SimpleYieldNow {
        yielded: bool,
    }

    impl Future for SimpleYieldNow {
        type Output = ();

        fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
            cx.waker().wake_by_ref();
            if self.yielded {
                return Poll::Ready(());
            }
            self.yielded = true;
            Poll::Pending
        }
    }

    SimpleYieldNow { yielded: false }.await;
}

pub fn noop_waker() -> Waker {
    const VTABLE: RawWakerVTable = RawWakerVTable::new(
        // Cloning just returns a new no-op raw waker
        |_| RAW,
        // `wake` does nothing
        |_| {},
        // `wake_by_ref` does nothing
        |_| {},
        // Dropping does nothing as we don't allocate anything
        |_| {},
    );
    const RAW: RawWaker = RawWaker::new(ptr::null(), &VTABLE);
    unsafe { Waker::from_raw(RAW) }
}

struct JoinHandle<T> {
    data: Option<T>,
}

impl<T> JoinHandle<T> {
    #[must_use]
    const fn new(data: T) -> Self {
        Self { data: Some(data) }
    }

    fn join(&mut self) -> T {
        self.data.take().unwrap()
    }
}

impl<T: Unpin> Future for JoinHandle<T> {
    type Output = std::result::Result<T, ()>;

    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
        let data = self.get_mut().data.take().unwrap();
        Poll::Ready(Ok(data))
    }
}
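A tiny usage sketch of the no-runtime executor defined above: `tokio_like_spawn` polls the future to completion inline and counts how many times it returned `Pending`, which is what the `pended` assertions in these tests check:

```rust
#[test]
fn yield_twice() {
    let (value, pended) = tokio_like_spawn(async {
        yield_now().await;
        yield_now().await;
        42
    })
    .join();
    assert_eq!(value, 42);
    // Each yield_now() pends exactly once before completing.
    assert_eq!(pended, 2);
}
```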
@@ -1,121 +0,0 @@
use std::{
    collections::HashMap,
    os::raw::c_void,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Mutex,
    },
};

#[ctor::ctor]
unsafe fn initialize_allocation_recording() {
    tree_sitter::set_allocator(
        Some(ts_record_malloc),
        Some(ts_record_calloc),
        Some(ts_record_realloc),
        Some(ts_record_free),
    );
}

#[derive(Debug, PartialEq, Eq, Hash)]
struct Allocation(*const c_void);
unsafe impl Send for Allocation {}
unsafe impl Sync for Allocation {}

#[derive(Default)]
struct AllocationRecorder {
    enabled: AtomicBool,
    allocation_count: AtomicUsize,
    outstanding_allocations: Mutex<HashMap<Allocation, usize>>,
}

thread_local! {
    static RECORDER: AllocationRecorder = AllocationRecorder::default();
}

extern "C" {
    fn malloc(size: usize) -> *mut c_void;
    fn calloc(count: usize, size: usize) -> *mut c_void;
    fn realloc(ptr: *mut c_void, size: usize) -> *mut c_void;
    fn free(ptr: *mut c_void);
}

pub fn record<T>(f: impl FnOnce() -> T) -> T {
    RECORDER.with(|recorder| {
        recorder.enabled.store(true, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder.outstanding_allocations.lock().unwrap().clear();
    });

    let value = f();

    let outstanding_allocation_indices = RECORDER.with(|recorder| {
        recorder.enabled.store(false, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder
            .outstanding_allocations
            .lock()
            .unwrap()
            .drain()
            .map(|e| e.1)
            .collect::<Vec<_>>()
    });
    assert!(
        outstanding_allocation_indices.is_empty(),
        "Leaked allocation indices: {outstanding_allocation_indices:?}"
    );
    value
}

fn record_alloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            let count = recorder.allocation_count.fetch_add(1, SeqCst);
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .insert(Allocation(ptr), count);
        }
    });
}

fn record_dealloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .remove(&Allocation(ptr));
        }
    });
}

unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
    let result = malloc(size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
    let result = calloc(count, size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
    let result = realloc(ptr, size);
    if ptr.is_null() {
        record_alloc(result);
    } else if ptr != result {
        record_dealloc(ptr);
        record_alloc(result);
    }
    result
}

unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
    record_dealloc(ptr);
    free(ptr);
}
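A sketch of how a leak-checked test would use this recorder, assuming the module is reachable as `allocations` (as in the test `mod.rs` re-export shown later) and a grammar crate such as `tree-sitter-rust` is available:

```rust
#[test]
fn parse_without_leaks() {
    // `record` runs the closure with the counting allocator enabled and
    // asserts that every allocation made inside it was freed by the end.
    allocations::record(|| {
        let mut parser = tree_sitter::Parser::new();
        parser
            .set_language(&tree_sitter_rust::LANGUAGE.into())
            .unwrap();
        let tree = parser.parse("fn main() {}", None).unwrap();
        assert!(!tree.root_node().has_error());
        // The tree and parser drop here, releasing their allocations.
    });
}
```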
@@ -1,47 +0,0 @@
lazy_static! {
    pub static ref ROOT_DIR: PathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR")).parent().unwrap().to_owned();
    pub static ref FIXTURES_DIR: PathBuf = ROOT_DIR.join("test").join("fixtures");
    pub static ref HEADER_DIR: PathBuf = ROOT_DIR.join("lib").join("include");
    pub static ref GRAMMARS_DIR: PathBuf = ROOT_DIR.join("test").join("fixtures").join("grammars");
    pub static ref SCRATCH_BASE_DIR: PathBuf = {
        let result = ROOT_DIR.join("target").join("scratch");
        fs::create_dir_all(&result).unwrap();
        result
    };
    pub static ref WASM_DIR: PathBuf = ROOT_DIR.join("target").join("release");
    pub static ref SCRATCH_DIR: PathBuf = {
        // https://doc.rust-lang.org/reference/conditional-compilation.html
        let vendor = if cfg!(target_vendor = "apple") {
            "apple"
        } else if cfg!(target_vendor = "fortanix") {
            "fortanix"
        } else if cfg!(target_vendor = "pc") {
            "pc"
        } else {
            "unknown"
        };
        let env = if cfg!(target_env = "gnu") {
            "gnu"
        } else if cfg!(target_env = "msvc") {
            "msvc"
        } else if cfg!(target_env = "musl") {
            "musl"
        } else if cfg!(target_env = "sgx") {
            "sgx"
        } else {
            "unknown"
        };
        let endian = if cfg!(target_endian = "little") {
            "little"
        } else if cfg!(target_endian = "big") {
            "big"
        } else {
            "unknown"
        };

        let machine = format!("{}-{}-{vendor}-{env}-{endian}", std::env::consts::ARCH, std::env::consts::OS);
        let result = SCRATCH_BASE_DIR.join(machine);
        fs::create_dir_all(&result).unwrap();
        result
    };
}
@@ -1,97 +0,0 @@
use tree_sitter::{self, Parser};

use super::helpers::fixtures::get_language;

#[test]
fn test_lookahead_iterator() {
    let mut parser = Parser::new();
    let language = get_language("rust");
    parser.set_language(&language).unwrap();

    let tree = parser.parse("struct Stuff {}", None).unwrap();

    let mut cursor = tree.walk();

    assert!(cursor.goto_first_child()); // struct
    assert!(cursor.goto_first_child()); // struct keyword

    let next_state = cursor.node().next_parse_state();
    assert_ne!(next_state, 0);
    assert_eq!(
        next_state,
        language.next_state(cursor.node().parse_state(), cursor.node().grammar_id())
    );
    assert!((next_state as usize) < language.parse_state_count());
    assert!(cursor.goto_next_sibling()); // type_identifier
    assert_eq!(next_state, cursor.node().parse_state());
    assert_eq!(cursor.node().grammar_name(), "identifier");
    assert_ne!(cursor.node().grammar_id(), cursor.node().kind_id());

    let expected_symbols = ["//", "/*", "identifier", "line_comment", "block_comment"];
    let mut lookahead = language.lookahead_iterator(next_state).unwrap();
    assert_eq!(*lookahead.language(), language);
    assert!(lookahead.iter_names().eq(expected_symbols));

    lookahead.reset_state(next_state);
    assert!(lookahead.iter_names().eq(expected_symbols));

    lookahead.reset(&language, next_state);
    assert!(lookahead
        .map(|s| language.node_kind_for_id(s).unwrap())
        .eq(expected_symbols));
}

#[test]
fn test_lookahead_iterator_modifiable_only_by_mut() {
    let mut parser = Parser::new();
    let language = get_language("rust");
    parser.set_language(&language).unwrap();

    let tree = parser.parse("struct Stuff {}", None).unwrap();

    let mut cursor = tree.walk();

    assert!(cursor.goto_first_child()); // struct
    assert!(cursor.goto_first_child()); // struct keyword

    let next_state = cursor.node().next_parse_state();
    assert_ne!(next_state, 0);

    let mut lookahead = language.lookahead_iterator(next_state).unwrap();
    let _ = lookahead.next();

    let mut names = lookahead.iter_names();
    let _ = names.next();
}

#[test]
fn test_symbol_metadata_checks() {
    let language = get_language("rust");
    for i in 0..language.node_kind_count() {
        let sym = i as u16;
        let name = language.node_kind_for_id(sym).unwrap();
        match name {
            "_type"
            | "_expression"
            | "_pattern"
            | "_literal"
            | "_literal_pattern"
            | "_declaration_statement" => assert!(language.node_kind_is_supertype(sym)),

            "_raw_string_literal_start"
            | "_raw_string_literal_end"
            | "_line_doc_comment"
            | "_error_sentinel" => assert!(!language.node_kind_is_supertype(sym)),

            "enum_item" | "struct_item" | "type_item" => {
                assert!(language.node_kind_is_named(sym));
            }

            "=>" | "[" | "]" | "(" | ")" | "{" | "}" => {
                assert!(language.node_kind_is_visible(sym));
            }

            _ => {}
        }
    }
}
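Outside of tests, the same lookahead API is useful for "expected one of ..." diagnostics. A minimal sketch, assuming the `tree-sitter-rust` crate in place of the test fixtures:

```rust
use tree_sitter::Parser;

fn main() {
    let language: tree_sitter::Language = tree_sitter_rust::LANGUAGE.into();
    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();

    let tree = parser.parse("struct Stuff {}", None).unwrap();
    let mut cursor = tree.walk();
    cursor.goto_first_child(); // struct_item
    cursor.goto_first_child(); // `struct` keyword

    // List the token names that are valid after the keyword's parse state.
    let state = cursor.node().next_parse_state();
    if let Some(mut lookahead) = language.lookahead_iterator(state) {
        let expected: Vec<&str> = lookahead.iter_names().collect();
        println!("expected one of: {expected:?}");
    }
}
```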
@@ -1,26 +0,0 @@
mod async_context_test;
mod corpus_test;
mod detect_language;
mod helpers;
mod highlight_test;
mod language_test;
mod node_test;
mod parser_hang_test;
mod parser_test;
mod pathological_test;
mod query_test;
mod tags_test;
mod test_highlight_test;
mod test_tags_test;
mod text_provider_test;
mod tree_test;

#[cfg(feature = "wasm")]
mod wasm_language_test;

pub use crate::fuzz::{
    allocations,
    edits::{get_random_edit, invert_edit},
    random::Rand,
    ITERATION_COUNT,
};
@@ -1,103 +0,0 @@
// For some reason `Command::spawn` doesn't work in the CI environment for many exotic arches.
#![cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]

use std::{
    env::VarError,
    process::{Command, Stdio},
};

use tree_sitter::Parser;
use tree_sitter_generate::{generate_parser_for_grammar, load_grammar_file};

use crate::tests::helpers::fixtures::{fixtures_dir, get_test_language};

// The `sanitizing` cfg is required to avoid running these tests under a specific sanitizer,
// because they don't work well with subprocesses _(it's an assumption)_.
//
// Below are two alternative examples of how to disable tests for some arches,
// in case excluding the whole mod from compilation doesn't work well.
//
// XXX: It may also make sense to keep such tests ignored by default,
// to avoid surprises, and enable them on CI by passing an extra option explicitly:
//
// > cargo test -- --include-ignored
//
// #[cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]
// #[cfg_attr(not(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing))), ignore)]
//
#[test]
fn test_grammar_that_should_hang_and_not_segfault() {
    let parent_sleep_millis = 1000;
    let test_name = "test_grammar_that_should_hang_and_not_segfault";
    let test_var = "CARGO_HANG_TEST";

    eprintln!("  {test_name}");

    let tests_exec_path = std::env::args()
        .next()
        .expect("Failed to get tests executable path");

    match std::env::var(test_var) {
        Ok(v) if v == test_name => {
            eprintln!("    child process id {}", std::process::id());
            hang_test();
        }

        Err(VarError::NotPresent) => {
            eprintln!("    parent process id {}", std::process::id());
            let mut command = Command::new(tests_exec_path);
            command.arg(test_name).env(test_var, test_name);

            if std::env::args().any(|x| x == "--nocapture") {
                command.arg("--nocapture");
            } else {
                command.stdout(Stdio::null()).stderr(Stdio::null());
            }

            match command.spawn() {
                Ok(mut child) => {
                    std::thread::sleep(std::time::Duration::from_millis(parent_sleep_millis));
                    match child.try_wait() {
                        Ok(Some(status)) if status.success() => {
                            panic!("Child didn't hang and exited successfully")
                        }
                        Ok(Some(status)) => panic!(
                            "Child didn't hang and exited with status code: {:?}",
                            status.code()
                        ),
                        _ => (),
                    }
                    if let Err(e) = child.kill() {
                        eprintln!(
                            "Failed to kill hang test's process id: {}, error: {e}",
                            child.id()
                        );
                    }
                }
                Err(e) => panic!("{e}"),
            }
        }

        Err(e) => panic!("Env var error: {e}"),

        _ => unreachable!(),
    }
}

fn hang_test() {
    let test_grammar_dir = fixtures_dir()
        .join("test_grammars")
        .join("get_col_should_hang_not_crash");

    let grammar_json = load_grammar_file(&test_grammar_dir.join("grammar.js"), None).unwrap();
    let (parser_name, parser_code) = generate_parser_for_grammar(grammar_json.as_str()).unwrap();

    let language = get_test_language(&parser_name, &parser_code, Some(test_grammar_dir.as_path()));

    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();

    let code_that_should_hang = "\nHello";

    parser.parse(code_that_should_hang, None).unwrap();
}
@@ -1,264 +0,0 @@
use std::{fs, path::PathBuf, process::Command};

use anyhow::{anyhow, Context, Result};
use regex::Regex;
use tree_sitter_loader::TreeSitterJSON;

pub struct Version {
    pub version: String,
    pub current_dir: PathBuf,
}

impl Version {
    #[must_use]
    pub const fn new(version: String, current_dir: PathBuf) -> Self {
        Self {
            version,
            current_dir,
        }
    }

    pub fn run(self) -> Result<()> {
        let tree_sitter_json = self.current_dir.join("tree-sitter.json");

        let tree_sitter_json =
            serde_json::from_str::<TreeSitterJSON>(&fs::read_to_string(tree_sitter_json)?)?;

        let is_multigrammar = tree_sitter_json.grammars.len() > 1;

        self.update_treesitter_json().with_context(|| {
            format!(
                "Failed to update tree-sitter.json at {}",
                self.current_dir.display()
            )
        })?;
        self.update_cargo_toml().with_context(|| {
            format!(
                "Failed to update Cargo.toml at {}",
                self.current_dir.display()
            )
        })?;
        self.update_package_json().with_context(|| {
            format!(
                "Failed to update package.json at {}",
                self.current_dir.display()
            )
        })?;
        self.update_makefile(is_multigrammar).with_context(|| {
            format!(
                "Failed to update Makefile at {}",
                self.current_dir.display()
            )
        })?;
        self.update_cmakelists_txt().with_context(|| {
            format!(
                "Failed to update CMakeLists.txt at {}",
                self.current_dir.display()
            )
        })?;
        self.update_pyproject_toml().with_context(|| {
            format!(
                "Failed to update pyproject.toml at {}",
                self.current_dir.display()
            )
        })?;

        Ok(())
    }

    fn update_treesitter_json(&self) -> Result<()> {
        let tree_sitter_json = &fs::read_to_string(self.current_dir.join("tree-sitter.json"))?;

        let tree_sitter_json = tree_sitter_json
            .lines()
            .map(|line| {
                if line.contains("\"version\":") {
                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;

                    format!(
                        "{}{}{}",
                        &line[..start_quote],
                        self.version,
                        &line[end_quote..]
                    )
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("tree-sitter.json"), tree_sitter_json)?;

        Ok(())
    }

    fn update_cargo_toml(&self) -> Result<()> {
        if !self.current_dir.join("Cargo.toml").exists() {
            return Ok(());
        }

        let cargo_toml = fs::read_to_string(self.current_dir.join("Cargo.toml"))?;

        let cargo_toml = cargo_toml
            .lines()
            .map(|line| {
                if line.starts_with("version =") {
                    format!("version = \"{}\"", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("Cargo.toml"), cargo_toml)?;

        if self.current_dir.join("Cargo.lock").exists() {
            let Ok(cmd) = Command::new("cargo")
                .arg("generate-lockfile")
                .arg("--offline")
                .current_dir(&self.current_dir)
                .output()
            else {
                return Ok(()); // cargo is not `executable`, ignore
            };

            if !cmd.status.success() {
                let stderr = String::from_utf8_lossy(&cmd.stderr);
                return Err(anyhow!(
                    "Failed to run `cargo generate-lockfile`:\n{stderr}"
                ));
            }
        }

        Ok(())
    }

    fn update_package_json(&self) -> Result<()> {
        if !self.current_dir.join("package.json").exists() {
            return Ok(());
        }

        let package_json = &fs::read_to_string(self.current_dir.join("package.json"))?;

        let package_json = package_json
            .lines()
            .map(|line| {
                if line.contains("\"version\":") {
                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;

                    format!(
                        "{}{}{}",
                        &line[..start_quote],
                        self.version,
                        &line[end_quote..]
                    )
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("package.json"), package_json)?;

        if self.current_dir.join("package-lock.json").exists() {
            let Ok(cmd) = Command::new("npm")
                .arg("install")
                .arg("--package-lock-only")
                .current_dir(&self.current_dir)
                .output()
            else {
                return Ok(()); // npm is not `executable`, ignore
            };

            if !cmd.status.success() {
                let stderr = String::from_utf8_lossy(&cmd.stderr);
                return Err(anyhow!("Failed to run `npm install`:\n{stderr}"));
            }
        }

        Ok(())
    }

    fn update_makefile(&self, is_multigrammar: bool) -> Result<()> {
        let makefile = if is_multigrammar {
            if !self.current_dir.join("common").join("common.mak").exists() {
                return Ok(());
            }

            fs::read_to_string(self.current_dir.join("Makefile"))?
        } else {
            if !self.current_dir.join("Makefile").exists() {
                return Ok(());
            }

            fs::read_to_string(self.current_dir.join("Makefile"))?
        };

        let makefile = makefile
            .lines()
            .map(|line| {
                if line.starts_with("VERSION") {
                    format!("VERSION := {}", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("Makefile"), makefile)?;

        Ok(())
    }

    fn update_cmakelists_txt(&self) -> Result<()> {
        if !self.current_dir.join("CMakeLists.txt").exists() {
            return Ok(());
        }

        let cmake = fs::read_to_string(self.current_dir.join("CMakeLists.txt"))?;

        let re = Regex::new(r#"(\s*VERSION\s+)"[0-9]+\.[0-9]+\.[0-9]+""#)?;
        let cmake = re.replace(&cmake, format!(r#"$1"{}""#, self.version));

        fs::write(self.current_dir.join("CMakeLists.txt"), cmake.as_bytes())?;

        Ok(())
    }

    fn update_pyproject_toml(&self) -> Result<()> {
        if !self.current_dir.join("pyproject.toml").exists() {
            return Ok(());
        }

        let pyproject_toml = fs::read_to_string(self.current_dir.join("pyproject.toml"))?;

        let pyproject_toml = pyproject_toml
            .lines()
            .map(|line| {
                if line.starts_with("version =") {
                    format!("version = \"{}\"", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("pyproject.toml"), pyproject_toml)?;

        Ok(())
    }
}
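A short usage sketch of the version bumper above, assuming `Version` and `anyhow` are in scope and the caller supplies a valid semver string:

```rust
use std::env;

fn bump_grammar_version(new_version: &str) -> anyhow::Result<()> {
    // `run` rewrites tree-sitter.json, Cargo.toml, package.json, Makefile,
    // CMakeLists.txt, and pyproject.toml in the given directory, silently
    // skipping any manifest that does not exist.
    Version::new(new_version.to_string(), env::current_dir()?).run()
}
```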
258 cli/vendor/xterm-colors.json (vendored)

@@ -1,258 +0,0 @@
[
  "#000000", "#800000", "#008000", "#808000", "#000080", "#800080", "#008080", "#c0c0c0",
  "#808080", "#ff0000", "#00ff00", "#ffff00", "#0000ff", "#ff00ff", "#00ffff", "#ffffff",
  "#000000", "#00005f", "#000087", "#0000af", "#0000d7", "#0000ff",
  "#005f00", "#005f5f", "#005f87", "#005faf", "#005fd7", "#005fff",
  "#008700", "#00875f", "#008787", "#0087af", "#0087d7", "#0087ff",
  "#00af00", "#00af5f", "#00af87", "#00afaf", "#00afd7", "#00afff",
  "#00d700", "#00d75f", "#00d787", "#00d7af", "#00d7d7", "#00d7ff",
  "#00ff00", "#00ff5f", "#00ff87", "#00ffaf", "#00ffd7", "#00ffff",
  "#5f0000", "#5f005f", "#5f0087", "#5f00af", "#5f00d7", "#5f00ff",
  "#5f5f00", "#5f5f5f", "#5f5f87", "#5f5faf", "#5f5fd7", "#5f5fff",
  "#5f8700", "#5f875f", "#5f8787", "#5f87af", "#5f87d7", "#5f87ff",
  "#5faf00", "#5faf5f", "#5faf87", "#5fafaf", "#5fafd7", "#5fafff",
  "#5fd700", "#5fd75f", "#5fd787", "#5fd7af", "#5fd7d7", "#5fd7ff",
  "#5fff00", "#5fff5f", "#5fff87", "#5fffaf", "#5fffd7", "#5fffff",
  "#870000", "#87005f", "#870087", "#8700af", "#8700d7", "#8700ff",
  "#875f00", "#875f5f", "#875f87", "#875faf", "#875fd7", "#875fff",
  "#878700", "#87875f", "#878787", "#8787af", "#8787d7", "#8787ff",
  "#87af00", "#87af5f", "#87af87", "#87afaf", "#87afd7", "#87afff",
  "#87d700", "#87d75f", "#87d787", "#87d7af", "#87d7d7", "#87d7ff",
  "#87ff00", "#87ff5f", "#87ff87", "#87ffaf", "#87ffd7", "#87ffff",
  "#af0000", "#af005f", "#af0087", "#af00af", "#af00d7", "#af00ff",
  "#af5f00", "#af5f5f", "#af5f87", "#af5faf", "#af5fd7", "#af5fff",
  "#af8700", "#af875f", "#af8787", "#af87af", "#af87d7", "#af87ff",
  "#afaf00", "#afaf5f", "#afaf87", "#afafaf", "#afafd7", "#afafff",
  "#afd700", "#afd75f", "#afd787", "#afd7af", "#afd7d7", "#afd7ff",
  "#afff00", "#afff5f", "#afff87", "#afffaf", "#afffd7", "#afffff",
  "#d70000", "#d7005f", "#d70087", "#d700af", "#d700d7", "#d700ff",
  "#d75f00", "#d75f5f", "#d75f87", "#d75faf", "#d75fd7", "#d75fff",
  "#d78700", "#d7875f", "#d78787", "#d787af", "#d787d7", "#d787ff",
  "#d7af00", "#d7af5f", "#d7af87", "#d7afaf", "#d7afd7", "#d7afff",
  "#d7d700", "#d7d75f", "#d7d787", "#d7d7af", "#d7d7d7", "#d7d7ff",
  "#d7ff00", "#d7ff5f", "#d7ff87", "#d7ffaf", "#d7ffd7", "#d7ffff",
  "#ff0000", "#ff005f", "#ff0087", "#ff00af", "#ff00d7", "#ff00ff",
  "#ff5f00", "#ff5f5f", "#ff5f87", "#ff5faf", "#ff5fd7", "#ff5fff",
  "#ff8700", "#ff875f", "#ff8787", "#ff87af", "#ff87d7", "#ff87ff",
  "#ffaf00", "#ffaf5f", "#ffaf87", "#ffafaf", "#ffafd7", "#ffafff",
  "#ffd700", "#ffd75f", "#ffd787", "#ffd7af", "#ffd7d7", "#ffd7ff",
  "#ffff00", "#ffff5f", "#ffff87", "#ffffaf", "#ffffd7", "#ffffff",
  "#080808", "#121212", "#1c1c1c", "#262626", "#303030", "#3a3a3a", "#444444", "#4e4e4e",
  "#585858", "#626262", "#6c6c6c", "#767676", "#808080", "#8a8a8a", "#949494", "#9e9e9e",
  "#a8a8a8", "#b2b2b2", "#bcbcbc", "#c6c6c6", "#d0d0d0", "#dadada", "#e4e4e4", "#eeeeee"
]
@@ -8,13 +8,18 @@ rust-version.workspace = true
 readme = "README.md"
 homepage.workspace = true
 repository.workspace = true
+documentation = "https://docs.rs/tree-sitter-cli"
 license.workspace = true
 keywords.workspace = true
 categories.workspace = true
+include = ["build.rs", "README.md", "LICENSE", "benches/*", "src/**"]

 [lints]
 workspace = true

+[lib]
+path = "src/tree_sitter_cli.rs"
+
 [[bin]]
 name = "tree-sitter"
 path = "src/main.rs"
@@ -25,40 +30,38 @@ name = "benchmark"
 harness = false

 [features]
+default = ["qjs-rt"]
 wasm = ["tree-sitter/wasm", "tree-sitter-loader/wasm"]
+qjs-rt = ["tree-sitter-generate/qjs-rt"]

 [dependencies]
+ansi_colours.workspace = true
 anstyle.workspace = true
 anyhow.workspace = true
 bstr.workspace = true
 clap.workspace = true
 clap_complete.workspace = true
+clap_complete_nushell.workspace = true
+crc32fast.workspace = true
 ctor.workspace = true
 ctrlc.workspace = true
 dialoguer.workspace = true
-dirs.workspace = true
-filetime.workspace = true
 glob.workspace = true
 heck.workspace = true
 html-escape.workspace = true
-indexmap.workspace = true
 indoc.workspace = true
-lazy_static.workspace = true
 log.workspace = true
 memchr.workspace = true
 rand.workspace = true
 regex.workspace = true
-regex-syntax.workspace = true
-rustc-hash.workspace = true
+schemars.workspace = true
 semver.workspace = true
 serde.workspace = true
-serde_derive.workspace = true
 serde_json.workspace = true
 similar.workspace = true
-smallbitvec.workspace = true
 streaming-iterator.workspace = true
+thiserror.workspace = true
 tiny_http.workspace = true
-url.workspace = true
 walkdir.workspace = true
 wasmparser.workspace = true
 webbrowser.workspace = true

@@ -72,7 +75,7 @@ tree-sitter-tags.workspace = true

 [dev-dependencies]
 encoding_rs = "0.8.35"
-widestring = "1.1.0"
+widestring = "1.2.1"
 tree_sitter_proc_macro = { path = "src/tests/proc_macro", package = "tree-sitter-tests-proc-macro" }

 tempfile.workspace = true
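The new `[features]` section above makes the QuickJS runtime (`qjs-rt`) an optional, on-by-default capability forwarded to `tree-sitter-generate`. As a minimal sketch of how code typically consumes such a Cargo feature; the function below is hypothetical, not tree-sitter's actual API:

```rust
// Sketch only: how a Cargo feature like `qjs-rt` is usually consumed in code.
// `eval_grammar_js` is a hypothetical stand-in, not tree-sitter's API.

#[cfg(feature = "qjs-rt")]
fn eval_grammar_js(source: &str) -> String {
    // With the feature enabled, use the bundled QuickJS runtime.
    format!("evaluated {} bytes with QuickJS", source.len())
}

#[cfg(not(feature = "qjs-rt"))]
fn eval_grammar_js(source: &str) -> String {
    // Without it, fall back to an external JavaScript runtime.
    format!("evaluated {} bytes with an external runtime", source.len())
}

fn main() {
    println!("{}", eval_grammar_js("module.exports = grammar({ /* ... */ });"));
}
```

Since `default = ["qjs-rt"]`, consumers who want the fallback path can build with `--no-default-features`.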
crates/cli/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2018 Max Brunsfeld
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -7,7 +7,8 @@
 [npmjs.com]: https://www.npmjs.org/package/tree-sitter-cli
 [npmjs.com badge]: https://img.shields.io/npm/v/tree-sitter-cli.svg?color=%23BF4A4A

-The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`, `Linux`, and `Windows`.
+The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`,
+`Linux`, and `Windows`.

 ### Installation

@@ -34,9 +35,11 @@ The `tree-sitter` binary itself has no dependencies, but specific commands have

 ### Commands

-* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current working directory. See [the documentation] for more information.
+* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current
+  working directory. See [the documentation] for more information.

-* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory. See [the documentation] for more information.
+* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory.
+  See [the documentation] for more information.

 * `parse` - The `tree-sitter parse` command will parse a file (or list of files) using Tree-sitter parsers.
@@ -3,70 +3,77 @@ use std::{
     env, fs,
     path::{Path, PathBuf},
     str,
+    sync::LazyLock,
     time::Instant,
 };

 use anyhow::Context;
-use lazy_static::lazy_static;
+use log::info;
 use tree_sitter::{Language, Parser, Query};
 use tree_sitter_loader::{CompileConfig, Loader};

 include!("../src/tests/helpers/dirs.rs");

-lazy_static! {
-    static ref LANGUAGE_FILTER: Option<String> =
-        env::var("TREE_SITTER_BENCHMARK_LANGUAGE_FILTER").ok();
-    static ref EXAMPLE_FILTER: Option<String> =
-        env::var("TREE_SITTER_BENCHMARK_EXAMPLE_FILTER").ok();
-    static ref REPETITION_COUNT: usize = env::var("TREE_SITTER_BENCHMARK_REPETITION_COUNT")
-        .map(|s| s.parse::<usize>().unwrap())
-        .unwrap_or(5);
-    static ref TEST_LOADER: Loader = Loader::with_parser_lib_path(SCRATCH_DIR.clone());
-    static ref EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR: BTreeMap<PathBuf, (Vec<PathBuf>, Vec<PathBuf>)> = {
-        fn process_dir(result: &mut BTreeMap<PathBuf, (Vec<PathBuf>, Vec<PathBuf>)>, dir: &Path) {
-            if dir.join("grammar.js").exists() {
-                let relative_path = dir.strip_prefix(GRAMMARS_DIR.as_path()).unwrap();
-                let (example_paths, query_paths) =
-                    result.entry(relative_path.to_owned()).or_default();
-
-                if let Ok(example_files) = fs::read_dir(dir.join("examples")) {
-                    example_paths.extend(example_files.filter_map(|p| {
-                        let p = p.unwrap().path();
-                        if p.is_file() {
-                            Some(p)
-                        } else {
-                            None
-                        }
-                    }));
-                }
-
-                if let Ok(query_files) = fs::read_dir(dir.join("queries")) {
-                    query_paths.extend(query_files.filter_map(|p| {
-                        let p = p.unwrap().path();
-                        if p.is_file() {
-                            Some(p)
-                        } else {
-                            None
-                        }
-                    }));
-                }
-            } else {
-                for entry in fs::read_dir(dir).unwrap() {
-                    let entry = entry.unwrap().path();
-                    if entry.is_dir() {
-                        process_dir(result, &entry);
-                    }
-                }
-            }
-        }
-
-        let mut result = BTreeMap::new();
-        process_dir(&mut result, &GRAMMARS_DIR);
-        result
-    };
-}
+static LANGUAGE_FILTER: LazyLock<Option<String>> =
+    LazyLock::new(|| env::var("TREE_SITTER_BENCHMARK_LANGUAGE_FILTER").ok());
+static EXAMPLE_FILTER: LazyLock<Option<String>> =
+    LazyLock::new(|| env::var("TREE_SITTER_BENCHMARK_EXAMPLE_FILTER").ok());
+static REPETITION_COUNT: LazyLock<usize> = LazyLock::new(|| {
+    env::var("TREE_SITTER_BENCHMARK_REPETITION_COUNT")
+        .map(|s| s.parse::<usize>().unwrap())
+        .unwrap_or(5)
+});
+static TEST_LOADER: LazyLock<Loader> =
+    LazyLock::new(|| Loader::with_parser_lib_path(SCRATCH_DIR.clone()));
+
+#[allow(clippy::type_complexity)]
+static EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR: LazyLock<
+    BTreeMap<PathBuf, (Vec<PathBuf>, Vec<PathBuf>)>,
+> = LazyLock::new(|| {
+    fn process_dir(result: &mut BTreeMap<PathBuf, (Vec<PathBuf>, Vec<PathBuf>)>, dir: &Path) {
+        if dir.join("grammar.js").exists() {
+            let relative_path = dir.strip_prefix(GRAMMARS_DIR.as_path()).unwrap();
+            let (example_paths, query_paths) = result.entry(relative_path.to_owned()).or_default();
+
+            if let Ok(example_files) = fs::read_dir(dir.join("examples")) {
+                example_paths.extend(example_files.filter_map(|p| {
+                    let p = p.unwrap().path();
+                    if p.is_file() {
+                        Some(p)
+                    } else {
+                        None
+                    }
+                }));
+            }
+
+            if let Ok(query_files) = fs::read_dir(dir.join("queries")) {
+                query_paths.extend(query_files.filter_map(|p| {
+                    let p = p.unwrap().path();
+                    if p.is_file() {
+                        Some(p)
+                    } else {
+                        None
+                    }
+                }));
+            }
+        } else {
+            for entry in fs::read_dir(dir).unwrap() {
+                let entry = entry.unwrap().path();
+                if entry.is_dir() {
+                    process_dir(result, &entry);
+                }
+            }
+        }
+    }
+
+    let mut result = BTreeMap::new();
+    process_dir(&mut result, &GRAMMARS_DIR);
+    result
+});

 fn main() {
+    tree_sitter_cli::logger::init();
+
     let max_path_length = EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR
         .values()
         .flat_map(|(e, q)| {
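The hunk above replaces the `lazy_static!` macro with `std::sync::LazyLock`, which was stabilized in Rust 1.80 and removes the external crate dependency. A minimal sketch of the before/after pattern; the statics here are illustrative stand-ins, not the benchmark's real ones:

```rust
use std::{env, sync::LazyLock};

// Before (with the lazy_static crate):
//
// lazy_static! {
//     static ref FILTER: Option<String> = env::var("FILTER").ok();
// }
//
// After: plain statics initialized on first access, no macro needed.
static FILTER: LazyLock<Option<String>> = LazyLock::new(|| env::var("FILTER").ok());

static REPETITIONS: LazyLock<usize> = LazyLock::new(|| {
    env::var("REPETITIONS")
        .map(|s| s.parse::<usize>().unwrap())
        .unwrap_or(5)
});

fn main() {
    // Dereferencing forces the initializer to run, exactly once.
    println!("filter = {:?}, repetitions = {}", *FILTER, *REPETITIONS);
}
```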
@@ -77,7 +84,7 @@ fn main() {
         .max()
         .unwrap_or(0);

-    eprintln!("Benchmarking with {} repetitions", *REPETITION_COUNT);
+    info!("Benchmarking with {} repetitions", *REPETITION_COUNT);

     let mut parser = Parser::new();
     let mut all_normal_speeds = Vec::new();

@@ -94,11 +101,11 @@ fn main() {
             }
         }

-        eprintln!("\nLanguage: {language_name}");
+        info!("\nLanguage: {language_name}");
         let language = get_language(language_path);
         parser.set_language(&language).unwrap();

-        eprintln!("  Constructing Queries");
+        info!("  Constructing Queries");
         for path in query_paths {
             if let Some(filter) = EXAMPLE_FILTER.as_ref() {
                 if !path.to_str().unwrap().contains(filter.as_str()) {

@@ -108,12 +115,12 @@ fn main() {

             parse(path, max_path_length, |source| {
                 Query::new(&language, str::from_utf8(source).unwrap())
-                    .with_context(|| format!("Query file path: {path:?}"))
+                    .with_context(|| format!("Query file path: {}", path.display()))
                     .expect("Failed to parse query");
             });
         }

-        eprintln!("  Parsing Valid Code:");
+        info!("  Parsing Valid Code:");
         let mut normal_speeds = Vec::new();
         for example_path in example_paths {
             if let Some(filter) = EXAMPLE_FILTER.as_ref() {

@@ -127,7 +134,7 @@ fn main() {
             }));
         }

-        eprintln!("  Parsing Invalid Code (mismatched languages):");
+        info!("  Parsing Invalid Code (mismatched languages):");
         let mut error_speeds = Vec::new();
         for (other_language_path, (example_paths, _)) in
             EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR.iter()

@@ -148,30 +155,30 @@ fn main() {
         }

         if let Some((average_normal, worst_normal)) = aggregate(&normal_speeds) {
-            eprintln!("  Average Speed (normal): {average_normal} bytes/ms");
-            eprintln!("  Worst Speed (normal):   {worst_normal} bytes/ms");
+            info!("  Average Speed (normal): {average_normal} bytes/ms");
+            info!("  Worst Speed (normal):   {worst_normal} bytes/ms");
         }

         if let Some((average_error, worst_error)) = aggregate(&error_speeds) {
-            eprintln!("  Average Speed (errors): {average_error} bytes/ms");
-            eprintln!("  Worst Speed (errors):   {worst_error} bytes/ms");
+            info!("  Average Speed (errors): {average_error} bytes/ms");
+            info!("  Worst Speed (errors):   {worst_error} bytes/ms");
         }

         all_normal_speeds.extend(normal_speeds);
         all_error_speeds.extend(error_speeds);
     }

-    eprintln!("\n  Overall");
+    info!("\n  Overall");
     if let Some((average_normal, worst_normal)) = aggregate(&all_normal_speeds) {
-        eprintln!("  Average Speed (normal): {average_normal} bytes/ms");
-        eprintln!("  Worst Speed (normal):   {worst_normal} bytes/ms");
+        info!("  Average Speed (normal): {average_normal} bytes/ms");
+        info!("  Worst Speed (normal):   {worst_normal} bytes/ms");
     }

     if let Some((average_error, worst_error)) = aggregate(&all_error_speeds) {
-        eprintln!("  Average Speed (errors): {average_error} bytes/ms");
-        eprintln!("  Worst Speed (errors):   {worst_error} bytes/ms");
+        info!("  Average Speed (errors): {average_error} bytes/ms");
+        info!("  Worst Speed (errors):   {worst_error} bytes/ms");
     }
-    eprintln!();
+    info!("");
 }

@@ -190,14 +197,8 @@ fn aggregate(speeds: &[usize]) -> Option<(usize, usize)> {
 }

 fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) -> usize {
-    eprint!(
-        "  {:width$}\t",
-        path.file_name().unwrap().to_str().unwrap(),
-        width = max_path_length
-    );
-
     let source_code = fs::read(path)
-        .with_context(|| format!("Failed to read {path:?}"))
+        .with_context(|| format!("Failed to read {}", path.display()))
         .unwrap();
     let time = Instant::now();
     for _ in 0..*REPETITION_COUNT {

@@ -206,8 +207,9 @@ fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) ->
     let duration = time.elapsed() / (*REPETITION_COUNT as u32);
     let duration_ns = duration.as_nanos();
     let speed = ((source_code.len() as u128) * 1_000_000) / duration_ns;
-    eprintln!(
-        "time {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
+    info!(
+        "  {:max_path_length$}\ttime {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
+        path.file_name().unwrap().to_str().unwrap(),
        (duration_ns as f64) / 1e6,
     );
     speed as usize

@@ -217,6 +219,6 @@ fn get_language(path: &Path) -> Language {
     let src_path = GRAMMARS_DIR.join(path).join("src");
     TEST_LOADER
         .load_language_at_path(CompileConfig::new(&src_path, None, None))
-        .with_context(|| format!("Failed to load language at path {src_path:?}"))
+        .with_context(|| format!("Failed to load language at path {}", src_path.display()))
         .unwrap()
 }
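Several hunks above swap `{path:?}` for `path.display()` in error messages. `Debug` formatting quotes and escapes a path, while `Path::display` returns an adapter that prints it plainly (lossily for non-UTF-8 components). A small standalone sketch of the difference:

```rust
use std::path::Path;

fn main() {
    let path = Path::new("grammars/c/examples/parser.c");

    // Debug formatting wraps the path in quotes and escapes special
    // characters: "grammars/c/examples/parser.c"
    println!("{path:?}");

    // Path has no Display impl; `.display()` prints it as-is:
    // grammars/c/examples/parser.c
    println!("{}", path.display());
}
```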
@@ -52,16 +52,14 @@ fn main() {

 fn web_playground_files_present() -> bool {
     let paths = [
-        "../docs/assets/js/playground.js",
-        "../lib/binding_web/tree-sitter.js",
-        "../lib/binding_web/tree-sitter.wasm",
+        "../../docs/src/assets/js/playground.js",
+        "../../lib/binding_web/web-tree-sitter.js",
+        "../../lib/binding_web/web-tree-sitter.wasm",
     ];

     paths.iter().all(|p| Path::new(p).exists())
 }

-// When updating this function, don't forget to also update generate/build.rs which has a
-// near-identical function.
 fn read_git_sha() -> Option<String> {
     let crate_path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());

crates/cli/eslint/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
+LICENSE
@@ -305,9 +305,9 @@
     "peer": true
   },
   "node_modules/brace-expansion": {
-    "version": "1.1.11",
-    "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
-    "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+    "version": "1.1.12",
+    "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+    "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
     "license": "MIT",
     "peer": true,
     "dependencies": {

@@ -805,9 +805,9 @@
     "peer": true
   },
   "node_modules/js-yaml": {
-    "version": "4.1.0",
-    "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
-    "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+    "version": "4.1.1",
+    "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+    "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
     "license": "MIT",
     "peer": true,
     "dependencies": {
@@ -4,7 +4,8 @@
   "description": "Eslint configuration for Tree-sitter grammar files",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/tree-sitter/tree-sitter.git"
+    "url": "git+https://github.com/tree-sitter/tree-sitter.git",
+    "directory": "crates/cli/eslint"
   },
   "license": "MIT",
   "author": "Amaan Qureshi <amaanq12@gmail.com>",

@@ -20,5 +21,9 @@
   },
   "peerDependencies": {
     "eslint": ">= 9"
+  },
+  "scripts": {
+    "prepack": "cp ../../../LICENSE .",
+    "postpack": "rm LICENSE"
   }
 }
cli/npm/dsl.d.ts → crates/cli/npm/dsl.d.ts (vendored, 46 changes)
@@ -10,6 +10,7 @@ type PrecRightRule = { type: 'PREC_RIGHT'; content: Rule; value: number };
 type PrecRule = { type: 'PREC'; content: Rule; value: number };
 type Repeat1Rule = { type: 'REPEAT1'; content: Rule };
 type RepeatRule = { type: 'REPEAT'; content: Rule };
+type ReservedRule = { type: 'RESERVED'; content: Rule; context_name: string };
 type SeqRule = { type: 'SEQ'; members: Rule[] };
 type StringRule = { type: 'STRING'; value: string };
 type SymbolRule<Name extends string> = { type: 'SYMBOL'; name: Name };

@@ -28,12 +29,19 @@ type Rule =
   | PrecRule
   | Repeat1Rule
   | RepeatRule
+  | ReservedRule
   | SeqRule
   | StringRule
   | SymbolRule<string>
   | TokenRule;

-type RuleOrLiteral = Rule | RegExp | string;
+declare class RustRegex {
+  value: string;
+
+  constructor(pattern: string);
+}
+
+type RuleOrLiteral = Rule | RegExp | RustRegex | string;

 type GrammarSymbols<RuleName extends string> = {
   [name in RuleName]: SymbolRule<name>;

@@ -105,7 +113,7 @@ interface Grammar<
    * @param $ grammar rules
    * @param previous array of externals from the base schema, if any
    *
-   * @see https://tree-sitter.github.io/tree-sitter/creating-parsers#external-scanners
+   * @see https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners
    */
  externals?: (
    $: Record<string, SymbolRule<string>>,

@@ -143,7 +151,7 @@ interface Grammar<
    *
    * @param $ grammar rules
    *
-   * @see https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
+   * @see https://tree-sitter.github.io/tree-sitter/using-parsers/6-static-node-types
    */
  supertypes?: (
    $: GrammarSymbols<RuleName | BaseGrammarRuleName>,

@@ -156,9 +164,20 @@ interface Grammar<
    *
    * @param $ grammar rules
    *
-   * @see https://tree-sitter.github.io/tree-sitter/creating-parsers#keyword-extraction
+   * @see https://tree-sitter.github.io/tree-sitter/creating-parsers/3-writing-the-grammar#keyword-extraction
    */
  word?: ($: GrammarSymbols<RuleName | BaseGrammarRuleName>) => RuleOrLiteral;
+
+  /**
+   * Mapping of names to reserved word sets. The first reserved word set is the
+   * global word set, meaning it applies to every rule in every parse state.
+   * The other word sets can be used with the `reserved` function.
+   */
+  reserved?: Record<
+    string,
+    ($: GrammarSymbols<RuleName | BaseGrammarRuleName>) => RuleOrLiteral[]
+  >;
 }

 type GrammarSchema<RuleName extends string> = {

@@ -243,7 +262,7 @@ declare function optional(rule: RuleOrLiteral): ChoiceRule;
  * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html
  */
 declare const prec: {
-  (value: String | number, rule: RuleOrLiteral): PrecRule;
+  (value: string | number, rule: RuleOrLiteral): PrecRule;

   /**
    * Marks the given rule as left-associative (and optionally applies a

@@ -259,7 +278,7 @@ declare const prec: {
    * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html
    */
   left(rule: RuleOrLiteral): PrecLeftRule;
-  left(value: String | number, rule: RuleOrLiteral): PrecLeftRule;
+  left(value: string | number, rule: RuleOrLiteral): PrecLeftRule;

   /**
    * Marks the given rule as right-associative (and optionally applies a

@@ -275,7 +294,7 @@ declare const prec: {
    * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html
    */
   right(rule: RuleOrLiteral): PrecRightRule;
-  right(value: String | number, rule: RuleOrLiteral): PrecRightRule;
+  right(value: string | number, rule: RuleOrLiteral): PrecRightRule;

   /**
    * Marks the given rule with a numerical precedence which will be used to

@@ -292,7 +311,7 @@ declare const prec: {
    *
    * @see https://www.gnu.org/software/bison/manual/html_node/Generalized-LR-Parsing.html
    */
-  dynamic(value: String | number, rule: RuleOrLiteral): PrecDynamicRule;
+  dynamic(value: string | number, rule: RuleOrLiteral): PrecDynamicRule;
 };

 /**

@@ -312,6 +331,15 @@ declare function repeat(rule: RuleOrLiteral): RepeatRule;
  */
 declare function repeat1(rule: RuleOrLiteral): Repeat1Rule;

+/**
+ * Overrides the global reserved word set for a given rule. The word set name
+ * should be defined in the `reserved` field in the grammar.
+ *
+ * @param wordset name of the reserved word set
+ * @param rule rule that will use the reserved word set
+ */
+declare function reserved(wordset: string, rule: RuleOrLiteral): ReservedRule;
+
 /**
  * Creates a rule that matches any number of other rules, one after another.
  * It is analogous to simply writing multiple symbols next to each other

@@ -330,7 +358,7 @@ declare function sym<Name extends string>(name: Name): SymbolRule<Name>;

 /**
  * Marks the given rule as producing only a single token. Tree-sitter's
- * default is to treat each String or RegExp literal in the grammar as a
+ * default is to treat each string or RegExp literal in the grammar as a
  * separate token. Each token is matched separately by the lexer and
  * returned as its own leaf node in the tree. The token function allows
  * you to express a complex rule using the DSL functions (rather
cli/npm/install.js → crates/cli/npm/install.js (Executable file → Normal file, 3 changes)
@@ -6,7 +6,8 @@ const http = require('http');
 const https = require('https');
 const packageJSON = require('./package.json');

-// Look to a results table in https://github.com/tree-sitter/tree-sitter/issues/2196
+https.globalAgent.keepAlive = false;
+
 const matrix = {
   platform: {
     'darwin': {
crates/cli/npm/package-lock.json (generated, new file, 20 lines)
@@ -0,0 +1,20 @@
+{
+  "name": "tree-sitter-cli",
+  "version": "0.27.0",
+  "lockfileVersion": 3,
+  "requires": true,
+  "packages": {
+    "": {
+      "name": "tree-sitter-cli",
+      "version": "0.27.0",
+      "hasInstallScript": true,
+      "license": "MIT",
+      "bin": {
+        "tree-sitter": "cli.js"
+      },
+      "engines": {
+        "node": ">=12.0.0"
+      }
+    }
+  }
+}
@@ -1,24 +1,33 @@
 {
   "name": "tree-sitter-cli",
-  "version": "0.25.0",
-  "author": "Max Brunsfeld",
+  "version": "0.27.0",
+  "author": {
+    "name": "Max Brunsfeld",
+    "email": "maxbrunsfeld@gmail.com"
+  },
+  "maintainers": [
+    {
+      "name": "Amaan Qureshi",
+      "email": "amaanq12@gmail.com"
+    }
+  ],
   "license": "MIT",
   "repository": {
     "type": "git",
-    "url": "https://github.com/tree-sitter/tree-sitter.git"
+    "url": "git+https://github.com/tree-sitter/tree-sitter.git",
+    "directory": "crates/cli/npm"
   },
   "description": "CLI for generating fast incremental parsers",
   "keywords": [
     "parser",
     "lexer"
   ],
-  "main": "lib/api/index.js",
   "engines": {
     "node": ">=12.0.0"
   },
   "scripts": {
     "install": "node install.js",
-    "prepack": "cp ../../LICENSE ../README.md .",
+    "prepack": "cp ../../../LICENSE ../README.md .",
     "postpack": "rm LICENSE README.md"
   },
   "bin": {
crates/cli/package.nix (new file, 69 lines)
@@ -0,0 +1,69 @@
+{
+  lib,
+  src,
+  rustPlatform,
+  version,
+  clang,
+  libclang,
+  cmake,
+  pkg-config,
+  nodejs_22,
+  test-grammars,
+  stdenv,
+  installShellFiles,
+}:
+let
+  isCross = stdenv.targetPlatform == stdenv.buildPlatform;
+in
+rustPlatform.buildRustPackage {
+  pname = "tree-sitter-cli";
+
+  inherit src version;
+
+  cargoBuildFlags = [ "--all-features" ];
+
+  nativeBuildInputs = [
+    clang
+    cmake
+    pkg-config
+    nodejs_22
+  ]
+  ++ lib.optionals (!isCross) [ installShellFiles ];
+
+  cargoLock.lockFile = ../../Cargo.lock;
+
+  env.LIBCLANG_PATH = "${libclang.lib}/lib";
+
+  preBuild = ''
+    rm -rf test/fixtures
+    mkdir -p test/fixtures
+    cp -r ${test-grammars}/fixtures/* test/fixtures/
+    chmod -R u+w test/fixtures
+  '';
+
+  preCheck = "export HOME=$TMPDIR";
+  doCheck = !isCross;
+
+  postInstall = lib.optionalString (!isCross) ''
+    installShellCompletion --cmd tree-sitter \
+      --bash <($out/bin/tree-sitter complete --shell bash) \
+      --zsh <($out/bin/tree-sitter complete --shell zsh) \
+      --fish <($out/bin/tree-sitter complete --shell fish)
+  '';
+
+  meta = {
+    description = "Tree-sitter CLI - A tool for developing, testing, and using Tree-sitter parsers";
+    longDescription = ''
+      Tree-sitter is a parser generator tool and an incremental parsing library.
+      It can build a concrete syntax tree for a source file and efficiently update
+      the syntax tree as the source file is edited. This package provides the CLI
+      tool for developing, testing, and using Tree-sitter parsers.
+    '';
+    homepage = "https://tree-sitter.github.io/tree-sitter";
+    changelog = "https://github.com/tree-sitter/tree-sitter/releases/tag/v${version}";
+    license = lib.licenses.mit;
+    maintainers = with lib.maintainers; [ amaanq ];
+    platforms = lib.platforms.all;
+    mainProgram = "tree-sitter";
+  };
+}
@@ -1,6 +1,11 @@
-use std::{collections::HashMap, env, fs, path::Path};
+use std::{
+    collections::HashMap,
+    env, fs,
+    path::{Path, PathBuf},
+    sync::LazyLock,
+};

-use lazy_static::lazy_static;
+use log::{error, info};
 use rand::Rng;
 use regex::Regex;
 use tree_sitter::{Language, Parser};

@@ -20,19 +25,30 @@ use crate::{
         random::Rand,
     },
     parse::perform_edit,
-    test::{parse_tests, print_diff, print_diff_key, strip_sexp_fields, TestEntry},
+    test::{parse_tests, strip_sexp_fields, DiffKey, TestDiff, TestEntry},
 };

-lazy_static! {
-    pub static ref LOG_ENABLED: bool = env::var("TREE_SITTER_LOG").is_ok();
-    pub static ref LOG_GRAPH_ENABLED: bool = env::var("TREE_SITTER_LOG_GRAPHS").is_ok();
-    pub static ref LANGUAGE_FILTER: Option<String> = env::var("TREE_SITTER_LANGUAGE").ok();
-    pub static ref EXAMPLE_INCLUDE: Option<Regex> = regex_env_var("TREE_SITTER_EXAMPLE_INCLUDE");
-    pub static ref EXAMPLE_EXCLUDE: Option<Regex> = regex_env_var("TREE_SITTER_EXAMPLE_EXCLUDE");
-    pub static ref START_SEED: usize = new_seed();
-    pub static ref EDIT_COUNT: usize = int_env_var("TREE_SITTER_EDITS").unwrap_or(3);
-    pub static ref ITERATION_COUNT: usize = int_env_var("TREE_SITTER_ITERATIONS").unwrap_or(10);
-}
+pub static LOG_ENABLED: LazyLock<bool> = LazyLock::new(|| env::var("TREE_SITTER_LOG").is_ok());
+
+pub static LOG_GRAPH_ENABLED: LazyLock<bool> =
+    LazyLock::new(|| env::var("TREE_SITTER_LOG_GRAPHS").is_ok());
+
+pub static LANGUAGE_FILTER: LazyLock<Option<String>> =
+    LazyLock::new(|| env::var("TREE_SITTER_LANGUAGE").ok());
+
+pub static EXAMPLE_INCLUDE: LazyLock<Option<Regex>> =
+    LazyLock::new(|| regex_env_var("TREE_SITTER_EXAMPLE_INCLUDE"));
+
+pub static EXAMPLE_EXCLUDE: LazyLock<Option<Regex>> =
+    LazyLock::new(|| regex_env_var("TREE_SITTER_EXAMPLE_EXCLUDE"));
+
+pub static START_SEED: LazyLock<usize> = LazyLock::new(new_seed);
+
+pub static EDIT_COUNT: LazyLock<usize> =
+    LazyLock::new(|| int_env_var("TREE_SITTER_EDITS").unwrap_or(3));
+
+pub static ITERATION_COUNT: LazyLock<usize> =
+    LazyLock::new(|| int_env_var("TREE_SITTER_ITERATIONS").unwrap_or(10));

 fn int_env_var(name: &'static str) -> Option<usize> {
     env::var(name).ok().and_then(|e| e.parse().ok())

@@ -46,13 +62,15 @@ fn regex_env_var(name: &'static str) -> Option<Regex> {
 pub fn new_seed() -> usize {
     int_env_var("TREE_SITTER_SEED").unwrap_or_else(|| {
         let mut rng = rand::thread_rng();
-        rng.gen::<usize>()
+        let seed = rng.gen::<usize>();
+        info!("Seed: {seed}");
+        seed
     })
 }

 pub struct FuzzOptions {
     pub skipped: Option<Vec<String>>,
-    pub subdir: Option<String>,
+    pub subdir: Option<PathBuf>,
     pub edits: usize,
     pub iterations: usize,
     pub include: Option<Regex>,

@@ -91,12 +109,12 @@ pub fn fuzz_language_corpus(
     let corpus_dir = grammar_dir.join(subdir).join("test").join("corpus");

     if !corpus_dir.exists() || !corpus_dir.is_dir() {
-        eprintln!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
+        error!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
         return;
     }

     if std::fs::read_dir(&corpus_dir).unwrap().count() == 0 {
-        eprintln!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
+        error!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
         return;
     }

@@ -132,7 +150,7 @@ pub fn fuzz_language_corpus(
     let dump_edits = env::var("TREE_SITTER_DUMP_EDITS").is_ok();

     if log_seed {
-        println!("  start seed: {start_seed}");
+        info!("  start seed: {start_seed}");
     }

     println!();

@@ -146,7 +164,7 @@ pub fn fuzz_language_corpus(

         println!("  {test_index}. {test_name}");

-        let passed = allocations::record(|| {
+        let passed = allocations::record_checked(|| {
             let mut log_session = None;
             let mut parser = get_parser(&mut log_session, "log.html");
             parser.set_language(language).unwrap();

@@ -165,8 +183,8 @@ pub fn fuzz_language_corpus(

             if actual_output != test.output {
                 println!("Incorrect initial parse for {test_name}");
-                print_diff_key();
-                print_diff(&actual_output, &test.output, true);
+                DiffKey::print();
+                println!("{}", TestDiff::new(&actual_output, &test.output));
                 println!();
                 return false;
             }

@@ -174,7 +192,7 @@ pub fn fuzz_language_corpus(
             true
         })
         .unwrap_or_else(|e| {
-            eprintln!("Error: {e}");
+            error!("{e}");
             false
         });

@@ -190,7 +208,7 @@ pub fn fuzz_language_corpus(

         for trial in 0..options.iterations {
             let seed = start_seed + trial;
-            let passed = allocations::record(|| {
+            let passed = allocations::record_checked(|| {
                 let mut rand = Rand::new(seed);
                 let mut log_session = None;
                 let mut parser = get_parser(&mut log_session, "log.html");

@@ -199,19 +217,20 @@ pub fn fuzz_language_corpus(
                 let mut input = test.input.clone();

                 if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                 }

                 // Perform a random series of edits and reparse.
-                let mut undo_stack = Vec::new();
-                for _ in 0..=rand.unsigned(*EDIT_COUNT) {
+                let edit_count = rand.unsigned(*EDIT_COUNT);
+                let mut undo_stack = Vec::with_capacity(edit_count);
+                for _ in 0..=edit_count {
                     let edit = get_random_edit(&mut rand, &input);
                     undo_stack.push(invert_edit(&input, &edit));
                     perform_edit(&mut tree, &mut input, &edit).unwrap();
                 }

                 if log_seed {
-                    println!("    {test_index}.{trial:<2} seed: {seed}");
+                    info!("    {test_index}.{trial:<2} seed: {seed}");
                 }

                 if dump_edits {

@@ -225,7 +244,7 @@ pub fn fuzz_language_corpus(
                 }

                 if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                 }

                 set_included_ranges(&mut parser, &input, test.template_delimiters);

@@ -234,7 +253,7 @@ pub fn fuzz_language_corpus(
                 // Check that the new tree is consistent.
                 check_consistent_sizes(&tree2, &input);
                 if let Err(message) = check_changed_ranges(&tree, &tree2, &input) {
-                    println!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
+                    error!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
                     return false;
                 }

@@ -243,7 +262,7 @@ pub fn fuzz_language_corpus(
                     perform_edit(&mut tree2, &mut input, &edit).unwrap();
                 }
                 if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                 }

                 set_included_ranges(&mut parser, &test.input, test.template_delimiters);

@@ -257,8 +276,8 @@ pub fn fuzz_language_corpus(

                 if actual_output != test.output && !test.error {
                     println!("Incorrect parse for {test_name} - seed {seed}");
-                    print_diff_key();
-                    print_diff(&actual_output, &test.output, true);
+                    DiffKey::print();
+                    println!("{}", TestDiff::new(&actual_output, &test.output));
                     println!();
                     return false;
                 }

@@ -266,13 +285,13 @@ pub fn fuzz_language_corpus(
                 // Check that the edited tree is consistent.
                 check_consistent_sizes(&tree3, &input);
                 if let Err(message) = check_changed_ranges(&tree2, &tree3, &input) {
-                    println!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
+                    error!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
                     return false;
                 }

                 true
             }).unwrap_or_else(|e| {
-                eprintln!("Error: {e}");
+                error!("{e}");
                 false
             });

@@ -284,17 +303,17 @@ pub fn fuzz_language_corpus(
     }

     if failure_count != 0 {
-        eprintln!("{failure_count} {language_name} corpus tests failed fuzzing");
+        info!("{failure_count} {language_name} corpus tests failed fuzzing");
     }

     skipped.retain(|_, v| *v == 0);

     if !skipped.is_empty() {
-        println!("Non matchable skip definitions:");
+        info!("Non matchable skip definitions:");
         for k in skipped.keys() {
-            println!("  {k}");
+            info!("  {k}");
         }
-        panic!("Non matchable skip definitions needs to be removed");
+        panic!("Non matchable skip definitions need to be removed");
     }
 }
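All of the fuzzer's knobs above flow through helpers like `int_env_var`, which parse optional environment variables and fall back to defaults. A self-contained sketch of that pattern, reusing the same names and variables that appear in the diff:

```rust
use std::{env, sync::LazyLock};

// Same helper as in the diff: parse an env var, ignoring absent or
// unparsable values.
fn int_env_var(name: &'static str) -> Option<usize> {
    env::var(name).ok().and_then(|e| e.parse().ok())
}

static EDIT_COUNT: LazyLock<usize> =
    LazyLock::new(|| int_env_var("TREE_SITTER_EDITS").unwrap_or(3));
static ITERATION_COUNT: LazyLock<usize> =
    LazyLock::new(|| int_env_var("TREE_SITTER_ITERATIONS").unwrap_or(10));

fn main() {
    // TREE_SITTER_EDITS=7 -> edits = 7; unset or unparsable -> 3.
    println!("edits = {}, iterations = {}", *EDIT_COUNT, *ITERATION_COUNT);
}
```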
@@ -40,7 +40,11 @@ extern "C" {
     fn free(ptr: *mut c_void);
 }

-pub fn record<T>(f: impl FnOnce() -> T) -> Result<T, String> {
+pub fn record<T>(f: impl FnOnce() -> T) -> T {
+    record_checked(f).unwrap()
+}
+
+pub fn record_checked<T>(f: impl FnOnce() -> T) -> Result<T, String> {
     RECORDER.with(|recorder| {
         recorder.enabled.store(true, SeqCst);
         recorder.allocation_count.store(0, SeqCst);

@@ -93,30 +97,49 @@ fn record_dealloc(ptr: *mut c_void) {
     });
 }

-unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
     let result = malloc(size);
     record_alloc(result);
     result
 }

-unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
     let result = calloc(count, size);
     record_alloc(result);
     result
 }

-unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
     let result = realloc(ptr, size);
     if ptr.is_null() {
         record_alloc(result);
-    } else if ptr != result {
+    } else if !core::ptr::eq(ptr, result) {
         record_dealloc(ptr);
         record_alloc(result);
     }
     result
 }

-unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
+/// # Safety
+///
+/// The caller must ensure that `ptr` was allocated by a previous call
+/// to `ts_record_malloc`, `ts_record_calloc`, or `ts_record_realloc`.
+pub unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
     record_dealloc(ptr);
     free(ptr);
 }
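Splitting `record` into an infallible shim over the new `record_checked` keeps existing call sites unchanged while letting the fuzzer surface leak reports as `Err` instead of a panic. A reduced sketch of that wrapper pattern; `leaked_allocations` below is a hypothetical stand-in for the recorder's real bookkeeping:

```rust
// Hypothetical stand-in for the allocation recorder's leak count.
fn leaked_allocations() -> usize {
    0 // pretend nothing leaked
}

// New fallible entry point: run the closure and report leaks as an error.
pub fn record_checked<T>(f: impl FnOnce() -> T) -> Result<T, String> {
    let value = f();
    match leaked_allocations() {
        0 => Ok(value),
        n => Err(format!("{n} allocations leaked")),
    }
}

// Old signature preserved as a thin wrapper: panic on leak, as before.
pub fn record<T>(f: impl FnOnce() -> T) -> T {
    record_checked(f).unwrap()
}

fn main() {
    let sum = record(|| 2 + 2);
    assert_eq!(sum, 4);
}
```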
@@ -23,7 +23,7 @@ pub fn check_consistent_sizes(tree: &Tree, input: &[u8]) {
         let mut some_child_has_changes = false;
         let mut actual_named_child_count = 0;
         for i in 0..node.child_count() {
-            let child = node.child(i).unwrap();
+            let child = node.child(i as u32).unwrap();
             assert!(child.start_byte() >= last_child_end_byte);
             assert!(child.start_position() >= last_child_end_point);
             check(child, line_offsets);
@@ -20,8 +20,8 @@ impl Rand {
     }

     pub fn words(&mut self, max_count: usize) -> Vec<u8> {
-        let mut result = Vec::new();
         let word_count = self.unsigned(max_count);
+        let mut result = Vec::with_capacity(2 * word_count);
         for i in 0..word_count {
             if i > 0 {
                 if self.unsigned(5) == 0 {
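The `words` change computes `word_count` first so the output buffer can be preallocated instead of growing by repeated reallocation. A tiny sketch of the reorder:

```rust
fn main() {
    let word_count: usize = 8;

    // Before: start empty and let pushes reallocate as the Vec grows.
    let mut before: Vec<u8> = Vec::new();

    // After: reserve roughly the needed space up front (two bytes per
    // word here, mirroring the 2 * word_count estimate in the diff).
    let mut after: Vec<u8> = Vec::with_capacity(2 * word_count);

    for _ in 0..word_count {
        before.extend_from_slice(b"ab");
        after.extend_from_slice(b"ab");
    }
    assert_eq!(before, after);
    assert!(after.capacity() >= 2 * word_count);
}
```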
@@ -1,22 +1,24 @@
 use std::{
-    collections::HashMap,
+    collections::{BTreeMap, HashSet},
     fmt::Write,
     fs,
     io::{self, Write as _},
-    path, str,
-    sync::atomic::AtomicUsize,
+    path::{self, Path, PathBuf},
+    str,
+    sync::{atomic::AtomicUsize, Arc},
     time::Instant,
 };

+use ansi_colours::{ansi256_from_rgb, rgb_from_ansi256};
 use anstyle::{Ansi256Color, AnsiColor, Color, Effects, RgbColor};
 use anyhow::Result;
-use lazy_static::lazy_static;
+use log::{info, warn};
 use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer};
 use serde_json::{json, Value};
 use tree_sitter_highlight::{HighlightConfiguration, HighlightEvent, Highlighter, HtmlRenderer};
 use tree_sitter_loader::Loader;

-pub const HTML_HEADER: &str = "
+pub const HTML_HEAD_HEADER: &str = "
 <!doctype HTML>
 <head>
   <title>Tree-sitter Highlighting</title>

@@ -33,7 +35,9 @@ pub const HTML_HEADER: &str = "
   .line {
     white-space: pre;
   }
-</style>
+</style>";
+
+pub const HTML_BODY_HEADER: &str = "
 </head>
 <body>
 ";

@@ -42,11 +46,6 @@ pub const HTML_FOOTER: &str = "
 </body>
 ";

-lazy_static! {
-    static ref CSS_STYLES_BY_COLOR_ID: Vec<String> =
-        serde_json::from_str(include_str!("../vendor/xterm-colors.json")).unwrap();
-}
-
 #[derive(Debug, Default)]
 pub struct Style {
     pub ansi: anstyle::Style,

@@ -84,9 +83,9 @@ impl<'de> Deserialize<'de> for Theme {
     {
         let mut styles = Vec::new();
         let mut highlight_names = Vec::new();
-        if let Ok(colors) = HashMap::<String, Value>::deserialize(deserializer) {
-            highlight_names.reserve(colors.len());
+        if let Ok(colors) = BTreeMap::<String, Value>::deserialize(deserializer) {
             styles.reserve(colors.len());
+            highlight_names.reserve(colors.len());
             for (name, style_value) in colors {
                 let mut style = Style::default();
                 parse_style(&mut style, style_value);

@@ -129,7 +128,7 @@ impl Serialize for Theme {
             || effects.contains(Effects::ITALIC)
             || effects.contains(Effects::UNDERLINE)
         {
-            let mut style_json = HashMap::new();
+            let mut style_json = BTreeMap::new();
             if let Some(color) = color {
                 style_json.insert("color", color);
             }

@@ -156,28 +155,32 @@ impl Default for Theme {
     fn default() -> Self {
         serde_json::from_value(json!({
             "attribute": {"color": 124, "italic": true},
             "comment": {"color": 245, "italic": true},
-            "constant.builtin": {"color": 94, "bold": true},
-            "constant": 94,
+            "constant": 94,
+            "constant.builtin": {"color": 94, "bold": true},
             "constructor": 136,
             "embedded": null,
-            "function.builtin": {"color": 26, "bold": true},
-            "function": 26,
+            "function": 26,
+            "function.builtin": {"color": 26, "bold": true},
             "keyword": 56,
-            "number": {"color": 94, "bold": true},
-            "module": 136,
-            "property": 124,
-            "operator": {"color": 239, "bold": true},
-            "punctuation.bracket": 239,
-            "punctuation.delimiter": 239,
-            "string.special": 30,
-            "string": 28,
-            "tag": 18,
-            "type": 23,
-            "type.builtin": {"color": 23, "bold": true},
-            "variable.builtin": {"bold": true},
-            "variable.parameter": {"underline": true}
+            "module": 136,
+            "number": {"color": 94, "bold": true},
+            "operator": {"color": 239, "bold": true},
+            "property": 124,
+            "property.builtin": {"color": 124, "bold": true},
+            "punctuation": 239,
+            "punctuation.bracket": 239,
+            "punctuation.delimiter": 239,
+            "punctuation.special": 239,
+            "string": 28,
+            "string.special": 30,
+            "tag": 18,
+            "type": 23,
+            "type.builtin": {"color": 23, "bold": true},
+            "variable": 252,
+            "variable.builtin": {"color": 252, "bold": true},
+            "variable.parameter": {"color": 252, "underline": true}
         }))
         .unwrap()
     }

@@ -220,9 +223,8 @@ fn parse_style(style: &mut Style, json: Value) {

     if let Some(Color::Rgb(RgbColor(red, green, blue))) = style.ansi.get_fg_color() {
         if !terminal_supports_truecolor() {
-            style.ansi = style
-                .ansi
-                .fg_color(Some(closest_xterm_color(red, green, blue)));
+            let ansi256 = Color::Ansi256(Ansi256Color(ansi256_from_rgb((red, green, blue))));
+            style.ansi = style.ansi.fg_color(Some(ansi256));
         }
     }
 }

@@ -268,7 +270,7 @@ fn hex_string_to_rgb(s: &str) -> Option<(u8, u8, u8)> {
 }

 fn style_to_css(style: anstyle::Style) -> String {
-    let mut result = "style='".to_string();
+    let mut result = String::new();
     let effects = style.get_effects();
     if effects.contains(Effects::UNDERLINE) {
         write!(&mut result, "text-decoration: underline;").unwrap();

@@ -282,7 +284,6 @@ fn style_to_css(style: anstyle::Style) -> String {
     if let Some(color) = style.get_fg_color() {
         write_color(&mut result, color);
     }
-    result.push('\'');
     result
 }

@@ -300,7 +301,8 @@ fn write_color(buffer: &mut String, color: Color) {
             _ => unreachable!(),
         },
         Color::Ansi256(Ansi256Color(n)) => {
-            write!(buffer, "color: {}", CSS_STYLES_BY_COLOR_ID[n as usize]).unwrap();
+            let (r, g, b) = rgb_from_ansi256(n);
+            write!(buffer, "color: #{r:02x}{g:02x}{b:02x}").unwrap();
         }
         Color::Rgb(RgbColor(r, g, b)) => write!(buffer, "color: #{r:02x}{g:02x}{b:02x}").unwrap(),
|
Color::Rgb(RgbColor(r, g, b)) => write!(buffer, "color: #{r:02x}{g:02x}{b:02x}").unwrap(),
|
||||||
}
|
}
|
||||||
|
|
@ -311,115 +313,144 @@ fn terminal_supports_truecolor() -> bool {
|
||||||
.is_ok_and(|truecolor| truecolor == "truecolor" || truecolor == "24bit")
|
.is_ok_and(|truecolor| truecolor == "truecolor" || truecolor == "24bit")
|
||||||
}
|
}
|
||||||
|
|
||||||
fn closest_xterm_color(red: u8, green: u8, blue: u8) -> Color {
|
pub struct HighlightOptions {
|
||||||
use std::cmp::{max, min};
|
pub theme: Theme,
|
||||||
|
pub check: bool,
|
||||||
let colors = CSS_STYLES_BY_COLOR_ID
|
pub captures_path: Option<PathBuf>,
|
||||||
.iter()
|
pub inline_styles: bool,
|
||||||
.enumerate()
|
pub html: bool,
|
||||||
.map(|(color_id, hex)| (color_id as u8, hex_string_to_rgb(hex).unwrap()));
|
pub quiet: bool,
|
||||||
|
pub print_time: bool,
|
||||||
// Get the xterm color with the minimum Euclidean distance to the target color
|
pub cancellation_flag: Arc<AtomicUsize>,
|
||||||
// i.e. distance = √ (r2 - r1)² + (g2 - g1)² + (b2 - b1)²
|
|
||||||
let distances = colors.map(|(color_id, (r, g, b))| {
|
|
||||||
let r_delta = (max(r, red) - min(r, red)) as u32;
|
|
||||||
let g_delta = (max(g, green) - min(g, green)) as u32;
|
|
||||||
let b_delta = (max(b, blue) - min(b, blue)) as u32;
|
|
||||||
let distance = r_delta.pow(2) + g_delta.pow(2) + b_delta.pow(2);
|
|
||||||
// don't need to actually take the square root for the sake of comparison
|
|
||||||
(color_id, distance)
|
|
||||||
});
|
|
||||||
|
|
||||||
Color::Ansi256(Ansi256Color(
|
|
||||||
distances.min_by(|(_, d1), (_, d2)| d1.cmp(d2)).unwrap().0,
|
|
||||||
))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn ansi(
|
pub fn highlight(
|
||||||
loader: &Loader,
|
loader: &Loader,
|
||||||
theme: &Theme,
|
path: &Path,
|
||||||
source: &[u8],
|
name: &str,
|
||||||
config: &HighlightConfiguration,
|
config: &HighlightConfiguration,
|
||||||
print_time: bool,
|
print_name: bool,
|
||||||
cancellation_flag: Option<&AtomicUsize>,
|
opts: &HighlightOptions,
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
|
if opts.check {
|
||||||
|
let names = if let Some(path) = opts.captures_path.as_deref() {
|
||||||
|
let file = fs::read_to_string(path)?;
|
||||||
|
let capture_names = file
|
||||||
|
.lines()
|
||||||
|
.filter_map(|line| {
|
||||||
|
if line.trim().is_empty() || line.trim().starts_with(';') {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
line.split(';').next().map(|s| s.trim().trim_matches('"'))
|
||||||
|
})
|
||||||
|
.collect::<HashSet<_>>();
|
||||||
|
config.nonconformant_capture_names(&capture_names)
|
||||||
|
} else {
|
||||||
|
config.nonconformant_capture_names(&HashSet::new())
|
||||||
|
};
|
||||||
|
if names.is_empty() {
|
||||||
|
info!("All highlight captures conform to standards.");
|
||||||
|
} else {
|
||||||
|
warn!(
|
||||||
|
"Non-standard highlight {} detected:\n* {}",
|
||||||
|
if names.len() > 1 {
|
||||||
|
"captures"
|
||||||
|
} else {
|
||||||
|
"capture"
|
||||||
|
},
|
||||||
|
names.join("\n* ")
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let source = fs::read(path)?;
|
||||||
let stdout = io::stdout();
|
let stdout = io::stdout();
|
||||||
let mut stdout = stdout.lock();
|
let mut stdout = stdout.lock();
|
||||||
let time = Instant::now();
|
let time = Instant::now();
|
||||||
let mut highlighter = Highlighter::new();
|
let mut highlighter = Highlighter::new();
|
||||||
|
let events =
|
||||||
|
highlighter.highlight(config, &source, Some(&opts.cancellation_flag), |string| {
|
||||||
|
loader.highlight_config_for_injection_string(string)
|
||||||
|
})?;
|
||||||
|
let theme = &opts.theme;
|
||||||
|
|
||||||
let events = highlighter.highlight(config, source, cancellation_flag, |string| {
|
if !opts.quiet && print_name {
|
||||||
loader.highlight_config_for_injection_string(string)
|
writeln!(&mut stdout, "{name}")?;
|
||||||
})?;
|
}
|
||||||
|
|
||||||
let mut style_stack = vec![theme.default_style().ansi];
|
if opts.html {
|
||||||
for event in events {
|
if !opts.quiet {
|
||||||
match event? {
|
writeln!(&mut stdout, "{HTML_HEAD_HEADER}")?;
|
||||||
HighlightEvent::HighlightStart(highlight) => {
|
writeln!(&mut stdout, " <style>")?;
|
||||||
style_stack.push(theme.styles[highlight.0].ansi);
|
let names = theme.highlight_names.iter();
|
||||||
|
let styles = theme.styles.iter();
|
||||||
|
for (name, style) in names.zip(styles) {
|
||||||
|
if let Some(css) = &style.css {
|
||||||
|
writeln!(&mut stdout, " .{name} {{ {css}; }}")?;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
HighlightEvent::HighlightEnd => {
|
writeln!(&mut stdout, " </style>")?;
|
||||||
style_stack.pop();
|
writeln!(&mut stdout, "{HTML_BODY_HEADER}")?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut renderer = HtmlRenderer::new();
|
||||||
|
renderer.render(events, &source, &move |highlight, output| {
|
||||||
|
if opts.inline_styles {
|
||||||
|
output.extend(b"style='");
|
||||||
|
output.extend(
|
||||||
|
theme.styles[highlight.0]
|
||||||
|
.css
|
||||||
|
.as_ref()
|
||||||
|
.map_or_else(|| "".as_bytes(), |css_style| css_style.as_bytes()),
|
||||||
|
);
|
||||||
|
output.extend(b"'");
|
||||||
|
} else {
|
||||||
|
output.extend(b"class='");
|
||||||
|
let mut parts = theme.highlight_names[highlight.0].split('.').peekable();
|
||||||
|
while let Some(part) = parts.next() {
|
||||||
|
output.extend(part.as_bytes());
|
||||||
|
if parts.peek().is_some() {
|
||||||
|
output.extend(b" ");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
output.extend(b"'");
|
||||||
}
|
}
|
||||||
HighlightEvent::Source { start, end } => {
|
})?;
|
||||||
let style = style_stack.last().unwrap();
|
|
||||||
write!(&mut stdout, "{style}").unwrap();
|
if !opts.quiet {
|
||||||
stdout.write_all(&source[start..end])?;
|
writeln!(&mut stdout, "<table>")?;
|
||||||
write!(&mut stdout, "{style:#}").unwrap();
|
for (i, line) in renderer.lines().enumerate() {
|
||||||
|
writeln!(
|
||||||
|
&mut stdout,
|
||||||
|
"<tr><td class=line-number>{}</td><td class=line>{line}</td></tr>",
|
||||||
|
i + 1,
|
||||||
|
)?;
|
||||||
|
}
|
||||||
|
writeln!(&mut stdout, "</table>")?;
|
||||||
|
writeln!(&mut stdout, "{HTML_FOOTER}")?;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
let mut style_stack = vec![theme.default_style().ansi];
|
||||||
|
for event in events {
|
||||||
|
match event? {
|
||||||
|
HighlightEvent::HighlightStart(highlight) => {
|
||||||
|
style_stack.push(theme.styles[highlight.0].ansi);
|
||||||
|
}
|
||||||
|
HighlightEvent::HighlightEnd => {
|
||||||
|
style_stack.pop();
|
||||||
|
}
|
||||||
|
HighlightEvent::Source { start, end } => {
|
||||||
|
let style = style_stack.last().unwrap();
|
||||||
|
write!(&mut stdout, "{style}").unwrap();
|
||||||
|
stdout.write_all(&source[start..end])?;
|
||||||
|
write!(&mut stdout, "{style:#}").unwrap();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if print_time {
|
if opts.print_time {
|
||||||
eprintln!("Time: {}ms", time.elapsed().as_millis());
|
info!("Time: {}ms", time.elapsed().as_millis());
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn html(
|
|
||||||
loader: &Loader,
|
|
||||||
theme: &Theme,
|
|
||||||
source: &[u8],
|
|
||||||
config: &HighlightConfiguration,
|
|
||||||
quiet: bool,
|
|
||||||
print_time: bool,
|
|
||||||
cancellation_flag: Option<&AtomicUsize>,
|
|
||||||
) -> Result<()> {
|
|
||||||
use std::io::Write;
|
|
||||||
|
|
||||||
let stdout = io::stdout();
|
|
||||||
let mut stdout = stdout.lock();
|
|
||||||
let time = Instant::now();
|
|
||||||
let mut highlighter = Highlighter::new();
|
|
||||||
|
|
||||||
let events = highlighter.highlight(config, source, cancellation_flag, |string| {
|
|
||||||
loader.highlight_config_for_injection_string(string)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
let mut renderer = HtmlRenderer::new();
|
|
||||||
renderer.render(events, source, &move |highlight| {
|
|
||||||
theme.styles[highlight.0]
|
|
||||||
.css
|
|
||||||
.as_ref()
|
|
||||||
.map_or_else(|| "".as_bytes(), |css_style| css_style.as_bytes())
|
|
||||||
})?;
|
|
||||||
|
|
||||||
if !quiet {
|
|
||||||
writeln!(&mut stdout, "<table>")?;
|
|
||||||
for (i, line) in renderer.lines().enumerate() {
|
|
||||||
writeln!(
|
|
||||||
&mut stdout,
|
|
||||||
"<tr><td class=line-number>{}</td><td class=line>{line}</td></tr>",
|
|
||||||
i + 1,
|
|
||||||
)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
writeln!(&mut stdout, "</table>")?;
|
|
||||||
}
|
|
||||||
|
|
||||||
if print_time {
|
|
||||||
eprintln!("Time: {}ms", time.elapsed().as_millis());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
@ -449,7 +480,7 @@ mod tests {
|
||||||
style.ansi.get_fg_color(),
|
style.ansi.get_fg_color(),
|
||||||
Some(Color::Ansi256(Ansi256Color(36)))
|
Some(Color::Ansi256(Ansi256Color(36)))
|
||||||
);
|
);
|
||||||
assert_eq!(style.css, Some("style=\'color: #00af87\'".to_string()));
|
assert_eq!(style.css, Some("color: #00af87".to_string()));
|
||||||
|
|
||||||
// junglegreen is not an ANSI color and is preserved when the terminal supports it
|
// junglegreen is not an ANSI color and is preserved when the terminal supports it
|
||||||
env::set_var("COLORTERM", "truecolor");
|
env::set_var("COLORTERM", "truecolor");
|
||||||
|
|
@ -458,16 +489,16 @@ mod tests {
|
||||||
style.ansi.get_fg_color(),
|
style.ansi.get_fg_color(),
|
||||||
Some(Color::Rgb(RgbColor(38, 166, 154)))
|
Some(Color::Rgb(RgbColor(38, 166, 154)))
|
||||||
);
|
);
|
||||||
assert_eq!(style.css, Some("style=\'color: #26a69a\'".to_string()));
|
assert_eq!(style.css, Some("color: #26a69a".to_string()));
|
||||||
|
|
||||||
// junglegreen gets approximated as darkcyan when the terminal does not support it
|
// junglegreen gets approximated as cadetblue when the terminal does not support it
|
||||||
env::set_var("COLORTERM", "");
|
env::set_var("COLORTERM", "");
|
||||||
parse_style(&mut style, Value::String(JUNGLE_GREEN.to_string()));
|
parse_style(&mut style, Value::String(JUNGLE_GREEN.to_string()));
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
style.ansi.get_fg_color(),
|
style.ansi.get_fg_color(),
|
||||||
Some(Color::Ansi256(Ansi256Color(36)))
|
Some(Color::Ansi256(Ansi256Color(72)))
|
||||||
);
|
);
|
||||||
assert_eq!(style.css, Some("style=\'color: #26a69a\'".to_string()));
|
assert_eq!(style.css, Some("color: #26a69a".to_string()));
|
||||||
|
|
||||||
if let Ok(environment_variable) = original_environment_variable {
|
if let Ok(environment_variable) = original_environment_variable {
|
||||||
env::set_var("COLORTERM", environment_variable);
|
env::set_var("COLORTERM", environment_variable);
|
||||||
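For orientation, here is a minimal sketch of how the reworked `highlight` entry point might be driven. This is not code from the diff: the field values, the function name `render_one_file`, and the file path are illustrative assumptions; only `HighlightOptions`, `Theme`, and `highlight` come from the change above and are assumed to be in scope.

```rust
use std::{
    path::Path,
    sync::{atomic::AtomicUsize, Arc},
};

use tree_sitter_highlight::HighlightConfiguration;
use tree_sitter_loader::Loader;

// Hypothetical caller: `loader` and `config` are assumed to be set up elsewhere.
fn render_one_file(loader: &Loader, config: &HighlightConfiguration) -> anyhow::Result<()> {
    let opts = HighlightOptions {
        theme: Theme::default(),
        check: false,         // set to lint capture names against the standard set
        captures_path: None,
        inline_styles: false, // HTML mode: emit class='...' instead of style='...'
        html: true,           // false falls back to the ANSI terminal path
        quiet: false,
        print_time: false,
        cancellation_flag: Arc::new(AtomicUsize::new(0)),
    };
    highlight(
        loader,
        Path::new("src/lib.rs"),
        "src/lib.rs",
        config,
        false, // print_name
        &opts,
    )
}
```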
crates/cli/src/init.rs (new file, 1548 lines; diff suppressed because it is too large)
crates/cli/src/input.rs (new file, 187 lines)
@@ -0,0 +1,187 @@
+use std::{
+    fs,
+    io::{Read, Write},
+    path::{Path, PathBuf},
+    sync::{
+        atomic::{AtomicUsize, Ordering},
+        mpsc, Arc,
+    },
+};
+
+use anyhow::{anyhow, bail, Context, Result};
+use glob::glob;
+
+use crate::test::{parse_tests, TestEntry};
+
+pub enum CliInput {
+    Paths(Vec<PathBuf>),
+    Test {
+        name: String,
+        contents: Vec<u8>,
+        languages: Vec<Box<str>>,
+    },
+    Stdin(Vec<u8>),
+}
+
+pub fn get_input(
+    paths_file: Option<&Path>,
+    paths: Option<Vec<PathBuf>>,
+    test_number: Option<u32>,
+    cancellation_flag: &Arc<AtomicUsize>,
+) -> Result<CliInput> {
+    if let Some(paths_file) = paths_file {
+        return Ok(CliInput::Paths(
+            fs::read_to_string(paths_file)
+                .with_context(|| format!("Failed to read paths file {}", paths_file.display()))?
+                .trim()
+                .lines()
+                .map(PathBuf::from)
+                .collect::<Vec<_>>(),
+        ));
+    }
+
+    if let Some(test_number) = test_number {
+        let current_dir = std::env::current_dir().unwrap();
+        let test_dir = current_dir.join("test").join("corpus");
+
+        if !test_dir.exists() {
+            return Err(anyhow!(
+                "Test corpus directory not found in current directory, see https://tree-sitter.github.io/tree-sitter/creating-parsers/5-writing-tests"
+            ));
+        }
+
+        let test_entry = parse_tests(&test_dir)?;
+        let mut test_num = 0;
+        let Some((name, contents, languages)) =
+            get_test_info(&test_entry, test_number.max(1) - 1, &mut test_num)
+        else {
+            return Err(anyhow!("Failed to fetch contents of test #{test_number}"));
+        };
+
+        return Ok(CliInput::Test {
+            name,
+            contents,
+            languages,
+        });
+    }
+
+    if let Some(paths) = paths {
+        let mut result = Vec::new();
+
+        let mut incorporate_path = |path: PathBuf, positive| {
+            if positive {
+                result.push(path);
+            } else if let Some(index) = result.iter().position(|p| *p == path) {
+                result.remove(index);
+            }
+        };
+
+        for mut path in paths {
+            let mut positive = true;
+            if path.starts_with("!") {
+                positive = false;
+                path = path.strip_prefix("!").unwrap().to_path_buf();
+            }
+
+            if path.exists() {
+                incorporate_path(path, positive);
+            } else {
+                let Some(path_str) = path.to_str() else {
+                    bail!("Invalid path: {}", path.display());
+                };
+                let paths = glob(path_str)
+                    .with_context(|| format!("Invalid glob pattern {}", path.display()))?;
+                for path in paths {
+                    incorporate_path(path?, positive);
+                }
+            }
+        }
+
+        if result.is_empty() {
+            return Err(anyhow!(
+                "No files were found at or matched by the provided pathname/glob"
+            ));
+        }
+
+        return Ok(CliInput::Paths(result));
+    }
+
+    let reader_flag = cancellation_flag.clone();
+    let (tx, rx) = mpsc::channel();
+
+    // Spawn a thread to read from stdin, until ctrl-c or EOF is received
+    std::thread::spawn(move || {
+        let mut input = Vec::new();
+        let stdin = std::io::stdin();
+        let mut handle = stdin.lock();
+
+        // Read in chunks, so we can check the ctrl-c flag
+        loop {
+            if reader_flag.load(Ordering::Relaxed) == 1 {
+                break;
+            }
+            let mut buffer = [0; 1024];
+            match handle.read(&mut buffer) {
+                Ok(0) | Err(_) => break,
+                Ok(n) => input.extend_from_slice(&buffer[..n]),
+            }
+        }
+
+        // Signal to the main thread that we're done
+        tx.send(input).ok();
+    });
+
+    loop {
+        // If we've received a ctrl-c signal, exit
+        if cancellation_flag.load(Ordering::Relaxed) == 1 {
+            bail!("\n");
+        }
+
+        // If we're done receiving input from stdin, return it
+        if let Ok(input) = rx.try_recv() {
+            return Ok(CliInput::Stdin(input));
+        }
+
+        std::thread::sleep(std::time::Duration::from_millis(50));
+    }
+}
+
+#[allow(clippy::type_complexity)]
+pub fn get_test_info(
+    test_entry: &TestEntry,
+    target_test: u32,
+    test_num: &mut u32,
+) -> Option<(String, Vec<u8>, Vec<Box<str>>)> {
+    match test_entry {
+        TestEntry::Example {
+            name,
+            input,
+            attributes,
+            ..
+        } => {
+            if *test_num == target_test {
+                return Some((name.clone(), input.clone(), attributes.languages.clone()));
+            }
+            *test_num += 1;
+        }
+        TestEntry::Group { children, .. } => {
+            for child in children {
+                if let Some((name, input, languages)) = get_test_info(child, target_test, test_num)
+                {
+                    return Some((name, input, languages));
+                }
+            }
+        }
+    }
+
+    None
+}
+
+/// Writes `contents` to a temporary file and returns the path to that file.
+pub fn get_tmp_source_file(contents: &[u8]) -> Result<PathBuf> {
+    let parse_path = std::env::temp_dir().join(".tree-sitter-temp");
+    let mut parse_file = std::fs::File::create(&parse_path)?;
+    parse_file.write_all(contents)?;
+
+    Ok(parse_path)
+}
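The stdin branch above reads in fixed-size chunks on a helper thread so the ctrl-c flag can interrupt a read that would otherwise block until EOF. A stripped-down sketch of just that pattern, using the same logic as the file above (the function name and `Option` return are illustrative):

```rust
use std::{
    io::Read,
    sync::{
        atomic::{AtomicUsize, Ordering},
        mpsc, Arc,
    },
};

fn read_stdin_cancellable(flag: &Arc<AtomicUsize>) -> Option<Vec<u8>> {
    let reader_flag = flag.clone();
    let (tx, rx) = mpsc::channel();
    std::thread::spawn(move || {
        let mut input = Vec::new();
        let mut handle = std::io::stdin().lock();
        let mut buffer = [0; 1024];
        // Poll the flag between chunk reads instead of blocking until EOF.
        while reader_flag.load(Ordering::Relaxed) != 1 {
            match handle.read(&mut buffer) {
                Ok(0) | Err(_) => break,
                Ok(n) => input.extend_from_slice(&buffer[..n]),
            }
        }
        tx.send(input).ok();
    });
    loop {
        if flag.load(Ordering::Relaxed) == 1 {
            return None; // cancelled
        }
        if let Ok(input) = rx.try_recv() {
            return Some(input); // reader thread finished
        }
        std::thread::sleep(std::time::Duration::from_millis(50));
    }
}
```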
crates/cli/src/logger.rs (new file, 55 lines)
@@ -0,0 +1,55 @@
+use std::io::Write;
+
+use anstyle::{AnsiColor, Color, Style};
+use log::{Level, LevelFilter, Log, Metadata, Record};
+
+pub fn paint(color: Option<impl Into<Color>>, text: &str) -> String {
+    let style = Style::new().fg_color(color.map(Into::into));
+    format!("{style}{text}{style:#}")
+}
+
+struct Logger;
+
+impl Log for Logger {
+    fn enabled(&self, _: &Metadata) -> bool {
+        true
+    }
+
+    fn log(&self, record: &Record) {
+        match record.level() {
+            Level::Error => eprintln!(
+                "{} {}",
+                paint(Some(AnsiColor::Red), "Error:"),
+                record.args()
+            ),
+            Level::Warn => eprintln!(
+                "{} {}",
+                paint(Some(AnsiColor::Yellow), "Warning:"),
+                record.args()
+            ),
+            Level::Info | Level::Debug => eprintln!("{}", record.args()),
+            Level::Trace => eprintln!(
+                "[{}] {}",
+                record
+                    .module_path()
+                    .unwrap_or_default()
+                    .trim_start_matches("rust_tree_sitter_cli::"),
+                record.args()
+            ),
+        }
+    }
+
+    fn flush(&self) {
+        let mut stderr = std::io::stderr().lock();
+        let _ = stderr.flush();
+    }
+}
+
+pub fn init() {
+    log::set_boxed_logger(Box::new(Logger {})).unwrap();
+    log::set_max_level(LevelFilter::Info);
+}
+
+pub fn enable_debug() {
+    log::set_max_level(LevelFilter::Debug);
+}
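A plausible way this logger gets wired up at startup; the `main` wrapper and the `--debug` check are assumptions for illustration, and only `init`, `enable_debug`, and the `log` macros come from the file above:

```rust
use log::{info, warn};

fn main() {
    logger::init(); // installs the boxed logger at LevelFilter::Info
    if std::env::args().any(|arg| arg == "--debug") {
        logger::enable_debug(); // raises the max level to Debug
    }
    info!("printed to stderr as-is");
    warn!("prefixed with a yellow \"Warning:\"");
}
```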
crates/cli/src/main.rs (new file, 2162 lines; diff suppressed because it is too large)
@@ -1,23 +1,26 @@
 use std::{
     fmt, fs,
-    io::{self, StdoutLock, Write},
-    path::Path,
+    io::{self, Write},
+    ops::ControlFlow,
+    path::{Path, PathBuf},
     sync::atomic::{AtomicUsize, Ordering},
     time::{Duration, Instant},
 };

 use anstyle::{AnsiColor, Color, RgbColor};
 use anyhow::{anyhow, Context, Result};
+use clap::ValueEnum;
+use log::info;
+use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use tree_sitter::{
     ffi, InputEdit, Language, LogType, ParseOptions, ParseState, Parser, Point, Range, Tree,
     TreeCursor,
 };

-use super::util;
-use crate::{fuzz::edits::Edit, test::paint};
+use crate::{fuzz::edits::Edit, logger::paint, util};

-#[derive(Debug, Default)]
+#[derive(Debug, Default, Serialize, JsonSchema)]
 pub struct Stats {
     pub successful_parses: usize,
     pub total_parses: usize,
@@ -28,18 +31,28 @@ pub struct Stats {
 impl fmt::Display for Stats {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let duration_us = self.total_duration.as_micros();
+        let success_rate = if self.total_parses > 0 {
+            format!(
+                "{:.2}%",
+                ((self.successful_parses as f64) / (self.total_parses as f64)) * 100.0,
+            )
+        } else {
+            "N/A".to_string()
+        };
+        let duration_str = match (self.total_parses, duration_us) {
+            (0, _) => "N/A".to_string(),
+            (_, 0) => "0 bytes/ms".to_string(),
+            (_, _) => format!(
+                "{} bytes/ms",
+                ((self.total_bytes as u128) * 1_000) / duration_us
+            ),
+        };
         writeln!(
             f,
-            "Total parses: {}; successful parses: {}; failed parses: {}; success percentage: {:.2}%; average speed: {} bytes/ms",
+            "Total parses: {}; successful parses: {}; failed parses: {}; success percentage: {success_rate}; average speed: {duration_str}",
             self.total_parses,
             self.successful_parses,
             self.total_parses - self.successful_parses,
-            ((self.successful_parses as f64) / (self.total_parses as f64)) * 100.0,
-            if duration_us != 0 {
-                ((self.total_bytes as u128) * 1_000) / duration_us
-            } else {
-                0
-            }
         )
     }
 }
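To make the new guards concrete: with zero parses, the old format string computed `0/0` as an `f64` (printing `NaN%`), while the new `success_rate` and `duration_str` bindings degrade to `N/A`. A small hypothetical check:

```rust
fn show_empty_stats() {
    let stats = Stats::default();
    // Old formatting printed "success percentage: NaN%" for 0/0 parses.
    // New formatting prints:
    //   Total parses: 0; successful parses: 0; failed parses: 0;
    //   success percentage: N/A; average speed: N/A
    println!("{stats}");
}
```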
@@ -177,15 +190,79 @@ pub enum ParseOutput {
     Dot,
 }

+/// A position in a multi-line text document, in terms of rows and columns.
+///
+/// Rows and columns are zero-based.
+///
+/// This serves as a serializable wrapper for `Point`
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
+pub struct ParsePoint {
+    pub row: usize,
+    pub column: usize,
+}
+
+impl From<Point> for ParsePoint {
+    fn from(value: Point) -> Self {
+        Self {
+            row: value.row,
+            column: value.column,
+        }
+    }
+}
+
+#[derive(Serialize, Default, Debug, Clone)]
+pub struct ParseSummary {
+    pub file: PathBuf,
+    pub successful: bool,
+    pub start: Option<ParsePoint>,
+    pub end: Option<ParsePoint>,
+    pub duration: Option<Duration>,
+    pub bytes: Option<usize>,
+}
+
+impl ParseSummary {
+    #[must_use]
+    pub fn new(path: &Path) -> Self {
+        Self {
+            file: path.to_path_buf(),
+            successful: false,
+            ..Default::default()
+        }
+    }
+}
+
+#[derive(Serialize, Debug)]
+pub struct ParseStats {
+    pub parse_summaries: Vec<ParseSummary>,
+    pub cumulative_stats: Stats,
+    pub source_count: usize,
+}
+
+impl Default for ParseStats {
+    fn default() -> Self {
+        Self {
+            parse_summaries: Vec::new(),
+            cumulative_stats: Stats::default(),
+            source_count: 1,
+        }
+    }
+}
+
+#[derive(Serialize, ValueEnum, Debug, Copy, Clone, Default, Eq, PartialEq)]
+pub enum ParseDebugType {
+    #[default]
+    Quiet,
+    Normal,
+    Pretty,
+}
+
 pub struct ParseFileOptions<'a> {
-    pub language: Language,
-    pub path: &'a Path,
     pub edits: &'a [&'a str],
-    pub max_path_length: usize,
     pub output: ParseOutput,
+    pub stats: &'a mut ParseStats,
     pub print_time: bool,
     pub timeout: u64,
-    pub debug: bool,
+    pub debug: ParseDebugType,
     pub debug_graph: bool,
     pub cancellation_flag: Option<&'a AtomicUsize>,
     pub encoding: Option<u32>,
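Since `ParsePoint`, `ParseSummary`, and `ParseStats` all derive `Serialize`, the per-file results can be emitted as JSON. A hypothetical usage sketch; the function and the file name are illustrative, and it assumes `serde_json` is available as a dependency:

```rust
use std::path::Path;

fn dump_stats() -> anyhow::Result<()> {
    let mut stats = ParseStats::default();
    stats
        .parse_summaries
        .push(ParseSummary::new(Path::new("examples/demo.rs")));
    // `start`, `end`, `duration`, and `bytes` serialize as null until filled in.
    println!("{}", serde_json::to_string_pretty(&stats)?);
    Ok(())
}
```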
@@ -201,27 +278,65 @@ pub struct ParseResult {
     pub duration: Option<Duration>,
 }

-pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
+pub fn parse_file_at_path(
+    parser: &mut Parser,
+    language: &Language,
+    path: &Path,
+    name: &str,
+    max_path_length: usize,
+    opts: &mut ParseFileOptions,
+) -> Result<()> {
     let mut _log_session = None;
-    parser.set_language(&opts.language)?;
-    let mut source_code = fs::read(opts.path)
-        .with_context(|| format!("Error reading source file {:?}", opts.path))?;
+    parser.set_language(language)?;
+    let mut source_code = fs::read(path).with_context(|| format!("Error reading {name:?}"))?;

     // Render an HTML graph if `--debug-graph` was passed
     if opts.debug_graph {
         _log_session = Some(util::log_graphs(parser, "log.html", opts.open_log)?);
     }
     // Log to stderr if `--debug` was passed
-    else if opts.debug {
-        parser.set_logger(Some(Box::new(|log_type, message| {
-            if log_type == LogType::Lex {
-                io::stderr().write_all(b" ").unwrap();
+    else if opts.debug != ParseDebugType::Quiet {
+        let mut curr_version: usize = 0;
+        let use_color = std::env::var("NO_COLOR").map_or(true, |v| v != "1");
+        let debug = opts.debug;
+        parser.set_logger(Some(Box::new(move |log_type, message| {
+            if debug == ParseDebugType::Normal {
+                if log_type == LogType::Lex {
+                    write!(&mut io::stderr(), " ").unwrap();
+                }
+                writeln!(&mut io::stderr(), "{message}").unwrap();
+            } else {
+                let colors = &[
+                    AnsiColor::White,
+                    AnsiColor::Red,
+                    AnsiColor::Blue,
+                    AnsiColor::Green,
+                    AnsiColor::Cyan,
+                    AnsiColor::Yellow,
+                ];
+                if message.starts_with("process version:") {
+                    let comma_idx = message.find(',').unwrap();
+                    curr_version = message["process version:".len()..comma_idx]
+                        .parse()
+                        .unwrap();
+                }
+                let color = if use_color {
+                    Some(colors[curr_version])
+                } else {
+                    None
+                };
+                let mut out = if log_type == LogType::Lex {
+                    " ".to_string()
+                } else {
+                    String::new()
+                };
+                out += &paint(color, message);
+                writeln!(&mut io::stderr(), "{out}").unwrap();
             }
-            writeln!(&mut io::stderr(), "{message}").unwrap();
         })));
     }

-    let time = Instant::now();
+    let parse_time = Instant::now();

     #[inline(always)]
     fn is_utf16_le_bom(bom_bytes: &[u8]) -> bool {

@@ -255,15 +370,15 @@ pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
     let progress_callback = &mut |_: &ParseState| {
         if let Some(cancellation_flag) = opts.cancellation_flag {
             if cancellation_flag.load(Ordering::SeqCst) != 0 {
-                return true;
+                return ControlFlow::Break(());
             }
         }

         if opts.timeout > 0 && start_time.elapsed().as_micros() > opts.timeout as u128 {
-            return true;
+            return ControlFlow::Break(());
         }

-        false
+        ControlFlow::Continue(())
     };

     let parse_opts = ParseOptions::new().progress_callback(progress_callback);
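The progress callback now signals cancellation through `std::ops::ControlFlow` instead of a bare `bool`, which makes the break condition explicit at the call site. A self-contained sketch of the same shape; the factory function is an illustration, not the diff's actual wiring, which passes the closure to `ParseOptions::new().progress_callback(...)`:

```rust
use std::ops::ControlFlow;
use std::sync::{
    atomic::{AtomicUsize, Ordering},
    Arc,
};
use std::time::Instant;

// Illustrative stand-in for the real callback wiring.
fn make_progress_callback(
    cancellation_flag: Option<Arc<AtomicUsize>>,
    start_time: Instant,
    timeout_us: u64,
) -> impl FnMut() -> ControlFlow<()> {
    move || {
        if let Some(flag) = &cancellation_flag {
            if flag.load(Ordering::SeqCst) != 0 {
                return ControlFlow::Break(()); // cancelled via ctrl-c
            }
        }
        if timeout_us > 0 && start_time.elapsed().as_micros() > u128::from(timeout_us) {
            return ControlFlow::Break(()); // --timeout exceeded
        }
        ControlFlow::Continue(()) // keep parsing
    }
}
```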
@@ -315,29 +430,32 @@ pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
             Some(parse_opts),
         ),
     };
+    let parse_duration = parse_time.elapsed();

     let stdout = io::stdout();
     let mut stdout = stdout.lock();

     if let Some(mut tree) = tree {
         if opts.debug_graph && !opts.edits.is_empty() {
-            println!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
+            info!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
         }

+        let edit_time = Instant::now();
         for (i, edit) in opts.edits.iter().enumerate() {
             let edit = parse_edit_flag(&source_code, edit)?;
             perform_edit(&mut tree, &mut source_code, &edit)?;
             tree = parser.parse(&source_code, Some(&tree)).unwrap();

             if opts.debug_graph {
-                println!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
+                info!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
             }
         }
+        let edit_duration = edit_time.elapsed();

         parser.stop_printing_dot_graphs();

-        let duration = time.elapsed();
-        let duration_ms = duration.as_micros() as f64 / 1e3;
+        let parse_duration_ms = parse_duration.as_micros() as f64 / 1e3;
+        let edit_duration_ms = edit_duration.as_micros() as f64 / 1e3;
         let mut cursor = tree.walk();

         if opts.output == ParseOutput::Normal {

@@ -396,55 +514,23 @@ pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
         }

         if opts.output == ParseOutput::Cst {
-            let lossy_source_code = String::from_utf8_lossy(&source_code);
-            let total_width = lossy_source_code
-                .lines()
-                .enumerate()
-                .map(|(row, col)| {
-                    (row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1
-                })
-                .max()
-                .unwrap_or(1);
-            let mut indent_level = 1;
-            let mut did_visit_children = false;
-            loop {
-                if did_visit_children {
-                    if cursor.goto_next_sibling() {
-                        did_visit_children = false;
-                    } else if cursor.goto_parent() {
-                        did_visit_children = true;
-                        indent_level -= 1;
-                    } else {
-                        break;
-                    }
-                } else {
-                    cst_render_node(
-                        opts,
-                        &mut cursor,
-                        &source_code,
-                        &mut stdout,
-                        total_width,
-                        indent_level,
-                    )?;
-                    if cursor.goto_first_child() {
-                        did_visit_children = false;
-                        indent_level += 1;
-                    } else {
-                        did_visit_children = true;
-                    }
-                }
-            }
-            cursor.reset(tree.root_node());
-            println!();
+            render_cst(&source_code, &tree, &mut cursor, opts, &mut stdout)?;
         }

         if opts.output == ParseOutput::Xml {
             let mut needs_newline = false;
-            let mut indent_level = 0;
+            let mut indent_level = 2;
             let mut did_visit_children = false;
             let mut had_named_children = false;
             let mut tags = Vec::<&str>::new();
-            writeln!(&mut stdout, "<?xml version=\"1.0\"?>")?;
+            // If we're parsing the first file, write the header
+            if opts.stats.parse_summaries.is_empty() {
+                writeln!(&mut stdout, "<?xml version=\"1.0\"?>")?;
+                writeln!(&mut stdout, "<sources>")?;
+            }
+            writeln!(&mut stdout, " <source name=\"{}\">", path.display())?;
+
             loop {
                 let node = cursor.node();
                 let is_named = node.is_named();

@@ -459,7 +545,7 @@ pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
                     write!(&mut stdout, "</{}>", tag.expect("there is a tag"))?;
                     // we only write a line in the case where it's the last sibling
                     if let Some(parent) = node.parent() {
-                        if parent.child(parent.child_count() - 1).unwrap() == node {
+                        if parent.child(parent.child_count() as u32 - 1).unwrap() == node {
                             stdout.write_all(b"\n")?;
                         }
                     }

@@ -523,8 +609,14 @@ pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
                     }
                 }
             }
+            writeln!(&mut stdout)?;
+            writeln!(&mut stdout, " </source>")?;
+
+            // If we parsed the last file, write the closing tag for the `sources` header
+            if opts.stats.parse_summaries.len() == opts.stats.source_count - 1 {
+                writeln!(&mut stdout, "</sources>")?;
+            }
             cursor.reset(tree.root_node());
-            println!();
         }

         if opts.output == ParseOutput::Dot {

@@ -532,14 +624,39 @@ pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
         }

         let mut first_error = None;
-        loop {
+        let mut earliest_node_with_error = None;
+        'outer: loop {
             let node = cursor.node();
             if node.has_error() {
+                if earliest_node_with_error.is_none() {
+                    earliest_node_with_error = Some(node);
+                }
                 if node.is_error() || node.is_missing() {
                     first_error = Some(node);
                     break;
                 }

+                // If there's no more children, even though some outer node has an error,
+                // then that means that the first error is hidden, but the later error could be
+                // visible. So, we walk back up to the child of the first node with an error,
+                // and then check its siblings for errors.
                 if !cursor.goto_first_child() {
+                    let earliest = earliest_node_with_error.unwrap();
+                    while cursor.goto_parent() {
+                        if cursor.node().parent().is_some_and(|p| p == earliest) {
+                            while cursor.goto_next_sibling() {
+                                let sibling = cursor.node();
+                                if sibling.is_error() || sibling.is_missing() {
+                                    first_error = Some(sibling);
+                                    break 'outer;
+                                }
+                                if sibling.has_error() && cursor.goto_first_child() {
+                                    continue 'outer;
+                                }
+                            }
+                            break;
+                        }
+                    }
                     break;
                 }
             } else if !cursor.goto_next_sibling() {

@@ -548,63 +665,88 @@ pub fn parse_file_at_path(parser: &mut Parser, opts: &ParseFileOptions) -> Result<ParseResult> {
         }

         if first_error.is_some() || opts.print_time {
+            let path = path.to_string_lossy();
             write!(
                 &mut stdout,
-                "{:width$}\t{duration_ms:>7.2} ms\t{:>6} bytes/ms",
-                opts.path.to_str().unwrap(),
-                (source_code.len() as u128 * 1_000_000) / duration.as_nanos(),
-                width = opts.max_path_length
+                "{:width$}\tParse: {parse_duration_ms:>7.2} ms\t{:>6} bytes/ms",
+                name,
+                (source_code.len() as u128 * 1_000_000) / parse_duration.as_nanos(),
+                width = max_path_length
             )?;
             if let Some(node) = first_error {
-                let start = node.start_position();
-                let end = node.end_position();
+                let node_kind = node.kind();
+                let mut node_text = String::with_capacity(node_kind.len());
+                for c in node_kind.chars() {
+                    if let Some(escaped) = escape_invisible(c) {
+                        node_text += escaped;
+                    } else {
+                        node_text.push(c);
+                    }
+                }
                 write!(&mut stdout, "\t(")?;
                 if node.is_missing() {
                     if node.is_named() {
-                        write!(&mut stdout, "MISSING {}", node.kind())?;
+                        write!(&mut stdout, "MISSING {node_text}")?;
                     } else {
-                        write!(
-                            &mut stdout,
-                            "MISSING \"{}\"",
-                            node.kind().replace('\n', "\\n")
-                        )?;
+                        write!(&mut stdout, "MISSING \"{node_text}\"")?;
                     }
                 } else {
-                    write!(&mut stdout, "{}", node.kind())?;
+                    write!(&mut stdout, "{node_text}")?;
                 }

+                let start = node.start_position();
+                let end = node.end_position();
                 write!(
                     &mut stdout,
                     " [{}, {}] - [{}, {}])",
                     start.row, start.column, end.row, end.column
                 )?;
             }
+            if !opts.edits.is_empty() {
+                write!(
+                    &mut stdout,
+                    "\n{:width$}\tEdit: {edit_duration_ms:>7.2} ms",
+                    " ".repeat(path.len()),
+                    width = max_path_length,
+                )?;
+            }
             writeln!(&mut stdout)?;
         }

-        return Ok(ParseResult {
+        opts.stats.parse_summaries.push(ParseSummary {
+            file: path.to_path_buf(),
             successful: first_error.is_none(),
-            bytes: source_code.len(),
-            duration: Some(duration),
+            start: Some(tree.root_node().start_position().into()),
+            end: Some(tree.root_node().end_position().into()),
+            duration: Some(parse_duration),
+            bytes: Some(source_code.len()),
         });
+
+        return Ok(());
     }
     parser.stop_printing_dot_graphs();

     if opts.print_time {
-        let duration = time.elapsed();
+        let duration = parse_time.elapsed();
         let duration_ms = duration.as_micros() as f64 / 1e3;
         writeln!(
             &mut stdout,
-            "{:width$}\t{duration_ms:>7.2} ms\t(timed out)",
-            opts.path.to_str().unwrap(),
-            width = opts.max_path_length
+            "{:width$}\tParse: {duration_ms:>7.2} ms\t(timed out)",
+            path.to_str().unwrap(),
+            width = max_path_length
         )?;
     }

-    Ok(ParseResult {
+    opts.stats.parse_summaries.push(ParseSummary {
+        file: path.to_path_buf(),
         successful: false,
-        bytes: source_code.len(),
+        start: None,
+        end: None,
         duration: None,
-    })
+        bytes: Some(source_code.len()),
+    });
+
+    Ok(())
 }

 const fn escape_invisible(c: char) -> Option<&'static str> {

@@ -620,12 +762,77 @@ const fn escape_invisible(c: char) -> Option<&'static str> {
     })
 }

+const fn escape_delimiter(c: char) -> Option<&'static str> {
+    Some(match c {
+        '`' => "\\`",
+        '\"' => "\\\"",
+        _ => return None,
+    })
+}
+
+pub fn render_cst<'a, 'b: 'a>(
+    source_code: &[u8],
+    tree: &'b Tree,
+    cursor: &mut TreeCursor<'a>,
+    opts: &ParseFileOptions,
+    out: &mut impl Write,
+) -> Result<()> {
+    let lossy_source_code = String::from_utf8_lossy(source_code);
+    let total_width = lossy_source_code
+        .lines()
+        .enumerate()
+        .map(|(row, col)| (row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1)
+        .max()
+        .unwrap_or(1);
+    let mut indent_level = usize::from(!opts.no_ranges);
+    let mut did_visit_children = false;
+    let mut in_error = false;
+    loop {
+        if did_visit_children {
+            if cursor.goto_next_sibling() {
+                did_visit_children = false;
+            } else if cursor.goto_parent() {
+                did_visit_children = true;
+                indent_level -= 1;
+                if !cursor.node().has_error() {
+                    in_error = false;
+                }
+            } else {
+                break;
+            }
+        } else {
+            cst_render_node(
+                opts,
+                cursor,
+                source_code,
+                out,
+                total_width,
+                indent_level,
+                in_error,
+            )?;
+            if cursor.goto_first_child() {
+                did_visit_children = false;
+                indent_level += 1;
+                if cursor.node().has_error() {
+                    in_error = true;
+                }
+            } else {
+                did_visit_children = true;
+            }
+        }
+    }
+    cursor.reset(tree.root_node());
+    Ok(())
+}
+
 fn render_node_text(source: &str) -> String {
     source
         .chars()
         .fold(String::with_capacity(source.len()), |mut acc, c| {
             if let Some(esc) = escape_invisible(c) {
                 acc.push_str(esc);
+            } else if let Some(esc) = escape_delimiter(c) {
+                acc.push_str(esc);
             } else {
                 acc.push(c);
             }

@@ -635,7 +842,7 @@ fn render_node_text(source: &str) -> String {

 fn write_node_text(
     opts: &ParseFileOptions,
-    stdout: &mut StdoutLock<'static>,
+    out: &mut impl Write,
     cursor: &TreeCursor,
     is_named: bool,
     source: &str,

@@ -651,13 +858,14 @@ fn write_node_text(

     if !is_named {
         write!(
-            stdout,
+            out,
             "{}{}{}",
             paint(quote_color, &String::from(quote)),
             paint(color, &render_node_text(source)),
             paint(quote_color, &String::from(quote)),
         )?;
     } else {
+        let multiline = source.contains('\n');
         for (i, line) in source.split_inclusive('\n').enumerate() {
             if line.is_empty() {
                 break;

@@ -667,28 +875,31 @@ fn write_node_text(
             // and adjust the column by setting it to the length of *this* line.
             node_range.start_point.row += i;
             node_range.end_point.row = node_range.start_point.row;
-            node_range.end_point.column = line.len();
+            node_range.end_point.column = line.len()
+                + if i == 0 {
+                    node_range.start_point.column
+                } else {
+                    0
+                };
             let formatted_line = render_line_feed(line, opts);
-            if !opts.no_ranges {
-                write!(
-                    stdout,
-                    "\n{}{}{}{}{}",
-                    render_node_range(opts, cursor, is_named, true, total_width, node_range),
-                    " ".repeat(indent_level + 1),
-                    paint(quote_color, &String::from(quote)),
-                    &paint(color, &render_node_text(&formatted_line)),
-                    paint(quote_color, &String::from(quote)),
-                )?;
-            } else {
-                write!(
-                    stdout,
-                    "\n{}{}{}{}",
-                    " ".repeat(indent_level + 1),
-                    paint(quote_color, &String::from(quote)),
-                    &paint(color, &render_node_text(&formatted_line)),
-                    paint(quote_color, &String::from(quote)),
-                )?;
-            }
+            write!(
+                out,
+                "{}{}{}{}{}{}",
+                if multiline { "\n" } else { " " },
+                if multiline && !opts.no_ranges {
+                    render_node_range(opts, cursor, is_named, true, total_width, node_range)
+                } else {
+                    String::new()
+                },
+                if multiline {
+                    " ".repeat(indent_level + 1)
+                } else {
+                    String::new()
+                },
+                paint(quote_color, &String::from(quote)),
+                paint(color, &render_node_text(&formatted_line)),
+                paint(quote_color, &String::from(quote)),
+            )?;
         }
     }

@@ -742,46 +953,59 @@ fn render_node_range(

 fn cst_render_node(
     opts: &ParseFileOptions,
-    cursor: &mut TreeCursor,
+    cursor: &TreeCursor,
     source_code: &[u8],
-    stdout: &mut StdoutLock<'static>,
+    out: &mut impl Write,
     total_width: usize,
     indent_level: usize,
+    in_error: bool,
 ) -> Result<()> {
     let node = cursor.node();
     let is_named = node.is_named();
     if !opts.no_ranges {
         write!(
-            stdout,
+            out,
             "{}",
             render_node_range(opts, cursor, is_named, false, total_width, node.range())
         )?;
     }
-    write!(stdout, "{}", " ".repeat(indent_level))?;
+    write!(
+        out,
+        "{}{}",
+        " ".repeat(indent_level),
+        if in_error && !node.has_error() {
+            " "
+        } else {
+            ""
+        }
+    )?;
     if is_named {
         if let Some(field_name) = cursor.field_name() {
             write!(
-                stdout,
+                out,
                 "{}",
                 paint(opts.parse_theme.field, &format!("{field_name}: "))
             )?;
         }

-        let kind_color = if node.has_error() {
-            write!(stdout, "{}", paint(opts.parse_theme.error, "•"))?;
+        if node.has_error() || node.is_error() {
+            write!(out, "{}", paint(opts.parse_theme.error, "•"))?;
+        }
+
+        let kind_color = if node.is_error() {
             opts.parse_theme.error
-        } else if node.is_extra() || node.parent().is_some_and(|p| p.is_extra()) {
+        } else if node.is_extra() || node.parent().is_some_and(|p| p.is_extra() && !p.is_error()) {
             opts.parse_theme.extra
         } else {
             opts.parse_theme.node_kind
         };
-        write!(stdout, "{} ", paint(kind_color, node.kind()))?;
+        write!(out, "{}", paint(kind_color, node.kind()))?;

         if node.child_count() == 0 {
             // Node text from a pattern or external scanner
             write_node_text(
                 opts,
-                stdout,
+                out,
                 cursor,
                 is_named,
                 &String::from_utf8_lossy(&source_code[node.start_byte()..node.end_byte()]),

@@ -790,17 +1014,13 @@ fn cst_render_node(
             )?;
         }
     } else if node.is_missing() {
-        write!(stdout, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
-        write!(
-            stdout,
-            "\"{}\"",
-            paint(opts.parse_theme.missing, node.kind())
-        )?;
+        write!(out, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
+        write!(out, "\"{}\"", paint(opts.parse_theme.missing, node.kind()))?;
     } else {
         // Terminal literals, like "fn"
         write_node_text(
             opts,
-            stdout,
+            out,
             cursor,
             is_named,
             node.kind(),

@@ -808,7 +1028,7 @@ fn cst_render_node(
             (total_width, indent_level),
         )?;
     }
-    writeln!(stdout)?;
+    writeln!(out)?;

     Ok(())
 }

@@ -895,7 +1115,7 @@ pub fn offset_for_position(input: &[u8], position: Point) -> Result<usize> {
     if let Some(pos) = iter.next() {
         if (pos - offset < position.column) || (input[offset] == b'\n' && position.column > 0) {
             return Err(anyhow!("Failed to address a column: {}", position.column));
-        };
+        }
     } else if input.len() - offset < position.column {
         return Err(anyhow!("Failed to address a column over the end"));
     }
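To make the two escape helpers concrete: `render_node_text` now escapes quote delimiters as well as invisible characters, so node text containing a backtick or double quote renders unambiguously. A hedged illustration; the `'\n'` mapping belongs to `escape_invisible`, whose full table is outside this diff:

```rust
#[test]
fn escapes_delimiters_in_node_text() {
    // Delimiter escaping added in this hunk:
    assert_eq!(render_node_text("say \"hi\""), "say \\\"hi\\\"");
    assert_eq!(render_node_text("a`b"), "a\\`b");
    // escape_invisible is expected to turn '\n' into "\\n", matching the old
    // `node.kind().replace('\n', "\\n")` call it replaced above.
}
```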
crates/cli/src/playground.html (new file, 481 lines)
|
|
@ -0,0 +1,481 @@
<!DOCTYPE html>
<html>

<head>
  <meta charset="utf-8">
  <title>tree-sitter THE_LANGUAGE_NAME</title>
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.css">
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.css">
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-32x32.png"
    sizes="32x32" />
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-16x16.png"
    sizes="16x16" />
  <style>
    /* Base Variables */
    :root {
      --light-bg: #f9f9f9;
      --light-border: #e0e0e0;
      --light-text: #333;
      --light-hover-border: #c1c1c1;
      --light-scrollbar-track: #f1f1f1;
      --light-scrollbar-thumb: #c1c1c1;
      --light-scrollbar-thumb-hover: #a8a8a8;
      --light-tree-row-bg: #e3f2fd;

      --dark-bg: #1d1f21;
      --dark-border: #2d2d2d;
      --dark-text: #c5c8c6;
      --dark-panel-bg: #252526;
      --dark-code-bg: #1e1e1e;
      --dark-scrollbar-track: #25282c;
      --dark-scrollbar-thumb: #4a4d51;
      --dark-scrollbar-thumb-hover: #5a5d61;
      --dark-tree-row-bg: #373737;

      --primary-color: #0550ae;
      --primary-color-alpha: rgba(5, 80, 174, 0.1);
      --primary-color-alpha-dark: rgba(121, 192, 255, 0.1);
      --selection-color: rgba(39, 95, 255, 0.3);
    }

    /* Theme Colors */
    [data-theme="dark"] {
      --bg-color: var(--dark-bg);
      --border-color: var(--dark-border);
      --text-color: var(--dark-text);
      --panel-bg: var(--dark-panel-bg);
      --code-bg: var(--dark-code-bg);
      --tree-row-bg: var(--dark-tree-row-bg);
    }

    [data-theme="light"] {
      --bg-color: var(--light-bg);
      --border-color: var(--light-border);
      --text-color: var(--light-text);
      --panel-bg: white;
      --code-bg: white;
      --tree-row-bg: var(--light-tree-row-bg);
    }

    /* Base Styles */
    body {
      margin: 0;
      padding: 0;
      font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
      background-color: var(--bg-color);
      color: var(--text-color);
    }

    /* Layout */
    #playground-container {
      width: 100%;
      height: 100vh;
      display: flex;
      flex-direction: column;
      background-color: var(--bg-color);
    }

    header {
      padding: 16px 24px;
      border-bottom: 1px solid var(--border-color);
      display: flex;
      align-items: center;
      gap: 20px;
      background-color: var(--panel-bg);
      font-size: 14px;
    }

    .header-item {
      display: flex;
      align-items: center;
      gap: 8px;
    }

    .language-name,
    #language-version {
      font-weight: 600;
    }

    main {
      flex: 1;
      display: flex;
      overflow: hidden;
    }

    #input-pane {
      width: 50%;
      display: flex;
      flex-direction: column;
      border-right: 1px solid var(--border-color);
      background-color: var(--panel-bg);
      overflow: hidden;
    }

    #code-container {
      flex: 1;
      min-height: 0;
      position: relative;
      border-bottom: 1px solid var(--border-color);
      display: flex;
      flex-direction: column;
    }

    #query-container:not([style*="visibility: hidden"]) {
      flex: 1;
      min-height: 0;
      display: flex;
      flex-direction: column;
    }

    #query-container .panel-header {
      flex: 0 0 auto;
    }

    #query-container .CodeMirror {
      flex: 1;
      position: relative;
      min-height: 0;
    }

    #output-container-scroll {
      width: 50%;
      overflow: auto;
      background-color: var(--panel-bg);
      padding: 0;
      display: flex;
      flex-direction: column;
    }

    #output-container {
      font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
      line-height: 1.5;
      margin: 0;
      padding: 16px;
    }

    .panel-header {
      padding: 8px 16px;
      font-weight: 600;
      font-size: 14px;
      border-bottom: 1px solid var(--border-color);
      background-color: var(--panel-bg);
      display: flex;
      align-items: center;
      gap: 8px;
    }

    .CodeMirror {
      position: absolute;
      top: 0;
      left: 0;
      right: 0;
      bottom: 0;
      height: 100%;
      font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
      font-size: 14px;
      line-height: 1.6;
      background-color: var(--code-bg) !important;
      color: var(--text-color) !important;
    }

    .query-error {
      text-decoration: underline red dashed;
      -webkit-text-decoration: underline red dashed;
    }

    /* Scrollbars */
    ::-webkit-scrollbar {
      width: 8px;
      height: 8px;
    }

    ::-webkit-scrollbar-track {
      border-radius: 4px;
      background: var(--light-scrollbar-track);
    }

    ::-webkit-scrollbar-thumb {
      border-radius: 4px;
      background: var(--light-scrollbar-thumb);
    }

    ::-webkit-scrollbar-thumb:hover {
      background: var(--light-scrollbar-thumb-hover);
    }

    [data-theme="dark"] {
      ::-webkit-scrollbar-track {
        background: var(--dark-scrollbar-track) !important;
      }

      ::-webkit-scrollbar-thumb {
        background: var(--dark-scrollbar-thumb) !important;
      }

      ::-webkit-scrollbar-thumb:hover {
        background: var(--dark-scrollbar-thumb-hover) !important;
      }
    }

    /* Theme Toggle */
    .theme-toggle {
      background: none;
      border: 1px solid var(--border-color);
      border-radius: 4px;
      padding: 6px;
      cursor: pointer;
      color: var(--text-color);
    }

    .theme-toggle:hover {
      background-color: var(--primary-color-alpha);
    }

    [data-theme="light"] .moon-icon,
    [data-theme="dark"] .sun-icon {
      display: none;
    }

    /* Form Elements */
    input[type="checkbox"] {
      margin-right: 6px;
      vertical-align: middle;
    }

    label {
      font-size: 14px;
      margin-right: 16px;
      cursor: pointer;
    }

    #output-container a {
      cursor: pointer;
      text-decoration: none;
      color: #040404;
      padding: 2px;
    }

    #output-container a:hover {
      text-decoration: underline;
    }

    #output-container a.node-link.named {
      color: #0550ae;
    }

    #output-container a.node-link.anonymous {
      color: #116329;
    }

    #output-container a.node-link.anonymous:before {
      content: '"';
    }

    #output-container a.node-link.anonymous:after {
      content: '"';
    }

    #output-container a.node-link.error {
      color: #cf222e;
    }

    #output-container a.highlighted {
      background-color: #cae2ff;
      color: red;
      border-radius: 3px;
      text-decoration: underline;
    }

    #copy-button {
      background: none;
      border: 1px solid var(--border-color);
      border-radius: 4px;
      padding: 6px;
      cursor: pointer;
      color: var(--text-color);
      display: inline-flex;
      align-items: center;
      justify-content: center;
      margin-left: 8px;
    }

    #copy-button:hover {
      background-color: var(--primary-color-alpha);
    }

    #copy-button:focus {
      outline: none;
      border-color: var(--primary-color);
      box-shadow: 0 0 0 2px var(--primary-color-alpha);
    }

    .toast {
      position: fixed;
      bottom: 20px;
      right: 20px;
      background-color: var(--light-text);
      color: white;
      padding: 12px 16px;
      border-radius: 6px;
      box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
      font-size: 14px;
      font-weight: 500;
      opacity: 0;
      transform: translateY(20px);
      transition: all 0.3s ease;
      z-index: 1000;
      pointer-events: none;
    }

    .toast.show {
      opacity: 1;
      transform: translateY(0);
    }

    /* Dark Theme Node Colors */
    [data-theme="dark"] {
      & #output-container a {
        color: #d4d4d4;
      }

      & #output-container a.node-link.named {
        color: #79c0ff;
      }

      & #output-container a.node-link.anonymous {
        color: #7ee787;
      }

      & #output-container a.node-link.error {
        color: #ff7b72;
      }

      & #output-container a.highlighted {
        background-color: #656669;
        color: red;
      }

      & .CodeMirror {
        background-color: var(--dark-code-bg) !important;
        color: var(--dark-text) !important;
      }

      & .CodeMirror-gutters {
        background-color: var(--dark-panel-bg) !important;
        border-color: var(--dark-border) !important;
      }

      & .CodeMirror-cursor {
        border-color: var(--dark-text) !important;
      }

      & .CodeMirror-selected {
        background-color: rgba(255, 255, 255, 0.1) !important;
      }

      & .toast {
        background-color: var(--dark-bg);
        color: var(--dark-text);
      }
    }

    .tree-row:has(.highlighted) {
      background-color: var(--tree-row-bg);
    }
  </style>
</head>

<body>
  <div id="playground-container" style="visibility: hidden;">
    <header>
      <div class="header-item">
        <span class="language-name">Language: THE_LANGUAGE_NAME</span>
        <span id="language-version"></span>
      </div>

      <div class="header-item">
        <input id="logging-checkbox" type="checkbox">
        <label for="logging-checkbox">log</label>
      </div>

      <div class="header-item">
        <input id="anonymous-nodes-checkbox" type="checkbox">
        <label for="anonymous-nodes-checkbox">show anonymous nodes</label>
      </div>

      <div class="header-item">
        <input id="query-checkbox" type="checkbox">
        <label for="query-checkbox">query</label>
      </div>

      <div class="header-item">
        <input id="accessibility-checkbox" type="checkbox">
        <label for="accessibility-checkbox">accessibility</label>
      </div>

      <div class="header-item">
        <label for="update-time">parse time: </label>
        <span id="update-time"></span>
      </div>

      <div class="header-item">
        <a href="https://tree-sitter.github.io/tree-sitter/7-playground.html#about">(?)</a>
      </div>

      <select id="language-select" style="display: none;">
        <option value="parser">Parser</option>
      </select>

      <div class="header-item">
        <button id="theme-toggle" class="theme-toggle" aria-label="Toggle theme">
          <svg class="sun-icon" viewBox="0 0 24 24" width="16" height="16">
            <path fill="currentColor"
              d="M12 17.5a5.5 5.5 0 1 0 0-11 5.5 5.5 0 0 0 0 11zm0 1.5a7 7 0 1 1 0-14 7 7 0 0 1 0 14zm0-16a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0V4a1 1 0 0 1 1-1zm0 15a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0v-2a1 1 0 0 1 1-1zm9-9a1 1 0 0 1-1 1h-2a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1zM4 12a1 1 0 0 1-1 1H1a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1z" />
          </svg>
          <svg class="moon-icon" viewBox="0 0 24 24" width="16" height="16">
            <path fill="currentColor"
              d="M12.1 22c-5.5 0-10-4.5-10-10s4.5-10 10-10c.2 0 .3 0 .5.1-1.3 1.4-2 3.2-2 5.2 0 4.1 3.4 7.5 7.5 7.5 2 0 3.8-.7 5.2-2 .1.2.1.3.1.5 0 5.4-4.5 9.7-10 9.7z" />
          </svg>
        </button>
      </div>
    </header>

    <main>
      <div id="input-pane">
        <div class="panel-header">Code</div>
        <div id="code-container">
          <textarea id="code-input"></textarea>
        </div>

        <div id="query-container" style="visibility: hidden; position: absolute;">
          <div class="panel-header">Query</div>
          <textarea id="query-input"></textarea>
        </div>
      </div>

      <div id="output-container-scroll">
        <div class="panel-header">
          Tree
          <button type="button" id="copy-button" class="theme-toggle" aria-label="Copy tree">
            <svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"
              stroke-linecap="round" stroke-linejoin="round">
              <rect width="14" height="14" x="8" y="8" rx="2" ry="2" />
              <path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2" />
            </svg>
          </button>
        </div>
        <pre id="output-container" class="highlight"></pre>
      </div>
    </main>
  </div>

  <script src="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.js"></script>

  <script>LANGUAGE_BASE_URL = "";</script>
  <script type="module" src="playground.js"></script>
  <script type="module">
    import * as TreeSitter from './web-tree-sitter.js';
    window.TreeSitter = TreeSitter;
    setTimeout(() => window.initializePlayground({local: true}), 1)
  </script>
</body>

</html>
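The `THE_LANGUAGE_NAME` string and the `playground.js` / `./web-tree-sitter.js` script references in this template are placeholders that the Rust code below rewrites. A minimal sketch of that substitution step, mirroring the `replace` calls in the new `export` function (the helper name here is hypothetical):

// Specialize the embedded template for one grammar, as `export` does below.
fn specialize_playground(template: &str, grammar_name: &str) -> String {
    template.replace("THE_LANGUAGE_NAME", grammar_name)
}

// e.g. specialize_playground(include_str!("playground.html"), "javascript")
//      yields a page titled "tree-sitter javascript".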
@@ -7,6 +7,7 @@ use std::{
 };

 use anyhow::{anyhow, Context, Result};
+use log::{error, info};
 use tiny_http::{Header, Response, Server};

 use super::wasm;

@@ -18,7 +19,7 @@ macro_rules! optional_resource {
         if let Some(tree_sitter_dir) = tree_sitter_dir {
             Cow::Owned(fs::read(tree_sitter_dir.join($path)).unwrap())
         } else {
-            Cow::Borrowed(include_bytes!(concat!("../../", $path)))
+            Cow::Borrowed(include_bytes!(concat!("../../../", $path)))
         }
     }

@@ -33,26 +34,92 @@ macro_rules! optional_resource {
     };
 }

-optional_resource!(get_playground_js, "docs/assets/js/playground.js");
+optional_resource!(get_playground_js, "docs/src/assets/js/playground.js");
-optional_resource!(get_lib_js, "lib/binding_web/tree-sitter.js");
+optional_resource!(get_lib_js, "lib/binding_web/web-tree-sitter.js");
-optional_resource!(get_lib_wasm, "lib/binding_web/tree-sitter.wasm");
+optional_resource!(get_lib_wasm, "lib/binding_web/web-tree-sitter.wasm");

 fn get_main_html(tree_sitter_dir: Option<&Path>) -> Cow<'static, [u8]> {
     tree_sitter_dir.map_or(
         Cow::Borrowed(include_bytes!("playground.html")),
         |tree_sitter_dir| {
-            Cow::Owned(fs::read(tree_sitter_dir.join("cli/src/playground.html")).unwrap())
+            Cow::Owned(fs::read(tree_sitter_dir.join("crates/cli/src/playground.html")).unwrap())
         },
     )
 }

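For orientation, `optional_resource!(name, path)` generates a function that prefers a file from a local tree-sitter checkout and otherwise falls back to bytes embedded at compile time. Roughly what the first invocation expands to, based on the macro body in the hunk above (the embedded bytes are stubbed so this sketch compiles outside the repository):

use std::{borrow::Cow, fs, path::Path};

fn get_playground_js(tree_sitter_dir: Option<&Path>) -> Cow<'static, [u8]> {
    if let Some(tree_sitter_dir) = tree_sitter_dir {
        // Inside a checkout: read the asset fresh from disk.
        Cow::Owned(fs::read(tree_sitter_dir.join("docs/src/assets/js/playground.js")).unwrap())
    } else {
        // The real macro emits Cow::Borrowed(include_bytes!(concat!("../../../", $path))),
        // which only compiles within the crate; stubbed as empty bytes here.
        Cow::Borrowed(b"" as &[u8])
    }
}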
+pub fn export(grammar_path: &Path, export_path: &Path) -> Result<()> {
+    let (grammar_name, language_wasm) = wasm::load_language_wasm_file(grammar_path)?;
+
+    fs::create_dir_all(export_path).with_context(|| {
+        format!(
+            "Failed to create export directory: {}",
+            export_path.display()
+        )
+    })?;
+
+    let tree_sitter_dir = env::var("TREE_SITTER_BASE_DIR").map(PathBuf::from).ok();
+
+    let playground_js = get_playground_js(tree_sitter_dir.as_deref());
+    let lib_js = get_lib_js(tree_sitter_dir.as_deref());
+    let lib_wasm = get_lib_wasm(tree_sitter_dir.as_deref());
+
+    let has_local_playground_js = !playground_js.is_empty();
+    let has_local_lib_js = !lib_js.is_empty();
+    let has_local_lib_wasm = !lib_wasm.is_empty();
+
+    let mut main_html = str::from_utf8(&get_main_html(tree_sitter_dir.as_deref()))
+        .unwrap()
+        .replace("THE_LANGUAGE_NAME", &grammar_name);
+
+    if !has_local_playground_js {
+        main_html = main_html.replace(
+            r#"<script type="module" src="playground.js"></script>"#,
+            r#"<script type="module" src="https://tree-sitter.github.io/tree-sitter/assets/js/playground.js"></script>"#
+        );
+    }
+    if !has_local_lib_js {
+        main_html = main_html.replace(
+            "import * as TreeSitter from './web-tree-sitter.js';",
+            "import * as TreeSitter from 'https://tree-sitter.github.io/web-tree-sitter.js';",
+        );
+    }
+
+    fs::write(export_path.join("index.html"), main_html.as_bytes())
+        .with_context(|| "Failed to write index.html")?;
+
+    fs::write(export_path.join("tree-sitter-parser.wasm"), language_wasm)
+        .with_context(|| "Failed to write parser wasm file")?;
+
+    if has_local_playground_js {
+        fs::write(export_path.join("playground.js"), playground_js)
+            .with_context(|| "Failed to write playground.js")?;
+    }
+
+    if has_local_lib_js {
+        fs::write(export_path.join("web-tree-sitter.js"), lib_js)
+            .with_context(|| "Failed to write web-tree-sitter.js")?;
+    }
+
+    if has_local_lib_wasm {
+        fs::write(export_path.join("web-tree-sitter.wasm"), lib_wasm)
+            .with_context(|| "Failed to write web-tree-sitter.wasm")?;
+    }
+
+    println!(
+        "Exported playground to {}",
+        export_path.canonicalize()?.display()
+    );
+
+    Ok(())
+}

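A hedged sketch of driving the new `export` entry point (the paths are placeholders, and how the CLI wires this to a subcommand is not shown in this diff):

use std::path::Path;

fn run() -> anyhow::Result<()> {
    // Loads the grammar's wasm, then writes index.html,
    // tree-sitter-parser.wasm, and any locally available JS/wasm assets.
    export(
        Path::new("path/to/tree-sitter-javascript"), // grammar directory (placeholder)
        Path::new("exported-playground"),            // output directory, created if missing
    )
}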
 pub fn serve(grammar_path: &Path, open_in_browser: bool) -> Result<()> {
     let server = get_server()?;
     let (grammar_name, language_wasm) = wasm::load_language_wasm_file(grammar_path)?;
     let url = format!("http://{}", server.server_addr());
-    println!("Started playground on: {url}");
+    info!("Started playground on: {url}");
     if open_in_browser && webbrowser::open(&url).is_err() {
-        eprintln!("Failed to open '{url}' in a web browser");
+        error!("Failed to open '{url}' in a web browser");
     }

     let tree_sitter_dir = env::var("TREE_SITTER_BASE_DIR").map(PathBuf::from).ok();

@@ -79,16 +146,16 @@ pub fn serve(grammar_path: &Path, open_in_browser: bool) -> Result<()> {
                     response(&playground_js, &js_header)
                 }
             }
-            "/tree-sitter.js" => {
+            "/web-tree-sitter.js" => {
                 if lib_js.is_empty() {
-                    redirect("https://tree-sitter.github.io/tree-sitter.js")
+                    redirect("https://tree-sitter.github.io/web-tree-sitter.js")
                 } else {
                     response(&lib_js, &js_header)
                 }
             }
-            "/tree-sitter.wasm" => {
+            "/web-tree-sitter.wasm" => {
                 if lib_wasm.is_empty() {
-                    redirect("https://tree-sitter.github.io/tree-sitter.wasm")
+                    redirect("https://tree-sitter.github.io/web-tree-sitter.wasm")
                 } else {
                     response(&lib_wasm, &wasm_header)
                 }
             }
174 crates/cli/src/query.rs Normal file

@@ -0,0 +1,174 @@
use std::{
    fs,
    io::{self, Write},
    ops::Range,
    path::Path,
    time::Instant,
};

use anyhow::{Context, Result};
use log::warn;
use streaming_iterator::StreamingIterator;
use tree_sitter::{Language, Parser, Point, Query, QueryCursor};

use crate::{
    query_testing::{self, to_utf8_point},
    test::{TestInfo, TestOutcome, TestResult, TestSummary},
};

#[derive(Default)]
pub struct QueryFileOptions {
    pub ordered_captures: bool,
    pub byte_range: Option<Range<usize>>,
    pub point_range: Option<Range<Point>>,
    pub containing_byte_range: Option<Range<usize>>,
    pub containing_point_range: Option<Range<Point>>,
    pub quiet: bool,
    pub print_time: bool,
    pub stdin: bool,
}

pub fn query_file_at_path(
    language: &Language,
    path: &Path,
    name: &str,
    query_path: &Path,
    opts: &QueryFileOptions,
    test_summary: Option<&mut TestSummary>,
) -> Result<()> {
    let stdout = io::stdout();
    let mut stdout = stdout.lock();

    let query_source = fs::read_to_string(query_path)
        .with_context(|| format!("Error reading query file {}", query_path.display()))?;
    let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;

    let mut query_cursor = QueryCursor::new();
    if let Some(ref range) = opts.byte_range {
        query_cursor.set_byte_range(range.clone());
    }
    if let Some(ref range) = opts.point_range {
        query_cursor.set_point_range(range.clone());
    }
    if let Some(ref range) = opts.containing_byte_range {
        query_cursor.set_containing_byte_range(range.clone());
    }
    if let Some(ref range) = opts.containing_point_range {
        query_cursor.set_containing_point_range(range.clone());
    }

    let mut parser = Parser::new();
    parser.set_language(language)?;

    let mut results = Vec::new();
    let should_test = test_summary.is_some();

    if !should_test && !opts.stdin {
        writeln!(&mut stdout, "{name}")?;
    }

    let source_code =
        fs::read(path).with_context(|| format!("Error reading source file {}", path.display()))?;
    let tree = parser.parse(&source_code, None).unwrap();

    let start = Instant::now();
    if opts.ordered_captures {
        let mut captures = query_cursor.captures(&query, tree.root_node(), source_code.as_slice());
        while let Some((mat, capture_index)) = captures.next() {
            let capture = mat.captures[*capture_index];
            let capture_name = &query.capture_names()[capture.index as usize];
            if !opts.quiet && !should_test {
                writeln!(
                    &mut stdout,
                    " pattern: {:>2}, capture: {} - {capture_name}, start: {}, end: {}, text: `{}`",
                    mat.pattern_index,
                    capture.index,
                    capture.node.start_position(),
                    capture.node.end_position(),
                    capture.node.utf8_text(&source_code).unwrap_or("")
                )?;
            }
            if should_test {
                results.push(query_testing::CaptureInfo {
                    name: (*capture_name).to_string(),
                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
                    end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
                });
            }
        }
    } else {
        let mut matches = query_cursor.matches(&query, tree.root_node(), source_code.as_slice());
        while let Some(m) = matches.next() {
            if !opts.quiet && !should_test {
                writeln!(&mut stdout, " pattern: {}", m.pattern_index)?;
            }
            for capture in m.captures {
                let start = capture.node.start_position();
                let end = capture.node.end_position();
                let capture_name = &query.capture_names()[capture.index as usize];
                if !opts.quiet && !should_test {
                    if end.row == start.row {
                        writeln!(
                            &mut stdout,
                            " capture: {} - {capture_name}, start: {start}, end: {end}, text: `{}`",
                            capture.index,
                            capture.node.utf8_text(&source_code).unwrap_or("")
                        )?;
                    } else {
                        writeln!(
                            &mut stdout,
                            " capture: {capture_name}, start: {start}, end: {end}",
                        )?;
                    }
                }
                if should_test {
                    results.push(query_testing::CaptureInfo {
                        name: (*capture_name).to_string(),
                        start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
                        end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
                    });
                }
            }
        }
    }
    if query_cursor.did_exceed_match_limit() {
        warn!("Query exceeded maximum number of in-progress captures!");
    }
    if should_test {
        let path_name = if opts.stdin {
            "stdin"
        } else {
            Path::new(&path).file_name().unwrap().to_str().unwrap()
        };
        // Invariant: `test_summary` will always be `Some` when `should_test` is true
        let test_summary = test_summary.unwrap();
        match query_testing::assert_expected_captures(&results, path, &mut parser, language) {
            Ok(assertion_count) => {
                test_summary.query_results.add_case(TestResult {
                    name: path_name.to_string(),
                    info: TestInfo::AssertionTest {
                        outcome: TestOutcome::AssertionPassed { assertion_count },
                        test_num: test_summary.test_num,
                    },
                });
            }
            Err(e) => {
                test_summary.query_results.add_case(TestResult {
                    name: path_name.to_string(),
                    info: TestInfo::AssertionTest {
                        outcome: TestOutcome::AssertionFailed {
                            error: e.to_string(),
                        },
                        test_num: test_summary.test_num,
                    },
                });
                return Err(e);
            }
        }
    }
    if opts.print_time {
        writeln!(&mut stdout, "{:?}", start.elapsed())?;
    }

    Ok(())
}
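A usage sketch for the new query runner, assuming a `Language` is already loaded (the paths and names are placeholders; passing `None` for the summary selects plain match printing rather than assertion testing):

use std::path::Path;
use tree_sitter::Language;

fn run(language: &Language) -> anyhow::Result<()> {
    let opts = QueryFileOptions {
        print_time: true,                 // print the elapsed query time
        ..QueryFileOptions::default()     // no byte/point restriction, not quiet
    };
    query_file_at_path(
        language,
        Path::new("examples/test.js"),       // source file to parse (placeholder)
        "examples/test.js",                  // display name
        Path::new("queries/highlights.scm"), // query file (placeholder)
        &opts,
        None, // no TestSummary: report matches instead of checking assertions
    )
}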
Some files were not shown because too many files have changed in this diff.