Compare commits
540 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6739742fb6 | ||
|
|
d251226a3c | ||
|
|
ae8184b8b9 | ||
|
|
470ecf8996 | ||
|
|
0cdb6bef7b | ||
|
|
cd603fa981 | ||
|
|
b12009a746 | ||
|
|
9f9a0bc410 | ||
|
|
5d290a2a75 | ||
|
|
5808350bfe | ||
|
|
e64e74d5ed | ||
|
|
1a88b26a10 | ||
|
|
6c05cdfb0c | ||
|
|
aefae11c0d | ||
|
|
630fa52717 | ||
|
|
eea85f4eff | ||
|
|
cd6672701b | ||
|
|
f4ca3d95ca | ||
|
|
17e3c7a5c5 | ||
|
|
dd60d5cff0 | ||
|
|
f1288ea5c9 | ||
|
|
47ae060966 | ||
|
|
a1893b4420 | ||
|
|
999e041d49 | ||
|
|
0d4d854809 | ||
|
|
93d793d249 | ||
|
|
82486d4b0a | ||
|
|
5d9605a91e | ||
|
|
5293dd683e | ||
|
|
62effdf128 | ||
|
|
8e4f21aba0 | ||
|
|
5208299bbb | ||
|
|
ba7350c7ee | ||
|
|
f96d518ebf | ||
|
|
d5b82fbbab | ||
|
|
a7d8c0cbb2 | ||
|
|
24007727d4 | ||
|
|
6aa63a7213 | ||
|
|
eacb95c85d | ||
|
|
6967640571 | ||
|
|
4ac2d5d276 | ||
|
|
642b56d9af | ||
|
|
0574fcf256 | ||
|
|
98de2bc1a8 | ||
|
|
cd4b6e2ef9 | ||
|
|
8caecbc13f | ||
|
|
1b654ae35d | ||
|
|
3bd44afcaa | ||
|
|
8b8199775f | ||
|
|
744e556f7e | ||
|
|
8a3dcc6155 | ||
|
|
b0afbf3762 | ||
|
|
974be3bb30 | ||
|
|
d861e2bcd9 | ||
|
|
b9c2d1dc89 | ||
|
|
8ca17d1bb1 | ||
|
|
3182efeccc | ||
|
|
bec7c3272b | ||
|
|
e6bfed33ee | ||
|
|
053b264502 | ||
|
|
a8f25fa441 | ||
|
|
f450ce4f6e | ||
|
|
3ff8edf9e8 | ||
|
|
6b6040961c | ||
|
|
888f57657d | ||
|
|
be8fe690d8 | ||
|
|
c0b1710f8a | ||
|
|
7d3feeae9a | ||
|
|
3f85f65e3f | ||
|
|
df8b62fc50 | ||
|
|
14b4708018 | ||
|
|
dcef0cc0ee | ||
|
|
c1a0f48781 | ||
|
|
f6d17fdb04 | ||
|
|
829733a35e | ||
|
|
d64b863030 | ||
|
|
882aa867eb | ||
|
|
de92a9b4c9 | ||
|
|
5880df47e2 | ||
|
|
e92a7803eb | ||
|
|
0d656de98b | ||
|
|
b095968dff | ||
|
|
d592b16ac0 | ||
|
|
320c0865e9 | ||
|
|
60635e0729 | ||
|
|
120f74723e | ||
|
|
02508d5570 | ||
|
|
42e7e9c3e7 | ||
|
|
55b9a25c84 | ||
|
|
877782a8a4 | ||
|
|
0e1f715ef1 | ||
|
|
f3012a999d | ||
|
|
3072d35ed5 | ||
|
|
57e3a7b2ca | ||
|
|
0df2916920 | ||
|
|
61c21aa408 | ||
|
|
7eb23d9f3c | ||
|
|
db2d221ae9 | ||
|
|
67cb3cb881 | ||
|
|
12a31536e1 | ||
|
|
7657cc9d35 | ||
|
|
13ff3935ac | ||
|
|
361287fb56 | ||
|
|
13d4db8bb4 | ||
|
|
419a5a7305 | ||
|
|
c7b5f89392 | ||
|
|
d546e28abf | ||
|
|
86e2fd2337 | ||
|
|
ff255a2354 | ||
|
|
fe67521b3d | ||
|
|
f02d7e7e33 | ||
|
|
6a8676f335 | ||
|
|
944386d25f | ||
|
|
ef03a3f8fe | ||
|
|
18a5243933 | ||
|
|
8444cc3deb | ||
|
|
097c2d4f05 | ||
|
|
b8f52210f9 | ||
|
|
ecc787e221 | ||
|
|
6188010f53 | ||
|
|
70cde4a110 | ||
|
|
77363a65c2 | ||
|
|
605e580063 | ||
|
|
a2f2b16acb | ||
|
|
87d778a1c6 | ||
|
|
e344837e35 | ||
|
|
bdee2c2dd3 | ||
|
|
da5926d6f5 | ||
|
|
b3bc7701cd | ||
|
|
262f1782cc | ||
|
|
00d172bf9f | ||
|
|
ae54350c76 | ||
|
|
3355825a68 | ||
|
|
7d0e029e37 | ||
|
|
0f5ccc4aba | ||
|
|
0cf6e7c507 | ||
|
|
1dc4804b6e | ||
|
|
c5b22a1dc6 | ||
|
|
92efd26380 | ||
|
|
24c8feba3e | ||
|
|
122493b717 | ||
|
|
4edcca9850 | ||
|
|
be0c44f871 | ||
|
|
35b1356e96 | ||
|
|
443acf080a | ||
|
|
00e394f0f1 | ||
|
|
341665824c | ||
|
|
bd02be25d5 | ||
|
|
12a6400c63 | ||
|
|
d86e1b4f5e | ||
|
|
422866a437 | ||
|
|
5f7806f99e | ||
|
|
a9bce7c18a | ||
|
|
335bfabc60 | ||
|
|
e1b424c191 | ||
|
|
ea9c318afb | ||
|
|
9d66dbc28f | ||
|
|
8c22426223 | ||
|
|
90ee433c9b | ||
|
|
f26bd44a43 | ||
|
|
021d9c447d | ||
|
|
ce56465197 | ||
|
|
b0cdab85fe | ||
|
|
47c9256976 | ||
|
|
cf89840460 | ||
|
|
e3294c3faf | ||
|
|
95ab17e444 | ||
|
|
9b914885f1 | ||
|
|
92678f0fc5 | ||
|
|
a1640e4fe4 | ||
|
|
1be51c2129 | ||
|
|
6214f95e7e | ||
|
|
c89e40f008 | ||
|
|
8873c1aeff | ||
|
|
d543e2e50b | ||
|
|
e5c11d9efc | ||
|
|
d13657c40c | ||
|
|
804ef22075 | ||
|
|
fa28b430af | ||
|
|
311585d304 | ||
|
|
60c3bed6a4 | ||
|
|
6dfa79013f | ||
|
|
79ef484392 | ||
|
|
0ca8fe8c12 | ||
|
|
552ab537e8 | ||
|
|
0cf217179c | ||
|
|
48a5077035 | ||
|
|
a69367f739 | ||
|
|
1a0868c487 | ||
|
|
9be3e2bdd8 | ||
|
|
074e991280 | ||
|
|
f09dc3cf46 | ||
|
|
f222db57ce | ||
|
|
d29132512b | ||
|
|
22553b3372 | ||
|
|
46ea65c89b | ||
|
|
6a28a62369 | ||
|
|
db0d05fab3 | ||
|
|
317e2e74c2 | ||
|
|
04cfee5664 | ||
|
|
57c6105897 | ||
|
|
339bad2de4 | ||
|
|
31ff62445b | ||
|
|
c54bc441ba | ||
|
|
070b91628f | ||
|
|
9593737871 | ||
|
|
0269357c5a | ||
|
|
fd68c02072 | ||
|
|
39a67eec61 | ||
|
|
67f50b85f5 | ||
|
|
eedbec8f24 | ||
|
|
7ba7c4a8ce | ||
|
|
b6f45b0a2e | ||
|
|
3d26b8e500 | ||
|
|
46f7f860e6 | ||
|
|
2ae677162f | ||
|
|
4dbfb5b49a | ||
|
|
3a911d578c | ||
|
|
63f48afaeb | ||
|
|
ac39aed7c5 | ||
|
|
c4d02a5254 | ||
|
|
69c42450c3 | ||
|
|
b863b16454 | ||
|
|
0c35511aea | ||
|
|
06741d0d5d | ||
|
|
ca8b944b53 | ||
|
|
580cd9541a | ||
|
|
d60ef9ad0a | ||
|
|
821cf797f2 | ||
|
|
917895e6a3 | ||
|
|
6e53dcc8e1 | ||
|
|
56325d2a3b | ||
|
|
d1160cb820 | ||
|
|
5528cfee17 | ||
|
|
937dcf5fd1 | ||
|
|
57e2f41f42 | ||
|
|
3c1f02a7f9 | ||
|
|
907c7bc80b | ||
|
|
40a8678989 | ||
|
|
6451d2f65d | ||
|
|
e4e643086b | ||
|
|
e9e4316569 | ||
|
|
0719bd6ffb | ||
|
|
8d6d19de1b | ||
|
|
16502e19dd | ||
|
|
6b2ed39df6 | ||
|
|
d517af4c1a | ||
|
|
27e5147a5f | ||
|
|
e659dddad1 | ||
|
|
643f532a70 | ||
|
|
86d86628cb | ||
|
|
17854168d9 | ||
|
|
d287acfcc0 | ||
|
|
595299a3c2 | ||
|
|
3eabba637c | ||
|
|
cd12e66e67 | ||
|
|
1d0ebd1065 | ||
|
|
5fd818babe | ||
|
|
968d39328d | ||
|
|
310c0b86a7 | ||
|
|
cc5463ad44 | ||
|
|
f95a52df4a | ||
|
|
3b0273fc61 | ||
|
|
6cb9486b28 | ||
|
|
f44cbd407f | ||
|
|
0b836b2de0 | ||
|
|
151130a5df | ||
|
|
4559ec51f7 | ||
|
|
5cd6e747a0 | ||
|
|
5263cd0706 | ||
|
|
4535ea6aaa | ||
|
|
6e8ad7e5cc | ||
|
|
22d658518b | ||
|
|
f0cfaffd5e | ||
|
|
94996b26e5 | ||
|
|
f2e71ec95c | ||
|
|
da61d7cac5 | ||
|
|
9b23cd5394 | ||
|
|
b75196bb81 | ||
|
|
dbe88f8bbb | ||
|
|
f9a331a505 | ||
|
|
47beafb836 | ||
|
|
a12a79b366 | ||
|
|
62f93e221d | ||
|
|
07986471b3 | ||
|
|
dba8446d9e | ||
|
|
0c5cdcb161 | ||
|
|
8b3e023ef0 | ||
|
|
a0fd1ded97 | ||
|
|
5272b6b908 | ||
|
|
a866eb5dd0 | ||
|
|
90bdd63a71 | ||
|
|
ed2abf8609 | ||
|
|
d188bf6352 | ||
|
|
4db3edadf4 | ||
|
|
ca27fb5d43 | ||
|
|
e6fe91e2e7 | ||
|
|
b57b7213a9 | ||
|
|
ed91767663 | ||
|
|
489ad07e8b | ||
|
|
88e323ca43 | ||
|
|
721b0e8b11 | ||
|
|
ac171eb280 | ||
|
|
a1211d3fbd | ||
|
|
0be215e152 | ||
|
|
c12b4a1565 | ||
|
|
34ef1157a6 | ||
|
|
e0edfe1cb3 | ||
|
|
8387101a61 | ||
|
|
9fdf7213d4 | ||
|
|
b7f36a13ba | ||
|
|
66ea1a6dda | ||
|
|
298b6775c6 | ||
|
|
2e4b7d26b1 | ||
|
|
dab84a1b10 | ||
|
|
340df02655 | ||
|
|
99d8b58868 | ||
|
|
1152bf4c9d | ||
|
|
107bd800b0 | ||
|
|
e67f9f8f7a | ||
|
|
79177a1cd5 | ||
|
|
0a7274678a | ||
|
|
0d914c860a | ||
|
|
eb5ad7eb26 | ||
|
|
4602e60c1b | ||
|
|
1c640ac2c3 | ||
|
|
10f5a42fd7 | ||
|
|
fee50ad0ce | ||
|
|
be888a5fef | ||
|
|
86b9f81ad6 | ||
|
|
88e0b4cea4 | ||
|
|
7bc8f76667 | ||
|
|
0bb43f7afb | ||
|
|
99c121bfe8 | ||
|
|
c60264b87f | ||
|
|
f9f7ae0850 | ||
|
|
d45642c345 | ||
|
|
790dcc115f | ||
|
|
79a5e6a671 | ||
|
|
3a47bc4435 | ||
|
|
3294b04436 | ||
|
|
2c02f0767b | ||
|
|
6f7f9a3869 | ||
|
|
1704c604bf | ||
|
|
21b38004da | ||
|
|
aed8b8b32c | ||
|
|
1562b719d2 | ||
|
|
e749a97f9f | ||
|
|
3d9f4d1bd3 | ||
|
|
9f2dd2f377 | ||
|
|
d87921bb9c | ||
|
|
f7838748df | ||
|
|
7e7e376046 | ||
|
|
bbec252c51 | ||
|
|
9d619d6fdc | ||
|
|
49ae48f7fe | ||
|
|
46a0e94de7 | ||
|
|
2368af52ea | ||
|
|
d000698847 | ||
|
|
d641249f85 | ||
|
|
346080aad2 | ||
|
|
acfeed006a | ||
|
|
5e1daf0c41 | ||
|
|
7d0b94c008 | ||
|
|
d3c2fed4b3 | ||
|
|
32c88194f5 | ||
|
|
9ced6172de | ||
|
|
07b4c8d05d | ||
|
|
b787f31481 | ||
|
|
6850df969d | ||
|
|
62c53197c4 | ||
|
|
cd2cb661a4 | ||
|
|
8e90799e27 | ||
|
|
d810217e63 | ||
|
|
8676eda663 | ||
|
|
8c61bbdb73 | ||
|
|
99988b7081 | ||
|
|
436162ae7c | ||
|
|
c3012a7d8a | ||
|
|
f0165dee92 | ||
|
|
dff828cdbe | ||
|
|
c18d019db0 | ||
|
|
25c601bd2f | ||
|
|
85ab2929e9 | ||
|
|
86b2c939c7 | ||
|
|
e3b85fd0d5 | ||
|
|
0f79c61188 | ||
|
|
618b9dd66e | ||
|
|
ccc707152a | ||
|
|
aeab755033 | ||
|
|
912167a9cd | ||
|
|
42e2bba8d6 | ||
|
|
56c2fe59cb | ||
|
|
c7f877de96 | ||
|
|
aa11c198af | ||
|
|
f8f93c1ec1 | ||
|
|
f04fab9b7a | ||
|
|
854f527f6e | ||
|
|
c740f244ba | ||
|
|
5ed2c77b59 | ||
|
|
ad0f953c21 | ||
|
|
3c56f53105 | ||
|
|
d2e06bf130 | ||
|
|
36d93aeff3 | ||
|
|
1e7d77c517 | ||
|
|
81849352fc | ||
|
|
01d8a39c0b | ||
|
|
1a3b0375fa | ||
|
|
59bcffe83b | ||
|
|
45318922d8 | ||
|
|
1a2167b155 | ||
|
|
8938309f4b | ||
|
|
51a800b7df | ||
|
|
a6cd6abcfb | ||
|
|
b09a15eb54 | ||
|
|
a9818e4b17 | ||
|
|
be8b3e282a | ||
|
|
259e34435f | ||
|
|
eaa10b279f | ||
|
|
8a15b881fd | ||
|
|
f475182c7d | ||
|
|
4c51f27b0a | ||
|
|
64760ffa76 | ||
|
|
02fff92b91 | ||
|
|
876d5a96bf | ||
|
|
e8d3d01598 | ||
|
|
889015f03b | ||
|
|
a2c98b4b5f | ||
|
|
46309a1f95 | ||
|
|
e1f6e38b57 | ||
|
|
9772270868 | ||
|
|
0fdf569571 | ||
|
|
a6e530b33d | ||
|
|
6cabd9e67f | ||
|
|
ac13c86675 | ||
|
|
50eaf5befd | ||
|
|
0220d9d93d | ||
|
|
88ac3abaa1 | ||
|
|
ad133ecb38 | ||
|
|
6ba73fd888 | ||
|
|
8bd923ab9e | ||
|
|
50622f71f8 | ||
|
|
2ab9c9b590 | ||
|
|
52b719f8fb | ||
|
|
135eeded02 | ||
|
|
0bdf698673 | ||
|
|
e7f9160867 | ||
|
|
ca64399f9f | ||
|
|
19148eaa0d | ||
|
|
b8ba76b1ae | ||
|
|
f91255a201 | ||
|
|
06537fda83 | ||
|
|
299f8a9fb9 | ||
|
|
4339b0fe05 | ||
|
|
08bb6f44a4 | ||
|
|
31b9717ca3 | ||
|
|
52a792384f | ||
|
|
b1d2b7cfb8 | ||
|
|
cc634236b1 | ||
|
|
91274f47e4 | ||
|
|
bfc5d1180c | ||
|
|
21c658a12c | ||
|
|
e057299b0d | ||
|
|
3056dc5be4 | ||
|
|
0191e94226 | ||
|
|
dcdd5bc372 | ||
|
|
d0e0e0322c | ||
|
|
ca7ff033db | ||
|
|
d4d8ed32b3 | ||
|
|
635c49909c | ||
|
|
70cf2a7a22 | ||
|
|
74e6641afc | ||
|
|
c7475e4bf3 | ||
|
|
eee41925aa | ||
|
|
69d553c82a | ||
|
|
043e0c65ec | ||
|
|
4dffb818e2 | ||
|
|
4514751803 | ||
|
|
27fa1088b9 | ||
|
|
853ca46899 | ||
|
|
21390af2dd | ||
|
|
45a281c962 | ||
|
|
c7c1614278 | ||
|
|
c085a772cf | ||
|
|
9fdf685dd5 | ||
|
|
733d7513af | ||
|
|
b341073192 | ||
|
|
92c5d3b8e2 | ||
|
|
abc5c6bc50 | ||
|
|
52d2865365 | ||
|
|
89b04babfb | ||
|
|
755f77231c | ||
|
|
3d4d13ea1e | ||
|
|
4ec8aacaec | ||
|
|
0f949168ef | ||
|
|
71941d8bda | ||
|
|
521da2b0a7 | ||
|
|
37a9ecd5b7 | ||
|
|
ee8d529552 | ||
|
|
dac6300558 | ||
|
|
d05e4ae7ff | ||
|
|
e3db212b0b | ||
|
|
f0e7ac2f18 | ||
|
|
e08fe3b993 | ||
|
|
a380e1a259 | ||
|
|
17471bdfcc | ||
|
|
5985690d45 | ||
|
|
07a1b7fc00 | ||
|
|
12aff698b9 | ||
|
|
276accc210 | ||
|
|
cc3994928c | ||
|
|
efd212ee46 | ||
|
|
ec3a889758 | ||
|
|
1850762118 | ||
|
|
0b28226615 | ||
|
|
a00fab7dc4 | ||
|
|
11071ed682 | ||
|
|
066fd77d39 | ||
|
|
8138dba800 | ||
|
|
b749c02eb5 | ||
|
|
1fdd1d250c | ||
|
|
998fb34d15 | ||
|
|
cb30ec5b17 | ||
|
|
ab7eb70a3c | ||
|
|
3b67861def | ||
|
|
b26b7f8d62 | ||
|
|
2bd400dcee | ||
|
|
dedcc5255a | ||
|
|
14b8ead412 | ||
|
|
f0571b1e33 | ||
|
|
8e79929cb8 | ||
|
|
0a33135483 | ||
|
|
a40265cbeb | ||
|
|
74d7ca8582 | ||
|
|
1a80a1f413 | ||
|
|
f95e0e3a56 | ||
|
|
14647b2a38 | ||
|
|
5311904619 | ||
|
|
60b76c7834 | ||
|
|
2bf04d1f04 |
422 changed files with 21219 additions and 13790 deletions
|
|
@ -10,6 +10,9 @@ insert_final_newline = true
|
||||||
[*.rs]
|
[*.rs]
|
||||||
indent_size = 4
|
indent_size = 4
|
||||||
|
|
||||||
|
[*.{zig,zon}]
|
||||||
|
indent_size = 4
|
||||||
|
|
||||||
[Makefile]
|
[Makefile]
|
||||||
indent_style = tab
|
indent_style = tab
|
||||||
indent_size = 8
|
indent_size = 8
|
||||||
|
|
|
||||||
1
.envrc
Normal file
1
.envrc
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
use flake
|
||||||
1
.gitattributes
vendored
1
.gitattributes
vendored
|
|
@ -3,5 +3,4 @@
|
||||||
/lib/src/unicode/*.h linguist-vendored
|
/lib/src/unicode/*.h linguist-vendored
|
||||||
/lib/src/unicode/LICENSE linguist-vendored
|
/lib/src/unicode/LICENSE linguist-vendored
|
||||||
|
|
||||||
/cli/src/generate/prepare_grammar/*.json -diff
|
|
||||||
Cargo.lock -diff
|
Cargo.lock -diff
|
||||||
|
|
|
||||||
2
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
2
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
|
|
@ -1,6 +1,6 @@
|
||||||
name: Bug Report
|
name: Bug Report
|
||||||
description: Report a problem
|
description: Report a problem
|
||||||
labels: [bug]
|
type: Bug
|
||||||
body:
|
body:
|
||||||
- type: textarea
|
- type: textarea
|
||||||
attributes:
|
attributes:
|
||||||
|
|
|
||||||
2
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
2
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
|
|
@ -1,6 +1,6 @@
|
||||||
name: Feature request
|
name: Feature request
|
||||||
description: Request an enhancement
|
description: Request an enhancement
|
||||||
labels: [enhancement]
|
type: Feature
|
||||||
body:
|
body:
|
||||||
- type: markdown
|
- type: markdown
|
||||||
attributes:
|
attributes:
|
||||||
|
|
|
||||||
3
.github/actions/cache/action.yml
vendored
3
.github/actions/cache/action.yml
vendored
|
|
@ -17,10 +17,9 @@ runs:
|
||||||
test/fixtures/grammars
|
test/fixtures/grammars
|
||||||
target/release/tree-sitter-*.wasm
|
target/release/tree-sitter-*.wasm
|
||||||
key: fixtures-${{ join(matrix.*, '_') }}-${{ hashFiles(
|
key: fixtures-${{ join(matrix.*, '_') }}-${{ hashFiles(
|
||||||
'cli/generate/src/**',
|
'crates/generate/src/**',
|
||||||
'lib/src/parser.h',
|
'lib/src/parser.h',
|
||||||
'lib/src/array.h',
|
'lib/src/array.h',
|
||||||
'lib/src/alloc.h',
|
'lib/src/alloc.h',
|
||||||
'xtask/src/*',
|
|
||||||
'test/fixtures/grammars/*/**/src/*.c',
|
'test/fixtures/grammars/*/**/src/*.c',
|
||||||
'.github/actions/cache/action.yml') }}
|
'.github/actions/cache/action.yml') }}
|
||||||
|
|
|
||||||
27
.github/dependabot.yml
vendored
27
.github/dependabot.yml
vendored
|
|
@ -4,6 +4,8 @@ updates:
|
||||||
directory: "/"
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
|
cooldown:
|
||||||
|
default-days: 3
|
||||||
commit-message:
|
commit-message:
|
||||||
prefix: "build(deps)"
|
prefix: "build(deps)"
|
||||||
labels:
|
labels:
|
||||||
|
|
@ -12,10 +14,16 @@ updates:
|
||||||
groups:
|
groups:
|
||||||
cargo:
|
cargo:
|
||||||
patterns: ["*"]
|
patterns: ["*"]
|
||||||
|
ignore:
|
||||||
|
- dependency-name: "*"
|
||||||
|
update-types: ["version-update:semver-major", "version-update:semver-minor"]
|
||||||
|
|
||||||
- package-ecosystem: "github-actions"
|
- package-ecosystem: "github-actions"
|
||||||
directory: "/"
|
directory: "/"
|
||||||
schedule:
|
schedule:
|
||||||
interval: "weekly"
|
interval: "weekly"
|
||||||
|
cooldown:
|
||||||
|
default-days: 3
|
||||||
commit-message:
|
commit-message:
|
||||||
prefix: "ci"
|
prefix: "ci"
|
||||||
labels:
|
labels:
|
||||||
|
|
@ -24,3 +32,22 @@ updates:
|
||||||
groups:
|
groups:
|
||||||
actions:
|
actions:
|
||||||
patterns: ["*"]
|
patterns: ["*"]
|
||||||
|
|
||||||
|
- package-ecosystem: "npm"
|
||||||
|
versioning-strategy: increase
|
||||||
|
directories:
|
||||||
|
- "/crates/npm"
|
||||||
|
- "/crates/eslint"
|
||||||
|
- "/lib/binding_web"
|
||||||
|
schedule:
|
||||||
|
interval: "weekly"
|
||||||
|
cooldown:
|
||||||
|
default-days: 3
|
||||||
|
commit-message:
|
||||||
|
prefix: "build(deps)"
|
||||||
|
labels:
|
||||||
|
- "dependencies"
|
||||||
|
- "npm"
|
||||||
|
groups:
|
||||||
|
npm:
|
||||||
|
patterns: ["*"]
|
||||||
|
|
|
||||||
29
.github/scripts/close_spam.js
vendored
Normal file
29
.github/scripts/close_spam.js
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
module.exports = async ({ github, context }) => {
|
||||||
|
let target = context.payload.issue;
|
||||||
|
if (target) {
|
||||||
|
await github.rest.issues.update({
|
||||||
|
...context.repo,
|
||||||
|
issue_number: target.number,
|
||||||
|
state: "closed",
|
||||||
|
state_reason: "not_planned",
|
||||||
|
title: "[spam]",
|
||||||
|
body: "",
|
||||||
|
type: null,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
target = context.payload.pull_request;
|
||||||
|
await github.rest.pulls.update({
|
||||||
|
...context.repo,
|
||||||
|
pull_number: target.number,
|
||||||
|
state: "closed",
|
||||||
|
title: "[spam]",
|
||||||
|
body: "",
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
await github.rest.issues.lock({
|
||||||
|
...context.repo,
|
||||||
|
issue_number: target.number,
|
||||||
|
lock_reason: "spam",
|
||||||
|
});
|
||||||
|
};
|
||||||
3
.github/scripts/cross.sh
vendored
3
.github/scripts/cross.sh
vendored
|
|
@ -1,3 +0,0 @@
|
||||||
#!/bin/bash -eu
|
|
||||||
|
|
||||||
exec docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
|
|
||||||
9
.github/scripts/make.sh
vendored
9
.github/scripts/make.sh
vendored
|
|
@ -1,9 +0,0 @@
|
||||||
#!/bin/bash -eu
|
|
||||||
|
|
||||||
tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
|
|
||||||
|
|
||||||
if [[ $BUILD_CMD == cross ]]; then
|
|
||||||
cross.sh make CC="$CC" AR="$AR" "$@"
|
|
||||||
else
|
|
||||||
exec make "$@"
|
|
||||||
fi
|
|
||||||
9
.github/scripts/tree-sitter.sh
vendored
9
.github/scripts/tree-sitter.sh
vendored
|
|
@ -1,9 +0,0 @@
|
||||||
#!/bin/bash -eu
|
|
||||||
|
|
||||||
tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
|
|
||||||
|
|
||||||
if [[ $BUILD_CMD == cross ]]; then
|
|
||||||
cross.sh "$CROSS_RUNNER" "$tree_sitter" "$@"
|
|
||||||
else
|
|
||||||
exec "$tree_sitter" "$@"
|
|
||||||
fi
|
|
||||||
25
.github/scripts/wasm_stdlib.js
vendored
Normal file
25
.github/scripts/wasm_stdlib.js
vendored
Normal file
|
|
@ -0,0 +1,25 @@
|
||||||
|
module.exports = async ({ github, context, core }) => {
|
||||||
|
if (context.eventName !== 'pull_request') return;
|
||||||
|
|
||||||
|
const prNumber = context.payload.pull_request.number;
|
||||||
|
const owner = context.repo.owner;
|
||||||
|
const repo = context.repo.repo;
|
||||||
|
|
||||||
|
const { data: files } = await github.rest.pulls.listFiles({
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
pull_number: prNumber
|
||||||
|
});
|
||||||
|
|
||||||
|
const changedFiles = files.map(file => file.filename);
|
||||||
|
|
||||||
|
const wasmStdLibSrc = 'crates/language/wasm/';
|
||||||
|
const dirChanged = changedFiles.some(file => file.startsWith(wasmStdLibSrc));
|
||||||
|
|
||||||
|
if (!dirChanged) return;
|
||||||
|
|
||||||
|
const wasmStdLibHeader = 'lib/src/wasm/wasm-stdlib.h';
|
||||||
|
const requiredChanged = changedFiles.includes(wasmStdLibHeader);
|
||||||
|
|
||||||
|
if (!requiredChanged) core.setFailed(`Changes detected in ${wasmStdLibSrc} but ${wasmStdLibHeader} was not modified.`);
|
||||||
|
};
|
||||||
6
.github/workflows/backport.yml
vendored
6
.github/workflows/backport.yml
vendored
|
|
@ -14,17 +14,17 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Create app token
|
- name: Create app token
|
||||||
uses: actions/create-github-app-token@v1
|
uses: actions/create-github-app-token@v2
|
||||||
id: app-token
|
id: app-token
|
||||||
with:
|
with:
|
||||||
app-id: ${{ vars.BACKPORT_APP }}
|
app-id: ${{ vars.BACKPORT_APP }}
|
||||||
private-key: ${{ secrets.BACKPORT_KEY }}
|
private-key: ${{ secrets.BACKPORT_KEY }}
|
||||||
|
|
||||||
- name: Create backport PR
|
- name: Create backport PR
|
||||||
uses: korthout/backport-action@v3
|
uses: korthout/backport-action@v4
|
||||||
with:
|
with:
|
||||||
pull_title: "${pull_title}"
|
pull_title: "${pull_title}"
|
||||||
label_pattern: "^ci:backport ([^ ]+)$"
|
label_pattern: "^ci:backport ([^ ]+)$"
|
||||||
|
|
|
||||||
2
.github/workflows/bindgen.yml
vendored
2
.github/workflows/bindgen.yml
vendored
|
|
@ -16,7 +16,7 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up stable Rust toolchain
|
- name: Set up stable Rust toolchain
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||||
|
|
|
||||||
297
.github/workflows/build.yml
vendored
297
.github/workflows/build.yml
vendored
|
|
@ -1,10 +1,5 @@
|
||||||
name: Build & Test
|
name: Build & Test
|
||||||
|
|
||||||
env:
|
|
||||||
CARGO_TERM_COLOR: always
|
|
||||||
RUSTFLAGS: "-D warnings"
|
|
||||||
CROSS_DEBUG: 1
|
|
||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_call:
|
workflow_call:
|
||||||
inputs:
|
inputs:
|
||||||
|
|
@ -31,38 +26,41 @@ jobs:
|
||||||
- windows-x86
|
- windows-x86
|
||||||
- macos-arm64
|
- macos-arm64
|
||||||
- macos-x64
|
- macos-x64
|
||||||
|
- wasm32
|
||||||
|
|
||||||
include:
|
include:
|
||||||
# When adding a new `target`:
|
# When adding a new `target`:
|
||||||
# 1. Define a new platform alias above
|
# 1. Define a new platform alias above
|
||||||
# 2. Add a new record to the matrix map in `cli/npm/install.js`
|
# 2. Add a new record to the matrix map in `crates/cli/npm/install.js`
|
||||||
- { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
|
- { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-24.04-arm }
|
||||||
- { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
|
- { platform: linux-arm , target: armv7-unknown-linux-gnueabihf , os: ubuntu-24.04-arm }
|
||||||
- { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-22.04 , features: wasm }
|
- { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-24.04 }
|
||||||
- { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
|
- { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-24.04 }
|
||||||
- { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
|
- { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-24.04 }
|
||||||
- { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
|
- { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-11-arm }
|
||||||
- { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , features: wasm }
|
- { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-2025 }
|
||||||
- { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
|
- { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-2025 }
|
||||||
- { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-latest , features: wasm }
|
- { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-15 }
|
||||||
- { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-13 , features: wasm }
|
- { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-15-intel }
|
||||||
|
- { platform: wasm32 , target: wasm32-unknown-unknown , os: ubuntu-24.04 }
|
||||||
|
|
||||||
# Cross compilers for C library
|
# Extra features
|
||||||
- { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar }
|
- { platform: linux-arm64 , features: wasm }
|
||||||
- { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar }
|
- { platform: linux-x64 , features: wasm }
|
||||||
- { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar }
|
- { platform: macos-arm64 , features: wasm }
|
||||||
- { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar }
|
- { platform: macos-x64 , features: wasm }
|
||||||
|
|
||||||
# Prevent race condition (see #2041)
|
# Cross-compilation
|
||||||
- { platform: windows-x64 , rust-test-threads: 1 }
|
- { platform: linux-arm , cross: true }
|
||||||
- { platform: windows-x86 , rust-test-threads: 1 }
|
- { platform: linux-x86 , cross: true }
|
||||||
|
- { platform: linux-powerpc64 , cross: true }
|
||||||
|
|
||||||
# Can't natively run CLI on Github runner's host
|
# Compile-only
|
||||||
- { platform: windows-arm64 , no-run: true }
|
- { platform: wasm32 , no-run: true }
|
||||||
|
|
||||||
env:
|
env:
|
||||||
BUILD_CMD: cargo
|
CARGO_TERM_COLOR: always
|
||||||
SUFFIX: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}
|
RUSTFLAGS: -D warnings
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
|
|
@ -70,13 +68,28 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Read Emscripten version
|
- name: Set up cross-compilation
|
||||||
run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<cli/loader/emscripten-version)" >> $GITHUB_ENV
|
if: matrix.cross
|
||||||
|
run: |
|
||||||
|
for target in armv7-unknown-linux-gnueabihf i686-unknown-linux-gnu powerpc64-unknown-linux-gnu; do
|
||||||
|
camel_target=${target//-/_}; target_cc=${target/-unknown/}
|
||||||
|
printf 'CC_%s=%s\n' "$camel_target" "${target_cc/v7/}-gcc"
|
||||||
|
printf 'AR_%s=%s\n' "$camel_target" "${target_cc/v7/}-ar"
|
||||||
|
printf 'CARGO_TARGET_%s_LINKER=%s\n' "${camel_target^^}" "${target_cc/v7/}-gcc"
|
||||||
|
done >> $GITHUB_ENV
|
||||||
|
{
|
||||||
|
printf 'CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER=qemu-arm -L /usr/arm-linux-gnueabihf\n'
|
||||||
|
printf 'CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER=qemu-ppc64 -L /usr/powerpc64-linux-gnu\n'
|
||||||
|
} >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Get emscripten version
|
||||||
|
if: contains(matrix.features, 'wasm')
|
||||||
|
run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<crates/loader/emscripten-version)" >> $GITHUB_ENV
|
||||||
|
|
||||||
- name: Install Emscripten
|
- name: Install Emscripten
|
||||||
if: ${{ !matrix.no-run && !matrix.use-cross }}
|
if: contains(matrix.features, 'wasm')
|
||||||
uses: mymindstorm/setup-emsdk@v14
|
uses: mymindstorm/setup-emsdk@v14
|
||||||
with:
|
with:
|
||||||
version: ${{ env.EMSCRIPTEN_VERSION }}
|
version: ${{ env.EMSCRIPTEN_VERSION }}
|
||||||
|
|
@ -86,61 +99,82 @@ jobs:
|
||||||
with:
|
with:
|
||||||
target: ${{ matrix.target }}
|
target: ${{ matrix.target }}
|
||||||
|
|
||||||
- name: Install cross
|
- name: Install cross-compilation toolchain
|
||||||
if: ${{ matrix.use-cross }}
|
if: matrix.cross
|
||||||
run: |
|
run: |
|
||||||
if [ ! -x "$(command -v cross)" ]; then
|
sudo apt-get update -qy
|
||||||
# TODO: Remove 'RUSTFLAGS=""' once https://github.com/cross-rs/cross/issues/1561 is resolved
|
if [[ $PLATFORM == linux-arm ]]; then
|
||||||
RUSTFLAGS="" cargo install cross --git https://github.com/cross-rs/cross
|
sudo apt-get install -qy {binutils,gcc}-arm-linux-gnueabihf qemu-user
|
||||||
|
elif [[ $PLATFORM == linux-x86 ]]; then
|
||||||
|
sudo apt-get install -qy {binutils,gcc}-i686-linux-gnu
|
||||||
|
elif [[ $PLATFORM == linux-powerpc64 ]]; then
|
||||||
|
sudo apt-get install -qy {binutils,gcc}-powerpc64-linux-gnu qemu-user
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Configure cross
|
|
||||||
if: ${{ matrix.use-cross }}
|
|
||||||
run: |
|
|
||||||
printf '%s\n' > Cross.toml \
|
|
||||||
'[target.${{ matrix.target }}]' \
|
|
||||||
'image = "ghcr.io/cross-rs/${{ matrix.target }}:edge"' \
|
|
||||||
'[build]' \
|
|
||||||
'pre-build = [' \
|
|
||||||
' "dpkg --add-architecture $CROSS_DEB_ARCH",' \
|
|
||||||
' "curl -fsSL https://deb.nodesource.com/setup_22.x | bash -",' \
|
|
||||||
' "apt-get update && apt-get -y install libssl-dev nodejs"' \
|
|
||||||
']'
|
|
||||||
cat - Cross.toml <<< 'Cross.toml:'
|
|
||||||
printf '%s\n' >> $GITHUB_ENV \
|
|
||||||
"CROSS_CONFIG=$PWD/Cross.toml" \
|
|
||||||
"CROSS_IMAGE=ghcr.io/cross-rs/${{ matrix.target }}:edge"
|
|
||||||
|
|
||||||
- name: Set up environment
|
|
||||||
env:
|
env:
|
||||||
RUST_TEST_THREADS: ${{ matrix.rust-test-threads }}
|
PLATFORM: ${{ matrix.platform }}
|
||||||
USE_CROSS: ${{ matrix.use-cross }}
|
|
||||||
TARGET: ${{ matrix.target }}
|
- name: Install MinGW and Clang (Windows x64 MSYS2)
|
||||||
CC: ${{ matrix.cc }}
|
if: matrix.platform == 'windows-x64'
|
||||||
AR: ${{ matrix.ar }}
|
uses: msys2/setup-msys2@v2
|
||||||
|
with:
|
||||||
|
update: true
|
||||||
|
install: |
|
||||||
|
mingw-w64-x86_64-toolchain
|
||||||
|
mingw-w64-x86_64-clang
|
||||||
|
mingw-w64-x86_64-make
|
||||||
|
mingw-w64-x86_64-cmake
|
||||||
|
|
||||||
|
# TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
|
||||||
|
# the `mismatched-lifetime-syntaxes` lint
|
||||||
|
- name: Build wasmtime library (Windows x64 MSYS2)
|
||||||
|
if: contains(matrix.features, 'wasm') && matrix.platform == 'windows-x64'
|
||||||
run: |
|
run: |
|
||||||
PATH="$PWD/.github/scripts:$PATH"
|
mkdir -p target
|
||||||
printf '%s/.github/scripts\n' "$PWD" >> $GITHUB_PATH
|
WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
|
||||||
|
jq -r '.packages[] | select(.name == "wasmtime-c-api-impl") | .version')
|
||||||
|
curl -LSs "$WASMTIME_REPO/archive/refs/tags/v${WASMTIME_VERSION}.tar.gz" | tar xzf - -C target
|
||||||
|
cd target/wasmtime-${WASMTIME_VERSION}
|
||||||
|
cmake -S crates/c-api -B target/c-api \
|
||||||
|
-DCMAKE_INSTALL_PREFIX="$PWD/artifacts" \
|
||||||
|
-DWASMTIME_DISABLE_ALL_FEATURES=ON \
|
||||||
|
-DWASMTIME_FEATURE_CRANELIFT=ON \
|
||||||
|
-DWASMTIME_TARGET='x86_64-pc-windows-gnu'
|
||||||
|
cmake --build target/c-api && cmake --install target/c-api
|
||||||
|
printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
|
||||||
|
env:
|
||||||
|
WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
|
||||||
|
RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow
|
||||||
|
|
||||||
printf '%s\n' >> $GITHUB_ENV \
|
- name: Build C library (Windows x64 MSYS2 CMake)
|
||||||
'TREE_SITTER=tree-sitter.sh' \
|
if: matrix.platform == 'windows-x64'
|
||||||
"TARGET=$TARGET" \
|
shell: msys2 {0}
|
||||||
"ROOT=$PWD"
|
run: |
|
||||||
|
cmake -G Ninja -S . -B build/static \
|
||||||
|
-DBUILD_SHARED_LIBS=OFF \
|
||||||
|
-DCMAKE_BUILD_TYPE=Debug \
|
||||||
|
-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
|
||||||
|
-DTREE_SITTER_FEATURE_WASM=$WASM \
|
||||||
|
-DCMAKE_C_COMPILER=clang
|
||||||
|
cmake --build build/static
|
||||||
|
|
||||||
[[ -n $RUST_TEST_THREADS ]] && \
|
cmake -G Ninja -S . -B build/shared \
|
||||||
printf 'RUST_TEST_THREADS=%s\n' "$RUST_TEST_THREADS" >> $GITHUB_ENV
|
-DBUILD_SHARED_LIBS=ON \
|
||||||
|
-DCMAKE_BUILD_TYPE=Debug \
|
||||||
[[ -n $CC ]] && printf 'CC=%s\n' "$CC" >> $GITHUB_ENV
|
-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
|
||||||
[[ -n $AR ]] && printf 'AR=%s\n' "$AR" >> $GITHUB_ENV
|
-DTREE_SITTER_FEATURE_WASM=$WASM \
|
||||||
|
-DCMAKE_C_COMPILER=clang
|
||||||
if [[ $USE_CROSS == true ]]; then
|
cmake --build build/shared
|
||||||
printf 'BUILD_CMD=cross\n' >> $GITHUB_ENV
|
rm -rf \
|
||||||
runner=$(cross.sh bash -c "env | sed -n 's/^CARGO_TARGET_.*_RUNNER=//p'")
|
build/{static,shared} \
|
||||||
[[ -n $runner ]] && printf 'CROSS_RUNNER=%s\n' "$runner" >> $GITHUB_ENV
|
"${CMAKE_PREFIX_PATH}/artifacts" \
|
||||||
fi
|
target/wasmtime-${WASMTIME_VERSION}
|
||||||
|
env:
|
||||||
|
WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
|
||||||
|
|
||||||
|
# TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
|
||||||
|
# the `mismatched-lifetime-syntaxes` lint
|
||||||
- name: Build wasmtime library
|
- name: Build wasmtime library
|
||||||
if: ${{ !matrix.use-cross && contains(matrix.features, 'wasm') }}
|
if: contains(matrix.features, 'wasm')
|
||||||
run: |
|
run: |
|
||||||
mkdir -p target
|
mkdir -p target
|
||||||
WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
|
WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
|
||||||
|
|
@ -156,36 +190,47 @@ jobs:
|
||||||
printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
|
printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
|
||||||
env:
|
env:
|
||||||
WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
|
WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
|
||||||
|
RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow
|
||||||
|
|
||||||
- name: Build C library (make)
|
- name: Build C library (make)
|
||||||
if: ${{ runner.os != 'Windows' }}
|
if: runner.os != 'Windows'
|
||||||
run: make.sh -j CFLAGS="$CFLAGS"
|
run: |
|
||||||
|
if [[ $PLATFORM == linux-arm ]]; then
|
||||||
|
CC=arm-linux-gnueabihf-gcc; AR=arm-linux-gnueabihf-ar
|
||||||
|
elif [[ $PLATFORM == linux-x86 ]]; then
|
||||||
|
CC=i686-linux-gnu-gcc; AR=i686-linux-gnu-ar
|
||||||
|
elif [[ $PLATFORM == linux-powerpc64 ]]; then
|
||||||
|
CC=powerpc64-linux-gnu-gcc; AR=powerpc64-linux-gnu-ar
|
||||||
|
else
|
||||||
|
CC=gcc; AR=ar
|
||||||
|
fi
|
||||||
|
make -j CFLAGS="$CFLAGS" CC=$CC AR=$AR
|
||||||
env:
|
env:
|
||||||
|
PLATFORM: ${{ matrix.platform }}
|
||||||
CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
|
CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
|
||||||
|
|
||||||
- name: Build C library (CMake)
|
- name: Build C library (CMake)
|
||||||
if: ${{ !matrix.use-cross }}
|
if: "!matrix.cross"
|
||||||
run: |
|
run: |
|
||||||
cmake -S lib -B build/static \
|
cmake -S . -B build/static \
|
||||||
-DBUILD_SHARED_LIBS=OFF \
|
-DBUILD_SHARED_LIBS=OFF \
|
||||||
-DCMAKE_BUILD_TYPE=Debug \
|
-DCMAKE_BUILD_TYPE=Debug \
|
||||||
-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
|
-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
|
||||||
-DTREE_SITTER_FEATURE_WASM=$WASM
|
-DTREE_SITTER_FEATURE_WASM=$WASM
|
||||||
cmake --build build/static --verbose
|
cmake --build build/static --verbose
|
||||||
|
|
||||||
cmake -S lib -B build/shared \
|
cmake -S . -B build/shared \
|
||||||
-DBUILD_SHARED_LIBS=ON \
|
-DBUILD_SHARED_LIBS=ON \
|
||||||
-DCMAKE_BUILD_TYPE=Debug \
|
-DCMAKE_BUILD_TYPE=Debug \
|
||||||
-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
|
-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
|
||||||
-DTREE_SITTER_FEATURE_WASM=$WASM
|
-DTREE_SITTER_FEATURE_WASM=$WASM
|
||||||
cmake --build build/shared --verbose
|
cmake --build build/shared --verbose
|
||||||
env:
|
env:
|
||||||
CC: ${{ contains(matrix.target, 'linux') && 'clang' || '' }}
|
CC: ${{ contains(matrix.platform, 'linux') && 'clang' || '' }}
|
||||||
WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
|
WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
|
||||||
|
|
||||||
- name: Build wasm library
|
- name: Build Wasm library
|
||||||
# No reason to build on the same Github runner hosts many times
|
if: contains(matrix.features, 'wasm')
|
||||||
if: ${{ !matrix.no-run && !matrix.use-cross }}
|
|
||||||
shell: bash
|
shell: bash
|
||||||
run: |
|
run: |
|
||||||
cd lib/binding_web
|
cd lib/binding_web
|
||||||
|
|
@ -195,64 +240,72 @@ jobs:
|
||||||
npm run build
|
npm run build
|
||||||
npm run build:debug
|
npm run build:debug
|
||||||
|
|
||||||
|
- name: Check no_std builds
|
||||||
|
if: inputs.run-test && !matrix.no-run
|
||||||
|
working-directory: lib
|
||||||
|
shell: bash
|
||||||
|
run: cargo check --no-default-features --target='${{ matrix.target }}'
|
||||||
|
|
||||||
- name: Build target
|
- name: Build target
|
||||||
run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.features }}
|
run: cargo build --release --target='${{ matrix.target }}' --features='${{ matrix.features }}' $PACKAGE
|
||||||
|
env:
|
||||||
|
PACKAGE: ${{ matrix.platform == 'wasm32' && '-p tree-sitter' || '' }}
|
||||||
|
|
||||||
- name: Cache fixtures
|
- name: Cache fixtures
|
||||||
id: cache
|
id: cache
|
||||||
if: ${{ !matrix.no-run && inputs.run-test }}
|
if: inputs.run-test && !matrix.no-run
|
||||||
uses: ./.github/actions/cache
|
uses: ./.github/actions/cache
|
||||||
|
|
||||||
- name: Fetch fixtures
|
- name: Fetch fixtures
|
||||||
if: ${{ !matrix.no-run && inputs.run-test }}
|
if: inputs.run-test && !matrix.no-run
|
||||||
run: $BUILD_CMD run -p xtask -- fetch-fixtures
|
run: cargo run -p xtask --target='${{ matrix.target }}' -- fetch-fixtures
|
||||||
|
|
||||||
- name: Generate fixtures
|
- name: Generate fixtures
|
||||||
if: ${{ !matrix.no-run && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
|
if: inputs.run-test && !matrix.no-run && steps.cache.outputs.cache-hit != 'true'
|
||||||
run: $BUILD_CMD run -p xtask -- generate-fixtures
|
run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures
|
||||||
|
|
||||||
- name: Generate Wasm fixtures
|
- name: Generate Wasm fixtures
|
||||||
if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
|
if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm') && steps.cache.outputs.cache-hit != 'true'
|
||||||
run: $BUILD_CMD run -p xtask -- generate-fixtures --wasm
|
run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures --wasm
|
||||||
|
|
||||||
- name: Run main tests
|
- name: Run main tests
|
||||||
if: ${{ !matrix.no-run && inputs.run-test }}
|
if: inputs.run-test && !matrix.no-run
|
||||||
run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.features }}
|
run: cargo test --target='${{ matrix.target }}' --features='${{ matrix.features }}'
|
||||||
|
|
||||||
- name: Run wasm tests
|
- name: Run Wasm tests
|
||||||
if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
|
if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm')
|
||||||
run: $BUILD_CMD run -p xtask -- test-wasm
|
run: cargo run -p xtask --target='${{ matrix.target }}' -- test-wasm
|
||||||
|
|
||||||
- name: Run benchmarks
|
|
||||||
# Cross-compiled benchmarks are pointless
|
|
||||||
if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
|
|
||||||
run: $BUILD_CMD bench benchmark -p tree-sitter-cli --target=${{ matrix.target }}
|
|
||||||
|
|
||||||
- name: Upload CLI artifact
|
- name: Upload CLI artifact
|
||||||
uses: actions/upload-artifact@v4
|
if: "!matrix.no-run"
|
||||||
|
uses: actions/upload-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: tree-sitter.${{ matrix.platform }}
|
name: tree-sitter.${{ matrix.platform }}
|
||||||
path: target/${{ matrix.target }}/release/tree-sitter${{ env.SUFFIX }}
|
path: target/${{ matrix.target }}/release/tree-sitter${{ contains(matrix.target, 'windows') && '.exe' || '' }}
|
||||||
if-no-files-found: error
|
if-no-files-found: error
|
||||||
retention-days: 7
|
retention-days: 7
|
||||||
|
|
||||||
- name: Upload Wasm artifacts
|
- name: Upload Wasm artifacts
|
||||||
if: ${{ matrix.platform == 'linux-x64' }}
|
if: matrix.platform == 'linux-x64'
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v6
|
||||||
with:
|
with:
|
||||||
name: tree-sitter.wasm
|
name: tree-sitter.wasm
|
||||||
path: |
|
path: |
|
||||||
lib/binding_web/tree-sitter.js
|
lib/binding_web/web-tree-sitter.js
|
||||||
lib/binding_web/tree-sitter.js.map
|
lib/binding_web/web-tree-sitter.js.map
|
||||||
lib/binding_web/tree-sitter.cjs
|
lib/binding_web/web-tree-sitter.cjs
|
||||||
lib/binding_web/tree-sitter.cjs.map
|
lib/binding_web/web-tree-sitter.cjs.map
|
||||||
lib/binding_web/tree-sitter.wasm
|
lib/binding_web/web-tree-sitter.wasm
|
||||||
lib/binding_web/tree-sitter.wasm.map
|
lib/binding_web/web-tree-sitter.wasm.map
|
||||||
lib/binding_web/debug/tree-sitter.cjs
|
lib/binding_web/debug/web-tree-sitter.cjs
|
||||||
lib/binding_web/debug/tree-sitter.cjs.map
|
lib/binding_web/debug/web-tree-sitter.cjs.map
|
||||||
lib/binding_web/debug/tree-sitter.js
|
lib/binding_web/debug/web-tree-sitter.js
|
||||||
lib/binding_web/debug/tree-sitter.js.map
|
lib/binding_web/debug/web-tree-sitter.js.map
|
||||||
lib/binding_web/debug/tree-sitter.wasm
|
lib/binding_web/debug/web-tree-sitter.wasm
|
||||||
lib/binding_web/debug/tree-sitter.wasm.map
|
lib/binding_web/debug/web-tree-sitter.wasm.map
|
||||||
|
lib/binding_web/lib/*.c
|
||||||
|
lib/binding_web/lib/*.h
|
||||||
|
lib/binding_web/lib/*.ts
|
||||||
|
lib/binding_web/src/*.ts
|
||||||
if-no-files-found: error
|
if-no-files-found: error
|
||||||
retention-days: 7
|
retention-days: 7
|
||||||
|
|
|
||||||
10
.github/workflows/ci.yml
vendored
10
.github/workflows/ci.yml
vendored
|
|
@ -26,17 +26,12 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up stable Rust toolchain
|
- name: Set up stable Rust toolchain
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||||
with:
|
with:
|
||||||
toolchain: stable
|
toolchain: stable
|
||||||
|
|
||||||
- name: Set up nightly Rust toolchain
|
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
with:
|
|
||||||
toolchain: nightly
|
|
||||||
components: clippy, rustfmt
|
components: clippy, rustfmt
|
||||||
|
|
||||||
- name: Lint files
|
- name: Lint files
|
||||||
|
|
@ -49,3 +44,6 @@ jobs:
|
||||||
|
|
||||||
build:
|
build:
|
||||||
uses: ./.github/workflows/build.yml
|
uses: ./.github/workflows/build.yml
|
||||||
|
|
||||||
|
check-wasm-stdlib:
|
||||||
|
uses: ./.github/workflows/wasm_stdlib.yml
|
||||||
|
|
|
||||||
7
.github/workflows/docs.yml
vendored
7
.github/workflows/docs.yml
vendored
|
|
@ -3,6 +3,7 @@ on:
|
||||||
push:
|
push:
|
||||||
branches: [master]
|
branches: [master]
|
||||||
paths: [docs/**]
|
paths: [docs/**]
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
deploy-docs:
|
deploy-docs:
|
||||||
|
|
@ -15,7 +16,7 @@ jobs:
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up Rust
|
- name: Set up Rust
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||||
|
|
@ -25,7 +26,7 @@ jobs:
|
||||||
GH_TOKEN: ${{ github.token }}
|
GH_TOKEN: ${{ github.token }}
|
||||||
run: |
|
run: |
|
||||||
jq_expr='.assets[] | select(.name | contains("x86_64-unknown-linux-gnu")) | .browser_download_url'
|
jq_expr='.assets[] | select(.name | contains("x86_64-unknown-linux-gnu")) | .browser_download_url'
|
||||||
url=$(gh api repos/rust-lang/mdbook/releases/latest --jq "$jq_expr")
|
url=$(gh api repos/rust-lang/mdbook/releases/tags/v0.4.52 --jq "$jq_expr")
|
||||||
mkdir mdbook
|
mkdir mdbook
|
||||||
curl -sSL "$url" | tar -xz -C mdbook
|
curl -sSL "$url" | tar -xz -C mdbook
|
||||||
printf '%s/mdbook\n' "$PWD" >> "$GITHUB_PATH"
|
printf '%s/mdbook\n' "$PWD" >> "$GITHUB_PATH"
|
||||||
|
|
@ -40,7 +41,7 @@ jobs:
|
||||||
uses: actions/configure-pages@v5
|
uses: actions/configure-pages@v5
|
||||||
|
|
||||||
- name: Upload artifact
|
- name: Upload artifact
|
||||||
uses: actions/upload-pages-artifact@v3
|
uses: actions/upload-pages-artifact@v4
|
||||||
with:
|
with:
|
||||||
path: docs/book
|
path: docs/book
|
||||||
|
|
||||||
|
|
|
||||||
30
.github/workflows/emscripten.yml
vendored
30
.github/workflows/emscripten.yml
vendored
|
|
@ -1,30 +0,0 @@
|
||||||
name: Update Emscripten
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request:
|
|
||||||
types: [opened, synchronize]
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: read
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
update-emscripten:
|
|
||||||
if: github.actor == 'dependabot[bot]'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v4
|
|
||||||
with:
|
|
||||||
ref: ${{ github.event.pull_request.head.sha }}
|
|
||||||
|
|
||||||
- name: Set up stable Rust toolchain
|
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
|
||||||
|
|
||||||
- name: Run emscripten update xtask
|
|
||||||
run: |
|
|
||||||
git config --global user.name "dependabot[bot]"
|
|
||||||
git config --global user.email "49699333+dependabot[bot]@users.noreply.github.com"
|
|
||||||
cargo xtask upgrade-emscripten
|
|
||||||
|
|
||||||
- name: Push updated version
|
|
||||||
run: git push origin HEAD:$GITHUB_HEAD_REF
|
|
||||||
13
.github/workflows/nvim_ts.yml
vendored
13
.github/workflows/nvim_ts.yml
vendored
|
|
@ -3,7 +3,10 @@ name: nvim-treesitter parser tests
|
||||||
on:
|
on:
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
- 'cli/**'
|
- 'crates/cli/**'
|
||||||
|
- 'crates/config/**'
|
||||||
|
- 'crates/generate/**'
|
||||||
|
- 'crates/loader/**'
|
||||||
- '.github/workflows/nvim_ts.yml'
|
- '.github/workflows/nvim_ts.yml'
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
|
|
@ -13,7 +16,7 @@ concurrency:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check_compilation:
|
check_compilation:
|
||||||
timeout-minutes: 20
|
timeout-minutes: 30
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
|
|
@ -25,9 +28,9 @@ jobs:
|
||||||
NVIM: ${{ matrix.os == 'windows-latest' && 'nvim-win64\\bin\\nvim.exe' || 'nvim' }}
|
NVIM: ${{ matrix.os == 'windows-latest' && 'nvim-win64\\bin\\nvim.exe' || 'nvim' }}
|
||||||
NVIM_TS_DIR: nvim-treesitter
|
NVIM_TS_DIR: nvim-treesitter
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
repository: nvim-treesitter/nvim-treesitter
|
repository: nvim-treesitter/nvim-treesitter
|
||||||
path: ${{ env.NVIM_TS_DIR }}
|
path: ${{ env.NVIM_TS_DIR }}
|
||||||
|
|
@ -55,7 +58,7 @@ jobs:
|
||||||
|
|
||||||
- if: matrix.type == 'build'
|
- if: matrix.type == 'build'
|
||||||
name: Compile parsers
|
name: Compile parsers
|
||||||
run: $NVIM -l ./scripts/install-parsers.lua
|
run: $NVIM -l ./scripts/install-parsers.lua --max-jobs=10
|
||||||
working-directory: ${{ env.NVIM_TS_DIR }}
|
working-directory: ${{ env.NVIM_TS_DIR }}
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
|
|
|
||||||
76
.github/workflows/release.yml
vendored
76
.github/workflows/release.yml
vendored
|
|
@ -17,13 +17,15 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
needs: build
|
needs: build
|
||||||
permissions:
|
permissions:
|
||||||
|
id-token: write
|
||||||
|
attestations: write
|
||||||
contents: write
|
contents: write
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Download build artifacts
|
- name: Download build artifacts
|
||||||
uses: actions/download-artifact@v4
|
uses: actions/download-artifact@v7
|
||||||
with:
|
with:
|
||||||
path: artifacts
|
path: artifacts
|
||||||
|
|
||||||
|
|
@ -33,26 +35,13 @@ jobs:
|
||||||
|
|
||||||
- name: Prepare release artifacts
|
- name: Prepare release artifacts
|
||||||
run: |
|
run: |
|
||||||
mkdir -p target
|
mkdir -p target web
|
||||||
mv artifacts/tree-sitter.wasm/* target/
|
mv artifacts/tree-sitter.wasm/* web/
|
||||||
|
|
||||||
# Rename files
|
tar -czf target/web-tree-sitter.tar.gz -C web .
|
||||||
mv target/tree-sitter.js target/web-tree-sitter.js
|
|
||||||
mv target/tree-sitter.js.map target/web-tree-sitter.js.map
|
|
||||||
mv target/tree-sitter.cjs target/web-tree-sitter.cjs
|
|
||||||
mv target/tree-sitter.cjs.map target/web-tree-sitter.cjs.map
|
|
||||||
mv target/tree-sitter.wasm target/web-tree-sitter.wasm
|
|
||||||
mv target/tree-sitter.wasm.map target/web-tree-sitter.wasm.map
|
|
||||||
|
|
||||||
mv target/debug/tree-sitter.js target/web-tree-sitter-debug.js
|
|
||||||
mv target/debug/tree-sitter.js.map target/web-tree-sitter-debug.js.map
|
|
||||||
mv target/debug/tree-sitter.cjs target/web-tree-sitter-debug.cjs
|
|
||||||
mv target/debug/tree-sitter.cjs.map target/web-tree-sitter-debug.cjs.map
|
|
||||||
mv target/debug/tree-sitter.wasm target/web-tree-sitter-debug.wasm
|
|
||||||
mv target/debug/tree-sitter.wasm.map target/web-tree-sitter-debug.wasm.map
|
|
||||||
rm -rf target/debug
|
|
||||||
|
|
||||||
rm -r artifacts/tree-sitter.wasm
|
rm -r artifacts/tree-sitter.wasm
|
||||||
|
|
||||||
for platform in $(cd artifacts; ls | sed 's/^tree-sitter\.//'); do
|
for platform in $(cd artifacts; ls | sed 's/^tree-sitter\.//'); do
|
||||||
exe=$(ls artifacts/tree-sitter.$platform/tree-sitter*)
|
exe=$(ls artifacts/tree-sitter.$platform/tree-sitter*)
|
||||||
gzip --stdout --name $exe > target/tree-sitter-$platform.gz
|
gzip --stdout --name $exe > target/tree-sitter-$platform.gz
|
||||||
|
|
@ -60,57 +49,65 @@ jobs:
|
||||||
rm -rf artifacts
|
rm -rf artifacts
|
||||||
ls -l target/
|
ls -l target/
|
||||||
|
|
||||||
|
- name: Generate attestations
|
||||||
|
uses: actions/attest-build-provenance@v3
|
||||||
|
with:
|
||||||
|
subject-path: |
|
||||||
|
target/tree-sitter-*.gz
|
||||||
|
target/web-tree-sitter.tar.gz
|
||||||
|
|
||||||
- name: Create release
|
- name: Create release
|
||||||
run: |-
|
run: |-
|
||||||
gh release create ${{ github.ref_name }} \
|
gh release create $GITHUB_REF_NAME \
|
||||||
target/tree-sitter-*.gz \
|
target/tree-sitter-*.gz \
|
||||||
target/web-tree-sitter.js \
|
target/web-tree-sitter.tar.gz
|
||||||
target/web-tree-sitter.js.map \
|
|
||||||
target/web-tree-sitter.cjs \
|
|
||||||
target/web-tree-sitter.cjs.map \
|
|
||||||
target/web-tree-sitter.wasm \
|
|
||||||
target/web-tree-sitter.wasm.map \
|
|
||||||
target/web-tree-sitter-debug.js \
|
|
||||||
target/web-tree-sitter-debug.js.map \
|
|
||||||
target/web-tree-sitter-debug.cjs \
|
|
||||||
target/web-tree-sitter-debug.cjs.map \
|
|
||||||
target/web-tree-sitter-debug.wasm \
|
|
||||||
target/web-tree-sitter-debug.wasm.map
|
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ github.token }}
|
GH_TOKEN: ${{ github.token }}
|
||||||
|
|
||||||
crates_io:
|
crates_io:
|
||||||
name: Publish packages to Crates.io
|
name: Publish packages to Crates.io
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
environment: crates
|
||||||
|
permissions:
|
||||||
|
id-token: write
|
||||||
|
contents: read
|
||||||
needs: release
|
needs: release
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up Rust
|
- name: Set up Rust
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||||
|
|
||||||
|
- name: Set up registry token
|
||||||
|
id: auth
|
||||||
|
uses: rust-lang/crates-io-auth-action@v1
|
||||||
|
|
||||||
- name: Publish crates to Crates.io
|
- name: Publish crates to Crates.io
|
||||||
uses: katyo/publish-crates@v2
|
uses: katyo/publish-crates@v2
|
||||||
with:
|
with:
|
||||||
registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
|
registry-token: ${{ steps.auth.outputs.token }}
|
||||||
|
|
||||||
npm:
|
npm:
|
||||||
name: Publish packages to npmjs.com
|
name: Publish packages to npmjs.com
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
environment: npm
|
||||||
|
permissions:
|
||||||
|
id-token: write
|
||||||
|
contents: read
|
||||||
needs: release
|
needs: release
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
directory: [cli/npm, lib/binding_web]
|
directory: [crates/cli/npm, lib/binding_web]
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up Node
|
- name: Set up Node
|
||||||
uses: actions/setup-node@v4
|
uses: actions/setup-node@v6
|
||||||
with:
|
with:
|
||||||
node-version: 20
|
node-version: 24
|
||||||
registry-url: https://registry.npmjs.org
|
registry-url: https://registry.npmjs.org
|
||||||
|
|
||||||
- name: Set up Rust
|
- name: Set up Rust
|
||||||
|
|
@ -125,9 +122,8 @@ jobs:
|
||||||
npm run build:debug
|
npm run build:debug
|
||||||
CJS=true npm run build
|
CJS=true npm run build
|
||||||
CJS=true npm run build:debug
|
CJS=true npm run build:debug
|
||||||
|
npm run build:dts
|
||||||
|
|
||||||
- name: Publish to npmjs.com
|
- name: Publish to npmjs.com
|
||||||
working-directory: ${{ matrix.directory }}
|
working-directory: ${{ matrix.directory }}
|
||||||
run: npm publish
|
run: npm publish
|
||||||
env:
|
|
||||||
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
|
||||||
|
|
|
||||||
8
.github/workflows/response.yml
vendored
8
.github/workflows/response.yml
vendored
|
|
@ -17,13 +17,13 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout script
|
- name: Checkout script
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
sparse-checkout: .github/scripts/close_unresponsive.js
|
sparse-checkout: .github/scripts/close_unresponsive.js
|
||||||
sparse-checkout-cone-mode: false
|
sparse-checkout-cone-mode: false
|
||||||
|
|
||||||
- name: Run script
|
- name: Run script
|
||||||
uses: actions/github-script@v7
|
uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const script = require('./.github/scripts/close_unresponsive.js')
|
const script = require('./.github/scripts/close_unresponsive.js')
|
||||||
|
|
@ -35,13 +35,13 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout script
|
- name: Checkout script
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
sparse-checkout: .github/scripts/remove_response_label.js
|
sparse-checkout: .github/scripts/remove_response_label.js
|
||||||
sparse-checkout-cone-mode: false
|
sparse-checkout-cone-mode: false
|
||||||
|
|
||||||
- name: Run script
|
- name: Run script
|
||||||
uses: actions/github-script@v7
|
uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const script = require('./.github/scripts/remove_response_label.js')
|
const script = require('./.github/scripts/remove_response_label.js')
|
||||||
|
|
|
||||||
4
.github/workflows/reviewers_remove.yml
vendored
4
.github/workflows/reviewers_remove.yml
vendored
|
|
@ -12,13 +12,13 @@ jobs:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout script
|
- name: Checkout script
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
with:
|
with:
|
||||||
sparse-checkout: .github/scripts/reviewers_remove.js
|
sparse-checkout: .github/scripts/reviewers_remove.js
|
||||||
sparse-checkout-cone-mode: false
|
sparse-checkout-cone-mode: false
|
||||||
|
|
||||||
- name: Run script
|
- name: Run script
|
||||||
uses: actions/github-script@v7
|
uses: actions/github-script@v8
|
||||||
with:
|
with:
|
||||||
script: |
|
script: |
|
||||||
const script = require('./.github/scripts/reviewers_remove.js')
|
const script = require('./.github/scripts/reviewers_remove.js')
|
||||||
|
|
|
||||||
2
.github/workflows/sanitize.yml
vendored
2
.github/workflows/sanitize.yml
vendored
|
|
@ -15,7 +15,7 @@ jobs:
|
||||||
TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
|
TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Install UBSAN library
|
- name: Install UBSAN library
|
||||||
run: sudo apt-get update -y && sudo apt-get install -y libubsan1
|
run: sudo apt-get update -y && sudo apt-get install -y libubsan1
|
||||||
|
|
|
||||||
29
.github/workflows/spam.yml
vendored
Normal file
29
.github/workflows/spam.yml
vendored
Normal file
|
|
@ -0,0 +1,29 @@
|
||||||
|
name: Close as spam
|
||||||
|
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types: [labeled]
|
||||||
|
pull_request_target:
|
||||||
|
types: [labeled]
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
issues: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
spam:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event.label.name == 'spam'
|
||||||
|
steps:
|
||||||
|
- name: Checkout script
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
with:
|
||||||
|
sparse-checkout: .github/scripts/close_spam.js
|
||||||
|
sparse-checkout-cone-mode: false
|
||||||
|
|
||||||
|
- name: Run script
|
||||||
|
uses: actions/github-script@v8
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const script = require('./.github/scripts/close_spam.js')
|
||||||
|
await script({github, context})
|
||||||
11
.github/workflows/wasm_exports.yml
vendored
11
.github/workflows/wasm_exports.yml
vendored
|
|
@ -1,23 +1,24 @@
|
||||||
name: Check WASM Exports
|
name: Check Wasm Exports
|
||||||
|
|
||||||
on:
|
on:
|
||||||
pull_request:
|
pull_request:
|
||||||
paths:
|
paths:
|
||||||
- lib/include/tree_sitter/api.h
|
- lib/include/tree_sitter/api.h
|
||||||
- lib/binding_web/**
|
- lib/binding_web/**
|
||||||
|
- xtask/src/**
|
||||||
push:
|
push:
|
||||||
branches: [master]
|
branches: [master]
|
||||||
paths:
|
paths:
|
||||||
- lib/include/tree_sitter/api.h
|
- lib/include/tree_sitter/api.h
|
||||||
- lib/binding_rust/bindings.rs
|
- lib/binding_rust/bindings.rs
|
||||||
- lib/CMakeLists.txt
|
- CMakeLists.txt
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
check-wasm-exports:
|
check-wasm-exports:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository
|
- name: Checkout repository
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up stable Rust toolchain
|
- name: Set up stable Rust toolchain
|
||||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||||
|
|
@ -32,9 +33,9 @@ jobs:
|
||||||
env:
|
env:
|
||||||
CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
|
CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
|
||||||
|
|
||||||
- name: Build WASM Library
|
- name: Build Wasm Library
|
||||||
working-directory: lib/binding_web
|
working-directory: lib/binding_web
|
||||||
run: npm ci && npm run build:debug
|
run: npm ci && npm run build:debug
|
||||||
|
|
||||||
- name: Check WASM exports
|
- name: Check Wasm exports
|
||||||
run: cargo xtask check-wasm-exports
|
run: cargo xtask check-wasm-exports
|
||||||
|
|
|
||||||
19
.github/workflows/wasm_stdlib.yml
vendored
Normal file
19
.github/workflows/wasm_stdlib.yml
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
||||||
|
name: Check Wasm Stdlib build
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
check:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Check directory changes
|
||||||
|
uses: actions/github-script@v8
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
const scriptPath = `${process.env.GITHUB_WORKSPACE}/.github/scripts/wasm_stdlib.js`;
|
||||||
|
const script = require(scriptPath);
|
||||||
|
return script({ github, context, core });
|
||||||
5
.gitignore
vendored
5
.gitignore
vendored
|
|
@ -1,10 +1,12 @@
|
||||||
log*.html
|
log*.html
|
||||||
|
.direnv
|
||||||
|
|
||||||
.idea
|
.idea
|
||||||
*.xcodeproj
|
*.xcodeproj
|
||||||
.vscode
|
.vscode
|
||||||
.cache
|
.cache
|
||||||
.zig-cache
|
.zig-cache
|
||||||
|
.direnv
|
||||||
|
|
||||||
profile*
|
profile*
|
||||||
fuzz-results
|
fuzz-results
|
||||||
|
|
@ -24,6 +26,7 @@ docs/assets/js/tree-sitter.js
|
||||||
*.dylib
|
*.dylib
|
||||||
*.so
|
*.so
|
||||||
*.so.[0-9]*
|
*.so.[0-9]*
|
||||||
|
*.dll
|
||||||
*.o
|
*.o
|
||||||
*.obj
|
*.obj
|
||||||
*.exp
|
*.exp
|
||||||
|
|
@ -33,3 +36,5 @@ docs/assets/js/tree-sitter.js
|
||||||
.build
|
.build
|
||||||
build
|
build
|
||||||
zig-*
|
zig-*
|
||||||
|
|
||||||
|
/result
|
||||||
|
|
|
||||||
11
.zed/settings.json
Normal file
11
.zed/settings.json
Normal file
|
|
@ -0,0 +1,11 @@
|
||||||
|
{
|
||||||
|
"lsp": {
|
||||||
|
"rust-analyzer": {
|
||||||
|
"initialization_options": {
|
||||||
|
"cargo": {
|
||||||
|
"features": "all"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,7 +1,7 @@
|
||||||
cmake_minimum_required(VERSION 3.13)
|
cmake_minimum_required(VERSION 3.13)
|
||||||
|
|
||||||
project(tree-sitter
|
project(tree-sitter
|
||||||
VERSION "0.25.1"
|
VERSION "0.27.0"
|
||||||
DESCRIPTION "An incremental parsing system for programming tools"
|
DESCRIPTION "An incremental parsing system for programming tools"
|
||||||
HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/"
|
HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/"
|
||||||
LANGUAGES C)
|
LANGUAGES C)
|
||||||
|
|
@ -11,15 +11,15 @@ option(TREE_SITTER_FEATURE_WASM "Enable the Wasm feature" OFF)
|
||||||
option(AMALGAMATED "Build using an amalgamated source" OFF)
|
option(AMALGAMATED "Build using an amalgamated source" OFF)
|
||||||
|
|
||||||
if(AMALGAMATED)
|
if(AMALGAMATED)
|
||||||
set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
|
set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/lib/src/lib.c")
|
||||||
else()
|
else()
|
||||||
file(GLOB TS_SOURCE_FILES src/*.c)
|
file(GLOB TS_SOURCE_FILES lib/src/*.c)
|
||||||
list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
|
list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/lib/src/lib.c")
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
add_library(tree-sitter ${TS_SOURCE_FILES})
|
add_library(tree-sitter ${TS_SOURCE_FILES})
|
||||||
|
|
||||||
target_include_directories(tree-sitter PRIVATE src src/wasm include)
|
target_include_directories(tree-sitter PRIVATE lib/src lib/src/wasm PUBLIC lib/include)
|
||||||
|
|
||||||
if(MSVC)
|
if(MSVC)
|
||||||
target_compile_options(tree-sitter PRIVATE
|
target_compile_options(tree-sitter PRIVATE
|
||||||
|
|
@ -81,15 +81,15 @@ set_target_properties(tree-sitter
|
||||||
SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}"
|
SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}"
|
||||||
DEFINE_SYMBOL "")
|
DEFINE_SYMBOL "")
|
||||||
|
|
||||||
target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE)
|
target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE _BSD_SOURCE _DARWIN_C_SOURCE)
|
||||||
|
|
||||||
configure_file(tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)
|
|
||||||
|
|
||||||
include(GNUInstallDirs)
|
include(GNUInstallDirs)
|
||||||
|
|
||||||
install(FILES include/tree_sitter/api.h
|
configure_file(lib/tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)
|
||||||
|
|
||||||
|
install(FILES lib/include/tree_sitter/api.h
|
||||||
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/tree_sitter")
|
DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/tree_sitter")
|
||||||
install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc"
|
install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc"
|
||||||
DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig")
|
DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
|
||||||
install(TARGETS tree-sitter
|
install(TARGETS tree-sitter
|
||||||
LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
|
LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
|
||||||
1939
Cargo.lock
generated
1939
Cargo.lock
generated
File diff suppressed because it is too large
Load diff
106
Cargo.toml
106
Cargo.toml
|
|
@ -1,25 +1,26 @@
|
||||||
[workspace]
|
[workspace]
|
||||||
default-members = ["cli"]
|
default-members = ["crates/cli"]
|
||||||
members = [
|
members = [
|
||||||
"cli",
|
"crates/cli",
|
||||||
"cli/config",
|
"crates/config",
|
||||||
"cli/loader",
|
"crates/generate",
|
||||||
|
"crates/highlight",
|
||||||
|
"crates/loader",
|
||||||
|
"crates/tags",
|
||||||
|
"crates/xtask",
|
||||||
|
"crates/language",
|
||||||
"lib",
|
"lib",
|
||||||
"lib/language",
|
|
||||||
"tags",
|
|
||||||
"highlight",
|
|
||||||
"xtask",
|
|
||||||
]
|
]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
version = "0.25.1"
|
version = "0.27.0"
|
||||||
authors = [
|
authors = [
|
||||||
"Max Brunsfeld <maxbrunsfeld@gmail.com>",
|
"Max Brunsfeld <maxbrunsfeld@gmail.com>",
|
||||||
"Amaan Qureshi <amaanq12@gmail.com>",
|
"Amaan Qureshi <amaanq12@gmail.com>",
|
||||||
]
|
]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.82"
|
rust-version = "1.85"
|
||||||
homepage = "https://tree-sitter.github.io/tree-sitter"
|
homepage = "https://tree-sitter.github.io/tree-sitter"
|
||||||
repository = "https://github.com/tree-sitter/tree-sitter"
|
repository = "https://github.com/tree-sitter/tree-sitter"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
|
|
@ -59,6 +60,8 @@ missing_errors_doc = "allow"
|
||||||
missing_panics_doc = "allow"
|
missing_panics_doc = "allow"
|
||||||
module_name_repetitions = "allow"
|
module_name_repetitions = "allow"
|
||||||
multiple_crate_versions = "allow"
|
multiple_crate_versions = "allow"
|
||||||
|
needless_for_each = "allow"
|
||||||
|
obfuscated_if_else = "allow"
|
||||||
option_if_let_else = "allow"
|
option_if_let_else = "allow"
|
||||||
or_fun_call = "allow"
|
or_fun_call = "allow"
|
||||||
range_plus_one = "allow"
|
range_plus_one = "allow"
|
||||||
|
|
@ -75,6 +78,9 @@ unnecessary_wraps = "allow"
|
||||||
unused_self = "allow"
|
unused_self = "allow"
|
||||||
used_underscore_items = "allow"
|
used_underscore_items = "allow"
|
||||||
|
|
||||||
|
[workspace.lints.rust]
|
||||||
|
mismatched_lifetime_syntaxes = "allow"
|
||||||
|
|
||||||
[profile.optimize]
|
[profile.optimize]
|
||||||
inherits = "release"
|
inherits = "release"
|
||||||
strip = true # Automatically strip symbols from the binary.
|
strip = true # Automatically strip symbols from the binary.
|
||||||
|
|
@ -97,61 +103,61 @@ codegen-units = 256
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
ansi_colours = "1.2.3"
|
ansi_colours = "1.2.3"
|
||||||
anstyle = "1.0.10"
|
anstyle = "1.0.13"
|
||||||
anyhow = "1.0.95"
|
anyhow = "1.0.100"
|
||||||
bstr = "1.11.3"
|
bstr = "1.12.0"
|
||||||
cc = "1.2.10"
|
cc = "1.2.53"
|
||||||
clap = { version = "4.5.27", features = [
|
clap = { version = "4.5.54", features = [
|
||||||
"cargo",
|
"cargo",
|
||||||
"derive",
|
"derive",
|
||||||
"env",
|
"env",
|
||||||
"help",
|
"help",
|
||||||
|
"string",
|
||||||
"unstable-styles",
|
"unstable-styles",
|
||||||
] }
|
] }
|
||||||
clap_complete = "4.5.42"
|
clap_complete = "4.5.65"
|
||||||
clap_complete_nushell = "4.5.5"
|
clap_complete_nushell = "4.5.10"
|
||||||
|
crc32fast = "1.5.0"
|
||||||
ctor = "0.2.9"
|
ctor = "0.2.9"
|
||||||
ctrlc = { version = "3.4.5", features = ["termination"] }
|
ctrlc = { version = "3.5.0", features = ["termination"] }
|
||||||
dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
|
dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
|
||||||
etcetera = "0.8.0"
|
etcetera = "0.11.0"
|
||||||
filetime = "0.2.25"
|
|
||||||
fs4 = "0.12.0"
|
fs4 = "0.12.0"
|
||||||
git2 = "0.20.0"
|
glob = "0.3.3"
|
||||||
glob = "0.3.2"
|
|
||||||
heck = "0.5.0"
|
heck = "0.5.0"
|
||||||
html-escape = "0.2.13"
|
html-escape = "0.2.13"
|
||||||
indexmap = "2.7.1"
|
indexmap = "2.12.1"
|
||||||
indoc = "2.0.5"
|
indoc = "2.0.6"
|
||||||
libloading = "0.8.6"
|
libloading = "0.9.0"
|
||||||
log = { version = "0.4.25", features = ["std"] }
|
log = { version = "0.4.28", features = ["std"] }
|
||||||
memchr = "2.7.4"
|
memchr = "2.7.6"
|
||||||
once_cell = "1.20.2"
|
once_cell = "1.21.3"
|
||||||
path-slash = "0.2.1"
|
|
||||||
pretty_assertions = "1.4.1"
|
pretty_assertions = "1.4.1"
|
||||||
rand = "0.8.5"
|
rand = "0.8.5"
|
||||||
regex = "1.11.1"
|
regex = "1.11.3"
|
||||||
regex-syntax = "0.8.5"
|
regex-syntax = "0.8.6"
|
||||||
rustc-hash = "2.1.0"
|
rustc-hash = "2.1.1"
|
||||||
semver = { version = "1.0.25", features = ["serde"] }
|
schemars = "1.0.5"
|
||||||
serde = { version = "1.0.217", features = ["derive"] }
|
semver = { version = "1.0.27", features = ["serde"] }
|
||||||
serde_derive = "1.0.217"
|
serde = { version = "1.0.219", features = ["derive"] }
|
||||||
serde_json = { version = "1.0.137", features = ["preserve_order"] }
|
serde_json = { version = "1.0.149", features = ["preserve_order"] }
|
||||||
similar = "2.7.0"
|
similar = "2.7.0"
|
||||||
smallbitvec = "2.5.3"
|
smallbitvec = "2.6.0"
|
||||||
streaming-iterator = "0.1.9"
|
streaming-iterator = "0.1.9"
|
||||||
tempfile = "3.15.0"
|
tempfile = "3.23.0"
|
||||||
thiserror = "2.0.11"
|
thiserror = "2.0.17"
|
||||||
tiny_http = "0.12.0"
|
tiny_http = "0.12.0"
|
||||||
toml = "0.8.19"
|
topological-sort = "0.2.2"
|
||||||
unindent = "0.2.3"
|
unindent = "0.2.4"
|
||||||
url = { version = "2.5.4", features = ["serde"] }
|
|
||||||
walkdir = "2.5.0"
|
walkdir = "2.5.0"
|
||||||
wasmparser = "0.224.0"
|
wasmparser = "0.243.0"
|
||||||
webbrowser = "1.0.3"
|
webbrowser = "1.0.5"
|
||||||
|
|
||||||
tree-sitter = { version = "0.25.1", path = "./lib" }
|
tree-sitter = { version = "0.27.0", path = "./lib" }
|
||||||
tree-sitter-generate = { version = "0.25.1", path = "./cli/generate" }
|
tree-sitter-generate = { version = "0.27.0", path = "./crates/generate" }
|
||||||
tree-sitter-loader = { version = "0.25.1", path = "./cli/loader" }
|
tree-sitter-loader = { version = "0.27.0", path = "./crates/loader" }
|
||||||
tree-sitter-config = { version = "0.25.1", path = "./cli/config" }
|
tree-sitter-config = { version = "0.27.0", path = "./crates/config" }
|
||||||
tree-sitter-highlight = { version = "0.25.1", path = "./highlight" }
|
tree-sitter-highlight = { version = "0.27.0", path = "./crates/highlight" }
|
||||||
tree-sitter-tags = { version = "0.25.1", path = "./tags" }
|
tree-sitter-tags = { version = "0.27.0", path = "./crates/tags" }
|
||||||
|
|
||||||
|
tree-sitter-language = { version = "0.1", path = "./crates/language" }
|
||||||
|
|
|
||||||
2
LICENSE
2
LICENSE
|
|
@ -1,6 +1,6 @@
|
||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|
||||||
Copyright (c) 2018-2024 Max Brunsfeld
|
Copyright (c) 2018 Max Brunsfeld
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|
|
||||||
47
Makefile
47
Makefile
|
|
@ -1,8 +1,4 @@
|
||||||
ifeq ($(OS),Windows_NT)
|
VERSION := 0.27.0
|
||||||
$(error Windows is not supported)
|
|
||||||
endif
|
|
||||||
|
|
||||||
VERSION := 0.25.1
|
|
||||||
DESCRIPTION := An incremental parsing system for programming tools
|
DESCRIPTION := An incremental parsing system for programming tools
|
||||||
HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
|
HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
|
||||||
|
|
||||||
|
|
@ -10,6 +6,7 @@ HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
|
||||||
PREFIX ?= /usr/local
|
PREFIX ?= /usr/local
|
||||||
INCLUDEDIR ?= $(PREFIX)/include
|
INCLUDEDIR ?= $(PREFIX)/include
|
||||||
LIBDIR ?= $(PREFIX)/lib
|
LIBDIR ?= $(PREFIX)/lib
|
||||||
|
BINDIR ?= $(PREFIX)/bin
|
||||||
PCLIBDIR ?= $(LIBDIR)/pkgconfig
|
PCLIBDIR ?= $(LIBDIR)/pkgconfig
|
||||||
|
|
||||||
# collect sources
|
# collect sources
|
||||||
|
|
@ -27,7 +24,7 @@ OBJ := $(SRC:.c=.o)
|
||||||
ARFLAGS := rcs
|
ARFLAGS := rcs
|
||||||
CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
|
CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
|
||||||
override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
|
override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
|
||||||
override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE
|
override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_DARWIN_C_SOURCE
|
||||||
override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include
|
override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include
|
||||||
|
|
||||||
# ABI versioning
|
# ABI versioning
|
||||||
|
|
@ -35,20 +32,25 @@ SONAME_MAJOR := $(word 1,$(subst ., ,$(VERSION)))
|
||||||
SONAME_MINOR := $(word 2,$(subst ., ,$(VERSION)))
|
SONAME_MINOR := $(word 2,$(subst ., ,$(VERSION)))
|
||||||
|
|
||||||
# OS-specific bits
|
# OS-specific bits
|
||||||
ifneq ($(findstring darwin,$(shell $(CC) -dumpmachine)),)
|
MACHINE := $(shell $(CC) -dumpmachine)
|
||||||
|
|
||||||
|
ifneq ($(findstring darwin,$(MACHINE)),)
|
||||||
SOEXT = dylib
|
SOEXT = dylib
|
||||||
SOEXTVER_MAJOR = $(SONAME_MAJOR).$(SOEXT)
|
SOEXTVER_MAJOR = $(SONAME_MAJOR).$(SOEXT)
|
||||||
SOEXTVER = $(SONAME_MAJOR).$(SONAME_MINOR).$(SOEXT)
|
SOEXTVER = $(SONAME_MAJOR).$(SONAME_MINOR).$(SOEXT)
|
||||||
LINKSHARED += -dynamiclib -Wl,-install_name,$(LIBDIR)/libtree-sitter.$(SOEXTVER)
|
LINKSHARED += -dynamiclib -Wl,-install_name,$(LIBDIR)/libtree-sitter.$(SOEXTVER)
|
||||||
|
else ifneq ($(findstring mingw32,$(MACHINE)),)
|
||||||
|
SOEXT = dll
|
||||||
|
LINKSHARED += -s -shared -Wl,--out-implib,libtree-sitter.dll.a
|
||||||
else
|
else
|
||||||
SOEXT = so
|
SOEXT = so
|
||||||
SOEXTVER_MAJOR = $(SOEXT).$(SONAME_MAJOR)
|
SOEXTVER_MAJOR = $(SOEXT).$(SONAME_MAJOR)
|
||||||
SOEXTVER = $(SOEXT).$(SONAME_MAJOR).$(SONAME_MINOR)
|
SOEXTVER = $(SOEXT).$(SONAME_MAJOR).$(SONAME_MINOR)
|
||||||
LINKSHARED += -shared -Wl,-soname,libtree-sitter.$(SOEXTVER)
|
LINKSHARED += -shared -Wl,-soname,libtree-sitter.$(SOEXTVER)
|
||||||
endif
|
|
||||||
ifneq ($(filter $(shell uname),FreeBSD NetBSD DragonFly),)
|
ifneq ($(filter $(shell uname),FreeBSD NetBSD DragonFly),)
|
||||||
PCLIBDIR := $(PREFIX)/libdata/pkgconfig
|
PCLIBDIR := $(PREFIX)/libdata/pkgconfig
|
||||||
endif
|
endif
|
||||||
|
endif
|
||||||
|
|
||||||
all: libtree-sitter.a libtree-sitter.$(SOEXT) tree-sitter.pc
|
all: libtree-sitter.a libtree-sitter.$(SOEXT) tree-sitter.pc
|
||||||
|
|
||||||
|
|
@ -61,6 +63,10 @@ ifneq ($(STRIP),)
|
||||||
$(STRIP) $@
|
$(STRIP) $@
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
ifneq ($(findstring mingw32,$(MACHINE)),)
|
||||||
|
libtree-sitter.dll.a: libtree-sitter.$(SOEXT)
|
||||||
|
endif
|
||||||
|
|
||||||
tree-sitter.pc: lib/tree-sitter.pc.in
|
tree-sitter.pc: lib/tree-sitter.pc.in
|
||||||
sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
|
sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
|
||||||
-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
|
-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
|
||||||
|
|
@ -69,17 +75,27 @@ tree-sitter.pc: lib/tree-sitter.pc.in
|
||||||
-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
|
-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
|
||||||
-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
|
-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
|
||||||
|
|
||||||
|
shared: libtree-sitter.$(SOEXT)
|
||||||
|
|
||||||
|
static: libtree-sitter.a
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT)
|
$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT) libtree-stitter.dll.a
|
||||||
|
|
||||||
install: all
|
install: all
|
||||||
install -d '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter '$(DESTDIR)$(PCLIBDIR)' '$(DESTDIR)$(LIBDIR)'
|
install -d '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter '$(DESTDIR)$(PCLIBDIR)' '$(DESTDIR)$(LIBDIR)'
|
||||||
install -m644 lib/include/tree_sitter/api.h '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h
|
install -m644 lib/include/tree_sitter/api.h '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h
|
||||||
install -m644 tree-sitter.pc '$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
|
install -m644 tree-sitter.pc '$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
|
||||||
install -m644 libtree-sitter.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a
|
install -m644 libtree-sitter.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a
|
||||||
|
ifneq ($(findstring mingw32,$(MACHINE)),)
|
||||||
|
install -d '$(DESTDIR)$(BINDIR)'
|
||||||
|
install -m755 libtree-sitter.dll '$(DESTDIR)$(BINDIR)'/libtree-sitter.dll
|
||||||
|
install -m755 libtree-sitter.dll.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.dll.a
|
||||||
|
else
|
||||||
install -m755 libtree-sitter.$(SOEXT) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER)
|
install -m755 libtree-sitter.$(SOEXT) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER)
|
||||||
ln -sf libtree-sitter.$(SOEXTVER) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER_MAJOR)
|
cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER) libtree-sitter.$(SOEXTVER_MAJOR)
|
||||||
ln -sf libtree-sitter.$(SOEXTVER_MAJOR) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT)
|
cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER_MAJOR) libtree-sitter.$(SOEXT)
|
||||||
|
endif
|
||||||
|
|
||||||
uninstall:
|
uninstall:
|
||||||
$(RM) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a \
|
$(RM) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a \
|
||||||
|
|
@ -88,8 +104,9 @@ uninstall:
|
||||||
'$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT) \
|
'$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT) \
|
||||||
'$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h \
|
'$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h \
|
||||||
'$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
|
'$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
|
||||||
|
rmdir '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter
|
||||||
|
|
||||||
.PHONY: all install uninstall clean
|
.PHONY: all shared static install uninstall clean
|
||||||
|
|
||||||
|
|
||||||
##### Dev targets #####
|
##### Dev targets #####
|
||||||
|
|
@ -106,15 +123,15 @@ test-wasm:
|
||||||
lint:
|
lint:
|
||||||
cargo update --workspace --locked --quiet
|
cargo update --workspace --locked --quiet
|
||||||
cargo check --workspace --all-targets
|
cargo check --workspace --all-targets
|
||||||
cargo +nightly fmt --all --check
|
cargo fmt --all --check
|
||||||
cargo +nightly clippy --workspace --all-targets -- -D warnings
|
cargo clippy --workspace --all-targets -- -D warnings
|
||||||
|
|
||||||
lint-web:
|
lint-web:
|
||||||
npm --prefix lib/binding_web ci
|
npm --prefix lib/binding_web ci
|
||||||
npm --prefix lib/binding_web run lint
|
npm --prefix lib/binding_web run lint
|
||||||
|
|
||||||
format:
|
format:
|
||||||
cargo +nightly fmt --all
|
cargo fmt --all
|
||||||
|
|
||||||
changelog:
|
changelog:
|
||||||
@git-cliff --config .github/cliff.toml --prepend CHANGELOG.md --latest --github-token $(shell gh auth token)
|
@git-cliff --config .github/cliff.toml --prepend CHANGELOG.md --latest --github-token $(shell gh auth token)
|
||||||
|
|
|
||||||
|
|
@ -27,6 +27,8 @@ let package = Package(
|
||||||
.headerSearchPath("src"),
|
.headerSearchPath("src"),
|
||||||
.define("_POSIX_C_SOURCE", to: "200112L"),
|
.define("_POSIX_C_SOURCE", to: "200112L"),
|
||||||
.define("_DEFAULT_SOURCE"),
|
.define("_DEFAULT_SOURCE"),
|
||||||
|
.define("_BSD_SOURCE"),
|
||||||
|
.define("_DARWIN_C_SOURCE"),
|
||||||
]),
|
]),
|
||||||
],
|
],
|
||||||
cLanguageStandard: .c11
|
cLanguageStandard: .c11
|
||||||
|
|
|
||||||
|
|
@ -14,8 +14,8 @@ Tree-sitter is a parser generator tool and an incremental parsing library. It ca
|
||||||
## Links
|
## Links
|
||||||
- [Documentation](https://tree-sitter.github.io)
|
- [Documentation](https://tree-sitter.github.io)
|
||||||
- [Rust binding](lib/binding_rust/README.md)
|
- [Rust binding](lib/binding_rust/README.md)
|
||||||
- [WASM binding](lib/binding_web/README.md)
|
- [Wasm binding](lib/binding_web/README.md)
|
||||||
- [Command-line interface](cli/README.md)
|
- [Command-line interface](crates/cli/README.md)
|
||||||
|
|
||||||
[discord]: https://img.shields.io/discord/1063097320771698699?logo=discord&label=discord
|
[discord]: https://img.shields.io/discord/1063097320771698699?logo=discord&label=discord
|
||||||
[matrix]: https://img.shields.io/matrix/tree-sitter-chat%3Amatrix.org?logo=matrix&label=matrix
|
[matrix]: https://img.shields.io/matrix/tree-sitter-chat%3Amatrix.org?logo=matrix&label=matrix
|
||||||
|
|
|
||||||
218
build.zig
218
build.zig
|
|
@ -1,116 +1,142 @@
|
||||||
const std = @import("std");
|
const std = @import("std");
|
||||||
|
|
||||||
pub fn build(b: *std.Build) !void {
|
pub fn build(b: *std.Build) !void {
|
||||||
const target = b.standardTargetOptions(.{});
|
const target = b.standardTargetOptions(.{});
|
||||||
const optimize = b.standardOptimizeOption(.{});
|
const optimize = b.standardOptimizeOption(.{});
|
||||||
|
|
||||||
const wasm = b.option(bool, "enable-wasm", "Enable Wasm support") orelse false;
|
const wasm = b.option(bool, "enable-wasm", "Enable Wasm support") orelse false;
|
||||||
const shared = b.option(bool, "build-shared", "Build a shared library") orelse false;
|
const shared = b.option(bool, "build-shared", "Build a shared library") orelse false;
|
||||||
const amalgamated = b.option(bool, "amalgamated", "Build using an amalgamated source") orelse false;
|
const amalgamated = b.option(bool, "amalgamated", "Build using an amalgamated source") orelse false;
|
||||||
|
|
||||||
const lib: *std.Build.Step.Compile = if (!shared) b.addStaticLibrary(.{
|
const lib: *std.Build.Step.Compile = b.addLibrary(.{
|
||||||
.name = "tree-sitter",
|
.name = "tree-sitter",
|
||||||
.target = target,
|
.linkage = if (shared) .dynamic else .static,
|
||||||
.optimize = optimize,
|
.root_module = b.createModule(.{
|
||||||
.link_libc = true,
|
.target = target,
|
||||||
}) else b.addSharedLibrary(.{
|
.optimize = optimize,
|
||||||
.name = "tree-sitter",
|
.link_libc = true,
|
||||||
.pic = true,
|
.pic = if (shared) true else null,
|
||||||
.target = target,
|
}),
|
||||||
.optimize = optimize,
|
|
||||||
.link_libc = true,
|
|
||||||
});
|
|
||||||
|
|
||||||
if (amalgamated) {
|
|
||||||
lib.addCSourceFile(.{
|
|
||||||
.file = b.path("lib/src/lib.c"),
|
|
||||||
.flags = &.{"-std=c11"},
|
|
||||||
});
|
});
|
||||||
} else {
|
|
||||||
lib.addCSourceFiles(.{
|
|
||||||
.root = b.path("lib/src"),
|
|
||||||
.files = try findSourceFiles(b),
|
|
||||||
.flags = &.{"-std=c11"},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
lib.addIncludePath(b.path("lib/include"));
|
if (amalgamated) {
|
||||||
lib.addIncludePath(b.path("lib/src"));
|
lib.addCSourceFile(.{
|
||||||
lib.addIncludePath(b.path("lib/src/wasm"));
|
.file = b.path("lib/src/lib.c"),
|
||||||
|
.flags = &.{"-std=c11"},
|
||||||
lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L");
|
});
|
||||||
lib.root_module.addCMacro("_DEFAULT_SOURCE", "");
|
} else {
|
||||||
|
const files = try findSourceFiles(b);
|
||||||
if (wasm) {
|
defer b.allocator.free(files);
|
||||||
if (b.lazyDependency(wasmtimeDep(target.result), .{})) |wasmtime| {
|
lib.addCSourceFiles(.{
|
||||||
lib.root_module.addCMacro("TREE_SITTER_FEATURE_WASM", "");
|
.root = b.path("lib/src"),
|
||||||
lib.addSystemIncludePath(wasmtime.path("include"));
|
.files = files,
|
||||||
lib.addLibraryPath(wasmtime.path("lib"));
|
.flags = &.{"-std=c11"},
|
||||||
lib.linkSystemLibrary("wasmtime");
|
});
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
lib.installHeadersDirectory(b.path("lib/include"), ".", .{});
|
lib.addIncludePath(b.path("lib/include"));
|
||||||
|
lib.addIncludePath(b.path("lib/src"));
|
||||||
|
lib.addIncludePath(b.path("lib/src/wasm"));
|
||||||
|
|
||||||
b.installArtifact(lib);
|
lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L");
|
||||||
|
lib.root_module.addCMacro("_DEFAULT_SOURCE", "");
|
||||||
|
lib.root_module.addCMacro("_BSD_SOURCE", "");
|
||||||
|
lib.root_module.addCMacro("_DARWIN_C_SOURCE", "");
|
||||||
|
|
||||||
|
if (wasm) {
|
||||||
|
if (b.lazyDependency(wasmtimeDep(target.result), .{})) |wasmtime| {
|
||||||
|
lib.root_module.addCMacro("TREE_SITTER_FEATURE_WASM", "");
|
||||||
|
lib.addSystemIncludePath(wasmtime.path("include"));
|
||||||
|
lib.addLibraryPath(wasmtime.path("lib"));
|
||||||
|
if (shared) lib.linkSystemLibrary("wasmtime");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
lib.installHeadersDirectory(b.path("lib/include"), ".", .{});
|
||||||
|
|
||||||
|
b.installArtifact(lib);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn wasmtimeDep(target: std.Target) []const u8 {
|
/// Get the name of the wasmtime dependency for this target.
|
||||||
const arch = target.cpu.arch;
|
pub fn wasmtimeDep(target: std.Target) []const u8 {
|
||||||
const os = target.os.tag;
|
const arch = target.cpu.arch;
|
||||||
const abi = target.abi;
|
const os = target.os.tag;
|
||||||
return switch (os) {
|
const abi = target.abi;
|
||||||
.linux => switch (arch) {
|
return @as(?[]const u8, switch (os) {
|
||||||
.x86_64 => switch (abi) {
|
.linux => switch (arch) {
|
||||||
.gnu => "wasmtime_c_api_x86_64_linux",
|
.x86_64 => switch (abi) {
|
||||||
.musl => "wasmtime_c_api_x86_64_musl",
|
.gnu => "wasmtime_c_api_x86_64_linux",
|
||||||
.android => "wasmtime_c_api_x86_64_android",
|
.musl => "wasmtime_c_api_x86_64_musl",
|
||||||
else => null
|
.android => "wasmtime_c_api_x86_64_android",
|
||||||
},
|
else => null,
|
||||||
.aarch64 => switch (abi) {
|
},
|
||||||
.gnu => "wasmtime_c_api_aarch64_linux",
|
.aarch64 => switch (abi) {
|
||||||
.android => "wasmtime_c_api_aarch64_android",
|
.gnu => "wasmtime_c_api_aarch64_linux",
|
||||||
else => null
|
.musl => "wasmtime_c_api_aarch64_musl",
|
||||||
},
|
.android => "wasmtime_c_api_aarch64_android",
|
||||||
.s390x => "wasmtime_c_api_s390x_linux",
|
else => null,
|
||||||
.riscv64 => "wasmtime_c_api_riscv64gc_linux",
|
},
|
||||||
else => null
|
.x86 => switch (abi) {
|
||||||
},
|
.gnu => "wasmtime_c_api_i686_linux",
|
||||||
.windows => switch (arch) {
|
else => null,
|
||||||
.x86_64 => switch (abi) {
|
},
|
||||||
.gnu => "wasmtime_c_api_x86_64_mingw",
|
.arm => switch (abi) {
|
||||||
.msvc => "wasmtime_c_api_x86_64_windows",
|
.gnueabi => "wasmtime_c_api_armv7_linux",
|
||||||
else => null
|
else => null,
|
||||||
},
|
},
|
||||||
else => null
|
.s390x => switch (abi) {
|
||||||
},
|
.gnu => "wasmtime_c_api_s390x_linux",
|
||||||
.macos => switch (arch) {
|
else => null,
|
||||||
.x86_64 => "wasmtime_c_api_x86_64_macos",
|
},
|
||||||
.aarch64 => "wasmtime_c_api_aarch64_macos",
|
.riscv64 => switch (abi) {
|
||||||
else => null
|
.gnu => "wasmtime_c_api_riscv64gc_linux",
|
||||||
},
|
else => null,
|
||||||
else => null
|
},
|
||||||
} orelse std.debug.panic(
|
else => null,
|
||||||
"Unsupported target for wasmtime: {s}-{s}-{s}",
|
},
|
||||||
.{ @tagName(arch), @tagName(os), @tagName(abi) }
|
.windows => switch (arch) {
|
||||||
);
|
.x86_64 => switch (abi) {
|
||||||
|
.gnu => "wasmtime_c_api_x86_64_mingw",
|
||||||
|
.msvc => "wasmtime_c_api_x86_64_windows",
|
||||||
|
else => null,
|
||||||
|
},
|
||||||
|
.aarch64 => switch (abi) {
|
||||||
|
.msvc => "wasmtime_c_api_aarch64_windows",
|
||||||
|
else => null,
|
||||||
|
},
|
||||||
|
.x86 => switch (abi) {
|
||||||
|
.msvc => "wasmtime_c_api_i686_windows",
|
||||||
|
else => null,
|
||||||
|
},
|
||||||
|
else => null,
|
||||||
|
},
|
||||||
|
.macos => switch (arch) {
|
||||||
|
.x86_64 => "wasmtime_c_api_x86_64_macos",
|
||||||
|
.aarch64 => "wasmtime_c_api_aarch64_macos",
|
||||||
|
else => null,
|
||||||
|
},
|
||||||
|
else => null,
|
||||||
|
}) orelse std.debug.panic(
|
||||||
|
"Unsupported target for wasmtime: {s}-{s}-{s}",
|
||||||
|
.{ @tagName(arch), @tagName(os), @tagName(abi) },
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn findSourceFiles(b: *std.Build) ![]const []const u8 {
|
fn findSourceFiles(b: *std.Build) ![]const []const u8 {
|
||||||
var sources = std.ArrayList([]const u8).init(b.allocator);
|
var sources: std.ArrayListUnmanaged([]const u8) = .empty;
|
||||||
|
|
||||||
var dir = try b.build_root.handle.openDir("lib/src", .{ .iterate = true });
|
var dir = try b.build_root.handle.openDir("lib/src", .{ .iterate = true });
|
||||||
var iter = dir.iterate();
|
var iter = dir.iterate();
|
||||||
defer dir.close();
|
defer dir.close();
|
||||||
|
|
||||||
while (try iter.next()) |entry| {
|
while (try iter.next()) |entry| {
|
||||||
if (entry.kind != .file) continue;
|
if (entry.kind != .file) continue;
|
||||||
const file = entry.name;
|
const file = entry.name;
|
||||||
const ext = std.fs.path.extension(file);
|
const ext = std.fs.path.extension(file);
|
||||||
if (std.mem.eql(u8, ext, ".c") and !std.mem.eql(u8, file, "lib.c")) {
|
if (std.mem.eql(u8, ext, ".c") and !std.mem.eql(u8, file, "lib.c")) {
|
||||||
try sources.append(b.dupe(file));
|
try sources.append(b.allocator, b.dupe(file));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
return sources.items;
|
return sources.toOwnedSlice(b.allocator);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
157
build.zig.zon
157
build.zig.zon
|
|
@ -1,69 +1,96 @@
|
||||||
.{
|
.{
|
||||||
.name = "tree-sitter",
|
.name = .tree_sitter,
|
||||||
.version = "0.25.1",
|
.fingerprint = 0x841224b447ac0d4f,
|
||||||
.paths = .{
|
.version = "0.27.0",
|
||||||
"build.zig",
|
.minimum_zig_version = "0.14.1",
|
||||||
"build.zig.zon",
|
.paths = .{
|
||||||
"lib/src",
|
"build.zig",
|
||||||
"lib/include",
|
"build.zig.zon",
|
||||||
"README.md",
|
"lib/src",
|
||||||
"LICENSE",
|
"lib/include",
|
||||||
},
|
"README.md",
|
||||||
.dependencies = .{
|
"LICENSE",
|
||||||
.wasmtime_c_api_aarch64_android = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-android-c-api.tar.xz",
|
|
||||||
.hash = "12204c77979ad8291c6e395d695a824fb053ffdfeb2cc21de95fffb09f77d77188d1",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
},
|
||||||
.wasmtime_c_api_aarch64_linux = .{
|
.dependencies = .{
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-linux-c-api.tar.xz",
|
.wasmtime_c_api_aarch64_android = .{
|
||||||
.hash = "12203a8e3d823490186fb1e230d54f575148713088e914926305ee5678790b731bba",
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-android-c-api.tar.xz",
|
||||||
.lazy = true,
|
.hash = "N-V-__8AAIfPIgdw2YnV3QyiFQ2NHdrxrXzzCdjYJyxJDOta",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_aarch64_linux = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-linux-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAIt97QZi7Pf7nNJ2mVY6uxA80Klyuvvtop3pLMRK",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_aarch64_macos = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-macos-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAAO48QQf91w9RmmUDHTja8DrXZA1n6Bmc8waW3qe",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_aarch64_musl = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-musl-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAI196wa9pwADoA2RbCDp5F7bKQg1iOPq6gIh8-FH",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_aarch64_windows = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-windows-c-api.zip",
|
||||||
|
.hash = "N-V-__8AAC9u4wXfqd1Q6XyQaC8_DbQZClXux60Vu5743N05",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_armv7_linux = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-armv7-linux-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAHXe8gWs3s83Cc5G6SIq0_jWxj8fGTT5xG4vb6-x",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_i686_linux = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-linux-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAN2pzgUUfulRCYnipSfis9IIYHoTHVlieLRmKuct",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_i686_windows = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-windows-c-api.zip",
|
||||||
|
.hash = "N-V-__8AAJu0YAUUTFBLxFIOi-MSQVezA6MMkpoFtuaf2Quf",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_riscv64gc_linux = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-riscv64gc-linux-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAG8m-gc3E3AIImtTZ3l1c7HC6HUWazQ9OH5KACX4",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_s390x_linux = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-s390x-linux-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAH314gd-gE4IBp2uvAL3gHeuW1uUZjMiLLeUdXL_",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_x86_64_android = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-android-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAIPNRwfNkznebrcGb0IKUe7f35bkuZEYOjcx6q3f",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_x86_64_linux = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-linux-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAI8EDwcyTtk_Afhk47SEaqfpoRqGkJeZpGs69ChF",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_x86_64_macos = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-macos-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAGtGNgVaOpHSxC22IjrampbRIy6lLwscdcAE8nG1",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_x86_64_mingw = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-mingw-c-api.zip",
|
||||||
|
.hash = "N-V-__8AAPS2PAbVix50L6lnddlgazCPTz3whLUFk1qnRtnZ",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_x86_64_musl = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-musl-c-api.tar.xz",
|
||||||
|
.hash = "N-V-__8AAF-WEQe0nzvi09PgusM5i46FIuCKJmIDWUleWgQ3",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
|
.wasmtime_c_api_x86_64_windows = .{
|
||||||
|
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-windows-c-api.zip",
|
||||||
|
.hash = "N-V-__8AAKGNXwbpJQsn0_6kwSIVDDWifSg8cBzf7T2RzsC9",
|
||||||
|
.lazy = true,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
.wasmtime_c_api_aarch64_macos = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-macos-c-api.tar.xz",
|
|
||||||
.hash = "122043e8b19079b855b12674b9e3d4a28dc5c399c43b62fbeb8bdf0fdb4ef2d1d38c",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_riscv64gc_linux = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-riscv64gc-linux-c-api.tar.xz",
|
|
||||||
.hash = "12209d07031cf33271bf4b0c63df407b535cd5d65c6402bd6f80d99de439d6feb89b",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_s390x_linux = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-s390x-linux-c-api.tar.xz",
|
|
||||||
.hash = "122033f7d9b04f429063d9b2d9ac75a7a00fce02c425e578208f54ddc40edaa1e355",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_x86_64_android = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-android-c-api.tar.xz",
|
|
||||||
.hash = "122093cb33df8e09e70b2d1dc09897a0388915b942918389b10bf23f9684bdb6f047",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_x86_64_linux = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-linux-c-api.tar.xz",
|
|
||||||
.hash = "12209210346e94bf6ef8e249fa5d3f1a84f95050ed19665ac8422a15b5f2246d83af",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_x86_64_macos = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-macos-c-api.tar.xz",
|
|
||||||
.hash = "12208f875dd3a89092485762f3b184707b3cccae85a84e2ffd38c138cc3a3fd90447",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_x86_64_mingw = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-mingw-c-api.zip",
|
|
||||||
.hash = "1220bea757df3a777b6ec6322fc498e4ece20d466eedc5e2a3610b338849553cd94d",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_x86_64_musl = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-musl-c-api.tar.xz",
|
|
||||||
.hash = "1220c9596e6a63edcc3234c0611d0cbac724bf30ac9a0fbaf402c7da649b278b1322",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
.wasmtime_c_api_x86_64_windows = .{
|
|
||||||
.url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-windows-c-api.zip",
|
|
||||||
.hash = "1220440ccb01d72989cf1a47728897a35fc8dd31673cce598f2d62c58e2c3228b0ed",
|
|
||||||
.lazy = true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -1,32 +0,0 @@
|
||||||
use std::{env, path::PathBuf, process::Command};
|
|
||||||
|
|
||||||
fn main() {
|
|
||||||
if let Some(git_sha) = read_git_sha() {
|
|
||||||
println!("cargo:rustc-env=BUILD_SHA={git_sha}");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// This is copied from the build.rs in parent directory. This should be updated if the
|
|
||||||
// parent build.rs gets fixes.
|
|
||||||
fn read_git_sha() -> Option<String> {
|
|
||||||
let crate_path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());
|
|
||||||
|
|
||||||
if !crate_path
|
|
||||||
.parent()?
|
|
||||||
.parent()
|
|
||||||
.is_some_and(|p| p.join(".git").exists())
|
|
||||||
{
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
Command::new("git")
|
|
||||||
.args(["rev-parse", "HEAD"])
|
|
||||||
.current_dir(crate_path)
|
|
||||||
.output()
|
|
||||||
.map_or(None, |output| {
|
|
||||||
if !output.status.success() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
Some(String::from_utf8_lossy(&output.stdout).to_string())
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
|
|
||||||
|
|
@ -1 +0,0 @@
|
||||||
4.0.1
|
|
||||||
1052
cli/src/init.rs
1052
cli/src/init.rs
File diff suppressed because it is too large
Load diff
|
|
@ -1,30 +0,0 @@
|
||||||
use log::{LevelFilter, Log, Metadata, Record};
|
|
||||||
|
|
||||||
#[allow(dead_code)]
|
|
||||||
struct Logger {
|
|
||||||
pub filter: Option<String>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Log for Logger {
|
|
||||||
fn enabled(&self, _: &Metadata) -> bool {
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
fn log(&self, record: &Record) {
|
|
||||||
eprintln!(
|
|
||||||
"[{}] {}",
|
|
||||||
record
|
|
||||||
.module_path()
|
|
||||||
.unwrap_or_default()
|
|
||||||
.trim_start_matches("rust_tree_sitter_cli::"),
|
|
||||||
record.args()
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn flush(&self) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn init() {
|
|
||||||
log::set_boxed_logger(Box::new(Logger { filter: None })).unwrap();
|
|
||||||
log::set_max_level(LevelFilter::Info);
|
|
||||||
}
|
|
||||||
|
|
@ -1,410 +0,0 @@
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<title>tree-sitter THE_LANGUAGE_NAME</title>
|
|
||||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.css">
|
|
||||||
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.css">
|
|
||||||
<link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-32x32.png"
|
|
||||||
sizes="32x32" />
|
|
||||||
<link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-16x16.png"
|
|
||||||
sizes="16x16" />
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
<div id="playground-container" style="visibility: hidden;">
|
|
||||||
<header>
|
|
||||||
<div class="header-item">
|
|
||||||
<span class="language-name">Language: THE_LANGUAGE_NAME</span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="header-item">
|
|
||||||
<input id="logging-checkbox" type="checkbox">
|
|
||||||
<label for="logging-checkbox">log</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="header-item">
|
|
||||||
<input id="anonymous-nodes-checkbox" type="checkbox">
|
|
||||||
<label for="anonymous-nodes-checkbox">show anonymous nodes</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="header-item">
|
|
||||||
<input id="query-checkbox" type="checkbox">
|
|
||||||
<label for="query-checkbox">query</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="header-item">
|
|
||||||
<input id="accessibility-checkbox" type="checkbox">
|
|
||||||
<label for="accessibility-checkbox">accessibility</label>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="header-item">
|
|
||||||
<label for="update-time">parse time: </label>
|
|
||||||
<span id="update-time"></span>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class="header-item">
|
|
||||||
<a href="https://tree-sitter.github.io/tree-sitter/7-playground.html#about">(?)</a>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<select id="language-select" style="display: none;">
|
|
||||||
<option value="parser">Parser</option>
|
|
||||||
</select>
|
|
||||||
|
|
||||||
<div class="header-item">
|
|
||||||
<button id="theme-toggle" class="theme-toggle" aria-label="Toggle theme">
|
|
||||||
<svg class="sun-icon" viewBox="0 0 24 24" width="16" height="16">
|
|
||||||
<path fill="currentColor"
|
|
||||||
d="M12 17.5a5.5 5.5 0 1 0 0-11 5.5 5.5 0 0 0 0 11zm0 1.5a7 7 0 1 1 0-14 7 7 0 0 1 0 14zm0-16a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0V4a1 1 0 0 1 1-1zm0 15a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0v-2a1 1 0 0 1 1-1zm9-9a1 1 0 0 1-1 1h-2a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1zM4 12a1 1 0 0 1-1 1H1a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1z" />
|
|
||||||
</svg>
|
|
||||||
<svg class="moon-icon" viewBox="0 0 24 24" width="16" height="16">
|
|
||||||
<path fill="currentColor"
|
|
||||||
d="M12.1 22c-5.5 0-10-4.5-10-10s4.5-10 10-10c.2 0 .3 0 .5.1-1.3 1.4-2 3.2-2 5.2 0 4.1 3.4 7.5 7.5 7.5 2 0 3.8-.7 5.2-2 .1.2.1.3.1.5 0 5.4-4.5 9.7-10 9.7z" />
|
|
||||||
</svg>
|
|
||||||
</button>
|
|
||||||
</div>
|
|
||||||
</header>
|
|
||||||
|
|
||||||
<main>
|
|
||||||
<div id="input-pane">
|
|
||||||
<div class="panel-header">Code</div>
|
|
||||||
<div id="code-container">
|
|
||||||
<textarea id="code-input"></textarea>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div id="query-container" style="visibility: hidden; position: absolute;">
|
|
||||||
<div class="panel-header">Query</div>
|
|
||||||
<textarea id="query-input"></textarea>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div id="output-container-scroll">
|
|
||||||
<div class="panel-header">Tree</div>
|
|
||||||
<pre id="output-container" class="highlight"></pre>
|
|
||||||
</div>
|
|
||||||
</main>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<script src="https://code.jquery.com/jquery-3.3.1.min.js" crossorigin="anonymous">
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.js"></script>
|
|
||||||
<script src="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.js"></script>
|
|
||||||
|
|
||||||
<script>LANGUAGE_BASE_URL = "";</script>
|
|
||||||
<script type="module" src="playground.js"></script>
|
|
||||||
<script type="module">
|
|
||||||
import * as TreeSitter from './tree-sitter.js';
|
|
||||||
window.TreeSitter = TreeSitter;
|
|
||||||
setTimeout(() => window.initializePlayground({local: true}), 1)
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<style>
|
|
||||||
/* Base Variables */
|
|
||||||
:root {
|
|
||||||
--light-bg: #f9f9f9;
|
|
||||||
--light-border: #e0e0e0;
|
|
||||||
--light-text: #333;
|
|
||||||
--light-hover-border: #c1c1c1;
|
|
||||||
--light-scrollbar-track: #f1f1f1;
|
|
||||||
--light-scrollbar-thumb: #c1c1c1;
|
|
||||||
--light-scrollbar-thumb-hover: #a8a8a8;
|
|
||||||
|
|
||||||
--dark-bg: #1d1f21;
|
|
||||||
--dark-border: #2d2d2d;
|
|
||||||
--dark-text: #c5c8c6;
|
|
||||||
--dark-panel-bg: #252526;
|
|
||||||
--dark-code-bg: #1e1e1e;
|
|
||||||
--dark-scrollbar-track: #25282c;
|
|
||||||
--dark-scrollbar-thumb: #4a4d51;
|
|
||||||
--dark-scrollbar-thumb-hover: #5a5d61;
|
|
||||||
|
|
||||||
--primary-color: #0550ae;
|
|
||||||
--primary-color-alpha: rgba(5, 80, 174, 0.1);
|
|
||||||
--primary-color-alpha-dark: rgba(121, 192, 255, 0.1);
|
|
||||||
--selection-color: rgba(39, 95, 255, 0.3);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Theme Colors */
|
|
||||||
[data-theme="dark"] {
|
|
||||||
--bg-color: var(--dark-bg);
|
|
||||||
--border-color: var(--dark-border);
|
|
||||||
--text-color: var(--dark-text);
|
|
||||||
--panel-bg: var(--dark-panel-bg);
|
|
||||||
--code-bg: var(--dark-code-bg);
|
|
||||||
}
|
|
||||||
|
|
||||||
[data-theme="light"] {
|
|
||||||
--bg-color: var(--light-bg);
|
|
||||||
--border-color: var(--light-border);
|
|
||||||
--text-color: var(--light-text);
|
|
||||||
--panel-bg: white;
|
|
||||||
--code-bg: white;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Base Styles */
|
|
||||||
body {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
|
|
||||||
background-color: var(--bg-color);
|
|
||||||
color: var(--text-color);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Layout */
|
|
||||||
#playground-container {
|
|
||||||
width: 100%;
|
|
||||||
height: 100vh;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
background-color: var(--bg-color);
|
|
||||||
}
|
|
||||||
|
|
||||||
header {
|
|
||||||
padding: 16px 24px;
|
|
||||||
border-bottom: 1px solid var(--border-color);
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 20px;
|
|
||||||
background-color: var(--panel-bg);
|
|
||||||
font-size: 14px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.header-item {
|
|
||||||
display: flex;
|
|
||||||
align-items: center;
|
|
||||||
gap: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.language-name {
|
|
||||||
font-weight: 600;
|
|
||||||
}
|
|
||||||
|
|
||||||
main {
|
|
||||||
flex: 1;
|
|
||||||
display: flex;
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
#input-pane {
|
|
||||||
width: 50%;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
border-right: 1px solid var(--border-color);
|
|
||||||
background-color: var(--panel-bg);
|
|
||||||
overflow: hidden;
|
|
||||||
}
|
|
||||||
|
|
||||||
#code-container {
|
|
||||||
flex: 1;
|
|
||||||
min-height: 0;
|
|
||||||
position: relative;
|
|
||||||
border-bottom: 1px solid var(--border-color);
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
#query-container:not([style*="visibility: hidden"]) {
|
|
||||||
flex: 1;
|
|
||||||
min-height: 0;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
#query-container .panel-header {
|
|
||||||
flex: 0 0 auto;
|
|
||||||
}
|
|
||||||
|
|
||||||
#query-container .CodeMirror {
|
|
||||||
flex: 1;
|
|
||||||
position: relative;
|
|
||||||
min-height: 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container-scroll {
|
|
||||||
width: 50%;
|
|
||||||
overflow: auto;
|
|
||||||
background-color: var(--panel-bg);
|
|
||||||
padding: 0;
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container {
|
|
||||||
font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
|
|
||||||
line-height: 1.5;
|
|
||||||
margin: 0;
|
|
||||||
padding: 16px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.panel-header {
|
|
||||||
padding: 8px 16px;
|
|
||||||
font-weight: 600;
|
|
||||||
font-size: 14px;
|
|
||||||
border-bottom: 1px solid var(--border-color);
|
|
||||||
background-color: var(--panel-bg);
|
|
||||||
}
|
|
||||||
|
|
||||||
.CodeMirror {
|
|
||||||
position: absolute;
|
|
||||||
top: 0;
|
|
||||||
left: 0;
|
|
||||||
right: 0;
|
|
||||||
bottom: 0;
|
|
||||||
height: 100%;
|
|
||||||
font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
|
|
||||||
font-size: 14px;
|
|
||||||
line-height: 1.6;
|
|
||||||
background-color: var(--code-bg) !important;
|
|
||||||
color: var(--text-color) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
.query-error {
|
|
||||||
text-decoration: underline red dashed;
|
|
||||||
-webkit-text-decoration: underline red dashed;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Scrollbars */
|
|
||||||
::-webkit-scrollbar {
|
|
||||||
width: 8px;
|
|
||||||
height: 8px;
|
|
||||||
}
|
|
||||||
|
|
||||||
::-webkit-scrollbar-track {
|
|
||||||
border-radius: 4px;
|
|
||||||
background: var(--light-scrollbar-track);
|
|
||||||
}
|
|
||||||
|
|
||||||
::-webkit-scrollbar-thumb {
|
|
||||||
border-radius: 4px;
|
|
||||||
background: var(--light-scrollbar-thumb);
|
|
||||||
}
|
|
||||||
|
|
||||||
::-webkit-scrollbar-thumb:hover {
|
|
||||||
background: var(--light-scrollbar-thumb-hover);
|
|
||||||
}
|
|
||||||
|
|
||||||
[data-theme="dark"] {
|
|
||||||
::-webkit-scrollbar-track {
|
|
||||||
background: var(--dark-scrollbar-track) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
::-webkit-scrollbar-thumb {
|
|
||||||
background: var(--dark-scrollbar-thumb) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
::-webkit-scrollbar-thumb:hover {
|
|
||||||
background: var(--dark-scrollbar-thumb-hover) !important;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Theme Toggle */
|
|
||||||
.theme-toggle {
|
|
||||||
background: none;
|
|
||||||
border: 1px solid var(--border-color);
|
|
||||||
border-radius: 4px;
|
|
||||||
padding: 6px;
|
|
||||||
cursor: pointer;
|
|
||||||
color: var(--text-color);
|
|
||||||
}
|
|
||||||
|
|
||||||
.theme-toggle:hover {
|
|
||||||
background-color: var(--primary-color-alpha);
|
|
||||||
}
|
|
||||||
|
|
||||||
[data-theme="light"] .moon-icon,
|
|
||||||
[data-theme="dark"] .sun-icon {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Form Elements */
|
|
||||||
input[type="checkbox"] {
|
|
||||||
margin-right: 6px;
|
|
||||||
vertical-align: middle;
|
|
||||||
}
|
|
||||||
|
|
||||||
label {
|
|
||||||
font-size: 14px;
|
|
||||||
margin-right: 16px;
|
|
||||||
cursor: pointer;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a {
|
|
||||||
cursor: pointer;
|
|
||||||
text-decoration: none;
|
|
||||||
color: #040404;
|
|
||||||
padding: 2px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a.node-link.named {
|
|
||||||
color: #0550ae;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a.node-link.anonymous {
|
|
||||||
color: #116329;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a.node-link.anonymous:before {
|
|
||||||
content: '"';
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a.node-link.anonymous:after {
|
|
||||||
content: '"';
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a.node-link.error {
|
|
||||||
color: #cf222e;
|
|
||||||
}
|
|
||||||
|
|
||||||
#output-container a.highlighted {
|
|
||||||
background-color: #d9d9d9;
|
|
||||||
color: red;
|
|
||||||
border-radius: 3px;
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* Dark Theme Node Colors */
|
|
||||||
[data-theme="dark"] {
|
|
||||||
& #output-container a {
|
|
||||||
color: #d4d4d4;
|
|
||||||
}
|
|
||||||
|
|
||||||
& #output-container a.node-link.named {
|
|
||||||
color: #79c0ff;
|
|
||||||
}
|
|
||||||
|
|
||||||
& #output-container a.node-link.anonymous {
|
|
||||||
color: #7ee787;
|
|
||||||
}
|
|
||||||
|
|
||||||
& #output-container a.node-link.error {
|
|
||||||
color: #ff7b72;
|
|
||||||
}
|
|
||||||
|
|
||||||
& #output-container a.highlighted {
|
|
||||||
background-color: #373b41;
|
|
||||||
color: red;
|
|
||||||
}
|
|
||||||
|
|
||||||
& .CodeMirror {
|
|
||||||
background-color: var(--dark-code-bg) !important;
|
|
||||||
color: var(--dark-text) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
& .CodeMirror-gutters {
|
|
||||||
background-color: var(--dark-panel-bg) !important;
|
|
||||||
border-color: var(--dark-border) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
& .CodeMirror-cursor {
|
|
||||||
border-color: var(--dark-text) !important;
|
|
||||||
}
|
|
||||||
|
|
||||||
& .CodeMirror-selected {
|
|
||||||
background-color: rgba(255, 255, 255, 0.1) !important;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</body>
|
|
||||||
|
|
@ -1,42 +0,0 @@
"""PARSER_DESCRIPTION"""

from importlib.resources import files as _files

from ._binding import language


def _get_query(name, file):
    query = _files(f"{__package__}.queries") / file
    globals()[name] = query.read_text()
    return globals()[name]


def __getattr__(name):
    # NOTE: uncomment these to include any queries that this grammar contains:

    # if name == "HIGHLIGHTS_QUERY":
    #     return _get_query("HIGHLIGHTS_QUERY", "highlights.scm")
    # if name == "INJECTIONS_QUERY":
    #     return _get_query("INJECTIONS_QUERY", "injections.scm")
    # if name == "LOCALS_QUERY":
    #     return _get_query("LOCALS_QUERY", "locals.scm")
    # if name == "TAGS_QUERY":
    #     return _get_query("TAGS_QUERY", "tags.scm")

    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


__all__ = [
    "language",
    # "HIGHLIGHTS_QUERY",
    # "INJECTIONS_QUERY",
    # "LOCALS_QUERY",
    # "TAGS_QUERY",
]


def __dir__():
    return sorted(__all__ + [
        "__all__", "__builtins__", "__cached__", "__doc__", "__file__",
        "__loader__", "__name__", "__package__", "__path__", "__spec__",
    ])
@ -1,10 +0,0 @@
from typing import Final

# NOTE: uncomment these to include any queries that this grammar contains:

# HIGHLIGHTS_QUERY: Final[str]
# INJECTIONS_QUERY: Final[str]
# LOCALS_QUERY: Final[str]
# TAGS_QUERY: Final[str]

def language() -> object: ...
@ -1,9 +0,0 @@
const assert = require("node:assert");
const { test } = require("node:test");

const Parser = require("tree-sitter");

test("can load grammar", () => {
  const parser = new Parser();
  assert.doesNotThrow(() => parser.setLanguage(require(".")));
});
@ -1,21 +0,0 @@
fn main() {
    let src_dir = std::path::Path::new("src");

    let mut c_config = cc::Build::new();
    c_config.std("c11").include(src_dir);

    #[cfg(target_env = "msvc")]
    c_config.flag("-utf-8");

    let parser_path = src_dir.join("parser.c");
    c_config.file(&parser_path);
    println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());

    let scanner_path = src_dir.join("scanner.c");
    if scanner_path.exists() {
        c_config.file(&scanner_path);
        println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
    }

    c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
}
@ -1,79 +0,0 @@
const std = @import("std");

pub fn build(b: *std.Build) !void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    const shared = b.option(bool, "build-shared", "Build a shared library") orelse true;
    const reuse_alloc = b.option(bool, "reuse-allocator", "Reuse the library allocator") orelse false;

    const lib: *std.Build.Step.Compile = if (shared) b.addSharedLibrary(.{
        .name = "tree-sitter-PARSER_NAME",
        .pic = true,
        .target = target,
        .optimize = optimize,
        .link_libc = true,
    }) else b.addStaticLibrary(.{
        .name = "tree-sitter-PARSER_NAME",
        .target = target,
        .optimize = optimize,
        .link_libc = true,
    });

    lib.addCSourceFile(.{
        .file = b.path("src/parser.c"),
        .flags = &.{"-std=c11"},
    });
    if (hasScanner(b.build_root.handle)) {
        lib.addCSourceFile(.{
            .file = b.path("src/scanner.c"),
            .flags = &.{"-std=c11"},
        });
    }

    if (reuse_alloc) {
        lib.root_module.addCMacro("TREE_SITTER_REUSE_ALLOCATOR", "");
    }
    if (optimize == .Debug) {
        lib.root_module.addCMacro("TREE_SITTER_DEBUG", "");
    }

    lib.addIncludePath(b.path("src"));

    b.installArtifact(lib);
    b.installFile("src/node-types.json", "node-types.json");
    b.installDirectory(.{ .source_dir = b.path("queries"), .install_dir = .prefix, .install_subdir = "queries", .include_extensions = &.{"scm"} });

    const module = b.addModule("tree-sitter-PARSER_NAME", .{
        .root_source_file = b.path("bindings/zig/root.zig"),
        .target = target,
        .optimize = optimize,
    });
    module.linkLibrary(lib);

    const ts_dep = b.dependency("tree-sitter", .{});
    const ts_mod = ts_dep.module("tree-sitter");
    module.addImport("tree-sitter", ts_mod);

    // ╭─────────────────╮
    // │      Tests      │
    // ╰─────────────────╯

    const tests = b.addTest(.{
        .root_source_file = b.path("bindings/zig/root.zig"),
        .target = target,
        .optimize = optimize,
    });
    tests.linkLibrary(lib);
    tests.root_module.addImport("tree-sitter", ts_mod);

    const run_tests = b.addRunArtifact(tests);

    const test_step = b.step("test", "Run unit tests");
    test_step.dependOn(&run_tests.step);
}

inline fn hasScanner(dir: std.fs.Dir) bool {
    dir.access("src/scanner.c", .{}) catch return false;
    return true;
}
@ -1,17 +0,0 @@
.{
    .name = "tree-sitter-PARSER_NAME",
    .version = "PARSER_VERSION",
    .dependencies = .{ .@"tree-sitter" = .{
        .url = "https://github.com/tree-sitter/zig-tree-sitter/archive/refs/tags/v0.25.0.tar.gz",
        .hash = "12201a8d5e840678bbbf5128e605519c4024af422295d68e2ba2090e675328e5811d",
    } },
    .paths = .{
        "build.zig",
        "build.zig.zon",
        "bindings/zig",
        "src",
        "queries",
        "LICENSE",
        "README.md",
    },
}
cli/src/templates/index.d.ts (vendored, 27 lines)

@ -1,27 +0,0 @@
type BaseNode = {
  type: string;
  named: boolean;
};

type ChildNode = {
  multiple: boolean;
  required: boolean;
  types: BaseNode[];
};

type NodeInfo =
  | (BaseNode & {
      subtypes: BaseNode[];
    })
  | (BaseNode & {
      fields: { [name: string]: ChildNode };
      children: ChildNode[];
    });

type Language = {
  language: unknown;
  nodeTypeInfo: NodeInfo[];
};

declare const language: Language;
export = language;
@ -1,11 +0,0 @@
const root = require("path").join(__dirname, "..", "..");

module.exports =
  typeof process.versions.bun === "string"
    // Support `bun build --compile` by being statically analyzable enough to find the .node file at build-time
    ? require(`../../prebuilds/${process.platform}-${process.arch}/tree-sitter-KEBAB_PARSER_NAME.node`)
    : require("node-gyp-build")(root);

try {
  module.exports.nodeTypeInfo = require("../../src/node-types.json");
} catch (_) {}
@ -1,19 +0,0 @@
const testing = @import("std").testing;

const ts = @import("tree-sitter");
const Language = ts.Language;
const Parser = ts.Parser;

pub extern fn tree_sitter_PARSER_NAME() callconv(.C) *const Language;

pub export fn language() *const Language {
    return tree_sitter_PARSER_NAME();
}

test "can load grammar" {
    const parser = Parser.create();
    defer parser.destroy();
    try testing.expectEqual(parser.setLanguage(language()), void{});
    try testing.expectEqual(parser.getLanguage(), tree_sitter_PARSER_NAME());
}
cli/src/test.rs (1584 lines): file diff suppressed because it is too large.
@ -1,278 +0,0 @@
use std::{
    future::Future,
    pin::{pin, Pin},
    ptr,
    task::{self, Context, Poll, RawWaker, RawWakerVTable, Waker},
};

use tree_sitter::Parser;

use super::helpers::fixtures::get_language;

#[test]
fn test_node_in_fut() {
    let (ret, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("bash");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let root = tree.root_node();
        let root_ref = &root;

        let fut_val_fn = || async {
            yield_now().await;
            root.child(0).unwrap().kind()
        };

        yield_now().await;

        let fut_ref_fn = || async {
            yield_now().await;
            root_ref.child(0).unwrap().kind()
        };

        let f1 = fut_val_fn().await;
        let f2 = fut_ref_fn().await;
        assert_eq!(f1, f2);

        let fut_val = async {
            yield_now().await;
            root.child(0).unwrap().kind()
        };

        let fut_ref = async {
            yield_now().await;
            root_ref.child(0).unwrap().kind()
        };

        let f1 = fut_val.await;
        let f2 = fut_ref.await;
        assert_eq!(f1, f2);

        f1
    })
    .join();
    assert_eq!(ret, "comment");
    assert_eq!(pended, 5);
}

#[test]
fn test_node_and_cursor_ref_in_fut() {
    let ((), pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("c");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let root = tree.root_node();
        let root_ref = &root;

        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;

        cursor_ref.goto_first_child();

        let fut_val = async {
            yield_now().await;
            let _ = root.to_sexp();
        };

        yield_now().await;

        let fut_ref = async {
            yield_now().await;
            let _ = root_ref.to_sexp();
            cursor_ref.goto_first_child();
        };

        fut_val.await;
        fut_ref.await;

        cursor_ref.goto_first_child();
    })
    .join();
    assert_eq!(pended, 3);
}

#[test]
fn test_node_and_cursor_ref_in_fut_with_fut_fabrics() {
    let ((), pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("javascript");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let root = tree.root_node();
        let root_ref = &root;

        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;

        cursor_ref.goto_first_child();

        let fut_val = || async {
            yield_now().await;
            let _ = root.to_sexp();
        };

        yield_now().await;

        let fut_ref = || async move {
            yield_now().await;
            let _ = root_ref.to_sexp();
            cursor_ref.goto_first_child();
        };

        fut_val().await;
        fut_val().await;
        fut_ref().await;
    })
    .join();
    assert_eq!(pended, 4);
}

#[test]
fn test_node_and_cursor_ref_in_fut_with_inner_spawns() {
    let (ret, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("rust");
        parser.set_language(&language).unwrap();

        let tree = parser.parse("#", None).unwrap();

        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;

        cursor_ref.goto_first_child();

        let fut_val = || {
            let tree = tree.clone();
            async move {
                let root = tree.root_node();
                let mut cursor = tree.walk();
                let cursor_ref = &mut cursor;
                yield_now().await;
                let _ = root.to_sexp();
                cursor_ref.goto_first_child();
            }
        };

        yield_now().await;

        let fut_ref = || {
            let tree = tree.clone();
            async move {
                let root = tree.root_node();
                let root_ref = &root;
                let mut cursor = tree.walk();
                let cursor_ref = &mut cursor;
                yield_now().await;
                let _ = root_ref.to_sexp();
                cursor_ref.goto_first_child();
            }
        };

        let ((), p1) = tokio_like_spawn(fut_val()).await.unwrap();
        let ((), p2) = tokio_like_spawn(fut_ref()).await.unwrap();

        cursor_ref.goto_first_child();

        fut_val().await;
        fut_val().await;
        fut_ref().await;

        cursor_ref.goto_first_child();

        p1 + p2
    })
    .join();
    assert_eq!(pended, 4);
    assert_eq!(ret, 2);
}

fn tokio_like_spawn<T>(future: T) -> JoinHandle<(T::Output, usize)>
where
    T: Future + Send + 'static,
    T::Output: Send + 'static,
{
    // No runtime, just noop waker

    let waker = noop_waker();
    let mut cx = task::Context::from_waker(&waker);

    let mut pending = 0;
    let mut future = pin!(future);
    let ret = loop {
        match future.as_mut().poll(&mut cx) {
            Poll::Pending => pending += 1,
            Poll::Ready(r) => {
                break r;
            }
        }
    };
    JoinHandle::new((ret, pending))
}

async fn yield_now() {
    struct SimpleYieldNow {
        yielded: bool,
    }

    impl Future for SimpleYieldNow {
        type Output = ();

        fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
            cx.waker().wake_by_ref();
            if self.yielded {
                return Poll::Ready(());
            }
            self.yielded = true;
            Poll::Pending
        }
    }

    SimpleYieldNow { yielded: false }.await;
}

pub fn noop_waker() -> Waker {
    const VTABLE: RawWakerVTable = RawWakerVTable::new(
        // Cloning just returns a new no-op raw waker
        |_| RAW,
        // `wake` does nothing
        |_| {},
        // `wake_by_ref` does nothing
        |_| {},
        // Dropping does nothing as we don't allocate anything
        |_| {},
    );
    const RAW: RawWaker = RawWaker::new(ptr::null(), &VTABLE);
    unsafe { Waker::from_raw(RAW) }
}

struct JoinHandle<T> {
    data: Option<T>,
}

impl<T> JoinHandle<T> {
    #[must_use]
    const fn new(data: T) -> Self {
        Self { data: Some(data) }
    }

    fn join(&mut self) -> T {
        self.data.take().unwrap()
    }
}

impl<T: Unpin> Future for JoinHandle<T> {
    type Output = std::result::Result<T, ()>;

    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
        let data = self.get_mut().data.take().unwrap();
        Poll::Ready(Ok(data))
    }
}
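Aside: the no-op waker plus polling loop in the file above is the whole trick behind `tokio_like_spawn`. A minimal standalone sketch of the same idea follows; the helper name `block_on_with_noop_waker` is illustrative and not part of the codebase, and it reuses the `noop_waker()` defined above.

use std::{future::Future, pin::pin, task::{Context, Poll}};

// Illustrative helper: drive a future to completion without any runtime by
// polling it in a loop with a waker that does nothing. Since nothing ever
// wakes the task, we simply poll again whenever it returns `Pending`.
fn block_on_with_noop_waker<F: Future>(future: F) -> F::Output {
    let waker = noop_waker(); // defined in the test file above
    let mut cx = Context::from_waker(&waker);
    let mut future = pin!(future);
    loop {
        match future.as_mut().poll(&mut cx) {
            Poll::Ready(value) => break value,
            Poll::Pending => continue,
        }
    }
}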
@ -1,121 +0,0 @@
use std::{
    collections::HashMap,
    os::raw::c_void,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Mutex,
    },
};

#[ctor::ctor]
unsafe fn initialize_allocation_recording() {
    tree_sitter::set_allocator(
        Some(ts_record_malloc),
        Some(ts_record_calloc),
        Some(ts_record_realloc),
        Some(ts_record_free),
    );
}

#[derive(Debug, PartialEq, Eq, Hash)]
struct Allocation(*const c_void);
unsafe impl Send for Allocation {}
unsafe impl Sync for Allocation {}

#[derive(Default)]
struct AllocationRecorder {
    enabled: AtomicBool,
    allocation_count: AtomicUsize,
    outstanding_allocations: Mutex<HashMap<Allocation, usize>>,
}

thread_local! {
    static RECORDER: AllocationRecorder = AllocationRecorder::default();
}

extern "C" {
    fn malloc(size: usize) -> *mut c_void;
    fn calloc(count: usize, size: usize) -> *mut c_void;
    fn realloc(ptr: *mut c_void, size: usize) -> *mut c_void;
    fn free(ptr: *mut c_void);
}

pub fn record<T>(f: impl FnOnce() -> T) -> T {
    RECORDER.with(|recorder| {
        recorder.enabled.store(true, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder.outstanding_allocations.lock().unwrap().clear();
    });

    let value = f();

    let outstanding_allocation_indices = RECORDER.with(|recorder| {
        recorder.enabled.store(false, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder
            .outstanding_allocations
            .lock()
            .unwrap()
            .drain()
            .map(|e| e.1)
            .collect::<Vec<_>>()
    });
    assert!(
        outstanding_allocation_indices.is_empty(),
        "Leaked allocation indices: {outstanding_allocation_indices:?}"
    );
    value
}

fn record_alloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            let count = recorder.allocation_count.fetch_add(1, SeqCst);
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .insert(Allocation(ptr), count);
        }
    });
}

fn record_dealloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .remove(&Allocation(ptr));
        }
    });
}

unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
    let result = malloc(size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
    let result = calloc(count, size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
    let result = realloc(ptr, size);
    if ptr.is_null() {
        record_alloc(result);
    } else if ptr != result {
        record_dealloc(ptr);
        record_alloc(result);
    }
    result
}

unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
    record_dealloc(ptr);
    free(ptr);
}
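A sketch of how this recorder might be used from a test, assuming the module is imported as `allocations` and a `get_language` fixture helper is available (both assumptions, not taken from this diff): wrap the allocating work in `allocations::record`, which panics if any tree-sitter allocation made inside the closure is never freed.

// Hypothetical usage; `allocations` is the module above and `get_language`
// is assumed to come from the test fixtures.
#[test]
fn parsing_does_not_leak() {
    allocations::record(|| {
        let mut parser = tree_sitter::Parser::new();
        parser.set_language(&get_language("rust")).unwrap();
        // The parser and tree are dropped before `record` returns, so any
        // allocation still outstanding at that point is a real leak.
        let tree = parser.parse("fn main() {}", None).unwrap();
        assert!(!tree.root_node().has_error());
    });
}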
@ -1,104 +0,0 @@
// For some reason `Command::spawn` doesn't work in CI env for many exotic arches.
#![cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]

use std::{
    env::VarError,
    process::{Command, Stdio},
};

use tree_sitter::Parser;
use tree_sitter_generate::load_grammar_file;

use super::generate_parser;
use crate::tests::helpers::fixtures::{fixtures_dir, get_test_language};

// The `sanitizing` cfg is required so these tests don't run under specific sanitizers,
// because they don't work well with subprocesses _(it's an assumption)_.
//
// Below are two alternative examples of how to disable tests for some arches
// if excluding the whole mod from compilation wouldn't work well.
//
// XXX: Also, it may make sense to keep such tests ignored by default
// to avoid surprises and enable them on CI by passing an extra option explicitly:
//
// > cargo test -- --include-ignored
//
// #[cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]
// #[cfg_attr(not(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing))), ignore)]
//
#[test]
fn test_grammar_that_should_hang_and_not_segfault() {
    let parent_sleep_millis = 1000;
    let test_name = "test_grammar_that_should_hang_and_not_segfault";
    let test_var = "CARGO_HANG_TEST";

    eprintln!(" {test_name}");

    let tests_exec_path = std::env::args()
        .next()
        .expect("Failed to get tests executable path");

    match std::env::var(test_var) {
        Ok(v) if v == test_name => {
            eprintln!(" child process id {}", std::process::id());
            hang_test();
        }

        Err(VarError::NotPresent) => {
            eprintln!(" parent process id {}", std::process::id());
            let mut command = Command::new(tests_exec_path);
            command.arg(test_name).env(test_var, test_name);

            if std::env::args().any(|x| x == "--nocapture") {
                command.arg("--nocapture");
            } else {
                command.stdout(Stdio::null()).stderr(Stdio::null());
            }

            match command.spawn() {
                Ok(mut child) => {
                    std::thread::sleep(std::time::Duration::from_millis(parent_sleep_millis));
                    match child.try_wait() {
                        Ok(Some(status)) if status.success() => {
                            panic!("Child didn't hang and exited successfully")
                        }
                        Ok(Some(status)) => panic!(
                            "Child didn't hang and exited with status code: {:?}",
                            status.code()
                        ),
                        _ => (),
                    }
                    if let Err(e) = child.kill() {
                        eprintln!(
                            "Failed to kill hang test's process id: {}, error: {e}",
                            child.id()
                        );
                    }
                }
                Err(e) => panic!("{e}"),
            }
        }

        Err(e) => panic!("Env var error: {e}"),

        _ => unreachable!(),
    }
}

fn hang_test() {
    let test_grammar_dir = fixtures_dir()
        .join("test_grammars")
        .join("get_col_should_hang_not_crash");

    let grammar_json = load_grammar_file(&test_grammar_dir.join("grammar.js"), None).unwrap();
    let (parser_name, parser_code) = generate_parser(grammar_json.as_str()).unwrap();

    let language = get_test_language(&parser_name, &parser_code, Some(test_grammar_dir.as_path()));

    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();

    let code_that_should_hang = "\nHello";

    parser.parse(code_that_should_hang, None).unwrap();
}
@ -1,264 +0,0 @@
use std::{fs, path::PathBuf, process::Command};

use anyhow::{anyhow, Context, Result};
use regex::Regex;
use tree_sitter_loader::TreeSitterJSON;

pub struct Version {
    pub version: String,
    pub current_dir: PathBuf,
}

impl Version {
    #[must_use]
    pub const fn new(version: String, current_dir: PathBuf) -> Self {
        Self {
            version,
            current_dir,
        }
    }

    pub fn run(self) -> Result<()> {
        let tree_sitter_json = self.current_dir.join("tree-sitter.json");

        let tree_sitter_json =
            serde_json::from_str::<TreeSitterJSON>(&fs::read_to_string(tree_sitter_json)?)?;

        let is_multigrammar = tree_sitter_json.grammars.len() > 1;

        self.update_treesitter_json().with_context(|| {
            format!(
                "Failed to update tree-sitter.json at {}",
                self.current_dir.display()
            )
        })?;
        self.update_cargo_toml().with_context(|| {
            format!(
                "Failed to update Cargo.toml at {}",
                self.current_dir.display()
            )
        })?;
        self.update_package_json().with_context(|| {
            format!(
                "Failed to update package.json at {}",
                self.current_dir.display()
            )
        })?;
        self.update_makefile(is_multigrammar).with_context(|| {
            format!(
                "Failed to update Makefile at {}",
                self.current_dir.display()
            )
        })?;
        self.update_cmakelists_txt().with_context(|| {
            format!(
                "Failed to update CMakeLists.txt at {}",
                self.current_dir.display()
            )
        })?;
        self.update_pyproject_toml().with_context(|| {
            format!(
                "Failed to update pyproject.toml at {}",
                self.current_dir.display()
            )
        })?;

        Ok(())
    }

    fn update_treesitter_json(&self) -> Result<()> {
        let tree_sitter_json = &fs::read_to_string(self.current_dir.join("tree-sitter.json"))?;

        let tree_sitter_json = tree_sitter_json
            .lines()
            .map(|line| {
                if line.contains("\"version\":") {
                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;

                    format!(
                        "{}{}{}",
                        &line[..start_quote],
                        self.version,
                        &line[end_quote..]
                    )
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("tree-sitter.json"), tree_sitter_json)?;

        Ok(())
    }

    fn update_cargo_toml(&self) -> Result<()> {
        if !self.current_dir.join("Cargo.toml").exists() {
            return Ok(());
        }

        let cargo_toml = fs::read_to_string(self.current_dir.join("Cargo.toml"))?;

        let cargo_toml = cargo_toml
            .lines()
            .map(|line| {
                if line.starts_with("version =") {
                    format!("version = \"{}\"", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("Cargo.toml"), cargo_toml)?;

        if self.current_dir.join("Cargo.lock").exists() {
            let Ok(cmd) = Command::new("cargo")
                .arg("generate-lockfile")
                .arg("--offline")
                .current_dir(&self.current_dir)
                .output()
            else {
                return Ok(()); // cargo is not `executable`, ignore
            };

            if !cmd.status.success() {
                let stderr = String::from_utf8_lossy(&cmd.stderr);
                return Err(anyhow!(
                    "Failed to run `cargo generate-lockfile`:\n{stderr}"
                ));
            }
        }

        Ok(())
    }

    fn update_package_json(&self) -> Result<()> {
        if !self.current_dir.join("package.json").exists() {
            return Ok(());
        }

        let package_json = &fs::read_to_string(self.current_dir.join("package.json"))?;

        let package_json = package_json
            .lines()
            .map(|line| {
                if line.contains("\"version\":") {
                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;

                    format!(
                        "{}{}{}",
                        &line[..start_quote],
                        self.version,
                        &line[end_quote..]
                    )
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("package.json"), package_json)?;

        if self.current_dir.join("package-lock.json").exists() {
            let Ok(cmd) = Command::new("npm")
                .arg("install")
                .arg("--package-lock-only")
                .current_dir(&self.current_dir)
                .output()
            else {
                return Ok(()); // npm is not `executable`, ignore
            };

            if !cmd.status.success() {
                let stderr = String::from_utf8_lossy(&cmd.stderr);
                return Err(anyhow!("Failed to run `npm install`:\n{stderr}"));
            }
        }

        Ok(())
    }

    fn update_makefile(&self, is_multigrammar: bool) -> Result<()> {
        let makefile = if is_multigrammar {
            if !self.current_dir.join("common").join("common.mak").exists() {
                return Ok(());
            }

            fs::read_to_string(self.current_dir.join("Makefile"))?
        } else {
            if !self.current_dir.join("Makefile").exists() {
                return Ok(());
            }

            fs::read_to_string(self.current_dir.join("Makefile"))?
        };

        let makefile = makefile
            .lines()
            .map(|line| {
                if line.starts_with("VERSION") {
                    format!("VERSION := {}", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("Makefile"), makefile)?;

        Ok(())
    }

    fn update_cmakelists_txt(&self) -> Result<()> {
        if !self.current_dir.join("CMakeLists.txt").exists() {
            return Ok(());
        }

        let cmake = fs::read_to_string(self.current_dir.join("CMakeLists.txt"))?;

        let re = Regex::new(r#"(\s*VERSION\s+)"[0-9]+\.[0-9]+\.[0-9]+""#)?;
        let cmake = re.replace(&cmake, format!(r#"$1"{}""#, self.version));

        fs::write(self.current_dir.join("CMakeLists.txt"), cmake.as_bytes())?;

        Ok(())
    }

    fn update_pyproject_toml(&self) -> Result<()> {
        if !self.current_dir.join("pyproject.toml").exists() {
            return Ok(());
        }

        let pyproject_toml = fs::read_to_string(self.current_dir.join("pyproject.toml"))?;

        let pyproject_toml = pyproject_toml
            .lines()
            .map(|line| {
                if line.starts_with("version =") {
                    format!("version = \"{}\"", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("pyproject.toml"), pyproject_toml)?;

        Ok(())
    }
}
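A minimal sketch of driving the version bumper above, assuming it is reachable as `Version` from the surrounding crate and is run from a grammar's root directory (both assumptions, not stated in this diff):

use anyhow::Result;

// Hypothetical driver for the `Version` struct defined above.
fn bump_to(new_version: &str) -> Result<()> {
    let grammar_root = std::env::current_dir()?;
    // Rewrites tree-sitter.json, Cargo.toml, package.json, Makefile,
    // CMakeLists.txt and pyproject.toml in place, skipping any of those
    // files that do not exist in the grammar repository.
    Version::new(new_version.to_string(), grammar_root).run()
}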
@ -8,14 +8,18 @@ rust-version.workspace = true
 readme = "README.md"
 homepage.workspace = true
 repository.workspace = true
+documentation = "https://docs.rs/tree-sitter-cli"
 license.workspace = true
 keywords.workspace = true
 categories.workspace = true
-include = ["build.rs", "README.md", "benches/*", "src/**"]
+include = ["build.rs", "README.md", "LICENSE", "benches/*", "src/**"]

 [lints]
 workspace = true

+[lib]
+path = "src/tree_sitter_cli.rs"
+
 [[bin]]
 name = "tree-sitter"
 path = "src/main.rs"

@ -26,7 +30,9 @@ name = "benchmark"
 harness = false

 [features]
+default = ["qjs-rt"]
 wasm = ["tree-sitter/wasm", "tree-sitter-loader/wasm"]
+qjs-rt = ["tree-sitter-generate/qjs-rt"]

 [dependencies]
 ansi_colours.workspace = true

@ -36,30 +42,26 @@ bstr.workspace = true
 clap.workspace = true
 clap_complete.workspace = true
 clap_complete_nushell.workspace = true
+crc32fast.workspace = true
 ctor.workspace = true
 ctrlc.workspace = true
 dialoguer.workspace = true
-filetime.workspace = true
 glob.workspace = true
 heck.workspace = true
 html-escape.workspace = true
-indexmap.workspace = true
 indoc.workspace = true
 log.workspace = true
 memchr.workspace = true
 rand.workspace = true
 regex.workspace = true
-regex-syntax.workspace = true
-rustc-hash.workspace = true
+schemars.workspace = true
 semver.workspace = true
 serde.workspace = true
-serde_derive.workspace = true
 serde_json.workspace = true
 similar.workspace = true
-smallbitvec.workspace = true
 streaming-iterator.workspace = true
+thiserror.workspace = true
 tiny_http.workspace = true
-url.workspace = true
 walkdir.workspace = true
 wasmparser.workspace = true
 webbrowser.workspace = true

@ -73,7 +75,7 @@ tree-sitter-tags.workspace = true

 [dev-dependencies]
 encoding_rs = "0.8.35"
-widestring = "1.1.0"
+widestring = "1.2.1"
 tree_sitter_proc_macro = { path = "src/tests/proc_macro", package = "tree-sitter-tests-proc-macro" }

 tempfile.workspace = true
crates/cli/LICENSE (new file, 21 lines)

@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2018 Max Brunsfeld

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@ -7,7 +7,8 @@
 [npmjs.com]: https://www.npmjs.org/package/tree-sitter-cli
 [npmjs.com badge]: https://img.shields.io/npm/v/tree-sitter-cli.svg?color=%23BF4A4A

-The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`, `Linux`, and `Windows`.
+The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`,
+`Linux`, and `Windows`.

 ### Installation

@ -34,9 +35,11 @@ The `tree-sitter` binary itself has no dependencies, but specific commands have

 ### Commands

-* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current working directory. See [the documentation] for more information.
+* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current
+  working directory. See [the documentation] for more information.

-* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory. See [the documentation] for more information.
+* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory.
+  See [the documentation] for more information.

 * `parse` - The `tree-sitter parse` command will parse a file (or list of files) using Tree-sitter parsers.

@ -8,6 +8,7 @@ use std::{
 };

 use anyhow::Context;
+use log::info;
 use tree_sitter::{Language, Parser, Query};
 use tree_sitter_loader::{CompileConfig, Loader};

@ -71,6 +72,8 @@ static EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR: LazyLock<
 });

 fn main() {
+    tree_sitter_cli::logger::init();
+
     let max_path_length = EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR
         .values()
         .flat_map(|(e, q)| {

@ -81,7 +84,7 @@ fn main() {
         .max()
         .unwrap_or(0);

-    eprintln!("Benchmarking with {} repetitions", *REPETITION_COUNT);
+    info!("Benchmarking with {} repetitions", *REPETITION_COUNT);

     let mut parser = Parser::new();
     let mut all_normal_speeds = Vec::new();

@ -98,11 +101,11 @@ fn main() {
         }
     }

-    eprintln!("\nLanguage: {language_name}");
+    info!("\nLanguage: {language_name}");
     let language = get_language(language_path);
     parser.set_language(&language).unwrap();

-    eprintln!(" Constructing Queries");
+    info!(" Constructing Queries");
     for path in query_paths {
         if let Some(filter) = EXAMPLE_FILTER.as_ref() {
             if !path.to_str().unwrap().contains(filter.as_str()) {

@ -112,12 +115,12 @@ fn main() {

         parse(path, max_path_length, |source| {
             Query::new(&language, str::from_utf8(source).unwrap())
-                .with_context(|| format!("Query file path: {path:?}"))
+                .with_context(|| format!("Query file path: {}", path.display()))
                 .expect("Failed to parse query");
         });
     }

-    eprintln!(" Parsing Valid Code:");
+    info!(" Parsing Valid Code:");
     let mut normal_speeds = Vec::new();
     for example_path in example_paths {
         if let Some(filter) = EXAMPLE_FILTER.as_ref() {

@ -131,7 +134,7 @@ fn main() {
         }));
     }

-    eprintln!(" Parsing Invalid Code (mismatched languages):");
+    info!(" Parsing Invalid Code (mismatched languages):");
     let mut error_speeds = Vec::new();
     for (other_language_path, (example_paths, _)) in
         EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR.iter()

@ -152,30 +155,30 @@ fn main() {
     }

     if let Some((average_normal, worst_normal)) = aggregate(&normal_speeds) {
-        eprintln!(" Average Speed (normal): {average_normal} bytes/ms");
-        eprintln!(" Worst Speed (normal): {worst_normal} bytes/ms");
+        info!(" Average Speed (normal): {average_normal} bytes/ms");
+        info!(" Worst Speed (normal): {worst_normal} bytes/ms");
     }

     if let Some((average_error, worst_error)) = aggregate(&error_speeds) {
-        eprintln!(" Average Speed (errors): {average_error} bytes/ms");
-        eprintln!(" Worst Speed (errors): {worst_error} bytes/ms");
+        info!(" Average Speed (errors): {average_error} bytes/ms");
+        info!(" Worst Speed (errors): {worst_error} bytes/ms");
     }

     all_normal_speeds.extend(normal_speeds);
     all_error_speeds.extend(error_speeds);
 }

-    eprintln!("\n Overall");
+    info!("\n Overall");
     if let Some((average_normal, worst_normal)) = aggregate(&all_normal_speeds) {
-        eprintln!(" Average Speed (normal): {average_normal} bytes/ms");
-        eprintln!(" Worst Speed (normal): {worst_normal} bytes/ms");
+        info!(" Average Speed (normal): {average_normal} bytes/ms");
+        info!(" Worst Speed (normal): {worst_normal} bytes/ms");
     }

     if let Some((average_error, worst_error)) = aggregate(&all_error_speeds) {
-        eprintln!(" Average Speed (errors): {average_error} bytes/ms");
-        eprintln!(" Worst Speed (errors): {worst_error} bytes/ms");
+        info!(" Average Speed (errors): {average_error} bytes/ms");
+        info!(" Worst Speed (errors): {worst_error} bytes/ms");
     }
-    eprintln!();
+    info!("");
 }

 fn aggregate(speeds: &[usize]) -> Option<(usize, usize)> {

@ -194,14 +197,8 @@ fn aggregate(speeds: &[usize]) -> Option<(usize, usize)>
 }

 fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) -> usize {
-    eprint!(
-        " {:width$}\t",
-        path.file_name().unwrap().to_str().unwrap(),
-        width = max_path_length
-    );
-
     let source_code = fs::read(path)
-        .with_context(|| format!("Failed to read {path:?}"))
+        .with_context(|| format!("Failed to read {}", path.display()))
         .unwrap();
     let time = Instant::now();
     for _ in 0..*REPETITION_COUNT {

@ -210,8 +207,9 @@ fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) ->
     let duration = time.elapsed() / (*REPETITION_COUNT as u32);
     let duration_ns = duration.as_nanos();
     let speed = ((source_code.len() as u128) * 1_000_000) / duration_ns;
-    eprintln!(
-        "time {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
+    info!(
+        " {:max_path_length$}\ttime {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
+        path.file_name().unwrap().to_str().unwrap(),
         (duration_ns as f64) / 1e6,
     );
     speed as usize

@ -221,6 +219,6 @@ fn get_language(path: &Path) -> Language {
     let src_path = GRAMMARS_DIR.join(path).join("src");
     TEST_LOADER
         .load_language_at_path(CompileConfig::new(&src_path, None, None))
-        .with_context(|| format!("Failed to load language at path {src_path:?}"))
+        .with_context(|| format!("Failed to load language at path {}", src_path.display()))
         .unwrap()
 }
@ -52,16 +52,14 @@ fn main() {

 fn web_playground_files_present() -> bool {
     let paths = [
-        "../docs/src/assets/js/playground.js",
-        "../lib/binding_web/tree-sitter.js",
-        "../lib/binding_web/tree-sitter.wasm",
+        "../../docs/src/assets/js/playground.js",
+        "../../lib/binding_web/web-tree-sitter.js",
+        "../../lib/binding_web/web-tree-sitter.wasm",
     ];

     paths.iter().all(|p| Path::new(p).exists())
 }

-// When updating this function, don't forget to also update generate/build.rs which has a
-// near-identical function.
 fn read_git_sha() -> Option<String> {
     let crate_path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap());

crates/cli/eslint/.gitignore (vendored, new file, 1 line)

@ -0,0 +1 @@
LICENSE

@ -305,9 +305,9 @@
       "peer": true
     },
     "node_modules/brace-expansion": {
-      "version": "1.1.11",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
-      "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+      "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "license": "MIT",
       "peer": true,
       "dependencies": {

@ -805,9 +805,9 @@
       "peer": true
     },
     "node_modules/js-yaml": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
-      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+      "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
       "license": "MIT",
       "peer": true,
       "dependencies": {

@ -4,7 +4,8 @@
   "description": "Eslint configuration for Tree-sitter grammar files",
   "repository": {
     "type": "git",
-    "url": "git+https://github.com/tree-sitter/tree-sitter.git"
+    "url": "git+https://github.com/tree-sitter/tree-sitter.git",
+    "directory": "crates/cli/eslint"
   },
   "license": "MIT",
   "author": "Amaan Qureshi <amaanq12@gmail.com>",

@ -20,5 +21,9 @@
   },
   "peerDependencies": {
     "eslint": ">= 9"
+  },
+  "scripts": {
+    "prepack": "cp ../../../LICENSE .",
+    "postpack": "rm LICENSE"
   }
 }
cli/npm/dsl.d.ts → crates/cli/npm/dsl.d.ts (vendored, 38 lines)

@ -10,6 +10,7 @@ type PrecRightRule = { type: 'PREC_RIGHT'; content: Rule; value: number };
 type PrecRule = { type: 'PREC'; content: Rule; value: number };
 type Repeat1Rule = { type: 'REPEAT1'; content: Rule };
 type RepeatRule = { type: 'REPEAT'; content: Rule };
+type ReservedRule = { type: 'RESERVED'; content: Rule; context_name: string };
 type SeqRule = { type: 'SEQ'; members: Rule[] };
 type StringRule = { type: 'STRING'; value: string };
 type SymbolRule<Name extends string> = { type: 'SYMBOL'; name: Name };

@ -28,17 +29,16 @@ type Rule =
   | PrecRule
   | Repeat1Rule
   | RepeatRule
+  | ReservedRule
   | SeqRule
   | StringRule
   | SymbolRule<string>
   | TokenRule;

-class RustRegex {
+declare class RustRegex {
   value: string;

-  constructor(pattern: string) {
-    this.value = pattern;
-  }
+  constructor(pattern: string);
 }

 type RuleOrLiteral = Rule | RegExp | RustRegex | string;

@ -167,6 +167,17 @@ interface Grammar<
    * @see https://tree-sitter.github.io/tree-sitter/creating-parsers/3-writing-the-grammar#keyword-extraction
    */
   word?: ($: GrammarSymbols<RuleName | BaseGrammarRuleName>) => RuleOrLiteral;
+
+  /**
+   * Mapping of names to reserved word sets. The first reserved word set is the
+   * global word set, meaning it applies to every rule in every parse state.
+   * The other word sets can be used with the `reserved` function.
+   */
+  reserved?: Record<
+    string,
+    ($: GrammarSymbols<RuleName | BaseGrammarRuleName>) => RuleOrLiteral[]
+  >;
 }

 type GrammarSchema<RuleName extends string> = {

@ -251,7 +262,7 @@ declare function optional(rule: RuleOrLiteral): ChoiceRule;
  * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html
  */
 declare const prec: {
-  (value: String | number, rule: RuleOrLiteral): PrecRule;
+  (value: string | number, rule: RuleOrLiteral): PrecRule;

   /**
    * Marks the given rule as left-associative (and optionally applies a

@ -267,7 +278,7 @@ declare const prec: {
    * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html
    */
   left(rule: RuleOrLiteral): PrecLeftRule;
-  left(value: String | number, rule: RuleOrLiteral): PrecLeftRule;
+  left(value: string | number, rule: RuleOrLiteral): PrecLeftRule;

   /**
    * Marks the given rule as right-associative (and optionally applies a

@ -283,7 +294,7 @@ declare const prec: {
    * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html
    */
   right(rule: RuleOrLiteral): PrecRightRule;
-  right(value: String | number, rule: RuleOrLiteral): PrecRightRule;
+  right(value: string | number, rule: RuleOrLiteral): PrecRightRule;

   /**
    * Marks the given rule with a numerical precedence which will be used to

@ -300,7 +311,7 @@ declare const prec: {
    *
    * @see https://www.gnu.org/software/bison/manual/html_node/Generalized-LR-Parsing.html
    */
-  dynamic(value: String | number, rule: RuleOrLiteral): PrecDynamicRule;
+  dynamic(value: string | number, rule: RuleOrLiteral): PrecDynamicRule;
 };

 /**

@ -320,6 +331,15 @@ declare function repeat(rule: RuleOrLiteral): RepeatRule;
  */
 declare function repeat1(rule: RuleOrLiteral): Repeat1Rule;

+/**
+ * Overrides the global reserved word set for a given rule. The word set name
+ * should be defined in the `reserved` field in the grammar.
+ *
+ * @param wordset name of the reserved word set
+ * @param rule rule that will use the reserved word set
+ */
+declare function reserved(wordset: string, rule: RuleOrLiteral): ReservedRule;
+
 /**
  * Creates a rule that matches any number of other rules, one after another.
  * It is analogous to simply writing multiple symbols next to each other

@ -338,7 +358,7 @@ declare function sym<Name extends string>(name: Name): SymbolRule<Name>;

 /**
  * Marks the given rule as producing only a single token. Tree-sitter's
- * default is to treat each String or RegExp literal in the grammar as a
+ * default is to treat each string or RegExp literal in the grammar as a
  * separate token. Each token is matched separately by the lexer and
  * returned as its own leaf node in the tree. The token function allows
  * you to express a complex rule using the DSL functions (rather
3 changes: cli/npm/install.js → crates/cli/npm/install.js (Executable file → Normal file)

@@ -6,7 +6,8 @@ const http = require('http');
const https = require('https');
const packageJSON = require('./package.json');

// Look to a results table in https://github.com/tree-sitter/tree-sitter/issues/2196
https.globalAgent.keepAlive = false;

const matrix = {
  platform: {
    'darwin': {
20 changes: crates/cli/npm/package-lock.json (generated, new file)

@@ -0,0 +1,20 @@
{
  "name": "tree-sitter-cli",
  "version": "0.27.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "tree-sitter-cli",
      "version": "0.27.0",
      "hasInstallScript": true,
      "license": "MIT",
      "bin": {
        "tree-sitter": "cli.js"
      },
      "engines": {
        "node": ">=12.0.0"
      }
    }
  }
}
crates/cli/npm/package.json

@@ -1,6 +1,6 @@
 {
   "name": "tree-sitter-cli",
-  "version": "0.25.1",
+  "version": "0.27.0",
   "author": {
     "name": "Max Brunsfeld",
     "email": "maxbrunsfeld@gmail.com"

@@ -14,20 +14,20 @@
   "license": "MIT",
   "repository": {
     "type": "git",
-    "url": "https://github.com/tree-sitter/tree-sitter.git"
+    "url": "git+https://github.com/tree-sitter/tree-sitter.git",
+    "directory": "crates/cli/npm"
   },
   "description": "CLI for generating fast incremental parsers",
   "keywords": [
     "parser",
     "lexer"
   ],
-  "main": "lib/api/index.js",
   "engines": {
     "node": ">=12.0.0"
   },
   "scripts": {
     "install": "node install.js",
-    "prepack": "cp ../../LICENSE ../README.md .",
+    "prepack": "cp ../../../LICENSE ../README.md .",
     "postpack": "rm LICENSE README.md"
   },
   "bin": {
69 changes: crates/cli/package.nix (new file)

@@ -0,0 +1,69 @@
{
  lib,
  src,
  rustPlatform,
  version,
  clang,
  libclang,
  cmake,
  pkg-config,
  nodejs_22,
  test-grammars,
  stdenv,
  installShellFiles,
}:
let
  isCross = stdenv.targetPlatform == stdenv.buildPlatform;
in
rustPlatform.buildRustPackage {
  pname = "tree-sitter-cli";

  inherit src version;

  cargoBuildFlags = [ "--all-features" ];

  nativeBuildInputs = [
    clang
    cmake
    pkg-config
    nodejs_22
  ]
  ++ lib.optionals (!isCross) [ installShellFiles ];

  cargoLock.lockFile = ../../Cargo.lock;

  env.LIBCLANG_PATH = "${libclang.lib}/lib";

  preBuild = ''
    rm -rf test/fixtures
    mkdir -p test/fixtures
    cp -r ${test-grammars}/fixtures/* test/fixtures/
    chmod -R u+w test/fixtures
  '';

  preCheck = "export HOME=$TMPDIR";
  doCheck = !isCross;

  postInstall = lib.optionalString (!isCross) ''
    installShellCompletion --cmd tree-sitter \
      --bash <($out/bin/tree-sitter complete --shell bash) \
      --zsh <($out/bin/tree-sitter complete --shell zsh) \
      --fish <($out/bin/tree-sitter complete --shell fish)
  '';

  meta = {
    description = "Tree-sitter CLI - A tool for developing, testing, and using Tree-sitter parsers";
    longDescription = ''
      Tree-sitter is a parser generator tool and an incremental parsing library.
      It can build a concrete syntax tree for a source file and efficiently update
      the syntax tree as the source file is edited. This package provides the CLI
      tool for developing, testing, and using Tree-sitter parsers.
    '';
    homepage = "https://tree-sitter.github.io/tree-sitter";
    changelog = "https://github.com/tree-sitter/tree-sitter/releases/tag/v${version}";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ amaanq ];
    platforms = lib.platforms.all;
    mainProgram = "tree-sitter";
  };
}
@@ -1,5 +1,11 @@
-use std::{collections::HashMap, env, fs, path::Path, sync::LazyLock};
+use std::{
+    collections::HashMap,
+    env, fs,
+    path::{Path, PathBuf},
+    sync::LazyLock,
+};

+use log::{error, info};
 use rand::Rng;
 use regex::Regex;
 use tree_sitter::{Language, Parser};

@@ -19,7 +25,7 @@ use crate::{
         random::Rand,
     },
     parse::perform_edit,
-    test::{parse_tests, print_diff, print_diff_key, strip_sexp_fields, TestEntry},
+    test::{parse_tests, strip_sexp_fields, DiffKey, TestDiff, TestEntry},
 };

 pub static LOG_ENABLED: LazyLock<bool> = LazyLock::new(|| env::var("TREE_SITTER_LOG").is_ok());
@@ -56,13 +62,15 @@ fn regex_env_var(name: &'static str) -> Option<Regex> {
 pub fn new_seed() -> usize {
     int_env_var("TREE_SITTER_SEED").unwrap_or_else(|| {
         let mut rng = rand::thread_rng();
-        rng.gen::<usize>()
+        let seed = rng.gen::<usize>();
+        info!("Seed: {seed}");
+        seed
     })
 }

 pub struct FuzzOptions {
     pub skipped: Option<Vec<String>>,
-    pub subdir: Option<String>,
+    pub subdir: Option<PathBuf>,
     pub edits: usize,
     pub iterations: usize,
     pub include: Option<Regex>,
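A minimal usage sketch (not part of the diff) for the seed handling above: setting TREE_SITTER_SEED pins `new_seed` to a fixed value so a failing fuzz run can be replayed. The function name below is illustrative; `new_seed` is assumed to be in scope as a crate-internal helper.

    // Sketch only: pin the seed, then the random fallback (and its "Seed:" log line) is skipped.
    fn reproduce_failure() {
        std::env::set_var("TREE_SITTER_SEED", "42");
        assert_eq!(new_seed(), 42);
    }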
@@ -101,12 +109,12 @@ pub fn fuzz_language_corpus(
     let corpus_dir = grammar_dir.join(subdir).join("test").join("corpus");

     if !corpus_dir.exists() || !corpus_dir.is_dir() {
-        eprintln!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
+        error!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
         return;
     }

     if std::fs::read_dir(&corpus_dir).unwrap().count() == 0 {
-        eprintln!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
+        error!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
         return;
     }

@@ -142,7 +150,7 @@ pub fn fuzz_language_corpus(
     let dump_edits = env::var("TREE_SITTER_DUMP_EDITS").is_ok();

     if log_seed {
-        println!(" start seed: {start_seed}");
+        info!(" start seed: {start_seed}");
     }

     println!();

@@ -156,7 +164,7 @@ pub fn fuzz_language_corpus(
         println!(" {test_index}. {test_name}");

-        let passed = allocations::record(|| {
+        let passed = allocations::record_checked(|| {
             let mut log_session = None;
             let mut parser = get_parser(&mut log_session, "log.html");
             parser.set_language(language).unwrap();

@@ -175,8 +183,8 @@ pub fn fuzz_language_corpus(
             if actual_output != test.output {
                 println!("Incorrect initial parse for {test_name}");
-                print_diff_key();
-                print_diff(&actual_output, &test.output, true);
+                DiffKey::print();
+                println!("{}", TestDiff::new(&actual_output, &test.output));
                 println!();
                 return false;
             }

@@ -184,7 +192,7 @@ pub fn fuzz_language_corpus(
             true
         })
         .unwrap_or_else(|e| {
-            eprintln!("Error: {e}");
+            error!("{e}");
             false
         });

@@ -200,7 +208,7 @@ pub fn fuzz_language_corpus(
         for trial in 0..options.iterations {
             let seed = start_seed + trial;
-            let passed = allocations::record(|| {
+            let passed = allocations::record_checked(|| {
                 let mut rand = Rand::new(seed);
                 let mut log_session = None;
                 let mut parser = get_parser(&mut log_session, "log.html");

@@ -209,19 +217,20 @@ pub fn fuzz_language_corpus(
                 let mut input = test.input.clone();

                 if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                 }

                 // Perform a random series of edits and reparse.
-                let mut undo_stack = Vec::new();
-                for _ in 0..=rand.unsigned(*EDIT_COUNT) {
+                let edit_count = rand.unsigned(*EDIT_COUNT);
+                let mut undo_stack = Vec::with_capacity(edit_count);
+                for _ in 0..=edit_count {
                     let edit = get_random_edit(&mut rand, &input);
                     undo_stack.push(invert_edit(&input, &edit));
                     perform_edit(&mut tree, &mut input, &edit).unwrap();
                 }

                 if log_seed {
-                    println!(" {test_index}.{trial:<2} seed: {seed}");
+                    info!(" {test_index}.{trial:<2} seed: {seed}");
                 }

                 if dump_edits {

@@ -235,7 +244,7 @@ pub fn fuzz_language_corpus(
                 }

                 if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                 }

                 set_included_ranges(&mut parser, &input, test.template_delimiters);

@@ -244,7 +253,7 @@ pub fn fuzz_language_corpus(
                 // Check that the new tree is consistent.
                 check_consistent_sizes(&tree2, &input);
                 if let Err(message) = check_changed_ranges(&tree, &tree2, &input) {
-                    println!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
+                    error!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
                     return false;
                 }

@@ -253,7 +262,7 @@ pub fn fuzz_language_corpus(
                     perform_edit(&mut tree2, &mut input, &edit).unwrap();
                 }
                 if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                 }

                 set_included_ranges(&mut parser, &test.input, test.template_delimiters);

@@ -267,8 +276,8 @@ pub fn fuzz_language_corpus(
                 if actual_output != test.output && !test.error {
                     println!("Incorrect parse for {test_name} - seed {seed}");
-                    print_diff_key();
-                    print_diff(&actual_output, &test.output, true);
+                    DiffKey::print();
+                    println!("{}", TestDiff::new(&actual_output, &test.output));
                     println!();
                     return false;
                 }

@@ -276,13 +285,13 @@ pub fn fuzz_language_corpus(
                 // Check that the edited tree is consistent.
                 check_consistent_sizes(&tree3, &input);
                 if let Err(message) = check_changed_ranges(&tree2, &tree3, &input) {
-                    println!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
+                    error!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
                     return false;
                 }

                 true
             }).unwrap_or_else(|e| {
-                eprintln!("Error: {e}");
+                error!("{e}");
                 false
             });

@@ -294,17 +303,17 @@ pub fn fuzz_language_corpus(
     }

     if failure_count != 0 {
-        eprintln!("{failure_count} {language_name} corpus tests failed fuzzing");
+        info!("{failure_count} {language_name} corpus tests failed fuzzing");
     }

     skipped.retain(|_, v| *v == 0);

     if !skipped.is_empty() {
-        println!("Non matchable skip definitions:");
+        info!("Non matchable skip definitions:");
         for k in skipped.keys() {
-            println!(" {k}");
+            info!(" {k}");
         }
-        panic!("Non matchable skip definitions needs to be removed");
+        panic!("Non matchable skip definitions need to be removed");
     }
 }

@@ -40,7 +40,11 @@ extern "C" {
     fn free(ptr: *mut c_void);
 }

-pub fn record<T>(f: impl FnOnce() -> T) -> Result<T, String> {
+pub fn record<T>(f: impl FnOnce() -> T) -> T {
+    record_checked(f).unwrap()
+}
+
+pub fn record_checked<T>(f: impl FnOnce() -> T) -> Result<T, String> {
     RECORDER.with(|recorder| {
         recorder.enabled.store(true, SeqCst);
         recorder.allocation_count.store(0, SeqCst);

@@ -93,30 +97,49 @@ fn record_dealloc(ptr: *mut c_void) {
     });
 }

-unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
     let result = malloc(size);
     record_alloc(result);
     result
 }

-unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
     let result = calloc(count, size);
     record_alloc(result);
     result
 }

-unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
     let result = realloc(ptr, size);
     if ptr.is_null() {
         record_alloc(result);
-    } else if ptr != result {
+    } else if !core::ptr::eq(ptr, result) {
         record_dealloc(ptr);
         record_alloc(result);
     }
     result
 }

-unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
+/// # Safety
+///
+/// The caller must ensure that `ptr` was allocated by a previous call
+/// to `ts_record_malloc`, `ts_record_calloc`, or `ts_record_realloc`.
+pub unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
     record_dealloc(ptr);
     free(ptr);
 }
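A short sketch (assumptions flagged in the comments) of how the two entry points introduced above differ: `record_checked` reports an allocation imbalance as an `Err`, while `record` keeps the previous panicking behaviour by unwrapping it.

    // Sketch only; the exact error text produced on a leak is not assumed here.
    let balanced = allocations::record_checked(|| {
        // run code that allocates through the ts_record_* shims
        2 + 2
    });
    assert_eq!(balanced.unwrap(), 4);

    let value = allocations::record(|| 2 + 2); // panics instead of returning Err on imbalance
    assert_eq!(value, 4);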
|
@ -23,7 +23,7 @@ pub fn check_consistent_sizes(tree: &Tree, input: &[u8]) {
|
||||||
let mut some_child_has_changes = false;
|
let mut some_child_has_changes = false;
|
||||||
let mut actual_named_child_count = 0;
|
let mut actual_named_child_count = 0;
|
||||||
for i in 0..node.child_count() {
|
for i in 0..node.child_count() {
|
||||||
let child = node.child(i).unwrap();
|
let child = node.child(i as u32).unwrap();
|
||||||
assert!(child.start_byte() >= last_child_end_byte);
|
assert!(child.start_byte() >= last_child_end_byte);
|
||||||
assert!(child.start_position() >= last_child_end_point);
|
assert!(child.start_position() >= last_child_end_point);
|
||||||
check(child, line_offsets);
|
check(child, line_offsets);
|
||||||
|
|
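A stand-alone sketch of the invariant the loop above enforces, written against the tree-sitter Rust API this diff targets (child indices are u32 there, hence the cast); the function name is illustrative.

    fn assert_children_ordered(node: tree_sitter::Node<'_>) {
        let mut last_end = node.start_byte();
        for i in 0..node.child_count() {
            let child = node.child(i as u32).unwrap();
            // children must appear in source order and never overlap
            assert!(child.start_byte() >= last_end);
            last_end = child.end_byte();
        }
    }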
@@ -20,8 +20,8 @@ impl Rand {
     }

     pub fn words(&mut self, max_count: usize) -> Vec<u8> {
-        let mut result = Vec::new();
         let word_count = self.unsigned(max_count);
+        let mut result = Vec::with_capacity(2 * word_count);
         for i in 0..word_count {
             if i > 0 {
                 if self.unsigned(5) == 0 {
@@ -1,5 +1,5 @@
 use std::{
-    collections::{HashMap, HashSet},
+    collections::{BTreeMap, HashSet},
     fmt::Write,
     fs,
     io::{self, Write as _},

@@ -12,6 +12,7 @@ use std::{
 use ansi_colours::{ansi256_from_rgb, rgb_from_ansi256};
 use anstyle::{Ansi256Color, AnsiColor, Color, Effects, RgbColor};
 use anyhow::Result;
+use log::{info, warn};
 use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer};
 use serde_json::{json, Value};
 use tree_sitter_highlight::{HighlightConfiguration, HighlightEvent, Highlighter, HtmlRenderer};

@@ -82,9 +83,9 @@ impl<'de> Deserialize<'de> for Theme {
     {
         let mut styles = Vec::new();
         let mut highlight_names = Vec::new();
-        if let Ok(colors) = HashMap::<String, Value>::deserialize(deserializer) {
-            highlight_names.reserve(colors.len());
+        if let Ok(colors) = BTreeMap::<String, Value>::deserialize(deserializer) {
             styles.reserve(colors.len());
+            highlight_names.reserve(colors.len());
             for (name, style_value) in colors {
                 let mut style = Style::default();
                 parse_style(&mut style, style_value);

@@ -127,7 +128,7 @@ impl Serialize for Theme {
             || effects.contains(Effects::ITALIC)
             || effects.contains(Effects::UNDERLINE)
         {
-            let mut style_json = HashMap::new();
+            let mut style_json = BTreeMap::new();
             if let Some(color) = color {
                 style_json.insert("color", color);
             }
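Why `BTreeMap` here (an illustrative sketch, not taken from the diff): its keys iterate in sorted order, so serializing a theme produces the same key order on every run, whereas `HashMap` iteration order is randomized per process.

    use std::collections::BTreeMap;

    fn main() {
        let mut style_json = BTreeMap::new();
        style_json.insert("underline", true);
        style_json.insert("bold", true);
        style_json.insert("color", false);
        // keys always come back alphabetically sorted
        assert_eq!(
            style_json.keys().copied().collect::<Vec<_>>(),
            ["bold", "color", "underline"]
        );
    }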
@@ -348,19 +349,17 @@ pub fn highlight(
         config.nonconformant_capture_names(&HashSet::new())
     };
     if names.is_empty() {
-        eprintln!("All highlight captures conform to standards.");
+        info!("All highlight captures conform to standards.");
     } else {
-        eprintln!(
-            "Non-standard highlight {} detected:",
+        warn!(
+            "Non-standard highlight {} detected:\n* {}",
             if names.len() > 1 {
                 "captures"
             } else {
                 "capture"
-            }
+            },
+            names.join("\n* ")
         );
-        for name in names {
-            eprintln!("* {name}");
-        }
     }
 }

@@ -451,7 +450,7 @@ pub fn highlight(
     }

     if opts.print_time {
-        eprintln!("Time: {}ms", time.elapsed().as_millis());
+        info!("Time: {}ms", time.elapsed().as_millis());
     }

     Ok(())
1548 changes: crates/cli/src/init.rs (Normal file; file diff suppressed because it is too large)
@@ -89,8 +89,8 @@ pub fn get_input(
         let Some(path_str) = path.to_str() else {
             bail!("Invalid path: {}", path.display());
         };
-        let paths =
-            glob(path_str).with_context(|| format!("Invalid glob pattern {path:?}"))?;
+        let paths = glob(path_str)
+            .with_context(|| format!("Invalid glob pattern {}", path.display()))?;
         for path in paths {
             incorporate_path(path?, positive);
         }
55 changes: crates/cli/src/logger.rs (new file)

@@ -0,0 +1,55 @@
use std::io::Write;

use anstyle::{AnsiColor, Color, Style};
use log::{Level, LevelFilter, Log, Metadata, Record};

pub fn paint(color: Option<impl Into<Color>>, text: &str) -> String {
    let style = Style::new().fg_color(color.map(Into::into));
    format!("{style}{text}{style:#}")
}

struct Logger;

impl Log for Logger {
    fn enabled(&self, _: &Metadata) -> bool {
        true
    }

    fn log(&self, record: &Record) {
        match record.level() {
            Level::Error => eprintln!(
                "{} {}",
                paint(Some(AnsiColor::Red), "Error:"),
                record.args()
            ),
            Level::Warn => eprintln!(
                "{} {}",
                paint(Some(AnsiColor::Yellow), "Warning:"),
                record.args()
            ),
            Level::Info | Level::Debug => eprintln!("{}", record.args()),
            Level::Trace => eprintln!(
                "[{}] {}",
                record
                    .module_path()
                    .unwrap_or_default()
                    .trim_start_matches("rust_tree_sitter_cli::"),
                record.args()
            ),
        }
    }

    fn flush(&self) {
        let mut stderr = std::io::stderr().lock();
        let _ = stderr.flush();
    }
}

pub fn init() {
    log::set_boxed_logger(Box::new(Logger {})).unwrap();
    log::set_max_level(LevelFilter::Info);
}

pub fn enable_debug() {
    log::set_max_level(LevelFilter::Debug);
}
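A usage sketch for the new logger module, assuming it is wired up as `crate::logger` (as it is in this diff): install it once, then the standard `log` macros route through it, and debug output can be raised later, e.g. behind a `--debug` flag.

    use log::{debug, error, info, warn};

    fn main() {
        crate::logger::init();         // installs the logger at LevelFilter::Info
        info!("plain message");        // printed to stderr as-is
        warn!("check this");           // prefixed with a yellow "Warning:"
        error!("it broke");            // prefixed with a red "Error:"
        debug!("not shown yet");       // filtered out at Info level
        crate::logger::enable_debug(); // raises the filter to Debug
        debug!("now visible");
    }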
File diff suppressed because it is too large
@@ -1,6 +1,7 @@
 use std::{
     fmt, fs,
-    io::{self, StdoutLock, Write},
+    io::{self, Write},
+    ops::ControlFlow,
     path::{Path, PathBuf},
     sync::atomic::{AtomicUsize, Ordering},
     time::{Duration, Instant},

@@ -9,16 +10,17 @@ use std::{
 use anstyle::{AnsiColor, Color, RgbColor};
 use anyhow::{anyhow, Context, Result};
 use clap::ValueEnum;
+use log::info;
+use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use tree_sitter::{
     ffi, InputEdit, Language, LogType, ParseOptions, ParseState, Parser, Point, Range, Tree,
     TreeCursor,
 };

-use super::util;
-use crate::{fuzz::edits::Edit, test::paint};
+use crate::{fuzz::edits::Edit, logger::paint, util};

-#[derive(Debug, Default, Serialize)]
+#[derive(Debug, Default, Serialize, JsonSchema)]
 pub struct Stats {
     pub successful_parses: usize,
     pub total_parses: usize,
@@ -29,18 +31,28 @@ pub struct Stats {
 impl fmt::Display for Stats {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let duration_us = self.total_duration.as_micros();
+        let success_rate = if self.total_parses > 0 {
+            format!(
+                "{:.2}%",
+                ((self.successful_parses as f64) / (self.total_parses as f64)) * 100.0,
+            )
+        } else {
+            "N/A".to_string()
+        };
+        let duration_str = match (self.total_parses, duration_us) {
+            (0, _) => "N/A".to_string(),
+            (_, 0) => "0 bytes/ms".to_string(),
+            (_, _) => format!(
+                "{} bytes/ms",
+                ((self.total_bytes as u128) * 1_000) / duration_us
+            ),
+        };
         writeln!(
             f,
-            "Total parses: {}; successful parses: {}; failed parses: {}; success percentage: {:.2}%; average speed: {} bytes/ms",
+            "Total parses: {}; successful parses: {}; failed parses: {}; success percentage: {success_rate}; average speed: {duration_str}",
             self.total_parses,
             self.successful_parses,
             self.total_parses - self.successful_parses,
-            ((self.successful_parses as f64) / (self.total_parses as f64)) * 100.0,
-            if duration_us != 0 {
-                ((self.total_bytes as u128) * 1_000) / duration_us
-            } else {
-                0
-            }
         )
     }
 }
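The same guards, pulled out into stand-alone helpers as a sketch (the names are illustrative): both the success percentage and the throughput figure avoid dividing by zero when nothing was parsed.

    fn success_rate(successful: usize, total: usize) -> String {
        if total > 0 {
            format!("{:.2}%", successful as f64 / total as f64 * 100.0)
        } else {
            "N/A".to_string()
        }
    }

    fn bytes_per_ms(total_bytes: usize, duration_us: u128, total_parses: usize) -> String {
        match (total_parses, duration_us) {
            (0, _) => "N/A".to_string(),        // nothing parsed
            (_, 0) => "0 bytes/ms".to_string(), // parsed, but too fast to measure
            _ => format!("{} bytes/ms", (total_bytes as u128 * 1_000) / duration_us),
        }
    }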
@@ -219,13 +231,24 @@ impl ParseSummary {
     }
 }

-#[derive(Serialize, Debug, Default)]
+#[derive(Serialize, Debug)]
 pub struct ParseStats {
     pub parse_summaries: Vec<ParseSummary>,
     pub cumulative_stats: Stats,
+    pub source_count: usize,
 }

-#[derive(Serialize, ValueEnum, Debug, Clone, Default, Eq, PartialEq)]
+impl Default for ParseStats {
+    fn default() -> Self {
+        Self {
+            parse_summaries: Vec::new(),
+            cumulative_stats: Stats::default(),
+            source_count: 1,
+        }
+    }
+}
+
+#[derive(Serialize, ValueEnum, Debug, Copy, Clone, Default, Eq, PartialEq)]
 pub enum ParseDebugType {
     #[default]
     Quiet,

@@ -273,10 +296,11 @@ pub fn parse_file_at_path(
     }
     // Log to stderr if `--debug` was passed
     else if opts.debug != ParseDebugType::Quiet {
-        let mut curr_version: usize = 0usize;
+        let mut curr_version: usize = 0;
         let use_color = std::env::var("NO_COLOR").map_or(true, |v| v != "1");
-        parser.set_logger(Some(Box::new(|log_type, message| {
-            if opts.debug == ParseDebugType::Normal {
+        let debug = opts.debug;
+        parser.set_logger(Some(Box::new(move |log_type, message| {
+            if debug == ParseDebugType::Normal {
                 if log_type == LogType::Lex {
                     write!(&mut io::stderr(), " ").unwrap();
                 }
@@ -346,15 +370,15 @@ pub fn parse_file_at_path(
     let progress_callback = &mut |_: &ParseState| {
         if let Some(cancellation_flag) = opts.cancellation_flag {
             if cancellation_flag.load(Ordering::SeqCst) != 0 {
-                return true;
+                return ControlFlow::Break(());
             }
         }

         if opts.timeout > 0 && start_time.elapsed().as_micros() > opts.timeout as u128 {
-            return true;
+            return ControlFlow::Break(());
         }

-        false
+        ControlFlow::Continue(())
     };

     let parse_opts = ParseOptions::new().progress_callback(progress_callback);
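A sketch of the new callback contract shown above: with the tree-sitter version this PR targets, a parse progress callback returns `ControlFlow<()>` rather than a bool, so cancellation is an explicit `Break(())`. The flag handling mirrors the diff; nothing else about `ParseState` is assumed.

    use std::ops::ControlFlow;
    use std::sync::atomic::{AtomicUsize, Ordering};

    fn progress(flag: &AtomicUsize) -> impl FnMut(&tree_sitter::ParseState) -> ControlFlow<()> + '_ {
        move |_state| {
            if flag.load(Ordering::SeqCst) != 0 {
                ControlFlow::Break(())    // stop parsing
            } else {
                ControlFlow::Continue(()) // keep going
            }
        }
    }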
@@ -413,7 +437,7 @@ pub fn parse_file_at_path(
     if let Some(mut tree) = tree {
         if opts.debug_graph && !opts.edits.is_empty() {
-            println!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
+            info!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
         }

         let edit_time = Instant::now();

@@ -423,7 +447,7 @@ pub fn parse_file_at_path(
             tree = parser.parse(&source_code, Some(&tree)).unwrap();

             if opts.debug_graph {
-                println!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
+                info!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
             }
         }
         let edit_duration = edit_time.elapsed();
@@ -490,63 +514,23 @@ pub fn parse_file_at_path(
     }

     if opts.output == ParseOutput::Cst {
-        let lossy_source_code = String::from_utf8_lossy(&source_code);
-        let total_width = lossy_source_code
-            .lines()
-            .enumerate()
-            .map(|(row, col)| {
-                (row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1
-            })
-            .max()
-            .unwrap_or(1);
-        let mut indent_level = 1;
-        let mut did_visit_children = false;
-        let mut in_error = false;
-        loop {
-            if did_visit_children {
-                if cursor.goto_next_sibling() {
-                    did_visit_children = false;
-                } else if cursor.goto_parent() {
-                    did_visit_children = true;
-                    indent_level -= 1;
-                    if !cursor.node().has_error() {
-                        in_error = false;
-                    }
-                } else {
-                    break;
-                }
-            } else {
-                cst_render_node(
-                    opts,
-                    &mut cursor,
-                    &source_code,
-                    &mut stdout,
-                    total_width,
-                    indent_level,
-                    in_error,
-                )?;
-                if cursor.goto_first_child() {
-                    did_visit_children = false;
-                    indent_level += 1;
-                    if cursor.node().has_error() {
-                        in_error = true;
-                    }
-                } else {
-                    did_visit_children = true;
-                }
-            }
-        }
-        cursor.reset(tree.root_node());
-        println!();
+        render_cst(&source_code, &tree, &mut cursor, opts, &mut stdout)?;
     }

     if opts.output == ParseOutput::Xml {
         let mut needs_newline = false;
-        let mut indent_level = 0;
+        let mut indent_level = 2;
         let mut did_visit_children = false;
         let mut had_named_children = false;
         let mut tags = Vec::<&str>::new();
-        writeln!(&mut stdout, "<?xml version=\"1.0\"?>")?;
+        // If we're parsing the first file, write the header
+        if opts.stats.parse_summaries.is_empty() {
+            writeln!(&mut stdout, "<?xml version=\"1.0\"?>")?;
+            writeln!(&mut stdout, "<sources>")?;
+        }
+        writeln!(&mut stdout, " <source name=\"{}\">", path.display())?;
+
         loop {
             let node = cursor.node();
             let is_named = node.is_named();

@@ -561,7 +545,7 @@ pub fn parse_file_at_path(
                     write!(&mut stdout, "</{}>", tag.expect("there is a tag"))?;
                     // we only write a line in the case where it's the last sibling
                     if let Some(parent) = node.parent() {
-                        if parent.child(parent.child_count() - 1).unwrap() == node {
+                        if parent.child(parent.child_count() as u32 - 1).unwrap() == node {
                             stdout.write_all(b"\n")?;
                         }
                     }

@@ -625,8 +609,14 @@ pub fn parse_file_at_path(
                 }
             }
         }
+        writeln!(&mut stdout)?;
+        writeln!(&mut stdout, " </source>")?;
+
+        // If we parsed the last file, write the closing tag for the `sources` header
+        if opts.stats.parse_summaries.len() == opts.stats.source_count - 1 {
+            writeln!(&mut stdout, "</sources>")?;
+        }
         cursor.reset(tree.root_node());
-        println!();
     }

     if opts.output == ParseOutput::Dot {
@@ -684,22 +674,28 @@ pub fn parse_file_at_path(
                 width = max_path_length
             )?;
             if let Some(node) = first_error {
-                let start = node.start_position();
-                let end = node.end_position();
+                let node_kind = node.kind();
+                let mut node_text = String::with_capacity(node_kind.len());
+                for c in node_kind.chars() {
+                    if let Some(escaped) = escape_invisible(c) {
+                        node_text += escaped;
+                    } else {
+                        node_text.push(c);
+                    }
+                }
                 write!(&mut stdout, "\t(")?;
                 if node.is_missing() {
                     if node.is_named() {
-                        write!(&mut stdout, "MISSING {}", node.kind())?;
+                        write!(&mut stdout, "MISSING {node_text}")?;
                     } else {
-                        write!(
-                            &mut stdout,
-                            "MISSING \"{}\"",
-                            node.kind().replace('\n', "\\n")
-                        )?;
+                        write!(&mut stdout, "MISSING \"{node_text}\"")?;
                     }
                 } else {
-                    write!(&mut stdout, "{}", node.kind())?;
+                    write!(&mut stdout, "{node_text}")?;
                 }

+                let start = node.start_position();
+                let end = node.end_position();
                 write!(
                     &mut stdout,
                     " [{}, {}] - [{}, {}])",
@@ -766,12 +762,77 @@ const fn escape_invisible(c: char) -> Option<&'static str> {
     })
 }

+const fn escape_delimiter(c: char) -> Option<&'static str> {
+    Some(match c {
+        '`' => "\\`",
+        '\"' => "\\\"",
+        _ => return None,
+    })
+}
+
+pub fn render_cst<'a, 'b: 'a>(
+    source_code: &[u8],
+    tree: &'b Tree,
+    cursor: &mut TreeCursor<'a>,
+    opts: &ParseFileOptions,
+    out: &mut impl Write,
+) -> Result<()> {
+    let lossy_source_code = String::from_utf8_lossy(source_code);
+    let total_width = lossy_source_code
+        .lines()
+        .enumerate()
+        .map(|(row, col)| (row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1)
+        .max()
+        .unwrap_or(1);
+    let mut indent_level = usize::from(!opts.no_ranges);
+    let mut did_visit_children = false;
+    let mut in_error = false;
+    loop {
+        if did_visit_children {
+            if cursor.goto_next_sibling() {
+                did_visit_children = false;
+            } else if cursor.goto_parent() {
+                did_visit_children = true;
+                indent_level -= 1;
+                if !cursor.node().has_error() {
+                    in_error = false;
+                }
+            } else {
+                break;
+            }
+        } else {
+            cst_render_node(
+                opts,
+                cursor,
+                source_code,
+                out,
+                total_width,
+                indent_level,
+                in_error,
+            )?;
+            if cursor.goto_first_child() {
+                did_visit_children = false;
+                indent_level += 1;
+                if cursor.node().has_error() {
+                    in_error = true;
+                }
+            } else {
+                did_visit_children = true;
+            }
+        }
+    }
+    cursor.reset(tree.root_node());
+    Ok(())
+}
+
 fn render_node_text(source: &str) -> String {
     source
         .chars()
         .fold(String::with_capacity(source.len()), |mut acc, c| {
             if let Some(esc) = escape_invisible(c) {
                 acc.push_str(esc);
+            } else if let Some(esc) = escape_delimiter(c) {
+                acc.push_str(esc);
             } else {
                 acc.push(c);
             }
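The traversal inside `render_cst`, reduced to a stand-alone sketch that only uses the public `TreeCursor` API (`goto_first_child` / `goto_next_sibling` / `goto_parent`); the `visit` callback and depth tracking are illustrative additions, not part of the diff.

    fn walk<'a>(
        cursor: &mut tree_sitter::TreeCursor<'a>,
        mut visit: impl FnMut(tree_sitter::Node<'a>, usize),
    ) {
        let mut depth = 0;
        let mut did_visit_children = false;
        loop {
            if did_visit_children {
                if cursor.goto_next_sibling() {
                    did_visit_children = false; // descend into the sibling next
                } else if cursor.goto_parent() {
                    depth -= 1;                 // finished this subtree
                } else {
                    break;                      // back at the root
                }
            } else {
                visit(cursor.node(), depth);
                if cursor.goto_first_child() {
                    depth += 1;
                } else {
                    did_visit_children = true;  // leaf: go sideways or up
                }
            }
        }
    }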
@@ -781,7 +842,7 @@ fn render_node_text(source: &str) -> String {

 fn write_node_text(
     opts: &ParseFileOptions,
-    stdout: &mut StdoutLock<'static>,
+    out: &mut impl Write,
     cursor: &TreeCursor,
     is_named: bool,
     source: &str,

@@ -797,7 +858,7 @@ fn write_node_text(
     if !is_named {
         write!(
-            stdout,
+            out,
             "{}{}{}",
             paint(quote_color, &String::from(quote)),
             paint(color, &render_node_text(source)),

@@ -821,35 +882,24 @@ fn write_node_text(
                 0
             };
             let formatted_line = render_line_feed(line, opts);
-            if !opts.no_ranges {
-                write!(
-                    stdout,
-                    "{}{}{}{}{}{}",
-                    if multiline { "\n" } else { "" },
-                    if multiline {
-                        render_node_range(opts, cursor, is_named, true, total_width, node_range)
-                    } else {
-                        String::new()
-                    },
-                    if multiline {
-                        " ".repeat(indent_level + 1)
-                    } else {
-                        String::new()
-                    },
-                    paint(quote_color, &String::from(quote)),
-                    &paint(color, &render_node_text(&formatted_line)),
-                    paint(quote_color, &String::from(quote)),
-                )?;
-            } else {
-                write!(
-                    stdout,
-                    "\n{}{}{}{}",
-                    " ".repeat(indent_level + 1),
-                    paint(quote_color, &String::from(quote)),
-                    &paint(color, &render_node_text(&formatted_line)),
-                    paint(quote_color, &String::from(quote)),
-                )?;
-            }
+            write!(
+                out,
+                "{}{}{}{}{}{}",
+                if multiline { "\n" } else { " " },
+                if multiline && !opts.no_ranges {
+                    render_node_range(opts, cursor, is_named, true, total_width, node_range)
+                } else {
+                    String::new()
+                },
+                if multiline {
+                    " ".repeat(indent_level + 1)
+                } else {
+                    String::new()
+                },
+                paint(quote_color, &String::from(quote)),
+                paint(color, &render_node_text(&formatted_line)),
+                paint(quote_color, &String::from(quote)),
+            )?;
         }
     }

@@ -903,9 +953,9 @@ fn render_node_range(

 fn cst_render_node(
     opts: &ParseFileOptions,
-    cursor: &mut TreeCursor,
+    cursor: &TreeCursor,
     source_code: &[u8],
-    stdout: &mut StdoutLock<'static>,
+    out: &mut impl Write,
     total_width: usize,
     indent_level: usize,
     in_error: bool,

@@ -914,13 +964,13 @@ fn cst_render_node(
     let is_named = node.is_named();
     if !opts.no_ranges {
         write!(
-            stdout,
+            out,
             "{}",
             render_node_range(opts, cursor, is_named, false, total_width, node.range())
         )?;
     }
     write!(
-        stdout,
+        out,
         "{}{}",
         " ".repeat(indent_level),
         if in_error && !node.has_error() {

@@ -932,14 +982,14 @@ fn cst_render_node(
     if is_named {
         if let Some(field_name) = cursor.field_name() {
             write!(
-                stdout,
+                out,
                 "{}",
                 paint(opts.parse_theme.field, &format!("{field_name}: "))
             )?;
         }

         if node.has_error() || node.is_error() {
-            write!(stdout, "{}", paint(opts.parse_theme.error, "•"))?;
+            write!(out, "{}", paint(opts.parse_theme.error, "•"))?;
         }

         let kind_color = if node.is_error() {

@@ -949,13 +999,13 @@ fn cst_render_node(
         } else {
             opts.parse_theme.node_kind
         };
-        write!(stdout, "{} ", paint(kind_color, node.kind()))?;
+        write!(out, "{}", paint(kind_color, node.kind()))?;

         if node.child_count() == 0 {
             // Node text from a pattern or external scanner
             write_node_text(
                 opts,
-                stdout,
+                out,
                 cursor,
                 is_named,
                 &String::from_utf8_lossy(&source_code[node.start_byte()..node.end_byte()]),

@@ -964,17 +1014,13 @@ fn cst_render_node(
             )?;
         }
     } else if node.is_missing() {
-        write!(stdout, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
-        write!(
-            stdout,
-            "\"{}\"",
-            paint(opts.parse_theme.missing, node.kind())
-        )?;
+        write!(out, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
+        write!(out, "\"{}\"", paint(opts.parse_theme.missing, node.kind()))?;
     } else {
         // Terminal literals, like "fn"
         write_node_text(
             opts,
-            stdout,
+            out,
             cursor,
             is_named,
             node.kind(),

@@ -982,7 +1028,7 @@ fn cst_render_node(
             (total_width, indent_level),
         )?;
     }
-    writeln!(stdout)?;
+    writeln!(out)?;

     Ok(())
 }
481 changes: crates/cli/src/playground.html (new file)

@@ -0,0 +1,481 @@
<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <title>tree-sitter THE_LANGUAGE_NAME</title>
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.css">
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.css">
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-32x32.png"
    sizes="32x32" />
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-16x16.png"
    sizes="16x16" />
  <style>
    /* Base Variables */
    :root {
      --light-bg: #f9f9f9;
      --light-border: #e0e0e0;
      --light-text: #333;
      --light-hover-border: #c1c1c1;
      --light-scrollbar-track: #f1f1f1;
      --light-scrollbar-thumb: #c1c1c1;
      --light-scrollbar-thumb-hover: #a8a8a8;
      --light-tree-row-bg: #e3f2fd;

      --dark-bg: #1d1f21;
      --dark-border: #2d2d2d;
      --dark-text: #c5c8c6;
      --dark-panel-bg: #252526;
      --dark-code-bg: #1e1e1e;
      --dark-scrollbar-track: #25282c;
      --dark-scrollbar-thumb: #4a4d51;
      --dark-scrollbar-thumb-hover: #5a5d61;
      --dark-tree-row-bg: #373737;

      --primary-color: #0550ae;
      --primary-color-alpha: rgba(5, 80, 174, 0.1);
      --primary-color-alpha-dark: rgba(121, 192, 255, 0.1);
      --selection-color: rgba(39, 95, 255, 0.3);
    }

    /* Theme Colors */
    [data-theme="dark"] {
      --bg-color: var(--dark-bg);
      --border-color: var(--dark-border);
      --text-color: var(--dark-text);
      --panel-bg: var(--dark-panel-bg);
      --code-bg: var(--dark-code-bg);
      --tree-row-bg: var(--dark-tree-row-bg);
    }

    [data-theme="light"] {
      --bg-color: var(--light-bg);
      --border-color: var(--light-border);
      --text-color: var(--light-text);
      --panel-bg: white;
      --code-bg: white;
      --tree-row-bg: var(--light-tree-row-bg);
    }

    /* Base Styles */
    body {
      margin: 0;
      padding: 0;
      font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
      background-color: var(--bg-color);
      color: var(--text-color);
    }

    /* Layout */
    #playground-container {
      width: 100%;
      height: 100vh;
      display: flex;
      flex-direction: column;
      background-color: var(--bg-color);
    }

    header {
      padding: 16px 24px;
      border-bottom: 1px solid var(--border-color);
      display: flex;
      align-items: center;
      gap: 20px;
      background-color: var(--panel-bg);
      font-size: 14px;
    }

    .header-item {
      display: flex;
      align-items: center;
      gap: 8px;
    }

    .language-name,
    #language-version {
      font-weight: 600;
    }

    main {
      flex: 1;
      display: flex;
      overflow: hidden;
    }

    #input-pane {
      width: 50%;
      display: flex;
      flex-direction: column;
      border-right: 1px solid var(--border-color);
      background-color: var(--panel-bg);
      overflow: hidden;
    }

    #code-container {
      flex: 1;
      min-height: 0;
      position: relative;
      border-bottom: 1px solid var(--border-color);
      display: flex;
      flex-direction: column;
    }

    #query-container:not([style*="visibility: hidden"]) {
      flex: 1;
      min-height: 0;
      display: flex;
      flex-direction: column;
    }

    #query-container .panel-header {
      flex: 0 0 auto;
    }

    #query-container .CodeMirror {
      flex: 1;
      position: relative;
      min-height: 0;
    }

    #output-container-scroll {
      width: 50%;
      overflow: auto;
      background-color: var(--panel-bg);
      padding: 0;
      display: flex;
      flex-direction: column;
    }

    #output-container {
      font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
      line-height: 1.5;
      margin: 0;
      padding: 16px;
    }

    .panel-header {
      padding: 8px 16px;
      font-weight: 600;
      font-size: 14px;
      border-bottom: 1px solid var(--border-color);
      background-color: var(--panel-bg);
      display: flex;
      align-items: center;
      gap: 8px;
    }

    .CodeMirror {
      position: absolute;
      top: 0;
      left: 0;
      right: 0;
      bottom: 0;
      height: 100%;
      font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
      font-size: 14px;
      line-height: 1.6;
      background-color: var(--code-bg) !important;
      color: var(--text-color) !important;
    }

    .query-error {
      text-decoration: underline red dashed;
      -webkit-text-decoration: underline red dashed;
    }

    /* Scrollbars */
    ::-webkit-scrollbar {
      width: 8px;
      height: 8px;
    }

    ::-webkit-scrollbar-track {
      border-radius: 4px;
      background: var(--light-scrollbar-track);
    }

    ::-webkit-scrollbar-thumb {
      border-radius: 4px;
      background: var(--light-scrollbar-thumb);
    }

    ::-webkit-scrollbar-thumb:hover {
      background: var(--light-scrollbar-thumb-hover);
    }

    [data-theme="dark"] {
      ::-webkit-scrollbar-track {
        background: var(--dark-scrollbar-track) !important;
      }

      ::-webkit-scrollbar-thumb {
        background: var(--dark-scrollbar-thumb) !important;
      }

      ::-webkit-scrollbar-thumb:hover {
        background: var(--dark-scrollbar-thumb-hover) !important;
      }
    }

    /* Theme Toggle */
    .theme-toggle {
      background: none;
      border: 1px solid var(--border-color);
      border-radius: 4px;
      padding: 6px;
      cursor: pointer;
      color: var(--text-color);
    }

    .theme-toggle:hover {
      background-color: var(--primary-color-alpha);
    }

    [data-theme="light"] .moon-icon,
    [data-theme="dark"] .sun-icon {
      display: none;
    }

    /* Form Elements */
    input[type="checkbox"] {
      margin-right: 6px;
      vertical-align: middle;
    }

    label {
      font-size: 14px;
      margin-right: 16px;
      cursor: pointer;
    }

    #output-container a {
      cursor: pointer;
      text-decoration: none;
      color: #040404;
      padding: 2px;
    }

    #output-container a:hover {
      text-decoration: underline;
    }

    #output-container a.node-link.named {
      color: #0550ae;
    }

    #output-container a.node-link.anonymous {
      color: #116329;
    }

    #output-container a.node-link.anonymous:before {
      content: '"';
    }

    #output-container a.node-link.anonymous:after {
      content: '"';
    }

    #output-container a.node-link.error {
      color: #cf222e;
    }

    #output-container a.highlighted {
      background-color: #cae2ff;
      color: red;
      border-radius: 3px;
      text-decoration: underline;
    }

    #copy-button {
      background: none;
      border: 1px solid var(--border-color);
      border-radius: 4px;
      padding: 6px;
      cursor: pointer;
      color: var(--text-color);
      display: inline-flex;
      align-items: center;
      justify-content: center;
      margin-left: 8px;
    }

    #copy-button:hover {
      background-color: var(--primary-color-alpha);
    }

    #copy-button:focus {
      outline: none;
      border-color: var(--primary-color);
      box-shadow: 0 0 0 2px var(--primary-color-alpha);
    }

    .toast {
      position: fixed;
      bottom: 20px;
      right: 20px;
      background-color: var(--light-text);
      color: white;
      padding: 12px 16px;
      border-radius: 6px;
      box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
      font-size: 14px;
      font-weight: 500;
      opacity: 0;
      transform: translateY(20px);
      transition: all 0.3s ease;
      z-index: 1000;
      pointer-events: none;
    }

    .toast.show {
      opacity: 1;
      transform: translateY(0);
    }

    /* Dark Theme Node Colors */
    [data-theme="dark"] {
      & #output-container a {
        color: #d4d4d4;
      }

      & #output-container a.node-link.named {
        color: #79c0ff;
      }

      & #output-container a.node-link.anonymous {
        color: #7ee787;
      }

      & #output-container a.node-link.error {
        color: #ff7b72;
      }

      & #output-container a.highlighted {
        background-color: #656669;
        color: red;
      }

      & .CodeMirror {
        background-color: var(--dark-code-bg) !important;
        color: var(--dark-text) !important;
      }

      & .CodeMirror-gutters {
        background-color: var(--dark-panel-bg) !important;
        border-color: var(--dark-border) !important;
      }

      & .CodeMirror-cursor {
        border-color: var(--dark-text) !important;
      }

      & .CodeMirror-selected {
        background-color: rgba(255, 255, 255, 0.1) !important;
      }

      & .toast {
        background-color: var(--dark-bg);
|
||||||
|
color: var(--dark-text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.tree-row:has(.highlighted) {
|
||||||
|
background-color: var(--tree-row-bg);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div id="playground-container" style="visibility: hidden;">
|
||||||
|
<header>
|
||||||
|
<div class="header-item">
|
||||||
|
<span class="language-name">Language: THE_LANGUAGE_NAME</span>
|
||||||
|
<span id="language-version"></span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="logging-checkbox" type="checkbox">
|
||||||
|
<label for="logging-checkbox">log</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="anonymous-nodes-checkbox" type="checkbox">
|
||||||
|
<label for="anonymous-nodes-checkbox">show anonymous nodes</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="query-checkbox" type="checkbox">
|
||||||
|
<label for="query-checkbox">query</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="accessibility-checkbox" type="checkbox">
|
||||||
|
<label for="accessibility-checkbox">accessibility</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<label for="update-time">parse time: </label>
|
||||||
|
<span id="update-time"></span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<a href="https://tree-sitter.github.io/tree-sitter/7-playground.html#about">(?)</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<select id="language-select" style="display: none;">
|
||||||
|
<option value="parser">Parser</option>
|
||||||
|
</select>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<button id="theme-toggle" class="theme-toggle" aria-label="Toggle theme">
|
||||||
|
<svg class="sun-icon" viewBox="0 0 24 24" width="16" height="16">
|
||||||
|
<path fill="currentColor"
|
||||||
|
d="M12 17.5a5.5 5.5 0 1 0 0-11 5.5 5.5 0 0 0 0 11zm0 1.5a7 7 0 1 1 0-14 7 7 0 0 1 0 14zm0-16a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0V4a1 1 0 0 1 1-1zm0 15a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0v-2a1 1 0 0 1 1-1zm9-9a1 1 0 0 1-1 1h-2a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1zM4 12a1 1 0 0 1-1 1H1a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1z" />
|
||||||
|
</svg>
|
||||||
|
<svg class="moon-icon" viewBox="0 0 24 24" width="16" height="16">
|
||||||
|
<path fill="currentColor"
|
||||||
|
d="M12.1 22c-5.5 0-10-4.5-10-10s4.5-10 10-10c.2 0 .3 0 .5.1-1.3 1.4-2 3.2-2 5.2 0 4.1 3.4 7.5 7.5 7.5 2 0 3.8-.7 5.2-2 .1.2.1.3.1.5 0 5.4-4.5 9.7-10 9.7z" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<main>
|
||||||
|
<div id="input-pane">
|
||||||
|
<div class="panel-header">Code</div>
|
||||||
|
<div id="code-container">
|
||||||
|
<textarea id="code-input"></textarea>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="query-container" style="visibility: hidden; position: absolute;">
|
||||||
|
<div class="panel-header">Query</div>
|
||||||
|
<textarea id="query-input"></textarea>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="output-container-scroll">
|
||||||
|
<div class="panel-header">
|
||||||
|
Tree
|
||||||
|
<button type="button" id="copy-button" class="theme-toggle" aria-label="Copy tree">
|
||||||
|
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"
|
||||||
|
stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<rect width="14" height="14" x="8" y="8" rx="2" ry="2" />
|
||||||
|
<path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<pre id="output-container" class="highlight"></pre>
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.js"></script>
|
||||||
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.js"></script>
|
||||||
|
|
||||||
|
<script>LANGUAGE_BASE_URL = "";</script>
|
||||||
|
<script type="module" src="playground.js"></script>
|
||||||
|
<script type="module">
|
||||||
|
import * as TreeSitter from './web-tree-sitter.js';
|
||||||
|
window.TreeSitter = TreeSitter;
|
||||||
|
setTimeout(() => window.initializePlayground({local: true}), 1)
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
@@ -7,6 +7,7 @@ use std::{
 };

 use anyhow::{anyhow, Context, Result};
+use log::{error, info};
 use tiny_http::{Header, Response, Server};

 use super::wasm;

@@ -18,7 +19,7 @@ macro_rules! optional_resource {
             if let Some(tree_sitter_dir) = tree_sitter_dir {
                 Cow::Owned(fs::read(tree_sitter_dir.join($path)).unwrap())
             } else {
-                Cow::Borrowed(include_bytes!(concat!("../../", $path)))
+                Cow::Borrowed(include_bytes!(concat!("../../../", $path)))
             }
         }

@@ -34,25 +35,91 @@ macro_rules! optional_resource {
 }

 optional_resource!(get_playground_js, "docs/src/assets/js/playground.js");
-optional_resource!(get_lib_js, "lib/binding_web/tree-sitter.js");
-optional_resource!(get_lib_wasm, "lib/binding_web/tree-sitter.wasm");
+optional_resource!(get_lib_js, "lib/binding_web/web-tree-sitter.js");
+optional_resource!(get_lib_wasm, "lib/binding_web/web-tree-sitter.wasm");

 fn get_main_html(tree_sitter_dir: Option<&Path>) -> Cow<'static, [u8]> {
     tree_sitter_dir.map_or(
         Cow::Borrowed(include_bytes!("playground.html")),
         |tree_sitter_dir| {
-            Cow::Owned(fs::read(tree_sitter_dir.join("cli/src/playground.html")).unwrap())
+            Cow::Owned(fs::read(tree_sitter_dir.join("crates/cli/src/playground.html")).unwrap())
         },
     )
 }

+pub fn export(grammar_path: &Path, export_path: &Path) -> Result<()> {
+    let (grammar_name, language_wasm) = wasm::load_language_wasm_file(grammar_path)?;
+
+    fs::create_dir_all(export_path).with_context(|| {
+        format!(
+            "Failed to create export directory: {}",
+            export_path.display()
+        )
+    })?;
+
+    let tree_sitter_dir = env::var("TREE_SITTER_BASE_DIR").map(PathBuf::from).ok();
+
+    let playground_js = get_playground_js(tree_sitter_dir.as_deref());
+    let lib_js = get_lib_js(tree_sitter_dir.as_deref());
+    let lib_wasm = get_lib_wasm(tree_sitter_dir.as_deref());
+
+    let has_local_playground_js = !playground_js.is_empty();
+    let has_local_lib_js = !lib_js.is_empty();
+    let has_local_lib_wasm = !lib_wasm.is_empty();
+
+    let mut main_html = str::from_utf8(&get_main_html(tree_sitter_dir.as_deref()))
+        .unwrap()
+        .replace("THE_LANGUAGE_NAME", &grammar_name);
+
+    if !has_local_playground_js {
+        main_html = main_html.replace(
+            r#"<script type="module" src="playground.js"></script>"#,
+            r#"<script type="module" src="https://tree-sitter.github.io/tree-sitter/assets/js/playground.js"></script>"#
+        );
+    }
+    if !has_local_lib_js {
+        main_html = main_html.replace(
+            "import * as TreeSitter from './web-tree-sitter.js';",
+            "import * as TreeSitter from 'https://tree-sitter.github.io/web-tree-sitter.js';",
+        );
+    }
+
+    fs::write(export_path.join("index.html"), main_html.as_bytes())
+        .with_context(|| "Failed to write index.html")?;
+
+    fs::write(export_path.join("tree-sitter-parser.wasm"), language_wasm)
+        .with_context(|| "Failed to write parser wasm file")?;
+
+    if has_local_playground_js {
+        fs::write(export_path.join("playground.js"), playground_js)
+            .with_context(|| "Failed to write playground.js")?;
+    }
+
+    if has_local_lib_js {
+        fs::write(export_path.join("web-tree-sitter.js"), lib_js)
+            .with_context(|| "Failed to write web-tree-sitter.js")?;
+    }
+
+    if has_local_lib_wasm {
+        fs::write(export_path.join("web-tree-sitter.wasm"), lib_wasm)
+            .with_context(|| "Failed to write web-tree-sitter.wasm")?;
+    }
+
+    println!(
+        "Exported playground to {}",
+        export_path.canonicalize()?.display()
+    );
+
+    Ok(())
+}
+
 pub fn serve(grammar_path: &Path, open_in_browser: bool) -> Result<()> {
     let server = get_server()?;
     let (grammar_name, language_wasm) = wasm::load_language_wasm_file(grammar_path)?;
     let url = format!("http://{}", server.server_addr());
-    println!("Started playground on: {url}");
+    info!("Started playground on: {url}");
     if open_in_browser && webbrowser::open(&url).is_err() {
-        eprintln!("Failed to open '{url}' in a web browser");
+        error!("Failed to open '{url}' in a web browser");
     }

     let tree_sitter_dir = env::var("TREE_SITTER_BASE_DIR").map(PathBuf::from).ok();

@@ -79,16 +146,16 @@ pub fn serve(grammar_path: &Path, open_in_browser: bool) -> Result<()> {
                     response(&playground_js, &js_header)
                 }
             }
-            "/tree-sitter.js" => {
+            "/web-tree-sitter.js" => {
                 if lib_js.is_empty() {
-                    redirect("https://tree-sitter.github.io/tree-sitter.js")
+                    redirect("https://tree-sitter.github.io/web-tree-sitter.js")
                 } else {
                     response(&lib_js, &js_header)
                 }
            }
-            "/tree-sitter.wasm" => {
+            "/web-tree-sitter.wasm" => {
                 if lib_wasm.is_empty() {
-                    redirect("https://tree-sitter.github.io/tree-sitter.wasm")
+                    redirect("https://tree-sitter.github.io/web-tree-sitter.wasm")
                 } else {
                     response(&lib_wasm, &wasm_header)
                 }
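The new `export` function above writes a self-contained playground to a directory: `index.html`, the grammar's `tree-sitter-parser.wasm`, and whichever of `playground.js` / `web-tree-sitter.js` / `web-tree-sitter.wasm` are available locally, falling back to the hosted assets otherwise. A minimal sketch of a caller, assuming this module is reachable as `playground` inside the CLI crate; both paths are illustrative placeholders:

```rust
// Hypothetical caller, not part of this diff. Assumes the `playground` module
// above is in scope; the two paths are illustrative placeholders.
use std::path::Path;

use anyhow::Result;

fn export_playground() -> Result<()> {
    let grammar_path = Path::new("tree-sitter-mylang"); // grammar directory (placeholder)
    let export_path = Path::new("playground-export");   // output directory (placeholder)

    // Writes index.html and tree-sitter-parser.wasm, plus the local JS/wasm
    // assets when they exist; otherwise index.html points at the hosted copies.
    playground::export(grammar_path, export_path)
}
```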
@@ -6,65 +6,78 @@ use std::{
     time::Instant,
 };

-use anstyle::AnsiColor;
 use anyhow::{Context, Result};
+use log::warn;
 use streaming_iterator::StreamingIterator;
 use tree_sitter::{Language, Parser, Point, Query, QueryCursor};

 use crate::{
     query_testing::{self, to_utf8_point},
-    test::paint,
+    test::{TestInfo, TestOutcome, TestResult, TestSummary},
 };

-#[allow(clippy::too_many_arguments)]
+#[derive(Default)]
+pub struct QueryFileOptions {
+    pub ordered_captures: bool,
+    pub byte_range: Option<Range<usize>>,
+    pub point_range: Option<Range<Point>>,
+    pub containing_byte_range: Option<Range<usize>>,
+    pub containing_point_range: Option<Range<Point>>,
+    pub quiet: bool,
+    pub print_time: bool,
+    pub stdin: bool,
+}
+
 pub fn query_file_at_path(
     language: &Language,
     path: &Path,
     name: &str,
     query_path: &Path,
-    ordered_captures: bool,
-    byte_range: Option<Range<usize>>,
-    point_range: Option<Range<Point>>,
-    should_test: bool,
-    quiet: bool,
-    print_time: bool,
-    stdin: bool,
+    opts: &QueryFileOptions,
+    test_summary: Option<&mut TestSummary>,
 ) -> Result<()> {
     let stdout = io::stdout();
     let mut stdout = stdout.lock();

     let query_source = fs::read_to_string(query_path)
-        .with_context(|| format!("Error reading query file {query_path:?}"))?;
+        .with_context(|| format!("Error reading query file {}", query_path.display()))?;
     let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;

     let mut query_cursor = QueryCursor::new();
-    if let Some(range) = byte_range {
-        query_cursor.set_byte_range(range);
+    if let Some(ref range) = opts.byte_range {
+        query_cursor.set_byte_range(range.clone());
     }
-    if let Some(range) = point_range {
-        query_cursor.set_point_range(range);
+    if let Some(ref range) = opts.point_range {
+        query_cursor.set_point_range(range.clone());
+    }
+    if let Some(ref range) = opts.containing_byte_range {
+        query_cursor.set_containing_byte_range(range.clone());
+    }
+    if let Some(ref range) = opts.containing_point_range {
+        query_cursor.set_containing_point_range(range.clone());
     }

     let mut parser = Parser::new();
     parser.set_language(language)?;

     let mut results = Vec::new();
+    let should_test = test_summary.is_some();

-    if !should_test && !stdin {
+    if !should_test && !opts.stdin {
         writeln!(&mut stdout, "{name}")?;
     }

     let source_code =
-        fs::read(path).with_context(|| format!("Error reading source file {path:?}"))?;
+        fs::read(path).with_context(|| format!("Error reading source file {}", path.display()))?;
     let tree = parser.parse(&source_code, None).unwrap();

     let start = Instant::now();
-    if ordered_captures {
+    if opts.ordered_captures {
         let mut captures = query_cursor.captures(&query, tree.root_node(), source_code.as_slice());
         while let Some((mat, capture_index)) = captures.next() {
             let capture = mat.captures[*capture_index];
             let capture_name = &query.capture_names()[capture.index as usize];
-            if !quiet && !should_test {
+            if !opts.quiet && !should_test {
                 writeln!(
                     &mut stdout,
                     " pattern: {:>2}, capture: {} - {capture_name}, start: {}, end: {}, text: `{}`",

@@ -75,23 +88,25 @@ pub fn query_file_at_path(
                     capture.node.utf8_text(&source_code).unwrap_or("")
                 )?;
             }
-            results.push(query_testing::CaptureInfo {
-                name: (*capture_name).to_string(),
-                start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
-                end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
-            });
+            if should_test {
+                results.push(query_testing::CaptureInfo {
+                    name: (*capture_name).to_string(),
+                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
+                    end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
+                });
+            }
         }
     } else {
         let mut matches = query_cursor.matches(&query, tree.root_node(), source_code.as_slice());
         while let Some(m) = matches.next() {
-            if !quiet && !should_test {
+            if !opts.quiet && !should_test {
                 writeln!(&mut stdout, " pattern: {}", m.pattern_index)?;
             }
             for capture in m.captures {
                 let start = capture.node.start_position();
                 let end = capture.node.end_position();
                 let capture_name = &query.capture_names()[capture.index as usize];
-                if !quiet && !should_test {
+                if !opts.quiet && !should_test {
                     if end.row == start.row {
                         writeln!(
                             &mut stdout,

@@ -106,41 +121,52 @@ pub fn query_file_at_path(
                         )?;
                     }
                 }
-                results.push(query_testing::CaptureInfo {
-                    name: (*capture_name).to_string(),
-                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
-                    end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
-                });
+                if should_test {
+                    results.push(query_testing::CaptureInfo {
+                        name: (*capture_name).to_string(),
+                        start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
+                        end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
+                    });
+                }
             }
         }
     }
     if query_cursor.did_exceed_match_limit() {
-        writeln!(
-            &mut stdout,
-            " WARNING: Query exceeded maximum number of in-progress captures!"
-        )?;
+        warn!("Query exceeded maximum number of in-progress captures!");
     }
     if should_test {
-        let path_name = if stdin {
+        let path_name = if opts.stdin {
             "stdin"
         } else {
             Path::new(&path).file_name().unwrap().to_str().unwrap()
         };
+        // Invariant: `test_summary` will always be `Some` when `should_test` is true
+        let test_summary = test_summary.unwrap();
         match query_testing::assert_expected_captures(&results, path, &mut parser, language) {
             Ok(assertion_count) => {
-                println!(
-                    " ✓ {} ({} assertions)",
-                    paint(Some(AnsiColor::Green), path_name),
-                    assertion_count
-                );
+                test_summary.query_results.add_case(TestResult {
+                    name: path_name.to_string(),
+                    info: TestInfo::AssertionTest {
+                        outcome: TestOutcome::AssertionPassed { assertion_count },
+                        test_num: test_summary.test_num,
+                    },
+                });
             }
             Err(e) => {
-                println!(" ✗ {}", paint(Some(AnsiColor::Red), path_name));
+                test_summary.query_results.add_case(TestResult {
+                    name: path_name.to_string(),
+                    info: TestInfo::AssertionTest {
+                        outcome: TestOutcome::AssertionFailed {
+                            error: e.to_string(),
+                        },
+                        test_num: test_summary.test_num,
+                    },
+                });
                 return Err(e);
             }
         }
     }
-    if print_time {
+    if opts.print_time {
         writeln!(&mut stdout, "{:?}", start.elapsed())?;
     }

@@ -237,8 +237,8 @@ pub fn assert_expected_captures(
             return Err(anyhow!(
                 "Assertion failed: at {}, found {}, expected {}",
                 found.start,
+                found.name,
                 assertion.expected_capture_name,
-                found.name
             ));
         }
     } else {
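The refactor above folds `query_file_at_path`'s long argument list into a `QueryFileOptions` struct and reports assertion results through an optional `TestSummary` instead of printing them directly. A minimal sketch of a caller under the new signature; the language value, file paths, and names are illustrative placeholders, and the items from the CLI crate's query module are assumed to be in scope:

```rust
// Hypothetical caller, not part of this diff. Assumes `QueryFileOptions` and
// `query_file_at_path` (from the query module above) are in scope; the paths
// and names below are illustrative placeholders.
use std::path::Path;

use anyhow::Result;
use tree_sitter::Language;

fn print_captures(language: &Language) -> Result<()> {
    // Only the fields that differ from their defaults need to be spelled out,
    // since the struct derives `Default`.
    let opts = QueryFileOptions {
        ordered_captures: true,
        print_time: true,
        ..QueryFileOptions::default()
    };

    // `None` for the test summary means "just print captures"; passing
    // `Some(&mut summary)` records assertion outcomes on a `TestSummary` instead.
    query_file_at_path(
        language,
        Path::new("examples/file.src"),
        "file.src",
        Path::new("queries/highlights.scm"),
        &opts,
        None,
    )
}
```

Grouping the flags and ranges into one struct is also what lets the diff drop the `#[allow(clippy::too_many_arguments)]` attribute on `query_file_at_path`.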
@@ -3,11 +3,11 @@ root = true
 [*]
 charset = utf-8

-[*.{json,toml,yml,gyp}]
+[*.{json,toml,yml,gyp,xml}]
 indent_style = space
 indent_size = 2

-[*.js]
+[*.{js,ts}]
 indent_style = space
 indent_size = 2

@@ -31,6 +31,10 @@ indent_size = 4
 indent_style = space
 indent_size = 4

+[*.java]
+indent_style = space
+indent_size = 4
+
 [*.go]
 indent_style = tab
 indent_size = 8
crates/cli/src/templates/__init__.py (new file, 43 lines)
@@ -0,0 +1,43 @@
"""PARSER_DESCRIPTION"""

from importlib.resources import files as _files

from ._binding import language


def _get_query(name, file):
    try:
        query = _files(f"{__package__}") / file
        globals()[name] = query.read_text()
    except FileNotFoundError:
        globals()[name] = None
    return globals()[name]


def __getattr__(name):
    if name == "HIGHLIGHTS_QUERY":
        return _get_query("HIGHLIGHTS_QUERY", "HIGHLIGHTS_QUERY_PATH")
    if name == "INJECTIONS_QUERY":
        return _get_query("INJECTIONS_QUERY", "INJECTIONS_QUERY_PATH")
    if name == "LOCALS_QUERY":
        return _get_query("LOCALS_QUERY", "LOCALS_QUERY_PATH")
    if name == "TAGS_QUERY":
        return _get_query("TAGS_QUERY", "TAGS_QUERY_PATH")

    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


__all__ = [
    "language",
    "HIGHLIGHTS_QUERY",
    "INJECTIONS_QUERY",
    "LOCALS_QUERY",
    "TAGS_QUERY",
]


def __dir__():
    return sorted(__all__ + [
        "__all__", "__builtins__", "__cached__", "__doc__", "__file__",
        "__loader__", "__name__", "__package__", "__path__", "__spec__",
    ])
crates/cli/src/templates/__init__.pyi (new file, 17 lines)
@@ -0,0 +1,17 @@
from typing import Final
from typing_extensions import CapsuleType

HIGHLIGHTS_QUERY: Final[str] | None
"""The syntax highlighting query for this grammar."""

INJECTIONS_QUERY: Final[str] | None
"""The language injection query for this grammar."""

LOCALS_QUERY: Final[str] | None
"""The local variable query for this grammar."""

TAGS_QUERY: Final[str] | None
"""The symbol tagging query for this grammar."""

def language() -> CapsuleType:
    """The tree-sitter language function for this grammar."""
@@ -18,7 +18,7 @@ include = [
     "queries/*",
     "src/*",
     "tree-sitter.json",
-    "LICENSE",
+    "/LICENSE",
 ]

 [lib]
crates/cli/src/templates/binding.java (new file, 65 lines)
@@ -0,0 +1,65 @@
package PARSER_NS_CLEANED.jtreesitter.LOWER_PARSER_NAME;

import java.lang.foreign.*;

public final class PARSER_CLASS_NAME {
    private static final ValueLayout VOID_PTR =
        ValueLayout.ADDRESS.withTargetLayout(MemoryLayout.sequenceLayout(Long.MAX_VALUE, ValueLayout.JAVA_BYTE));
    private static final FunctionDescriptor FUNC_DESC = FunctionDescriptor.of(VOID_PTR);
    private static final Linker LINKER = Linker.nativeLinker();
    private static final PARSER_CLASS_NAME INSTANCE = new PARSER_CLASS_NAME();

    private final Arena arena = Arena.ofAuto();
    private volatile SymbolLookup lookup = null;

    private PARSER_CLASS_NAME() {}

    /**
     * Get the tree-sitter language for this grammar.
     */
    public static MemorySegment language() {
        if (INSTANCE.lookup == null)
            INSTANCE.lookup = INSTANCE.findLibrary();
        return language(INSTANCE.lookup);
    }

    /**
     * Get the tree-sitter language for this grammar.
     *
     * <strong>The {@linkplain Arena} used in the {@code lookup}
     * must not be closed while the language is being used.</strong>
     */
    public static MemorySegment language(SymbolLookup lookup) {
        return call(lookup, "tree_sitter_PARSER_NAME");
    }

    private SymbolLookup findLibrary() {
        try {
            var library = System.mapLibraryName("tree-sitter-KEBAB_PARSER_NAME");
            return SymbolLookup.libraryLookup(library, arena);
        } catch (IllegalArgumentException ex1) {
            try {
                System.loadLibrary("tree-sitter-KEBAB_PARSER_NAME");
                return SymbolLookup.loaderLookup();
            } catch (UnsatisfiedLinkError ex2) {
                ex1.addSuppressed(ex2);
                throw ex1;
            }
        }
    }

    private static UnsatisfiedLinkError unresolved(String name) {
        return new UnsatisfiedLinkError("Unresolved symbol: %s".formatted(name));
    }

    @SuppressWarnings("SameParameterValue")
    private static MemorySegment call(SymbolLookup lookup, String name) throws UnsatisfiedLinkError {
        var address = lookup.find(name).orElseThrow(() -> unresolved(name));
        try {
            var function = LINKER.downcallHandle(address, FUNC_DESC);
            return (MemorySegment) function.invokeExact();
        } catch (Throwable e) {
            throw new RuntimeException("Call to %s failed".formatted(name), e);
        }
    }
}
Some files were not shown because too many files have changed in this diff.