Compare commits
540 commits (6739742fb6 through 2bf04d1f04)
400 changed files with 17594 additions and 11854 deletions
1  .envrc  Normal file
@@ -0,0 +1 @@
+use flake
1  .gitattributes  vendored
@@ -3,5 +3,4 @@
 /lib/src/unicode/*.h linguist-vendored
 /lib/src/unicode/LICENSE linguist-vendored
-/cli/src/generate/prepare_grammar/*.json -diff
 Cargo.lock -diff
2  .github/ISSUE_TEMPLATE/bug_report.yml  vendored
@@ -1,6 +1,6 @@
 name: Bug Report
 description: Report a problem
-labels: [bug]
+type: Bug
 body:
 - type: textarea
 attributes:
2  .github/ISSUE_TEMPLATE/feature_request.yml  vendored
@@ -1,6 +1,6 @@
 name: Feature request
 description: Request an enhancement
-labels: [enhancement]
+type: Feature
 body:
 - type: markdown
 attributes:
3  .github/actions/cache/action.yml  vendored
@@ -17,10 +17,9 @@ runs:
 test/fixtures/grammars
 target/release/tree-sitter-*.wasm
 key: fixtures-${{ join(matrix.*, '_') }}-${{ hashFiles(
-'cli/generate/src/**',
+'crates/generate/src/**',
 'lib/src/parser.h',
 'lib/src/array.h',
 'lib/src/alloc.h',
-'xtask/src/*',
 'test/fixtures/grammars/*/**/src/*.c',
 '.github/actions/cache/action.yml') }}
27  .github/dependabot.yml  vendored
@@ -4,6 +4,8 @@ updates:
 directory: "/"
 schedule:
 interval: "weekly"
+cooldown:
+default-days: 3
 commit-message:
 prefix: "build(deps)"
 labels:
@@ -12,10 +14,16 @@
 groups:
 cargo:
 patterns: ["*"]
+ignore:
+- dependency-name: "*"
+update-types: ["version-update:semver-major", "version-update:semver-minor"]
+
 - package-ecosystem: "github-actions"
 directory: "/"
 schedule:
 interval: "weekly"
+cooldown:
+default-days: 3
 commit-message:
 prefix: "ci"
 labels:
@@ -24,3 +32,22 @@
 groups:
 actions:
 patterns: ["*"]
+
+- package-ecosystem: "npm"
+versioning-strategy: increase
+directories:
+- "/crates/npm"
+- "/crates/eslint"
+- "/lib/binding_web"
+schedule:
+interval: "weekly"
+cooldown:
+default-days: 3
+commit-message:
+prefix: "build(deps)"
+labels:
+- "dependencies"
+- "npm"
+groups:
+npm:
+patterns: ["*"]
29  .github/scripts/close_spam.js  vendored  Normal file
@@ -0,0 +1,29 @@
+module.exports = async ({ github, context }) => {
+  let target = context.payload.issue;
+  if (target) {
+    await github.rest.issues.update({
+      ...context.repo,
+      issue_number: target.number,
+      state: "closed",
+      state_reason: "not_planned",
+      title: "[spam]",
+      body: "",
+      type: null,
+    });
+  } else {
+    target = context.payload.pull_request;
+    await github.rest.pulls.update({
+      ...context.repo,
+      pull_number: target.number,
+      state: "closed",
+      title: "[spam]",
+      body: "",
+    });
+  }
+
+  await github.rest.issues.lock({
+    ...context.repo,
+    issue_number: target.number,
+    lock_reason: "spam",
+  });
+};
3  .github/scripts/cross.sh  vendored
@@ -1,3 +0,0 @@
-#!/bin/bash -eu
-
-exec docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
9  .github/scripts/make.sh  vendored
@@ -1,9 +0,0 @@
-#!/bin/bash -eu
-
-tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
-
-if [[ $BUILD_CMD == cross ]]; then
-  cross.sh make CC="$CC" AR="$AR" "$@"
-else
-  exec make "$@"
-fi
9  .github/scripts/tree-sitter.sh  vendored
@@ -1,9 +0,0 @@
-#!/bin/bash -eu
-
-tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
-
-if [[ $BUILD_CMD == cross ]]; then
-  cross.sh "$CROSS_RUNNER" "$tree_sitter" "$@"
-else
-  exec "$tree_sitter" "$@"
-fi
25  .github/scripts/wasm_stdlib.js  vendored  Normal file
@@ -0,0 +1,25 @@
+module.exports = async ({ github, context, core }) => {
+  if (context.eventName !== 'pull_request') return;
+
+  const prNumber = context.payload.pull_request.number;
+  const owner = context.repo.owner;
+  const repo = context.repo.repo;
+
+  const { data: files } = await github.rest.pulls.listFiles({
+    owner,
+    repo,
+    pull_number: prNumber
+  });
+
+  const changedFiles = files.map(file => file.filename);
+
+  const wasmStdLibSrc = 'crates/language/wasm/';
+  const dirChanged = changedFiles.some(file => file.startsWith(wasmStdLibSrc));
+
+  if (!dirChanged) return;
+
+  const wasmStdLibHeader = 'lib/src/wasm/wasm-stdlib.h';
+  const requiredChanged = changedFiles.includes(wasmStdLibHeader);
+
+  if (!requiredChanged) core.setFailed(`Changes detected in ${wasmStdLibSrc} but ${wasmStdLibHeader} was not modified.`);
+};
6  .github/workflows/backport.yml  vendored
@@ -14,17 +14,17 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Create app token
-uses: actions/create-github-app-token@v1
+uses: actions/create-github-app-token@v2
 id: app-token
 with:
 app-id: ${{ vars.BACKPORT_APP }}
 private-key: ${{ secrets.BACKPORT_KEY }}

 - name: Create backport PR
-uses: korthout/backport-action@v3
+uses: korthout/backport-action@v4
 with:
 pull_title: "${pull_title}"
 label_pattern: "^ci:backport ([^ ]+)$"
2  .github/workflows/bindgen.yml  vendored
@@ -16,7 +16,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Set up stable Rust toolchain
 uses: actions-rust-lang/setup-rust-toolchain@v1
297  .github/workflows/build.yml  vendored
@@ -1,10 +1,5 @@
 name: Build & Test

-env:
-CARGO_TERM_COLOR: always
-RUSTFLAGS: "-D warnings"
-CROSS_DEBUG: 1
-
 on:
 workflow_call:
 inputs:
@@ -31,38 +26,41 @@
 - windows-x86
 - macos-arm64
 - macos-x64
+- wasm32

 include:
 # When adding a new `target`:
 # 1. Define a new platform alias above
-# 2. Add a new record to the matrix map in `cli/npm/install.js`
+# 2. Add a new record to the matrix map in `crates/cli/npm/install.js`
-- { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+- { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-24.04-arm }
-- { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
+- { platform: linux-arm , target: armv7-unknown-linux-gnueabihf , os: ubuntu-24.04-arm }
-- { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-22.04 , features: wasm }
+- { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-24.04 }
-- { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+- { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-24.04 }
-- { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+- { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-24.04 }
-- { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
+- { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-11-arm }
-- { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , features: wasm }
+- { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-2025 }
-- { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
+- { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-2025 }
-- { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-latest , features: wasm }
+- { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-15 }
-- { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-13 , features: wasm }
+- { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-15-intel }
+- { platform: wasm32 , target: wasm32-unknown-unknown , os: ubuntu-24.04 }

-# Cross compilers for C library
+# Extra features
-- { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar }
+- { platform: linux-arm64 , features: wasm }
-- { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar }
+- { platform: linux-x64 , features: wasm }
-- { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar }
+- { platform: macos-arm64 , features: wasm }
-- { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar }
+- { platform: macos-x64 , features: wasm }

-# Prevent race condition (see #2041)
+# Cross-compilation
-- { platform: windows-x64 , rust-test-threads: 1 }
+- { platform: linux-arm , cross: true }
-- { platform: windows-x86 , rust-test-threads: 1 }
+- { platform: linux-x86 , cross: true }
+- { platform: linux-powerpc64 , cross: true }

-# Can't natively run CLI on Github runner's host
+# Compile-only
-- { platform: windows-arm64 , no-run: true }
+- { platform: wasm32 , no-run: true }

 env:
-BUILD_CMD: cargo
+CARGO_TERM_COLOR: always
-SUFFIX: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}
+RUSTFLAGS: -D warnings

 defaults:
 run:
@@ -70,13 +68,28 @@
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

-- name: Read Emscripten version
+- name: Set up cross-compilation
-run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<cli/loader/emscripten-version)" >> $GITHUB_ENV
+if: matrix.cross
+run: |
+for target in armv7-unknown-linux-gnueabihf i686-unknown-linux-gnu powerpc64-unknown-linux-gnu; do
+camel_target=${target//-/_}; target_cc=${target/-unknown/}
+printf 'CC_%s=%s\n' "$camel_target" "${target_cc/v7/}-gcc"
+printf 'AR_%s=%s\n' "$camel_target" "${target_cc/v7/}-ar"
+printf 'CARGO_TARGET_%s_LINKER=%s\n' "${camel_target^^}" "${target_cc/v7/}-gcc"
+done >> $GITHUB_ENV
+{
+printf 'CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER=qemu-arm -L /usr/arm-linux-gnueabihf\n'
+printf 'CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER=qemu-ppc64 -L /usr/powerpc64-linux-gnu\n'
+} >> $GITHUB_ENV
+
+- name: Get emscripten version
+if: contains(matrix.features, 'wasm')
+run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<crates/loader/emscripten-version)" >> $GITHUB_ENV

 - name: Install Emscripten
-if: ${{ !matrix.no-run && !matrix.use-cross }}
+if: contains(matrix.features, 'wasm')
 uses: mymindstorm/setup-emsdk@v14
 with:
 version: ${{ env.EMSCRIPTEN_VERSION }}
@@ -86,63 +99,82 @@
 with:
 target: ${{ matrix.target }}

-- name: Install cross
+- name: Install cross-compilation toolchain
-if: ${{ matrix.use-cross }}
+if: matrix.cross
 run: |
-if [ ! -x "$(command -v cross)" ]; then
+sudo apt-get update -qy
-# TODO: Remove 'RUSTFLAGS=""' once https://github.com/cross-rs/cross/issues/1561 is resolved
+if [[ $PLATFORM == linux-arm ]]; then
-RUSTFLAGS="" cargo install cross --git https://github.com/cross-rs/cross
+sudo apt-get install -qy {binutils,gcc}-arm-linux-gnueabihf qemu-user
+elif [[ $PLATFORM == linux-x86 ]]; then
+sudo apt-get install -qy {binutils,gcc}-i686-linux-gnu
+elif [[ $PLATFORM == linux-powerpc64 ]]; then
+sudo apt-get install -qy {binutils,gcc}-powerpc64-linux-gnu qemu-user
 fi

-- name: Configure cross
-if: ${{ matrix.use-cross }}
-run: |
-printf '%s\n' > Cross.toml \
-'[target.${{ matrix.target }}]' \
-'image = "ghcr.io/cross-rs/${{ matrix.target }}:edge"' \
-'[build]' \
-'pre-build = [' \
-' "dpkg --add-architecture $CROSS_DEB_ARCH",' \
-' "curl -fsSL https://deb.nodesource.com/setup_22.x | bash -",' \
-' "apt-get update && apt-get -y install libssl-dev nodejs"' \
-']'
-cat - Cross.toml <<< 'Cross.toml:'
-printf '%s\n' >> $GITHUB_ENV \
-"CROSS_CONFIG=$PWD/Cross.toml" \
-"CROSS_IMAGE=ghcr.io/cross-rs/${{ matrix.target }}:edge"

-- name: Set up environment
 env:
-RUST_TEST_THREADS: ${{ matrix.rust-test-threads }}
+PLATFORM: ${{ matrix.platform }}
-USE_CROSS: ${{ matrix.use-cross }}
-TARGET: ${{ matrix.target }}
+- name: Install MinGW and Clang (Windows x64 MSYS2)
-CC: ${{ matrix.cc }}
+if: matrix.platform == 'windows-x64'
-AR: ${{ matrix.ar }}
+uses: msys2/setup-msys2@v2
+with:
+update: true
+install: |
+mingw-w64-x86_64-toolchain
+mingw-w64-x86_64-clang
+mingw-w64-x86_64-make
+mingw-w64-x86_64-cmake
+
+# TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
+# the `mismatched-lifetime-syntaxes` lint
+- name: Build wasmtime library (Windows x64 MSYS2)
+if: contains(matrix.features, 'wasm') && matrix.platform == 'windows-x64'
 run: |
-PATH="$PWD/.github/scripts:$PATH"
+mkdir -p target
-printf '%s/.github/scripts\n' "$PWD" >> $GITHUB_PATH
+WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
+jq -r '.packages[] | select(.name == "wasmtime-c-api-impl") | .version')
+curl -LSs "$WASMTIME_REPO/archive/refs/tags/v${WASMTIME_VERSION}.tar.gz" | tar xzf - -C target
+cd target/wasmtime-${WASMTIME_VERSION}
+cmake -S crates/c-api -B target/c-api \
+-DCMAKE_INSTALL_PREFIX="$PWD/artifacts" \
+-DWASMTIME_DISABLE_ALL_FEATURES=ON \
+-DWASMTIME_FEATURE_CRANELIFT=ON \
+-DWASMTIME_TARGET='x86_64-pc-windows-gnu'
+cmake --build target/c-api && cmake --install target/c-api
+printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
+env:
+WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
+RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow
+
-printf '%s\n' >> $GITHUB_ENV \
+- name: Build C library (Windows x64 MSYS2 CMake)
-'TREE_SITTER=tree-sitter.sh' \
+if: matrix.platform == 'windows-x64'
-"TARGET=$TARGET" \
+shell: msys2 {0}
-"ROOT=$PWD"
+run: |
+cmake -G Ninja -S . -B build/static \
+-DBUILD_SHARED_LIBS=OFF \
+-DCMAKE_BUILD_TYPE=Debug \
+-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
+-DTREE_SITTER_FEATURE_WASM=$WASM \
+-DCMAKE_C_COMPILER=clang
+cmake --build build/static
+
-[[ -n $RUST_TEST_THREADS ]] && \
+cmake -G Ninja -S . -B build/shared \
-printf 'RUST_TEST_THREADS=%s\n' "$RUST_TEST_THREADS" >> $GITHUB_ENV
+-DBUILD_SHARED_LIBS=ON \
+-DCMAKE_BUILD_TYPE=Debug \
-[[ -n $CC ]] && printf 'CC=%s\n' "$CC" >> $GITHUB_ENV
+-DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
-[[ -n $AR ]] && printf 'AR=%s\n' "$AR" >> $GITHUB_ENV
+-DTREE_SITTER_FEATURE_WASM=$WASM \
+-DCMAKE_C_COMPILER=clang
-if [[ $USE_CROSS == true ]]; then
+cmake --build build/shared
-printf 'BUILD_CMD=cross\n' >> $GITHUB_ENV
+rm -rf \
-runner=$(cross.sh bash -c "env | sed -n 's/^CARGO_TARGET_.*_RUNNER=//p'")
+build/{static,shared} \
-[[ -n $runner ]] && printf 'CROSS_RUNNER=%s\n' "$runner" >> $GITHUB_ENV
+"${CMAKE_PREFIX_PATH}/artifacts" \
-fi
+target/wasmtime-${WASMTIME_VERSION}
+env:
+WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}

 # TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
 # the `mismatched-lifetime-syntaxes` lint
 - name: Build wasmtime library
-if: ${{ !matrix.use-cross && contains(matrix.features, 'wasm') }}
+if: contains(matrix.features, 'wasm')
 run: |
 mkdir -p target
 WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
@@ -158,37 +190,47 @@
 printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
 env:
 WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
-RUSTFLAGS: "--cap-lints allow"
+RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow

 - name: Build C library (make)
-if: ${{ runner.os != 'Windows' }}
+if: runner.os != 'Windows'
-run: make.sh -j CFLAGS="$CFLAGS"
+run: |
+if [[ $PLATFORM == linux-arm ]]; then
+CC=arm-linux-gnueabihf-gcc; AR=arm-linux-gnueabihf-ar
+elif [[ $PLATFORM == linux-x86 ]]; then
+CC=i686-linux-gnu-gcc; AR=i686-linux-gnu-ar
+elif [[ $PLATFORM == linux-powerpc64 ]]; then
+CC=powerpc64-linux-gnu-gcc; AR=powerpc64-linux-gnu-ar
+else
+CC=gcc; AR=ar
+fi
+make -j CFLAGS="$CFLAGS" CC=$CC AR=$AR
 env:
+PLATFORM: ${{ matrix.platform }}
 CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types

 - name: Build C library (CMake)
-if: ${{ !matrix.use-cross }}
+if: "!matrix.cross"
 run: |
-cmake -S lib -B build/static \
+cmake -S . -B build/static \
 -DBUILD_SHARED_LIBS=OFF \
 -DCMAKE_BUILD_TYPE=Debug \
 -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
 -DTREE_SITTER_FEATURE_WASM=$WASM
 cmake --build build/static --verbose

-cmake -S lib -B build/shared \
+cmake -S . -B build/shared \
 -DBUILD_SHARED_LIBS=ON \
 -DCMAKE_BUILD_TYPE=Debug \
 -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
 -DTREE_SITTER_FEATURE_WASM=$WASM
 cmake --build build/shared --verbose
 env:
-CC: ${{ contains(matrix.target, 'linux') && 'clang' || '' }}
+CC: ${{ contains(matrix.platform, 'linux') && 'clang' || '' }}
 WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}

-- name: Build wasm library
+- name: Build Wasm library
-# No reason to build on the same Github runner hosts many times
+if: contains(matrix.features, 'wasm')
-if: ${{ !matrix.no-run && !matrix.use-cross }}
 shell: bash
 run: |
 cd lib/binding_web
@@ -199,70 +241,71 @@
 npm run build:debug

 - name: Check no_std builds
-if: ${{ !matrix.no-run && inputs.run-test }}
+if: inputs.run-test && !matrix.no-run
+working-directory: lib
 shell: bash
-run: |
+run: cargo check --no-default-features --target='${{ matrix.target }}'
-cd lib
-$BUILD_CMD check --no-default-features

 - name: Build target
-run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.features }}
+run: cargo build --release --target='${{ matrix.target }}' --features='${{ matrix.features }}' $PACKAGE
+env:
+PACKAGE: ${{ matrix.platform == 'wasm32' && '-p tree-sitter' || '' }}

 - name: Cache fixtures
 id: cache
-if: ${{ !matrix.no-run && inputs.run-test }}
+if: inputs.run-test && !matrix.no-run
 uses: ./.github/actions/cache

 - name: Fetch fixtures
-if: ${{ !matrix.no-run && inputs.run-test }}
+if: inputs.run-test && !matrix.no-run
-run: $BUILD_CMD run -p xtask -- fetch-fixtures
+run: cargo run -p xtask --target='${{ matrix.target }}' -- fetch-fixtures

 - name: Generate fixtures
-if: ${{ !matrix.no-run && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+if: inputs.run-test && !matrix.no-run && steps.cache.outputs.cache-hit != 'true'
-run: $BUILD_CMD run -p xtask -- generate-fixtures
+run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures

 - name: Generate Wasm fixtures
-if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm') && steps.cache.outputs.cache-hit != 'true'
-run: $BUILD_CMD run -p xtask -- generate-fixtures --wasm
+run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures --wasm

 - name: Run main tests
-if: ${{ !matrix.no-run && inputs.run-test }}
+if: inputs.run-test && !matrix.no-run
-run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.features }}
+run: cargo test --target='${{ matrix.target }}' --features='${{ matrix.features }}'

-- name: Run wasm tests
+- name: Run Wasm tests
-if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
+if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm')
-run: $BUILD_CMD run -p xtask -- test-wasm
+run: cargo run -p xtask --target='${{ matrix.target }}' -- test-wasm

-- name: Run benchmarks
-# Cross-compiled benchmarks are pointless
-if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
-run: $BUILD_CMD bench benchmark -p tree-sitter-cli --target=${{ matrix.target }}
-
 - name: Upload CLI artifact
-uses: actions/upload-artifact@v4
+if: "!matrix.no-run"
+uses: actions/upload-artifact@v6
 with:
 name: tree-sitter.${{ matrix.platform }}
-path: target/${{ matrix.target }}/release/tree-sitter${{ env.SUFFIX }}
+path: target/${{ matrix.target }}/release/tree-sitter${{ contains(matrix.target, 'windows') && '.exe' || '' }}
 if-no-files-found: error
 retention-days: 7

 - name: Upload Wasm artifacts
-if: ${{ matrix.platform == 'linux-x64' }}
+if: matrix.platform == 'linux-x64'
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@v6
 with:
 name: tree-sitter.wasm
 path: |
-lib/binding_web/tree-sitter.js
+lib/binding_web/web-tree-sitter.js
-lib/binding_web/tree-sitter.js.map
+lib/binding_web/web-tree-sitter.js.map
-lib/binding_web/tree-sitter.cjs
+lib/binding_web/web-tree-sitter.cjs
-lib/binding_web/tree-sitter.cjs.map
+lib/binding_web/web-tree-sitter.cjs.map
-lib/binding_web/tree-sitter.wasm
+lib/binding_web/web-tree-sitter.wasm
-lib/binding_web/tree-sitter.wasm.map
+lib/binding_web/web-tree-sitter.wasm.map
-lib/binding_web/debug/tree-sitter.cjs
+lib/binding_web/debug/web-tree-sitter.cjs
-lib/binding_web/debug/tree-sitter.cjs.map
+lib/binding_web/debug/web-tree-sitter.cjs.map
-lib/binding_web/debug/tree-sitter.js
+lib/binding_web/debug/web-tree-sitter.js
-lib/binding_web/debug/tree-sitter.js.map
+lib/binding_web/debug/web-tree-sitter.js.map
-lib/binding_web/debug/tree-sitter.wasm
+lib/binding_web/debug/web-tree-sitter.wasm
-lib/binding_web/debug/tree-sitter.wasm.map
+lib/binding_web/debug/web-tree-sitter.wasm.map
+lib/binding_web/lib/*.c
+lib/binding_web/lib/*.h
+lib/binding_web/lib/*.ts
+lib/binding_web/src/*.ts
 if-no-files-found: error
 retention-days: 7
5  .github/workflows/ci.yml  vendored
@@ -26,7 +26,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Set up stable Rust toolchain
 uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -44,3 +44,6 @@

 build:
 uses: ./.github/workflows/build.yml
+
+check-wasm-stdlib:
+uses: ./.github/workflows/wasm_stdlib.yml
7  .github/workflows/docs.yml  vendored
@@ -3,6 +3,7 @@ on:
 push:
 branches: [master]
 paths: [docs/**]
+workflow_dispatch:

 jobs:
 deploy-docs:
@@ -15,7 +16,7 @@

 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Set up Rust
 uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -25,7 +26,7 @@
 GH_TOKEN: ${{ github.token }}
 run: |
 jq_expr='.assets[] | select(.name | contains("x86_64-unknown-linux-gnu")) | .browser_download_url'
-url=$(gh api repos/rust-lang/mdbook/releases/latest --jq "$jq_expr")
+url=$(gh api repos/rust-lang/mdbook/releases/tags/v0.4.52 --jq "$jq_expr")
 mkdir mdbook
 curl -sSL "$url" | tar -xz -C mdbook
 printf '%s/mdbook\n' "$PWD" >> "$GITHUB_PATH"
@@ -40,7 +41,7 @@
 uses: actions/configure-pages@v5

 - name: Upload artifact
-uses: actions/upload-pages-artifact@v3
+uses: actions/upload-pages-artifact@v4
 with:
 path: docs/book
30  .github/workflows/emscripten.yml  vendored
@@ -1,30 +0,0 @@
-name: Update Emscripten
-
-on:
-pull_request:
-types: [opened, synchronize]
-
-permissions:
-contents: write
-pull-requests: read
-
-jobs:
-update-emscripten:
-if: github.actor == 'dependabot[bot]'
-runs-on: ubuntu-latest
-steps:
-- uses: actions/checkout@v4
-with:
-ref: ${{ github.event.pull_request.head.sha }}
-
-- name: Set up stable Rust toolchain
-uses: actions-rust-lang/setup-rust-toolchain@v1
-
-- name: Run emscripten update xtask
-run: |
-git config --global user.name "dependabot[bot]"
-git config --global user.email "49699333+dependabot[bot]@users.noreply.github.com"
-cargo xtask upgrade-emscripten
-
-- name: Push updated version
-run: git push origin HEAD:$GITHUB_HEAD_REF
13  .github/workflows/nvim_ts.yml  vendored
@@ -3,7 +3,10 @@ name: nvim-treesitter parser tests
 on:
 pull_request:
 paths:
-- 'cli/**'
+- 'crates/cli/**'
+- 'crates/config/**'
+- 'crates/generate/**'
+- 'crates/loader/**'
 - '.github/workflows/nvim_ts.yml'
 workflow_dispatch:

@@ -13,7 +16,7 @@ concurrency:

 jobs:
 check_compilation:
-timeout-minutes: 20
+timeout-minutes: 30
 strategy:
 fail-fast: false
 matrix:
@@ -25,9 +28,9 @@
 NVIM: ${{ matrix.os == 'windows-latest' && 'nvim-win64\\bin\\nvim.exe' || 'nvim' }}
 NVIM_TS_DIR: nvim-treesitter
 steps:
-- uses: actions/checkout@v4
+- uses: actions/checkout@v6

-- uses: actions/checkout@v4
+- uses: actions/checkout@v6
 with:
 repository: nvim-treesitter/nvim-treesitter
 path: ${{ env.NVIM_TS_DIR }}
@@ -55,7 +58,7 @@

 - if: matrix.type == 'build'
 name: Compile parsers
-run: $NVIM -l ./scripts/install-parsers.lua
+run: $NVIM -l ./scripts/install-parsers.lua --max-jobs=10
 working-directory: ${{ env.NVIM_TS_DIR }}
 shell: bash
76  .github/workflows/release.yml  vendored
@@ -17,13 +17,15 @@ jobs:
 runs-on: ubuntu-latest
 needs: build
 permissions:
+id-token: write
+attestations: write
 contents: write
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Download build artifacts
-uses: actions/download-artifact@v4
+uses: actions/download-artifact@v7
 with:
 path: artifacts
@@ -33,26 +35,13 @@

 - name: Prepare release artifacts
 run: |
-mkdir -p target
+mkdir -p target web
-mv artifacts/tree-sitter.wasm/* target/
+mv artifacts/tree-sitter.wasm/* web/

-# Rename files
+tar -czf target/web-tree-sitter.tar.gz -C web .
-mv target/tree-sitter.js target/web-tree-sitter.js
-mv target/tree-sitter.js.map target/web-tree-sitter.js.map
-mv target/tree-sitter.cjs target/web-tree-sitter.cjs
-mv target/tree-sitter.cjs.map target/web-tree-sitter.cjs.map
-mv target/tree-sitter.wasm target/web-tree-sitter.wasm
-mv target/tree-sitter.wasm.map target/web-tree-sitter.wasm.map
-
-mv target/debug/tree-sitter.js target/web-tree-sitter-debug.js
-mv target/debug/tree-sitter.js.map target/web-tree-sitter-debug.js.map
-mv target/debug/tree-sitter.cjs target/web-tree-sitter-debug.cjs
-mv target/debug/tree-sitter.cjs.map target/web-tree-sitter-debug.cjs.map
-mv target/debug/tree-sitter.wasm target/web-tree-sitter-debug.wasm
-mv target/debug/tree-sitter.wasm.map target/web-tree-sitter-debug.wasm.map
-rm -rf target/debug
-
 rm -r artifacts/tree-sitter.wasm

 for platform in $(cd artifacts; ls | sed 's/^tree-sitter\.//'); do
 exe=$(ls artifacts/tree-sitter.$platform/tree-sitter*)
 gzip --stdout --name $exe > target/tree-sitter-$platform.gz
@@ -60,57 +49,65 @@
 rm -rf artifacts
 ls -l target/

+- name: Generate attestations
+uses: actions/attest-build-provenance@v3
+with:
+subject-path: |
+target/tree-sitter-*.gz
+target/web-tree-sitter.tar.gz
+
 - name: Create release
 run: |-
-gh release create ${{ github.ref_name }} \
+gh release create $GITHUB_REF_NAME \
 target/tree-sitter-*.gz \
-target/web-tree-sitter.js \
+target/web-tree-sitter.tar.gz
-target/web-tree-sitter.js.map \
-target/web-tree-sitter.cjs \
-target/web-tree-sitter.cjs.map \
-target/web-tree-sitter.wasm \
-target/web-tree-sitter.wasm.map \
-target/web-tree-sitter-debug.js \
-target/web-tree-sitter-debug.js.map \
-target/web-tree-sitter-debug.cjs \
-target/web-tree-sitter-debug.cjs.map \
-target/web-tree-sitter-debug.wasm \
-target/web-tree-sitter-debug.wasm.map
 env:
 GH_TOKEN: ${{ github.token }}

 crates_io:
 name: Publish packages to Crates.io
 runs-on: ubuntu-latest
+environment: crates
+permissions:
+id-token: write
+contents: read
 needs: release
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Set up Rust
 uses: actions-rust-lang/setup-rust-toolchain@v1

+- name: Set up registry token
+id: auth
+uses: rust-lang/crates-io-auth-action@v1
+
 - name: Publish crates to Crates.io
 uses: katyo/publish-crates@v2
 with:
-registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
+registry-token: ${{ steps.auth.outputs.token }}

 npm:
 name: Publish packages to npmjs.com
 runs-on: ubuntu-latest
+environment: npm
+permissions:
+id-token: write
+contents: read
 needs: release
 strategy:
 fail-fast: false
 matrix:
-directory: [cli/npm, lib/binding_web]
+directory: [crates/cli/npm, lib/binding_web]
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Set up Node
-uses: actions/setup-node@v4
+uses: actions/setup-node@v6
 with:
-node-version: 20
+node-version: 24
 registry-url: https://registry.npmjs.org

 - name: Set up Rust
@@ -125,9 +122,8 @@
 npm run build:debug
 CJS=true npm run build
 CJS=true npm run build:debug
+npm run build:dts

 - name: Publish to npmjs.com
 working-directory: ${{ matrix.directory }}
 run: npm publish
-env:
-NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
8  .github/workflows/response.yml  vendored
@@ -17,13 +17,13 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout script
-uses: actions/checkout@v4
+uses: actions/checkout@v6
 with:
 sparse-checkout: .github/scripts/close_unresponsive.js
 sparse-checkout-cone-mode: false

 - name: Run script
-uses: actions/github-script@v7
+uses: actions/github-script@v8
 with:
 script: |
 const script = require('./.github/scripts/close_unresponsive.js')
@@ -35,13 +35,13 @@
 runs-on: ubuntu-latest
 steps:
 - name: Checkout script
-uses: actions/checkout@v4
+uses: actions/checkout@v6
 with:
 sparse-checkout: .github/scripts/remove_response_label.js
 sparse-checkout-cone-mode: false

 - name: Run script
-uses: actions/github-script@v7
+uses: actions/github-script@v8
 with:
 script: |
 const script = require('./.github/scripts/remove_response_label.js')
4  .github/workflows/reviewers_remove.yml  vendored
@@ -12,13 +12,13 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout script
-uses: actions/checkout@v4
+uses: actions/checkout@v6
 with:
 sparse-checkout: .github/scripts/reviewers_remove.js
 sparse-checkout-cone-mode: false

 - name: Run script
-uses: actions/github-script@v7
+uses: actions/github-script@v8
 with:
 script: |
 const script = require('./.github/scripts/reviewers_remove.js')
2  .github/workflows/sanitize.yml  vendored
@@ -15,7 +15,7 @@
 TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
 steps:
 - name: Checkout repository
-uses: actions/checkout@v4
+uses: actions/checkout@v6

 - name: Install UBSAN library
 run: sudo apt-get update -y && sudo apt-get install -y libubsan1
29  .github/workflows/spam.yml  vendored  Normal file
@@ -0,0 +1,29 @@
+name: Close as spam
+
+on:
+issues:
+types: [labeled]
+pull_request_target:
+types: [labeled]
+
+permissions:
+issues: write
+pull-requests: write
+
+jobs:
+spam:
+runs-on: ubuntu-latest
+if: github.event.label.name == 'spam'
+steps:
+- name: Checkout script
+uses: actions/checkout@v6
+with:
+sparse-checkout: .github/scripts/close_spam.js
+sparse-checkout-cone-mode: false
+
+- name: Run script
+uses: actions/github-script@v8
+with:
+script: |
+const script = require('./.github/scripts/close_spam.js')
+await script({github, context})
11  .github/workflows/wasm_exports.yml (vendored)
@@ -1,23 +1,24 @@
-name: Check WASM Exports
+name: Check Wasm Exports

 on:
   pull_request:
     paths:
       - lib/include/tree_sitter/api.h
       - lib/binding_web/**
+      - xtask/src/**
   push:
     branches: [master]
     paths:
       - lib/include/tree_sitter/api.h
       - lib/binding_rust/bindings.rs
-      - lib/CMakeLists.txt
+      - CMakeLists.txt

 jobs:
   check-wasm-exports:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v6

       - name: Set up stable Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -32,9 +33,9 @@ jobs:
         env:
           CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types

-      - name: Build WASM Library
+      - name: Build Wasm Library
         working-directory: lib/binding_web
         run: npm ci && npm run build:debug

-      - name: Check WASM exports
+      - name: Check Wasm exports
         run: cargo xtask check-wasm-exports
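The Build Wasm Library step compiles the web binding in lib/binding_web before the export check runs. For orientation, a minimal consumer-side sketch of that binding, assuming the named-export web-tree-sitter API and a locally available tree-sitter-javascript.wasm grammar file (both the file name and the exact API surface are assumptions, not taken from this diff):

    // Minimal usage sketch of the built web binding (assumptions noted above).
    import { Parser, Language } from "web-tree-sitter";

    async function main() {
      await Parser.init(); // load the tree-sitter Wasm runtime
      const JavaScript = await Language.load("tree-sitter-javascript.wasm");

      const parser = new Parser();
      parser.setLanguage(JavaScript);

      const tree = parser.parse("let answer = 42;");
      if (tree) console.log(tree.rootNode.toString());
    }

    main();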
19  .github/workflows/wasm_stdlib.yml (vendored, new file)
@@ -0,0 +1,19 @@
+name: Check Wasm Stdlib build
+
+on:
+  workflow_call:
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v6
+
+      - name: Check directory changes
+        uses: actions/github-script@v8
+        with:
+          script: |
+            const scriptPath = `${process.env.GITHUB_WORKSPACE}/.github/scripts/wasm_stdlib.js`;
+            const script = require(scriptPath);
+            return script({ github, context, core });
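As with the other script-driven workflows in this diff, the checked-in .github/scripts/wasm_stdlib.js is not shown here. A hypothetical sketch of a directory-change check that returns a value through github-script; the watched path and the output name are assumptions:

    // Hypothetical sketch; not the repository's actual wasm_stdlib.js.
    module.exports = async ({ github, context, core }) => {
      const { owner, repo } = context.repo;
      const pullNumber = context.payload.pull_request?.number;
      if (!pullNumber) {
        core.info("Not a pull request event; skipping the check.");
        return false;
      }

      // List the files touched by the PR and decide whether the Wasm stdlib
      // build is affected.
      const files = await github.paginate(github.rest.pulls.listFiles, {
        owner,
        repo,
        pull_number: pullNumber,
        per_page: 100,
      });
      const affected = files.some((f) => f.filename.startsWith("lib/src/wasm/"));
      core.setOutput("affected", affected);
      return affected;
    };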
5  .gitignore (vendored)
@@ -1,10 +1,12 @@
 log*.html
+.direnv

 .idea
 *.xcodeproj
 .vscode
 .cache
 .zig-cache
+.direnv

 profile*
 fuzz-results
@@ -24,6 +26,7 @@ docs/assets/js/tree-sitter.js
 *.dylib
 *.so
 *.so.[0-9]*
+*.dll
 *.o
 *.obj
 *.exp
@@ -33,3 +36,5 @@ docs/assets/js/tree-sitter.js
 .build
 build
 zig-*
+
+/result
11  .zed/settings.json (new file)
@@ -0,0 +1,11 @@
+{
+  "lsp": {
+    "rust-analyzer": {
+      "initialization_options": {
+        "cargo": {
+          "features": "all"
+        }
+      }
+    }
+  }
+}
CMakeLists.txt
@@ -1,7 +1,7 @@
 cmake_minimum_required(VERSION 3.13)

 project(tree-sitter
-        VERSION "0.25.9"
+        VERSION "0.27.0"
         DESCRIPTION "An incremental parsing system for programming tools"
         HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/"
         LANGUAGES C)
@@ -11,15 +11,15 @@ option(TREE_SITTER_FEATURE_WASM "Enable the Wasm feature" OFF)
 option(AMALGAMATED "Build using an amalgamated source" OFF)

 if(AMALGAMATED)
-  set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
+  set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/lib/src/lib.c")
 else()
-  file(GLOB TS_SOURCE_FILES src/*.c)
+  file(GLOB TS_SOURCE_FILES lib/src/*.c)
-  list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
+  list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/lib/src/lib.c")
 endif()

 add_library(tree-sitter ${TS_SOURCE_FILES})

-target_include_directories(tree-sitter PRIVATE src src/wasm include)
+target_include_directories(tree-sitter PRIVATE lib/src lib/src/wasm PUBLIC lib/include)

 if(MSVC)
   target_compile_options(tree-sitter PRIVATE
@@ -81,13 +81,13 @@ set_target_properties(tree-sitter
     SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}"
     DEFINE_SYMBOL "")

-target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE)
+target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE _BSD_SOURCE _DARWIN_C_SOURCE)

 include(GNUInstallDirs)

-configure_file(tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)
+configure_file(lib/tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)

-install(FILES include/tree_sitter/api.h
+install(FILES lib/include/tree_sitter/api.h
         DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/tree_sitter")
 install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc"
         DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
1924  Cargo.lock (generated): file diff suppressed because it is too large
101  Cargo.toml
@@ -1,26 +1,26 @@
 [workspace]
-default-members = ["cli"]
+default-members = ["crates/cli"]
 members = [
-  "cli",
-  "cli/config",
-  "cli/generate",
-  "cli/loader",
+  "crates/cli",
+  "crates/config",
+  "crates/generate",
+  "crates/highlight",
+  "crates/loader",
+  "crates/tags",
+  "crates/xtask",
+  "crates/language",
   "lib",
-  "lib/language",
-  "tags",
-  "highlight",
-  "xtask",
 ]
 resolver = "2"

 [workspace.package]
-version = "0.25.9"
+version = "0.27.0"
 authors = [
   "Max Brunsfeld <maxbrunsfeld@gmail.com>",
   "Amaan Qureshi <amaanq12@gmail.com>",
 ]
 edition = "2021"
-rust-version = "1.82"
+rust-version = "1.85"
 homepage = "https://tree-sitter.github.io/tree-sitter"
 repository = "https://github.com/tree-sitter/tree-sitter"
 license = "MIT"
@@ -103,62 +103,61 @@ codegen-units = 256

 [workspace.dependencies]
 ansi_colours = "1.2.3"
-anstyle = "1.0.10"
+anstyle = "1.0.13"
-anyhow = "1.0.95"
+anyhow = "1.0.100"
-bstr = "1.11.3"
+bstr = "1.12.0"
-cc = "1.2.10"
+cc = "1.2.53"
-clap = { version = "4.5.27", features = [
+clap = { version = "4.5.54", features = [
   "cargo",
   "derive",
   "env",
   "help",
+  "string",
   "unstable-styles",
 ] }
-clap_complete = "4.5.42"
+clap_complete = "4.5.65"
-clap_complete_nushell = "4.5.5"
+clap_complete_nushell = "4.5.10"
+crc32fast = "1.5.0"
 ctor = "0.2.9"
-ctrlc = { version = "3.4.5", features = ["termination"] }
+ctrlc = { version = "3.5.0", features = ["termination"] }
 dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
-etcetera = "0.8.0"
+etcetera = "0.11.0"
-filetime = "0.2.25"
 fs4 = "0.12.0"
-git2 = "0.20.0"
-glob = "0.3.2"
+glob = "0.3.3"
 heck = "0.5.0"
 html-escape = "0.2.13"
-indexmap = "2.7.1"
+indexmap = "2.12.1"
-indoc = "2.0.5"
+indoc = "2.0.6"
-libloading = "0.8.6"
+libloading = "0.9.0"
-log = { version = "0.4.25", features = ["std"] }
+log = { version = "0.4.28", features = ["std"] }
-memchr = "2.7.4"
+memchr = "2.7.6"
-once_cell = "1.20.2"
+once_cell = "1.21.3"
-path-slash = "0.2.1"
 pretty_assertions = "1.4.1"
 rand = "0.8.5"
-regex = "1.11.1"
+regex = "1.11.3"
-regex-syntax = "0.8.5"
+regex-syntax = "0.8.6"
-rustc-hash = "2.1.0"
+rustc-hash = "2.1.1"
+schemars = "1.0.5"
-semver = { version = "1.0.25", features = ["serde"] }
+semver = { version = "1.0.27", features = ["serde"] }
-serde = { version = "1.0.217", features = ["derive"] }
-serde_derive = "1.0.217"
+serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.137", features = ["preserve_order"] }
+serde_json = { version = "1.0.149", features = ["preserve_order"] }
 similar = "2.7.0"
-smallbitvec = "2.5.3"
+smallbitvec = "2.6.0"
 streaming-iterator = "0.1.9"
-tempfile = "3.15.0"
+tempfile = "3.23.0"
-thiserror = "2.0.11"
+thiserror = "2.0.17"
 tiny_http = "0.12.0"
-toml = "0.8.19"
 topological-sort = "0.2.2"
-unindent = "0.2.3"
+unindent = "0.2.4"
-url = { version = "2.5.4", features = ["serde"] }
 walkdir = "2.5.0"
-wasmparser = "0.224.0"
+wasmparser = "0.243.0"
-webbrowser = "1.0.3"
+webbrowser = "1.0.5"

-tree-sitter = { version = "0.25.9", path = "./lib" }
+tree-sitter = { version = "0.27.0", path = "./lib" }
-tree-sitter-generate = { version = "0.25.9", path = "./cli/generate" }
+tree-sitter-generate = { version = "0.27.0", path = "./crates/generate" }
-tree-sitter-loader = { version = "0.25.9", path = "./cli/loader" }
+tree-sitter-loader = { version = "0.27.0", path = "./crates/loader" }
-tree-sitter-config = { version = "0.25.9", path = "./cli/config" }
+tree-sitter-config = { version = "0.27.0", path = "./crates/config" }
-tree-sitter-highlight = { version = "0.25.9", path = "./highlight" }
+tree-sitter-highlight = { version = "0.27.0", path = "./crates/highlight" }
-tree-sitter-tags = { version = "0.25.9", path = "./tags" }
+tree-sitter-tags = { version = "0.27.0", path = "./crates/tags" }

+tree-sitter-language = { version = "0.1", path = "./crates/language" }
2  LICENSE
@@ -1,6 +1,6 @@
 The MIT License (MIT)

-Copyright (c) 2018-2024 Max Brunsfeld
+Copyright (c) 2018 Max Brunsfeld

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
41  Makefile
@@ -1,8 +1,4 @@
-ifeq ($(OS),Windows_NT)
-$(error Windows is not supported)
-endif
-
-VERSION := 0.25.9
+VERSION := 0.27.0
 DESCRIPTION := An incremental parsing system for programming tools
 HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/

@@ -10,6 +6,7 @@ HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
 PREFIX ?= /usr/local
 INCLUDEDIR ?= $(PREFIX)/include
 LIBDIR ?= $(PREFIX)/lib
+BINDIR ?= $(PREFIX)/bin
 PCLIBDIR ?= $(LIBDIR)/pkgconfig

 # collect sources
@@ -27,7 +24,7 @@ OBJ := $(SRC:.c=.o)
 ARFLAGS := rcs
 CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
-override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE
+override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_DARWIN_C_SOURCE
 override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include

 # ABI versioning
@@ -35,20 +32,25 @@ SONAME_MAJOR := $(word 1,$(subst ., ,$(VERSION)))
 SONAME_MINOR := $(word 2,$(subst ., ,$(VERSION)))

 # OS-specific bits
-ifneq ($(findstring darwin,$(shell $(CC) -dumpmachine)),)
+MACHINE := $(shell $(CC) -dumpmachine)
+
+ifneq ($(findstring darwin,$(MACHINE)),)
 SOEXT = dylib
 SOEXTVER_MAJOR = $(SONAME_MAJOR).$(SOEXT)
 SOEXTVER = $(SONAME_MAJOR).$(SONAME_MINOR).$(SOEXT)
 LINKSHARED += -dynamiclib -Wl,-install_name,$(LIBDIR)/libtree-sitter.$(SOEXTVER)
+else ifneq ($(findstring mingw32,$(MACHINE)),)
+SOEXT = dll
+LINKSHARED += -s -shared -Wl,--out-implib,libtree-sitter.dll.a
 else
 SOEXT = so
 SOEXTVER_MAJOR = $(SOEXT).$(SONAME_MAJOR)
 SOEXTVER = $(SOEXT).$(SONAME_MAJOR).$(SONAME_MINOR)
 LINKSHARED += -shared -Wl,-soname,libtree-sitter.$(SOEXTVER)
-endif
 ifneq ($(filter $(shell uname),FreeBSD NetBSD DragonFly),)
 PCLIBDIR := $(PREFIX)/libdata/pkgconfig
 endif
+endif

 all: libtree-sitter.a libtree-sitter.$(SOEXT) tree-sitter.pc

@@ -61,6 +63,10 @@ ifneq ($(STRIP),)
 	$(STRIP) $@
 endif

+ifneq ($(findstring mingw32,$(MACHINE)),)
+libtree-sitter.dll.a: libtree-sitter.$(SOEXT)
+endif
+
 tree-sitter.pc: lib/tree-sitter.pc.in
 	sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
 		-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
@@ -69,17 +75,27 @@ tree-sitter.pc: lib/tree-sitter.pc.in
 		-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
 		-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@

+shared: libtree-sitter.$(SOEXT)
+
+static: libtree-sitter.a
+
 clean:
-	$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT)
+	$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT) libtree-sitter.dll.a

 install: all
 	install -d '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter '$(DESTDIR)$(PCLIBDIR)' '$(DESTDIR)$(LIBDIR)'
 	install -m644 lib/include/tree_sitter/api.h '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h
 	install -m644 tree-sitter.pc '$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
 	install -m644 libtree-sitter.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a
+ifneq ($(findstring mingw32,$(MACHINE)),)
+	install -d '$(DESTDIR)$(BINDIR)'
+	install -m755 libtree-sitter.dll '$(DESTDIR)$(BINDIR)'/libtree-sitter.dll
+	install -m755 libtree-sitter.dll.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.dll.a
+else
 	install -m755 libtree-sitter.$(SOEXT) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER)
-	ln -sf libtree-sitter.$(SOEXTVER) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER_MAJOR)
-	ln -sf libtree-sitter.$(SOEXTVER_MAJOR) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT)
+	cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER) libtree-sitter.$(SOEXTVER_MAJOR)
+	cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER_MAJOR) libtree-sitter.$(SOEXT)
+endif

 uninstall:
 	$(RM) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a \
@@ -88,8 +104,9 @@ uninstall:
 		'$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT) \
 		'$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h \
 		'$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
+	rmdir '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter

-.PHONY: all install uninstall clean
+.PHONY: all shared static install uninstall clean

 ##### Dev targets #####
Package.swift
@@ -27,6 +27,8 @@ let package = Package(
                 .headerSearchPath("src"),
                 .define("_POSIX_C_SOURCE", to: "200112L"),
                 .define("_DEFAULT_SOURCE"),
+                .define("_BSD_SOURCE"),
+                .define("_DARWIN_C_SOURCE"),
             ]),
     ],
     cLanguageStandard: .c11
README.md
@@ -14,8 +14,8 @@ Tree-sitter is a parser generator tool and an incremental parsing library.
 ## Links
 - [Documentation](https://tree-sitter.github.io)
 - [Rust binding](lib/binding_rust/README.md)
-- [WASM binding](lib/binding_web/README.md)
+- [Wasm binding](lib/binding_web/README.md)
-- [Command-line interface](cli/README.md)
+- [Command-line interface](crates/cli/README.md)

 [discord]: https://img.shields.io/discord/1063097320771698699?logo=discord&label=discord
 [matrix]: https://img.shields.io/matrix/tree-sitter-chat%3Amatrix.org?logo=matrix&label=matrix
build.zig
@@ -40,6 +40,8 @@ pub fn build(b: *std.Build) !void {

     lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L");
     lib.root_module.addCMacro("_DEFAULT_SOURCE", "");
+    lib.root_module.addCMacro("_BSD_SOURCE", "");
+    lib.root_module.addCMacro("_DARWIN_C_SOURCE", "");

     if (wasm) {
         if (b.lazyDependency(wasmtimeDep(target.result), .{})) |wasmtime| {
build.zig.zon
@@ -1,7 +1,7 @@
 .{
     .name = .tree_sitter,
     .fingerprint = 0x841224b447ac0d4f,
-    .version = "0.25.9",
+    .version = "0.27.0",
     .minimum_zig_version = "0.14.1",
     .paths = .{
         "build.zig",
@@ -13,63 +13,83 @@
     },
     .dependencies = .{
         .wasmtime_c_api_aarch64_android = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-android-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-android-c-api.tar.xz",
-            .hash = "N-V-__8AAC3KCQZMd5ea2CkcbjldaVqCT7BT_9_rLMId6V__",
+            .hash = "N-V-__8AAIfPIgdw2YnV3QyiFQ2NHdrxrXzzCdjYJyxJDOta",
             .lazy = true,
         },
         .wasmtime_c_api_aarch64_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-linux-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAGUY3gU6jj2CNJAYb7HiMNVPV1FIcTCI6RSSYwXu",
+            .hash = "N-V-__8AAIt97QZi7Pf7nNJ2mVY6uxA80Klyuvvtop3pLMRK",
             .lazy = true,
         },
         .wasmtime_c_api_aarch64_macos = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-macos-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-macos-c-api.tar.xz",
-            .hash = "N-V-__8AAM1GMARD6LGQebhVsSZ0uePUoo3Fw5nEO2L764vf",
+            .hash = "N-V-__8AAAO48QQf91w9RmmUDHTja8DrXZA1n6Bmc8waW3qe",
+            .lazy = true,
+        },
+        .wasmtime_c_api_aarch64_musl = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-musl-c-api.tar.xz",
+            .hash = "N-V-__8AAI196wa9pwADoA2RbCDp5F7bKQg1iOPq6gIh8-FH",
             .lazy = true,
         },
         .wasmtime_c_api_aarch64_windows = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-windows-c-api.zip",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-windows-c-api.zip",
-            .hash = "N-V-__8AAH8a_wQ7oAeVVsaJcoOZhKTMkHIBc_XjDyLlHp2x",
+            .hash = "N-V-__8AAC9u4wXfqd1Q6XyQaC8_DbQZClXux60Vu5743N05",
+            .lazy = true,
+        },
+        .wasmtime_c_api_armv7_linux = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-armv7-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAHXe8gWs3s83Cc5G6SIq0_jWxj8fGTT5xG4vb6-x",
+            .lazy = true,
+        },
+        .wasmtime_c_api_i686_linux = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAN2pzgUUfulRCYnipSfis9IIYHoTHVlieLRmKuct",
+            .lazy = true,
+        },
+        .wasmtime_c_api_i686_windows = .{
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-windows-c-api.zip",
+            .hash = "N-V-__8AAJu0YAUUTFBLxFIOi-MSQVezA6MMkpoFtuaf2Quf",
             .lazy = true,
         },
         .wasmtime_c_api_riscv64gc_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-riscv64gc-linux-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-riscv64gc-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAN2cuQadBwMc8zJxv0sMY99Ae1Nc1dZcZAK9b4DZ",
+            .hash = "N-V-__8AAG8m-gc3E3AIImtTZ3l1c7HC6HUWazQ9OH5KACX4",
             .lazy = true,
         },
         .wasmtime_c_api_s390x_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-s390x-linux-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-s390x-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAPevngYz99mwT0KQY9my2ax1p6APzgLEJeV4II9U",
+            .hash = "N-V-__8AAH314gd-gE4IBp2uvAL3gHeuW1uUZjMiLLeUdXL_",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_android = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-android-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-android-c-api.tar.xz",
-            .hash = "N-V-__8AABHIEgaTyzPfjgnnCy0dwJiXoDiJFblCkYOJsQvy",
+            .hash = "N-V-__8AAIPNRwfNkznebrcGb0IKUe7f35bkuZEYOjcx6q3f",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-linux-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-linux-c-api.tar.xz",
-            .hash = "N-V-__8AALUN5AWSEDRulL9u-OJJ-l0_GoT5UFDtGWZayEIq",
+            .hash = "N-V-__8AAI8EDwcyTtk_Afhk47SEaqfpoRqGkJeZpGs69ChF",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_macos = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-macos-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-macos-c-api.tar.xz",
-            .hash = "N-V-__8AANUeXwSPh13TqJCSSFdi87GEcHs8zK6FqE4v_TjB",
+            .hash = "N-V-__8AAGtGNgVaOpHSxC22IjrampbRIy6lLwscdcAE8nG1",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_mingw = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-mingw-c-api.zip",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-mingw-c-api.zip",
-            .hash = "N-V-__8AALundgW-p1ffOnd7bsYyL8SY5OziDUZu7cXio2EL",
+            .hash = "N-V-__8AAPS2PAbVix50L6lnddlgazCPTz3whLUFk1qnRtnZ",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_musl = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-musl-c-api.tar.xz",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-musl-c-api.tar.xz",
-            .hash = "N-V-__8AALMZ5wXJWW5qY-3MMjTAYR0MusckvzCsmg-69ALH",
+            .hash = "N-V-__8AAF-WEQe0nzvi09PgusM5i46FIuCKJmIDWUleWgQ3",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_windows = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-windows-c-api.zip",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-windows-c-api.zip",
-            .hash = "N-V-__8AAG-uVQVEDMsB1ymJzxpHcoiXo1_I3TFnPM5Zjy1i",
+            .hash = "N-V-__8AAKGNXwbpJQsn0_6kwSIVDDWifSg8cBzf7T2RzsC9",
             .lazy = true,
         },
     },

@@ -1 +0,0 @@
-
@@ -1 +0,0 @@
-4.0.4
@@ -1,30 +0,0 @@
-use log::{LevelFilter, Log, Metadata, Record};
-
-#[allow(dead_code)]
-struct Logger {
-    pub filter: Option<String>,
-}
-
-impl Log for Logger {
-    fn enabled(&self, _: &Metadata) -> bool {
-        true
-    }
-
-    fn log(&self, record: &Record) {
-        eprintln!(
-            "[{}] {}",
-            record
-                .module_path()
-                .unwrap_or_default()
-                .trim_start_matches("rust_tree_sitter_cli::"),
-            record.args()
-        );
-    }
-
-    fn flush(&self) {}
-}
-
-pub fn init() {
-    log::set_boxed_logger(Box::new(Logger { filter: None })).unwrap();
-    log::set_max_level(LevelFilter::Info);
-}
@@ -1,410 +0,0 @@
(Deleted file: a 410-line standalone playground page for "tree-sitter THE_LANGUAGE_NAME". It pulled in CodeMirror and Clusterize from cdnjs, defined the playground header controls (log, show anonymous nodes, query, accessibility, parse time, theme toggle), the code and query editors, the parse-tree output pane, a module script importing ./tree-sitter.js and calling window.initializePlayground({local: true}), and the page's full light/dark CSS theme.)
"""PARSER_DESCRIPTION"""
|
|
||||||
|
|
||||||
from importlib.resources import files as _files
|
|
||||||
|
|
||||||
from ._binding import language
|
|
||||||
|
|
||||||
|
|
||||||
def _get_query(name, file):
|
|
||||||
query = _files(f"{__package__}.queries") / file
|
|
||||||
globals()[name] = query.read_text()
|
|
||||||
return globals()[name]
|
|
||||||
|
|
||||||
|
|
||||||
def __getattr__(name):
|
|
||||||
# NOTE: uncomment these to include any queries that this grammar contains:
|
|
||||||
|
|
||||||
# if name == "HIGHLIGHTS_QUERY":
|
|
||||||
# return _get_query("HIGHLIGHTS_QUERY", "highlights.scm")
|
|
||||||
# if name == "INJECTIONS_QUERY":
|
|
||||||
# return _get_query("INJECTIONS_QUERY", "injections.scm")
|
|
||||||
# if name == "LOCALS_QUERY":
|
|
||||||
# return _get_query("LOCALS_QUERY", "locals.scm")
|
|
||||||
# if name == "TAGS_QUERY":
|
|
||||||
# return _get_query("TAGS_QUERY", "tags.scm")
|
|
||||||
|
|
||||||
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"language",
|
|
||||||
# "HIGHLIGHTS_QUERY",
|
|
||||||
# "INJECTIONS_QUERY",
|
|
||||||
# "LOCALS_QUERY",
|
|
||||||
# "TAGS_QUERY",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def __dir__():
|
|
||||||
return sorted(__all__ + [
|
|
||||||
"__all__", "__builtins__", "__cached__", "__doc__", "__file__",
|
|
||||||
"__loader__", "__name__", "__package__", "__path__", "__spec__",
|
|
||||||
])
|
|
||||||
|
|
@ -1,10 +0,0 @@
|
||||||
@@ -1,10 +0,0 @@
-from typing import Final
-
-# NOTE: uncomment these to include any queries that this grammar contains:
-
-# HIGHLIGHTS_QUERY: Final[str]
-# INJECTIONS_QUERY: Final[str]
-# LOCALS_QUERY: Final[str]
-# TAGS_QUERY: Final[str]
-
-def language() -> object: ...
@@ -1,9 +0,0 @@
-const assert = require("node:assert");
-const { test } = require("node:test");
-
-const Parser = require("tree-sitter");
-
-test("can load grammar", () => {
-  const parser = new Parser();
-  assert.doesNotThrow(() => parser.setLanguage(require(".")));
-});
@@ -1,21 +0,0 @@
-fn main() {
-    let src_dir = std::path::Path::new("src");
-
-    let mut c_config = cc::Build::new();
-    c_config.std("c11").include(src_dir);
-
-    #[cfg(target_env = "msvc")]
-    c_config.flag("-utf-8");
-
-    let parser_path = src_dir.join("parser.c");
-    c_config.file(&parser_path);
-    println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());
-
-    let scanner_path = src_dir.join("scanner.c");
-    if scanner_path.exists() {
-        c_config.file(&scanner_path);
-        println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
-    }
-
-    c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
-}
27  cli/src/templates/index.d.ts (vendored)
@@ -1,27 +0,0 @@
-type BaseNode = {
-  type: string;
-  named: boolean;
-};
-
-type ChildNode = {
-  multiple: boolean;
-  required: boolean;
-  types: BaseNode[];
-};
-
-type NodeInfo =
-  | (BaseNode & {
-      subtypes: BaseNode[];
-    })
-  | (BaseNode & {
-      fields: { [name: string]: ChildNode };
-      children: ChildNode[];
-    });
-
-type Language = {
-  language: unknown;
-  nodeTypeInfo: NodeInfo[];
-};
-
-declare const language: Language;
-export = language;
const root = require("path").join(__dirname, "..", "..");
|
|
||||||
|
|
||||||
module.exports =
|
|
||||||
typeof process.versions.bun === "string"
|
|
||||||
// Support `bun build --compile` by being statically analyzable enough to find the .node file at build-time
|
|
||||||
? require(`../../prebuilds/${process.platform}-${process.arch}/tree-sitter-KEBAB_PARSER_NAME.node`)
|
|
||||||
: require("node-gyp-build")(root);
|
|
||||||
|
|
||||||
try {
|
|
||||||
module.exports.nodeTypeInfo = require("../../src/node-types.json");
|
|
||||||
} catch (_) {}
|
|
||||||
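The deleted loader above resolved the compiled .node binding (with a statically analyzable path for `bun build --compile`). For context, a minimal sketch of how a grammar package generated from these templates is consumed from Node; the package name tree-sitter-mylang is a placeholder, not a real package from this diff:

    // Minimal usage sketch; "tree-sitter-mylang" is a hypothetical package name.
    const Parser = require("tree-sitter");
    const MyLang = require("tree-sitter-mylang");

    const parser = new Parser();
    parser.setLanguage(MyLang);

    const tree = parser.parse("example source text");
    console.log(tree.rootNode.toString());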
1584  cli/src/test.rs: file diff suppressed because it is too large
use std::{
|
|
||||||
future::Future,
|
|
||||||
pin::{pin, Pin},
|
|
||||||
ptr,
|
|
||||||
task::{self, Context, Poll, RawWaker, RawWakerVTable, Waker},
|
|
||||||
};
|
|
||||||
|
|
||||||
use tree_sitter::Parser;
|
|
||||||
|
|
||||||
use super::helpers::fixtures::get_language;
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_node_in_fut() {
|
|
||||||
let (ret, pended) = tokio_like_spawn(async {
|
|
||||||
let mut parser = Parser::new();
|
|
||||||
let language = get_language("bash");
|
|
||||||
parser.set_language(&language).unwrap();
|
|
||||||
|
|
||||||
let tree = parser.parse("#", None).unwrap();
|
|
||||||
|
|
||||||
let root = tree.root_node();
|
|
||||||
let root_ref = &root;
|
|
||||||
|
|
||||||
let fut_val_fn = || async {
|
|
||||||
yield_now().await;
|
|
||||||
root.child(0).unwrap().kind()
|
|
||||||
};
|
|
||||||
|
|
||||||
yield_now().await;
|
|
||||||
|
|
||||||
let fut_ref_fn = || async {
|
|
||||||
yield_now().await;
|
|
||||||
root_ref.child(0).unwrap().kind()
|
|
||||||
};
|
|
||||||
|
|
||||||
let f1 = fut_val_fn().await;
|
|
||||||
let f2 = fut_ref_fn().await;
|
|
||||||
assert_eq!(f1, f2);
|
|
||||||
|
|
||||||
let fut_val = async {
|
|
||||||
yield_now().await;
|
|
||||||
root.child(0).unwrap().kind()
|
|
||||||
};
|
|
||||||
|
|
||||||
let fut_ref = async {
|
|
||||||
yield_now().await;
|
|
||||||
root_ref.child(0).unwrap().kind()
|
|
||||||
};
|
|
||||||
|
|
||||||
let f1 = fut_val.await;
|
|
||||||
let f2 = fut_ref.await;
|
|
||||||
assert_eq!(f1, f2);
|
|
||||||
|
|
||||||
f1
|
|
||||||
})
|
|
||||||
.join();
|
|
||||||
assert_eq!(ret, "comment");
|
|
||||||
assert_eq!(pended, 5);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_node_and_cursor_ref_in_fut() {
|
|
||||||
let ((), pended) = tokio_like_spawn(async {
|
|
||||||
let mut parser = Parser::new();
|
|
||||||
let language = get_language("c");
|
|
||||||
parser.set_language(&language).unwrap();
|
|
||||||
|
|
||||||
let tree = parser.parse("#", None).unwrap();
|
|
||||||
|
|
||||||
let root = tree.root_node();
|
|
||||||
let root_ref = &root;
|
|
||||||
|
|
||||||
let mut cursor = tree.walk();
|
|
||||||
let cursor_ref = &mut cursor;
|
|
||||||
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
|
|
||||||
let fut_val = async {
|
|
||||||
yield_now().await;
|
|
||||||
let _ = root.to_sexp();
|
|
||||||
};
|
|
||||||
|
|
||||||
yield_now().await;
|
|
||||||
|
|
||||||
let fut_ref = async {
|
|
||||||
yield_now().await;
|
|
||||||
let _ = root_ref.to_sexp();
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
};
|
|
||||||
|
|
||||||
fut_val.await;
|
|
||||||
fut_ref.await;
|
|
||||||
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
})
|
|
||||||
.join();
|
|
||||||
assert_eq!(pended, 3);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_node_and_cursor_ref_in_fut_with_fut_fabrics() {
|
|
||||||
let ((), pended) = tokio_like_spawn(async {
|
|
||||||
let mut parser = Parser::new();
|
|
||||||
let language = get_language("javascript");
|
|
||||||
parser.set_language(&language).unwrap();
|
|
||||||
|
|
||||||
let tree = parser.parse("#", None).unwrap();
|
|
||||||
|
|
||||||
let root = tree.root_node();
|
|
||||||
let root_ref = &root;
|
|
||||||
|
|
||||||
let mut cursor = tree.walk();
|
|
||||||
let cursor_ref = &mut cursor;
|
|
||||||
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
|
|
||||||
let fut_val = || async {
|
|
||||||
yield_now().await;
|
|
||||||
let _ = root.to_sexp();
|
|
||||||
};
|
|
||||||
|
|
||||||
yield_now().await;
|
|
||||||
|
|
||||||
let fut_ref = || async move {
|
|
||||||
yield_now().await;
|
|
||||||
let _ = root_ref.to_sexp();
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
};
|
|
||||||
|
|
||||||
fut_val().await;
|
|
||||||
fut_val().await;
|
|
||||||
fut_ref().await;
|
|
||||||
})
|
|
||||||
.join();
|
|
||||||
assert_eq!(pended, 4);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_node_and_cursor_ref_in_fut_with_inner_spawns() {
|
|
||||||
let (ret, pended) = tokio_like_spawn(async {
|
|
||||||
let mut parser = Parser::new();
|
|
||||||
let language = get_language("rust");
|
|
||||||
parser.set_language(&language).unwrap();
|
|
||||||
|
|
||||||
let tree = parser.parse("#", None).unwrap();
|
|
||||||
|
|
||||||
let mut cursor = tree.walk();
|
|
||||||
let cursor_ref = &mut cursor;
|
|
||||||
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
|
|
||||||
let fut_val = || {
|
|
||||||
let tree = tree.clone();
|
|
||||||
async move {
|
|
||||||
let root = tree.root_node();
|
|
||||||
let mut cursor = tree.walk();
|
|
||||||
let cursor_ref = &mut cursor;
|
|
||||||
yield_now().await;
|
|
||||||
let _ = root.to_sexp();
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
yield_now().await;
|
|
||||||
|
|
||||||
let fut_ref = || {
|
|
||||||
let tree = tree.clone();
|
|
||||||
async move {
|
|
||||||
let root = tree.root_node();
|
|
||||||
let root_ref = &root;
|
|
||||||
let mut cursor = tree.walk();
|
|
||||||
let cursor_ref = &mut cursor;
|
|
||||||
yield_now().await;
|
|
||||||
let _ = root_ref.to_sexp();
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let ((), p1) = tokio_like_spawn(fut_val()).await.unwrap();
|
|
||||||
let ((), p2) = tokio_like_spawn(fut_ref()).await.unwrap();
|
|
||||||
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
|
|
||||||
fut_val().await;
|
|
||||||
fut_val().await;
|
|
||||||
fut_ref().await;
|
|
||||||
|
|
||||||
cursor_ref.goto_first_child();
|
|
||||||
|
|
||||||
p1 + p2
|
|
||||||
})
|
|
||||||
.join();
|
|
||||||
assert_eq!(pended, 4);
|
|
||||||
assert_eq!(ret, 2);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn tokio_like_spawn<T>(future: T) -> JoinHandle<(T::Output, usize)>
|
|
||||||
where
|
|
||||||
T: Future + Send + 'static,
|
|
||||||
T::Output: Send + 'static,
|
|
||||||
{
|
|
||||||
// No runtime, just noop waker
|
|
||||||
|
|
||||||
let waker = noop_waker();
|
|
||||||
let mut cx = task::Context::from_waker(&waker);
|
|
||||||
|
|
||||||
let mut pending = 0;
|
|
||||||
let mut future = pin!(future);
|
|
||||||
let ret = loop {
|
|
||||||
match future.as_mut().poll(&mut cx) {
|
|
||||||
Poll::Pending => pending += 1,
|
|
||||||
Poll::Ready(r) => {
|
|
||||||
break r;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
};
|
|
||||||
JoinHandle::new((ret, pending))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn yield_now() {
|
|
||||||
struct SimpleYieldNow {
|
|
||||||
yielded: bool,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Future for SimpleYieldNow {
|
|
||||||
type Output = ();
|
|
||||||
|
|
||||||
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
|
|
||||||
cx.waker().wake_by_ref();
|
|
||||||
if self.yielded {
|
|
||||||
return Poll::Ready(());
|
|
||||||
}
|
|
||||||
self.yielded = true;
|
|
||||||
Poll::Pending
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
SimpleYieldNow { yielded: false }.await;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub const fn noop_waker() -> Waker {
|
|
||||||
const VTABLE: RawWakerVTable = RawWakerVTable::new(
|
|
||||||
// Cloning just returns a new no-op raw waker
|
|
||||||
|_| RAW,
|
|
||||||
// `wake` does nothing
|
|
||||||
|_| {},
|
|
||||||
// `wake_by_ref` does nothing
|
|
||||||
|_| {},
|
|
||||||
// Dropping does nothing as we don't allocate anything
|
|
||||||
|_| {},
|
|
||||||
);
|
|
||||||
const RAW: RawWaker = RawWaker::new(ptr::null(), &VTABLE);
|
|
||||||
unsafe { Waker::from_raw(RAW) }
|
|
||||||
}
|
|
||||||
|
|
||||||
struct JoinHandle<T> {
|
|
||||||
data: Option<T>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T> JoinHandle<T> {
|
|
||||||
#[must_use]
|
|
||||||
const fn new(data: T) -> Self {
|
|
||||||
Self { data: Some(data) }
|
|
||||||
}
|
|
||||||
|
|
||||||
fn join(&mut self) -> T {
|
|
||||||
self.data.take().unwrap()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<T: Unpin> Future for JoinHandle<T> {
|
|
||||||
type Output = std::result::Result<T, ()>;
|
|
||||||
|
|
||||||
fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
|
|
||||||
let data = self.get_mut().data.take().unwrap();
|
|
||||||
Poll::Ready(Ok(data))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
@@ -1,121 +0,0 @@
use std::{
    collections::HashMap,
    os::raw::c_void,
    sync::{
        atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
        Mutex,
    },
};

#[ctor::ctor]
unsafe fn initialize_allocation_recording() {
    tree_sitter::set_allocator(
        Some(ts_record_malloc),
        Some(ts_record_calloc),
        Some(ts_record_realloc),
        Some(ts_record_free),
    );
}

#[derive(Debug, PartialEq, Eq, Hash)]
struct Allocation(*const c_void);
unsafe impl Send for Allocation {}
unsafe impl Sync for Allocation {}

#[derive(Default)]
struct AllocationRecorder {
    enabled: AtomicBool,
    allocation_count: AtomicUsize,
    outstanding_allocations: Mutex<HashMap<Allocation, usize>>,
}

thread_local! {
    static RECORDER: AllocationRecorder = AllocationRecorder::default();
}

extern "C" {
    fn malloc(size: usize) -> *mut c_void;
    fn calloc(count: usize, size: usize) -> *mut c_void;
    fn realloc(ptr: *mut c_void, size: usize) -> *mut c_void;
    fn free(ptr: *mut c_void);
}

pub fn record<T>(f: impl FnOnce() -> T) -> T {
    RECORDER.with(|recorder| {
        recorder.enabled.store(true, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder.outstanding_allocations.lock().unwrap().clear();
    });

    let value = f();

    let outstanding_allocation_indices = RECORDER.with(|recorder| {
        recorder.enabled.store(false, SeqCst);
        recorder.allocation_count.store(0, SeqCst);
        recorder
            .outstanding_allocations
            .lock()
            .unwrap()
            .drain()
            .map(|e| e.1)
            .collect::<Vec<_>>()
    });
    assert!(
        outstanding_allocation_indices.is_empty(),
        "Leaked allocation indices: {outstanding_allocation_indices:?}"
    );
    value
}

fn record_alloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            let count = recorder.allocation_count.fetch_add(1, SeqCst);
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .insert(Allocation(ptr), count);
        }
    });
}

fn record_dealloc(ptr: *mut c_void) {
    RECORDER.with(|recorder| {
        if recorder.enabled.load(SeqCst) {
            recorder
                .outstanding_allocations
                .lock()
                .unwrap()
                .remove(&Allocation(ptr));
        }
    });
}

unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
    let result = malloc(size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
    let result = calloc(count, size);
    record_alloc(result);
    result
}

unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
    let result = realloc(ptr, size);
    if ptr.is_null() {
        record_alloc(result);
    } else if !core::ptr::eq(ptr, result) {
        record_dealloc(ptr);
        record_alloc(result);
    }
    result
}

unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
    record_dealloc(ptr);
    free(ptr);
}
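A minimal usage sketch of the `record` helper above, assuming a `tree_sitter::Language` has already been loaded elsewhere (the function name and `language` parameter here are illustrative): everything allocated through the recording allocator inside the closure must be dropped before the closure returns, otherwise the leak assertion in `record` fails.

fn parse_with_leak_check(language: &tree_sitter::Language, source: &str) {
    record(|| {
        let mut parser = tree_sitter::Parser::new();
        parser.set_language(language).unwrap();
        let tree = parser.parse(source, None).unwrap();
        assert!(!tree.root_node().has_error());
        // `tree` and `parser` are dropped here, so no outstanding
        // allocations remain when `record` runs its leak assertion.
    });
}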
@@ -1,104 +0,0 @@
// For some reasons `Command::spawn` doesn't work in CI env for many exotic arches.
#![cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]

use std::{
    env::VarError,
    process::{Command, Stdio},
};

use tree_sitter::Parser;
use tree_sitter_generate::load_grammar_file;

use super::generate_parser;
use crate::tests::helpers::fixtures::{fixtures_dir, get_test_language};

// The `sanitizing` cfg is required to don't run tests under specific sunitizer
// because they don't work well with subprocesses _(it's an assumption)_.
//
// Below are two alternative examples of how to disable tests for some arches
// if a way with excluding the whole mod from compilation wouldn't work well.
//
// XXX: Also may be it makes sense to keep such tests as ignored by default
// to omit surprises and enable them on CI by passing an extra option explicitly:
//
// > cargo test -- --include-ignored
//
// #[cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]
// #[cfg_attr(not(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing))), ignore)]
//
#[test]
fn test_grammar_that_should_hang_and_not_segfault() {
    let parent_sleep_millis = 1000;
    let test_name = "test_grammar_that_should_hang_and_not_segfault";
    let test_var = "CARGO_HANG_TEST";

    eprintln!(" {test_name}");

    let tests_exec_path = std::env::args()
        .next()
        .expect("Failed to get tests executable path");

    match std::env::var(test_var) {
        Ok(v) if v == test_name => {
            eprintln!(" child process id {}", std::process::id());
            hang_test();
        }

        Err(VarError::NotPresent) => {
            eprintln!(" parent process id {}", std::process::id());
            let mut command = Command::new(tests_exec_path);
            command.arg(test_name).env(test_var, test_name);

            if std::env::args().any(|x| x == "--nocapture") {
                command.arg("--nocapture");
            } else {
                command.stdout(Stdio::null()).stderr(Stdio::null());
            }

            match command.spawn() {
                Ok(mut child) => {
                    std::thread::sleep(std::time::Duration::from_millis(parent_sleep_millis));
                    match child.try_wait() {
                        Ok(Some(status)) if status.success() => {
                            panic!("Child didn't hang and exited successfully")
                        }
                        Ok(Some(status)) => panic!(
                            "Child didn't hang and exited with status code: {:?}",
                            status.code()
                        ),
                        _ => (),
                    }
                    if let Err(e) = child.kill() {
                        eprintln!(
                            "Failed to kill hang test's process id: {}, error: {e}",
                            child.id()
                        );
                    }
                }
                Err(e) => panic!("{e}"),
            }
        }

        Err(e) => panic!("Env var error: {e}"),

        _ => unreachable!(),
    }
}

fn hang_test() {
    let test_grammar_dir = fixtures_dir()
        .join("test_grammars")
        .join("get_col_should_hang_not_crash");

    let grammar_json = load_grammar_file(&test_grammar_dir.join("grammar.js"), None).unwrap();
    let (parser_name, parser_code) = generate_parser(grammar_json.as_str()).unwrap();

    let language = get_test_language(&parser_name, &parser_code, Some(test_grammar_dir.as_path()));

    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();

    let code_that_should_hang = "\nHello";

    parser.parse(code_that_should_hang, None).unwrap();
}
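The deleted test above relies on a re-exec pattern: the test re-runs its own binary with a marker environment variable, the child branch runs the work that is expected to hang, and the parent only checks that the child is still alive after a grace period. A stripped-down sketch of that pattern, with illustrative names that are not part of the original test:

use std::{env, process::Command, thread, time::Duration};

fn assert_hangs(marker_var: &str, work: impl FnOnce()) {
    if env::var(marker_var).is_ok() {
        // Child branch: run the work that is expected to hang forever.
        work();
        return;
    }
    // Parent branch: re-run this test binary with the marker set.
    // (The original also passes the test name as an argument so the
    // child runs only that one test.)
    let exe = env::args().next().expect("test executable path");
    let mut child = Command::new(exe)
        .env(marker_var, "1")
        .spawn()
        .expect("failed to spawn child process");
    thread::sleep(Duration::from_millis(1000));
    // A hang (rather than a crash or a clean exit) means `try_wait`
    // still reports no exit status after the grace period.
    assert!(
        child.try_wait().expect("try_wait failed").is_none(),
        "child exited instead of hanging"
    );
    let _ = child.kill();
}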
@@ -1,264 +0,0 @@
use std::{fs, path::PathBuf, process::Command};

use anyhow::{anyhow, Context, Result};
use regex::Regex;
use tree_sitter_loader::TreeSitterJSON;

pub struct Version {
    pub version: String,
    pub current_dir: PathBuf,
}

impl Version {
    #[must_use]
    pub const fn new(version: String, current_dir: PathBuf) -> Self {
        Self {
            version,
            current_dir,
        }
    }

    pub fn run(self) -> Result<()> {
        let tree_sitter_json = self.current_dir.join("tree-sitter.json");

        let tree_sitter_json =
            serde_json::from_str::<TreeSitterJSON>(&fs::read_to_string(tree_sitter_json)?)?;

        let is_multigrammar = tree_sitter_json.grammars.len() > 1;

        self.update_treesitter_json().with_context(|| {
            format!(
                "Failed to update tree-sitter.json at {}",
                self.current_dir.display()
            )
        })?;
        self.update_cargo_toml().with_context(|| {
            format!(
                "Failed to update Cargo.toml at {}",
                self.current_dir.display()
            )
        })?;
        self.update_package_json().with_context(|| {
            format!(
                "Failed to update package.json at {}",
                self.current_dir.display()
            )
        })?;
        self.update_makefile(is_multigrammar).with_context(|| {
            format!(
                "Failed to update Makefile at {}",
                self.current_dir.display()
            )
        })?;
        self.update_cmakelists_txt().with_context(|| {
            format!(
                "Failed to update CMakeLists.txt at {}",
                self.current_dir.display()
            )
        })?;
        self.update_pyproject_toml().with_context(|| {
            format!(
                "Failed to update pyproject.toml at {}",
                self.current_dir.display()
            )
        })?;

        Ok(())
    }

    fn update_treesitter_json(&self) -> Result<()> {
        let tree_sitter_json = &fs::read_to_string(self.current_dir.join("tree-sitter.json"))?;

        let tree_sitter_json = tree_sitter_json
            .lines()
            .map(|line| {
                if line.contains("\"version\":") {
                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;

                    format!(
                        "{}{}{}",
                        &line[..start_quote],
                        self.version,
                        &line[end_quote..]
                    )
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("tree-sitter.json"), tree_sitter_json)?;

        Ok(())
    }

    fn update_cargo_toml(&self) -> Result<()> {
        if !self.current_dir.join("Cargo.toml").exists() {
            return Ok(());
        }

        let cargo_toml = fs::read_to_string(self.current_dir.join("Cargo.toml"))?;

        let cargo_toml = cargo_toml
            .lines()
            .map(|line| {
                if line.starts_with("version =") {
                    format!("version = \"{}\"", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("Cargo.toml"), cargo_toml)?;

        if self.current_dir.join("Cargo.lock").exists() {
            let Ok(cmd) = Command::new("cargo")
                .arg("generate-lockfile")
                .arg("--offline")
                .current_dir(&self.current_dir)
                .output()
            else {
                return Ok(()); // cargo is not `executable`, ignore
            };

            if !cmd.status.success() {
                let stderr = String::from_utf8_lossy(&cmd.stderr);
                return Err(anyhow!(
                    "Failed to run `cargo generate-lockfile`:\n{stderr}"
                ));
            }
        }

        Ok(())
    }

    fn update_package_json(&self) -> Result<()> {
        if !self.current_dir.join("package.json").exists() {
            return Ok(());
        }

        let package_json = &fs::read_to_string(self.current_dir.join("package.json"))?;

        let package_json = package_json
            .lines()
            .map(|line| {
                if line.contains("\"version\":") {
                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;

                    format!(
                        "{}{}{}",
                        &line[..start_quote],
                        self.version,
                        &line[end_quote..]
                    )
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("package.json"), package_json)?;

        if self.current_dir.join("package-lock.json").exists() {
            let Ok(cmd) = Command::new("npm")
                .arg("install")
                .arg("--package-lock-only")
                .current_dir(&self.current_dir)
                .output()
            else {
                return Ok(()); // npm is not `executable`, ignore
            };

            if !cmd.status.success() {
                let stderr = String::from_utf8_lossy(&cmd.stderr);
                return Err(anyhow!("Failed to run `npm install`:\n{stderr}"));
            }
        }

        Ok(())
    }

    fn update_makefile(&self, is_multigrammar: bool) -> Result<()> {
        let makefile = if is_multigrammar {
            if !self.current_dir.join("common").join("common.mak").exists() {
                return Ok(());
            }

            fs::read_to_string(self.current_dir.join("Makefile"))?
        } else {
            if !self.current_dir.join("Makefile").exists() {
                return Ok(());
            }

            fs::read_to_string(self.current_dir.join("Makefile"))?
        };

        let makefile = makefile
            .lines()
            .map(|line| {
                if line.starts_with("VERSION") {
                    format!("VERSION := {}", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("Makefile"), makefile)?;

        Ok(())
    }

    fn update_cmakelists_txt(&self) -> Result<()> {
        if !self.current_dir.join("CMakeLists.txt").exists() {
            return Ok(());
        }

        let cmake = fs::read_to_string(self.current_dir.join("CMakeLists.txt"))?;

        let re = Regex::new(r#"(\s*VERSION\s+)"[0-9]+\.[0-9]+\.[0-9]+""#)?;
        let cmake = re.replace(&cmake, format!(r#"$1"{}""#, self.version));

        fs::write(self.current_dir.join("CMakeLists.txt"), cmake.as_bytes())?;

        Ok(())
    }

    fn update_pyproject_toml(&self) -> Result<()> {
        if !self.current_dir.join("pyproject.toml").exists() {
            return Ok(());
        }

        let pyproject_toml = fs::read_to_string(self.current_dir.join("pyproject.toml"))?;

        let pyproject_toml = pyproject_toml
            .lines()
            .map(|line| {
                if line.starts_with("version =") {
                    format!("version = \"{}\"", self.version)
                } else {
                    line.to_string()
                }
            })
            .collect::<Vec<_>>()
            .join("\n")
            + "\n";

        fs::write(self.current_dir.join("pyproject.toml"), pyproject_toml)?;

        Ok(())
    }
}
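The CMakeLists.txt update above is the one place in this deleted module that uses a regex rather than line-by-line matching. A self-contained sketch of that substitution (the function name and sample input are illustrative): the capture group keeps the indentation and the `VERSION` keyword, and only the quoted semver literal is replaced.

use regex::Regex;

fn bump_cmake_version(cmake: &str, new_version: &str) -> String {
    // Same pattern as `update_cmakelists_txt`: capture everything up to and
    // including `VERSION `, then swap the quoted x.y.z literal that follows.
    let re = Regex::new(r#"(\s*VERSION\s+)"[0-9]+\.[0-9]+\.[0-9]+""#).unwrap();
    re.replace(cmake, format!(r#"$1"{new_version}""#))
        .into_owned()
}

// e.g. `VERSION "0.25.9"` becomes `VERSION "0.26.0"`:
// assert!(bump_cmake_version(r#"  VERSION "0.25.9""#, "0.26.0").contains(r#""0.26.0""#));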
@@ -8,14 +8,18 @@ rust-version.workspace = true
readme = "README.md"
homepage.workspace = true
repository.workspace = true
+documentation = "https://docs.rs/tree-sitter-cli"
license.workspace = true
keywords.workspace = true
categories.workspace = true
-include = ["build.rs", "README.md", "benches/*", "src/**"]
+include = ["build.rs", "README.md", "LICENSE", "benches/*", "src/**"]

[lints]
workspace = true

+[lib]
+path = "src/tree_sitter_cli.rs"
+
[[bin]]
name = "tree-sitter"
path = "src/main.rs"

@@ -26,7 +30,9 @@ name = "benchmark"
harness = false

[features]
+default = ["qjs-rt"]
wasm = ["tree-sitter/wasm", "tree-sitter-loader/wasm"]
+qjs-rt = ["tree-sitter-generate/qjs-rt"]

[dependencies]
ansi_colours.workspace = true

@@ -36,31 +42,26 @@ bstr.workspace = true
clap.workspace = true
clap_complete.workspace = true
clap_complete_nushell.workspace = true
+crc32fast.workspace = true
ctor.workspace = true
ctrlc.workspace = true
dialoguer.workspace = true
-filetime.workspace = true
glob.workspace = true
heck.workspace = true
html-escape.workspace = true
-indexmap.workspace = true
indoc.workspace = true
log.workspace = true
memchr.workspace = true
rand.workspace = true
regex.workspace = true
-regex-syntax.workspace = true
-rustc-hash.workspace = true
+schemars.workspace = true
semver.workspace = true
serde.workspace = true
-serde_derive.workspace = true
serde_json.workspace = true
similar.workspace = true
-smallbitvec.workspace = true
streaming-iterator.workspace = true
+thiserror.workspace = true
tiny_http.workspace = true
-topological-sort.workspace = true
-url.workspace = true
walkdir.workspace = true
wasmparser.workspace = true
webbrowser.workspace = true

@@ -74,7 +75,7 @@ tree-sitter-tags.workspace = true

[dev-dependencies]
encoding_rs = "0.8.35"
-widestring = "1.1.0"
+widestring = "1.2.1"
tree_sitter_proc_macro = { path = "src/tests/proc_macro", package = "tree-sitter-tests-proc-macro" }

tempfile.workspace = true
crates/cli/LICENSE (new file, 21 lines)

@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2018 Max Brunsfeld

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -7,7 +7,8 @@
[npmjs.com]: https://www.npmjs.org/package/tree-sitter-cli
[npmjs.com badge]: https://img.shields.io/npm/v/tree-sitter-cli.svg?color=%23BF4A4A

-The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`, `Linux`, and `Windows`.
+The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`,
+`Linux`, and `Windows`.

### Installation

@@ -34,9 +35,11 @@ The `tree-sitter` binary itself has no dependencies, but specific commands have

### Commands

-* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current working directory. See [the documentation] for more information.
+* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current
+  working directory. See [the documentation] for more information.

-* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory. See [the documentation] for more information.
+* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory.
+  See [the documentation] for more information.

* `parse` - The `tree-sitter parse` command will parse a file (or list of files) using Tree-sitter parsers.
@@ -8,6 +8,7 @@ use std::{
};

use anyhow::Context;
+use log::info;
use tree_sitter::{Language, Parser, Query};
use tree_sitter_loader::{CompileConfig, Loader};

@@ -71,6 +72,8 @@ static EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR: LazyLock<
});

fn main() {
+    tree_sitter_cli::logger::init();
+
    let max_path_length = EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR
        .values()
        .flat_map(|(e, q)| {

@@ -81,7 +84,7 @@ fn main() {
        .max()
        .unwrap_or(0);

-    eprintln!("Benchmarking with {} repetitions", *REPETITION_COUNT);
+    info!("Benchmarking with {} repetitions", *REPETITION_COUNT);

    let mut parser = Parser::new();
    let mut all_normal_speeds = Vec::new();

@@ -98,11 +101,11 @@ fn main() {
            }
        }

-        eprintln!("\nLanguage: {language_name}");
+        info!("\nLanguage: {language_name}");
        let language = get_language(language_path);
        parser.set_language(&language).unwrap();

-        eprintln!(" Constructing Queries");
+        info!(" Constructing Queries");
        for path in query_paths {
            if let Some(filter) = EXAMPLE_FILTER.as_ref() {
                if !path.to_str().unwrap().contains(filter.as_str()) {

@@ -117,7 +120,7 @@ fn main() {
            });
        }

-        eprintln!(" Parsing Valid Code:");
+        info!(" Parsing Valid Code:");
        let mut normal_speeds = Vec::new();
        for example_path in example_paths {
            if let Some(filter) = EXAMPLE_FILTER.as_ref() {

@@ -131,7 +134,7 @@ fn main() {
            }));
        }

-        eprintln!(" Parsing Invalid Code (mismatched languages):");
+        info!(" Parsing Invalid Code (mismatched languages):");
        let mut error_speeds = Vec::new();
        for (other_language_path, (example_paths, _)) in
            EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR.iter()

@@ -152,30 +155,30 @@ fn main() {
        }

        if let Some((average_normal, worst_normal)) = aggregate(&normal_speeds) {
-            eprintln!(" Average Speed (normal): {average_normal} bytes/ms");
-            eprintln!(" Worst Speed (normal): {worst_normal} bytes/ms");
+            info!(" Average Speed (normal): {average_normal} bytes/ms");
+            info!(" Worst Speed (normal): {worst_normal} bytes/ms");
        }

        if let Some((average_error, worst_error)) = aggregate(&error_speeds) {
-            eprintln!(" Average Speed (errors): {average_error} bytes/ms");
-            eprintln!(" Worst Speed (errors): {worst_error} bytes/ms");
+            info!(" Average Speed (errors): {average_error} bytes/ms");
+            info!(" Worst Speed (errors): {worst_error} bytes/ms");
        }

        all_normal_speeds.extend(normal_speeds);
        all_error_speeds.extend(error_speeds);
    }

-    eprintln!("\n Overall");
+    info!("\n Overall");
    if let Some((average_normal, worst_normal)) = aggregate(&all_normal_speeds) {
-        eprintln!(" Average Speed (normal): {average_normal} bytes/ms");
-        eprintln!(" Worst Speed (normal): {worst_normal} bytes/ms");
+        info!(" Average Speed (normal): {average_normal} bytes/ms");
+        info!(" Worst Speed (normal): {worst_normal} bytes/ms");
    }

    if let Some((average_error, worst_error)) = aggregate(&all_error_speeds) {
-        eprintln!(" Average Speed (errors): {average_error} bytes/ms");
-        eprintln!(" Worst Speed (errors): {worst_error} bytes/ms");
+        info!(" Average Speed (errors): {average_error} bytes/ms");
+        info!(" Worst Speed (errors): {worst_error} bytes/ms");
    }
-    eprintln!();
+    info!("");
}

fn aggregate(speeds: &[usize]) -> Option<(usize, usize)> {

@@ -194,12 +197,6 @@ fn aggregate(speeds: &[usize]) -> Option<(usize, usize)> {
}

fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) -> usize {
-    eprint!(
-        " {:width$}\t",
-        path.file_name().unwrap().to_str().unwrap(),
-        width = max_path_length
-    );
-
    let source_code = fs::read(path)
        .with_context(|| format!("Failed to read {}", path.display()))
        .unwrap();

@@ -210,8 +207,9 @@ fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) ->
    let duration = time.elapsed() / (*REPETITION_COUNT as u32);
    let duration_ns = duration.as_nanos();
    let speed = ((source_code.len() as u128) * 1_000_000) / duration_ns;
-    eprintln!(
-        "time {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
+    info!(
+        " {:max_path_length$}\ttime {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
+        path.file_name().unwrap().to_str().unwrap(),
        (duration_ns as f64) / 1e6,
    );
    speed as usize
@@ -52,9 +52,9 @@ fn main() {

fn web_playground_files_present() -> bool {
    let paths = [
-        "../docs/src/assets/js/playground.js",
-        "../lib/binding_web/tree-sitter.js",
-        "../lib/binding_web/tree-sitter.wasm",
+        "../../docs/src/assets/js/playground.js",
+        "../../lib/binding_web/web-tree-sitter.js",
+        "../../lib/binding_web/web-tree-sitter.wasm",
    ];

    paths.iter().all(|p| Path::new(p).exists())

crates/cli/eslint/.gitignore (new vendored file, 1 line)

@@ -0,0 +1 @@
LICENSE
@@ -305,9 +305,9 @@
      "peer": true
    },
    "node_modules/brace-expansion": {
-      "version": "1.1.11",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
-      "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
+      "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
      "license": "MIT",
      "peer": true,
      "dependencies": {

@@ -805,9 +805,9 @@
      "peer": true
    },
    "node_modules/js-yaml": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
-      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+      "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
      "license": "MIT",
      "peer": true,
      "dependencies": {

@@ -21,5 +21,9 @@
  },
  "peerDependencies": {
    "eslint": ">= 9"
+  },
+  "scripts": {
+    "prepack": "cp ../../../LICENSE .",
+    "postpack": "rm LICENSE"
  }
}
cli/npm/dsl.d.ts → crates/cli/npm/dsl.d.ts (vendored)

@@ -29,6 +29,7 @@ type Rule =
  | PrecRule
  | Repeat1Rule
  | RepeatRule
+ | ReservedRule
  | SeqRule
  | StringRule
  | SymbolRule<string>
crates/cli/npm/package-lock.json (new generated file, 20 lines)

@@ -0,0 +1,20 @@
{
  "name": "tree-sitter-cli",
  "version": "0.27.0",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "tree-sitter-cli",
      "version": "0.27.0",
      "hasInstallScript": true,
      "license": "MIT",
      "bin": {
        "tree-sitter": "cli.js"
      },
      "engines": {
        "node": ">=12.0.0"
      }
    }
  }
}
@@ -1,6 +1,6 @@
{
  "name": "tree-sitter-cli",
-  "version": "0.25.9",
+  "version": "0.27.0",
  "author": {
    "name": "Max Brunsfeld",
    "email": "maxbrunsfeld@gmail.com"

@@ -27,7 +27,7 @@
  },
  "scripts": {
    "install": "node install.js",
-    "prepack": "cp ../../LICENSE ../README.md .",
+    "prepack": "cp ../../../LICENSE ../README.md .",
    "postpack": "rm LICENSE README.md"
  },
  "bin": {
crates/cli/package.nix (new file, 69 lines)

@@ -0,0 +1,69 @@
{
  lib,
  src,
  rustPlatform,
  version,
  clang,
  libclang,
  cmake,
  pkg-config,
  nodejs_22,
  test-grammars,
  stdenv,
  installShellFiles,
}:
let
  isCross = stdenv.targetPlatform == stdenv.buildPlatform;
in
rustPlatform.buildRustPackage {
  pname = "tree-sitter-cli";

  inherit src version;

  cargoBuildFlags = [ "--all-features" ];

  nativeBuildInputs = [
    clang
    cmake
    pkg-config
    nodejs_22
  ]
  ++ lib.optionals (!isCross) [ installShellFiles ];

  cargoLock.lockFile = ../../Cargo.lock;

  env.LIBCLANG_PATH = "${libclang.lib}/lib";

  preBuild = ''
    rm -rf test/fixtures
    mkdir -p test/fixtures
    cp -r ${test-grammars}/fixtures/* test/fixtures/
    chmod -R u+w test/fixtures
  '';

  preCheck = "export HOME=$TMPDIR";
  doCheck = !isCross;

  postInstall = lib.optionalString (!isCross) ''
    installShellCompletion --cmd tree-sitter \
      --bash <($out/bin/tree-sitter complete --shell bash) \
      --zsh <($out/bin/tree-sitter complete --shell zsh) \
      --fish <($out/bin/tree-sitter complete --shell fish)
  '';

  meta = {
    description = "Tree-sitter CLI - A tool for developing, testing, and using Tree-sitter parsers";
    longDescription = ''
      Tree-sitter is a parser generator tool and an incremental parsing library.
      It can build a concrete syntax tree for a source file and efficiently update
      the syntax tree as the source file is edited. This package provides the CLI
      tool for developing, testing, and using Tree-sitter parsers.
    '';
    homepage = "https://tree-sitter.github.io/tree-sitter";
    changelog = "https://github.com/tree-sitter/tree-sitter/releases/tag/v${version}";
    license = lib.licenses.mit;
    maintainers = with lib.maintainers; [ amaanq ];
    platforms = lib.platforms.all;
    mainProgram = "tree-sitter";
  };
}
@@ -1,5 +1,11 @@
-use std::{collections::HashMap, env, fs, path::Path, sync::LazyLock};
+use std::{
+    collections::HashMap,
+    env, fs,
+    path::{Path, PathBuf},
+    sync::LazyLock,
+};

+use log::{error, info};
use rand::Rng;
use regex::Regex;
use tree_sitter::{Language, Parser};

@@ -19,7 +25,7 @@ use crate::{
        random::Rand,
    },
    parse::perform_edit,
-    test::{parse_tests, print_diff, print_diff_key, strip_sexp_fields, TestEntry},
+    test::{parse_tests, strip_sexp_fields, DiffKey, TestDiff, TestEntry},
};

pub static LOG_ENABLED: LazyLock<bool> = LazyLock::new(|| env::var("TREE_SITTER_LOG").is_ok());

@@ -57,14 +63,14 @@ pub fn new_seed() -> usize {
    int_env_var("TREE_SITTER_SEED").unwrap_or_else(|| {
        let mut rng = rand::thread_rng();
        let seed = rng.gen::<usize>();
-        eprintln!("Seed: {seed}");
+        info!("Seed: {seed}");
        seed
    })
}

pub struct FuzzOptions {
    pub skipped: Option<Vec<String>>,
-    pub subdir: Option<String>,
+    pub subdir: Option<PathBuf>,
    pub edits: usize,
    pub iterations: usize,
    pub include: Option<Regex>,

@@ -103,12 +109,12 @@ pub fn fuzz_language_corpus(
    let corpus_dir = grammar_dir.join(subdir).join("test").join("corpus");

    if !corpus_dir.exists() || !corpus_dir.is_dir() {
-        eprintln!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
+        error!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
        return;
    }

    if std::fs::read_dir(&corpus_dir).unwrap().count() == 0 {
-        eprintln!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
+        error!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
        return;
    }

@@ -144,7 +150,7 @@ pub fn fuzz_language_corpus(
    let dump_edits = env::var("TREE_SITTER_DUMP_EDITS").is_ok();

    if log_seed {
-        println!(" start seed: {start_seed}");
+        info!(" start seed: {start_seed}");
    }

    println!();

@@ -158,7 +164,7 @@ pub fn fuzz_language_corpus(

        println!(" {test_index}. {test_name}");

-        let passed = allocations::record(|| {
+        let passed = allocations::record_checked(|| {
            let mut log_session = None;
            let mut parser = get_parser(&mut log_session, "log.html");
            parser.set_language(language).unwrap();

@@ -177,8 +183,8 @@ pub fn fuzz_language_corpus(

            if actual_output != test.output {
                println!("Incorrect initial parse for {test_name}");
-                print_diff_key();
-                print_diff(&actual_output, &test.output, true);
+                DiffKey::print();
+                println!("{}", TestDiff::new(&actual_output, &test.output));
                println!();
                return false;
            }

@@ -186,7 +192,7 @@ pub fn fuzz_language_corpus(
            true
        })
        .unwrap_or_else(|e| {
-            eprintln!("Error: {e}");
+            error!("{e}");
            false
        });

@@ -202,7 +208,7 @@ pub fn fuzz_language_corpus(

        for trial in 0..options.iterations {
            let seed = start_seed + trial;
-            let passed = allocations::record(|| {
+            let passed = allocations::record_checked(|| {
                let mut rand = Rand::new(seed);
                let mut log_session = None;
                let mut parser = get_parser(&mut log_session, "log.html");

@@ -211,7 +217,7 @@ pub fn fuzz_language_corpus(
                let mut input = test.input.clone();

                if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                }

                // Perform a random series of edits and reparse.

@@ -224,7 +230,7 @@ pub fn fuzz_language_corpus(
                }

                if log_seed {
-                    println!(" {test_index}.{trial:<2} seed: {seed}");
+                    info!(" {test_index}.{trial:<2} seed: {seed}");
                }

                if dump_edits {

@@ -238,7 +244,7 @@ pub fn fuzz_language_corpus(
                }

                if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                }

                set_included_ranges(&mut parser, &input, test.template_delimiters);

@@ -247,7 +253,7 @@ pub fn fuzz_language_corpus(
                // Check that the new tree is consistent.
                check_consistent_sizes(&tree2, &input);
                if let Err(message) = check_changed_ranges(&tree, &tree2, &input) {
-                    println!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
+                    error!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
                    return false;
                }

@@ -256,7 +262,7 @@ pub fn fuzz_language_corpus(
                    perform_edit(&mut tree2, &mut input, &edit).unwrap();
                }
                if options.log_graphs {
-                    eprintln!("{}\n", String::from_utf8_lossy(&input));
+                    info!("{}\n", String::from_utf8_lossy(&input));
                }

                set_included_ranges(&mut parser, &test.input, test.template_delimiters);

@@ -270,8 +276,8 @@ pub fn fuzz_language_corpus(

                if actual_output != test.output && !test.error {
                    println!("Incorrect parse for {test_name} - seed {seed}");
-                    print_diff_key();
-                    print_diff(&actual_output, &test.output, true);
+                    DiffKey::print();
+                    println!("{}", TestDiff::new(&actual_output, &test.output));
                    println!();
                    return false;
                }

@@ -279,13 +285,13 @@ pub fn fuzz_language_corpus(
                // Check that the edited tree is consistent.
                check_consistent_sizes(&tree3, &input);
                if let Err(message) = check_changed_ranges(&tree2, &tree3, &input) {
-                    println!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
+                    error!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
                    return false;
                }

                true
            }).unwrap_or_else(|e| {
-                eprintln!("Error: {e}");
+                error!("{e}");
                false
            });

@@ -297,17 +303,17 @@ pub fn fuzz_language_corpus(
    }

    if failure_count != 0 {
-        eprintln!("{failure_count} {language_name} corpus tests failed fuzzing");
+        info!("{failure_count} {language_name} corpus tests failed fuzzing");
    }

    skipped.retain(|_, v| *v == 0);

    if !skipped.is_empty() {
-        println!("Non matchable skip definitions:");
+        info!("Non matchable skip definitions:");
        for k in skipped.keys() {
-            println!(" {k}");
+            info!(" {k}");
        }
-        panic!("Non matchable skip definitions needs to be removed");
+        panic!("Non matchable skip definitions need to be removed");
    }
}
@@ -40,7 +40,11 @@ extern "C" {
    fn free(ptr: *mut c_void);
}

-pub fn record<T>(f: impl FnOnce() -> T) -> Result<T, String> {
+pub fn record<T>(f: impl FnOnce() -> T) -> T {
+    record_checked(f).unwrap()
+}
+
+pub fn record_checked<T>(f: impl FnOnce() -> T) -> Result<T, String> {
    RECORDER.with(|recorder| {
        recorder.enabled.store(true, SeqCst);
        recorder.allocation_count.store(0, SeqCst);

@@ -93,19 +97,34 @@ fn record_dealloc(ptr: *mut c_void) {
    });
}

-unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
    let result = malloc(size);
    record_alloc(result);
    result
}

-unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
    let result = calloc(count, size);
    record_alloc(result);
    result
}

-unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
+/// # Safety
+///
+/// The caller must ensure that the returned pointer is eventually
+/// freed by calling `ts_record_free`.
+#[must_use]
+pub unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
    let result = realloc(ptr, size);
    if ptr.is_null() {
        record_alloc(result);

@@ -116,7 +135,11 @@ unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_
    result
}

-unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
+/// # Safety
+///
+/// The caller must ensure that `ptr` was allocated by a previous call
+/// to `ts_record_malloc`, `ts_record_calloc`, or `ts_record_realloc`.
+pub unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
    record_dealloc(ptr);
    free(ptr);
}
@@ -23,7 +23,7 @@ pub fn check_consistent_sizes(tree: &Tree, input: &[u8]) {
        let mut some_child_has_changes = false;
        let mut actual_named_child_count = 0;
        for i in 0..node.child_count() {
-            let child = node.child(i).unwrap();
+            let child = node.child(i as u32).unwrap();
            assert!(child.start_byte() >= last_child_end_byte);
            assert!(child.start_position() >= last_child_end_point);
            check(child, line_offsets);
@@ -12,6 +12,7 @@ use std::{
use ansi_colours::{ansi256_from_rgb, rgb_from_ansi256};
use anstyle::{Ansi256Color, AnsiColor, Color, Effects, RgbColor};
use anyhow::Result;
+use log::{info, warn};
use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer};
use serde_json::{json, Value};
use tree_sitter_highlight::{HighlightConfiguration, HighlightEvent, Highlighter, HtmlRenderer};

@@ -348,19 +349,17 @@ pub fn highlight(
            config.nonconformant_capture_names(&HashSet::new())
        };
        if names.is_empty() {
-            eprintln!("All highlight captures conform to standards.");
+            info!("All highlight captures conform to standards.");
        } else {
-            eprintln!(
-                "Non-standard highlight {} detected:",
+            warn!(
+                "Non-standard highlight {} detected:\n* {}",
                if names.len() > 1 {
                    "captures"
                } else {
                    "capture"
-                }
+                },
+                names.join("\n* ")
            );
-            for name in names {
-                eprintln!("* {name}");
-            }
        }
    }

@@ -451,7 +450,7 @@ pub fn highlight(
    }

    if opts.print_time {
-        eprintln!("Time: {}ms", time.elapsed().as_millis());
+        info!("Time: {}ms", time.elapsed().as_millis());
    }

    Ok(())
@ -5,14 +5,20 @@ use std::{
|
||||||
};
|
};
|
||||||
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
|
use crc32fast::hash as crc32;
|
||||||
use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
|
use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
|
||||||
use indoc::{formatdoc, indoc};
|
use indoc::{formatdoc, indoc};
|
||||||
|
use log::info;
|
||||||
|
use rand::{thread_rng, Rng};
|
||||||
use semver::Version;
|
use semver::Version;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use serde_json::{Map, Value};
|
use serde_json::{Map, Value};
|
||||||
use tree_sitter_generate::write_file;
|
use tree_sitter_generate::write_file;
|
||||||
use tree_sitter_loader::{Author, Bindings, Grammar, Links, Metadata, PathsJSON, TreeSitterJSON};
|
use tree_sitter_loader::{
|
||||||
use url::Url;
|
Author, Bindings, Grammar, Links, Metadata, PathsJSON, TreeSitterJSON,
|
||||||
|
DEFAULT_HIGHLIGHTS_QUERY_FILE_NAME, DEFAULT_INJECTIONS_QUERY_FILE_NAME,
|
||||||
|
DEFAULT_LOCALS_QUERY_FILE_NAME, DEFAULT_TAGS_QUERY_FILE_NAME,
|
||||||
|
};
|
||||||
|
|
||||||
const CLI_VERSION: &str = env!("CARGO_PKG_VERSION");
|
const CLI_VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||||
const CLI_VERSION_PLACEHOLDER: &str = "CLI_VERSION";
|
const CLI_VERSION_PLACEHOLDER: &str = "CLI_VERSION";
|
||||||
@@ -30,9 +36,12 @@ const PARSER_CLASS_NAME_PLACEHOLDER: &str = "PARSER_CLASS_NAME";
const PARSER_DESCRIPTION_PLACEHOLDER: &str = "PARSER_DESCRIPTION";
const PARSER_LICENSE_PLACEHOLDER: &str = "PARSER_LICENSE";
+const PARSER_NS_PLACEHOLDER: &str = "PARSER_NS";
+const PARSER_NS_CLEANED_PLACEHOLDER: &str = "PARSER_NS_CLEANED";
const PARSER_URL_PLACEHOLDER: &str = "PARSER_URL";
const PARSER_URL_STRIPPED_PLACEHOLDER: &str = "PARSER_URL_STRIPPED";
const PARSER_VERSION_PLACEHOLDER: &str = "PARSER_VERSION";
+const PARSER_FINGERPRINT_PLACEHOLDER: &str = "PARSER_FINGERPRINT";

const AUTHOR_NAME_PLACEHOLDER: &str = "PARSER_AUTHOR_NAME";
const AUTHOR_EMAIL_PLACEHOLDER: &str = "PARSER_AUTHOR_EMAIL";

@@ -51,12 +60,22 @@ const AUTHOR_BLOCK_RS: &str = "\nauthors = [";
const AUTHOR_NAME_PLACEHOLDER_RS: &str = "PARSER_AUTHOR_NAME";
const AUTHOR_EMAIL_PLACEHOLDER_RS: &str = " PARSER_AUTHOR_EMAIL";

+const AUTHOR_BLOCK_JAVA: &str = "\n <developer>";
+const AUTHOR_NAME_PLACEHOLDER_JAVA: &str = "\n <name>PARSER_AUTHOR_NAME</name>";
+const AUTHOR_EMAIL_PLACEHOLDER_JAVA: &str = "\n <email>PARSER_AUTHOR_EMAIL</email>";
+const AUTHOR_URL_PLACEHOLDER_JAVA: &str = "\n <url>PARSER_AUTHOR_URL</url>";
+
const AUTHOR_BLOCK_GRAMMAR: &str = "\n * @author ";
const AUTHOR_NAME_PLACEHOLDER_GRAMMAR: &str = "PARSER_AUTHOR_NAME";
const AUTHOR_EMAIL_PLACEHOLDER_GRAMMAR: &str = " PARSER_AUTHOR_EMAIL";

const FUNDING_URL_PLACEHOLDER: &str = "FUNDING_URL";

+const HIGHLIGHTS_QUERY_PATH_PLACEHOLDER: &str = "HIGHLIGHTS_QUERY_PATH";
+const INJECTIONS_QUERY_PATH_PLACEHOLDER: &str = "INJECTIONS_QUERY_PATH";
+const LOCALS_QUERY_PATH_PLACEHOLDER: &str = "LOCALS_QUERY_PATH";
+const TAGS_QUERY_PATH_PLACEHOLDER: &str = "TAGS_QUERY_PATH";
+
const GRAMMAR_JS_TEMPLATE: &str = include_str!("./templates/grammar.js");
const PACKAGE_JSON_TEMPLATE: &str = include_str!("./templates/package.json");
const GITIGNORE_TEMPLATE: &str = include_str!("./templates/gitignore");
@@ -95,32 +114,18 @@ const TEST_BINDING_PY_TEMPLATE: &str = include_str!("./templates/test_binding.py
const PACKAGE_SWIFT_TEMPLATE: &str = include_str!("./templates/package.swift");
const TESTS_SWIFT_TEMPLATE: &str = include_str!("./templates/tests.swift");

+const POM_XML_TEMPLATE: &str = include_str!("./templates/pom.xml");
+const BINDING_JAVA_TEMPLATE: &str = include_str!("./templates/binding.java");
+const TEST_JAVA_TEMPLATE: &str = include_str!("./templates/test.java");
+
const BUILD_ZIG_TEMPLATE: &str = include_str!("./templates/build.zig");
const BUILD_ZIG_ZON_TEMPLATE: &str = include_str!("./templates/build.zig.zon");
const ROOT_ZIG_TEMPLATE: &str = include_str!("./templates/root.zig");
const TEST_ZIG_TEMPLATE: &str = include_str!("./templates/test.zig");

-const TREE_SITTER_JSON_SCHEMA: &str =
+pub const TREE_SITTER_JSON_SCHEMA: &str =
"https://tree-sitter.github.io/tree-sitter/assets/schemas/config.schema.json";

-#[must_use]
-pub fn path_in_ignore(repo_path: &Path) -> bool {
-[
-"bindings",
-"build",
-"examples",
-"node_modules",
-"queries",
-"script",
-"src",
-"target",
-"test",
-"types",
-]
-.iter()
-.any(|dir| repo_path.ends_with(dir))
-}
-
#[derive(Serialize, Deserialize, Clone)]
pub struct JsonConfigOpts {
pub name: String,
@@ -128,9 +133,9 @@ pub struct JsonConfigOpts {
pub title: String,
pub description: String,
#[serde(skip_serializing_if = "Option::is_none")]
-pub repository: Option<Url>,
+pub repository: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
-pub funding: Option<Url>,
+pub funding: Option<String>,
pub scope: String,
pub file_types: Vec<String>,
pub version: Version,

@@ -139,7 +144,9 @@ pub struct JsonConfigOpts {
#[serde(skip_serializing_if = "Option::is_none")]
pub email: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
-pub url: Option<Url>,
+pub url: Option<String>,
+pub namespace: Option<String>,
+pub bindings: Bindings,
}

impl JsonConfigOpts {

@@ -171,22 +178,17 @@ impl JsonConfigOpts {
authors: Some(vec![Author {
name: self.author,
email: self.email,
-url: self.url.map(|url| url.to_string()),
+url: self.url,
}]),
links: Some(Links {
repository: self.repository.unwrap_or_else(|| {
-Url::parse(&format!(
-"https://github.com/tree-sitter/tree-sitter-{}",
-self.name
-))
-.expect("Failed to parse default repository URL")
+format!("https://github.com/tree-sitter/tree-sitter-{}", self.name)
}),
funding: self.funding,
-homepage: None,
}),
-namespace: None,
+namespace: self.namespace,
},
-bindings: Bindings::default(),
+bindings: self.bindings,
}
}
}

@@ -207,6 +209,8 @@ impl Default for JsonConfigOpts {
author: String::new(),
email: None,
url: None,
+namespace: None,
+bindings: Bindings::default(),
}
}
}
@@ -223,6 +227,11 @@ struct GenerateOpts<'a> {
camel_parser_name: &'a str,
title_parser_name: &'a str,
class_name: &'a str,
+highlights_query_path: &'a str,
+injections_query_path: &'a str,
+locals_query_path: &'a str,
+tags_query_path: &'a str,
+namespace: Option<&'a str>,
}

pub fn generate_grammar_files(

@@ -273,6 +282,11 @@ pub fn generate_grammar_files(
.clone()
.unwrap_or_else(|| format!("TreeSitter{}", language_name.to_upper_camel_case()));

+let default_highlights_path = Path::new("queries").join(DEFAULT_HIGHLIGHTS_QUERY_FILE_NAME);
+let default_injections_path = Path::new("queries").join(DEFAULT_INJECTIONS_QUERY_FILE_NAME);
+let default_locals_path = Path::new("queries").join(DEFAULT_LOCALS_QUERY_FILE_NAME);
+let default_tags_path = Path::new("queries").join(DEFAULT_TAGS_QUERY_FILE_NAME);
+
let generate_opts = GenerateOpts {
author_name: authors
.map(|a| a.first().map(|a| a.name.as_str()))

@@ -294,11 +308,24 @@ pub fn generate_grammar_files(
.metadata
.links
.as_ref()
-.and_then(|l| l.funding.as_ref().map(|f| f.as_str())),
+.and_then(|l| l.funding.as_deref()),
version: &tree_sitter_config.metadata.version,
camel_parser_name: &camel_name,
title_parser_name: &title_name,
class_name: &class_name,
+highlights_query_path: tree_sitter_config.grammars[0]
+.highlights
+.to_variable_value(&default_highlights_path),
+injections_query_path: tree_sitter_config.grammars[0]
+.injections
+.to_variable_value(&default_injections_path),
+locals_query_path: tree_sitter_config.grammars[0]
+.locals
+.to_variable_value(&default_locals_path),
+tags_query_path: tree_sitter_config.grammars[0]
+.tags
+.to_variable_value(&default_tags_path),
+namespace: tree_sitter_config.metadata.namespace.as_deref(),
};

// Create package.json
@@ -314,9 +341,9 @@ pub fn generate_grammar_files(
)
},
|path| {
-let contents = fs::read_to_string(path)?
+let mut contents = fs::read_to_string(path)?
.replace(
-r#""node-addon-api": "^8.3.1"#,
+r#""node-addon-api": "^8.3.1""#,
r#""node-addon-api": "^8.5.0""#,
)
.replace(

@@ -325,9 +352,18 @@ pub fn generate_grammar_files(
"tree-sitter-cli":"#},
indoc! {r#"
"prebuildify": "^6.0.1",
-"tree-sitter": "^0.22.4",
+"tree-sitter": "^0.25.0",
"tree-sitter-cli":"#},
);
+if !contents.contains("module") {
+info!("Migrating package.json to ESM");
+contents = contents.replace(
+r#""repository":"#,
+indoc! {r#"
+"type": "module",
+"repository":"#},
+);
+}
write_file(path, contents)?;
Ok(())
},
@@ -335,9 +371,21 @@ pub fn generate_grammar_files(

// Do not create a grammar.js file in a repo with multiple language configs
if !tree_sitter_config.has_multiple_language_configs() {
-missing_path(repo_path.join("grammar.js"), |path| {
-generate_file(path, GRAMMAR_JS_TEMPLATE, language_name, &generate_opts)
-})?;
+missing_path_else(
+repo_path.join("grammar.js"),
+allow_update,
+|path| generate_file(path, GRAMMAR_JS_TEMPLATE, language_name, &generate_opts),
+|path| {
+let mut contents = fs::read_to_string(path)?;
+if contents.contains("module.exports") {
+info!("Migrating grammars.js to ESM");
+contents = contents.replace("module.exports =", "export default");
+write_file(path, contents)?;
+}
+
+Ok(())
+},
+)?;
}

// Write .gitignore file
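The `missing_path_else` helper that this and the following hunks switch to is not shown in this excerpt. The sketch below is only an inference from its call sites (a path, the `allow_update` flag, a "create" closure for missing files and an "update" closure for existing ones); the CLI's actual helper may have a different return type and chaining behavior.

use std::path::Path;
use anyhow::Result;

// Hypothetical shape inferred from the call sites above: generate the file when it
// is missing, otherwise run the update closure only when updates are allowed.
fn missing_path_else<P, C, U>(path: P, allow_update: bool, create: C, update: U) -> Result<()>
where
    P: AsRef<Path>,
    C: FnOnce(&Path) -> Result<()>,
    U: FnOnce(&Path) -> Result<()>,
{
    let path = path.as_ref();
    if !path.exists() {
        create(path)
    } else if allow_update {
        update(path)
    } else {
        Ok(())
    }
}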
@@ -346,10 +394,16 @@ pub fn generate_grammar_files(
allow_update,
|path| generate_file(path, GITIGNORE_TEMPLATE, language_name, &generate_opts),
|path| {
-let contents = fs::read_to_string(path)?;
+let mut contents = fs::read_to_string(path)?;
if !contents.contains("Zig artifacts") {
-eprintln!("Replacing .gitignore");
-generate_file(path, GITIGNORE_TEMPLATE, language_name, &generate_opts)?;
+info!("Adding zig entries to .gitignore");
+contents.push('\n');
+contents.push_str(indoc! {"
+# Zig artifacts
+.zig-cache/
+zig-cache/
+zig-out/
+"});
}
Ok(())
},

@@ -362,8 +416,13 @@ pub fn generate_grammar_files(
|path| generate_file(path, GITATTRIBUTES_TEMPLATE, language_name, &generate_opts),
|path| {
let mut contents = fs::read_to_string(path)?;
-contents = contents.replace("bindings/c/* ", "bindings/c/** ");
+let c_bindings_entry = "bindings/c/* ";
+if contents.contains(c_bindings_entry) {
+info!("Updating c bindings entry in .gitattributes");
+contents = contents.replace(c_bindings_entry, "bindings/c/** ");
+}
if !contents.contains("Zig bindings") {
+info!("Adding zig entries to .gitattributes");
contents.push('\n');
contents.push_str(indoc! {"
# Zig bindings
@@ -386,13 +445,131 @@ pub fn generate_grammar_files(
// Generate Rust bindings
if tree_sitter_config.bindings.rust {
missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
-missing_path(path.join("lib.rs"), |path| {
+missing_path_else(path.join("lib.rs"), allow_update, |path| {
generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts)
+}, |path| {
+let mut contents = fs::read_to_string(path)?;
+if !contents.contains("#[cfg(with_highlights_query)]") {
+info!("Updating query constants in bindings/rust/lib.rs");
+let replacement = indoc! {r#"
+#[cfg(with_highlights_query)]
+/// The syntax highlighting query for this grammar.
+pub const HIGHLIGHTS_QUERY: &str = include_str!("../../HIGHLIGHTS_QUERY_PATH");
+
+#[cfg(with_injections_query)]
+/// The language injection query for this grammar.
+pub const INJECTIONS_QUERY: &str = include_str!("../../INJECTIONS_QUERY_PATH");
+
+#[cfg(with_locals_query)]
+/// The local variable query for this grammar.
+pub const LOCALS_QUERY: &str = include_str!("../../LOCALS_QUERY_PATH");
+
+#[cfg(with_tags_query)]
+/// The symbol tagging query for this grammar.
+pub const TAGS_QUERY: &str = include_str!("../../TAGS_QUERY_PATH");
+"#}
+.replace("HIGHLIGHTS_QUERY_PATH", generate_opts.highlights_query_path)
+.replace("INJECTIONS_QUERY_PATH", generate_opts.injections_query_path)
+.replace("LOCALS_QUERY_PATH", generate_opts.locals_query_path)
+.replace("TAGS_QUERY_PATH", generate_opts.tags_query_path);
+contents = contents
+.replace(
+indoc! {r#"
+// NOTE: uncomment these to include any queries that this grammar contains:
+
+// pub const HIGHLIGHTS_QUERY: &str = include_str!("../../queries/highlights.scm");
+// pub const INJECTIONS_QUERY: &str = include_str!("../../queries/injections.scm");
+// pub const LOCALS_QUERY: &str = include_str!("../../queries/locals.scm");
+// pub const TAGS_QUERY: &str = include_str!("../../queries/tags.scm");
+"#},
+&replacement,
+);
+}
+write_file(path, contents)?;
+Ok(())
})?;

-missing_path(path.join("build.rs"), |path| {
-generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts)
-})?;
+missing_path_else(
+path.join("build.rs"),
+allow_update,
+|path| generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts),
+|path| {
+let mut contents = fs::read_to_string(path)?;
+if !contents.contains("wasm32-unknown-unknown") {
+info!("Adding wasm32-unknown-unknown target to bindings/rust/build.rs");
+let replacement = indoc!{r#"
+c_config.flag("-utf-8");
+
+if std::env::var("TARGET").unwrap() == "wasm32-unknown-unknown" {
+let Ok(wasm_headers) = std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS") else {
+panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS must be set by the language crate");
+};
+let Ok(wasm_src) =
+std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_SRC").map(std::path::PathBuf::from)
+else {
+panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_SRC must be set by the language crate");
+};
+
+c_config.include(&wasm_headers);
+c_config.files([
+wasm_src.join("stdio.c"),
+wasm_src.join("stdlib.c"),
+wasm_src.join("string.c"),
+]);
+}
+"#}
+.lines()
+.map(|line| if line.is_empty() { line.to_string() } else { format!(" {line}") })
+.collect::<Vec<_>>()
+.join("\n");

+contents = contents.replace(r#" c_config.flag("-utf-8");"#, &replacement);
+}
+
+// Introduce configuration variables for dynamic query inclusion
+if !contents.contains("with_highlights_query") {
+info!("Adding support for dynamic query inclusion to bindings/rust/build.rs");
+let replaced = indoc! {r#"
+c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
+}"#}
+.replace("KEBAB_PARSER_NAME", &language_name.to_kebab_case());
+
+let replacement = indoc! {r#"
+c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
+
+println!("cargo:rustc-check-cfg=cfg(with_highlights_query)");
+if !"HIGHLIGHTS_QUERY_PATH".is_empty() && std::path::Path::new("HIGHLIGHTS_QUERY_PATH").exists() {
+println!("cargo:rustc-cfg=with_highlights_query");
+}
+println!("cargo:rustc-check-cfg=cfg(with_injections_query)");
+if !"INJECTIONS_QUERY_PATH".is_empty() && std::path::Path::new("INJECTIONS_QUERY_PATH").exists() {
+println!("cargo:rustc-cfg=with_injections_query");
+}
+println!("cargo:rustc-check-cfg=cfg(with_locals_query)");
+if !"LOCALS_QUERY_PATH".is_empty() && std::path::Path::new("LOCALS_QUERY_PATH").exists() {
+println!("cargo:rustc-cfg=with_locals_query");
+}
+println!("cargo:rustc-check-cfg=cfg(with_tags_query)");
+if !"TAGS_QUERY_PATH".is_empty() && std::path::Path::new("TAGS_QUERY_PATH").exists() {
+println!("cargo:rustc-cfg=with_tags_query");
+}
+}"#}
+.replace("KEBAB_PARSER_NAME", &language_name.to_kebab_case())
+.replace("HIGHLIGHTS_QUERY_PATH", generate_opts.highlights_query_path)
+.replace("INJECTIONS_QUERY_PATH", generate_opts.injections_query_path)
+.replace("LOCALS_QUERY_PATH", generate_opts.locals_query_path)
+.replace("TAGS_QUERY_PATH", generate_opts.tags_query_path);
+
+contents = contents.replace(
+&replaced,
+&replacement,
+);
+}
+
+write_file(path, contents)?;
+Ok(())
+},
+)?;

missing_path_else(
repo_path.join("Cargo.toml"),
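For context on the mechanism used in the build.rs template above: a build script declares a custom cfg with `cargo:rustc-check-cfg` and turns it on with `cargo:rustc-cfg`, and the library can then gate items on that cfg (as the lib.rs hunk does with `#[cfg(with_highlights_query)]`). A generic sketch of that mechanism, not the template itself; the query path here is only an example:

// build.rs (generic sketch of the cargo cfg mechanism used by the template above)
fn main() {
    // Declare the custom cfg so check-cfg lints accept it.
    println!("cargo:rustc-check-cfg=cfg(with_highlights_query)");
    // Enable it only when the query file is actually present in the package.
    if std::path::Path::new("queries/highlights.scm").exists() {
        println!("cargo:rustc-cfg=with_highlights_query");
    }
}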
@@ -408,6 +585,7 @@ pub fn generate_grammar_files(
|path| {
let contents = fs::read_to_string(path)?;
if contents.contains("\"LICENSE\"") {
+info!("Adding LICENSE entry to bindings/rust/Cargo.toml");
write_file(path, contents.replace("\"LICENSE\"", "\"/LICENSE\""))?;
}
Ok(())

@@ -427,26 +605,53 @@ pub fn generate_grammar_files(
|path| generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts),
|path| {
let contents = fs::read_to_string(path)?;
-if !contents.contains("bun") {
-eprintln!("Replacing index.js");
+if !contents.contains("Object.defineProperty") {
+info!("Replacing index.js");
generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts)?;
}
Ok(())
},
)?;

-missing_path(path.join("index.d.ts"), |path| {
-generate_file(path, INDEX_D_TS_TEMPLATE, language_name, &generate_opts)
-})?;
+missing_path_else(
+path.join("index.d.ts"),
+allow_update,
+|path| generate_file(path, INDEX_D_TS_TEMPLATE, language_name, &generate_opts),
+|path| {
+let contents = fs::read_to_string(path)?;
+if !contents.contains("export default binding") {
+info!("Replacing index.d.ts");
+generate_file(path, INDEX_D_TS_TEMPLATE, language_name, &generate_opts)?;
+}
+Ok(())
+},
+)?;

-missing_path(path.join("binding_test.js"), |path| {
+missing_path_else(
+path.join("binding_test.js"),
+allow_update,
+|path| {
generate_file(
path,
BINDING_TEST_JS_TEMPLATE,
language_name,
&generate_opts,
)
-})?;
+},
+|path| {
+let contents = fs::read_to_string(path)?;
+if !contents.contains("import") {
+info!("Replacing binding_test.js");
+generate_file(
+path,
+BINDING_TEST_JS_TEMPLATE,
+language_name,
+&generate_opts,
+)?;
+}
+Ok(())
+},
+)?;

missing_path(path.join("binding.cc"), |path| {
generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts)
@@ -459,6 +664,7 @@ pub fn generate_grammar_files(
|path| {
let contents = fs::read_to_string(path)?;
if contents.contains("fs.exists(") {
+info!("Replacing `fs.exists` calls in binding.gyp");
write_file(path, contents.replace("fs.exists(", "fs.existsSync("))?;
}
Ok(())

@@ -471,14 +677,17 @@ pub fn generate_grammar_files(

// Generate C bindings
if tree_sitter_config.bindings.c {
+let kebab_case_name = language_name.to_kebab_case();
missing_path(bindings_dir.join("c"), create_dir)?.apply(|path| {
-let old_file = &path.join(format!("tree-sitter-{}.h", language_name.to_kebab_case()));
+let header_name = format!("tree-sitter-{kebab_case_name}.h");
+let old_file = &path.join(&header_name);
if allow_update && fs::exists(old_file).unwrap_or(false) {
+info!("Removing bindings/c/{header_name}");
fs::remove_file(old_file)?;
}
missing_path(path.join("tree_sitter"), create_dir)?.apply(|include_path| {
missing_path(
-include_path.join(format!("tree-sitter-{}.h", language_name.to_kebab_case())),
+include_path.join(&header_name),
|path| {
generate_file(path, PARSER_NAME_H_TEMPLATE, language_name, &generate_opts)
},

@@ -487,7 +696,7 @@ pub fn generate_grammar_files(
})?;

missing_path(
-path.join(format!("tree-sitter-{}.pc.in", language_name.to_kebab_case())),
+path.join(format!("tree-sitter-{kebab_case_name}.pc.in")),
|path| {
generate_file(
path,

@@ -505,11 +714,31 @@ pub fn generate_grammar_files(
generate_file(path, MAKEFILE_TEMPLATE, language_name, &generate_opts)
},
|path| {
-let contents = fs::read_to_string(path)?.replace(
-"-m644 bindings/c/$(LANGUAGE_NAME).h",
-"-m644 bindings/c/tree_sitter/$(LANGUAGE_NAME).h"
+let mut contents = fs::read_to_string(path)?;
+if !contents.contains("cd '$(DESTDIR)$(LIBDIR)' && ln -sf") {
+info!("Replacing Makefile");
+generate_file(path, MAKEFILE_TEMPLATE, language_name, &generate_opts)?;
+} else {
+let replaced = indoc! {r"
+$(PARSER): $(SRC_DIR)/grammar.json
+$(TS) generate $^
+"};
+if contents.contains(replaced) {
+info!("Adding --no-parser target to Makefile");
+contents = contents
+.replace(
+replaced,
+indoc! {r"
+$(SRC_DIR)/grammar.json: grammar.js
+$(TS) generate --no-parser $^
+
+$(PARSER): $(SRC_DIR)/grammar.json
+$(TS) generate $^
+"}
);
+}
write_file(path, contents)?;
+}
Ok(())
},
)?;
@@ -519,8 +748,8 @@ pub fn generate_grammar_files(
allow_update,
|path| generate_file(path, CMAKELISTS_TXT_TEMPLATE, language_name, &generate_opts),
|path| {
-let mut contents = fs::read_to_string(path)?;
-contents = contents
+let contents = fs::read_to_string(path)?;
+let replaced_contents = contents
.replace("add_custom_target(test", "add_custom_target(ts-test")
.replace(
&formatdoc! {r#"

@@ -540,8 +769,38 @@ pub fn generate_grammar_files(
INTERFACE $<BUILD_INTERFACE:${{CMAKE_CURRENT_SOURCE_DIR}}/bindings/c>
$<INSTALL_INTERFACE:${{CMAKE_INSTALL_INCLUDEDIR}}>)
"}
+).replace(
+indoc! {r#"
+add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
+DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
+COMMAND "${TREE_SITTER_CLI}" generate src/grammar.json
+--abi=${TREE_SITTER_ABI_VERSION}
+WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+COMMENT "Generating parser.c")
+"#},
+indoc! {r#"
+add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
+"${CMAKE_CURRENT_SOURCE_DIR}/src/node-types.json"
+DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/grammar.js"
+COMMAND "${TREE_SITTER_CLI}" generate grammar.js --no-parser
+WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+COMMENT "Generating grammar.json")
+
+add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
+BYPRODUCTS "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/parser.h"
+"${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/alloc.h"
+"${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/array.h"
+DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
+COMMAND "${TREE_SITTER_CLI}" generate src/grammar.json
+--abi=${TREE_SITTER_ABI_VERSION}
+WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+COMMENT "Generating parser.c")
+"#}
);
-write_file(path, contents)?;
+if !replaced_contents.eq(&contents) {
+info!("Updating CMakeLists.txt");
+write_file(path, replaced_contents)?;
+}
Ok(())
},
)?;
@@ -577,7 +836,8 @@ pub fn generate_grammar_files(
// Generate Python bindings
if tree_sitter_config.bindings.python {
missing_path(bindings_dir.join("python"), create_dir)?.apply(|path| {
-let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
+let snake_case_grammar_name = format!("tree_sitter_{}", language_name.to_snake_case());
+let lang_path = path.join(&snake_case_grammar_name);
missing_path(&lang_path, create_dir)?;

missing_path_else(

@@ -587,6 +847,7 @@ pub fn generate_grammar_files(
|path| {
let mut contents = fs::read_to_string(path)?;
if !contents.contains("PyModuleDef_Init") {
+info!("Updating bindings/python/{snake_case_grammar_name}/binding.c");
contents = contents
.replace("PyModule_Create", "PyModuleDef_Init")
.replace(

@@ -619,13 +880,44 @@ pub fn generate_grammar_files(
},
)?;

-missing_path(lang_path.join("__init__.py"), |path| {
+missing_path_else(
+lang_path.join("__init__.py"),
+allow_update,
+|path| {
generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)
-})?;
+},
+|path| {
+let contents = fs::read_to_string(path)?;
+if !contents.contains("uncomment these to include any queries") {
+info!("Replacing __init__.py");
+generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)?;
+}
+Ok(())
+},
+)?;

-missing_path(lang_path.join("__init__.pyi"), |path| {
-generate_file(path, INIT_PYI_TEMPLATE, language_name, &generate_opts)
-})?;
+missing_path_else(
+lang_path.join("__init__.pyi"),
+allow_update,
+|path| generate_file(path, INIT_PYI_TEMPLATE, language_name, &generate_opts),
+|path| {
+let mut contents = fs::read_to_string(path)?;
+if contents.contains("uncomment these to include any queries") {
+info!("Replacing __init__.pyi");
+generate_file(path, INIT_PYI_TEMPLATE, language_name, &generate_opts)?;
+} else if !contents.contains("CapsuleType") {
+info!("Updating __init__.pyi");
+contents = contents
+.replace(
+"from typing import Final",
+"from typing import Final\nfrom typing_extensions import CapsuleType"
+)
+.replace("-> object:", "-> CapsuleType:");
+write_file(path, contents)?;
+}
+Ok(())
+},
+)?;

missing_path(lang_path.join("py.typed"), |path| {
generate_file(path, "", language_name, &generate_opts) // py.typed is empty
@@ -646,6 +938,7 @@ pub fn generate_grammar_files(
|path| {
let mut contents = fs::read_to_string(path)?;
if !contents.contains("Parser(Language(") {
+info!("Updating Language function in bindings/python/tests/test_binding.py");
contents = contents
.replace("tree_sitter.Language(", "Parser(Language(")
.replace(".language())\n", ".language()))\n")

@@ -666,11 +959,19 @@ pub fn generate_grammar_files(
allow_update,
|path| generate_file(path, SETUP_PY_TEMPLATE, language_name, &generate_opts),
|path| {
-let contents = fs::read_to_string(path)?;
+let mut contents = fs::read_to_string(path)?;
if !contents.contains("build_ext") {
-eprintln!("Replacing setup.py");
+info!("Replacing setup.py");
generate_file(path, SETUP_PY_TEMPLATE, language_name, &generate_opts)?;
}
+if !contents.contains(" and not get_config_var") {
+info!("Updating Python free-threading support in setup.py");
+contents = contents.replace(
+r#"startswith("cp"):"#,
+r#"startswith("cp") and not get_config_var("Py_GIL_DISABLED"):"#
+);
+write_file(path, contents)?;
+}
Ok(())
},
)?;

@@ -689,6 +990,7 @@ pub fn generate_grammar_files(
|path| {
let mut contents = fs::read_to_string(path)?;
if !contents.contains("cp310-*") {
+info!("Updating dependencies in pyproject.toml");
contents = contents
.replace(r#"build = "cp39-*""#, r#"build = "cp310-*""#)
.replace(r#"python = ">=3.9""#, r#"python = ">=3.10""#)

@@ -726,15 +1028,18 @@ pub fn generate_grammar_files(
allow_update,
|path| generate_file(path, PACKAGE_SWIFT_TEMPLATE, language_name, &generate_opts),
|path| {
-let mut contents = fs::read_to_string(path)?;
-contents = contents
+let contents = fs::read_to_string(path)?;
+let replaced_contents = contents
.replace(
"https://github.com/ChimeHQ/SwiftTreeSitter",
"https://github.com/tree-sitter/swift-tree-sitter",
)
.replace("version: \"0.8.0\")", "version: \"0.9.0\")")
.replace("(url:", "(name: \"SwiftTreeSitter\", url:");
+if !replaced_contents.eq(&contents) {
+info!("Updating tree-sitter dependency in Package.swift");
write_file(path, contents)?;
+}
Ok(())
},
)?;

@@ -752,7 +1057,7 @@ pub fn generate_grammar_files(
|path| {
let contents = fs::read_to_string(path)?;
if !contents.contains("b.pkg_hash.len") {
-eprintln!("Replacing build.zig");
+info!("Replacing build.zig");
generate_file(path, BUILD_ZIG_TEMPLATE, language_name, &generate_opts)
} else {
Ok(())

@@ -767,7 +1072,7 @@ pub fn generate_grammar_files(
|path| {
let contents = fs::read_to_string(path)?;
if !contents.contains(".name = .tree_sitter_") {
-eprintln!("Replacing build.zig.zon");
+info!("Replacing build.zig.zon");
generate_file(path, BUILD_ZIG_ZON_TEMPLATE, language_name, &generate_opts)
} else {
Ok(())

@@ -783,7 +1088,7 @@ pub fn generate_grammar_files(
|path| {
let contents = fs::read_to_string(path)?;
if contents.contains("ts.Language") {
-eprintln!("Replacing root.zig");
+info!("Replacing root.zig");
generate_file(path, ROOT_ZIG_TEMPLATE, language_name, &generate_opts)
} else {
Ok(())
@@ -799,6 +1104,45 @@ pub fn generate_grammar_files(
})?;
}

+// Generate Java bindings
+if tree_sitter_config.bindings.java {
+missing_path(repo_path.join("pom.xml"), |path| {
+generate_file(path, POM_XML_TEMPLATE, language_name, &generate_opts)
+})?;
+
+missing_path(bindings_dir.join("java"), create_dir)?.apply(|path| {
+missing_path(path.join("main"), create_dir)?.apply(|path| {
+let package_path = generate_opts
+.namespace
+.unwrap_or("io.github.treesitter")
+.replace(['-', '_'], "")
+.split('.')
+.fold(path.to_path_buf(), |path, dir| path.join(dir))
+.join("jtreesitter")
+.join(language_name.to_lowercase().replace('_', ""));
+missing_path(package_path, create_dir)?.apply(|path| {
+missing_path(path.join(format!("{class_name}.java")), |path| {
+generate_file(path, BINDING_JAVA_TEMPLATE, language_name, &generate_opts)
+})?;
+
+Ok(())
+})?;
+
+Ok(())
+})?;
+
+missing_path(path.join("test"), create_dir)?.apply(|path| {
+missing_path(path.join(format!("{class_name}Test.java")), |path| {
+generate_file(path, TEST_JAVA_TEMPLATE, language_name, &generate_opts)
+})?;
+
+Ok(())
+})?;
+
+Ok(())
+})?;
+}
+
Ok(())
}
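To make the package-path fold above concrete, here is a self-contained example of how a namespace would map to a Java package directory. The namespace value and language name are made up for illustration; only the transformation mirrors the hunk above.

use std::path::PathBuf;

fn java_package_dir(namespace: Option<&str>, language_name: &str, root: PathBuf) -> PathBuf {
    // Mirrors the fold in the hunk above: strip '-'/'_' from the namespace,
    // turn each dot-separated segment into a directory, then append
    // "jtreesitter" and the lower-cased, underscore-free language name.
    namespace
        .unwrap_or("io.github.treesitter")
        .replace(['-', '_'], "")
        .split('.')
        .fold(root, |path, dir| path.join(dir))
        .join("jtreesitter")
        .join(language_name.to_lowercase().replace('_', ""))
}

fn main() {
    // Prints: bindings/java/main/com/example/jtreesitter/mylang
    let dir = java_package_dir(Some("com.example"), "my_lang", PathBuf::from("bindings/java/main"));
    println!("{}", dir.display());
}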
@@ -848,6 +1192,15 @@ fn generate_file(
) -> Result<()> {
let filename = path.file_name().unwrap().to_str().unwrap();

+let lower_parser_name = if path
+.extension()
+.is_some_and(|e| e.eq_ignore_ascii_case("java"))
+{
+language_name.to_snake_case().replace('_', "")
+} else {
+language_name.to_snake_case()
+};
+
let mut replacement = template
.replace(
CAMEL_PARSER_NAME_PLACEHOLDER,

@@ -861,14 +1214,11 @@ fn generate_file(
UPPER_PARSER_NAME_PLACEHOLDER,
&language_name.to_shouty_snake_case(),
)
-.replace(
-LOWER_PARSER_NAME_PLACEHOLDER,
-&language_name.to_snake_case(),
-)
.replace(
KEBAB_PARSER_NAME_PLACEHOLDER,
&language_name.to_kebab_case(),
)
+.replace(LOWER_PARSER_NAME_PLACEHOLDER, &lower_parser_name)
.replace(PARSER_NAME_PLACEHOLDER, language_name)
.replace(CLI_VERSION_PLACEHOLDER, CLI_VERSION)
.replace(RUST_BINDING_VERSION_PLACEHOLDER, RUST_BINDING_VERSION)

@@ -877,7 +1227,20 @@ fn generate_file(
PARSER_VERSION_PLACEHOLDER,
&generate_opts.version.to_string(),
)
-.replace(PARSER_CLASS_NAME_PLACEHOLDER, generate_opts.class_name);
+.replace(PARSER_CLASS_NAME_PLACEHOLDER, generate_opts.class_name)
+.replace(
+HIGHLIGHTS_QUERY_PATH_PLACEHOLDER,
+generate_opts.highlights_query_path,
+)
+.replace(
+INJECTIONS_QUERY_PATH_PLACEHOLDER,
+generate_opts.injections_query_path,
+)
+.replace(
+LOCALS_QUERY_PATH_PLACEHOLDER,
+generate_opts.locals_query_path,
+)
+.replace(TAGS_QUERY_PATH_PLACEHOLDER, generate_opts.tags_query_path);

if let Some(name) = generate_opts.author_name {
replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER, name);

@@ -895,6 +1258,9 @@ fn generate_file(
"Cargo.toml" => {
replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_RS, "");
}
+"pom.xml" => {
+replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_JAVA, "");
+}
_ => {}
}
}

@@ -920,23 +1286,32 @@ fn generate_file(
"Cargo.toml" => {
replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_RS, "");
}
+"pom.xml" => {
+replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_JAVA, "");
+}
_ => {}
}
}

-if filename == "package.json" {
-if let Some(url) = generate_opts.author_url {
+match (generate_opts.author_url, filename) {
+(Some(url), "package.json" | "pom.xml") => {
replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER, url);
-} else {
+}
+(None, "package.json") => {
replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER_JS, "");
}
+(None, "pom.xml") => {
+replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER_JAVA, "");
+}
+_ => {}
}

if generate_opts.author_name.is_none()
&& generate_opts.author_email.is_none()
&& generate_opts.author_url.is_none()
-&& filename == "package.json"
{
+match filename {
+"package.json" => {
if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_JS) {
if let Some(end_idx) = replacement[start_idx..]
.find("},")

@@ -945,6 +1320,19 @@ fn generate_file(
replacement.replace_range(start_idx..end_idx, "");
}
}
+}
+"pom.xml" => {
+if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_JAVA) {
+if let Some(end_idx) = replacement[start_idx..]
+.find("</developer>")
+.map(|i| i + start_idx + 12)
+{
+replacement.replace_range(start_idx..end_idx, "");
+}
+}
+}
+_ => {}
+}
} else if generate_opts.author_name.is_none() && generate_opts.author_email.is_none() {
match filename {
"pyproject.toml" => {

@@ -981,16 +1369,15 @@ fn generate_file(
}
}

-match generate_opts.license {
-Some(license) => replacement = replacement.replace(PARSER_LICENSE_PLACEHOLDER, license),
-_ => replacement = replacement.replace(PARSER_LICENSE_PLACEHOLDER, "MIT"),
+if let Some(license) = generate_opts.license {
+replacement = replacement.replace(PARSER_LICENSE_PLACEHOLDER, license);
+} else {
+replacement = replacement.replace(PARSER_LICENSE_PLACEHOLDER, "MIT");
}

-match generate_opts.description {
-Some(description) => {
+if let Some(description) = generate_opts.description {
replacement = replacement.replace(PARSER_DESCRIPTION_PLACEHOLDER, description);
-}
-_ => {
+} else {
replacement = replacement.replace(
PARSER_DESCRIPTION_PLACEHOLDER,
&format!(

@@ -999,18 +1386,15 @@ fn generate_file(
),
);
}
-}

-match generate_opts.repository {
-Some(repository) => {
+if let Some(repository) = generate_opts.repository {
replacement = replacement
.replace(
PARSER_URL_STRIPPED_PLACEHOLDER,
&repository.replace("https://", "").to_lowercase(),
)
.replace(PARSER_URL_PLACEHOLDER, &repository.to_lowercase());
-}
-_ => {
+} else {
replacement = replacement
.replace(
PARSER_URL_STRIPPED_PLACEHOLDER,

@@ -1027,6 +1411,18 @@ fn generate_file(
),
);
}
+
+if let Some(namespace) = generate_opts.namespace {
+replacement = replacement
+.replace(
+PARSER_NS_CLEANED_PLACEHOLDER,
+&namespace.replace(['-', '_'], ""),
+)
+.replace(PARSER_NS_PLACEHOLDER, namespace);
+} else {
+replacement = replacement
+.replace(PARSER_NS_CLEANED_PLACEHOLDER, "io.github.treesitter")
+.replace(PARSER_NS_PLACEHOLDER, "io.github.tree-sitter");
}

if let Some(funding_url) = generate_opts.funding {
@@ -1048,6 +1444,18 @@ fn generate_file(
}
}

+if filename == "build.zig.zon" {
+let id = thread_rng().gen_range(1u32..0xFFFF_FFFFu32);
+let checksum = crc32(format!("tree_sitter_{language_name}").as_bytes());
+replacement = replacement.replace(
+PARSER_FINGERPRINT_PLACEHOLDER,
+#[cfg(target_endian = "little")]
+&format!("0x{checksum:x}{id:x}"),
+#[cfg(target_endian = "big")]
+&format!("0x{id:x}{checksum:x}"),
+);
+}
+
write_file(path, replacement)?;
Ok(())
}
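A hedged illustration of the build.zig.zon fingerprint construction above, for the little-endian case. The `crc32fast` and `rand` crates stand in for the CLI's own `crc32` helper and RNG import, which are not shown in this excerpt.

use rand::Rng;

fn build_zig_fingerprint(language_name: &str) -> String {
    // Random 32-bit id, as in the hunk above.
    let id: u32 = rand::thread_rng().gen_range(1u32..0xFFFF_FFFFu32);
    // CRC32 of "tree_sitter_<name>"; crc32fast stands in for the CLI's helper.
    let checksum = crc32fast::hash(format!("tree_sitter_{language_name}").as_bytes());
    // Little-endian hosts put the checksum first, matching the cfg above.
    format!("0x{checksum:x}{id:x}")
}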
crates/cli/src/logger.rs (new file, 55 lines)
@@ -0,0 +1,55 @@
+use std::io::Write;
+
+use anstyle::{AnsiColor, Color, Style};
+use log::{Level, LevelFilter, Log, Metadata, Record};
+
+pub fn paint(color: Option<impl Into<Color>>, text: &str) -> String {
+let style = Style::new().fg_color(color.map(Into::into));
+format!("{style}{text}{style:#}")
+}
+
+struct Logger;
+
+impl Log for Logger {
+fn enabled(&self, _: &Metadata) -> bool {
+true
+}
+
+fn log(&self, record: &Record) {
+match record.level() {
+Level::Error => eprintln!(
+"{} {}",
+paint(Some(AnsiColor::Red), "Error:"),
+record.args()
+),
+Level::Warn => eprintln!(
+"{} {}",
+paint(Some(AnsiColor::Yellow), "Warning:"),
+record.args()
+),
+Level::Info | Level::Debug => eprintln!("{}", record.args()),
+Level::Trace => eprintln!(
+"[{}] {}",
+record
+.module_path()
+.unwrap_or_default()
+.trim_start_matches("rust_tree_sitter_cli::"),
+record.args()
+),
+}
+}
+
+fn flush(&self) {
+let mut stderr = std::io::stderr().lock();
+let _ = stderr.flush();
+}
+}
+
+pub fn init() {
+log::set_boxed_logger(Box::new(Logger {})).unwrap();
+log::set_max_level(LevelFilter::Info);
+}
+
+pub fn enable_debug() {
+log::set_max_level(LevelFilter::Debug);
+}
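A sketch of how the new logger module is presumably wired up inside the CLI crate; the actual call sites are not part of this excerpt.

fn run(debug: bool) {
    // Install the logger once at startup, then use the standard `log` macros.
    crate::logger::init();
    if debug {
        crate::logger::enable_debug();
    }
    log::info!("Generating parser files");
    log::warn!("This prints with a yellow \"Warning:\" prefix");
}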
File diff suppressed because it is too large
@@ -1,6 +1,7 @@
use std::{
fmt, fs,
-io::{self, StdoutLock, Write},
+io::{self, Write},
+ops::ControlFlow,
path::{Path, PathBuf},
sync::atomic::{AtomicUsize, Ordering},
time::{Duration, Instant},

@@ -9,16 +10,17 @@ use std::{
use anstyle::{AnsiColor, Color, RgbColor};
use anyhow::{anyhow, Context, Result};
use clap::ValueEnum;
+use log::info;
+use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use tree_sitter::{
ffi, InputEdit, Language, LogType, ParseOptions, ParseState, Parser, Point, Range, Tree,
TreeCursor,
};

-use super::util;
-use crate::{fuzz::edits::Edit, test::paint};
+use crate::{fuzz::edits::Edit, logger::paint, util};

-#[derive(Debug, Default, Serialize)]
+#[derive(Debug, Default, Serialize, JsonSchema)]
pub struct Stats {
pub successful_parses: usize,
pub total_parses: usize,

@@ -229,10 +231,21 @@ impl ParseSummary {
}
}

-#[derive(Serialize, Debug, Default)]
+#[derive(Serialize, Debug)]
pub struct ParseStats {
pub parse_summaries: Vec<ParseSummary>,
pub cumulative_stats: Stats,
+pub source_count: usize,
+}
+
+impl Default for ParseStats {
+fn default() -> Self {
+Self {
+parse_summaries: Vec::new(),
+cumulative_stats: Stats::default(),
+source_count: 1,
+}
+}
}

#[derive(Serialize, ValueEnum, Debug, Copy, Clone, Default, Eq, PartialEq)]
@@ -357,15 +370,15 @@ pub fn parse_file_at_path(
let progress_callback = &mut |_: &ParseState| {
if let Some(cancellation_flag) = opts.cancellation_flag {
if cancellation_flag.load(Ordering::SeqCst) != 0 {
-return true;
+return ControlFlow::Break(());
}
}

if opts.timeout > 0 && start_time.elapsed().as_micros() > opts.timeout as u128 {
-return true;
+return ControlFlow::Break(());
}

-false
+ControlFlow::Continue(())
};

let parse_opts = ParseOptions::new().progress_callback(progress_callback);
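The progress callback now signals cancellation with `std::ops::ControlFlow` instead of a bool. A minimal standalone sketch of that contract, mirroring the closure above but detached from the tree-sitter types so it stays self-contained:

use std::ops::ControlFlow;
use std::time::Instant;

fn make_progress_callback(start: Instant, timeout_micros: u128) -> impl FnMut() -> ControlFlow<()> {
    move || {
        // Break stops the work early, Continue lets it keep going.
        if timeout_micros > 0 && start.elapsed().as_micros() > timeout_micros {
            ControlFlow::Break(())
        } else {
            ControlFlow::Continue(())
        }
    }
}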
|
|
@ -424,7 +437,7 @@ pub fn parse_file_at_path(
|
||||||
|
|
||||||
if let Some(mut tree) = tree {
|
if let Some(mut tree) = tree {
|
||||||
if opts.debug_graph && !opts.edits.is_empty() {
|
if opts.debug_graph && !opts.edits.is_empty() {
|
||||||
println!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
|
info!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
|
||||||
}
|
}
|
||||||
|
|
||||||
let edit_time = Instant::now();
|
let edit_time = Instant::now();
|
||||||
|
|
@ -434,7 +447,7 @@ pub fn parse_file_at_path(
|
||||||
tree = parser.parse(&source_code, Some(&tree)).unwrap();
|
tree = parser.parse(&source_code, Some(&tree)).unwrap();
|
||||||
|
|
||||||
if opts.debug_graph {
|
if opts.debug_graph {
|
||||||
println!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
|
info!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let edit_duration = edit_time.elapsed();
|
let edit_duration = edit_time.elapsed();
|
||||||
|
|
@ -501,63 +514,23 @@ pub fn parse_file_at_path(
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.output == ParseOutput::Cst {
|
if opts.output == ParseOutput::Cst {
|
||||||
let lossy_source_code = String::from_utf8_lossy(&source_code);
|
render_cst(&source_code, &tree, &mut cursor, opts, &mut stdout)?;
|
||||||
let total_width = lossy_source_code
|
|
||||||
.lines()
|
|
||||||
.enumerate()
|
|
||||||
.map(|(row, col)| {
|
|
||||||
(row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1
|
|
||||||
})
|
|
||||||
.max()
|
|
||||||
.unwrap_or(1);
|
|
||||||
let mut indent_level = 1;
|
|
||||||
let mut did_visit_children = false;
|
|
||||||
let mut in_error = false;
|
|
||||||
loop {
|
|
||||||
if did_visit_children {
|
|
||||||
if cursor.goto_next_sibling() {
|
|
||||||
did_visit_children = false;
|
|
||||||
} else if cursor.goto_parent() {
|
|
||||||
did_visit_children = true;
|
|
||||||
indent_level -= 1;
|
|
||||||
if !cursor.node().has_error() {
|
|
||||||
in_error = false;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
cst_render_node(
|
|
||||||
opts,
|
|
||||||
&mut cursor,
|
|
||||||
&source_code,
|
|
||||||
&mut stdout,
|
|
||||||
total_width,
|
|
||||||
indent_level,
|
|
||||||
in_error,
|
|
||||||
)?;
|
|
||||||
if cursor.goto_first_child() {
|
|
||||||
did_visit_children = false;
|
|
||||||
indent_level += 1;
|
|
||||||
if cursor.node().has_error() {
|
|
||||||
in_error = true;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
did_visit_children = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
cursor.reset(tree.root_node());
|
|
||||||
println!();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
        if opts.output == ParseOutput::Xml {
            let mut needs_newline = false;
-           let mut indent_level = 0;
+           let mut indent_level = 2;
            let mut did_visit_children = false;
            let mut had_named_children = false;
            let mut tags = Vec::<&str>::new();

+           // If we're parsing the first file, write the header
+           if opts.stats.parse_summaries.is_empty() {
                writeln!(&mut stdout, "<?xml version=\"1.0\"?>")?;
+               writeln!(&mut stdout, "<sources>")?;
+           }
+           writeln!(&mut stdout, " <source name=\"{}\">", path.display())?;

            loop {
                let node = cursor.node();
                let is_named = node.is_named();

@ -572,7 +545,7 @@ pub fn parse_file_at_path(
                    write!(&mut stdout, "</{}>", tag.expect("there is a tag"))?;
                    // we only write a line in the case where it's the last sibling
                    if let Some(parent) = node.parent() {
-                       if parent.child(parent.child_count() - 1).unwrap() == node {
+                       if parent.child(parent.child_count() as u32 - 1).unwrap() == node {
                            stdout.write_all(b"\n")?;
                        }
                    }

@ -636,8 +609,14 @@ pub fn parse_file_at_path(
                    }
                }
            }
+           writeln!(&mut stdout)?;
+           writeln!(&mut stdout, " </source>")?;

+           // If we parsed the last file, write the closing tag for the `sources` header
+           if opts.stats.parse_summaries.len() == opts.stats.source_count - 1 {
+               writeln!(&mut stdout, "</sources>")?;
+           }
            cursor.reset(tree.root_node());
-           println!();
        }

        if opts.output == ParseOutput::Dot {

@ -695,10 +674,9 @@ pub fn parse_file_at_path(
            width = max_path_length
        )?;
        if let Some(node) = first_error {
-           let start = node.start_position();
-           let end = node.end_position();
-           let mut node_text = String::new();
-           for c in node.kind().chars() {
+           let node_kind = node.kind();
+           let mut node_text = String::with_capacity(node_kind.len());
+           for c in node_kind.chars() {
                if let Some(escaped) = escape_invisible(c) {
                    node_text += escaped;
                } else {

@ -715,6 +693,9 @@ pub fn parse_file_at_path(
            } else {
                write!(&mut stdout, "{node_text}")?;
            }

+           let start = node.start_position();
+           let end = node.end_position();
            write!(
                &mut stdout,
                " [{}, {}] - [{}, {}])",

@ -781,12 +762,77 @@ const fn escape_invisible(c: char) -> Option<&'static str> {
    })
}

+const fn escape_delimiter(c: char) -> Option<&'static str> {
+    Some(match c {
+        '`' => "\\`",
+        '\"' => "\\\"",
+        _ => return None,
+    })
+}
+
+pub fn render_cst<'a, 'b: 'a>(
+    source_code: &[u8],
+    tree: &'b Tree,
+    cursor: &mut TreeCursor<'a>,
+    opts: &ParseFileOptions,
+    out: &mut impl Write,
+) -> Result<()> {
+    let lossy_source_code = String::from_utf8_lossy(source_code);
+    let total_width = lossy_source_code
+        .lines()
+        .enumerate()
+        .map(|(row, col)| (row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1)
+        .max()
+        .unwrap_or(1);
+    let mut indent_level = usize::from(!opts.no_ranges);
+    let mut did_visit_children = false;
+    let mut in_error = false;
+    loop {
+        if did_visit_children {
+            if cursor.goto_next_sibling() {
+                did_visit_children = false;
+            } else if cursor.goto_parent() {
+                did_visit_children = true;
+                indent_level -= 1;
+                if !cursor.node().has_error() {
+                    in_error = false;
+                }
+            } else {
+                break;
+            }
+        } else {
+            cst_render_node(
+                opts,
+                cursor,
+                source_code,
+                out,
+                total_width,
+                indent_level,
+                in_error,
+            )?;
+            if cursor.goto_first_child() {
+                did_visit_children = false;
+                indent_level += 1;
+                if cursor.node().has_error() {
+                    in_error = true;
+                }
+            } else {
+                did_visit_children = true;
+            }
+        }
+    }
+    cursor.reset(tree.root_node());
+    Ok(())
+}
+
fn render_node_text(source: &str) -> String {
    source
        .chars()
        .fold(String::with_capacity(source.len()), |mut acc, c| {
            if let Some(esc) = escape_invisible(c) {
                acc.push_str(esc);
+           } else if let Some(esc) = escape_delimiter(c) {
+               acc.push_str(esc);
            } else {
                acc.push(c);
            }

@ -796,7 +842,7 @@ fn render_node_text(source: &str) -> String {

fn write_node_text(
    opts: &ParseFileOptions,
-   stdout: &mut StdoutLock<'static>,
+   out: &mut impl Write,
    cursor: &TreeCursor,
    is_named: bool,
    source: &str,

@ -812,7 +858,7 @@ fn write_node_text(

    if !is_named {
        write!(
-           stdout,
+           out,
            "{}{}{}",
            paint(quote_color, &String::from(quote)),
            paint(color, &render_node_text(source)),

@ -836,12 +882,11 @@ fn write_node_text(
            0
        };
        let formatted_line = render_line_feed(line, opts);
-       if !opts.no_ranges {
            write!(
-               stdout,
+               out,
                "{}{}{}{}{}{}",
                if multiline { "\n" } else { " " },
-               if multiline {
+               if multiline && !opts.no_ranges {
                    render_node_range(opts, cursor, is_named, true, total_width, node_range)
                } else {
                    String::new()

@ -852,19 +897,9 @@ fn write_node_text(
                    String::new()
                },
                paint(quote_color, &String::from(quote)),
-               &paint(color, &render_node_text(&formatted_line)),
+               paint(color, &render_node_text(&formatted_line)),
                paint(quote_color, &String::from(quote)),
            )?;
-       } else {
-           write!(
-               stdout,
-               "\n{}{}{}{}",
-               " ".repeat(indent_level + 1),
-               paint(quote_color, &String::from(quote)),
-               &paint(color, &render_node_text(&formatted_line)),
-               paint(quote_color, &String::from(quote)),
-           )?;
-       }
        }
    }

@ -918,9 +953,9 @@ fn render_node_range(

fn cst_render_node(
    opts: &ParseFileOptions,
-   cursor: &mut TreeCursor,
+   cursor: &TreeCursor,
    source_code: &[u8],
-   stdout: &mut StdoutLock<'static>,
+   out: &mut impl Write,
    total_width: usize,
    indent_level: usize,
    in_error: bool,

@ -929,13 +964,13 @@ fn cst_render_node(
    let is_named = node.is_named();
    if !opts.no_ranges {
        write!(
-           stdout,
+           out,
            "{}",
            render_node_range(opts, cursor, is_named, false, total_width, node.range())
        )?;
    }
    write!(
-       stdout,
+       out,
        "{}{}",
        " ".repeat(indent_level),
        if in_error && !node.has_error() {

@ -947,14 +982,14 @@ fn cst_render_node(
    if is_named {
        if let Some(field_name) = cursor.field_name() {
            write!(
-               stdout,
+               out,
                "{}",
                paint(opts.parse_theme.field, &format!("{field_name}: "))
            )?;
        }

        if node.has_error() || node.is_error() {
-           write!(stdout, "{}", paint(opts.parse_theme.error, "•"))?;
+           write!(out, "{}", paint(opts.parse_theme.error, "•"))?;
        }

        let kind_color = if node.is_error() {

@ -964,13 +999,13 @@ fn cst_render_node(
        } else {
            opts.parse_theme.node_kind
        };
-       write!(stdout, "{} ", paint(kind_color, node.kind()))?;
+       write!(out, "{}", paint(kind_color, node.kind()))?;

        if node.child_count() == 0 {
            // Node text from a pattern or external scanner
            write_node_text(
                opts,
-               stdout,
+               out,
                cursor,
                is_named,
                &String::from_utf8_lossy(&source_code[node.start_byte()..node.end_byte()]),

@ -979,17 +1014,13 @@ fn cst_render_node(
            )?;
        }
    } else if node.is_missing() {
-       write!(stdout, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
-       write!(
-           stdout,
-           "\"{}\"",
-           paint(opts.parse_theme.missing, node.kind())
-       )?;
+       write!(out, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
+       write!(out, "\"{}\"", paint(opts.parse_theme.missing, node.kind()))?;
    } else {
        // Terminal literals, like "fn"
        write_node_text(
            opts,
-           stdout,
+           out,
            cursor,
            is_named,
            node.kind(),

@ -997,7 +1028,7 @@ fn cst_render_node(
            (total_width, indent_level),
        )?;
    }
-   writeln!(stdout)?;
+   writeln!(out)?;

    Ok(())
}
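The CST printing that used to live inline in parse_file_at_path is now the render_cst helper, and write_node_text / cst_render_node take out: &mut impl Write rather than a StdoutLock, so the same rendering path can target any writer. A minimal calling sketch, not the CLI's actual wiring (assumptions: a Language value, a populated ParseFileOptions in `opts`, and placeholder file name):

    let mut parser = Parser::new();
    parser.set_language(&language)?;
    let source_code = std::fs::read("example.src")?;
    let tree = parser.parse(&source_code, None).unwrap();
    let mut cursor = tree.walk();
    // `Vec<u8>` implements `std::io::Write`, so the tree can be rendered into memory
    // (handy in tests) just as easily as into a locked stdout.
    let mut buf: Vec<u8> = Vec::new();
    render_cst(&source_code, &tree, &mut cursor, &opts, &mut buf)?;
    print!("{}", String::from_utf8_lossy(&buf));
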
crates/cli/src/playground.html (new file, 481 lines)
|
|
@ -0,0 +1,481 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8">
|
||||||
|
<title>tree-sitter THE_LANGUAGE_NAME</title>
|
||||||
|
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.css">
|
||||||
|
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.css">
|
||||||
|
<link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-32x32.png"
|
||||||
|
sizes="32x32" />
|
||||||
|
<link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-16x16.png"
|
||||||
|
sizes="16x16" />
|
||||||
|
<style>
|
||||||
|
/* Base Variables */
|
||||||
|
:root {
|
||||||
|
--light-bg: #f9f9f9;
|
||||||
|
--light-border: #e0e0e0;
|
||||||
|
--light-text: #333;
|
||||||
|
--light-hover-border: #c1c1c1;
|
||||||
|
--light-scrollbar-track: #f1f1f1;
|
||||||
|
--light-scrollbar-thumb: #c1c1c1;
|
||||||
|
--light-scrollbar-thumb-hover: #a8a8a8;
|
||||||
|
--light-tree-row-bg: #e3f2fd;
|
||||||
|
|
||||||
|
--dark-bg: #1d1f21;
|
||||||
|
--dark-border: #2d2d2d;
|
||||||
|
--dark-text: #c5c8c6;
|
||||||
|
--dark-panel-bg: #252526;
|
||||||
|
--dark-code-bg: #1e1e1e;
|
||||||
|
--dark-scrollbar-track: #25282c;
|
||||||
|
--dark-scrollbar-thumb: #4a4d51;
|
||||||
|
--dark-scrollbar-thumb-hover: #5a5d61;
|
||||||
|
--dark-tree-row-bg: #373737;
|
||||||
|
|
||||||
|
--primary-color: #0550ae;
|
||||||
|
--primary-color-alpha: rgba(5, 80, 174, 0.1);
|
||||||
|
--primary-color-alpha-dark: rgba(121, 192, 255, 0.1);
|
||||||
|
--selection-color: rgba(39, 95, 255, 0.3);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Theme Colors */
|
||||||
|
[data-theme="dark"] {
|
||||||
|
--bg-color: var(--dark-bg);
|
||||||
|
--border-color: var(--dark-border);
|
||||||
|
--text-color: var(--dark-text);
|
||||||
|
--panel-bg: var(--dark-panel-bg);
|
||||||
|
--code-bg: var(--dark-code-bg);
|
||||||
|
--tree-row-bg: var(--dark-tree-row-bg);
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-theme="light"] {
|
||||||
|
--bg-color: var(--light-bg);
|
||||||
|
--border-color: var(--light-border);
|
||||||
|
--text-color: var(--light-text);
|
||||||
|
--panel-bg: white;
|
||||||
|
--code-bg: white;
|
||||||
|
--tree-row-bg: var(--light-tree-row-bg);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Base Styles */
|
||||||
|
body {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
|
||||||
|
background-color: var(--bg-color);
|
||||||
|
color: var(--text-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Layout */
|
||||||
|
#playground-container {
|
||||||
|
width: 100%;
|
||||||
|
height: 100vh;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
background-color: var(--bg-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
header {
|
||||||
|
padding: 16px 24px;
|
||||||
|
border-bottom: 1px solid var(--border-color);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 20px;
|
||||||
|
background-color: var(--panel-bg);
|
||||||
|
font-size: 14px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.header-item {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.language-name,
|
||||||
|
#language-version {
|
||||||
|
font-weight: 600;
|
||||||
|
}
|
||||||
|
|
||||||
|
main {
|
||||||
|
flex: 1;
|
||||||
|
display: flex;
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
#input-pane {
|
||||||
|
width: 50%;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
border-right: 1px solid var(--border-color);
|
||||||
|
background-color: var(--panel-bg);
|
||||||
|
overflow: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
#code-container {
|
||||||
|
flex: 1;
|
||||||
|
min-height: 0;
|
||||||
|
position: relative;
|
||||||
|
border-bottom: 1px solid var(--border-color);
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
#query-container:not([style*="visibility: hidden"]) {
|
||||||
|
flex: 1;
|
||||||
|
min-height: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
#query-container .panel-header {
|
||||||
|
flex: 0 0 auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
#query-container .CodeMirror {
|
||||||
|
flex: 1;
|
||||||
|
position: relative;
|
||||||
|
min-height: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container-scroll {
|
||||||
|
width: 50%;
|
||||||
|
overflow: auto;
|
||||||
|
background-color: var(--panel-bg);
|
||||||
|
padding: 0;
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container {
|
||||||
|
font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
|
||||||
|
line-height: 1.5;
|
||||||
|
margin: 0;
|
||||||
|
padding: 16px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.panel-header {
|
||||||
|
padding: 8px 16px;
|
||||||
|
font-weight: 600;
|
||||||
|
font-size: 14px;
|
||||||
|
border-bottom: 1px solid var(--border-color);
|
||||||
|
background-color: var(--panel-bg);
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.CodeMirror {
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
right: 0;
|
||||||
|
bottom: 0;
|
||||||
|
height: 100%;
|
||||||
|
font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
|
||||||
|
font-size: 14px;
|
||||||
|
line-height: 1.6;
|
||||||
|
background-color: var(--code-bg) !important;
|
||||||
|
color: var(--text-color) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.query-error {
|
||||||
|
text-decoration: underline red dashed;
|
||||||
|
-webkit-text-decoration: underline red dashed;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Scrollbars */
|
||||||
|
::-webkit-scrollbar {
|
||||||
|
width: 8px;
|
||||||
|
height: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-track {
|
||||||
|
border-radius: 4px;
|
||||||
|
background: var(--light-scrollbar-track);
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb {
|
||||||
|
border-radius: 4px;
|
||||||
|
background: var(--light-scrollbar-thumb);
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb:hover {
|
||||||
|
background: var(--light-scrollbar-thumb-hover);
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-theme="dark"] {
|
||||||
|
::-webkit-scrollbar-track {
|
||||||
|
background: var(--dark-scrollbar-track) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb {
|
||||||
|
background: var(--dark-scrollbar-thumb) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
::-webkit-scrollbar-thumb:hover {
|
||||||
|
background: var(--dark-scrollbar-thumb-hover) !important;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Theme Toggle */
|
||||||
|
.theme-toggle {
|
||||||
|
background: none;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
border-radius: 4px;
|
||||||
|
padding: 6px;
|
||||||
|
cursor: pointer;
|
||||||
|
color: var(--text-color);
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-toggle:hover {
|
||||||
|
background-color: var(--primary-color-alpha);
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-theme="light"] .moon-icon,
|
||||||
|
[data-theme="dark"] .sun-icon {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Form Elements */
|
||||||
|
input[type="checkbox"] {
|
||||||
|
margin-right: 6px;
|
||||||
|
vertical-align: middle;
|
||||||
|
}
|
||||||
|
|
||||||
|
label {
|
||||||
|
font-size: 14px;
|
||||||
|
margin-right: 16px;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a {
|
||||||
|
cursor: pointer;
|
||||||
|
text-decoration: none;
|
||||||
|
color: #040404;
|
||||||
|
padding: 2px;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a:hover {
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a.node-link.named {
|
||||||
|
color: #0550ae;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a.node-link.anonymous {
|
||||||
|
color: #116329;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a.node-link.anonymous:before {
|
||||||
|
content: '"';
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a.node-link.anonymous:after {
|
||||||
|
content: '"';
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a.node-link.error {
|
||||||
|
color: #cf222e;
|
||||||
|
}
|
||||||
|
|
||||||
|
#output-container a.highlighted {
|
||||||
|
background-color: #cae2ff;
|
||||||
|
color: red;
|
||||||
|
border-radius: 3px;
|
||||||
|
text-decoration: underline;
|
||||||
|
}
|
||||||
|
|
||||||
|
#copy-button {
|
||||||
|
background: none;
|
||||||
|
border: 1px solid var(--border-color);
|
||||||
|
border-radius: 4px;
|
||||||
|
padding: 6px;
|
||||||
|
cursor: pointer;
|
||||||
|
color: var(--text-color);
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
margin-left: 8px;
|
||||||
|
}
|
||||||
|
|
||||||
|
#copy-button:hover {
|
||||||
|
background-color: var(--primary-color-alpha);
|
||||||
|
}
|
||||||
|
|
||||||
|
#copy-button:focus {
|
||||||
|
outline: none;
|
||||||
|
border-color: var(--primary-color);
|
||||||
|
box-shadow: 0 0 0 2px var(--primary-color-alpha);
|
||||||
|
}
|
||||||
|
|
||||||
|
.toast {
|
||||||
|
position: fixed;
|
||||||
|
bottom: 20px;
|
||||||
|
right: 20px;
|
||||||
|
background-color: var(--light-text);
|
||||||
|
color: white;
|
||||||
|
padding: 12px 16px;
|
||||||
|
border-radius: 6px;
|
||||||
|
box-shadow: 0 4px 12px rgba(0, 0, 0, 0.3);
|
||||||
|
font-size: 14px;
|
||||||
|
font-weight: 500;
|
||||||
|
opacity: 0;
|
||||||
|
transform: translateY(20px);
|
||||||
|
transition: all 0.3s ease;
|
||||||
|
z-index: 1000;
|
||||||
|
pointer-events: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
.toast.show {
|
||||||
|
opacity: 1;
|
||||||
|
transform: translateY(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Dark Theme Node Colors */
|
||||||
|
[data-theme="dark"] {
|
||||||
|
& #output-container a {
|
||||||
|
color: #d4d4d4;
|
||||||
|
}
|
||||||
|
|
||||||
|
& #output-container a.node-link.named {
|
||||||
|
color: #79c0ff;
|
||||||
|
}
|
||||||
|
|
||||||
|
& #output-container a.node-link.anonymous {
|
||||||
|
color: #7ee787;
|
||||||
|
}
|
||||||
|
|
||||||
|
& #output-container a.node-link.error {
|
||||||
|
color: #ff7b72;
|
||||||
|
}
|
||||||
|
|
||||||
|
& #output-container a.highlighted {
|
||||||
|
background-color: #656669;
|
||||||
|
color: red;
|
||||||
|
}
|
||||||
|
|
||||||
|
& .CodeMirror {
|
||||||
|
background-color: var(--dark-code-bg) !important;
|
||||||
|
color: var(--dark-text) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
& .CodeMirror-gutters {
|
||||||
|
background-color: var(--dark-panel-bg) !important;
|
||||||
|
border-color: var(--dark-border) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
& .CodeMirror-cursor {
|
||||||
|
border-color: var(--dark-text) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
& .CodeMirror-selected {
|
||||||
|
background-color: rgba(255, 255, 255, 0.1) !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
& .toast {
|
||||||
|
background-color: var(--dark-bg);
|
||||||
|
color: var(--dark-text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.tree-row:has(.highlighted) {
|
||||||
|
background-color: var(--tree-row-bg);
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div id="playground-container" style="visibility: hidden;">
|
||||||
|
<header>
|
||||||
|
<div class="header-item">
|
||||||
|
<span class="language-name">Language: THE_LANGUAGE_NAME</span>
|
||||||
|
<span id="language-version"></span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="logging-checkbox" type="checkbox">
|
||||||
|
<label for="logging-checkbox">log</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="anonymous-nodes-checkbox" type="checkbox">
|
||||||
|
<label for="anonymous-nodes-checkbox">show anonymous nodes</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="query-checkbox" type="checkbox">
|
||||||
|
<label for="query-checkbox">query</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<input id="accessibility-checkbox" type="checkbox">
|
||||||
|
<label for="accessibility-checkbox">accessibility</label>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<label for="update-time">parse time: </label>
|
||||||
|
<span id="update-time"></span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<a href="https://tree-sitter.github.io/tree-sitter/7-playground.html#about">(?)</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<select id="language-select" style="display: none;">
|
||||||
|
<option value="parser">Parser</option>
|
||||||
|
</select>
|
||||||
|
|
||||||
|
<div class="header-item">
|
||||||
|
<button id="theme-toggle" class="theme-toggle" aria-label="Toggle theme">
|
||||||
|
<svg class="sun-icon" viewBox="0 0 24 24" width="16" height="16">
|
||||||
|
<path fill="currentColor"
|
||||||
|
d="M12 17.5a5.5 5.5 0 1 0 0-11 5.5 5.5 0 0 0 0 11zm0 1.5a7 7 0 1 1 0-14 7 7 0 0 1 0 14zm0-16a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0V4a1 1 0 0 1 1-1zm0 15a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0v-2a1 1 0 0 1 1-1zm9-9a1 1 0 0 1-1 1h-2a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1zM4 12a1 1 0 0 1-1 1H1a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1z" />
|
||||||
|
</svg>
|
||||||
|
<svg class="moon-icon" viewBox="0 0 24 24" width="16" height="16">
|
||||||
|
<path fill="currentColor"
|
||||||
|
d="M12.1 22c-5.5 0-10-4.5-10-10s4.5-10 10-10c.2 0 .3 0 .5.1-1.3 1.4-2 3.2-2 5.2 0 4.1 3.4 7.5 7.5 7.5 2 0 3.8-.7 5.2-2 .1.2.1.3.1.5 0 5.4-4.5 9.7-10 9.7z" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
|
||||||
|
<main>
|
||||||
|
<div id="input-pane">
|
||||||
|
<div class="panel-header">Code</div>
|
||||||
|
<div id="code-container">
|
||||||
|
<textarea id="code-input"></textarea>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="query-container" style="visibility: hidden; position: absolute;">
|
||||||
|
<div class="panel-header">Query</div>
|
||||||
|
<textarea id="query-input"></textarea>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div id="output-container-scroll">
|
||||||
|
<div class="panel-header">
|
||||||
|
Tree
|
||||||
|
<button type="button" id="copy-button" class="theme-toggle" aria-label="Copy tree">
|
||||||
|
<svg width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2"
|
||||||
|
stroke-linecap="round" stroke-linejoin="round">
|
||||||
|
<rect width="14" height="14" x="8" y="8" rx="2" ry="2" />
|
||||||
|
<path d="M4 16c-1.1 0-2-.9-2-2V4c0-1.1.9-2 2-2h10c1.1 0 2 .9 2 2" />
|
||||||
|
</svg>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<pre id="output-container" class="highlight"></pre>
|
||||||
|
</div>
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.js"></script>
|
||||||
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.js"></script>
|
||||||
|
|
||||||
|
<script>LANGUAGE_BASE_URL = "";</script>
|
||||||
|
<script type="module" src="playground.js"></script>
|
||||||
|
<script type="module">
|
||||||
|
import * as TreeSitter from './web-tree-sitter.js';
|
||||||
|
window.TreeSitter = TreeSitter;
|
||||||
|
setTimeout(() => window.initializePlayground({local: true}), 1)
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
|
@ -7,6 +7,7 @@ use std::{
};

use anyhow::{anyhow, Context, Result};
+use log::{error, info};
use tiny_http::{Header, Response, Server};

use super::wasm;

@ -18,7 +19,7 @@ macro_rules! optional_resource {
        if let Some(tree_sitter_dir) = tree_sitter_dir {
            Cow::Owned(fs::read(tree_sitter_dir.join($path)).unwrap())
        } else {
-           Cow::Borrowed(include_bytes!(concat!("../../", $path)))
+           Cow::Borrowed(include_bytes!(concat!("../../../", $path)))
        }
    }

@ -34,25 +35,91 @@ macro_rules! optional_resource {
}

optional_resource!(get_playground_js, "docs/src/assets/js/playground.js");
-optional_resource!(get_lib_js, "lib/binding_web/tree-sitter.js");
-optional_resource!(get_lib_wasm, "lib/binding_web/tree-sitter.wasm");
+optional_resource!(get_lib_js, "lib/binding_web/web-tree-sitter.js");
+optional_resource!(get_lib_wasm, "lib/binding_web/web-tree-sitter.wasm");

fn get_main_html(tree_sitter_dir: Option<&Path>) -> Cow<'static, [u8]> {
    tree_sitter_dir.map_or(
        Cow::Borrowed(include_bytes!("playground.html")),
        |tree_sitter_dir| {
-           Cow::Owned(fs::read(tree_sitter_dir.join("cli/src/playground.html")).unwrap())
+           Cow::Owned(fs::read(tree_sitter_dir.join("crates/cli/src/playground.html")).unwrap())
        },
    )
}

+pub fn export(grammar_path: &Path, export_path: &Path) -> Result<()> {
+    let (grammar_name, language_wasm) = wasm::load_language_wasm_file(grammar_path)?;
+
+    fs::create_dir_all(export_path).with_context(|| {
+        format!(
+            "Failed to create export directory: {}",
+            export_path.display()
+        )
+    })?;
+
+    let tree_sitter_dir = env::var("TREE_SITTER_BASE_DIR").map(PathBuf::from).ok();
+
+    let playground_js = get_playground_js(tree_sitter_dir.as_deref());
+    let lib_js = get_lib_js(tree_sitter_dir.as_deref());
+    let lib_wasm = get_lib_wasm(tree_sitter_dir.as_deref());
+
+    let has_local_playground_js = !playground_js.is_empty();
+    let has_local_lib_js = !lib_js.is_empty();
+    let has_local_lib_wasm = !lib_wasm.is_empty();
+
+    let mut main_html = str::from_utf8(&get_main_html(tree_sitter_dir.as_deref()))
+        .unwrap()
+        .replace("THE_LANGUAGE_NAME", &grammar_name);
+
+    if !has_local_playground_js {
+        main_html = main_html.replace(
+            r#"<script type="module" src="playground.js"></script>"#,
+            r#"<script type="module" src="https://tree-sitter.github.io/tree-sitter/assets/js/playground.js"></script>"#
+        );
+    }
+    if !has_local_lib_js {
+        main_html = main_html.replace(
+            "import * as TreeSitter from './web-tree-sitter.js';",
+            "import * as TreeSitter from 'https://tree-sitter.github.io/web-tree-sitter.js';",
+        );
+    }
+
+    fs::write(export_path.join("index.html"), main_html.as_bytes())
+        .with_context(|| "Failed to write index.html")?;
+
+    fs::write(export_path.join("tree-sitter-parser.wasm"), language_wasm)
+        .with_context(|| "Failed to write parser wasm file")?;
+
+    if has_local_playground_js {
+        fs::write(export_path.join("playground.js"), playground_js)
+            .with_context(|| "Failed to write playground.js")?;
+    }
+
+    if has_local_lib_js {
+        fs::write(export_path.join("web-tree-sitter.js"), lib_js)
+            .with_context(|| "Failed to write web-tree-sitter.js")?;
+    }
+
+    if has_local_lib_wasm {
+        fs::write(export_path.join("web-tree-sitter.wasm"), lib_wasm)
+            .with_context(|| "Failed to write web-tree-sitter.wasm")?;
+    }
+
+    println!(
+        "Exported playground to {}",
+        export_path.canonicalize()?.display()
+    );
+
+    Ok(())
+}
+
pub fn serve(grammar_path: &Path, open_in_browser: bool) -> Result<()> {
    let server = get_server()?;
    let (grammar_name, language_wasm) = wasm::load_language_wasm_file(grammar_path)?;
    let url = format!("http://{}", server.server_addr());
-   println!("Started playground on: {url}");
+   info!("Started playground on: {url}");
    if open_in_browser && webbrowser::open(&url).is_err() {
-       eprintln!("Failed to open '{url}' in a web browser");
+       error!("Failed to open '{url}' in a web browser");
    }

    let tree_sitter_dir = env::var("TREE_SITTER_BASE_DIR").map(PathBuf::from).ok();

@ -79,16 +146,16 @@ pub fn serve(grammar_path: &Path, open_in_browser: bool) -> Result<()> {
                response(&playground_js, &js_header)
            }
        }
-       "/tree-sitter.js" => {
+       "/web-tree-sitter.js" => {
            if lib_js.is_empty() {
-               redirect("https://tree-sitter.github.io/tree-sitter.js")
+               redirect("https://tree-sitter.github.io/web-tree-sitter.js")
            } else {
                response(&lib_js, &js_header)
            }
        }
-       "/tree-sitter.wasm" => {
+       "/web-tree-sitter.wasm" => {
            if lib_wasm.is_empty() {
-               redirect("https://tree-sitter.github.io/tree-sitter.wasm")
+               redirect("https://tree-sitter.github.io/web-tree-sitter.wasm")
            } else {
                response(&lib_wasm, &wasm_header)
            }

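The new export function above writes a self-contained playground to disk: index.html (with THE_LANGUAGE_NAME substituted), the grammar's wasm as tree-sitter-parser.wasm, and whichever of playground.js / web-tree-sitter.js / web-tree-sitter.wasm exist locally, rewriting the HTML to the hosted assets otherwise. A rough sketch of invoking it (hypothetical paths; the CLI subcommand that calls this is not shown in this diff, and grammar_path is whatever wasm::load_language_wasm_file expects, as in serve):

    use std::path::Path;

    // Assumed module path; inside the CLI this lives in the playground module.
    playground::export(Path::new("."), Path::new("playground-export"))?;
    // The resulting directory can then be served by any static file server.
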
@ -6,29 +6,35 @@ use std::{
    time::Instant,
};

-use anstyle::AnsiColor;
use anyhow::{Context, Result};
+use log::warn;
use streaming_iterator::StreamingIterator;
use tree_sitter::{Language, Parser, Point, Query, QueryCursor};

use crate::{
    query_testing::{self, to_utf8_point},
-   test::paint,
+   test::{TestInfo, TestOutcome, TestResult, TestSummary},
};

-#[allow(clippy::too_many_arguments)]
+#[derive(Default)]
+pub struct QueryFileOptions {
+    pub ordered_captures: bool,
+    pub byte_range: Option<Range<usize>>,
+    pub point_range: Option<Range<Point>>,
+    pub containing_byte_range: Option<Range<usize>>,
+    pub containing_point_range: Option<Range<Point>>,
+    pub quiet: bool,
+    pub print_time: bool,
+    pub stdin: bool,
+}
+
pub fn query_file_at_path(
    language: &Language,
    path: &Path,
    name: &str,
    query_path: &Path,
-   ordered_captures: bool,
-   byte_range: Option<Range<usize>>,
-   point_range: Option<Range<Point>>,
-   should_test: bool,
-   quiet: bool,
-   print_time: bool,
-   stdin: bool,
+   opts: &QueryFileOptions,
+   test_summary: Option<&mut TestSummary>,
) -> Result<()> {
    let stdout = io::stdout();
    let mut stdout = stdout.lock();

@ -38,19 +44,26 @@ pub fn query_file_at_path(
    let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;

    let mut query_cursor = QueryCursor::new();
-   if let Some(range) = byte_range {
-       query_cursor.set_byte_range(range);
+   if let Some(ref range) = opts.byte_range {
+       query_cursor.set_byte_range(range.clone());
    }
-   if let Some(range) = point_range {
-       query_cursor.set_point_range(range);
+   if let Some(ref range) = opts.point_range {
+       query_cursor.set_point_range(range.clone());
+   }
+   if let Some(ref range) = opts.containing_byte_range {
+       query_cursor.set_containing_byte_range(range.clone());
+   }
+   if let Some(ref range) = opts.containing_point_range {
+       query_cursor.set_containing_point_range(range.clone());
    }

    let mut parser = Parser::new();
    parser.set_language(language)?;

    let mut results = Vec::new();
+   let should_test = test_summary.is_some();
+
-   if !should_test && !stdin {
+   if !should_test && !opts.stdin {
        writeln!(&mut stdout, "{name}")?;
    }

@ -59,12 +72,12 @@ pub fn query_file_at_path(
    let tree = parser.parse(&source_code, None).unwrap();

    let start = Instant::now();
-   if ordered_captures {
+   if opts.ordered_captures {
        let mut captures = query_cursor.captures(&query, tree.root_node(), source_code.as_slice());
        while let Some((mat, capture_index)) = captures.next() {
            let capture = mat.captures[*capture_index];
            let capture_name = &query.capture_names()[capture.index as usize];
-           if !quiet && !should_test {
+           if !opts.quiet && !should_test {
                writeln!(
                    &mut stdout,
                    " pattern: {:>2}, capture: {} - {capture_name}, start: {}, end: {}, text: `{}`",

@ -75,23 +88,25 @@ pub fn query_file_at_path(
                    capture.node.utf8_text(&source_code).unwrap_or("")
                )?;
            }
+           if should_test {
                results.push(query_testing::CaptureInfo {
                    name: (*capture_name).to_string(),
                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
                    end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
                });
            }
+           }
    } else {
        let mut matches = query_cursor.matches(&query, tree.root_node(), source_code.as_slice());
        while let Some(m) = matches.next() {
-           if !quiet && !should_test {
+           if !opts.quiet && !should_test {
                writeln!(&mut stdout, " pattern: {}", m.pattern_index)?;
            }
            for capture in m.captures {
                let start = capture.node.start_position();
                let end = capture.node.end_position();
                let capture_name = &query.capture_names()[capture.index as usize];
-               if !quiet && !should_test {
+               if !opts.quiet && !should_test {
                    if end.row == start.row {
                        writeln!(
                            &mut stdout,

@ -106,6 +121,7 @@ pub fn query_file_at_path(
                        )?;
                    }
                }
+               if should_test {
                results.push(query_testing::CaptureInfo {
                    name: (*capture_name).to_string(),
                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),

@ -114,33 +130,43 @@ pub fn query_file_at_path(
                }
            }
        }
+       }
    if query_cursor.did_exceed_match_limit() {
-       writeln!(
-           &mut stdout,
-           " WARNING: Query exceeded maximum number of in-progress captures!"
-       )?;
+       warn!("Query exceeded maximum number of in-progress captures!");
    }
    if should_test {
-       let path_name = if stdin {
+       let path_name = if opts.stdin {
            "stdin"
        } else {
            Path::new(&path).file_name().unwrap().to_str().unwrap()
        };
+       // Invariant: `test_summary` will always be `Some` when `should_test` is true
+       let test_summary = test_summary.unwrap();
        match query_testing::assert_expected_captures(&results, path, &mut parser, language) {
            Ok(assertion_count) => {
-               println!(
-                   " ✓ {} ({} assertions)",
-                   paint(Some(AnsiColor::Green), path_name),
-                   assertion_count
-               );
+               test_summary.query_results.add_case(TestResult {
+                   name: path_name.to_string(),
+                   info: TestInfo::AssertionTest {
+                       outcome: TestOutcome::AssertionPassed { assertion_count },
+                       test_num: test_summary.test_num,
+                   },
+               });
            }
            Err(e) => {
-               println!(" ✗ {}", paint(Some(AnsiColor::Red), path_name));
+               test_summary.query_results.add_case(TestResult {
+                   name: path_name.to_string(),
+                   info: TestInfo::AssertionTest {
+                       outcome: TestOutcome::AssertionFailed {
+                           error: e.to_string(),
+                       },
+                       test_num: test_summary.test_num,
+                   },
+               });
                return Err(e);
            }
        }
    }
-   if print_time {
+   if opts.print_time {
        writeln!(&mut stdout, "{:?}", start.elapsed())?;
    }

@ -237,8 +237,8 @@ pub fn assert_expected_captures(
        return Err(anyhow!(
            "Assertion failed: at {}, found {}, expected {}",
            found.start,
+           found.name,
            assertion.expected_capture_name,
-           found.name
        ));
    }
} else {

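With the long argument list collapsed into QueryFileOptions (which derives Default), callers set only the fields they need and pass a TestSummary only when capture assertions should be recorded rather than printed. A minimal sketch (the language, paths, and source_len values are placeholders, not taken from this diff):

    let opts = QueryFileOptions {
        ordered_captures: true,
        byte_range: Some(0..source_len),
        ..QueryFileOptions::default()
    };
    // `None` for the test summary: print matches instead of recording assertion results.
    query_file_at_path(&language, path, name, query_path, &opts, None)?;
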
@ -3,11 +3,11 @@ root = true
[*]
charset = utf-8

-[*.{json,toml,yml,gyp}]
+[*.{json,toml,yml,gyp,xml}]
indent_style = space
indent_size = 2

-[*.js]
+[*.{js,ts}]
indent_style = space
indent_size = 2

@ -31,6 +31,10 @@ indent_size = 4
indent_style = space
indent_size = 4

+[*.java]
+indent_style = space
+indent_size = 4
+
[*.go]
indent_style = tab
indent_size = 8
crates/cli/src/templates/__init__.py (new file, 43 lines)
|
|
@ -0,0 +1,43 @@
|
||||||
|
"""PARSER_DESCRIPTION"""
|
||||||
|
|
||||||
|
from importlib.resources import files as _files
|
||||||
|
|
||||||
|
from ._binding import language
|
||||||
|
|
||||||
|
|
||||||
|
def _get_query(name, file):
|
||||||
|
try:
|
||||||
|
query = _files(f"{__package__}") / file
|
||||||
|
globals()[name] = query.read_text()
|
||||||
|
except FileNotFoundError:
|
||||||
|
globals()[name] = None
|
||||||
|
return globals()[name]
|
||||||
|
|
||||||
|
|
||||||
|
def __getattr__(name):
|
||||||
|
if name == "HIGHLIGHTS_QUERY":
|
||||||
|
return _get_query("HIGHLIGHTS_QUERY", "HIGHLIGHTS_QUERY_PATH")
|
||||||
|
if name == "INJECTIONS_QUERY":
|
||||||
|
return _get_query("INJECTIONS_QUERY", "INJECTIONS_QUERY_PATH")
|
||||||
|
if name == "LOCALS_QUERY":
|
||||||
|
return _get_query("LOCALS_QUERY", "LOCALS_QUERY_PATH")
|
||||||
|
if name == "TAGS_QUERY":
|
||||||
|
return _get_query("TAGS_QUERY", "TAGS_QUERY_PATH")
|
||||||
|
|
||||||
|
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"language",
|
||||||
|
"HIGHLIGHTS_QUERY",
|
||||||
|
"INJECTIONS_QUERY",
|
||||||
|
"LOCALS_QUERY",
|
||||||
|
"TAGS_QUERY",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def __dir__():
|
||||||
|
return sorted(__all__ + [
|
||||||
|
"__all__", "__builtins__", "__cached__", "__doc__", "__file__",
|
||||||
|
"__loader__", "__name__", "__package__", "__path__", "__spec__",
|
||||||
|
])
|
||||||
crates/cli/src/templates/__init__.pyi (new file, 17 lines)
|
|
@ -0,0 +1,17 @@
|
||||||
|
from typing import Final
|
||||||
|
from typing_extensions import CapsuleType
|
||||||
|
|
||||||
|
HIGHLIGHTS_QUERY: Final[str] | None
|
||||||
|
"""The syntax highlighting query for this grammar."""
|
||||||
|
|
||||||
|
INJECTIONS_QUERY: Final[str] | None
|
||||||
|
"""The language injection query for this grammar."""
|
||||||
|
|
||||||
|
LOCALS_QUERY: Final[str] | None
|
||||||
|
"""The local variable query for this grammar."""
|
||||||
|
|
||||||
|
TAGS_QUERY: Final[str] | None
|
||||||
|
"""The symbol tagging query for this grammar."""
|
||||||
|
|
||||||
|
def language() -> CapsuleType:
|
||||||
|
"""The tree-sitter language function for this grammar."""
|
||||||
crates/cli/src/templates/binding.java (new file, 65 lines)
|
|
@ -0,0 +1,65 @@
|
||||||
|
package PARSER_NS_CLEANED.jtreesitter.LOWER_PARSER_NAME;
|
||||||
|
|
||||||
|
import java.lang.foreign.*;
|
||||||
|
|
||||||
|
public final class PARSER_CLASS_NAME {
|
||||||
|
private static final ValueLayout VOID_PTR =
|
||||||
|
ValueLayout.ADDRESS.withTargetLayout(MemoryLayout.sequenceLayout(Long.MAX_VALUE, ValueLayout.JAVA_BYTE));
|
||||||
|
private static final FunctionDescriptor FUNC_DESC = FunctionDescriptor.of(VOID_PTR);
|
||||||
|
private static final Linker LINKER = Linker.nativeLinker();
|
||||||
|
private static final PARSER_CLASS_NAME INSTANCE = new PARSER_CLASS_NAME();
|
||||||
|
|
||||||
|
private final Arena arena = Arena.ofAuto();
|
||||||
|
private volatile SymbolLookup lookup = null;
|
||||||
|
|
||||||
|
private PARSER_CLASS_NAME() {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the tree-sitter language for this grammar.
|
||||||
|
*/
|
||||||
|
public static MemorySegment language() {
|
||||||
|
if (INSTANCE.lookup == null)
|
||||||
|
INSTANCE.lookup = INSTANCE.findLibrary();
|
||||||
|
return language(INSTANCE.lookup);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the tree-sitter language for this grammar.
|
||||||
|
*
|
||||||
|
* <strong>The {@linkplain Arena} used in the {@code lookup}
|
||||||
|
* must not be closed while the language is being used.</strong>
|
||||||
|
*/
|
||||||
|
public static MemorySegment language(SymbolLookup lookup) {
|
||||||
|
return call(lookup, "tree_sitter_PARSER_NAME");
|
||||||
|
}
|
||||||
|
|
||||||
|
private SymbolLookup findLibrary() {
|
||||||
|
try {
|
||||||
|
var library = System.mapLibraryName("tree-sitter-KEBAB_PARSER_NAME");
|
||||||
|
return SymbolLookup.libraryLookup(library, arena);
|
||||||
|
} catch (IllegalArgumentException ex1) {
|
||||||
|
try {
|
||||||
|
System.loadLibrary("tree-sitter-KEBAB_PARSER_NAME");
|
||||||
|
return SymbolLookup.loaderLookup();
|
||||||
|
} catch (UnsatisfiedLinkError ex2) {
|
||||||
|
ex1.addSuppressed(ex2);
|
||||||
|
throw ex1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static UnsatisfiedLinkError unresolved(String name) {
|
||||||
|
return new UnsatisfiedLinkError("Unresolved symbol: %s".formatted(name));
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("SameParameterValue")
|
||||||
|
private static MemorySegment call(SymbolLookup lookup, String name) throws UnsatisfiedLinkError {
|
||||||
|
var address = lookup.find(name).orElseThrow(() -> unresolved(name));
|
||||||
|
try {
|
||||||
|
var function = LINKER.downcallHandle(address, FUNC_DESC);
|
||||||
|
return (MemorySegment) function.invokeExact();
|
||||||
|
} catch (Throwable e) {
|
||||||
|
throw new RuntimeException("Call to %s failed".formatted(name), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
crates/cli/src/templates/binding_test.js (new file, 11 lines)
|
|
@ -0,0 +1,11 @@
|
||||||
|
import assert from "node:assert";
|
||||||
|
import { test } from "node:test";
|
||||||
|
import Parser from "tree-sitter";
|
||||||
|
|
||||||
|
test("can load grammar", () => {
|
||||||
|
const parser = new Parser();
|
||||||
|
assert.doesNotReject(async () => {
|
||||||
|
const { default: language } = await import("./index.js");
|
||||||
|
parser.setLanguage(language);
|
||||||
|
});
|
||||||
|
});
|
||||||
crates/cli/src/templates/build.rs (new file, 56 lines)
|
|
@ -0,0 +1,56 @@
|
||||||
|
fn main() {
|
||||||
|
let src_dir = std::path::Path::new("src");
|
||||||
|
|
||||||
|
let mut c_config = cc::Build::new();
|
||||||
|
c_config.std("c11").include(src_dir);
|
||||||
|
|
||||||
|
#[cfg(target_env = "msvc")]
|
||||||
|
c_config.flag("-utf-8");
|
||||||
|
|
||||||
|
if std::env::var("TARGET").unwrap() == "wasm32-unknown-unknown" {
|
||||||
|
let Ok(wasm_headers) = std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS") else {
|
||||||
|
panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS must be set by the language crate");
|
||||||
|
};
|
||||||
|
let Ok(wasm_src) =
|
||||||
|
std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_SRC").map(std::path::PathBuf::from)
|
||||||
|
else {
|
||||||
|
panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_SRC must be set by the language crate");
|
||||||
|
};
|
||||||
|
|
||||||
|
c_config.include(&wasm_headers);
|
||||||
|
c_config.files([
|
||||||
|
wasm_src.join("stdio.c"),
|
||||||
|
wasm_src.join("stdlib.c"),
|
||||||
|
wasm_src.join("string.c"),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let parser_path = src_dir.join("parser.c");
|
||||||
|
c_config.file(&parser_path);
|
||||||
|
println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());
|
||||||
|
|
||||||
|
let scanner_path = src_dir.join("scanner.c");
|
||||||
|
if scanner_path.exists() {
|
||||||
|
c_config.file(&scanner_path);
|
||||||
|
println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
|
||||||
|
|
||||||
|
println!("cargo:rustc-check-cfg=cfg(with_highlights_query)");
|
||||||
|
if !"HIGHLIGHTS_QUERY_PATH".is_empty() && std::path::Path::new("HIGHLIGHTS_QUERY_PATH").exists() {
|
||||||
|
println!("cargo:rustc-cfg=with_highlights_query");
|
||||||
|
}
|
||||||
|
println!("cargo:rustc-check-cfg=cfg(with_injections_query)");
|
||||||
|
if !"INJECTIONS_QUERY_PATH".is_empty() && std::path::Path::new("INJECTIONS_QUERY_PATH").exists() {
|
||||||
|
println!("cargo:rustc-cfg=with_injections_query");
|
||||||
|
}
|
||||||
|
println!("cargo:rustc-check-cfg=cfg(with_locals_query)");
|
||||||
|
if !"LOCALS_QUERY_PATH".is_empty() && std::path::Path::new("LOCALS_QUERY_PATH").exists() {
|
||||||
|
println!("cargo:rustc-cfg=with_locals_query");
|
||||||
|
}
|
||||||
|
println!("cargo:rustc-check-cfg=cfg(with_tags_query)");
|
||||||
|
if !"TAGS_QUERY_PATH".is_empty() && std::path::Path::new("TAGS_QUERY_PATH").exists() {
|
||||||
|
println!("cargo:rustc-cfg=with_tags_query");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
@ -1,5 +1,6 @@
.{
    .name = .tree_sitter_PARSER_NAME,
+   .fingerprint = PARSER_FINGERPRINT,
    .version = "PARSER_VERSION",
    .dependencies = .{
        .tree_sitter = .{

@ -19,7 +19,17 @@ include(GNUInstallDirs)

find_program(TREE_SITTER_CLI tree-sitter DOC "Tree-sitter CLI")
+
+add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
+                          "${CMAKE_CURRENT_SOURCE_DIR}/src/node-types.json"
+                   DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/grammar.js"
+                   COMMAND "${TREE_SITTER_CLI}" generate grammar.js --no-parser
+                   WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
+                   COMMENT "Generating grammar.json")
+
add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
+                   BYPRODUCTS "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/parser.h"
+                              "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/alloc.h"
+                              "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/array.h"
                    DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
                    COMMAND "${TREE_SITTER_CLI}" generate src/grammar.json
                            --abi=${TREE_SITTER_ABI_VERSION}

@ -40,3 +40,7 @@ Package.resolved linguist-generated
bindings/zig/* linguist-generated
build.zig linguist-generated
build.zig.zon linguist-generated
+
+# Java bindings
+pom.xml linguist-generated
+bindings/java/** linguist-generated
Some files were not shown because too many files have changed in this diff.