Compare commits
139 commits
Commit SHA1s:

6739742fb6, d251226a3c, ae8184b8b9, 470ecf8996, 0cdb6bef7b, cd603fa981, b12009a746, 9f9a0bc410, 5d290a2a75, 5808350bfe,
e64e74d5ed, 1a88b26a10, 6c05cdfb0c, aefae11c0d, 630fa52717, eea85f4eff, cd6672701b, f4ca3d95ca, 17e3c7a5c5, dd60d5cff0,
f1288ea5c9, 47ae060966, a1893b4420, 999e041d49, 0d4d854809, 93d793d249, 82486d4b0a, 5d9605a91e, 5293dd683e, 62effdf128,
8e4f21aba0, 5208299bbb, ba7350c7ee, f96d518ebf, d5b82fbbab, a7d8c0cbb2, 24007727d4, 6aa63a7213, eacb95c85d, 6967640571,
4ac2d5d276, 642b56d9af, 0574fcf256, 98de2bc1a8, cd4b6e2ef9, 8caecbc13f, 1b654ae35d, 3bd44afcaa, 8b8199775f, 744e556f7e,
8a3dcc6155, b0afbf3762, 974be3bb30, d861e2bcd9, b9c2d1dc89, 8ca17d1bb1, 3182efeccc, bec7c3272b, e6bfed33ee, 053b264502,
a8f25fa441, f450ce4f6e, 3ff8edf9e8, 6b6040961c, 888f57657d, be8fe690d8, c0b1710f8a, 7d3feeae9a, 3f85f65e3f, df8b62fc50,
14b4708018, dcef0cc0ee, c1a0f48781, f6d17fdb04, 829733a35e, d64b863030, 882aa867eb, de92a9b4c9, 5880df47e2, e92a7803eb,
0d656de98b, b095968dff, d592b16ac0, 320c0865e9, 60635e0729, 120f74723e, 02508d5570, 42e7e9c3e7, 55b9a25c84, 877782a8a4,
0e1f715ef1, f3012a999d, 3072d35ed5, 57e3a7b2ca, 0df2916920, 61c21aa408, 7eb23d9f3c, db2d221ae9, 67cb3cb881, 12a31536e1,
7657cc9d35, 13ff3935ac, 361287fb56, 13d4db8bb4, 419a5a7305, c7b5f89392, d546e28abf, 86e2fd2337, ff255a2354, fe67521b3d,
f02d7e7e33, 6a8676f335, 944386d25f, ef03a3f8fe, 18a5243933, 8444cc3deb, 097c2d4f05, b8f52210f9, ecc787e221, 6188010f53,
70cde4a110, 77363a65c2, 605e580063, a2f2b16acb, 87d778a1c6, e344837e35, bdee2c2dd3, da5926d6f5, b3bc7701cd, 262f1782cc,
00d172bf9f, ae54350c76, 3355825a68, 7d0e029e37, 0f5ccc4aba, 0cf6e7c507, 1dc4804b6e, c5b22a1dc6, 92efd26380
152 changed files with 7283 additions and 3803 deletions
.github/dependabot.yml (5 changes, vendored)

```diff
@@ -14,6 +14,10 @@ updates:
     groups:
       cargo:
         patterns: ["*"]
+    ignore:
+      - dependency-name: "*"
+        update-types: ["version-update:semver-major", "version-update:semver-minor"]
+
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
@@ -28,6 +32,7 @@ updates:
     groups:
       actions:
         patterns: ["*"]
 
   - package-ecosystem: "npm"
+    versioning-strategy: increase
    directories:
```
.github/scripts/wasm_stdlib.js (new file, 25 lines, vendored)

```diff
@@ -0,0 +1,25 @@
+module.exports = async ({ github, context, core }) => {
+  if (context.eventName !== 'pull_request') return;
+
+  const prNumber = context.payload.pull_request.number;
+  const owner = context.repo.owner;
+  const repo = context.repo.repo;
+
+  const { data: files } = await github.rest.pulls.listFiles({
+    owner,
+    repo,
+    pull_number: prNumber
+  });
+
+  const changedFiles = files.map(file => file.filename);
+
+  const wasmStdLibSrc = 'crates/language/wasm/';
+  const dirChanged = changedFiles.some(file => file.startsWith(wasmStdLibSrc));
+
+  if (!dirChanged) return;
+
+  const wasmStdLibHeader = 'lib/src/wasm/wasm-stdlib.h';
+  const requiredChanged = changedFiles.includes(wasmStdLibHeader);
+
+  if (!requiredChanged) core.setFailed(`Changes detected in ${wasmStdLibSrc} but ${wasmStdLibHeader} was not modified.`);
+};
```
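This script backs the `wasm_stdlib.yml` workflow added further below: it fails a pull request that changes the Wasm stdlib sources under `crates/language/wasm/` without also regenerating the checked-in `lib/src/wasm/wasm-stdlib.h` header.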
.github/workflows/backport.yml (4 changes, vendored)

```diff
@@ -14,7 +14,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Create app token
         uses: actions/create-github-app-token@v2
@@ -24,7 +24,7 @@ jobs:
           private-key: ${{ secrets.BACKPORT_KEY }}
 
       - name: Create backport PR
-        uses: korthout/backport-action@v3
+        uses: korthout/backport-action@v4
         with:
           pull_title: "${pull_title}"
           label_pattern: "^ci:backport ([^ ]+)$"
```
.github/workflows/bindgen.yml (2 changes, vendored)

```diff
@@ -16,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Set up stable Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
```
.github/workflows/build.yml (37 changes, vendored)

```diff
@@ -41,7 +41,7 @@ jobs:
           - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-2025 }
           - { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-2025 }
           - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-15 }
-          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-13 }
+          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-15-intel }
           - { platform: wasm32 , target: wasm32-unknown-unknown , os: ubuntu-24.04 }
 
           # Extra features
@@ -68,24 +68,25 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
-      - name: Set up environment
+      - name: Set up cross-compilation
+        if: matrix.cross
         run: |
-          printf 'EMSCRIPTEN_VERSION=%s\n' "$(<crates/loader/emscripten-version)" >> $GITHUB_ENV
+          for target in armv7-unknown-linux-gnueabihf i686-unknown-linux-gnu powerpc64-unknown-linux-gnu; do
+            camel_target=${target//-/_}; target_cc=${target/-unknown/}
+            printf 'CC_%s=%s\n' "$camel_target" "${target_cc/v7/}-gcc"
+            printf 'AR_%s=%s\n' "$camel_target" "${target_cc/v7/}-ar"
+            printf 'CARGO_TARGET_%s_LINKER=%s\n' "${camel_target^^}" "${target_cc/v7/}-gcc"
+          done >> $GITHUB_ENV
+          {
+            printf 'CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER=qemu-arm -L /usr/arm-linux-gnueabihf\n'
+            printf 'CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER=qemu-ppc64 -L /usr/powerpc64-linux-gnu\n'
+          } >> $GITHUB_ENV
 
-          if [[ '${{ matrix.cross }}' == true ]]; then
-            for target in armv7-unknown-linux-gnueabihf i686-unknown-linux-gnu powerpc64-unknown-linux-gnu; do
-              camel_target=${target//-/_}; target_cc=${target/-unknown/}
-              printf 'CC_%s=%s\n' "$camel_target" "${target_cc/v7/}-gcc"
-              printf 'AR_%s=%s\n' "$camel_target" "${target_cc/v7/}-ar"
-              printf 'CARGO_TARGET_%s_LINKER=%s\n' "${camel_target^^}" "${target_cc/v7/}-gcc"
-            done >> $GITHUB_ENV
-            {
-              printf 'CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER=qemu-arm -L /usr/arm-linux-gnueabihf\n'
-              printf 'CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER=qemu-ppc64 -L /usr/powerpc64-linux-gnu\n'
-            } >> $GITHUB_ENV
-          fi
+      - name: Get emscripten version
+        if: contains(matrix.features, 'wasm')
+        run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<crates/loader/emscripten-version)" >> $GITHUB_ENV
 
       - name: Install Emscripten
         if: contains(matrix.features, 'wasm')
@@ -277,7 +278,7 @@ jobs:
 
       - name: Upload CLI artifact
         if: "!matrix.no-run"
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: tree-sitter.${{ matrix.platform }}
           path: target/${{ matrix.target }}/release/tree-sitter${{ contains(matrix.target, 'windows') && '.exe' || '' }}
@@ -286,7 +287,7 @@ jobs:
 
      - name: Upload Wasm artifacts
         if: matrix.platform == 'linux-x64'
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v6
         with:
           name: tree-sitter.wasm
           path: |
```
.github/workflows/ci.yml (7 changes, vendored)

```diff
@@ -9,7 +9,7 @@ on:
       - LICENSE
       - cli/src/templates
   push:
-    # branches: [master]
+    branches: [master]
     paths-ignore:
       - docs/**
       - "**/README.md"
@@ -26,7 +26,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Set up stable Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -44,3 +44,6 @@ jobs:
 
   build:
     uses: ./.github/workflows/build.yml
+
+  check-wasm-stdlib:
+    uses: ./.github/workflows/wasm_stdlib.yml
```
.github/workflows/docs.yml (5 changes, vendored)

```diff
@@ -3,6 +3,7 @@ on:
   push:
     branches: [master]
+    paths: [docs/**]
   workflow_dispatch:
 
 jobs:
   deploy-docs:
@@ -15,7 +16,7 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Set up Rust
         uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -25,7 +26,7 @@ jobs:
           GH_TOKEN: ${{ github.token }}
         run: |
           jq_expr='.assets[] | select(.name | contains("x86_64-unknown-linux-gnu")) | .browser_download_url'
-          url=$(gh api repos/rust-lang/mdbook/releases/latest --jq "$jq_expr")
+          url=$(gh api repos/rust-lang/mdbook/releases/tags/v0.4.52 --jq "$jq_expr")
           mkdir mdbook
           curl -sSL "$url" | tar -xz -C mdbook
           printf '%s/mdbook\n' "$PWD" >> "$GITHUB_PATH"
```
.github/workflows/nvim_ts.yml (4 changes, vendored)

```diff
@@ -28,9 +28,9 @@ jobs:
       NVIM: ${{ matrix.os == 'windows-latest' && 'nvim-win64\\bin\\nvim.exe' || 'nvim' }}
       NVIM_TS_DIR: nvim-treesitter
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
 
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
         with:
           repository: nvim-treesitter/nvim-treesitter
           path: ${{ env.NVIM_TS_DIR }}
```
.github/workflows/release.yml (39 changes, vendored)

```diff
@@ -17,13 +17,15 @@ jobs:
     runs-on: ubuntu-latest
     needs: build
     permissions:
+      id-token: write
+      attestations: write
       contents: write
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Download build artifacts
-        uses: actions/download-artifact@v5
+        uses: actions/download-artifact@v7
         with:
           path: artifacts
 
@@ -47,9 +49,16 @@ jobs:
           rm -rf artifacts
           ls -l target/
 
+      - name: Generate attestations
+        uses: actions/attest-build-provenance@v3
+        with:
+          subject-path: |
+            target/tree-sitter-*.gz
+            target/web-tree-sitter.tar.gz
+
       - name: Create release
         run: |-
-          gh release create ${{ github.ref_name }} \
+          gh release create $GITHUB_REF_NAME \
             target/tree-sitter-*.gz \
             target/web-tree-sitter.tar.gz
         env:
@@ -58,22 +67,34 @@ jobs:
   crates_io:
     name: Publish packages to Crates.io
     runs-on: ubuntu-latest
+    environment: crates
+    permissions:
+      id-token: write
+      contents: read
     needs: release
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Set up Rust
         uses: actions-rust-lang/setup-rust-toolchain@v1
 
+      - name: Set up registry token
+        id: auth
+        uses: rust-lang/crates-io-auth-action@v1
+
       - name: Publish crates to Crates.io
         uses: katyo/publish-crates@v2
         with:
-          registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
+          registry-token: ${{ steps.auth.outputs.token }}
 
   npm:
     name: Publish packages to npmjs.com
     runs-on: ubuntu-latest
+    environment: npm
+    permissions:
+      id-token: write
+      contents: read
     needs: release
     strategy:
       fail-fast: false
@@ -81,12 +102,12 @@ jobs:
         directory: [crates/cli/npm, lib/binding_web]
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Set up Node
-        uses: actions/setup-node@v5
+        uses: actions/setup-node@v6
         with:
-          node-version: 20
+          node-version: 24
           registry-url: https://registry.npmjs.org
 
       - name: Set up Rust
@@ -106,5 +127,3 @@ jobs:
       - name: Publish to npmjs.com
         working-directory: ${{ matrix.directory }}
         run: npm publish
-        env:
-          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```
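The release-pipeline changes here go together: the release job gains `id-token`/`attestations` permissions plus an `actions/attest-build-provenance` step that attests the uploaded archives, and both publish jobs move toward OIDC-based auth, with the crates.io token now minted per-run by `rust-lang/crates-io-auth-action` instead of read from a stored `CARGO_REGISTRY_TOKEN`, and the npm job dropping its `NODE_AUTH_TOKEN` secret.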
.github/workflows/response.yml (4 changes, vendored)

```diff
@@ -17,7 +17,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           sparse-checkout: .github/scripts/close_unresponsive.js
           sparse-checkout-cone-mode: false
@@ -35,7 +35,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           sparse-checkout: .github/scripts/remove_response_label.js
           sparse-checkout-cone-mode: false
```
.github/workflows/reviewers_remove.yml (2 changes, vendored)

```diff
@@ -12,7 +12,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           sparse-checkout: .github/scripts/reviewers_remove.js
           sparse-checkout-cone-mode: false
```
.github/workflows/sanitize.yml (2 changes, vendored)

```diff
@@ -15,7 +15,7 @@ jobs:
       TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Install UBSAN library
         run: sudo apt-get update -y && sudo apt-get install -y libubsan1
```
.github/workflows/spam.yml (2 changes, vendored)

```diff
@@ -16,7 +16,7 @@ jobs:
     if: github.event.label.name == 'spam'
     steps:
       - name: Checkout script
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
         with:
           sparse-checkout: .github/scripts/close_spam.js
           sparse-checkout-cone-mode: false
```
.github/workflows/wasm_exports.yml (2 changes, vendored)

```diff
@@ -18,7 +18,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
 
       - name: Set up stable Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
```
.github/workflows/wasm_stdlib.yml (new file, 19 lines, vendored)

```diff
@@ -0,0 +1,19 @@
+name: Check Wasm Stdlib build
+
+on:
+  workflow_call:
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v6
+
+      - name: Check directory changes
+        uses: actions/github-script@v8
+        with:
+          script: |
+            const scriptPath = `${process.env.GITHUB_WORKSPACE}/.github/scripts/wasm_stdlib.js`;
+            const script = require(scriptPath);
+            return script({ github, context, core });
```
CMakeLists.txt

```diff
@@ -1,7 +1,7 @@
 cmake_minimum_required(VERSION 3.13)
 
 project(tree-sitter
-        VERSION "0.26.0"
+        VERSION "0.27.0"
         DESCRIPTION "An incremental parsing system for programming tools"
         HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/"
         LANGUAGES C)
@@ -81,7 +81,7 @@ set_target_properties(tree-sitter
                       SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}"
                       DEFINE_SYMBOL "")
 
-target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE _DARWIN_C_SOURCE)
+target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE _BSD_SOURCE _DARWIN_C_SOURCE)
 
 include(GNUInstallDirs)
```
Cargo.lock (912 changes, generated): file diff suppressed because it is too large.
Cargo.toml (45 changes)

```diff
@@ -14,13 +14,13 @@ members = [
 resolver = "2"
 
 [workspace.package]
-version = "0.26.0"
+version = "0.27.0"
 authors = [
   "Max Brunsfeld <maxbrunsfeld@gmail.com>",
   "Amaan Qureshi <amaanq12@gmail.com>",
 ]
 edition = "2021"
-rust-version = "1.84"
+rust-version = "1.85"
 homepage = "https://tree-sitter.github.io/tree-sitter"
 repository = "https://github.com/tree-sitter/tree-sitter"
 license = "MIT"
@@ -103,11 +103,11 @@ codegen-units = 256
 
 [workspace.dependencies]
 ansi_colours = "1.2.3"
-anstyle = "1.0.11"
+anstyle = "1.0.13"
 anyhow = "1.0.100"
 bstr = "1.12.0"
-cc = "1.2.39"
-clap = { version = "4.5.48", features = [
+cc = "1.2.53"
+clap = { version = "4.5.54", features = [
   "cargo",
   "derive",
   "env",
@@ -115,48 +115,49 @@ clap = { version = "4.5.54", features = [
   "string",
   "unstable-styles",
 ] }
-clap_complete = "4.5.58"
-clap_complete_nushell = "4.5.8"
+clap_complete = "4.5.65"
+clap_complete_nushell = "4.5.10"
 crc32fast = "1.5.0"
 ctor = "0.2.9"
 ctrlc = { version = "3.5.0", features = ["termination"] }
 dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
-etcetera = "0.10.0"
+etcetera = "0.11.0"
 fs4 = "0.12.0"
 glob = "0.3.3"
 heck = "0.5.0"
 html-escape = "0.2.13"
-indexmap = "2.11.4"
+indexmap = "2.12.1"
 indoc = "2.0.6"
-libloading = "0.8.9"
+libloading = "0.9.0"
 log = { version = "0.4.28", features = ["std"] }
-memchr = "2.7.5"
+memchr = "2.7.6"
 once_cell = "1.21.3"
 pretty_assertions = "1.4.1"
 rand = "0.8.5"
-regex = "1.11.2"
+regex = "1.11.3"
 regex-syntax = "0.8.6"
 rustc-hash = "2.1.1"
 schemars = "1.0.5"
 semver = { version = "1.0.27", features = ["serde"] }
 serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.145", features = ["preserve_order"] }
+serde_json = { version = "1.0.149", features = ["preserve_order"] }
 similar = "2.7.0"
 smallbitvec = "2.6.0"
 streaming-iterator = "0.1.9"
 tempfile = "3.23.0"
-thiserror = "2.0.16"
+thiserror = "2.0.17"
 tiny_http = "0.12.0"
 topological-sort = "0.2.2"
 unindent = "0.2.4"
 walkdir = "2.5.0"
-wasmparser = "0.229.0"
+wasmparser = "0.243.0"
 webbrowser = "1.0.5"
 
-tree-sitter = { version = "0.26.0", path = "./lib" }
-tree-sitter-generate = { version = "0.26.0", path = "./crates/generate" }
-tree-sitter-loader = { version = "0.26.0", path = "./crates/loader" }
-tree-sitter-config = { version = "0.26.0", path = "./crates/config" }
-tree-sitter-highlight = { version = "0.26.0", path = "./crates/highlight" }
-tree-sitter-tags = { version = "0.26.0", path = "./crates/tags" }
+tree-sitter = { version = "0.27.0", path = "./lib" }
+tree-sitter-generate = { version = "0.27.0", path = "./crates/generate" }
+tree-sitter-loader = { version = "0.27.0", path = "./crates/loader" }
+tree-sitter-config = { version = "0.27.0", path = "./crates/config" }
+tree-sitter-highlight = { version = "0.27.0", path = "./crates/highlight" }
+tree-sitter-tags = { version = "0.27.0", path = "./crates/tags" }
 
-tree-sitter-language = { version = "0.1.5", path = "./crates/language" }
+tree-sitter-language = { version = "0.1", path = "./crates/language" }
```
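Note the lockstep version bump: 0.26.0 becomes 0.27.0 in the Cargo workspace and all internal crate dependencies here, in CMakeLists.txt above, and again in the Makefile, build.zig.zon, and the tree-sitter-cli npm manifests below, while `tree-sitter-language` is relaxed from an exact `0.1.5` to the `0.1` minor range.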
Makefile (4 changes)

```diff
@@ -1,4 +1,4 @@
-VERSION := 0.26.0
+VERSION := 0.27.0
 DESCRIPTION := An incremental parsing system for programming tools
 HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
 
@@ -24,7 +24,7 @@ OBJ := $(SRC:.c=.o)
 ARFLAGS := rcs
 CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
-override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE -D_DARWIN_C_SOURCE
+override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_DARWIN_C_SOURCE
 override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include
 
 # ABI versioning
```
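`_BSD_SOURCE` is added alongside the existing feature-test macros in every build system in this compare (the Makefile here, CMake above, and the SwiftPM and Zig hunks below), presumably to keep BSD-derived libc APIs visible on C libraries that still key off that older macro rather than `_DEFAULT_SOURCE`.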
Package.swift

```diff
@@ -27,6 +27,7 @@ let package = Package(
         .headerSearchPath("src"),
         .define("_POSIX_C_SOURCE", to: "200112L"),
         .define("_DEFAULT_SOURCE"),
+        .define("_BSD_SOURCE"),
         .define("_DARWIN_C_SOURCE"),
       ]),
   ],
```
build.zig

```diff
@@ -40,6 +40,7 @@ pub fn build(b: *std.Build) !void {
 
     lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L");
     lib.root_module.addCMacro("_DEFAULT_SOURCE", "");
+    lib.root_module.addCMacro("_BSD_SOURCE", "");
     lib.root_module.addCMacro("_DARWIN_C_SOURCE", "");
 
     if (wasm) {
```
build.zig.zon

```diff
@@ -1,7 +1,7 @@
 .{
     .name = .tree_sitter,
     .fingerprint = 0x841224b447ac0d4f,
-    .version = "0.26.0",
+    .version = "0.27.0",
     .minimum_zig_version = "0.14.1",
     .paths = .{
         "build.zig",
```
```diff
@@ -54,11 +54,13 @@ log.workspace = true
 memchr.workspace = true
 rand.workspace = true
 regex.workspace = true
+schemars.workspace = true
 semver.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+similar.workspace = true
 streaming-iterator.workspace = true
 thiserror.workspace = true
 tiny_http.workspace = true
 walkdir.workspace = true
 wasmparser.workspace = true
@@ -73,7 +75,7 @@ tree-sitter-tags.workspace = true
 
 [dev-dependencies]
 encoding_rs = "0.8.35"
-widestring = "1.2.0"
+widestring = "1.2.1"
 tree_sitter_proc_macro = { path = "src/tests/proc_macro", package = "tree-sitter-tests-proc-macro" }
 
 tempfile.workspace = true
```
```diff
@@ -7,7 +7,8 @@
 [npmjs.com]: https://www.npmjs.org/package/tree-sitter-cli
 [npmjs.com badge]: https://img.shields.io/npm/v/tree-sitter-cli.svg?color=%23BF4A4A
 
-The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`, `Linux`, and `Windows`.
+The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`,
+`Linux`, and `Windows`.
 
 ### Installation
 
@@ -34,9 +35,11 @@ The `tree-sitter` binary itself has no dependencies, but specific commands have
 
 ### Commands
 
-* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current working directory. See [the documentation] for more information.
+* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current
+  working directory. See [the documentation] for more information.
 
-* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory. See [the documentation] for more information.
+* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory.
+  See [the documentation] for more information.
 
 * `parse` - The `tree-sitter parse` command will parse a file (or list of files) using Tree-sitter parsers.
 
```
crates/cli/eslint/package-lock.json (6 changes, generated)

```diff
@@ -805,9 +805,9 @@
       "peer": true
     },
     "node_modules/js-yaml": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
-      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
+      "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
       "license": "MIT",
       "peer": true,
       "dependencies": {
```
crates/cli/npm/dsl.d.ts (1 change, vendored)

```diff
@@ -29,6 +29,7 @@ type Rule =
   | PrecRule
   | Repeat1Rule
   | RepeatRule
+  | ReservedRule
   | SeqRule
   | StringRule
   | SymbolRule<string>
```
crates/cli/npm/package-lock.json (4 changes, generated)

```diff
@@ -1,12 +1,12 @@
 {
   "name": "tree-sitter-cli",
-  "version": "0.26.0",
+  "version": "0.27.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "tree-sitter-cli",
-      "version": "0.26.0",
+      "version": "0.27.0",
       "hasInstallScript": true,
       "license": "MIT",
       "bin": {
```
```diff
@@ -1,6 +1,6 @@
 {
   "name": "tree-sitter-cli",
-  "version": "0.26.0",
+  "version": "0.27.0",
   "author": {
     "name": "Max Brunsfeld",
     "email": "maxbrunsfeld@gmail.com"
```
```diff
@@ -25,7 +25,7 @@ use crate::{
         random::Rand,
     },
     parse::perform_edit,
-    test::{parse_tests, print_diff, print_diff_key, strip_sexp_fields, TestEntry},
+    test::{parse_tests, strip_sexp_fields, DiffKey, TestDiff, TestEntry},
 };
 
 pub static LOG_ENABLED: LazyLock<bool> = LazyLock::new(|| env::var("TREE_SITTER_LOG").is_ok());
@@ -183,8 +183,8 @@ pub fn fuzz_language_corpus(
 
         if actual_output != test.output {
             println!("Incorrect initial parse for {test_name}");
-            print_diff_key();
-            print_diff(&actual_output, &test.output, true);
+            DiffKey::print();
+            println!("{}", TestDiff::new(&actual_output, &test.output));
             println!();
             return false;
         }
@@ -276,8 +276,8 @@ pub fn fuzz_language_corpus(
 
         if actual_output != test.output && !test.error {
             println!("Incorrect parse for {test_name} - seed {seed}");
-            print_diff_key();
-            print_diff(&actual_output, &test.output, true);
+            DiffKey::print();
+            println!("{}", TestDiff::new(&actual_output, &test.output));
             println!();
             return false;
         }
```
```diff
@@ -8,13 +8,17 @@ use anyhow::{anyhow, Context, Result};
 use crc32fast::hash as crc32;
 use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
 use indoc::{formatdoc, indoc};
-use log::warn;
+use log::info;
 use rand::{thread_rng, Rng};
 use semver::Version;
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
 use tree_sitter_generate::write_file;
-use tree_sitter_loader::{Author, Bindings, Grammar, Links, Metadata, PathsJSON, TreeSitterJSON};
+use tree_sitter_loader::{
+    Author, Bindings, Grammar, Links, Metadata, PathsJSON, TreeSitterJSON,
+    DEFAULT_HIGHLIGHTS_QUERY_FILE_NAME, DEFAULT_INJECTIONS_QUERY_FILE_NAME,
+    DEFAULT_LOCALS_QUERY_FILE_NAME, DEFAULT_TAGS_QUERY_FILE_NAME,
+};
 
 const CLI_VERSION: &str = env!("CARGO_PKG_VERSION");
 const CLI_VERSION_PLACEHOLDER: &str = "CLI_VERSION";
@@ -32,6 +36,8 @@ const PARSER_CLASS_NAME_PLACEHOLDER: &str = "PARSER_CLASS_NAME";
 
 const PARSER_DESCRIPTION_PLACEHOLDER: &str = "PARSER_DESCRIPTION";
 const PARSER_LICENSE_PLACEHOLDER: &str = "PARSER_LICENSE";
+const PARSER_NS_PLACEHOLDER: &str = "PARSER_NS";
+const PARSER_NS_CLEANED_PLACEHOLDER: &str = "PARSER_NS_CLEANED";
 const PARSER_URL_PLACEHOLDER: &str = "PARSER_URL";
 const PARSER_URL_STRIPPED_PLACEHOLDER: &str = "PARSER_URL_STRIPPED";
 const PARSER_VERSION_PLACEHOLDER: &str = "PARSER_VERSION";
@@ -54,12 +60,22 @@ const AUTHOR_BLOCK_RS: &str = "\nauthors = [";
 const AUTHOR_NAME_PLACEHOLDER_RS: &str = "PARSER_AUTHOR_NAME";
 const AUTHOR_EMAIL_PLACEHOLDER_RS: &str = " PARSER_AUTHOR_EMAIL";
 
+const AUTHOR_BLOCK_JAVA: &str = "\n <developer>";
+const AUTHOR_NAME_PLACEHOLDER_JAVA: &str = "\n <name>PARSER_AUTHOR_NAME</name>";
+const AUTHOR_EMAIL_PLACEHOLDER_JAVA: &str = "\n <email>PARSER_AUTHOR_EMAIL</email>";
+const AUTHOR_URL_PLACEHOLDER_JAVA: &str = "\n <url>PARSER_AUTHOR_URL</url>";
+
 const AUTHOR_BLOCK_GRAMMAR: &str = "\n * @author ";
 const AUTHOR_NAME_PLACEHOLDER_GRAMMAR: &str = "PARSER_AUTHOR_NAME";
 const AUTHOR_EMAIL_PLACEHOLDER_GRAMMAR: &str = " PARSER_AUTHOR_EMAIL";
 
 const FUNDING_URL_PLACEHOLDER: &str = "FUNDING_URL";
 
+const HIGHLIGHTS_QUERY_PATH_PLACEHOLDER: &str = "HIGHLIGHTS_QUERY_PATH";
+const INJECTIONS_QUERY_PATH_PLACEHOLDER: &str = "INJECTIONS_QUERY_PATH";
+const LOCALS_QUERY_PATH_PLACEHOLDER: &str = "LOCALS_QUERY_PATH";
+const TAGS_QUERY_PATH_PLACEHOLDER: &str = "TAGS_QUERY_PATH";
+
 const GRAMMAR_JS_TEMPLATE: &str = include_str!("./templates/grammar.js");
 const PACKAGE_JSON_TEMPLATE: &str = include_str!("./templates/package.json");
 const GITIGNORE_TEMPLATE: &str = include_str!("./templates/gitignore");
@@ -98,12 +114,16 @@ const TEST_BINDING_PY_TEMPLATE: &str = include_str!("./templates/test_binding.py
 const PACKAGE_SWIFT_TEMPLATE: &str = include_str!("./templates/package.swift");
 const TESTS_SWIFT_TEMPLATE: &str = include_str!("./templates/tests.swift");
 
+const POM_XML_TEMPLATE: &str = include_str!("./templates/pom.xml");
+const BINDING_JAVA_TEMPLATE: &str = include_str!("./templates/binding.java");
+const TEST_JAVA_TEMPLATE: &str = include_str!("./templates/test.java");
+
 const BUILD_ZIG_TEMPLATE: &str = include_str!("./templates/build.zig");
 const BUILD_ZIG_ZON_TEMPLATE: &str = include_str!("./templates/build.zig.zon");
 const ROOT_ZIG_TEMPLATE: &str = include_str!("./templates/root.zig");
 const TEST_ZIG_TEMPLATE: &str = include_str!("./templates/test.zig");
 
-const TREE_SITTER_JSON_SCHEMA: &str =
+pub const TREE_SITTER_JSON_SCHEMA: &str =
     "https://tree-sitter.github.io/tree-sitter/assets/schemas/config.schema.json";
 
 #[derive(Serialize, Deserialize, Clone)]
@@ -125,6 +145,7 @@ pub struct JsonConfigOpts {
     pub email: Option<String>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub url: Option<String>,
+    pub namespace: Option<String>,
     pub bindings: Bindings,
 }
 
@@ -165,7 +186,7 @@ impl JsonConfigOpts {
                 }),
                 funding: self.funding,
             }),
-            namespace: None,
+            namespace: self.namespace,
         },
         bindings: self.bindings,
     }
@@ -188,6 +209,7 @@ impl Default for JsonConfigOpts {
             author: String::new(),
             email: None,
             url: None,
+            namespace: None,
             bindings: Bindings::default(),
         }
     }
@@ -205,6 +227,11 @@ struct GenerateOpts<'a> {
     camel_parser_name: &'a str,
     title_parser_name: &'a str,
     class_name: &'a str,
+    highlights_query_path: &'a str,
+    injections_query_path: &'a str,
+    locals_query_path: &'a str,
+    tags_query_path: &'a str,
+    namespace: Option<&'a str>,
 }
 
 pub fn generate_grammar_files(
@@ -255,6 +282,11 @@ pub fn generate_grammar_files(
         .clone()
         .unwrap_or_else(|| format!("TreeSitter{}", language_name.to_upper_camel_case()));
 
+    let default_highlights_path = Path::new("queries").join(DEFAULT_HIGHLIGHTS_QUERY_FILE_NAME);
+    let default_injections_path = Path::new("queries").join(DEFAULT_INJECTIONS_QUERY_FILE_NAME);
+    let default_locals_path = Path::new("queries").join(DEFAULT_LOCALS_QUERY_FILE_NAME);
+    let default_tags_path = Path::new("queries").join(DEFAULT_TAGS_QUERY_FILE_NAME);
+
     let generate_opts = GenerateOpts {
         author_name: authors
             .map(|a| a.first().map(|a| a.name.as_str()))
@@ -281,6 +313,19 @@ pub fn generate_grammar_files(
         camel_parser_name: &camel_name,
         title_parser_name: &title_name,
         class_name: &class_name,
+        highlights_query_path: tree_sitter_config.grammars[0]
+            .highlights
+            .to_variable_value(&default_highlights_path),
+        injections_query_path: tree_sitter_config.grammars[0]
+            .injections
+            .to_variable_value(&default_injections_path),
+        locals_query_path: tree_sitter_config.grammars[0]
+            .locals
+            .to_variable_value(&default_locals_path),
+        tags_query_path: tree_sitter_config.grammars[0]
+            .tags
+            .to_variable_value(&default_tags_path),
+        namespace: tree_sitter_config.metadata.namespace.as_deref(),
     };
 
     // Create package.json
@@ -307,11 +352,11 @@ pub fn generate_grammar_files(
                 "tree-sitter-cli":"#},
             indoc! {r#"
                 "prebuildify": "^6.0.1",
-                "tree-sitter": "^0.22.4",
+                "tree-sitter": "^0.25.0",
                 "tree-sitter-cli":"#},
         );
         if !contents.contains("module") {
-            warn!("Updating package.json");
+            info!("Migrating package.json to ESM");
             contents = contents.replace(
                 r#""repository":"#,
                 indoc! {r#"
@@ -333,6 +378,7 @@ pub fn generate_grammar_files(
         |path| {
             let mut contents = fs::read_to_string(path)?;
             if contents.contains("module.exports") {
+                info!("Migrating grammars.js to ESM");
                 contents = contents.replace("module.exports =", "export default");
                 write_file(path, contents)?;
             }
@@ -348,10 +394,16 @@ pub fn generate_grammar_files(
         allow_update,
         |path| generate_file(path, GITIGNORE_TEMPLATE, language_name, &generate_opts),
         |path| {
-            let contents = fs::read_to_string(path)?;
+            let mut contents = fs::read_to_string(path)?;
             if !contents.contains("Zig artifacts") {
-                warn!("Replacing .gitignore");
-                generate_file(path, GITIGNORE_TEMPLATE, language_name, &generate_opts)?;
+                info!("Adding zig entries to .gitignore");
+                contents.push('\n');
+                contents.push_str(indoc! {"
+                    # Zig artifacts
+                    .zig-cache/
+                    zig-cache/
+                    zig-out/
+                "});
             }
             Ok(())
         },
@@ -364,8 +416,13 @@ pub fn generate_grammar_files(
        |path| generate_file(path, GITATTRIBUTES_TEMPLATE, language_name, &generate_opts),
        |path| {
            let mut contents = fs::read_to_string(path)?;
-           contents = contents.replace("bindings/c/* ", "bindings/c/** ");
+           let c_bindings_entry = "bindings/c/* ";
+           if contents.contains(c_bindings_entry) {
+               info!("Updating c bindings entry in .gitattributes");
+               contents = contents.replace(c_bindings_entry, "bindings/c/** ");
+           }
            if !contents.contains("Zig bindings") {
+               info!("Adding zig entries to .gitattributes");
                contents.push('\n');
                contents.push_str(indoc! {"
                    # Zig bindings
@@ -388,8 +445,48 @@ pub fn generate_grammar_files(
     // Generate Rust bindings
     if tree_sitter_config.bindings.rust {
         missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
-            missing_path(path.join("lib.rs"), |path| {
+            missing_path_else(path.join("lib.rs"), allow_update, |path| {
                 generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts)
+            }, |path| {
+                let mut contents = fs::read_to_string(path)?;
+                if !contents.contains("#[cfg(with_highlights_query)]") {
+                    info!("Updating query constants in bindings/rust/lib.rs");
+                    let replacement = indoc! {r#"
+                        #[cfg(with_highlights_query)]
+                        /// The syntax highlighting query for this grammar.
+                        pub const HIGHLIGHTS_QUERY: &str = include_str!("../../HIGHLIGHTS_QUERY_PATH");
+
+                        #[cfg(with_injections_query)]
+                        /// The language injection query for this grammar.
+                        pub const INJECTIONS_QUERY: &str = include_str!("../../INJECTIONS_QUERY_PATH");
+
+                        #[cfg(with_locals_query)]
+                        /// The local variable query for this grammar.
+                        pub const LOCALS_QUERY: &str = include_str!("../../LOCALS_QUERY_PATH");
+
+                        #[cfg(with_tags_query)]
+                        /// The symbol tagging query for this grammar.
+                        pub const TAGS_QUERY: &str = include_str!("../../TAGS_QUERY_PATH");
+                    "#}
+                    .replace("HIGHLIGHTS_QUERY_PATH", generate_opts.highlights_query_path)
+                    .replace("INJECTIONS_QUERY_PATH", generate_opts.injections_query_path)
+                    .replace("LOCALS_QUERY_PATH", generate_opts.locals_query_path)
+                    .replace("TAGS_QUERY_PATH", generate_opts.tags_query_path);
+                    contents = contents
+                        .replace(
+                            indoc! {r#"
+                                // NOTE: uncomment these to include any queries that this grammar contains:
+
+                                // pub const HIGHLIGHTS_QUERY: &str = include_str!("../../queries/highlights.scm");
+                                // pub const INJECTIONS_QUERY: &str = include_str!("../../queries/injections.scm");
+                                // pub const LOCALS_QUERY: &str = include_str!("../../queries/locals.scm");
+                                // pub const TAGS_QUERY: &str = include_str!("../../queries/tags.scm");
+                            "#},
+                            &replacement,
+                        );
+                }
+                write_file(path, contents)?;
+                Ok(())
             })?;
 
             missing_path_else(
@@ -397,37 +494,76 @@ pub fn generate_grammar_files(
                 allow_update,
                 |path| generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts),
                 |path| {
-                    let replacement = indoc!{r#"
-                        c_config.flag("-utf-8");
-
-                        if std::env::var("TARGET").unwrap() == "wasm32-unknown-unknown" {
-                            let Ok(wasm_headers) = std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS") else {
-                                panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS must be set by the language crate");
-                            };
-                            let Ok(wasm_src) =
-                                std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_SRC").map(std::path::PathBuf::from)
-                            else {
-                                panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_SRC must be set by the language crate");
-                            };
-
-                            c_config.include(&wasm_headers);
-                            c_config.files([
-                                wasm_src.join("stdio.c"),
-                                wasm_src.join("stdlib.c"),
-                                wasm_src.join("string.c"),
-                            ]);
-                        }
-                    "#};
-
-                    let indented_replacement = replacement
-                        .lines()
-                        .map(|line| if line.is_empty() { line.to_string() } else { format!("    {line}") })
-                        .collect::<Vec<_>>()
-                        .join("\n");
-
                     let mut contents = fs::read_to_string(path)?;
                     if !contents.contains("wasm32-unknown-unknown") {
-                        contents = contents.replace(r#"    c_config.flag("-utf-8");"#, &indented_replacement);
+                        info!("Adding wasm32-unknown-unknown target to bindings/rust/build.rs");
+                        let replacement = indoc!{r#"
+                            c_config.flag("-utf-8");
+
+                            if std::env::var("TARGET").unwrap() == "wasm32-unknown-unknown" {
+                                let Ok(wasm_headers) = std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS") else {
+                                    panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_HEADERS must be set by the language crate");
+                                };
+                                let Ok(wasm_src) =
+                                    std::env::var("DEP_TREE_SITTER_LANGUAGE_WASM_SRC").map(std::path::PathBuf::from)
+                                else {
+                                    panic!("Environment variable DEP_TREE_SITTER_LANGUAGE_WASM_SRC must be set by the language crate");
+                                };
+
+                                c_config.include(&wasm_headers);
+                                c_config.files([
+                                    wasm_src.join("stdio.c"),
+                                    wasm_src.join("stdlib.c"),
+                                    wasm_src.join("string.c"),
+                                ]);
+                            }
+                        "#}
+                        .lines()
+                        .map(|line| if line.is_empty() { line.to_string() } else { format!("    {line}") })
+                        .collect::<Vec<_>>()
+                        .join("\n");
+
+                        contents = contents.replace(r#"    c_config.flag("-utf-8");"#, &replacement);
                     }
 
+                    // Introduce configuration variables for dynamic query inclusion
+                    if !contents.contains("with_highlights_query") {
+                        info!("Adding support for dynamic query inclusion to bindings/rust/build.rs");
+                        let replaced = indoc! {r#"
+                            c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
+                        }"#}
+                        .replace("KEBAB_PARSER_NAME", &language_name.to_kebab_case());
+
+                        let replacement = indoc! {r#"
+                            c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
+
+                            println!("cargo:rustc-check-cfg=cfg(with_highlights_query)");
+                            if !"HIGHLIGHTS_QUERY_PATH".is_empty() && std::path::Path::new("HIGHLIGHTS_QUERY_PATH").exists() {
+                                println!("cargo:rustc-cfg=with_highlights_query");
+                            }
+                            println!("cargo:rustc-check-cfg=cfg(with_injections_query)");
+                            if !"INJECTIONS_QUERY_PATH".is_empty() && std::path::Path::new("INJECTIONS_QUERY_PATH").exists() {
+                                println!("cargo:rustc-cfg=with_injections_query");
+                            }
+                            println!("cargo:rustc-check-cfg=cfg(with_locals_query)");
+                            if !"LOCALS_QUERY_PATH".is_empty() && std::path::Path::new("LOCALS_QUERY_PATH").exists() {
+                                println!("cargo:rustc-cfg=with_locals_query");
+                            }
+                            println!("cargo:rustc-check-cfg=cfg(with_tags_query)");
+                            if !"TAGS_QUERY_PATH".is_empty() && std::path::Path::new("TAGS_QUERY_PATH").exists() {
+                                println!("cargo:rustc-cfg=with_tags_query");
+                            }
+                        }"#}
+                        .replace("KEBAB_PARSER_NAME", &language_name.to_kebab_case())
+                        .replace("HIGHLIGHTS_QUERY_PATH", generate_opts.highlights_query_path)
+                        .replace("INJECTIONS_QUERY_PATH", generate_opts.injections_query_path)
+                        .replace("LOCALS_QUERY_PATH", generate_opts.locals_query_path)
+                        .replace("TAGS_QUERY_PATH", generate_opts.tags_query_path);
+
+                        contents = contents.replace(
+                            &replaced,
+                            &replacement,
+                        );
+                    }
+
                     write_file(path, contents)?;
```
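The two template updates above pair up: the generated `build.rs` probes for each query file and emits a custom `cfg`, and the generated `lib.rs` gates the matching `pub const ..._QUERY` constant on that `cfg`, so a grammar that lacks a given query file still compiles. A minimal standalone sketch of the emitted pattern, assuming a grammar whose queries live at the default `queries/*.scm` paths:

```rust
// build.rs sketch of the cfg-gating pattern described above
// (assumed default query paths; real paths come from tree-sitter.json).
fn main() {
    let queries = [
        ("with_highlights_query", "queries/highlights.scm"),
        ("with_injections_query", "queries/injections.scm"),
        ("with_locals_query", "queries/locals.scm"),
        ("with_tags_query", "queries/tags.scm"),
    ];
    for (cfg, path) in queries {
        // Declare the custom cfg so rustc's --check-cfg lint accepts it...
        println!("cargo:rustc-check-cfg=cfg({cfg})");
        // ...and enable it only when the query file actually exists.
        if std::path::Path::new(path).exists() {
            println!("cargo:rustc-cfg={cfg}");
        }
    }
}
```

Declaring each name via `rustc-check-cfg` keeps the `unexpected_cfgs` lint quiet in builds where a query is absent and its `cfg` is never set.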
|
|
@ -449,6 +585,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if contents.contains("\"LICENSE\"") {
|
||||
info!("Adding LICENSE entry to bindings/rust/Cargo.toml");
|
||||
write_file(path, contents.replace("\"LICENSE\"", "\"/LICENSE\""))?;
|
||||
}
|
||||
Ok(())
|
||||
|
|
@ -468,17 +605,27 @@ pub fn generate_grammar_files(
|
|||
|path| generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts),
|
||||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("new URL") {
|
||||
warn!("Replacing index.js");
|
||||
if !contents.contains("Object.defineProperty") {
|
||||
info!("Replacing index.js");
|
||||
generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
missing_path(path.join("index.d.ts"), |path| {
|
||||
generate_file(path, INDEX_D_TS_TEMPLATE, language_name, &generate_opts)
|
||||
})?;
|
||||
missing_path_else(
|
||||
path.join("index.d.ts"),
|
||||
allow_update,
|
||||
|path| generate_file(path, INDEX_D_TS_TEMPLATE, language_name, &generate_opts),
|
||||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("export default binding") {
|
||||
info!("Replacing index.d.ts");
|
||||
generate_file(path, INDEX_D_TS_TEMPLATE, language_name, &generate_opts)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
missing_path_else(
|
||||
path.join("binding_test.js"),
|
||||
|
|
@ -494,7 +641,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("import") {
|
||||
warn!("Replacing binding_test.js");
|
||||
info!("Replacing binding_test.js");
|
||||
generate_file(
|
||||
path,
|
||||
BINDING_TEST_JS_TEMPLATE,
|
||||
|
|
@ -517,6 +664,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if contents.contains("fs.exists(") {
|
||||
info!("Replacing `fs.exists` calls in binding.gyp");
|
||||
write_file(path, contents.replace("fs.exists(", "fs.existsSync("))?;
|
||||
}
|
||||
Ok(())
|
||||
|
|
@ -529,14 +677,17 @@ pub fn generate_grammar_files(
|
|||
|
||||
// Generate C bindings
|
||||
if tree_sitter_config.bindings.c {
|
||||
let kebab_case_name = language_name.to_kebab_case();
|
||||
missing_path(bindings_dir.join("c"), create_dir)?.apply(|path| {
|
||||
let old_file = &path.join(format!("tree-sitter-{}.h", language_name.to_kebab_case()));
|
||||
let header_name = format!("tree-sitter-{kebab_case_name}.h");
|
||||
let old_file = &path.join(&header_name);
|
||||
if allow_update && fs::exists(old_file).unwrap_or(false) {
|
||||
info!("Removing bindings/c/{header_name}");
|
||||
fs::remove_file(old_file)?;
|
||||
}
|
||||
missing_path(path.join("tree_sitter"), create_dir)?.apply(|include_path| {
|
||||
missing_path(
|
||||
include_path.join(format!("tree-sitter-{}.h", language_name.to_kebab_case())),
|
||||
include_path.join(&header_name),
|
||||
|path| {
|
||||
generate_file(path, PARSER_NAME_H_TEMPLATE, language_name, &generate_opts)
|
||||
},
|
||||
|
|
@ -545,7 +696,7 @@ pub fn generate_grammar_files(
|
|||
})?;
|
||||
|
||||
missing_path(
|
||||
path.join(format!("tree-sitter-{}.pc.in", language_name.to_kebab_case())),
|
||||
path.join(format!("tree-sitter-{kebab_case_name}.pc.in")),
|
||||
|path| {
|
||||
generate_file(
|
||||
path,
|
||||
|
|
@ -565,23 +716,27 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("cd '$(DESTDIR)$(LIBDIR)' && ln -sf") {
|
||||
warn!("Replacing Makefile");
|
||||
info!("Replacing Makefile");
|
||||
generate_file(path, MAKEFILE_TEMPLATE, language_name, &generate_opts)?;
|
||||
} else {
|
||||
contents = contents
|
||||
.replace(
|
||||
indoc! {r"
|
||||
$(PARSER): $(SRC_DIR)/grammar.json
|
||||
$(TS) generate $^
|
||||
"},
|
||||
indoc! {r"
|
||||
$(SRC_DIR)/grammar.json: grammar.js
|
||||
$(TS) generate --emit=json $^
|
||||
let replaced = indoc! {r"
|
||||
$(PARSER): $(SRC_DIR)/grammar.json
|
||||
$(TS) generate $^
|
||||
"};
|
||||
if contents.contains(replaced) {
|
||||
info!("Adding --no-parser target to Makefile");
|
||||
contents = contents
|
||||
.replace(
|
||||
replaced,
|
||||
indoc! {r"
|
||||
$(SRC_DIR)/grammar.json: grammar.js
|
||||
$(TS) generate --no-parser $^
|
||||
|
||||
$(PARSER): $(SRC_DIR)/grammar.json
|
||||
$(TS) generate --emit=parser $^
|
||||
"}
|
||||
);
|
||||
$(PARSER): $(SRC_DIR)/grammar.json
|
||||
$(TS) generate $^
|
||||
"}
|
||||
);
|
||||
}
|
||||
write_file(path, contents)?;
|
||||
}
|
||||
Ok(())
|
||||
|
|
@ -593,8 +748,8 @@ pub fn generate_grammar_files(
|
|||
allow_update,
|
||||
|path| generate_file(path, CMAKELISTS_TXT_TEMPLATE, language_name, &generate_opts),
|
||||
|path| {
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
contents = contents
|
||||
let contents = fs::read_to_string(path)?;
|
||||
let replaced_contents = contents
|
||||
.replace("add_custom_target(test", "add_custom_target(ts-test")
|
||||
.replace(
|
||||
&formatdoc! {r#"
|
||||
|
|
@ -625,21 +780,27 @@ pub fn generate_grammar_files(
|
|||
"#},
|
||||
indoc! {r#"
|
||||
add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/node-types.json"
|
||||
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/grammar.js"
|
||||
COMMAND "${TREE_SITTER_CLI}" generate grammar.js
|
||||
--emit=json
|
||||
COMMAND "${TREE_SITTER_CLI}" generate grammar.js --no-parser
|
||||
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
|
||||
COMMENT "Generating grammar.json")
|
||||
|
||||
add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
|
||||
BYPRODUCTS "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/parser.h"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/alloc.h"
|
||||
"${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/array.h"
|
||||
DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
|
||||
COMMAND "${TREE_SITTER_CLI}" generate src/grammar.json
|
||||
--emit=parser --abi=${TREE_SITTER_ABI_VERSION}
|
||||
--abi=${TREE_SITTER_ABI_VERSION}
|
||||
WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
|
||||
COMMENT "Generating parser.c")
|
||||
"#}
|
||||
);
|
||||
write_file(path, contents)?;
|
||||
if !replaced_contents.eq(&contents) {
|
||||
info!("Updating CMakeLists.txt");
|
||||
write_file(path, replaced_contents)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
|
@ -675,7 +836,8 @@ pub fn generate_grammar_files(
|
|||
// Generate Python bindings
|
||||
if tree_sitter_config.bindings.python {
|
||||
missing_path(bindings_dir.join("python"), create_dir)?.apply(|path| {
|
||||
let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
|
||||
let snake_case_grammar_name = format!("tree_sitter_{}", language_name.to_snake_case());
|
||||
let lang_path = path.join(&snake_case_grammar_name);
|
||||
missing_path(&lang_path, create_dir)?;
|
||||
|
||||
missing_path_else(
|
||||
|
|
@ -685,6 +847,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("PyModuleDef_Init") {
|
||||
info!("Updating bindings/python/{snake_case_grammar_name}/binding.c");
|
||||
contents = contents
|
||||
.replace("PyModule_Create", "PyModuleDef_Init")
|
||||
.replace(
|
||||
|
|
@ -717,9 +880,21 @@ pub fn generate_grammar_files(
|
|||
},
|
||||
)?;
|
||||
|
||||
missing_path(lang_path.join("__init__.py"), |path| {
|
||||
generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)
|
||||
})?;
|
||||
missing_path_else(
|
||||
lang_path.join("__init__.py"),
|
||||
allow_update,
|
||||
|path| {
|
||||
generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)
|
||||
},
|
||||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("uncomment these to include any queries") {
|
||||
info!("Replacing __init__.py");
|
||||
generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
missing_path_else(
|
||||
lang_path.join("__init__.pyi"),
|
||||
|
|
@ -727,7 +902,11 @@ pub fn generate_grammar_files(
|
|||
|path| generate_file(path, INIT_PYI_TEMPLATE, language_name, &generate_opts),
|
||||
|path| {
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("CapsuleType") {
|
||||
if contents.contains("uncomment these to include any queries") {
|
||||
info!("Replacing __init__.pyi");
|
||||
generate_file(path, INIT_PYI_TEMPLATE, language_name, &generate_opts)?;
|
||||
} else if !contents.contains("CapsuleType") {
|
||||
info!("Updating __init__.pyi");
|
||||
contents = contents
|
||||
.replace(
|
||||
"from typing import Final",
|
||||
|
|
@ -759,6 +938,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("Parser(Language(") {
|
||||
info!("Updating Language function in bindings/python/tests/test_binding.py");
|
||||
contents = contents
|
||||
.replace("tree_sitter.Language(", "Parser(Language(")
|
||||
.replace(".language())\n", ".language()))\n")
|
||||
|
|
@ -779,11 +959,19 @@ pub fn generate_grammar_files(
|
|||
allow_update,
|
||||
|path| generate_file(path, SETUP_PY_TEMPLATE, language_name, &generate_opts),
|
||||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("build_ext") {
|
||||
warn!("Replacing setup.py");
|
||||
info!("Replacing setup.py");
|
||||
generate_file(path, SETUP_PY_TEMPLATE, language_name, &generate_opts)?;
|
||||
}
|
||||
if !contents.contains(" and not get_config_var") {
|
||||
info!("Updating Python free-threading support in setup.py");
|
||||
contents = contents.replace(
|
||||
r#"startswith("cp"):"#,
|
||||
r#"startswith("cp") and not get_config_var("Py_GIL_DISABLED"):"#
|
||||
);
|
||||
write_file(path, contents)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
|
@ -802,6 +990,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("cp310-*") {
|
||||
info!("Updating dependencies in pyproject.toml");
|
||||
contents = contents
|
||||
.replace(r#"build = "cp39-*""#, r#"build = "cp310-*""#)
|
||||
.replace(r#"python = ">=3.9""#, r#"python = ">=3.10""#)
|
||||
|
|
@ -839,15 +1028,18 @@ pub fn generate_grammar_files(
|
|||
allow_update,
|
||||
|path| generate_file(path, PACKAGE_SWIFT_TEMPLATE, language_name, &generate_opts),
|
||||
|path| {
|
||||
let mut contents = fs::read_to_string(path)?;
|
||||
contents = contents
|
||||
let contents = fs::read_to_string(path)?;
|
||||
let replaced_contents = contents
|
||||
.replace(
|
||||
"https://github.com/ChimeHQ/SwiftTreeSitter",
|
||||
"https://github.com/tree-sitter/swift-tree-sitter",
|
||||
)
|
||||
.replace("version: \"0.8.0\")", "version: \"0.9.0\")")
|
||||
.replace("(url:", "(name: \"SwiftTreeSitter\", url:");
|
||||
write_file(path, contents)?;
|
||||
if !replaced_contents.eq(&contents) {
|
||||
info!("Updating tree-sitter dependency in Package.swift");
|
||||
write_file(path, contents)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
|
@ -865,7 +1057,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if !contents.contains("b.pkg_hash.len") {
|
||||
warn!("Replacing build.zig");
|
||||
info!("Replacing build.zig");
|
||||
generate_file(path, BUILD_ZIG_TEMPLATE, language_name, &generate_opts)
|
||||
} else {
|
||||
Ok(())
|
||||
|
|
@ -880,7 +1072,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if !contents.contains(".name = .tree_sitter_") {
|
||||
warn!("Replacing build.zig.zon");
|
||||
info!("Replacing build.zig.zon");
|
||||
generate_file(path, BUILD_ZIG_ZON_TEMPLATE, language_name, &generate_opts)
|
||||
} else {
|
||||
Ok(())
|
||||
|
|
@ -896,7 +1088,7 @@ pub fn generate_grammar_files(
|
|||
|path| {
|
||||
let contents = fs::read_to_string(path)?;
|
||||
if contents.contains("ts.Language") {
|
||||
warn!("Replacing root.zig");
|
||||
info!("Replacing root.zig");
|
||||
generate_file(path, ROOT_ZIG_TEMPLATE, language_name, &generate_opts)
|
||||
} else {
|
||||
Ok(())
|
||||
|
|
@@ -912,6 +1104,45 @@ pub fn generate_grammar_files(
         })?;
     }
+
+    // Generate Java bindings
+    if tree_sitter_config.bindings.java {
+        missing_path(repo_path.join("pom.xml"), |path| {
+            generate_file(path, POM_XML_TEMPLATE, language_name, &generate_opts)
+        })?;
+
+        missing_path(bindings_dir.join("java"), create_dir)?.apply(|path| {
+            missing_path(path.join("main"), create_dir)?.apply(|path| {
+                let package_path = generate_opts
+                    .namespace
+                    .unwrap_or("io.github.treesitter")
+                    .replace(['-', '_'], "")
+                    .split('.')
+                    .fold(path.to_path_buf(), |path, dir| path.join(dir))
+                    .join("jtreesitter")
+                    .join(language_name.to_lowercase().replace('_', ""));
+                missing_path(package_path, create_dir)?.apply(|path| {
+                    missing_path(path.join(format!("{class_name}.java")), |path| {
+                        generate_file(path, BINDING_JAVA_TEMPLATE, language_name, &generate_opts)
+                    })?;
+
+                    Ok(())
+                })?;
+
+                Ok(())
+            })?;
+
+            missing_path(path.join("test"), create_dir)?.apply(|path| {
+                missing_path(path.join(format!("{class_name}Test.java")), |path| {
+                    generate_file(path, TEST_JAVA_TEMPLATE, language_name, &generate_opts)
+                })?;
+
+                Ok(())
+            })?;
+
+            Ok(())
+        })?;
+    }
 
     Ok(())
 }
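A note on the hunk above: the `fold` turns the configured namespace into the Java package directory, with dashes and underscores stripped. A minimal, hypothetical sketch of the expected mapping, assuming a language named "my_lang" and the default namespace (this is an illustration, not code from the change):

    use std::path::PathBuf;

    fn main() {
        // Mirrors the fold in generate_grammar_files: namespace segments
        // become nested directories under the Java source root.
        let package_path = "io.github.tree-sitter"
            .replace(['-', '_'], "")            // "io.github.treesitter"
            .split('.')
            .fold(PathBuf::from("bindings/java/main"), |p, dir| p.join(dir))
            .join("jtreesitter")
            .join("my_lang".to_lowercase().replace('_', "")); // "mylang"
        // -> bindings/java/main/io/github/treesitter/jtreesitter/mylang
        println!("{}", package_path.display());
    }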
@@ -961,6 +1192,15 @@ fn generate_file(
 ) -> Result<()> {
     let filename = path.file_name().unwrap().to_str().unwrap();
 
+    let lower_parser_name = if path
+        .extension()
+        .is_some_and(|e| e.eq_ignore_ascii_case("java"))
+    {
+        language_name.to_snake_case().replace('_', "")
+    } else {
+        language_name.to_snake_case()
+    };
+
     let mut replacement = template
         .replace(
             CAMEL_PARSER_NAME_PLACEHOLDER,

@@ -974,14 +1214,11 @@ fn generate_file(
             UPPER_PARSER_NAME_PLACEHOLDER,
             &language_name.to_shouty_snake_case(),
         )
-        .replace(
-            LOWER_PARSER_NAME_PLACEHOLDER,
-            &language_name.to_snake_case(),
-        )
         .replace(
             KEBAB_PARSER_NAME_PLACEHOLDER,
             &language_name.to_kebab_case(),
         )
+        .replace(LOWER_PARSER_NAME_PLACEHOLDER, &lower_parser_name)
         .replace(PARSER_NAME_PLACEHOLDER, language_name)
         .replace(CLI_VERSION_PLACEHOLDER, CLI_VERSION)
         .replace(RUST_BINDING_VERSION_PLACEHOLDER, RUST_BINDING_VERSION)
@@ -990,7 +1227,20 @@ fn generate_file(
             PARSER_VERSION_PLACEHOLDER,
             &generate_opts.version.to_string(),
         )
-        .replace(PARSER_CLASS_NAME_PLACEHOLDER, generate_opts.class_name);
+        .replace(PARSER_CLASS_NAME_PLACEHOLDER, generate_opts.class_name)
+        .replace(
+            HIGHLIGHTS_QUERY_PATH_PLACEHOLDER,
+            generate_opts.highlights_query_path,
+        )
+        .replace(
+            INJECTIONS_QUERY_PATH_PLACEHOLDER,
+            generate_opts.injections_query_path,
+        )
+        .replace(
+            LOCALS_QUERY_PATH_PLACEHOLDER,
+            generate_opts.locals_query_path,
+        )
+        .replace(TAGS_QUERY_PATH_PLACEHOLDER, generate_opts.tags_query_path);
 
     if let Some(name) = generate_opts.author_name {
         replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER, name);

@@ -1008,6 +1258,9 @@ fn generate_file(
                 "Cargo.toml" => {
                     replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_RS, "");
                 }
+                "pom.xml" => {
+                    replacement = replacement.replace(AUTHOR_NAME_PLACEHOLDER_JAVA, "");
+                }
                 _ => {}
             }
         }
@@ -1033,30 +1286,52 @@ fn generate_file(
                 "Cargo.toml" => {
                     replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_RS, "");
                 }
+                "pom.xml" => {
+                    replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER_JAVA, "");
+                }
                 _ => {}
             }
         }
 
-        if filename == "package.json" {
-            if let Some(url) = generate_opts.author_url {
+        match (generate_opts.author_url, filename) {
+            (Some(url), "package.json" | "pom.xml") => {
                 replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER, url);
-            } else {
+            }
+            (None, "package.json") => {
                 replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER_JS, "");
             }
+            (None, "pom.xml") => {
+                replacement = replacement.replace(AUTHOR_URL_PLACEHOLDER_JAVA, "");
+            }
+            _ => {}
         }
 
         if generate_opts.author_name.is_none()
             && generate_opts.author_email.is_none()
            && generate_opts.author_url.is_none()
-            && filename == "package.json"
         {
-            if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_JS) {
-                if let Some(end_idx) = replacement[start_idx..]
-                    .find("},")
-                    .map(|i| i + start_idx + 2)
-                {
-                    replacement.replace_range(start_idx..end_idx, "");
+            match filename {
+                "package.json" => {
+                    if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_JS) {
+                        if let Some(end_idx) = replacement[start_idx..]
+                            .find("},")
+                            .map(|i| i + start_idx + 2)
+                        {
+                            replacement.replace_range(start_idx..end_idx, "");
+                        }
+                    }
+                }
+                "pom.xml" => {
+                    if let Some(start_idx) = replacement.find(AUTHOR_BLOCK_JAVA) {
+                        if let Some(end_idx) = replacement[start_idx..]
+                            .find("</developer>")
+                            .map(|i| i + start_idx + 12)
+                        {
+                            replacement.replace_range(start_idx..end_idx, "");
+                        }
+                    }
+                }
+                _ => {}
             }
         } else if generate_opts.author_name.is_none() && generate_opts.author_email.is_none() {
             match filename {
@@ -1137,6 +1412,19 @@ fn generate_file(
         );
     }
 
+    if let Some(namespace) = generate_opts.namespace {
+        replacement = replacement
+            .replace(
+                PARSER_NS_CLEANED_PLACEHOLDER,
+                &namespace.replace(['-', '_'], ""),
+            )
+            .replace(PARSER_NS_PLACEHOLDER, namespace);
+    } else {
+        replacement = replacement
+            .replace(PARSER_NS_CLEANED_PLACEHOLDER, "io.github.treesitter")
+            .replace(PARSER_NS_PLACEHOLDER, "io.github.tree-sitter");
+    }
+
     if let Some(funding_url) = generate_opts.funding {
         match filename {
             "pyproject.toml" | "package.json" => {
@@ -20,15 +20,16 @@ use tree_sitter_cli::{
         LOG_GRAPH_ENABLED, START_SEED,
     },
     highlight::{self, HighlightOptions},
-    init::{generate_grammar_files, JsonConfigOpts},
+    init::{generate_grammar_files, JsonConfigOpts, TREE_SITTER_JSON_SCHEMA},
     input::{get_input, get_tmp_source_file, CliInput},
     logger,
     parse::{self, ParseDebugType, ParseFileOptions, ParseOutput, ParseTheme},
-    playground, query,
+    playground,
+    query::{self, QueryFileOptions},
     tags::{self, TagsOptions},
-    test::{self, TestOptions, TestStats},
-    test_highlight, test_tags, util, version,
-    version::BumpLevel,
+    test::{self, TestOptions, TestStats, TestSummary},
+    test_highlight, test_tags, util,
+    version::{self, BumpLevel},
     wasm,
 };
 use tree_sitter_config::Config;
@@ -88,17 +89,6 @@ struct Init {
     pub grammar_path: Option<PathBuf>,
 }
 
-#[derive(Clone, Debug, Default, ValueEnum, PartialEq, Eq)]
-enum GenerationEmit {
-    /// Generate `grammar.json` and `node-types.json`
-    Json,
-    /// Generate `parser.c` and related files
-    #[default]
-    Parser,
-    /// Compile to a library
-    Lib,
-}
-
 #[derive(Args)]
+#[command(alias = "gen", alias = "g")]
 struct Generate {

@@ -121,28 +111,38 @@ struct Generate {
         )
     )]
     pub abi_version: Option<String>,
-    /// What generated files to emit
+    /// Only generate `grammar.json` and `node-types.json`
     #[arg(long)]
-    #[clap(value_enum, default_value_t=GenerationEmit::Parser)]
-    pub emit: GenerationEmit,
-    /// Deprecated: use --emit=lib.
-    #[arg(long, short = 'b', conflicts_with = "emit")]
+    pub no_parser: bool,
+    /// Deprecated: use the `build` command
+    #[arg(long, short = 'b')]
     pub build: bool,
-    /// Compile a parser in debug mode
+    /// Deprecated: use the `build` command
     #[arg(long, short = '0')]
     pub debug_build: bool,
-    /// The path to the directory containing the parser library
+    /// Deprecated: use the `build` command
     #[arg(long, value_name = "PATH")]
     pub libdir: Option<PathBuf>,
     /// The path to output the generated source files
     #[arg(long, short, value_name = "DIRECTORY")]
     pub output: Option<PathBuf>,
     /// Produce a report of the states for the given rule, use `-` to report every rule
-    #[arg(long)]
+    #[arg(long, conflicts_with = "json", conflicts_with = "json_summary")]
     pub report_states_for_rule: Option<String>,
-    /// Report conflicts in a JSON format
-    #[arg(long)]
+    /// Deprecated: use --json-summary
+    #[arg(
+        long,
+        conflicts_with = "json_summary",
+        conflicts_with = "report_states_for_rule"
+    )]
     pub json: bool,
+    /// Report conflicts in a JSON format
+    #[arg(
+        long,
+        conflicts_with = "json",
+        conflicts_with = "report_states_for_rule"
+    )]
+    pub json_summary: bool,
     /// The name or path of the JavaScript runtime to use for generating parsers
     #[cfg(not(feature = "qjs-rt"))]
     #[arg(
@@ -223,7 +223,7 @@ struct Parse {
     #[arg(long, short = 'D')]
     pub debug_graph: bool,
     /// Compile parsers to Wasm instead of native dynamic libraries
-    #[arg(long)]
+    #[arg(long, hide = cfg!(not(feature = "wasm")))]
     pub wasm: bool,
     /// Output the parse data with graphviz dot
     #[arg(long = "dot")]

@@ -235,7 +235,7 @@ struct Parse {
     #[arg(long = "cst", short = 'c')]
     pub output_cst: bool,
     /// Show parsing statistic
-    #[arg(long, short)]
+    #[arg(long, short, conflicts_with = "json", conflicts_with = "json_summary")]
     pub stat: bool,
     /// Interrupt the parsing process by timeout (µs)
     #[arg(long)]

@@ -260,9 +260,12 @@ struct Parse {
     /// Open `log.html` in the default browser, if `--debug-graph` is supplied
     #[arg(long)]
     pub open_log: bool,
-    /// Output parsing results in a JSON format
-    #[arg(long, short = 'j')]
+    /// Deprecated: use --json-summary
+    #[arg(long, conflicts_with = "json_summary", conflicts_with = "stat")]
     pub json: bool,
+    /// Output parsing results in a JSON format
+    #[arg(long, short = 'j', conflicts_with = "json", conflicts_with = "stat")]
+    pub json_summary: bool,
     /// The path to an alternative config.json file
     #[arg(long)]
     pub config_path: Option<PathBuf>,
@@ -320,7 +323,7 @@ struct Test {
     #[arg(long, short = 'D')]
     pub debug_graph: bool,
     /// Compile parsers to Wasm instead of native dynamic libraries
-    #[arg(long)]
+    #[arg(long, hide = cfg!(not(feature = "wasm")))]
     pub wasm: bool,
     /// Open `log.html` in the default browser, if `--debug-graph` is supplied
     #[arg(long)]

@@ -340,6 +343,9 @@ struct Test {
     /// Show only the pass-fail overview tree
     #[arg(long)]
     pub overview_only: bool,
+    /// Output the test summary in a JSON format
+    #[arg(long)]
+    pub json_summary: bool,
 }
 
 #[derive(Args)]

@@ -442,6 +448,14 @@ struct Query {
     /// The range of rows in which the query will be executed
     #[arg(long)]
     pub row_range: Option<String>,
+    /// The range of byte offsets in which the query will be executed. Only the matches that are fully contained within the provided
+    /// byte range will be returned.
+    #[arg(long)]
+    pub containing_byte_range: Option<String>,
+    /// The range of rows in which the query will be executed. Only the matches that are fully contained within the provided row range
+    /// will be returned.
+    #[arg(long)]
+    pub containing_row_range: Option<String>,
     /// Select a language by the scope instead of a file extension
     #[arg(long)]
     pub scope: Option<String>,
@@ -583,6 +597,20 @@ pub enum Shell {
     Nushell,
 }
 
+/// Complete `action` if the wasm feature is enabled, otherwise return an error
+macro_rules! checked_wasm {
+    ($action:block) => {
+        #[cfg(feature = "wasm")]
+        {
+            $action
+        }
+        #[cfg(not(feature = "wasm"))]
+        {
+            Err(anyhow!("--wasm flag specified, but this build of tree-sitter-cli does not include the wasm feature"))?;
+        }
+    };
+}
+
 impl InitConfig {
     fn run() -> Result<()> {
         if let Ok(Some(config_path)) = Config::find_config_file() {
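For readers unfamiliar with this cfg trick: the macro expands to two blocks, and the compiler keeps only the one whose `#[cfg]` matches, so the error path costs nothing in feature-enabled builds. A minimal, self-contained sketch of the same shape, assuming `anyhow` and a hypothetical `demo` feature name (not from the change itself):

    use anyhow::{anyhow, Result};

    // Same pattern as `checked_wasm!`: run the block when the feature is
    // compiled in, otherwise return an error through `?`.
    macro_rules! checked_feature {
        ($action:block) => {
            #[cfg(feature = "demo")]
            {
                $action
            }
            #[cfg(not(feature = "demo"))]
            {
                Err(anyhow!("feature not compiled in"))?;
            }
        };
    }

    fn run() -> Result<()> {
        checked_feature!({
            println!("feature path");
        });
        Ok(())
    }

    fn main() {
        println!("{:?}", run());
    }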
@@ -744,6 +772,14 @@ impl Init {
                 .map(|e| Some(e.trim().to_string()))
         };
 
+        let namespace = || {
+            Input::<String>::with_theme(&ColorfulTheme::default())
+                .with_prompt("Package namespace")
+                .default("io.github.tree-sitter".to_string())
+                .allow_empty(true)
+                .interact()
+        };
+
         let bindings = || {
             let languages = Bindings::default().languages();
 

@@ -773,6 +809,7 @@ impl Init {
                 "author",
                 "email",
                 "url",
+                "namespace",
                 "bindings",
                 "exit",
             ];

@@ -793,6 +830,7 @@ impl Init {
                 "author" => opts.author = author()?,
                 "email" => opts.email = email()?,
                 "url" => opts.url = url()?,
+                "namespace" => opts.namespace = Some(namespace()?),
                 "bindings" => opts.bindings = bindings()?,
                 "exit" => break,
                 _ => unreachable!(),
@@ -829,10 +867,26 @@ impl Init {
 
             (opts.name.clone(), Some(opts))
         } else {
-            let mut json = serde_json::from_str::<TreeSitterJSON>(
-                &fs::read_to_string(current_dir.join("tree-sitter.json"))
-                    .with_context(|| "Failed to read tree-sitter.json")?,
-            )?;
+            let old_config = fs::read_to_string(current_dir.join("tree-sitter.json"))
+                .with_context(|| "Failed to read tree-sitter.json")?;
+
+            let mut json = serde_json::from_str::<TreeSitterJSON>(&old_config)?;
+            if json.schema.is_none() {
+                json.schema = Some(TREE_SITTER_JSON_SCHEMA.to_string());
+            }
+
+            let new_config = format!("{}\n", serde_json::to_string_pretty(&json)?);
+            // Write the re-serialized config back, as newly added optional boolean fields
+            // will be included with explicit `false`s rather than implict `null`s
+            if self.update && !old_config.trim().eq(new_config.trim()) {
+                info!("Updating tree-sitter.json");
+                fs::write(
+                    current_dir.join("tree-sitter.json"),
+                    serde_json::to_string_pretty(&json)?,
+                )
+                .with_context(|| "Failed to write tree-sitter.json")?;
+            }
 
             (json.grammars.swap_remove(0).name, None)
         };
@@ -862,9 +916,13 @@ impl Generate {
                 version.parse().expect("invalid abi version flag")
             }
         });
-        if self.build {
-            warn!("--build is deprecated, use --emit=lib instead");
-        }
+
+        let json_summary = if self.json {
+            warn!("--json is deprecated, use --json-summary instead");
+            true
+        } else {
+            self.json_summary
+        };
 
         if let Err(err) = tree_sitter_generate::generate_parser_in_directory(
             current_dir,

@@ -873,14 +931,14 @@ impl Generate {
             abi_version,
             self.report_states_for_rule.as_deref(),
             self.js_runtime.as_deref(),
-            self.emit != GenerationEmit::Json,
+            !self.no_parser,
             if self.disable_optimizations {
                 OptLevel::empty()
             } else {
                 OptLevel::default()
             },
         ) {
-            if self.json {
+            if json_summary {
                 eprintln!("{}", serde_json::to_string_pretty(&err)?);
                 // Exit early to prevent errors from being printed a second time in the caller
                 std::process::exit(1);

@@ -889,7 +947,8 @@ impl Generate {
                 Err(anyhow!(err.to_string())).with_context(|| "Error when generating parser")?;
             }
         }
-        if self.emit == GenerationEmit::Lib || self.build {
+        if self.build {
+            warn!("--build is deprecated, use the `build` command");
             if let Some(path) = self.libdir {
                 loader = loader::Loader::with_parser_lib_path(path);
             }
@@ -912,11 +971,21 @@ impl Build {
         } else {
             let output_path = if let Some(ref path) = self.output {
                 let path = Path::new(path);
-                if path.is_absolute() {
+                let full_path = if path.is_absolute() {
                     path.to_path_buf()
                 } else {
                     current_dir.join(path)
-                }
+                };
+                let parent_path = full_path
+                    .parent()
+                    .context("Output path must have a parent")?;
+                let name = full_path
+                    .file_name()
+                    .context("Ouput path must have a filename")?;
+                fs::create_dir_all(parent_path).context("Failed to create output path")?;
+                let mut canon_path = parent_path.canonicalize().context("Invalid output path")?;
+                canon_path.push(name);
+                canon_path
             } else {
                 let file_name = grammar_path
                     .file_stem()

@@ -939,12 +1008,9 @@ impl Build {
 
             loader.force_rebuild(true);
 
-            let config = Config::load(None)?;
-            let loader_config = config.get()?;
-            loader.find_all_languages(&loader_config).unwrap();
             loader
                 .compile_parser_at_path(&grammar_path, output_path, flags)
-                .unwrap();
+                .context("Failed to compile parser")?;
         }
         Ok(())
     }
@@ -954,13 +1020,19 @@ impl Parse {
     fn run(self, mut loader: loader::Loader, current_dir: &Path) -> Result<()> {
         let config = Config::load(self.config_path)?;
         let color = env::var("NO_COLOR").map_or(true, |v| v != "1");
+        let json_summary = if self.json {
+            warn!("--json is deprecated, use --json-summary instead");
+            true
+        } else {
+            self.json_summary
+        };
         let output = if self.output_dot {
             ParseOutput::Dot
         } else if self.output_xml {
             ParseOutput::Xml
         } else if self.output_cst {
             ParseOutput::Cst
-        } else if self.quiet || self.json {
+        } else if self.quiet || json_summary {
             ParseOutput::Quiet
         } else {
             ParseOutput::Normal

@@ -991,13 +1063,14 @@ impl Parse {
         loader.debug_build(self.debug_build);
         loader.force_rebuild(self.rebuild || self.grammar_path.is_some());
 
-        #[cfg(feature = "wasm")]
         if self.wasm {
-            let engine = tree_sitter::wasmtime::Engine::default();
-            parser
-                .set_wasm_store(tree_sitter::WasmStore::new(&engine).unwrap())
-                .unwrap();
-            loader.use_wasm(&engine);
+            checked_wasm!({
+                let engine = tree_sitter::wasmtime::Engine::default();
+                parser
+                    .set_wasm_store(tree_sitter::WasmStore::new(&engine).unwrap())
+                    .unwrap();
+                loader.use_wasm(&engine);
+            });
         }
 
         let timeout = self.timeout.unwrap_or_default();
@@ -1034,7 +1107,7 @@ impl Parse {
 
         let mut update_stats = |stats: &mut parse::ParseStats| {
             let parse_result = stats.parse_summaries.last().unwrap();
-            if should_track_stats {
+            if should_track_stats || json_summary {
                 stats.cumulative_stats.total_parses += 1;
                 if parse_result.successful {
                     stats.cumulative_stats.successful_parses += 1;

@@ -1072,13 +1145,13 @@ impl Parse {
                     let path = Path::new(&path);
                     let language = loader
                         .select_language(
-                            path,
+                            Some(path),
                             current_dir,
                             self.scope.as_deref(),
                             lib_info.as_ref(),
                         )
                         .with_context(|| {
-                            anyhow!("Failed to load langauge for path \"{}\"", path.display())
+                            anyhow!("Failed to load language for path \"{}\"", path.display())
                         })?;
 
                     parse::parse_file_at_path(

@@ -1103,7 +1176,12 @@ impl Parse {
 
                 let language = if let Some(ref lib_path) = self.lib_path {
                     &loader
-                        .select_language(lib_path, current_dir, None, lib_info.as_ref())
+                        .select_language(
+                            None,
+                            current_dir,
+                            self.scope.as_deref(),
+                            lib_info.as_ref(),
+                        )
                         .with_context(|| {
                             anyhow!(
                                 "Failed to load language for path \"{}\"",

@@ -1137,8 +1215,12 @@ impl Parse {
 
                 let path = get_tmp_source_file(&contents)?;
                 let name = "stdin";
-                let language =
-                    loader.select_language(&path, current_dir, None, lib_info.as_ref())?;
+                let language = loader.select_language(
+                    None,
+                    current_dir,
+                    self.scope.as_deref(),
+                    lib_info.as_ref(),
+                )?;
 
                 parse::parse_file_at_path(
                     &mut parser,

@@ -1156,7 +1238,7 @@ impl Parse {
         if should_track_stats {
             println!("\n{}", stats.cumulative_stats);
         }
-        if self.json {
+        if json_summary {
             println!("{}", serde_json::to_string_pretty(&stats)?);
         }
 
@@ -1168,6 +1250,28 @@ impl Parse {
     }
 }
 
+/// In case an error is encountered, prints out the contents of `test_summary` and
+/// propagates the error
+fn check_test(
+    test_result: Result<()>,
+    test_summary: &TestSummary,
+    json_summary: bool,
+) -> Result<()> {
+    if let Err(e) = test_result {
+        if json_summary {
+            let json_summary = serde_json::to_string_pretty(test_summary)
+                .expect("Failed to encode summary to JSON");
+            println!("{json_summary}");
+        } else {
+            println!("{test_summary}");
+        }
+
+        Err(e)?;
+    }
+
+    Ok(())
+}
+
 impl Test {
     fn run(self, mut loader: loader::Loader, current_dir: &Path) -> Result<()> {
         let config = Config::load(self.config_path)?;
@@ -1179,13 +1283,14 @@ impl Test {
 
         let mut parser = Parser::new();
 
-        #[cfg(feature = "wasm")]
         if self.wasm {
-            let engine = tree_sitter::wasmtime::Engine::default();
-            parser
-                .set_wasm_store(tree_sitter::WasmStore::new(&engine).unwrap())
-                .unwrap();
-            loader.use_wasm(&engine);
+            checked_wasm!({
+                let engine = tree_sitter::wasmtime::Engine::default();
+                parser
+                    .set_wasm_store(tree_sitter::WasmStore::new(&engine).unwrap())
+                    .unwrap();
+                loader.use_wasm(&engine);
+            });
         }
 
         if self.lib_path.is_none() && self.lang_name.is_some() {

@@ -1196,7 +1301,7 @@ impl Test {
             let lib_info =
                 get_lib_info(self.lib_path.as_ref(), self.lang_name.as_ref(), current_dir);
             &loader
-                .select_language(lib_path, current_dir, None, lib_info.as_ref())
+                .select_language(None, current_dir, None, lib_info.as_ref())
                 .with_context(|| {
                     anyhow!(
                         "Failed to load language for path \"{}\"",

@@ -1212,15 +1317,18 @@ impl Test {
         parser.set_language(language)?;
 
         let test_dir = current_dir.join("test");
-        let mut stats = parse::Stats::default();
+        let mut test_summary = TestSummary::new(
+            color,
+            stat,
+            self.update,
+            self.overview_only,
+            self.json_summary,
+        );
 
         // Run the corpus tests. Look for them in `test/corpus`.
         let test_corpus_dir = test_dir.join("corpus");
         if test_corpus_dir.is_dir() {
-            let mut output = String::new();
-            let mut rates = Vec::new();
-            let mut opts = TestOptions {
-                output: &mut output,
+            let opts = TestOptions {
                 path: test_corpus_dir,
                 debug: self.debug,
                 debug_graph: self.debug_graph,
@@ -1231,51 +1339,67 @@ impl Test {
                 open_log: self.open_log,
                 languages: languages.iter().map(|(l, n)| (n.as_str(), l)).collect(),
-                color,
-                test_num: 1,
-                parse_rates: &mut rates,
-                stat_display: stat,
-                stats: &mut stats,
                 show_fields: self.show_fields,
-                overview_only: self.overview_only,
             };
 
-            test::run_tests_at_path(&mut parser, &mut opts)?;
-            println!("\n{stats}");
+            check_test(
+                test::run_tests_at_path(&mut parser, &opts, &mut test_summary),
+                &test_summary,
+                self.json_summary,
+            )?;
+            test_summary.test_num = 1;
         }
 
         // Check that all of the queries are valid.
-        test::check_queries_at_path(language, &current_dir.join("queries"))?;
+        let query_dir = current_dir.join("queries");
+        check_test(
+            test::check_queries_at_path(language, &query_dir),
+            &test_summary,
+            self.json_summary,
+        )?;
+        test_summary.test_num = 1;
 
         // Run the syntax highlighting tests.
         let test_highlight_dir = test_dir.join("highlight");
         if test_highlight_dir.is_dir() {
             let mut highlighter = Highlighter::new();
             highlighter.parser = parser;
-            test_highlight::test_highlights(
-                &loader,
-                &config.get()?,
-                &mut highlighter,
-                &test_highlight_dir,
-                color,
+            check_test(
+                test_highlight::test_highlights(
+                    &loader,
+                    &config.get()?,
+                    &mut highlighter,
+                    &test_highlight_dir,
+                    &mut test_summary,
+                ),
+                &test_summary,
+                self.json_summary,
             )?;
             parser = highlighter.parser;
+            test_summary.test_num = 1;
         }
 
         let test_tag_dir = test_dir.join("tags");
         if test_tag_dir.is_dir() {
             let mut tags_context = TagsContext::new();
             tags_context.parser = parser;
-            test_tags::test_tags(
-                &loader,
-                &config.get()?,
-                &mut tags_context,
-                &test_tag_dir,
-                color,
+            check_test(
+                test_tags::test_tags(
+                    &loader,
+                    &config.get()?,
+                    &mut tags_context,
+                    &test_tag_dir,
+                    &mut test_summary,
+                ),
+                &test_summary,
+                self.json_summary,
             )?;
+            test_summary.test_num = 1;
         }
 
         // For the rest of the queries, find their tests and run them
-        for entry in walkdir::WalkDir::new(current_dir.join("queries"))
+        for entry in walkdir::WalkDir::new(&query_dir)
             .into_iter()
             .filter_map(|e| e.ok())
             .filter(|e| e.file_type().is_file())
@@ -1298,34 +1422,48 @@ impl Test {
                 })
                 .collect::<Vec<_>>();
             if !entries.is_empty() {
-                println!("{stem}:");
+                test_summary.query_results.add_group(stem);
             }
 
-            for entry in entries {
+            test_summary.test_num = 1;
+            let opts = QueryFileOptions::default();
+            for entry in &entries {
                 let path = entry.path();
-                query::query_file_at_path(
-                    language,
-                    path,
-                    &path.display().to_string(),
-                    path,
-                    false,
-                    None,
-                    None,
-                    true,
-                    false,
-                    false,
-                    false,
+                check_test(
+                    query::query_file_at_path(
+                        language,
+                        path,
+                        &path.display().to_string(),
+                        path,
+                        &opts,
+                        Some(&mut test_summary),
+                    ),
+                    &test_summary,
+                    self.json_summary,
                 )?;
             }
+            if !entries.is_empty() {
+                test_summary.query_results.pop_traversal();
+            }
         }
         }
+        test_summary.test_num = 1;
+
+        if self.json_summary {
+            let json_summary = serde_json::to_string_pretty(&test_summary)
+                .expect("Failed to encode test summary to JSON");
+            println!("{json_summary}");
+        } else {
+            println!("{test_summary}");
+        }
 
         Ok(())
     }
 }
 
 impl Version {
     fn run(self, current_dir: PathBuf) -> Result<()> {
-        version::Version::new(self.version, current_dir, self.bump).run()
+        Ok(version::Version::new(self.version, current_dir, self.bump).run()?)
     }
 }
 
@@ -1344,7 +1482,7 @@ impl Fuzz {
             let lang_name = lib_info.1.to_string();
             &(
                 loader
-                    .select_language(lib_path, current_dir, None, Some(&lib_info))
+                    .select_language(None, current_dir, None, Some(&lib_info))
                     .with_context(|| {
                         anyhow!(
                             "Failed to load language for path \"{}\"",

@@ -1389,18 +1527,11 @@ impl Query {
         loader.find_all_languages(&loader_config)?;
         let query_path = Path::new(&self.query_path);
 
-        let byte_range = self.byte_range.as_ref().and_then(|range| {
-            let mut parts = range.split(':');
-            let start = parts.next()?.parse().ok()?;
-            let end = parts.next().unwrap().parse().ok()?;
-            Some(start..end)
-        });
-        let point_range = self.row_range.as_ref().and_then(|range| {
-            let mut parts = range.split(':');
-            let start = parts.next()?.parse().ok()?;
-            let end = parts.next().unwrap().parse().ok()?;
-            Some(Point::new(start, 0)..Point::new(end, 0))
-        });
+        let byte_range = parse_range(&self.byte_range, |x| x)?;
+        let point_range = parse_range(&self.row_range, |row| Point::new(row, 0))?;
+        let containing_byte_range = parse_range(&self.containing_byte_range, |x| x)?;
+        let containing_point_range =
+            parse_range(&self.containing_row_range, |row| Point::new(row, 0))?;
 
         let cancellation_flag = util::cancel_on_signal();
 
@@ -1419,25 +1550,30 @@ impl Query {
         match input {
             CliInput::Paths(paths) => {
                 let language = loader.select_language(
-                    Path::new(&paths[0]),
+                    Some(Path::new(&paths[0])),
                     current_dir,
                     self.scope.as_deref(),
                     lib_info.as_ref(),
                 )?;
 
+                let opts = QueryFileOptions {
+                    ordered_captures: self.captures,
+                    byte_range,
+                    point_range,
+                    containing_byte_range,
+                    containing_point_range,
+                    quiet: self.quiet,
+                    print_time: self.time,
+                    stdin: false,
+                };
                 for path in paths {
                     query::query_file_at_path(
                         &language,
                         &path,
                         &path.display().to_string(),
                         query_path,
-                        self.captures,
-                        byte_range.clone(),
-                        point_range.clone(),
-                        self.test,
-                        self.quiet,
-                        self.time,
-                        false,
+                        &opts,
+                        None,
                     )?;
                 }
             }

@@ -1450,7 +1586,7 @@ impl Query {
             let languages = loader.languages_at_path(current_dir)?;
             let language = if let Some(ref lib_path) = self.lib_path {
                 &loader
-                    .select_language(lib_path, current_dir, None, lib_info.as_ref())
+                    .select_language(None, current_dir, None, lib_info.as_ref())
                     .with_context(|| {
                         anyhow!(
                             "Failed to load language for path \"{}\"",
@@ -1465,19 +1601,17 @@ impl Query {
                         .map(|(l, _)| l.clone())
                         .ok_or_else(|| anyhow!("No language found"))?
                 };
-                query::query_file_at_path(
-                    language,
-                    &path,
-                    &name,
-                    query_path,
-                    self.captures,
+                let opts = QueryFileOptions {
+                    ordered_captures: self.captures,
                     byte_range,
                     point_range,
-                    self.test,
-                    self.quiet,
-                    self.time,
-                    true,
-                )?;
+                    containing_byte_range,
+                    containing_point_range,
+                    quiet: self.quiet,
+                    print_time: self.time,
+                    stdin: true,
+                };
+                query::query_file_at_path(language, &path, &name, query_path, &opts, None)?;
                 fs::remove_file(path)?;
             }
             CliInput::Stdin(contents) => {

@@ -1486,20 +1620,18 @@ impl Query {
 
                 let path = get_tmp_source_file(&contents)?;
                 let language =
-                    loader.select_language(&path, current_dir, None, lib_info.as_ref())?;
-                query::query_file_at_path(
-                    &language,
-                    &path,
-                    "stdin",
-                    query_path,
-                    self.captures,
+                    loader.select_language(None, current_dir, None, lib_info.as_ref())?;
+                let opts = QueryFileOptions {
+                    ordered_captures: self.captures,
                     byte_range,
                     point_range,
-                    self.test,
-                    self.quiet,
-                    self.time,
-                    true,
-                )?;
+                    containing_byte_range,
+                    containing_point_range,
+                    quiet: self.quiet,
+                    print_time: self.time,
+                    stdin: true,
+                };
+                query::query_file_at_path(&language, &path, "stdin", query_path, &opts, None)?;
                 fs::remove_file(path)?;
             }
         }
|
|||
let loader_config = config.get()?;
|
||||
loader.find_all_languages(&loader_config)?;
|
||||
loader.force_rebuild(self.rebuild || self.grammar_path.is_some());
|
||||
let languages = loader.languages_at_path(current_dir)?;
|
||||
|
||||
let cancellation_flag = util::cancel_on_signal();
|
||||
|
||||
|
|
@ -1596,7 +1729,6 @@ impl Highlight {
|
|||
} => {
|
||||
let path = get_tmp_source_file(&contents)?;
|
||||
|
||||
let languages = loader.languages_at_path(current_dir)?;
|
||||
let language = languages
|
||||
.iter()
|
||||
.find(|(_, n)| language_names.contains(&Box::from(n.as_str())))
|
||||
|
|
@ -1627,7 +1759,6 @@ impl Highlight {
|
|||
if let (Some(l), Some(lc)) = (language.clone(), language_configuration) {
|
||||
(l, lc)
|
||||
} else {
|
||||
let languages = loader.languages_at_path(current_dir)?;
|
||||
let language = languages
|
||||
.first()
|
||||
.map(|(l, _)| l.clone())
|
||||
|
|
@@ -2000,3 +2131,32 @@ fn get_lib_info<'a>(
         None
     }
 }
+
+/// Parse a range string of the form "start:end" into an optional Range<T>.
+fn parse_range<T>(
+    range_str: &Option<String>,
+    make: impl Fn(usize) -> T,
+) -> Result<Option<std::ops::Range<T>>> {
+    if let Some(range) = range_str.as_ref() {
+        let err_msg = format!("Invalid range '{range}', expected 'start:end'");
+        let mut parts = range.split(':');
+
+        let Some(part) = parts.next() else {
+            Err(anyhow!(err_msg))?
+        };
+        let Ok(start) = part.parse::<usize>() else {
+            Err(anyhow!(err_msg))?
+        };
+
+        let Some(part) = parts.next() else {
+            Err(anyhow!(err_msg))?
+        };
+        let Ok(end) = part.parse::<usize>() else {
+            Err(anyhow!(err_msg))?
+        };
+
+        Ok(Some(make(start)..make(end)))
+    } else {
+        Ok(None)
+    }
+}
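For orientation, a small hedged sketch of how the new `parse_range` helper behaves inside a `Result`-returning function (the values are illustrative; the closure maps each endpoint into the target type):

    // "start:end" strings become typed ranges via the `make` closure.
    let bytes = parse_range(&Some("0:128".to_string()), |x| x)?;   // Some(0..128)
    let rows = parse_range(&Some("2:10".to_string()), |row| Point::new(row, 0))?;
    // rows spans Point { row: 2, column: 0 }..Point { row: 10, column: 0 }
    let none = parse_range::<usize>(&None, |x| x)?;                // None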
@@ -11,6 +11,7 @@ use anstyle::{AnsiColor, Color, RgbColor};
 use anyhow::{anyhow, Context, Result};
 use clap::ValueEnum;
 use log::info;
+use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use tree_sitter::{
     ffi, InputEdit, Language, LogType, ParseOptions, ParseState, Parser, Point, Range, Tree,

@@ -19,7 +20,7 @@ use tree_sitter::{
 
 use crate::{fuzz::edits::Edit, logger::paint, util};
 
-#[derive(Debug, Default, Serialize)]
+#[derive(Debug, Default, Serialize, JsonSchema)]
 pub struct Stats {
     pub successful_parses: usize,
     pub total_parses: usize,
@@ -230,13 +231,23 @@ impl ParseSummary {
     }
 }
 
-#[derive(Serialize, Debug, Default)]
+#[derive(Serialize, Debug)]
 pub struct ParseStats {
     pub parse_summaries: Vec<ParseSummary>,
     pub cumulative_stats: Stats,
+    pub source_count: usize,
 }
 
+impl Default for ParseStats {
+    fn default() -> Self {
+        Self {
+            parse_summaries: Vec::new(),
+            cumulative_stats: Stats::default(),
+            source_count: 1,
+        }
+    }
+}
+
 #[derive(Serialize, ValueEnum, Debug, Copy, Clone, Default, Eq, PartialEq)]
 pub enum ParseDebugType {
     #[default]
@@ -504,7 +515,6 @@ pub fn parse_file_at_path(
 
     if opts.output == ParseOutput::Cst {
         render_cst(&source_code, &tree, &mut cursor, opts, &mut stdout)?;
-        println!();
     }
 
     if opts.output == ParseOutput::Xml {

@@ -664,10 +674,9 @@ pub fn parse_file_at_path(
             width = max_path_length
         )?;
         if let Some(node) = first_error {
-            let start = node.start_position();
-            let end = node.end_position();
-            let mut node_text = String::new();
-            for c in node.kind().chars() {
+            let node_kind = node.kind();
+            let mut node_text = String::with_capacity(node_kind.len());
+            for c in node_kind.chars() {
                 if let Some(escaped) = escape_invisible(c) {
                     node_text += escaped;
                 } else {

@@ -684,6 +693,9 @@ pub fn parse_file_at_path(
             } else {
                 write!(&mut stdout, "{node_text}")?;
             }
+
+            let start = node.start_position();
+            let end = node.end_position();
             write!(
                 &mut stdout,
                 " [{}, {}] - [{}, {}])",

@@ -772,7 +784,7 @@ pub fn render_cst<'a, 'b: 'a>(
         .map(|(row, col)| (row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1)
         .max()
         .unwrap_or(1);
-    let mut indent_level = 1;
+    let mut indent_level = usize::from(!opts.no_ranges);
     let mut did_visit_children = false;
     let mut in_error = false;
     loop {
@@ -870,35 +882,24 @@ fn write_node_text(
                 0
             };
             let formatted_line = render_line_feed(line, opts);
-            if !opts.no_ranges {
-                write!(
-                    out,
-                    "{}{}{}{}{}{}",
-                    if multiline { "\n" } else { "" },
-                    if multiline {
-                        render_node_range(opts, cursor, is_named, true, total_width, node_range)
-                    } else {
-                        String::new()
-                    },
-                    if multiline {
-                        " ".repeat(indent_level + 1)
-                    } else {
-                        String::new()
-                    },
-                    paint(quote_color, &String::from(quote)),
-                    &paint(color, &render_node_text(&formatted_line)),
-                    paint(quote_color, &String::from(quote)),
-                )?;
-            } else {
-                write!(
-                    out,
-                    "\n{}{}{}{}",
-                    " ".repeat(indent_level + 1),
-                    paint(quote_color, &String::from(quote)),
-                    &paint(color, &render_node_text(&formatted_line)),
-                    paint(quote_color, &String::from(quote)),
-                )?;
-            }
+            write!(
+                out,
+                "{}{}{}{}{}{}",
+                if multiline { "\n" } else { " " },
+                if multiline && !opts.no_ranges {
+                    render_node_range(opts, cursor, is_named, true, total_width, node_range)
+                } else {
+                    String::new()
+                },
+                if multiline {
+                    " ".repeat(indent_level + 1)
+                } else {
+                    String::new()
+                },
+                paint(quote_color, &String::from(quote)),
+                paint(color, &render_node_text(&formatted_line)),
+                paint(quote_color, &String::from(quote)),
+            )?;
         }
     }
 

@@ -952,7 +953,7 @@ fn render_node_range(
 
 fn cst_render_node(
     opts: &ParseFileOptions,
-    cursor: &mut TreeCursor,
+    cursor: &TreeCursor,
     source_code: &[u8],
     out: &mut impl Write,
     total_width: usize,
@@ -998,10 +999,9 @@ fn cst_render_node(
     } else {
         opts.parse_theme.node_kind
     };
-    write!(out, "{}", paint(kind_color, node.kind()),)?;
+    write!(out, "{}", paint(kind_color, node.kind()))?;
 
     if node.child_count() == 0 {
-        write!(out, " ")?;
         // Node text from a pattern or external scanner
         write_node_text(
             opts,
@@ -19,7 +19,8 @@
       --light-scrollbar-track: #f1f1f1;
       --light-scrollbar-thumb: #c1c1c1;
       --light-scrollbar-thumb-hover: #a8a8a8;
+
+      --light-tree-row-bg: #e3f2fd;
 
       --dark-bg: #1d1f21;
       --dark-border: #2d2d2d;
       --dark-text: #c5c8c6;

@@ -28,6 +29,7 @@
       --dark-scrollbar-track: #25282c;
       --dark-scrollbar-thumb: #4a4d51;
       --dark-scrollbar-thumb-hover: #5a5d61;
+      --dark-tree-row-bg: #373737;
 
       --primary-color: #0550ae;
       --primary-color-alpha: rgba(5, 80, 174, 0.1);

@@ -42,6 +44,7 @@
       --text-color: var(--dark-text);
       --panel-bg: var(--dark-panel-bg);
       --code-bg: var(--dark-code-bg);
+      --tree-row-bg: var(--dark-tree-row-bg);
     }
 
     [data-theme="light"] {

@@ -50,6 +53,7 @@
       --text-color: var(--light-text);
       --panel-bg: white;
      --code-bg: white;
+      --tree-row-bg: var(--light-tree-row-bg);
     }
 
     /* Base Styles */

@@ -275,7 +279,7 @@
     }
 
     #output-container a.highlighted {
-      background-color: #d9d9d9;
+      background-color: #cae2ff;
       color: red;
       border-radius: 3px;
       text-decoration: underline;

@@ -346,7 +350,7 @@
     }
 
     & #output-container a.highlighted {
-      background-color: #373b41;
+      background-color: #656669;
       color: red;
     }
 

@@ -373,6 +377,9 @@
         color: var(--dark-text);
       }
     }
+    .tree-row:has(.highlighted) {
+      background-color: var(--tree-row-bg);
+    }
   </style>
 </head>
@@ -6,30 +6,35 @@ use std::{
     time::Instant,
 };
 
+use anstyle::AnsiColor;
 use anyhow::{Context, Result};
 use log::warn;
 use streaming_iterator::StreamingIterator;
 use tree_sitter::{Language, Parser, Point, Query, QueryCursor};
 
 use crate::{
     logger::paint,
     query_testing::{self, to_utf8_point},
+    test::{TestInfo, TestOutcome, TestResult, TestSummary},
 };
 
-#[allow(clippy::too_many_arguments)]
+#[derive(Default)]
+pub struct QueryFileOptions {
+    pub ordered_captures: bool,
+    pub byte_range: Option<Range<usize>>,
+    pub point_range: Option<Range<Point>>,
+    pub containing_byte_range: Option<Range<usize>>,
+    pub containing_point_range: Option<Range<Point>>,
+    pub quiet: bool,
+    pub print_time: bool,
+    pub stdin: bool,
+}
+
 pub fn query_file_at_path(
     language: &Language,
     path: &Path,
     name: &str,
     query_path: &Path,
-    ordered_captures: bool,
-    byte_range: Option<Range<usize>>,
-    point_range: Option<Range<Point>>,
-    should_test: bool,
-    quiet: bool,
-    print_time: bool,
-    stdin: bool,
+    opts: &QueryFileOptions,
+    test_summary: Option<&mut TestSummary>,
 ) -> Result<()> {
     let stdout = io::stdout();
     let mut stdout = stdout.lock();
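A hedged construction sketch for the new options struct (values here are hypothetical; unset fields fall back to `Default`, which the struct derives above, so callers no longer thread seven positional arguments):

    // Only the fields that differ from their defaults need to be spelled out.
    let opts = QueryFileOptions {
        ordered_captures: true,   // iterate captures in order rather than by match
        byte_range: Some(0..256), // restrict the cursor to a byte window
        ..Default::default()      // remaining fields: None / false
    };
    query::query_file_at_path(&language, &path, "stdin", query_path, &opts, None)?;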
@@ -39,19 +44,26 @@ pub fn query_file_at_path(
     let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;
 
     let mut query_cursor = QueryCursor::new();
-    if let Some(range) = byte_range {
-        query_cursor.set_byte_range(range);
+    if let Some(ref range) = opts.byte_range {
+        query_cursor.set_byte_range(range.clone());
     }
-    if let Some(range) = point_range {
-        query_cursor.set_point_range(range);
+    if let Some(ref range) = opts.point_range {
+        query_cursor.set_point_range(range.clone());
     }
+    if let Some(ref range) = opts.containing_byte_range {
+        query_cursor.set_containing_byte_range(range.clone());
+    }
+    if let Some(ref range) = opts.containing_point_range {
+        query_cursor.set_containing_point_range(range.clone());
+    }
 
     let mut parser = Parser::new();
     parser.set_language(language)?;
 
     let mut results = Vec::new();
+    let should_test = test_summary.is_some();
 
-    if !should_test && !stdin {
+    if !should_test && !opts.stdin {
         writeln!(&mut stdout, "{name}")?;
     }
 

@@ -60,12 +72,12 @@ pub fn query_file_at_path(
     let tree = parser.parse(&source_code, None).unwrap();
 
     let start = Instant::now();
-    if ordered_captures {
+    if opts.ordered_captures {
         let mut captures = query_cursor.captures(&query, tree.root_node(), source_code.as_slice());
         while let Some((mat, capture_index)) = captures.next() {
             let capture = mat.captures[*capture_index];
             let capture_name = &query.capture_names()[capture.index as usize];
-            if !quiet && !should_test {
+            if !opts.quiet && !should_test {
                 writeln!(
                     &mut stdout,
                     " pattern: {:>2}, capture: {} - {capture_name}, start: {}, end: {}, text: `{}`",
@@ -76,23 +88,25 @@ pub fn query_file_at_path(
                     capture.node.utf8_text(&source_code).unwrap_or("")
                 )?;
             }
-            results.push(query_testing::CaptureInfo {
-                name: (*capture_name).to_string(),
-                start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
-                end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
-            });
+            if should_test {
+                results.push(query_testing::CaptureInfo {
+                    name: (*capture_name).to_string(),
+                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
+                    end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
+                });
+            }
         }
     } else {
         let mut matches = query_cursor.matches(&query, tree.root_node(), source_code.as_slice());
         while let Some(m) = matches.next() {
-            if !quiet && !should_test {
+            if !opts.quiet && !should_test {
                 writeln!(&mut stdout, " pattern: {}", m.pattern_index)?;
             }
             for capture in m.captures {
                 let start = capture.node.start_position();
                 let end = capture.node.end_position();
                 let capture_name = &query.capture_names()[capture.index as usize];
-                if !quiet && !should_test {
+                if !opts.quiet && !should_test {
                     if end.row == start.row {
                         writeln!(
                             &mut stdout,

@@ -107,38 +121,52 @@ pub fn query_file_at_path(
                         )?;
                     }
                 }
-                results.push(query_testing::CaptureInfo {
-                    name: (*capture_name).to_string(),
-                    start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
-                    end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
-                });
+                if should_test {
+                    results.push(query_testing::CaptureInfo {
+                        name: (*capture_name).to_string(),
+                        start: to_utf8_point(capture.node.start_position(), source_code.as_slice()),
+                        end: to_utf8_point(capture.node.end_position(), source_code.as_slice()),
+                    });
+                }
             }
         }
     }
-    if !query_cursor.did_exceed_match_limit() {
+    if query_cursor.did_exceed_match_limit() {
         warn!("Query exceeded maximum number of in-progress captures!");
     }
     if should_test {
-        let path_name = if stdin {
+        let path_name = if opts.stdin {
             "stdin"
         } else {
             Path::new(&path).file_name().unwrap().to_str().unwrap()
        };
+        // Invariant: `test_summary` will always be `Some` when `should_test` is true
+        let test_summary = test_summary.unwrap();
         match query_testing::assert_expected_captures(&results, path, &mut parser, language) {
             Ok(assertion_count) => {
                 println!(
                     " ✓ {} ({} assertions)",
                     paint(Some(AnsiColor::Green), path_name),
                     assertion_count
                 );
+                test_summary.query_results.add_case(TestResult {
+                    name: path_name.to_string(),
+                    info: TestInfo::AssertionTest {
+                        outcome: TestOutcome::AssertionPassed { assertion_count },
+                        test_num: test_summary.test_num,
+                    },
+                });
             }
             Err(e) => {
                 println!(" ✗ {}", paint(Some(AnsiColor::Red), path_name));
+                test_summary.query_results.add_case(TestResult {
+                    name: path_name.to_string(),
+                    info: TestInfo::AssertionTest {
+                        outcome: TestOutcome::AssertionFailed {
+                            error: e.to_string(),
+                        },
+                        test_num: test_summary.test_num,
+                    },
+                });
                 return Err(e);
             }
         }
     }
-    if print_time {
+    if opts.print_time {
         writeln!(&mut stdout, "{:?}", start.elapsed())?;
     }
 
@@ -3,11 +3,11 @@ root = true
 [*]
 charset = utf-8
 
-[*.{json,toml,yml,gyp}]
+[*.{json,toml,yml,gyp,xml}]
 indent_style = space
 indent_size = 2
 
-[*.js]
+[*.{js,ts}]
 indent_style = space
 indent_size = 2
 

@@ -31,6 +31,10 @@ indent_size = 4
 indent_style = space
 indent_size = 4
 
+[*.java]
+indent_style = space
+indent_size = 4
+
 [*.go]
 indent_style = tab
 indent_size = 8
@ -6,32 +6,33 @@ from ._binding import language
|
|||
|
||||
|
||||
def _get_query(name, file):
|
||||
query = _files(f"{__package__}.queries") / file
|
||||
globals()[name] = query.read_text()
|
||||
try:
|
||||
query = _files(f"{__package__}") / file
|
||||
globals()[name] = query.read_text()
|
||||
except FileNotFoundError:
|
||||
globals()[name] = None
|
||||
return globals()[name]
|
||||
|
||||
|
||||
def __getattr__(name):
|
||||
# NOTE: uncomment these to include any queries that this grammar contains:
|
||||
|
||||
# if name == "HIGHLIGHTS_QUERY":
|
||||
# return _get_query("HIGHLIGHTS_QUERY", "highlights.scm")
|
||||
# if name == "INJECTIONS_QUERY":
|
||||
# return _get_query("INJECTIONS_QUERY", "injections.scm")
|
||||
# if name == "LOCALS_QUERY":
|
||||
# return _get_query("LOCALS_QUERY", "locals.scm")
|
||||
# if name == "TAGS_QUERY":
|
||||
# return _get_query("TAGS_QUERY", "tags.scm")
|
||||
if name == "HIGHLIGHTS_QUERY":
|
||||
return _get_query("HIGHLIGHTS_QUERY", "HIGHLIGHTS_QUERY_PATH")
|
||||
if name == "INJECTIONS_QUERY":
|
||||
return _get_query("INJECTIONS_QUERY", "INJECTIONS_QUERY_PATH")
|
||||
if name == "LOCALS_QUERY":
|
||||
return _get_query("LOCALS_QUERY", "LOCALS_QUERY_PATH")
|
||||
if name == "TAGS_QUERY":
|
||||
return _get_query("TAGS_QUERY", "TAGS_QUERY_PATH")
|
||||
|
||||
raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
|
||||
|
||||
|
||||
__all__ = [
|
||||
"language",
|
||||
# "HIGHLIGHTS_QUERY",
|
||||
# "INJECTIONS_QUERY",
|
||||
# "LOCALS_QUERY",
|
||||
# "TAGS_QUERY",
|
||||
"HIGHLIGHTS_QUERY",
|
||||
"INJECTIONS_QUERY",
|
||||
"LOCALS_QUERY",
|
||||
"TAGS_QUERY",
|
||||
]
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@@ -1,11 +1,17 @@
 from typing import Final
 from typing_extensions import CapsuleType
 
-# NOTE: uncomment these to include any queries that this grammar contains:
+HIGHLIGHTS_QUERY: Final[str] | None
+"""The syntax highlighting query for this grammar."""
 
-# HIGHLIGHTS_QUERY: Final[str]
-# INJECTIONS_QUERY: Final[str]
-# LOCALS_QUERY: Final[str]
-# TAGS_QUERY: Final[str]
+INJECTIONS_QUERY: Final[str] | None
+"""The language injection query for this grammar."""
 
-def language() -> CapsuleType: ...
+LOCALS_QUERY: Final[str] | None
+"""The local variable query for this grammar."""
+
+TAGS_QUERY: Final[str] | None
+"""The symbol tagging query for this grammar."""
+
+def language() -> CapsuleType:
+    """The tree-sitter language function for this grammar."""

crates/cli/src/templates/binding.java (new file, 65 lines)
@@ -0,0 +1,65 @@
+package PARSER_NS_CLEANED.jtreesitter.LOWER_PARSER_NAME;
+
+import java.lang.foreign.*;
+
+public final class PARSER_CLASS_NAME {
+    private static final ValueLayout VOID_PTR =
+        ValueLayout.ADDRESS.withTargetLayout(MemoryLayout.sequenceLayout(Long.MAX_VALUE, ValueLayout.JAVA_BYTE));
+    private static final FunctionDescriptor FUNC_DESC = FunctionDescriptor.of(VOID_PTR);
+    private static final Linker LINKER = Linker.nativeLinker();
+    private static final PARSER_CLASS_NAME INSTANCE = new PARSER_CLASS_NAME();
+
+    private final Arena arena = Arena.ofAuto();
+    private volatile SymbolLookup lookup = null;
+
+    private PARSER_CLASS_NAME() {}
+
+    /**
+     * Get the tree-sitter language for this grammar.
+     */
+    public static MemorySegment language() {
+        if (INSTANCE.lookup == null)
+            INSTANCE.lookup = INSTANCE.findLibrary();
+        return language(INSTANCE.lookup);
+    }
+
+    /**
+     * Get the tree-sitter language for this grammar.
+     *
+     * <strong>The {@linkplain Arena} used in the {@code lookup}
+     * must not be closed while the language is being used.</strong>
+     */
+    public static MemorySegment language(SymbolLookup lookup) {
+        return call(lookup, "tree_sitter_PARSER_NAME");
+    }
+
+    private SymbolLookup findLibrary() {
+        try {
+            var library = System.mapLibraryName("tree-sitter-KEBAB_PARSER_NAME");
+            return SymbolLookup.libraryLookup(library, arena);
+        } catch (IllegalArgumentException ex1) {
+            try {
+                System.loadLibrary("tree-sitter-KEBAB_PARSER_NAME");
+                return SymbolLookup.loaderLookup();
+            } catch (UnsatisfiedLinkError ex2) {
+                ex1.addSuppressed(ex2);
+                throw ex1;
+            }
+        }
+    }
+
+    private static UnsatisfiedLinkError unresolved(String name) {
+        return new UnsatisfiedLinkError("Unresolved symbol: %s".formatted(name));
+    }
+
+    @SuppressWarnings("SameParameterValue")
+    private static MemorySegment call(SymbolLookup lookup, String name) throws UnsatisfiedLinkError {
+        var address = lookup.find(name).orElseThrow(() -> unresolved(name));
+        try {
+            var function = LINKER.downcallHandle(address, FUNC_DESC);
+            return (MemorySegment) function.invokeExact();
+        } catch (Throwable e) {
+            throw new RuntimeException("Call to %s failed".formatted(name), e);
+        }
+    }
+}
@@ -36,4 +36,21 @@ fn main() {
     }
 
     c_config.compile("tree-sitter-KEBAB_PARSER_NAME");
+
+    println!("cargo:rustc-check-cfg=cfg(with_highlights_query)");
+    if !"HIGHLIGHTS_QUERY_PATH".is_empty() && std::path::Path::new("HIGHLIGHTS_QUERY_PATH").exists() {
+        println!("cargo:rustc-cfg=with_highlights_query");
+    }
+    println!("cargo:rustc-check-cfg=cfg(with_injections_query)");
+    if !"INJECTIONS_QUERY_PATH".is_empty() && std::path::Path::new("INJECTIONS_QUERY_PATH").exists() {
+        println!("cargo:rustc-cfg=with_injections_query");
+    }
+    println!("cargo:rustc-check-cfg=cfg(with_locals_query)");
+    if !"LOCALS_QUERY_PATH".is_empty() && std::path::Path::new("LOCALS_QUERY_PATH").exists() {
+        println!("cargo:rustc-cfg=with_locals_query");
+    }
+    println!("cargo:rustc-check-cfg=cfg(with_tags_query)");
+    if !"TAGS_QUERY_PATH".is_empty() && std::path::Path::new("TAGS_QUERY_PATH").exists() {
+        println!("cargo:rustc-cfg=with_tags_query");
+    }
 }
@@ -20,16 +20,19 @@ include(GNUInstallDirs)
 find_program(TREE_SITTER_CLI tree-sitter DOC "Tree-sitter CLI")
 
 add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
+                          "${CMAKE_CURRENT_SOURCE_DIR}/src/node-types.json"
                    DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/grammar.js"
-                   COMMAND "${TREE_SITTER_CLI}" generate grammar.js
-                           --emit=json
+                   COMMAND "${TREE_SITTER_CLI}" generate grammar.js --no-parser
                    WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
                    COMMENT "Generating grammar.json")
 
 add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
                    BYPRODUCTS "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/parser.h"
                               "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/alloc.h"
                               "${CMAKE_CURRENT_SOURCE_DIR}/src/tree_sitter/array.h"
                    DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/src/grammar.json"
                    COMMAND "${TREE_SITTER_CLI}" generate src/grammar.json
-                           --emit=parser --abi=${TREE_SITTER_ABI_VERSION}
+                           --abi=${TREE_SITTER_ABI_VERSION}
                    WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
                    COMMENT "Generating parser.c")
@@ -40,3 +40,7 @@ Package.resolved linguist-generated
 bindings/zig/* linguist-generated
 build.zig linguist-generated
 build.zig.zon linguist-generated
+
+# Java bindings
+pom.xml linguist-generated
+bindings/java/** linguist-generated

@@ -45,3 +45,4 @@ zig-out/
 *.tar.gz
 *.tgz
 *.zip
+*.jar

crates/cli/src/templates/index.d.ts (vendored, 39 lines)
@@ -18,10 +18,43 @@ type NodeInfo =
       children: ChildNode[];
     });
 
-type Language = {
 /**
  * The tree-sitter language object for this grammar.
  *
  * @see {@linkcode https://tree-sitter.github.io/node-tree-sitter/interfaces/Parser.Language.html Parser.Language}
  *
  * @example
  * import Parser from "tree-sitter";
 * import CAMEL_PARSER_NAME from "tree-sitter-KEBAB_PARSER_NAME";
 *
 * const parser = new Parser();
 * parser.setLanguage(CAMEL_PARSER_NAME);
 */
+declare const binding: {
+  /**
+   * The inner language object.
+   * @private
+   */
   language: unknown;
 
   /**
    * The content of the `node-types.json` file for this grammar.
    *
    * @see {@linkplain https://tree-sitter.github.io/tree-sitter/using-parsers/6-static-node-types Static Node Types}
    */
   nodeTypeInfo: NodeInfo[];
 
+  /** The syntax highlighting query for this grammar. */
+  HIGHLIGHTS_QUERY?: string;
+
+  /** The language injection query for this grammar. */
+  INJECTIONS_QUERY?: string;
+
+  /** The local variable query for this grammar. */
+  LOCALS_QUERY?: string;
+
+  /** The symbol tagging query for this grammar. */
+  TAGS_QUERY?: string;
 };
 
-declare const language: Language;
-export = language;
+export default binding;
@@ -1,3 +1,4 @@
+import { readFileSync } from "node:fs";
 import { fileURLToPath } from "node:url";

 const root = fileURLToPath(new URL("../..", import.meta.url));

@@ -8,8 +9,29 @@ const binding = typeof process.versions.bun === "string"
   : (await import("node-gyp-build")).default(root);

 try {
-  const nodeTypes = await import(`${root}/src/node-types.json`, {with: {type: "json"}});
+  const nodeTypes = await import(`${root}/src/node-types.json`, { with: { type: "json" } });
   binding.nodeTypeInfo = nodeTypes.default;
-} catch (_) {}
+} catch { }
+
+const queries = [
+  ["HIGHLIGHTS_QUERY", `${root}/HIGHLIGHTS_QUERY_PATH`],
+  ["INJECTIONS_QUERY", `${root}/INJECTIONS_QUERY_PATH`],
+  ["LOCALS_QUERY", `${root}/LOCALS_QUERY_PATH`],
+  ["TAGS_QUERY", `${root}/TAGS_QUERY_PATH`],
+];
+
+for (const [prop, path] of queries) {
+  Object.defineProperty(binding, prop, {
+    configurable: true,
+    enumerable: true,
+    get() {
+      delete binding[prop];
+      try {
+        binding[prop] = readFileSync(path, "utf8");
+      } catch { }
+      return binding[prop];
+    }
+  });
+}

 export default binding;
@@ -32,12 +32,21 @@ pub const LANGUAGE: LanguageFn = unsafe { LanguageFn::from_raw(tree_sitter_PARSE
 /// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers/6-static-node-types
 pub const NODE_TYPES: &str = include_str!("../../src/node-types.json");

-// NOTE: uncomment these to include any queries that this grammar contains:
+#[cfg(with_highlights_query)]
+/// The syntax highlighting query for this grammar.
+pub const HIGHLIGHTS_QUERY: &str = include_str!("../../HIGHLIGHTS_QUERY_PATH");

-// pub const HIGHLIGHTS_QUERY: &str = include_str!("../../queries/highlights.scm");
-// pub const INJECTIONS_QUERY: &str = include_str!("../../queries/injections.scm");
-// pub const LOCALS_QUERY: &str = include_str!("../../queries/locals.scm");
-// pub const TAGS_QUERY: &str = include_str!("../../queries/tags.scm");
+#[cfg(with_injections_query)]
+/// The language injection query for this grammar.
+pub const INJECTIONS_QUERY: &str = include_str!("../../INJECTIONS_QUERY_PATH");
+
+#[cfg(with_locals_query)]
+/// The local variable query for this grammar.
+pub const LOCALS_QUERY: &str = include_str!("../../LOCALS_QUERY_PATH");
+
+#[cfg(with_tags_query)]
+/// The symbol tagging query for this grammar.
+pub const TAGS_QUERY: &str = include_str!("../../TAGS_QUERY_PATH");

 #[cfg(test)]
 mod tests {
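Together with the build-script probes earlier in this diff, these cfg-gated constants only exist when the corresponding query file actually ships with the grammar. A minimal consumer-side sketch, assuming a hypothetical generated crate named `tree_sitter_mylang` that was built with a `queries/highlights.scm` present:

use tree_sitter::{Parser, Query};

// Sketch only: `tree_sitter_mylang` is a stand-in for any crate generated
// from these templates with a highlights query available.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let language: tree_sitter::Language = tree_sitter_mylang::LANGUAGE.into();
    let mut parser = Parser::new();
    parser.set_language(&language)?;

    // HIGHLIGHTS_QUERY is a plain &str; compiling it validates the query
    // against the grammar.
    let query = Query::new(&language, tree_sitter_mylang::HIGHLIGHTS_QUERY)?;
    println!("highlights query has {} patterns", query.pattern_count());
    Ok(())
}

If the grammar was built without that query file, the `with_highlights_query` cfg is never emitted and the reference above simply fails to compile, rather than panicking at runtime.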
@@ -73,10 +73,10 @@ $(LANGUAGE_NAME).pc: bindings/c/$(LANGUAGE_NAME).pc.in
	-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@

 $(SRC_DIR)/grammar.json: grammar.js
-	$(TS) generate --emit=json $^
+	$(TS) generate --no-parser $^

 $(PARSER): $(SRC_DIR)/grammar.json
-	$(TS) generate --emit=parser $^
+	$(TS) generate $^

 install: all
	install -d '$(DESTDIR)$(DATADIR)'/tree-sitter/queries/KEBAB_PARSER_NAME '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter '$(DESTDIR)$(PCLIBDIR)' '$(DESTDIR)$(LIBDIR)'
@@ -38,11 +38,11 @@
   },
   "devDependencies": {
     "prebuildify": "^6.0.1",
-    "tree-sitter": "^0.22.4",
+    "tree-sitter": "^0.25.0",
     "tree-sitter-cli": "^CLI_VERSION"
   },
   "peerDependencies": {
-    "tree-sitter": "^0.22.4"
+    "tree-sitter": "^0.25.0"
   },
   "peerDependenciesMeta": {
     "tree-sitter": {
154  crates/cli/src/templates/pom.xml  (new file)

@@ -0,0 +1,154 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>PARSER_NS</groupId>
  <artifactId>jtreesitter-KEBAB_PARSER_NAME</artifactId>
  <name>JTreeSitter CAMEL_PARSER_NAME</name>
  <version>PARSER_VERSION</version>
  <description>PARSER_DESCRIPTION</description>
  <url>PARSER_URL</url>
  <licenses>
    <license>
      <name>PARSER_LICENSE</name>
      <url>https://spdx.org/licenses/PARSER_LICENSE.html</url>
    </license>
  </licenses>
  <developers>
    <developer>
      <name>PARSER_AUTHOR_NAME</name>
      <email>PARSER_AUTHOR_EMAIL</email>
      <url>PARSER_AUTHOR_URL</url>
    </developer>
  </developers>
  <scm>
    <url>PARSER_URL</url>
    <connection>scm:git:git://PARSER_URL_STRIPPED.git</connection>
    <developerConnection>scm:git:ssh://PARSER_URL_STRIPPED.git</developerConnection>
  </scm>
  <properties>
    <maven.compiler.release>23</maven.compiler.release>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <maven.deploy.skip>true</maven.deploy.skip>
    <gpg.skip>true</gpg.skip>
    <publish.auto>false</publish.auto>
    <publish.skip>true</publish.skip>
  </properties>
  <dependencies>
    <dependency>
      <groupId>io.github.tree-sitter</groupId>
      <artifactId>jtreesitter</artifactId>
      <version>0.26.0</version>
      <optional>true</optional>
    </dependency>
    <dependency>
      <groupId>org.junit.jupiter</groupId>
      <artifactId>junit-jupiter-api</artifactId>
      <version>6.0.1</version>
      <scope>test</scope>
    </dependency>
  </dependencies>
  <build>
    <sourceDirectory>bindings/java/main</sourceDirectory>
    <testSourceDirectory>bindings/java/test</testSourceDirectory>
    <plugins>
      <plugin>
        <artifactId>maven-surefire-plugin</artifactId>
        <version>3.5.4</version>
        <configuration>
          <reportsDirectory>
            ${project.build.directory}/reports/surefire
          </reportsDirectory>
          <argLine>--enable-native-access=ALL-UNNAMED</argLine>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-javadoc-plugin</artifactId>
        <version>3.12.0</version>
        <executions>
          <execution>
            <goals>
              <goal>jar</goal>
            </goals>
          </execution>
        </executions>
        <configuration>
          <show>public</show>
          <nohelp>true</nohelp>
          <noqualifier>true</noqualifier>
          <doclint>all,-missing</doclint>
        </configuration>
      </plugin>
      <plugin>
        <artifactId>maven-source-plugin</artifactId>
        <version>3.3.1</version>
        <executions>
          <execution>
            <goals>
              <goal>jar-no-fork</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <artifactId>maven-gpg-plugin</artifactId>
        <version>3.2.8</version>
        <executions>
          <execution>
            <phase>verify</phase>
            <goals>
              <goal>sign</goal>
            </goals>
            <configuration>
              <bestPractices>true</bestPractices>
              <gpgArguments>
                <arg>--no-tty</arg>
                <arg>--pinentry-mode</arg>
                <arg>loopback</arg>
              </gpgArguments>
            </configuration>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>io.github.mavenplugins</groupId>
        <artifactId>central-publishing-maven-plugin</artifactId>
        <version>1.1.1</version>
        <executions>
          <execution>
            <phase>deploy</phase>
            <goals>
              <goal>publish</goal>
            </goals>
            <configuration>
              <waitUntil>validated</waitUntil>
              <autoPublish>${publish.auto}</autoPublish>
              <skipPublishing>${publish.skip}</skipPublishing>
              <outputFilename>${project.artifactId}-${project.version}.zip</outputFilename>
              <deploymentName>${project.artifactId}-${project.version}.zip</deploymentName>
            </configuration>
          </execution>
        </executions>
        <extensions>true</extensions>
      </plugin>
    </plugins>
  </build>
  <profiles>
    <profile>
      <id>ci</id>
      <activation>
        <property>
          <name>env.CI</name>
          <value>true</value>
        </property>
      </activation>
      <properties>
        <gpg.skip>false</gpg.skip>
        <publish.auto>true</publish.auto>
        <publish.skip>false</publish.skip>
      </properties>
    </profile>
  </profiles>
</project>
@@ -32,7 +32,7 @@ class BuildExt(build_ext):
 class BdistWheel(bdist_wheel):
     def get_tag(self):
         python, abi, platform = super().get_tag()
-        if python.startswith("cp"):
+        if python.startswith("cp") and not get_config_var("Py_GIL_DISABLED"):
             python, abi = "cp310", "abi3"
         return python, abi, platform
12  crates/cli/src/templates/test.java  (new file)

@@ -0,0 +1,12 @@
import io.github.treesitter.jtreesitter.Language;
import PARSER_NS_CLEANED.jtreesitter.LOWER_PARSER_NAME.PARSER_CLASS_NAME;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;

public class PARSER_CLASS_NAMETest {
    @Test
    public void testCanLoadLanguage() {
        assertDoesNotThrow(() -> new Language(PARSER_CLASS_NAME.language()));
    }
}
File diff suppressed because it is too large
@@ -1,14 +1,13 @@
 use std::{fs, path::Path};

 use anstyle::AnsiColor;
 use anyhow::{anyhow, Result};
-use tree_sitter::Point;
 use tree_sitter_highlight::{Highlight, HighlightConfiguration, HighlightEvent, Highlighter};
 use tree_sitter_loader::{Config, Loader};

 use crate::{
     logger::paint,
     query_testing::{parse_position_comments, to_utf8_point, Assertion, Utf8Point},
     test::{TestInfo, TestOutcome, TestResult, TestSummary},
     util,
 };

@@ -48,19 +47,7 @@ pub fn test_highlights(
     loader_config: &Config,
     highlighter: &mut Highlighter,
     directory: &Path,
     use_color: bool,
-) -> Result<()> {
-    println!("syntax highlighting:");
-    test_highlights_indented(loader, loader_config, highlighter, directory, use_color, 2)
-}
-
-fn test_highlights_indented(
-    loader: &Loader,
-    loader_config: &Config,
-    highlighter: &mut Highlighter,
-    directory: &Path,
-    use_color: bool,
     indent_level: usize,
+    test_summary: &mut TestSummary,
 ) -> Result<()> {
     let mut failed = false;

@@ -68,25 +55,22 @@ fn test_highlights_indented(
         let highlight_test_file = highlight_test_file?;
         let test_file_path = highlight_test_file.path();
         let test_file_name = highlight_test_file.file_name();
-        print!(
-            "{indent:indent_level$}",
-            indent = "",
-            indent_level = indent_level * 2
-        );
         if test_file_path.is_dir() && test_file_path.read_dir()?.next().is_some() {
-            println!("{}:", test_file_name.to_string_lossy());
-            if test_highlights_indented(
+            test_summary
+                .highlight_results
+                .add_group(test_file_name.to_string_lossy().as_ref());
+            if test_highlights(
                 loader,
                 loader_config,
                 highlighter,
                 &test_file_path,
                 use_color,
                 indent_level + 1,
+                test_summary,
             )
             .is_err()
             {
                 failed = true;
             }
+            test_summary.highlight_results.pop_traversal();
         } else {
             let (language, language_config) = loader
                 .language_configuration_for_file_name(&test_file_path)?

@@ -111,30 +95,28 @@ fn test_highlights_indented(
                 fs::read(&test_file_path)?.as_slice(),
             ) {
                 Ok(assertion_count) => {
-                    println!(
-                        "✓ {} ({assertion_count} assertions)",
-                        paint(
-                            use_color.then_some(AnsiColor::Green),
-                            test_file_name.to_string_lossy().as_ref()
-                        ),
-                    );
+                    test_summary.highlight_results.add_case(TestResult {
+                        name: test_file_name.to_string_lossy().to_string(),
+                        info: TestInfo::AssertionTest {
+                            outcome: TestOutcome::AssertionPassed { assertion_count },
+                            test_num: test_summary.test_num,
+                        },
+                    });
                 }
                 Err(e) => {
-                    println!(
-                        "✗ {}",
-                        paint(
-                            use_color.then_some(AnsiColor::Red),
-                            test_file_name.to_string_lossy().as_ref()
-                        )
-                    );
-                    println!(
-                        "{indent:indent_level$} {e}",
-                        indent = "",
-                        indent_level = indent_level * 2
-                    );
+                    test_summary.highlight_results.add_case(TestResult {
+                        name: test_file_name.to_string_lossy().to_string(),
+                        info: TestInfo::AssertionTest {
+                            outcome: TestOutcome::AssertionFailed {
+                                error: e.to_string(),
+                            },
+                            test_num: test_summary.test_num,
+                        },
+                    });
                     failed = true;
                 }
             }
+            test_summary.test_num += 1;
         }
     }
@@ -1,13 +1,12 @@
 use std::{fs, path::Path};

 use anstyle::AnsiColor;
 use anyhow::{anyhow, Result};
 use tree_sitter_loader::{Config, Loader};
 use tree_sitter_tags::{TagsConfiguration, TagsContext};

 use crate::{
     logger::paint,
     query_testing::{parse_position_comments, to_utf8_point, Assertion, Utf8Point},
     test::{TestInfo, TestOutcome, TestResult, TestSummary},
     util,
 };

@@ -47,19 +46,7 @@ pub fn test_tags(
     loader_config: &Config,
     tags_context: &mut TagsContext,
     directory: &Path,
     use_color: bool,
-) -> Result<()> {
-    println!("tags:");
-    test_tags_indented(loader, loader_config, tags_context, directory, use_color, 2)
-}
-
-pub fn test_tags_indented(
-    loader: &Loader,
-    loader_config: &Config,
-    tags_context: &mut TagsContext,
-    directory: &Path,
-    use_color: bool,
     indent_level: usize,
+    test_summary: &mut TestSummary,
 ) -> Result<()> {
     let mut failed = false;

@@ -67,25 +54,22 @@ pub fn test_tags_indented(
         let tag_test_file = tag_test_file?;
         let test_file_path = tag_test_file.path();
         let test_file_name = tag_test_file.file_name();
-        print!(
-            "{indent:indent_level$}",
-            indent = "",
-            indent_level = indent_level * 2
-        );
         if test_file_path.is_dir() && test_file_path.read_dir()?.next().is_some() {
-            println!("{}:", test_file_name.to_string_lossy());
-            if test_tags_indented(
+            test_summary
+                .tag_results
+                .add_group(test_file_name.to_string_lossy().as_ref());
+            if test_tags(
                 loader,
                 loader_config,
                 tags_context,
                 &test_file_path,
                 use_color,
                 indent_level + 1,
+                test_summary,
             )
             .is_err()
             {
                 failed = true;
             }
+            test_summary.tag_results.pop_traversal();
         } else {
             let (language, language_config) = loader
                 .language_configuration_for_file_name(&test_file_path)?

@@ -104,30 +88,28 @@ pub fn test_tags_indented(
                 fs::read(&test_file_path)?.as_slice(),
             ) {
                 Ok(assertion_count) => {
-                    println!(
-                        "✓ {} ({assertion_count} assertions)",
-                        paint(
-                            use_color.then_some(AnsiColor::Green),
-                            test_file_name.to_string_lossy().as_ref()
-                        ),
-                    );
+                    test_summary.tag_results.add_case(TestResult {
+                        name: test_file_name.to_string_lossy().to_string(),
+                        info: TestInfo::AssertionTest {
+                            outcome: TestOutcome::AssertionPassed { assertion_count },
+                            test_num: test_summary.test_num,
+                        },
+                    });
                 }
                 Err(e) => {
-                    println!(
-                        "✗ {}",
-                        paint(
-                            use_color.then_some(AnsiColor::Red),
-                            test_file_name.to_string_lossy().as_ref()
-                        )
-                    );
-                    println!(
-                        "{indent:indent_level$} {e}",
-                        indent = "",
-                        indent_level = indent_level * 2
-                    );
+                    test_summary.tag_results.add_case(TestResult {
+                        name: test_file_name.to_string_lossy().to_string(),
+                        info: TestInfo::AssertionTest {
+                            outcome: TestOutcome::AssertionFailed {
+                                error: e.to_string(),
+                            },
+                            test_num: test_summary.test_num,
+                        },
+                    });
                     failed = true;
                 }
             }
+            test_summary.test_num += 1;
         }
     }
@@ -26,6 +26,8 @@ pub use crate::fuzz::{
     ITERATION_COUNT,
 };

+pub use helpers::fixtures::get_language;
+
 /// This is a simple wrapper around [`tree_sitter_generate::generate_parser_for_grammar`], because
 /// our tests do not need to pass in a version number, only the grammar JSON.
 fn generate_parser(grammar_json: &str) -> GenerateResult<(String, String)> {
@@ -16,7 +16,7 @@ use crate::{
         LOG_GRAPH_ENABLED, START_SEED,
     },
     parse::perform_edit,
-    test::{parse_tests, print_diff, print_diff_key, strip_sexp_fields},
+    test::{parse_tests, strip_sexp_fields, DiffKey, TestDiff},
     tests::{
         allocations,
         helpers::fixtures::{fixtures_dir, get_language, get_test_language, SCRATCH_BASE_DIR},

@@ -209,8 +209,8 @@ pub fn test_language_corpus(

     if actual_output != test.output {
         println!("Incorrect initial parse for {test_name}");
-        print_diff_key();
-        print_diff(&actual_output, &test.output, true);
+        DiffKey::print();
+        println!("{}", TestDiff::new(&actual_output, &test.output));
         println!();
         return false;
     }

@@ -297,8 +297,8 @@ pub fn test_language_corpus(

     if actual_output != test.output {
         println!("Incorrect parse for {test_name} - seed {seed}");
-        print_diff_key();
-        print_diff(&actual_output, &test.output, true);
+        DiffKey::print();
+        println!("{}", TestDiff::new(&actual_output, &test.output));
         println!();
         return false;
     }

@@ -428,8 +428,8 @@ fn test_feature_corpus_files() {
     if actual_output == test.output {
         true
     } else {
-        print_diff_key();
-        print_diff(&actual_output, &test.output, true);
+        DiffKey::print();
+        print!("{}", TestDiff::new(&actual_output, &test.output));
         println!();
         false
     }
@@ -90,7 +90,7 @@ fn detect_language_by_first_line_regex() {
 }

 #[test]
-fn detect_langauge_by_double_barrel_file_extension() {
+fn detect_language_by_double_barrel_file_extension() {
     let blade_dir = tree_sitter_dir(
         r#"{
     "grammars": [
@@ -12,7 +12,7 @@ pub struct Pattern {
     named: bool,
     field: Option<&'static str>,
     capture: Option<String>,
-    children: Vec<Pattern>,
+    children: Vec<Self>,
 }

 #[derive(Clone, Debug, PartialEq, Eq)]

@@ -225,7 +225,7 @@ impl Pattern {
     }

     // Find every matching combination of child patterns and child nodes.
-    let mut finished_matches = Vec::<Match>::new();
+    let mut finished_matches = Vec::<Match<'_, 'tree>>::new();
     if cursor.goto_first_child() {
         let mut match_states = vec![(0, mat)];
         loop {
@@ -2669,6 +2669,64 @@ fn test_query_matches_within_range_of_long_repetition() {
    });
}

#[test]
fn test_query_matches_contained_within_range() {
    allocations::record(|| {
        let language = get_language("json");
        let query = Query::new(
            &language,
            r#"
            ("[" @l_bracket "]" @r_bracket)
            ("{" @l_brace "}" @r_brace)
            "#,
        )
        .unwrap();

        let source = r#"
        [
            {"key1": "value1"},
            {"key2": "value2"},
            {"key3": "value3"},
            {"key4": "value4"},
            {"key5": "value5"},
            {"key6": "value6"},
            {"key7": "value7"},
            {"key8": "value8"},
            {"key9": "value9"},
            {"key10": "value10"},
            {"key11": "value11"},
            {"key12": "value12"},
        ]
        "#
        .unindent();

        let mut parser = Parser::new();
        parser.set_language(&language).unwrap();
        let tree = parser.parse(&source, None).unwrap();

        let expected_matches = [
            (1, vec![("l_brace", "{"), ("r_brace", "}")]),
            (1, vec![("l_brace", "{"), ("r_brace", "}")]),
        ];
        {
            let mut cursor = QueryCursor::new();
            let matches = cursor
                .set_containing_point_range(Point::new(5, 0)..Point::new(7, 0))
                .matches(&query, tree.root_node(), source.as_bytes());
            assert_eq!(collect_matches(matches, &query, &source), &expected_matches);
        }
        {
            let mut cursor = QueryCursor::new();
            let matches = cursor.set_containing_byte_range(78..120).matches(
                &query,
                tree.root_node(),
                source.as_bytes(),
            );
            assert_eq!(collect_matches(matches, &query, &source), &expected_matches);
        }
    });
}

#[test]
fn test_query_matches_different_queries_same_cursor() {
    allocations::record(|| {

@@ -5032,6 +5090,26 @@ fn test_query_quantified_captures() {
                ("comment.documentation", "// quuz"),
            ],
        },
        Row {
            description: "multiple quantifiers should not hang query parsing",
            language: get_language("c"),
            code: indoc! {"
                // foo
                // bar
                // baz
            "},
            pattern: r"
            ((comment) ?+ @comment)
            ",
            // This should be identical to the `*` quantifier.
            captures: &[
                ("comment", "// foo"),
                ("comment", "// foo"),
                ("comment", "// foo"),
                ("comment", "// bar"),
                ("comment", "// baz"),
            ],
        },
    ];

    allocations::record(|| {
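The new test above exercises the containing-range filters added to `QueryCursor`. A small sketch contrasting them with the existing intersection-based `set_byte_range`, assuming the `tree_sitter_json` grammar crate is on the crate graph and eliding error handling:

use tree_sitter::{Parser, Query, QueryCursor, StreamingIterator};

// Sketch only: counts how many matches survive each range filter.
fn count(source: &str, byte_range: std::ops::Range<usize>) -> (usize, usize) {
    let language: tree_sitter::Language = tree_sitter_json::LANGUAGE.into();
    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();
    let tree = parser.parse(source, None).unwrap();
    let query = Query::new(&language, r#"("{" @l "}" @r)"#).unwrap();

    // `set_byte_range` keeps matches that intersect the range...
    let mut cursor = QueryCursor::new();
    let mut matches = cursor
        .set_byte_range(byte_range.clone())
        .matches(&query, tree.root_node(), source.as_bytes());
    let mut intersecting = 0;
    while matches.next().is_some() {
        intersecting += 1;
    }

    // ...while the new `set_containing_byte_range` keeps only matches
    // lying within the range, as the expected matches in the test show.
    let mut cursor = QueryCursor::new();
    let mut matches = cursor
        .set_containing_byte_range(byte_range)
        .matches(&query, tree.root_node(), source.as_bytes());
    let mut contained = 0;
    while matches.next().is_some() {
        contained += 1;
    }
    (intersecting, contained)
}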
@@ -1,6 +1,5 @@
 use std::{fs, path::PathBuf, process::Command};

-use anyhow::{anyhow, Context, Result};
 use clap::ValueEnum;
 use log::{info, warn};
 use regex::Regex;
@@ -22,6 +21,36 @@ pub struct Version {
     pub bump: Option<BumpLevel>,
 }

+#[derive(thiserror::Error, Debug)]
+pub enum VersionError {
+    #[error(transparent)]
+    Json(#[from] serde_json::Error),
+    #[error(transparent)]
+    Io(#[from] std::io::Error),
+    #[error("Failed to update one or more files:\n\n{0}")]
+    Update(UpdateErrors),
+}
+
+#[derive(thiserror::Error, Debug)]
+pub struct UpdateErrors(Vec<UpdateError>);
+
+impl std::fmt::Display for UpdateErrors {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        for error in &self.0 {
+            writeln!(f, "{error}\n")?;
+        }
+        Ok(())
+    }
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum UpdateError {
+    #[error("Failed to update {1}:\n{0}")]
+    Io(std::io::Error, PathBuf),
+    #[error("Failed to run `{0}`:\n{1}")]
+    Command(&'static str, String),
+}
+
 impl Version {
     #[must_use]
     pub const fn new(
@@ -36,7 +65,7 @@ impl Version {
         }
     }

-    pub fn run(mut self) -> Result<()> {
+    pub fn run(mut self) -> Result<(), VersionError> {
         let tree_sitter_json = self.current_dir.join("tree-sitter.json");

         let tree_sitter_json =
@@ -84,98 +113,101 @@ impl Version {

         let is_multigrammar = tree_sitter_json.grammars.len() > 1;

-        self.update_treesitter_json().with_context(|| {
-            format!(
-                "Failed to update tree-sitter.json at {}",
-                self.current_dir.display()
-            )
-        })?;
-        self.update_cargo_toml().with_context(|| {
-            format!(
-                "Failed to update Cargo.toml at {}",
-                self.current_dir.display()
-            )
-        })?;
-        self.update_package_json().with_context(|| {
-            format!(
-                "Failed to update package.json at {}",
-                self.current_dir.display()
-            )
-        })?;
-        self.update_makefile(is_multigrammar).with_context(|| {
-            format!(
-                "Failed to update Makefile at {}",
-                self.current_dir.display()
-            )
-        })?;
-        self.update_cmakelists_txt().with_context(|| {
-            format!(
-                "Failed to update CMakeLists.txt at {}",
-                self.current_dir.display()
-            )
-        })?;
-        self.update_pyproject_toml().with_context(|| {
-            format!(
-                "Failed to update pyproject.toml at {}",
-                self.current_dir.display()
-            )
-        })?;
+        let mut errors = Vec::new();

-        Ok(())
+        // Helper to push errors into the errors vector, returns true if an error was pushed
+        let mut push_err = |result: Result<(), UpdateError>| -> bool {
+            if let Err(e) = result {
+                errors.push(e);
+                return true;
+            }
+            false
+        };
+
+        push_err(self.update_treesitter_json());
+
+        // Only update Cargo.lock if Cargo.toml was updated
+        push_err(self.update_cargo_toml()).then(|| push_err(self.update_cargo_lock()));
+
+        // Only update package-lock.json if package.json was updated
+        push_err(self.update_package_json()).then(|| push_err(self.update_package_lock_json()));
+
+        push_err(self.update_makefile(is_multigrammar));
+        push_err(self.update_cmakelists_txt());
+        push_err(self.update_pyproject_toml());
+        push_err(self.update_zig_zon());
+
+        if errors.is_empty() {
+            Ok(())
+        } else {
+            Err(VersionError::Update(UpdateErrors(errors)))
+        }
     }

-    fn update_treesitter_json(&self) -> Result<()> {
-        let tree_sitter_json = &fs::read_to_string(self.current_dir.join("tree-sitter.json"))?;
-
-        let tree_sitter_json = tree_sitter_json
-            .lines()
-            .map(|line| {
-                if line.contains("\"version\":") {
-                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
-                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
-                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;
-
-                    format!(
-                        "{}{}{}",
-                        &line[..start_quote],
-                        self.version.as_ref().unwrap(),
-                        &line[end_quote..]
-                    )
-                } else {
-                    line.to_string()
-                }
-            })
-            .collect::<Vec<_>>()
-            .join("\n")
-            + "\n";
-
-        fs::write(self.current_dir.join("tree-sitter.json"), tree_sitter_json)?;
-
-        Ok(())
+    fn update_file_with<F>(&self, path: &PathBuf, update_fn: F) -> Result<(), UpdateError>
+    where
+        F: Fn(&str) -> String,
+    {
+        let content = fs::read_to_string(path).map_err(|e| UpdateError::Io(e, path.clone()))?;
+        let updated_content = update_fn(&content);
+        fs::write(path, updated_content).map_err(|e| UpdateError::Io(e, path.clone()))
     }

-    fn update_cargo_toml(&self) -> Result<()> {
-        if !self.current_dir.join("Cargo.toml").exists() {
+    fn update_treesitter_json(&self) -> Result<(), UpdateError> {
+        let json_path = self.current_dir.join("tree-sitter.json");
+        self.update_file_with(&json_path, |content| {
+            content
+                .lines()
+                .map(|line| {
+                    if line.contains("\"version\":") {
+                        let prefix_index =
+                            line.find("\"version\":").unwrap() + "\"version\":".len();
+                        let start_quote =
+                            line[prefix_index..].find('"').unwrap() + prefix_index + 1;
+                        let end_quote =
+                            line[start_quote + 1..].find('"').unwrap() + start_quote + 1;
+
+                        format!(
+                            "{}{}{}",
+                            &line[..start_quote],
+                            self.version.as_ref().unwrap(),
+                            &line[end_quote..]
+                        )
+                    } else {
+                        line.to_string()
+                    }
+                })
+                .collect::<Vec<_>>()
+                .join("\n")
+                + "\n"
+        })
+    }
+
+    fn update_cargo_toml(&self) -> Result<(), UpdateError> {
+        let cargo_toml_path = self.current_dir.join("Cargo.toml");
+        if !cargo_toml_path.exists() {
             return Ok(());
         }

-        let cargo_toml = fs::read_to_string(self.current_dir.join("Cargo.toml"))?;
-
-        let cargo_toml = cargo_toml
-            .lines()
-            .map(|line| {
-                if line.starts_with("version =") {
-                    format!("version = \"{}\"", self.version.as_ref().unwrap())
-                } else {
-                    line.to_string()
-                }
-            })
-            .collect::<Vec<_>>()
-            .join("\n")
-            + "\n";
-
-        fs::write(self.current_dir.join("Cargo.toml"), cargo_toml)?;
+        self.update_file_with(&cargo_toml_path, |content| {
+            content
+                .lines()
+                .map(|line| {
+                    if line.starts_with("version =") {
+                        format!("version = \"{}\"", self.version.as_ref().unwrap())
+                    } else {
+                        line.to_string()
+                    }
+                })
+                .collect::<Vec<_>>()
+                .join("\n")
+                + "\n"
+        })?;
+
         Ok(())
     }

+    fn update_cargo_lock(&self) -> Result<(), UpdateError> {
+        if self.current_dir.join("Cargo.lock").exists() {
+            let Ok(cmd) = Command::new("cargo")
+                .arg("generate-lockfile")
@@ -188,8 +220,9 @@ impl Version {

             if !cmd.status.success() {
                 let stderr = String::from_utf8_lossy(&cmd.stderr);
-                return Err(anyhow!(
-                    "Failed to run `cargo generate-lockfile`:\n{stderr}"
+                return Err(UpdateError::Command(
+                    "cargo generate-lockfile",
+                    stderr.to_string(),
                 ));
             }
         }
@@ -197,37 +230,43 @@ impl Version {
         Ok(())
     }

-    fn update_package_json(&self) -> Result<()> {
-        if !self.current_dir.join("package.json").exists() {
+    fn update_package_json(&self) -> Result<(), UpdateError> {
+        let package_json_path = self.current_dir.join("package.json");
+        if !package_json_path.exists() {
             return Ok(());
         }

-        let package_json = &fs::read_to_string(self.current_dir.join("package.json"))?;
-
-        let package_json = package_json
-            .lines()
-            .map(|line| {
-                if line.contains("\"version\":") {
-                    let prefix_index = line.find("\"version\":").unwrap() + "\"version\":".len();
-                    let start_quote = line[prefix_index..].find('"').unwrap() + prefix_index + 1;
-                    let end_quote = line[start_quote + 1..].find('"').unwrap() + start_quote + 1;
-
-                    format!(
-                        "{}{}{}",
-                        &line[..start_quote],
-                        self.version.as_ref().unwrap(),
-                        &line[end_quote..]
-                    )
-                } else {
-                    line.to_string()
-                }
-            })
-            .collect::<Vec<_>>()
-            .join("\n")
-            + "\n";
-
-        fs::write(self.current_dir.join("package.json"), package_json)?;
+        self.update_file_with(&package_json_path, |content| {
+            content
+                .lines()
+                .map(|line| {
+                    if line.contains("\"version\":") {
+                        let prefix_index =
+                            line.find("\"version\":").unwrap() + "\"version\":".len();
+                        let start_quote =
+                            line[prefix_index..].find('"').unwrap() + prefix_index + 1;
+                        let end_quote =
+                            line[start_quote + 1..].find('"').unwrap() + start_quote + 1;
+
+                        format!(
+                            "{}{}{}",
+                            &line[..start_quote],
+                            self.version.as_ref().unwrap(),
+                            &line[end_quote..]
+                        )
+                    } else {
+                        line.to_string()
+                    }
+                })
+                .collect::<Vec<_>>()
+                .join("\n")
+                + "\n"
+        })?;
+
         Ok(())
     }

+    fn update_package_lock_json(&self) -> Result<(), UpdateError> {
+        if self.current_dir.join("package-lock.json").exists() {
+            let Ok(cmd) = Command::new("npm")
+                .arg("install")
@@ -240,82 +279,117 @@ impl Version {

             if !cmd.status.success() {
                 let stderr = String::from_utf8_lossy(&cmd.stderr);
-                return Err(anyhow!("Failed to run `npm install`:\n{stderr}"));
+                return Err(UpdateError::Command("npm install", stderr.to_string()));
             }
         }

         Ok(())
     }

-    fn update_makefile(&self, is_multigrammar: bool) -> Result<()> {
-        let makefile = if is_multigrammar {
-            if !self.current_dir.join("common").join("common.mak").exists() {
-                return Ok(());
-            }
-
-            fs::read_to_string(self.current_dir.join("Makefile"))?
+    fn update_makefile(&self, is_multigrammar: bool) -> Result<(), UpdateError> {
+        let makefile_path = if is_multigrammar {
+            self.current_dir.join("common").join("common.mak")
         } else {
-            if !self.current_dir.join("Makefile").exists() {
-                return Ok(());
-            }
-
-            fs::read_to_string(self.current_dir.join("Makefile"))?
+            self.current_dir.join("Makefile")
         };

-        let makefile = makefile
-            .lines()
-            .map(|line| {
-                if line.starts_with("VERSION") {
-                    format!("VERSION := {}", self.version.as_ref().unwrap())
-                } else {
-                    line.to_string()
-                }
-            })
-            .collect::<Vec<_>>()
-            .join("\n")
-            + "\n";
-
-        fs::write(self.current_dir.join("Makefile"), makefile)?;
+        self.update_file_with(&makefile_path, |content| {
+            content
+                .lines()
+                .map(|line| {
+                    if line.starts_with("VERSION") {
+                        format!("VERSION := {}", self.version.as_ref().unwrap())
+                    } else {
+                        line.to_string()
+                    }
+                })
+                .collect::<Vec<_>>()
+                .join("\n")
+                + "\n"
+        })?;

         Ok(())
     }

-    fn update_cmakelists_txt(&self) -> Result<()> {
-        if !self.current_dir.join("CMakeLists.txt").exists() {
+    fn update_cmakelists_txt(&self) -> Result<(), UpdateError> {
+        let cmake_lists_path = self.current_dir.join("CMakeLists.txt");
+        if !cmake_lists_path.exists() {
             return Ok(());
         }

-        let cmake = fs::read_to_string(self.current_dir.join("CMakeLists.txt"))?;
-
-        let re = Regex::new(r#"(\s*VERSION\s+)"[0-9]+\.[0-9]+\.[0-9]+""#)?;
-        let cmake = re.replace(&cmake, format!(r#"$1"{}""#, self.version.as_ref().unwrap()));
-
-        fs::write(self.current_dir.join("CMakeLists.txt"), cmake.as_bytes())?;
+        self.update_file_with(&cmake_lists_path, |content| {
+            let re = Regex::new(r#"(\s*VERSION\s+)"[0-9]+\.[0-9]+\.[0-9]+""#)
+                .expect("Failed to compile regex");
+            re.replace(
+                content,
+                format!(r#"$1"{}""#, self.version.as_ref().unwrap()),
+            )
+            .to_string()
+        })?;

         Ok(())
     }

-    fn update_pyproject_toml(&self) -> Result<()> {
-        if !self.current_dir.join("pyproject.toml").exists() {
+    fn update_pyproject_toml(&self) -> Result<(), UpdateError> {
+        let pyproject_toml_path = self.current_dir.join("pyproject.toml");
+        if !pyproject_toml_path.exists() {
             return Ok(());
         }

-        let pyproject_toml = fs::read_to_string(self.current_dir.join("pyproject.toml"))?;
-
-        let pyproject_toml = pyproject_toml
-            .lines()
-            .map(|line| {
-                if line.starts_with("version =") {
-                    format!("version = \"{}\"", self.version.as_ref().unwrap())
-                } else {
-                    line.to_string()
-                }
-            })
-            .collect::<Vec<_>>()
-            .join("\n")
-            + "\n";
-
-        fs::write(self.current_dir.join("pyproject.toml"), pyproject_toml)?;
+        self.update_file_with(&pyproject_toml_path, |content| {
+            content
+                .lines()
+                .map(|line| {
+                    if line.starts_with("version =") {
+                        format!("version = \"{}\"", self.version.as_ref().unwrap())
+                    } else {
+                        line.to_string()
+                    }
+                })
+                .collect::<Vec<_>>()
+                .join("\n")
+                + "\n"
+        })?;

         Ok(())
     }
+
+    fn update_zig_zon(&self) -> Result<(), UpdateError> {
+        let zig_zon_path = self.current_dir.join("build.zig.zon");
+        if !zig_zon_path.exists() {
+            return Ok(());
+        }
+
+        self.update_file_with(&zig_zon_path, |content| {
+            let zig_version_prefix = ".version =";
+            content
+                .lines()
+                .map(|line| {
+                    if line
+                        .trim_start_matches(|c: char| c.is_ascii_whitespace())
+                        .starts_with(zig_version_prefix)
+                    {
+                        let prefix_index =
+                            line.find(zig_version_prefix).unwrap() + zig_version_prefix.len();
+                        let start_quote =
+                            line[prefix_index..].find('"').unwrap() + prefix_index + 1;
+                        let end_quote =
+                            line[start_quote + 1..].find('"').unwrap() + start_quote + 1;
+
+                        format!(
+                            "{}{}{}",
+                            &line[..start_quote],
+                            self.version.as_ref().unwrap(),
+                            &line[end_quote..]
+                        )
+                    } else {
+                        line.to_string()
+                    }
+                })
+                .collect::<Vec<_>>()
+                .join("\n")
+                + "\n"
+        })?;

+        Ok(())
+    }
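The `run` refactor above stops failing fast: each updater now returns a concrete `UpdateError`, the failures are collected, and they are reported together as a single `VersionError::Update`. The same accumulate-then-report shape in isolation (step names are hypothetical stand-ins):

// Sketch of the accumulate-then-report pattern used by `Version::run`.
fn step_a() -> Result<(), String> { Ok(()) }
fn step_b() -> Result<(), String> { Err("b failed".into()) }

fn run_all() -> Result<(), Vec<String>> {
    let mut errors = Vec::new();
    // Records a failure and reports whether one occurred, so dependent
    // steps can be chained on the outcome.
    let mut push_err = |result: Result<(), String>| -> bool {
        if let Err(e) = result {
            errors.push(e);
            return true;
        }
        false
    };

    push_err(step_a());
    push_err(step_b());

    if errors.is_empty() { Ok(()) } else { Err(errors) }
}

fn main() {
    if let Err(errors) = run_all() {
        for e in errors {
            eprintln!("{e}");
        }
    }
}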
@@ -20,8 +20,8 @@ path = "src/tree_sitter_config.rs"
 workspace = true

 [dependencies]
-anyhow.workspace = true
 etcetera.workspace = true
 log.workspace = true
 serde.workspace = true
 serde_json.workspace = true
+thiserror.workspace = true
|
|||
#![cfg_attr(not(any(test, doctest)), doc = include_str!("../README.md"))]
|
||||
|
||||
use std::{env, fs, path::PathBuf};
|
||||
use std::{
|
||||
env, fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use etcetera::BaseStrategy as _;
|
||||
use log::warn;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use thiserror::Error;
|
||||
|
||||
pub type ConfigResult<T> = Result<T, ConfigError>;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum ConfigError {
|
||||
#[error("Bad JSON config {0} -- {1}")]
|
||||
ConfigRead(String, serde_json::Error),
|
||||
#[error(transparent)]
|
||||
HomeDir(#[from] etcetera::HomeDirError),
|
||||
#[error(transparent)]
|
||||
IO(IoError),
|
||||
#[error(transparent)]
|
||||
Serialization(#[from] serde_json::Error),
|
||||
}
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub struct IoError {
|
||||
pub error: std::io::Error,
|
||||
pub path: Option<String>,
|
||||
}
|
||||
|
||||
impl IoError {
|
||||
fn new(error: std::io::Error, path: Option<&Path>) -> Self {
|
||||
Self {
|
||||
error,
|
||||
path: path.map(|p| p.to_string_lossy().to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for IoError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.error)?;
|
||||
if let Some(ref path) = self.path {
|
||||
write!(f, " ({path})")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Holds the contents of tree-sitter's configuration file.
|
||||
///
|
||||
|
|
@ -23,7 +65,7 @@ pub struct Config {
|
|||
}
|
||||
|
||||
impl Config {
|
||||
pub fn find_config_file() -> Result<Option<PathBuf>> {
|
||||
pub fn find_config_file() -> ConfigResult<Option<PathBuf>> {
|
||||
if let Ok(path) = env::var("TREE_SITTER_DIR") {
|
||||
let mut path = PathBuf::from(path);
|
||||
path.push("config.json");
|
||||
|
|
@ -46,8 +88,12 @@ impl Config {
|
|||
.join("tree-sitter")
|
||||
.join("config.json");
|
||||
if legacy_apple_path.is_file() {
|
||||
fs::create_dir_all(xdg_path.parent().unwrap())?;
|
||||
fs::rename(&legacy_apple_path, &xdg_path)?;
|
||||
let xdg_dir = xdg_path.parent().unwrap();
|
||||
fs::create_dir_all(xdg_dir)
|
||||
.map_err(|e| ConfigError::IO(IoError::new(e, Some(xdg_dir))))?;
|
||||
fs::rename(&legacy_apple_path, &xdg_path).map_err(|e| {
|
||||
ConfigError::IO(IoError::new(e, Some(legacy_apple_path.as_path())))
|
||||
})?;
|
||||
warn!(
|
||||
"Your config.json file has been automatically migrated from \"{}\" to \"{}\"",
|
||||
legacy_apple_path.display(),
|
||||
|
|
@ -67,7 +113,7 @@ impl Config {
|
|||
Ok(None)
|
||||
}
|
||||
|
||||
fn xdg_config_file() -> Result<PathBuf> {
|
||||
fn xdg_config_file() -> ConfigResult<PathBuf> {
|
||||
let xdg_path = etcetera::choose_base_strategy()?
|
||||
.config_dir()
|
||||
.join("tree-sitter")
|
||||
|
|
@ -84,7 +130,7 @@ impl Config {
|
|||
/// [`etcetera::choose_base_strategy`](https://docs.rs/etcetera/*/etcetera/#basestrategy)
|
||||
/// - `$HOME/.tree-sitter/config.json` as a fallback from where tree-sitter _used_ to store
|
||||
/// its configuration
|
||||
pub fn load(path: Option<PathBuf>) -> Result<Self> {
|
||||
pub fn load(path: Option<PathBuf>) -> ConfigResult<Self> {
|
||||
let location = if let Some(path) = path {
|
||||
path
|
||||
} else if let Some(path) = Self::find_config_file()? {
|
||||
|
|
@ -94,9 +140,9 @@ impl Config {
|
|||
};
|
||||
|
||||
let content = fs::read_to_string(&location)
|
||||
.with_context(|| format!("Failed to read {}", location.to_string_lossy()))?;
|
||||
.map_err(|e| ConfigError::IO(IoError::new(e, Some(location.as_path()))))?;
|
||||
let config = serde_json::from_str(&content)
|
||||
.with_context(|| format!("Bad JSON config {}", location.to_string_lossy()))?;
|
||||
.map_err(|e| ConfigError::ConfigRead(location.to_string_lossy().to_string(), e))?;
|
||||
Ok(Self { location, config })
|
||||
}
|
||||
|
||||
|
|
@ -106,7 +152,7 @@ impl Config {
|
|||
/// disk.
|
||||
///
|
||||
/// (Note that this is typically only done by the `tree-sitter init-config` command.)
|
||||
pub fn initial() -> Result<Self> {
|
||||
pub fn initial() -> ConfigResult<Self> {
|
||||
let location = if let Ok(path) = env::var("TREE_SITTER_DIR") {
|
||||
let mut path = PathBuf::from(path);
|
||||
path.push("config.json");
|
||||
|
|
@ -119,17 +165,20 @@ impl Config {
|
|||
}
|
||||
|
||||
/// Saves this configuration to the file that it was originally loaded from.
|
||||
pub fn save(&self) -> Result<()> {
|
||||
pub fn save(&self) -> ConfigResult<()> {
|
||||
let json = serde_json::to_string_pretty(&self.config)?;
|
||||
fs::create_dir_all(self.location.parent().unwrap())?;
|
||||
fs::write(&self.location, json)?;
|
||||
let config_dir = self.location.parent().unwrap();
|
||||
fs::create_dir_all(config_dir)
|
||||
.map_err(|e| ConfigError::IO(IoError::new(e, Some(config_dir))))?;
|
||||
fs::write(&self.location, json)
|
||||
.map_err(|e| ConfigError::IO(IoError::new(e, Some(self.location.as_path()))))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Parses a component-specific configuration from the configuration file. The type `C` must
|
||||
/// be [deserializable](https://docs.rs/serde/*/serde/trait.Deserialize.html) from a JSON
|
||||
/// object, and must only include the fields relevant to that component.
|
||||
pub fn get<C>(&self) -> Result<C>
|
||||
pub fn get<C>(&self) -> ConfigResult<C>
|
||||
where
|
||||
C: for<'de> Deserialize<'de>,
|
||||
{
|
||||
|
|
@ -140,7 +189,7 @@ impl Config {
|
|||
/// Adds a component-specific configuration to the configuration file. The type `C` must be
|
||||
/// [serializable](https://docs.rs/serde/*/serde/trait.Serialize.html) into a JSON object, and
|
||||
/// must only include the fields relevant to that component.
|
||||
pub fn add<C>(&mut self, config: C) -> Result<()>
|
||||
pub fn add<C>(&mut self, config: C) -> ConfigResult<()>
|
||||
where
|
||||
C: Serialize,
|
||||
{
|
||||
|
|
|
|||
|
|
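With the anyhow contexts replaced by a concrete `ConfigError`, callers can branch on the failure mode instead of matching message strings. A small caller-side sketch (the fallback policy shown is hypothetical):

use tree_sitter_config::{Config, ConfigError};

fn load_or_default() -> Config {
    match Config::load(None) {
        Ok(config) => config,
        Err(ConfigError::IO(io)) => {
            // IoError carries the offending path alongside the io::Error.
            eprintln!("config unreadable: {io}");
            Config::initial().expect("default config should build")
        }
        Err(e) => panic!("invalid config: {e}"),
    }
}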
@@ -25,7 +25,6 @@ load = ["dep:semver"]
 qjs-rt = ["load", "rquickjs", "pathdiff"]

 [dependencies]
-anyhow.workspace = true
 bitflags = "2.9.4"
 dunce = "1.0.5"
 indexmap.workspace = true

@@ -34,7 +33,7 @@ log.workspace = true
 pathdiff = { version = "0.2.3", optional = true }
 regex.workspace = true
 regex-syntax.workspace = true
-rquickjs = { version = "0.9.0", optional = true, features = [
+rquickjs = { version = "0.11.0", optional = true, features = [
     "bindgen",
     "loader",
     "macro",
@@ -81,7 +81,7 @@ pub enum ParseTableBuilderError {
     StateCount(usize),
 }

-#[derive(Default, Debug, Serialize)]
+#[derive(Default, Debug, Serialize, Error)]
 pub struct ConflictError {
     pub symbol_sequence: Vec<String>,
     pub conflicting_lookahead: String,

@@ -89,7 +89,7 @@ pub struct ConflictError {
     pub possible_resolutions: Vec<Resolution>,
 }

-#[derive(Default, Debug, Serialize)]
+#[derive(Default, Debug, Serialize, Error)]
 pub struct Interpretation {
     pub preceding_symbols: Vec<String>,
     pub variable_name: String,

@@ -108,7 +108,7 @@ pub enum Resolution {
     AddConflict { symbols: Vec<String> },
 }

-#[derive(Debug, Serialize)]
+#[derive(Debug, Serialize, Error)]
 pub struct AmbiguousExtraError {
     pub parent_symbols: Vec<String>,
 }

@@ -238,9 +238,6 @@ impl std::fmt::Display for AmbiguousExtraError {
     }
 }

-impl std::error::Error for ConflictError {}
-impl std::error::Error for AmbiguousExtraError {}
-
 impl<'a> ParseTableBuilder<'a> {
     fn new(
         syntax_grammar: &'a SyntaxGrammar,
@@ -204,7 +204,7 @@ impl fmt::Display for ParseItemDisplay<'_> {
             || step.reserved_word_set_id != ReservedWordSetId::default()
         {
             write!(f, " (")?;
-            if step.precedence.is_none() {
+            if !step.precedence.is_none() {
                 write!(f, " {}", step.precedence)?;
             }
             if let Some(associativity) = step.associativity {
@@ -306,9 +306,7 @@ impl Minimizer<'_> {
             return true;
         }

-        for (i, action1) in actions1.iter().enumerate() {
-            let action2 = &actions2[i];
-
+        for (action1, action2) in actions1.iter().zip(actions2.iter()) {
            // Two shift actions are equivalent if their destinations are in the same group.
            if let (
                ParseAction::Shift {
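Replacing the indexed loop with `zip` drops the bounds-checked `actions2[i]` lookup; `zip` simply stops at the shorter slice. A standalone illustration of the two shapes:

fn main() {
    let xs = [1, 2, 3];
    let ys = [10, 20, 30];

    // Old shape: index into the second slice, which can panic if lengths differ.
    for (i, x) in xs.iter().enumerate() {
        assert_eq!(x * 10, ys[i]);
    }

    // New shape: one fused iterator, no indexing, stops at the shorter slice.
    for (x, y) in xs.iter().zip(ys.iter()) {
        assert_eq!(x * 10, *y);
    }
}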
@@ -28,7 +28,7 @@ pub struct TokenConflictMap<'a> {

 impl<'a> TokenConflictMap<'a> {
     /// Create a token conflict map based on a lexical grammar, which describes the structure
-    /// each token, and a `following_token` map, which indicates which tokens may be appear
+    /// of each token, and a `following_token` map, which indicates which tokens may be appear
     /// immediately after each other token.
     ///
     /// This analyzes the possible kinds of overlap between each pair of tokens and stores
@ -1,4 +1,4 @@
|
|||
use std::{collections::HashMap, sync::LazyLock};
|
||||
use std::{collections::BTreeMap, sync::LazyLock};
|
||||
#[cfg(feature = "load")]
|
||||
use std::{
|
||||
env, fs,
|
||||
|
|
@ -7,7 +7,6 @@ use std::{
|
|||
process::{Command, Stdio},
|
||||
};
|
||||
|
||||
use anyhow::Result;
|
||||
use bitflags::bitflags;
|
||||
use log::warn;
|
||||
use node_types::VariableInfo;
|
||||
|
|
@ -57,7 +56,7 @@ struct JSONOutput {
|
|||
syntax_grammar: SyntaxGrammar,
|
||||
lexical_grammar: LexicalGrammar,
|
||||
inlines: InlinedProductionMap,
|
||||
simple_aliases: HashMap<Symbol, Alias>,
|
||||
simple_aliases: BTreeMap<Symbol, Alias>,
|
||||
variable_info: Vec<VariableInfo>,
|
||||
}
|
||||
|
||||
|
|
@ -81,8 +80,8 @@ pub type GenerateResult<T> = Result<T, GenerateError>;
|
|||
pub enum GenerateError {
|
||||
#[error("Error with specified path -- {0}")]
|
||||
GrammarPath(String),
|
||||
#[error("{0}")]
|
||||
IO(String),
|
||||
#[error(transparent)]
|
||||
IO(IoError),
|
||||
#[cfg(feature = "load")]
|
||||
#[error(transparent)]
|
||||
LoadGrammarFile(#[from] LoadGrammarError),
|
||||
|
|
@ -101,9 +100,28 @@ pub enum GenerateError {
|
|||
SuperTypeCycle(#[from] SuperTypeCycleError),
|
||||
}
|
||||
|
||||
impl From<std::io::Error> for GenerateError {
|
||||
fn from(value: std::io::Error) -> Self {
|
||||
Self::IO(value.to_string())
|
||||
#[derive(Debug, Error, Serialize)]
|
||||
pub struct IoError {
|
||||
pub error: String,
|
||||
pub path: Option<String>,
|
||||
}
|
||||
|
||||
impl IoError {
|
||||
fn new(error: &std::io::Error, path: Option<&Path>) -> Self {
|
||||
Self {
|
||||
error: error.to_string(),
|
||||
path: path.map(|p| p.to_string_lossy().to_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for IoError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.error)?;
|
||||
if let Some(ref path) = self.path {
|
||||
write!(f, " ({path})")?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -118,18 +136,11 @@ pub enum LoadGrammarError {
|
|||
#[error("Failed to load grammar.js -- {0}")]
|
||||
LoadJSGrammarFile(#[from] JSError),
|
||||
#[error("Failed to load grammar.json -- {0}")]
|
||||
IO(String),
|
||||
IO(IoError),
|
||||
#[error("Unknown grammar file extension: {0:?}")]
|
||||
FileExtension(PathBuf),
|
||||
}
|
||||
|
||||
#[cfg(feature = "load")]
|
||||
impl From<std::io::Error> for LoadGrammarError {
|
||||
fn from(value: std::io::Error) -> Self {
|
||||
Self::IO(value.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "load")]
|
||||
#[derive(Debug, Error, Serialize)]
|
||||
pub enum ParseVersionError {
|
||||
|
|
@ -137,8 +148,8 @@ pub enum ParseVersionError {
|
|||
Version(String),
|
||||
#[error("{0}")]
|
||||
JSON(String),
|
||||
#[error("{0}")]
|
||||
IO(String),
|
||||
#[error(transparent)]
|
||||
IO(IoError),
|
||||
}
|
||||
|
||||
#[cfg(feature = "load")]
|
||||
|
|
@ -153,8 +164,21 @@ pub enum JSError {
|
|||
JSRuntimeUtf8 { runtime: String, error: String },
|
||||
#[error("`{runtime}` process exited with status {code}")]
|
||||
JSRuntimeExit { runtime: String, code: i32 },
|
||||
#[error("{0}")]
|
||||
IO(String),
|
||||
#[error("Failed to open stdin for `{runtime}`")]
|
||||
JSRuntimeStdin { runtime: String },
|
||||
#[error("Failed to write {item} to `{runtime}`'s stdin -- {error}")]
|
||||
JSRuntimeWrite {
|
||||
runtime: String,
|
||||
item: String,
|
||||
error: String,
|
||||
},
|
||||
#[error("Failed to read output from `{runtime}` -- {error}")]
|
||||
JSRuntimeRead { runtime: String, error: String },
|
||||
#[error(transparent)]
|
||||
IO(IoError),
|
||||
#[cfg(feature = "qjs-rt")]
|
||||
#[error("Failed to get relative path")]
|
||||
RelativePath,
|
||||
#[error("Could not parse this package's version as semver -- {0}")]
|
||||
Semver(String),
|
||||
#[error("Failed to serialze grammar JSON -- {0}")]
|
||||
|
|
@ -164,13 +188,6 @@ pub enum JSError {
|
|||
QuickJS(String),
|
||||
}
|
||||
|
||||
#[cfg(feature = "load")]
|
||||
impl From<std::io::Error> for JSError {
|
||||
fn from(value: std::io::Error) -> Self {
|
||||
Self::IO(value.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "load")]
|
||||
impl From<serde_json::Error> for JSError {
|
||||
fn from(value: serde_json::Error) -> Self {
|
||||
|
|
@ -231,7 +248,8 @@ where
|
|||
.try_exists()
|
||||
.map_err(|e| GenerateError::GrammarPath(e.to_string()))?
|
||||
{
|
||||
fs::create_dir_all(&path_buf)?;
|
||||
fs::create_dir_all(&path_buf)
|
||||
.map_err(|e| GenerateError::IO(IoError::new(&e, Some(path_buf.as_path()))))?;
|
||||
repo_path = path_buf;
|
||||
repo_path.join("grammar.js")
|
||||
} else {
|
||||
|
|
@ -248,15 +266,12 @@ where
|
|||
let header_path = src_path.join("tree_sitter");
|
||||
|
||||
// Ensure that the output directory exists
|
||||
fs::create_dir_all(&src_path)?;
|
||||
fs::create_dir_all(&src_path)
|
||||
.map_err(|e| GenerateError::IO(IoError::new(&e, Some(src_path.as_path()))))?;
|
||||
|
||||
if grammar_path.file_name().unwrap() != "grammar.json" {
|
||||
fs::write(src_path.join("grammar.json"), &grammar_json).map_err(|e| {
|
||||
GenerateError::IO(format!(
|
||||
"Failed to write grammar.json to {} -- {e}",
|
||||
src_path.display()
|
||||
))
|
||||
})?;
|
||||
fs::write(src_path.join("grammar.json"), &grammar_json)
|
||||
.map_err(|e| GenerateError::IO(IoError::new(&e, Some(src_path.as_path()))))?;
|
||||
}
|
||||
|
||||
// If our job is only to generate `grammar.json` and not `parser.c`, stop here.
|
||||
|
|
@ -298,7 +313,8 @@ where
|
|||
|
||||
write_file(&src_path.join("parser.c"), c_code)?;
|
||||
write_file(&src_path.join("node-types.json"), node_types_json)?;
|
||||
fs::create_dir_all(&header_path)?;
|
||||
fs::create_dir_all(&header_path)
|
||||
.map_err(|e| GenerateError::IO(IoError::new(&e, Some(header_path.as_path()))))?;
|
||||
write_file(&header_path.join("alloc.h"), ALLOC_HEADER)?;
|
||||
write_file(&header_path.join("array.h"), ARRAY_HEADER)?;
|
||||
write_file(&header_path.join("parser.h"), PARSER_HEADER)?;
|
||||
|
|
@ -414,9 +430,8 @@ fn read_grammar_version(repo_path: &Path) -> Result<Option<Version>, ParseVersio
|
|||
let json = path
|
||||
.exists()
|
||||
.then(|| {
|
||||
-    let contents = fs::read_to_string(path.as_path()).map_err(|e| {
-        ParseVersionError::IO(format!("Failed to read `{}` -- {e}", path.display()))
-    })?;
+    let contents = fs::read_to_string(path.as_path())
+        .map_err(|e| ParseVersionError::IO(IoError::new(&e, Some(path.as_path()))))?;
     serde_json::from_str::<TreeSitterJson>(&contents).map_err(|e| {
         ParseVersionError::JSON(format!("Failed to parse `{}` -- {e}", path.display()))
     })

@@ -450,14 +465,16 @@ pub fn load_grammar_file(
     }
     match grammar_path.extension().and_then(|e| e.to_str()) {
         Some("js") => Ok(load_js_grammar_file(grammar_path, js_runtime)?),
-        Some("json") => Ok(fs::read_to_string(grammar_path)?),
+        Some("json") => Ok(fs::read_to_string(grammar_path)
+            .map_err(|e| LoadGrammarError::IO(IoError::new(&e, Some(grammar_path))))?),
         _ => Err(LoadGrammarError::FileExtension(grammar_path.to_owned()))?,
     }
 }

 #[cfg(feature = "load")]
 fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResult<String> {
-    let grammar_path = dunce::canonicalize(grammar_path)?;
+    let grammar_path = dunce::canonicalize(grammar_path)
+        .map_err(|e| JSError::IO(IoError::new(&e, Some(grammar_path))))?;

     #[cfg(feature = "qjs-rt")]
     if js_runtime == Some("native") {

@@ -498,7 +515,9 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResult<String>
     let mut js_stdin = js_process
         .stdin
         .take()
-        .ok_or_else(|| JSError::IO(format!("Failed to open stdin for `{js_runtime}`")))?;
+        .ok_or_else(|| JSError::JSRuntimeStdin {
+            runtime: js_runtime.to_string(),
+        })?;

     let cli_version = Version::parse(env!("CARGO_PKG_VERSION"))?;
     write!(

@@ -508,21 +527,26 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResult<String>
         globalThis.TREE_SITTER_CLI_VERSION_PATCH = {};",
         cli_version.major, cli_version.minor, cli_version.patch,
     )
-    .map_err(|e| {
-        JSError::IO(format!(
-            "Failed to write tree-sitter version to `{js_runtime}`'s stdin -- {e}"
-        ))
-    })?;
-    js_stdin.write(include_bytes!("./dsl.js")).map_err(|e| {
-        JSError::IO(format!(
-            "Failed to write grammar dsl to `{js_runtime}`'s stdin -- {e}"
-        ))
-    })?;
+    .map_err(|e| JSError::JSRuntimeWrite {
+        runtime: js_runtime.to_string(),
+        item: "tree-sitter version".to_string(),
+        error: e.to_string(),
+    })?;
+    js_stdin
+        .write(include_bytes!("./dsl.js"))
+        .map_err(|e| JSError::JSRuntimeWrite {
+            runtime: js_runtime.to_string(),
+            item: "grammar dsl".to_string(),
+            error: e.to_string(),
+        })?;
     drop(js_stdin);

     let output = js_process
         .wait_with_output()
-        .map_err(|e| JSError::IO(format!("Failed to read output from `{js_runtime}` -- {e}")))?;
+        .map_err(|e| JSError::JSRuntimeRead {
+            runtime: js_runtime.to_string(),
+            error: e.to_string(),
+        })?;
     match output.status.code() {
         Some(0) => {
             let stdout = String::from_utf8(output.stdout).map_err(|e| JSError::JSRuntimeUtf8 {

@@ -538,9 +562,15 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResult<String>
                 grammar_json = &stdout[pos + 1..];

                 let mut stdout = std::io::stdout().lock();
-                stdout.write_all(node_output.as_bytes())?;
-                stdout.write_all(b"\n")?;
-                stdout.flush()?;
+                stdout
+                    .write_all(node_output.as_bytes())
+                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
+                stdout
+                    .write_all(b"\n")
+                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
+                stdout
+                    .flush()
+                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
             }

             Ok(serde_json::to_string_pretty(&serde_json::from_str::<

@@ -560,8 +590,7 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResult<String>

 #[cfg(feature = "load")]
 pub fn write_file(path: &Path, body: impl AsRef<[u8]>) -> GenerateResult<()> {
-    fs::write(path, body)
-        .map_err(|e| GenerateError::IO(format!("Failed to write {:?} -- {e}", path.file_name())))
+    fs::write(path, body).map_err(|e| GenerateError::IO(IoError::new(&e, Some(path))))
 }

 #[cfg(test)]
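A recurring change above replaces `format!`-built error strings with a structured `IoError` that carries the underlying `std::io::Error` plus the offending path. The type's definition is not part of this diff; a minimal sketch consistent with the two shapes that do appear (`IoError::new(&e, Some(path))` here, and the literal `IoError { error, path: Some(...) }` form in the xtask changes further down) could look like:

```rust
use std::path::Path;

/// Hypothetical reconstruction -- the real definition lives elsewhere in this PR.
#[derive(Debug)]
pub struct IoError {
    pub error: std::io::Error,
    pub path: Option<String>,
}

impl IoError {
    pub fn new(error: &std::io::Error, path: Option<&Path>) -> Self {
        Self {
            // io::Error is not Clone, so rebuild it from its kind and message.
            error: std::io::Error::new(error.kind(), error.to_string()),
            path: path.map(|p| p.to_string_lossy().to_string()),
        }
    }
}

impl std::fmt::Display for IoError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.path {
            Some(path) => write!(f, "{} (`{path}`)", self.error),
            None => write!(f, "{}", self.error),
        }
    }
}
```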
@@ -1,6 +1,5 @@
-use std::collections::{BTreeMap, HashMap, HashSet};
+use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};

-use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;

@@ -378,11 +377,11 @@ pub fn get_variable_info(
 fn get_aliases_by_symbol(
     syntax_grammar: &SyntaxGrammar,
     default_aliases: &AliasMap,
-) -> HashMap<Symbol, HashSet<Option<Alias>>> {
+) -> HashMap<Symbol, BTreeSet<Option<Alias>>> {
     let mut aliases_by_symbol = HashMap::new();
     for (symbol, alias) in default_aliases {
         aliases_by_symbol.insert(*symbol, {
-            let mut aliases = HashSet::new();
+            let mut aliases = BTreeSet::new();
             aliases.insert(Some(alias.clone()));
             aliases
         });

@@ -391,7 +390,7 @@ fn get_aliases_by_symbol(
         if !default_aliases.contains_key(extra_symbol) {
             aliases_by_symbol
                 .entry(*extra_symbol)
-                .or_insert_with(HashSet::new)
+                .or_insert_with(BTreeSet::new)
                 .insert(None);
         }
     }

@@ -400,7 +399,7 @@ fn get_aliases_by_symbol(
         for step in &production.steps {
             aliases_by_symbol
                 .entry(step.symbol)
-                .or_insert_with(HashSet::new)
+                .or_insert_with(BTreeSet::new)
                 .insert(
                     step.alias
                         .as_ref()

@@ -531,7 +530,7 @@ pub fn generate_node_types_json(

     let aliases_by_symbol = get_aliases_by_symbol(syntax_grammar, default_aliases);

-    let empty = HashSet::new();
+    let empty = BTreeSet::new();
     let extra_names = syntax_grammar
         .extra_symbols
         .iter()

@@ -590,7 +589,7 @@ pub fn generate_node_types_json(
         } else if !syntax_grammar.variables_to_inline.contains(&symbol) {
             // If a rule is aliased under multiple names, then its information
             // contributes to multiple entries in the final JSON.
-            for alias in aliases_by_symbol.get(&symbol).unwrap_or(&HashSet::new()) {
+            for alias in aliases_by_symbol.get(&symbol).unwrap_or(&BTreeSet::new()) {
                 let kind;
                 let is_named;
                 if let Some(alias) = alias {

@@ -784,6 +783,9 @@ pub fn generate_node_types_json(
                 a_is_leaf.cmp(&b_is_leaf)
             })
             .then_with(|| a.kind.cmp(&b.kind))
+            .then_with(|| a.named.cmp(&b.named))
+            .then_with(|| a.root.cmp(&b.root))
+            .then_with(|| a.extra.cmp(&b.extra))
     });
     result.dedup();
     Ok(result)
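The `HashSet` to `BTreeSet` swap in `get_aliases_by_symbol` (and the extra `.then_with` tie-breakers in the sort) are both about determinism: a `BTreeSet` iterates in sorted order regardless of insertion order, so the generated `node-types.json` comes out identical across runs. A small illustration of the property being relied on:

```rust
use std::collections::BTreeSet;

fn main() {
    // A BTreeSet iterates in sorted order no matter the insertion order,
    // so any output derived from it is reproducible across runs.
    let mut aliases = BTreeSet::new();
    for name in ["statement", "expression", "declaration"] {
        aliases.insert(name);
    }
    let ordered: Vec<_> = aliases.iter().copied().collect();
    assert_eq!(ordered, ["declaration", "expression", "statement"]);
}
```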
@@ -826,12 +828,12 @@ fn extend_sorted<'a, T>(vec: &mut Vec<T>, values: impl IntoIterator<Item = &'a T>)
 where
     T: 'a + Clone + Eq + Ord,
 {
-    values.into_iter().any(|value| {
+    values.into_iter().fold(false, |acc, value| {
         if let Err(i) = vec.binary_search(value) {
             vec.insert(i, value.clone());
             true
         } else {
-            false
+            acc
         }
     })
 }
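The `any` to `fold` change above is a behavioral fix, not a style one: `Iterator::any` short-circuits as soon as the closure returns `true`, so only the first missing value would ever be inserted into `vec`. `fold` visits every element while still reporting whether anything changed. A minimal reproduction:

```rust
fn main() {
    let mut vec = vec![2, 4];
    let values = [1, 3];

    // Buggy variant: `any` short-circuits after inserting 1, so 3 is skipped.
    let mut buggy = vec.clone();
    let _ = values.iter().any(|value| {
        if let Err(i) = buggy.binary_search(value) {
            buggy.insert(i, *value);
            true
        } else {
            false
        }
    });
    assert_eq!(buggy, [1, 2, 4]);

    // Fixed variant: `fold` keeps going and inserts every missing value.
    let changed = values.iter().fold(false, |acc, value| {
        if let Err(i) = vec.binary_search(value) {
            vec.insert(i, *value);
            true
        } else {
            acc
        }
    });
    assert!(changed);
    assert_eq!(vec, [1, 2, 3, 4]);
}
```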
@@ -1,6 +1,5 @@
 use std::collections::HashSet;

-use anyhow::Result;
 use log::warn;
 use regex::Regex;
 use serde::{Deserialize, Serialize};

@@ -18,7 +17,7 @@ use crate::{
 #[allow(clippy::upper_case_acronyms)]
 enum RuleJSON {
     ALIAS {
-        content: Box<RuleJSON>,
+        content: Box<Self>,
         named: bool,
         value: String,
     },

@@ -34,46 +33,46 @@ enum RuleJSON {
         name: String,
     },
     CHOICE {
-        members: Vec<RuleJSON>,
+        members: Vec<Self>,
     },
     FIELD {
         name: String,
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     SEQ {
-        members: Vec<RuleJSON>,
+        members: Vec<Self>,
     },
     REPEAT {
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     REPEAT1 {
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     PREC_DYNAMIC {
         value: i32,
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     PREC_LEFT {
         value: PrecedenceValueJSON,
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     PREC_RIGHT {
         value: PrecedenceValueJSON,
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     PREC {
         value: PrecedenceValueJSON,
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     TOKEN {
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     IMMEDIATE_TOKEN {
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
     RESERVED {
         context_name: String,
-        content: Box<RuleJSON>,
+        content: Box<Self>,
     },
 }
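The repeated `Box<RuleJSON>` to `Box<Self>` edits are purely cosmetic: inside an item's own definition, `Self` refers to the enclosing type, which keeps recursive enums readable and satisfies clippy's `use_self` lint. A small standalone example of the same pattern:

```rust
// `Self` works as the recursive payload type inside the enum's own definition.
#[derive(Debug)]
enum Rule {
    Token(String),
    Repeat(Box<Self>), // same as Box<Rule>
    Choice(Vec<Self>), // same as Vec<Rule>
}

fn main() {
    let rule = Rule::Repeat(Box::new(Rule::Choice(vec![
        Rule::Token("a".into()),
        Rule::Token("b".into()),
    ])));
    println!("{rule:?}");
}
```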
@@ -12,7 +12,6 @@ use std::{
     mem,
 };

-use anyhow::Result;
 pub use expand_tokens::ExpandTokensError;
 pub use extract_tokens::ExtractTokensError;
 pub use flatten_grammar::FlattenGrammarError;

@@ -1,4 +1,3 @@
-use anyhow::Result;
 use regex_syntax::{
     hir::{Class, Hir, HirKind},
     ParserBuilder,

@@ -27,7 +26,7 @@ pub enum ExpandTokensError {
     "The rule `{0}` matches the empty string.
 Tree-sitter does not support syntactic rules that match the empty string
 unless they are used only as the grammar's start rule.
-    "
+"
 )]
     EmptyString(String),
     #[error(transparent)]

@@ -189,7 +188,7 @@ impl NfaBuilder {
             }
             Rule::String(s) => {
                 for c in s.chars().rev() {
-                    self.push_advance(CharacterSet::empty().add_char(c), next_state_id);
+                    self.push_advance(CharacterSet::from_char(c), next_state_id);
                     next_state_id = self.nfa.last_state_id();
                 }
                 Ok(!s.is_empty())

@@ -69,9 +69,7 @@ pub(super) fn extract_default_aliases(
             SymbolType::External => &mut external_status_list[symbol.index],
             SymbolType::NonTerminal => &mut non_terminal_status_list[symbol.index],
             SymbolType::Terminal => &mut terminal_status_list[symbol.index],
-            SymbolType::End | SymbolType::EndOfNonTerminalExtra => {
-                panic!("Unexpected end token")
-            }
+            SymbolType::End | SymbolType::EndOfNonTerminalExtra => panic!("Unexpected end token"),
         };
         status.appears_unaliased = true;
     }

@@ -1,6 +1,5 @@
 use std::collections::HashMap;

-use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;

@@ -153,7 +152,7 @@ pub(super) fn extract_tokens(
         }
     }

-    let mut external_tokens = Vec::new();
+    let mut external_tokens = Vec::with_capacity(grammar.external_tokens.len());
     for external_token in grammar.external_tokens {
         let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
         if let Rule::Symbol(symbol) = rule {

@@ -1,6 +1,5 @@
 use std::collections::HashMap;

-use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;

@@ -1,4 +1,3 @@
-use anyhow::Result;
 use log::warn;
 use serde::Serialize;
 use thiserror::Error;

@@ -1,6 +1,5 @@
 use std::collections::HashMap;

-use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;

@@ -71,12 +70,13 @@ impl InlinedProductionMapBuilder {
         let production_map = production_indices_by_step_id
             .into_iter()
             .map(|(step_id, production_indices)| {
-                let production = step_id.variable_index.map_or_else(
-                    || &productions[step_id.production_index],
-                    |variable_index| {
-                        &grammar.variables[variable_index].productions[step_id.production_index]
-                    },
-                ) as *const Production;
+                let production =
+                    core::ptr::from_ref::<Production>(step_id.variable_index.map_or_else(
+                        || &productions[step_id.production_index],
+                        |variable_index| {
+                            &grammar.variables[variable_index].productions[step_id.production_index]
+                        },
+                    ));
                 ((production, step_id.step_index as u32), production_indices)
             })
             .collect();
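In the `process_inlines` hunk just above, the `as *const Production` cast becomes `core::ptr::from_ref::<Production>(...)`. The two produce the same pointer, but `from_ref` can only convert a `&T` into a `*const T`, so the pointee type can no longer drift silently under a later refactor. A quick illustration:

```rust
fn main() {
    let value = 42u32;
    // Old style: an `as` cast, which also permits further pointer casts
    // that could change the pointee type in a refactor.
    let by_cast = &value as *const u32;
    // New style: `from_ref` can only turn a `&T` into a `*const T`.
    let by_fn = core::ptr::from_ref::<u32>(&value);
    // Both expressions denote the same address; only the spelling differs.
    assert_eq!(by_cast, by_fn);
}
```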
@@ -10,7 +10,7 @@ use rquickjs::{
     Context, Ctx, Function, Module, Object, Runtime, Type, Value,
 };

-use super::{JSError, JSResult};
+use super::{IoError, JSError, JSResult};

 const DSL: &[u8] = include_bytes!("dsl.js");

@@ -95,9 +95,27 @@ impl Console {
             Type::Module => "module".to_string(),
             Type::BigInt => v.get::<String>().unwrap_or_else(|_| "BigInt".to_string()),
             Type::Unknown => "unknown".to_string(),
+            Type::Array => {
+                let js_vals = v
+                    .as_array()
+                    .unwrap()
+                    .iter::<Value<'_>>()
+                    .filter_map(|x| x.ok())
+                    .map(|x| {
+                        if x.is_string() {
+                            format!("'{}'", Self::format_args(&[x]))
+                        } else {
+                            Self::format_args(&[x])
+                        }
+                    })
+                    .collect::<Vec<_>>()
+                    .join(", ");
+
+                format!("[ {js_vals} ]")
+            }
             Type::Symbol
             | Type::Object
-            | Type::Array
             | Type::Proxy
             | Type::Function
             | Type::Constructor
             | Type::Promise

@@ -197,11 +215,11 @@ fn try_resolve_path(path: &Path) -> rquickjs::Result<PathBuf> {
 }

 #[allow(clippy::needless_pass_by_value)]
-fn require_from_module<'a>(
-    ctx: Ctx<'a>,
+fn require_from_module<'js>(
+    ctx: Ctx<'js>,
     module_path: String,
     from_module: &str,
-) -> rquickjs::Result<Value<'a>> {
+) -> rquickjs::Result<Value<'js>> {
     let current_module = PathBuf::from(from_module);
     let current_dir = if current_module.is_file() {
         current_module.parent().unwrap_or(Path::new("."))

@@ -216,13 +234,13 @@ fn require_from_module<'a>(
     load_module_from_content(&ctx, &resolved_path, &contents)
 }

-fn load_module_from_content<'a>(
-    ctx: &Ctx<'a>,
+fn load_module_from_content<'js>(
+    ctx: &Ctx<'js>,
     path: &Path,
     contents: &str,
-) -> rquickjs::Result<Value<'a>> {
+) -> rquickjs::Result<Value<'js>> {
     if path.extension().is_some_and(|ext| ext == "json") {
-        return ctx.eval::<Value, _>(format!("JSON.parse({contents:?})"));
+        return ctx.eval::<Value<'js>, _>(format!("JSON.parse({contents:?})"));
     }

     let exports = Object::new(ctx.clone())?;

@@ -238,7 +256,7 @@ fn load_module_from_content<'a>(
     let module_path = filename.clone();
     let require = Function::new(
         ctx.clone(),
-        move |ctx_inner: Ctx<'a>, target_path: String| -> rquickjs::Result<Value<'a>> {
+        move |ctx_inner: Ctx<'js>, target_path: String| -> rquickjs::Result<Value<'js>> {
             require_from_module(ctx_inner, target_path, &module_path)
         },
     )?;

@@ -246,8 +264,8 @@ fn load_module_from_content<'a>(
     let wrapper =
         format!("(function(exports, require, module, __filename, __dirname) {{ {contents} }})");

-    let module_func = ctx.eval::<Function, _>(wrapper)?;
-    module_func.call::<_, Value>((exports, require, module_obj.clone(), filename, dirname))?;
+    let module_func = ctx.eval::<Function<'js>, _>(wrapper)?;
+    module_func.call::<_, Value<'js>>((exports, require, module_obj.clone(), filename, dirname))?;

     module_obj.get("exports")
 }

@@ -261,15 +279,16 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
     let context = Context::full(&runtime)?;

     let resolver = FileResolver::default()
         .with_path("./node_modules")
         .with_path("./")
         .with_pattern("{}.mjs");
     let loader = ScriptLoader::default().with_extension("mjs");
     runtime.set_loader(resolver, loader);

-    let cwd = std::env::current_dir()?;
+    let cwd = std::env::current_dir().map_err(|e| JSError::IO(IoError::new(&e, None)))?;
     let relative_path = pathdiff::diff_paths(grammar_path, &cwd)
         .map(|p| p.to_string_lossy().to_string())
-        .ok_or_else(|| JSError::IO("Failed to get relative path".to_string()))?;
+        .ok_or(JSError::RelativePath)?;

     context.with(|ctx| -> JSResult<String> {
         let globals = ctx.globals();
@@ -34,6 +34,8 @@ macro_rules! add {

 macro_rules! add_whitespace {
     ($this:tt) => {{
+        // 4 bytes per char, 2 spaces per indent level
+        $this.buffer.reserve(4 * 2 * $this.indent_level);
         for _ in 0..$this.indent_level {
             write!(&mut $this.buffer, "  ").unwrap();
         }

@@ -688,13 +690,14 @@ impl Generator {
                     flat_field_map.push((field_name.clone(), *location));
                 }
             }
+            let field_map_len = flat_field_map.len();
             field_map_ids.push((
                 self.get_field_map_id(
-                    flat_field_map.clone(),
+                    flat_field_map,
                     &mut flat_field_maps,
                     &mut next_flat_field_map_index,
                 ),
-                flat_field_map.len(),
+                field_map_len,
             ));
         }
     }

@@ -962,10 +965,7 @@ impl Generator {
             large_char_set_ix = Some(char_set_ix);
         }

-        let mut line_break = "\n".to_string();
-        for _ in 0..self.indent_level + 2 {
-            line_break.push_str("  ");
-        }
+        let line_break = format!("\n{}", "  ".repeat(self.indent_level + 2));

        let has_positive_condition = large_char_set_ix.is_some() || !asserted_chars.is_empty();
        let has_negative_condition = !negated_chars.is_empty();
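The `line_break` rewrite in the render hunk above replaces a push loop with `str::repeat`, building the newline-plus-indent string in one expression. A sketch of the equivalence, assuming the generator's two-space indent unit:

```rust
fn line_break(indent_level: usize) -> String {
    // Equivalent to pushing "  " in a loop `indent_level + 2` times
    // after an initial "\n", but built in a single expression.
    format!("\n{}", "  ".repeat(indent_level + 2))
}

fn main() {
    assert_eq!(line_break(0), "\n    ");
    assert_eq!(line_break(1), "\n      ");
}
```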
@@ -1,4 +1,4 @@
-use std::{collections::HashMap, fmt};
+use std::{collections::BTreeMap, fmt};

 use serde::Serialize;
 use smallbitvec::SmallBitVec;

@@ -34,7 +34,7 @@ pub enum Precedence {
     Name(String),
 }

-pub type AliasMap = HashMap<Symbol, Alias>;
+pub type AliasMap = BTreeMap<Symbol, Alias>;

 #[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Serialize)]
 pub struct MetadataParams {

@@ -60,15 +60,15 @@ pub enum Rule {
     Pattern(String, String),
     NamedSymbol(String),
     Symbol(Symbol),
-    Choice(Vec<Rule>),
+    Choice(Vec<Self>),
     Metadata {
         params: MetadataParams,
-        rule: Box<Rule>,
+        rule: Box<Self>,
     },
-    Repeat(Box<Rule>),
-    Seq(Vec<Rule>),
+    Repeat(Box<Self>),
+    Seq(Vec<Self>),
     Reserved {
-        rule: Box<Rule>,
+        rule: Box<Self>,
         context_name: String,
     },
 }
@@ -189,7 +189,7 @@ struct HighlightIterLayer<'a> {
     depth: usize,
 }

-pub struct _QueryCaptures<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> {
+pub struct _QueryCaptures<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> {
     ptr: *mut ffi::TSQueryCursor,
     query: &'query Query,
     text_provider: T,

@@ -225,7 +225,7 @@ impl<'tree> _QueryMatch<'_, 'tree> {
     }
 }

-impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
+impl<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
     for _QueryCaptures<'query, 'tree, T, I>
 {
     type Item = (QueryMatch<'query, 'tree>, usize);

@@ -344,11 +344,13 @@ impl HighlightConfiguration {
         locals_query: &str,
     ) -> Result<Self, QueryError> {
         // Concatenate the query strings, keeping track of the start offset of each section.
-        let mut query_source = String::new();
+        let mut query_source = String::with_capacity(
+            injection_query.len() + locals_query.len() + highlights_query.len(),
+        );
         query_source.push_str(injection_query);
-        let locals_query_offset = query_source.len();
+        let locals_query_offset = injection_query.len();
         query_source.push_str(locals_query);
-        let highlights_query_offset = query_source.len();
+        let highlights_query_offset = injection_query.len() + locals_query.len();
         query_source.push_str(highlights_query);

         // Construct a single query by concatenating the three query strings, but record the

@@ -592,6 +594,7 @@ impl<'a> HighlightIterLayer<'a> {
         }
     }

+        // SAFETY:
         // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
         // prevents them from being moved. But both of these values are really just
         // pointers, so it's actually ok to move them.
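In the `HighlightConfiguration` hunk, computing the offsets from the input lengths instead of calling `query_source.len()` after each push is equivalent, because `push_str` appends exactly the pushed string's byte length; `with_capacity` merely pre-sizes the buffer so the three pushes never reallocate. A quick check of that invariant:

```rust
fn main() {
    let injection_query = "(comment) @injection";
    let locals_query = "(identifier) @local.reference";
    let highlights_query = "(string) @string";

    let mut query_source = String::with_capacity(
        injection_query.len() + locals_query.len() + highlights_query.len(),
    );
    query_source.push_str(injection_query);
    // After pushing only the injection query, its length is the locals offset...
    assert_eq!(injection_query.len(), query_source.len());
    query_source.push_str(locals_query);
    // ...and the two lengths combined give the highlights offset.
    assert_eq!(injection_query.len() + locals_query.len(), query_source.len());
    query_source.push_str(highlights_query);
}
```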
@@ -1,7 +1,7 @@
 [package]
 name = "tree-sitter-language"
 description = "The tree-sitter Language type, used by the library and by language implementations"
-version = "0.1.5"
+version = "0.1.7"
 authors.workspace = true
 edition.workspace = true
 rust-version = "1.77"
@@ -23,9 +23,15 @@ typedef long unsigned int size_t;

 typedef long unsigned int uintptr_t;

-#define UINT16_MAX 65535
+#define INT8_MAX 127
+#define INT16_MAX 32767
+#define INT32_MAX 2147483647L
+#define INT64_MAX 9223372036854775807LL
+
+#define UINT8_MAX 255
+#define UINT16_MAX 65535
+#define UINT32_MAX 4294967295U
+#define UINT64_MAX 18446744073709551615ULL

 #if defined(__wasm32__)

@@ -13,4 +13,6 @@ void *memset(void *dst, int value, size_t count);

 int strncmp(const char *left, const char *right, size_t n);

+size_t strlen(const char *str);
+
 #endif  // TREE_SITTER_WASM_STRING_H_

@@ -1,4 +1,5 @@
 #include <stdio.h>
+#include <string.h>

 typedef struct {
   bool left_justify;  // -

@@ -105,12 +106,6 @@ static int ptr_to_str(void *ptr, char *buffer) {
   return 2 + len;
 }

-size_t strlen(const char *str) {
-  const char *s = str;
-  while (*s) s++;
-  return s - str;
-}
-
 char *strncpy(char *dest, const char *src, size_t n) {
   char *d = dest;
   const char *s = src;

@@ -58,3 +58,9 @@ int strncmp(const char *left, const char *right, size_t n) {
   }
   return 0;
 }
+
+size_t strlen(const char *str) {
+  const char *s = str;
+  while (*s) s++;
+  return s - str;
+}
@ -28,7 +28,6 @@ wasm = ["tree-sitter/wasm"]
|
|||
default = ["tree-sitter-highlight", "tree-sitter-tags"]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
cc.workspace = true
|
||||
etcetera.workspace = true
|
||||
fs4.workspace = true
|
||||
|
|
@ -41,6 +40,7 @@ semver.workspace = true
|
|||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
tempfile.workspace = true
|
||||
thiserror.workspace = true
|
||||
|
||||
tree-sitter = { workspace = true }
|
||||
tree-sitter-highlight = { workspace = true, optional = true }
|
||||
|
|
|
|||
(File diff suppressed because it is too large.)

crates/loader/wasi-sdk-version (new file, 1 line)
@@ -0,0 +1 @@
+29.0
@@ -313,6 +313,7 @@ impl TagsContext {
             )
             .ok_or(Error::Cancelled)?;

+        // SAFETY:
         // The `matches` iterator borrows the `Tree`, which prevents it from being
         // moved. But the tree is really just a pointer, so it's actually ok to
         // move it.
@@ -19,9 +19,13 @@ anstyle.workspace = true
 anyhow.workspace = true
 bindgen = { version = "0.72.0" }
 clap.workspace = true
+etcetera.workspace = true
 indoc.workspace = true
 regex.workspace = true
+schemars.workspace = true
 semver.workspace = true
+serde_json.workspace = true
 tree-sitter-cli = { path = "../cli/" }
+tree-sitter-loader = { path = "../loader/" }
 notify = "8.2.0"
 notify-debouncer-full = "0.6.0"
@@ -8,13 +8,15 @@ use std::{
     time::Duration,
 };

-use anyhow::{anyhow, Error, Result};
+use anyhow::{anyhow, Result};
+use etcetera::BaseStrategy as _;
 use indoc::indoc;
 use notify::{
     event::{AccessKind, AccessMode},
     EventKind, RecursiveMode,
 };
 use notify_debouncer_full::new_debouncer;
+use tree_sitter_loader::{IoError, LoaderError, WasiSDKClangError};

 use crate::{
     bail_on_err, embed_sources::embed_sources_in_map, watch_wasm, BuildWasm, EMSCRIPTEN_TAG,

@@ -50,6 +52,8 @@ const EXPORTED_RUNTIME_METHODS: [&str; 20] = [
     "LE_HEAP_STORE_I64",
 ];

+const WASI_SDK_VERSION: &str = include_str!("../../loader/wasi-sdk-version").trim_ascii();
+
 pub fn run_wasm(args: &BuildWasm) -> Result<()> {
     let mut emscripten_flags = if args.debug {
         vec!["-O0", "--minify", "0"]

@@ -195,6 +199,7 @@ pub fn run_wasm(args: &BuildWasm) -> Result<()> {
         "-D", "NDEBUG=",
         "-D", "_POSIX_C_SOURCE=200112L",
         "-D", "_DEFAULT_SOURCE=",
+        "-D", "_BSD_SOURCE=",
         "-D", "_DARWIN_C_SOURCE=",
         "-I", "lib/src",
         "-I", "lib/include",

@@ -309,9 +314,17 @@ fn build_wasm(cmd: &mut Command, edit_tsd: bool) -> Result<()> {
     Ok(())
 }

-/// This gets the path to the `clang` binary in the WASI SDK specified by the
-/// `TREE_SITTER_WASI_SDK_PATH` environment variable.
-fn get_wasi_binary() -> Result<PathBuf, Error> {
+/// This ensures that the wasi-sdk is available, downloading and extracting it if necessary,
+/// and returns the path to the `clang` executable.
+///
+/// If `TREE_SITTER_WASI_SDK_PATH` is set, it will use that path to look for the clang executable.
+///
+/// Note that this is just a minimally modified version of
+/// `tree_sitter_loader::ensure_wasi_sdk_exists`. In the loader, this functionality is implemented
+/// as a private method of `Loader`. Rather than add this to the public API, we just
+/// re-implement it. Any fixes and/or modifications made to the loader's copy should be reflected
+/// here.
+pub fn ensure_wasi_sdk_exists() -> Result<PathBuf> {
     let possible_executables = if cfg!(windows) {
         vec![
             "clang.exe",

@@ -332,19 +345,122 @@ fn get_wasi_binary() -> Result<PathBuf, Error> {
         }
     }

-        return Err(anyhow!(
-            "TREE_SITTER_WASI_SDK_PATH is set to '{}', but no clang executable found in 'bin/' directory. \
-             Looked for: {}",
-            wasi_sdk_dir.display(),
-            possible_executables.join(", ")
-        ));
+        Err(LoaderError::WasiSDKClang(WasiSDKClangError {
+            wasi_sdk_dir: wasi_sdk_dir.to_string_lossy().to_string(),
+            possible_executables: possible_executables.clone(),
+            download: false,
+        }))?;
     }

-    Err(anyhow!(
-        "TREE_SITTER_WASI_SDK_PATH environment variable is not set. \
-         Please install the WASI SDK from https://github.com/WebAssembly/wasi-sdk/releases \
-         and set TREE_SITTER_WASI_SDK_PATH to the installation directory."
-    ))
+    let cache_dir = etcetera::choose_base_strategy()?
+        .cache_dir()
+        .join("tree-sitter");
+    fs::create_dir_all(&cache_dir).map_err(|error| {
+        LoaderError::IO(IoError {
+            error,
+            path: Some(cache_dir.to_string_lossy().to_string()),
+        })
+    })?;
+
+    let wasi_sdk_dir = cache_dir.join("wasi-sdk");
+
+    for exe in &possible_executables {
+        let clang_exe = wasi_sdk_dir.join("bin").join(exe);
+        if clang_exe.exists() {
+            return Ok(clang_exe);
+        }
+    }
+
+    fs::create_dir_all(&wasi_sdk_dir).map_err(|error| {
+        LoaderError::IO(IoError {
+            error,
+            path: Some(wasi_sdk_dir.to_string_lossy().to_string()),
+        })
+    })?;
+
+    let arch_os = if cfg!(target_os = "macos") {
+        if cfg!(target_arch = "aarch64") {
+            "arm64-macos"
+        } else {
+            "x86_64-macos"
+        }
+    } else if cfg!(target_os = "windows") {
+        if cfg!(target_arch = "aarch64") {
+            "arm64-windows"
+        } else {
+            "x86_64-windows"
+        }
+    } else if cfg!(target_os = "linux") {
+        if cfg!(target_arch = "aarch64") {
+            "arm64-linux"
+        } else {
+            "x86_64-linux"
+        }
+    } else {
+        Err(LoaderError::WasiSDKPlatform)?
+    };
+
+    let sdk_filename = format!("wasi-sdk-{WASI_SDK_VERSION}-{arch_os}.tar.gz");
+    let wasi_sdk_major_version = WASI_SDK_VERSION
+        .trim_end_matches(char::is_numeric) // trim minor version...
+        .trim_end_matches('.'); // ...and '.' separator
+    let sdk_url = format!(
+        "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-{wasi_sdk_major_version}/{sdk_filename}",
+    );
+
+    eprintln!("Downloading wasi-sdk from {sdk_url}...");
+    let temp_tar_path = cache_dir.join(sdk_filename);
+
+    let status = Command::new("curl")
+        .arg("-f")
+        .arg("-L")
+        .arg("-o")
+        .arg(&temp_tar_path)
+        .arg(&sdk_url)
+        .status()
+        .map_err(|e| LoaderError::Curl(sdk_url.clone(), e))?;
+
+    if !status.success() {
+        Err(LoaderError::WasiSDKDownload(sdk_url))?;
+    }
+
+    eprintln!("Extracting wasi-sdk to {}...", wasi_sdk_dir.display());
+    extract_tar_gz_with_strip(&temp_tar_path, &wasi_sdk_dir)?;
+
+    fs::remove_file(temp_tar_path).ok();
+    for exe in &possible_executables {
+        let clang_exe = wasi_sdk_dir.join("bin").join(exe);
+        if clang_exe.exists() {
+            return Ok(clang_exe);
+        }
+    }
+
+    Err(LoaderError::WasiSDKClang(WasiSDKClangError {
+        wasi_sdk_dir: wasi_sdk_dir.to_string_lossy().to_string(),
+        possible_executables,
+        download: true,
+    }))?
+}
+
+/// Extracts a tar.gz archive with `tar`, stripping the first path component.
+fn extract_tar_gz_with_strip(archive_path: &Path, destination: &Path) -> Result<()> {
+    let status = Command::new("tar")
+        .arg("-xzf")
+        .arg(archive_path)
+        .arg("--strip-components=1")
+        .arg("-C")
+        .arg(destination)
+        .status()
+        .map_err(|e| LoaderError::Tar(archive_path.to_string_lossy().to_string(), e))?;
+
+    if !status.success() {
+        Err(LoaderError::Extraction(
+            archive_path.to_string_lossy().to_string(),
+            destination.to_string_lossy().to_string(),
+        ))?;
+    }
+
+    Ok(())
+}

@@ -353,7 +469,7 @@ pub fn run_wasm_stdlib() -> Result<()> {
         .map(|line| format!("-Wl,--export={}", &line[1..line.len() - 2]))
         .collect::<Vec<String>>();

-    let clang_exe = get_wasi_binary()?;
+    let clang_exe = ensure_wasi_sdk_exists()?;

     let output = Command::new(&clang_exe)
         .args([
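The download URL above is assembled from the pinned version file: the release tag uses only the major number (`wasi-sdk-29`), while the tarball name keeps the full version (`wasi-sdk-29.0-...`). The two `trim_end_matches` calls derive the former from the latter:

```rust
fn main() {
    let version = "29.0"; // contents of crates/loader/wasi-sdk-version
    let major = version
        .trim_end_matches(char::is_numeric) // "29.0" -> "29."
        .trim_end_matches('.');             // "29."  -> "29"
    assert_eq!(major, "29");
    // Yields, e.g. on x86_64 Linux:
    // .../releases/download/wasi-sdk-29/wasi-sdk-29.0-x86_64-linux.tar.gz
    println!(
        "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-{major}/wasi-sdk-{version}-x86_64-linux.tar.gz"
    );
}
```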
@@ -2,7 +2,7 @@ use std::{cmp::Ordering, path::Path};

 use anyhow::{anyhow, Context, Result};
 use indoc::indoc;
-use semver::{BuildMetadata, Prerelease, Version};
+use semver::{Prerelease, Version};

 use crate::{create_commit, BumpVersion};

@@ -48,7 +48,6 @@ pub fn run(args: BumpVersion) -> Result<()> {
             String::from_utf8_lossy(&output.stderr)
         );
     }
-    let latest_tag_sha = String::from_utf8(output.stdout)?.trim().to_string();

     let workspace_toml_version = Version::parse(&fetch_workspace_version()?)?;

@@ -65,102 +64,7 @@ pub fn run(args: BumpVersion) -> Result<()> {
         return Ok(());
     }

-    let output = std::process::Command::new("git")
-        .args(["rev-list", &format!("{latest_tag_sha}..HEAD")])
-        .output()?;
-    if !output.status.success() {
-        anyhow::bail!(
-            "Failed to get commits: {}",
-            String::from_utf8_lossy(&output.stderr)
-        );
-    }
-    let commits = String::from_utf8(output.stdout)?
-        .lines()
-        .map(|s| s.to_string())
-        .collect::<Vec<_>>();
-
-    let mut should_increment_patch = false;
-    let mut should_increment_minor = false;
-
-    for commit_sha in commits {
-        let output = std::process::Command::new("git")
-            .args(["log", "-1", "--format=%s", &commit_sha])
-            .output()?;
-        if !output.status.success() {
-            continue;
-        }
-        let message = String::from_utf8(output.stdout)?.trim().to_string();
-
-        let output = std::process::Command::new("git")
-            .args([
-                "diff-tree",
-                "--no-commit-id",
-                "--name-only",
-                "-r",
-                &commit_sha,
-            ])
-            .output()?;
-        if !output.status.success() {
-            continue;
-        }
-
-        let mut source_code_changed = false;
-        for path in String::from_utf8(output.stdout)?.lines() {
-            let path = Path::new(path);
-            if path.extension().is_some_and(|ext| {
-                ext.eq_ignore_ascii_case("rs")
-                    || ext.eq_ignore_ascii_case("js")
-                    || ext.eq_ignore_ascii_case("c")
-            }) {
-                source_code_changed = true;
-                break;
-            }
-        }
-
-        if source_code_changed {
-            should_increment_patch = true;
-
-            let Some((prefix, _)) = message.split_once(':') else {
-                continue;
-            };
-
-            let convention = if prefix.contains('(') {
-                prefix.split_once('(').unwrap().0
-            } else {
-                prefix
-            };
-
-            if ["feat", "feat!"].contains(&convention) || prefix.ends_with('!') {
-                should_increment_minor = true;
-            }
-        }
-    }
-
-    let next_version = if let Some(version) = args.version {
-        version
-    } else {
-        let mut next_version = current_version.clone();
-        if should_increment_minor {
-            next_version.minor += 1;
-            next_version.patch = 0;
-            next_version.pre = Prerelease::EMPTY;
-            next_version.build = BuildMetadata::EMPTY;
-        } else if should_increment_patch {
-            next_version.patch += 1;
-            next_version.pre = Prerelease::EMPTY;
-            next_version.build = BuildMetadata::EMPTY;
-        } else {
-            return Err(anyhow!(format!(
-                "No source code changed since {current_version}"
-            )));
-        }
-        next_version
-    };
-    if next_version <= current_version {
-        return Err(anyhow!(format!(
-            "Next version {next_version} must be greater than current version {current_version}"
-        )));
-    }
+    let next_version = args.version;

     println!("Bumping from {current_version} to {next_version}");
     update_crates(&current_version, &next_version)?;
@@ -16,7 +16,7 @@ use notify_debouncer_full::new_debouncer;

 use crate::{bail_on_err, watch_wasm, CheckWasmExports};

-const EXCLUDES: [&str; 23] = [
+const EXCLUDES: [&str; 25] = [
     // Unneeded because the JS side has its own way of implementing it
     "ts_node_child_by_field_name",
     "ts_node_edit",

@@ -44,6 +44,8 @@ const EXCLUDES: [&str; 25] = [
     "ts_query_cursor_delete",
     "ts_query_cursor_match_limit",
     "ts_query_cursor_remove_match",
+    "ts_query_cursor_set_point_range",
+    "ts_query_cursor_set_containing_byte_range",
 ];

 pub fn run(args: &CheckWasmExports) -> Result<()> {
@@ -7,6 +7,7 @@ mod embed_sources;
 mod fetch;
 mod generate;
 mod test;
+mod test_schema;
 mod upgrade_wasmtime;

 use std::{path::Path, process::Command};

@@ -40,6 +41,8 @@ enum Commands {
     GenerateBindings,
     /// Generates the fixtures for testing tree-sitter.
     GenerateFixtures(GenerateFixtures),
+    /// Generates the JSON schema for the test runner summary.
+    GenerateTestSchema,
     /// Generate the list of exports from Tree-sitter Wasm files.
     GenerateWasmExports,
     /// Run the test suite

@@ -94,8 +97,8 @@ struct BuildWasm {
 #[derive(Args)]
 struct BumpVersion {
     /// The version to bump to.
-    #[arg(long, short)]
-    version: Option<Version>,
+    #[arg(index = 1, required = true)]
+    version: Version,
 }

 #[derive(Args)]

@@ -236,6 +239,7 @@ fn run() -> Result<()> {
         Commands::GenerateFixtures(generate_fixtures_options) => {
             generate::run_fixtures(&generate_fixtures_options)?;
         }
+        Commands::GenerateTestSchema => test_schema::run_test_schema()?,
         Commands::GenerateWasmExports => generate::run_wasm_exports()?,
         Commands::Test(test_options) => test::run(&test_options)?,
         Commands::TestWasm => test::run_wasm()?,

@@ -73,9 +73,6 @@ pub fn run(args: &Test) -> Result<()> {
         .arg("--no-run")
         .arg("--message-format=json");

-    #[cfg(target_os = "windows")]
-    cargo_cmd.arg("--").arg("--test-threads=1");
-
     let cargo_cmd = cargo_cmd.stdout(Stdio::piped()).spawn()?;

     let jq_cmd = Command::new("jq")

@@ -103,9 +100,6 @@ pub fn run(args: &Test) -> Result<()> {
     }
     cargo_cmd.args(&args.args);

-    #[cfg(target_os = "windows")]
-    cargo_cmd.arg("--").arg("--test-threads=1");
-
     if args.nocapture {
         #[cfg(not(target_os = "windows"))]
         cargo_cmd.arg("--");
crates/xtask/src/test_schema.rs (new file, 25 lines)
@@ -0,0 +1,25 @@
+use std::path::PathBuf;
+
+use anyhow::Result;
+use serde_json::to_writer_pretty;
+
+use tree_sitter_cli::test::TestSummary;
+
+pub fn run_test_schema() -> Result<()> {
+    let schema = schemars::schema_for!(TestSummary);
+
+    let xtask_path: PathBuf = env!("CARGO_MANIFEST_DIR").into();
+    let schema_path = xtask_path
+        .parent()
+        .unwrap()
+        .parent()
+        .unwrap()
+        .join("docs")
+        .join("src")
+        .join("assets")
+        .join("schemas")
+        .join("test-summary.schema.json");
+    let mut file = std::fs::File::create(schema_path)?;
+
+    Ok(to_writer_pretty(&mut file, &schema)?)
+}
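The new xtask command leans on `schemars` deriving a JSON schema from the CLI's `TestSummary` type. A rough sketch of the mechanism (the `TestSummary` fields below are invented for illustration; the real type lives in `tree-sitter-cli`):

```rust
use schemars::{schema_for, JsonSchema};
use serde::Serialize;

// Hypothetical stand-in for tree_sitter_cli::test::TestSummary.
#[derive(Serialize, JsonSchema)]
struct TestSummary {
    passed: usize,
    failed: usize,
}

fn main() -> anyhow::Result<()> {
    let schema = schema_for!(TestSummary);
    // Pretty-print the generated JSON schema to stdout instead of a file.
    serde_json::to_writer_pretty(std::io::stdout(), &schema)?;
    Ok(())
}
```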
@@ -4,7 +4,6 @@ authors = [
     "Amaan Qureshi <amaanq12@gmail.com>",
 ]
 language = "en"
-multilingual = false
 src = "src"
 title = "Tree-sitter"
@@ -73,9 +73,8 @@ The behaviors of these three files are described in the next section.

 ## Queries

-Tree-sitter's syntax highlighting system is based on *tree queries*, which are a general system for pattern-matching on Tree-sitter's
-syntax trees. See [this section][pattern matching] of the documentation for more information
-about tree queries.
+Tree-sitter's syntax highlighting system is based on *tree queries*, which are a general system for pattern-matching on
+Tree-sitter's syntax trees. See [this section][pattern matching] of the documentation for more information about tree queries.

 Syntax highlighting is controlled by *three* different types of query files that are usually included in the `queries` folder.
 The default names for the query files use the `.scm` file. We chose this extension because it is commonly used for files written
@@ -3,7 +3,8 @@
 Tree-sitter can be used in conjunction with its [query language][query language] as a part of code navigation systems.
 An example of such a system can be seen in the `tree-sitter tags` command, which emits a textual dump of the interesting
 syntactic nodes in its file argument. A notable application of this is GitHub's support for [search-based code navigation][gh search].
-This document exists to describe how to integrate with such systems, and how to extend this functionality to any language with a Tree-sitter grammar.
+This document exists to describe how to integrate with such systems, and how to extend this functionality to any language
+with a Tree-sitter grammar.

 ## Tagging and captures

@@ -12,9 +13,9 @@ entities. Having found them, you use a syntax capture to label the entity and it

 The essence of a given tag lies in two pieces of data: the _role_ of the entity that is matched
 (i.e. whether it is a definition or a reference) and the _kind_ of that entity, which describes how the entity is used
-(i.e. whether it's a class definition, function call, variable reference, and so on). Our convention is to use a syntax capture
-following the `@role.kind` capture name format, and another inner capture, always called `@name`, that pulls out the name
-of a given identifier.
+(i.e. whether it's a class definition, function call, variable reference, and so on). Our convention is to use a syntax
+capture following the `@role.kind` capture name format, and another inner capture, always called `@name`, that pulls out
+the name of a given identifier.

 You may optionally include a capture named `@doc` to bind a docstring. For convenience purposes, the tagging system provides
 two built-in functions, `#select-adjacent!` and `#strip!` that are convenient for removing comment syntax from a docstring.
@@ -51,7 +51,7 @@ cargo install --path crates/cli
 If you're going to be in a fast iteration cycle and would like the CLI to build faster, you can use the `release-dev` profile:

 ```sh
-cargo build --release --profile release-dev
+cargo build --profile release-dev
 # or
 cargo install --path crates/cli --profile release-dev
 ```

@@ -83,6 +83,19 @@ cargo xtask generate-fixtures --wasm
 cargo xtask test-wasm
 ```

+#### Wasm Stdlib
+
+The tree-sitter Wasm stdlib can be built via xtask:
+
+```sh
+cargo xtask build-wasm-stdlib
+```
+
+This command looks for the [Wasi SDK][wasi_sdk] indicated by the `TREE_SITTER_WASI_SDK_PATH`
+environment variable. If you don't have the binary, it can be downloaded from wasi-sdk's [releases][wasi-sdk-releases]
+page. Note that any changes to `crates/language/wasm/**` require rebuilding the tree-sitter Wasm stdlib via
+`cargo xtask build-wasm-stdlib`.
+
 ### Debugging

 The test script has a number of useful flags. You can list them all by running `cargo xtask test -h`.

@@ -220,4 +233,6 @@ and the tree-sitter module is fetched from [here][js url]. This, along with the
 [pypi]: https://pypi.org
 [rust]: https://rustup.rs
 [ts repo]: https://github.com/tree-sitter/tree-sitter
+[wasi_sdk]: https://github.com/WebAssembly/wasi-sdk
+[wasi-sdk-releases]: https://github.com/WebAssembly/wasi-sdk/releases
 [web-ts]: https://www.npmjs.com/package/web-tree-sitter
Some files were not shown because too many files have changed in this diff.