Compare commits

61 commits — master...release-0.

| SHA1 |
|---|
| 6183b59f51 |
| 38223b9871 |
| 5e8760bf46 |
| 05beba8137 |
| 21a517c423 |
| 62feed0715 |
| 1a983b7e2c |
| ede1960eef |
| b79f31da80 |
| 62ce991e84 |
| 29263440c7 |
| 6919d8406d |
| 3473ca9a6b |
| bc4186776a |
| 4e870b9207 |
| 8d466ead30 |
| 9be7dda48f |
| bb8eb83f50 |
| 6c4c522724 |
| 5e645f11b2 |
| e3c8263338 |
| fc8c1863e2 |
| 2507aabc04 |
| d25a5e4886 |
| 2f6583aae2 |
| 340d3eeb41 |
| 28cbc771f1 |
| 70fd2c02f1 |
| 35f119db03 |
| 0258a41e15 |
| 32c1466224 |
| 54299d3d87 |
| 8745e5d2ce |
| 1fd07fd619 |
| 2303b7d2c5 |
| 0358feda05 |
| 1aa6567c7b |
| 9d0313af0f |
| 85a42f48be |
| aac741dfd1 |
| 4cf96126d4 |
| 61d67adbf7 |
| 5e93499f20 |
| b1493f9b35 |
| 89bd9e302e |
| 12bc174205 |
| a498790a82 |
| f629fd4aed |
| 657c7b548e |
| bdfe32402e |
| 8c45b79808 |
| cd1abd9351 |
| 46bdc14e20 |
| 0b167b0782 |
| 76fffb0f2d |
| b759a5fac5 |
| b3f808cc01 |
| 0d8f143a9d |
| c639d547f9 |
| 75d2915f48 |
| c21db9fea9 |
102 changed files with 2341 additions and 2248 deletions
.github/actions/cache/action.yml (23 lines changed, vendored)

```diff
@@ -1,24 +1,23 @@
-name: 'Cache'
-description: "This action caches fixtures"
+name: Cache
+
+description: This action caches fixtures
+
 outputs:
   cache-hit:
-    description: 'Cache hit'
-    value: ${{ steps.cache_output.outputs.cache-hit }}
+    description: Cache hit
+    value: ${{ steps.cache.outputs.cache-hit }}
+
 runs:
-  using: "composite"
+  using: composite
   steps:
     - uses: actions/cache@v4
-      id: cache_fixtures
+      id: cache
       with:
         path: |
           test/fixtures/grammars
           target/release/tree-sitter-*.wasm
         key: fixtures-${{ join(matrix.*, '_') }}-${{ hashFiles(
-          'cli/src/generate/**',
-          'script/generate-fixtures*',
+          'cli/generate/src/**',
+          'xtask/src/*',
           'test/fixtures/grammars/*/**/src/*.c',
           '.github/actions/cache/action.yml') }}
-
-    - run: echo "cache-hit=${{ steps.cache_fixtures.outputs.cache-hit }}" >> $GITHUB_OUTPUT
-      shell: bash
-      id: cache_output
```
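The interesting change here is the output plumbing: the old composite action needed a dedicated `bash` step whose only job was to re-echo the inner cache step's output into `$GITHUB_OUTPUT`, while the new version wires `outputs.cache-hit` directly to `steps.cache.outputs.cache-hit`. A minimal sketch of the mechanism the deleted step relied on, with a temp file standing in for the runner-provided `$GITHUB_OUTPUT` (an assumption for running it locally):

```bash
# Stand-in for the file GitHub Actions exposes as $GITHUB_OUTPUT
# (assumption: we're simulating the runner locally).
GITHUB_OUTPUT=$(mktemp)

# What the deleted `- run:` step did: append a key=value line that the
# runner later parses into a step output.
echo "cache-hit=true" >> "$GITHUB_OUTPUT"

# The runner reads the file back after the step finishes:
cat "$GITHUB_OUTPUT"   # -> cache-hit=true
rm -f "$GITHUB_OUTPUT"
```

Mapping the composite output straight onto the inner step saves a process spawn per run and removes one place where the step ids (`cache_fixtures`/`cache_output` vs. `cache`) could drift out of sync.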
.github/scripts/cross.sh (18 lines changed, vendored)

```diff
@@ -1,17 +1,3 @@
-#!/bin/bash
+#!/bin/bash -eu
 
-# set -x
-set -e
-
-if [ "$BUILD_CMD" != "cross" ]; then
-  echo "cross.sh - is a helper to assist only in cross compiling environments" >&2
-  echo "To use this tool set the BUILD_CMD env var to the \"cross\" value" >&2
-  exit 111
-fi
-
-if [ -z "$CROSS_IMAGE" ]; then
-  echo "The CROSS_IMAGE env var should be provided" >&2
-  exit 111
-fi
-
-docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
+exec docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
```
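Two idioms recur in all three rewritten helper scripts: options moved into the shebang (`#!/bin/bash -eu`), and `exec` on the final command. A small self-contained sketch of both (the `sleep` target is just a placeholder):

```bash
#!/bin/bash -eu
# -e: exit on the first failing command.
# -u: expanding an unset variable is a fatal error, so a missing
#     CROSS_IMAGE-style variable now fails loudly without an explicit
#     [ -z "$VAR" ] guard block.

echo "wrapper PID: $$"

# exec replaces this shell with the target process instead of forking a
# child: no extra bash process lingers, and the target's exit status and
# signals reach the caller directly.
exec sleep 1

echo "never reached"   # exec does not return on success
```

Note that options in a shebang only apply when the script is executed directly (`./cross.sh`), not when invoked as `bash cross.sh`, which is presumably acceptable for helpers that are always executed from `PATH`.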
.github/scripts/make.sh (20 lines changed, vendored)

```diff
@@ -1,19 +1,9 @@
-#!/bin/bash
+#!/bin/bash -eu
 
-# set -x
-set -e
-
-if [ "$BUILD_CMD" == "cross" ]; then
-  if [ -z "$CC" ]; then
-    echo "make.sh: CC is not set" >&2
-    exit 111
-  fi
-  if [ -z "$AR" ]; then
-    echo "make.sh: AR is not set" >&2
-    exit 111
-  fi
-
-  cross.sh make CC=$CC AR=$AR "$@"
+if [[ $BUILD_CMD == cross ]]; then
+  cross.sh make CC="$CC" AR="$AR" "$@"
 else
-  make "$@"
+  exec make "$@"
 fi
```
.github/scripts/tree-sitter.sh (27 lines changed, vendored)

```diff
@@ -1,28 +1,9 @@
-#!/bin/bash
-
-# set -x
-set -e
-
-if [ -z "$ROOT" ]; then
-  echo "The ROOT env var should be set to absolute path of a repo root folder" >&2
-  exit 111
-fi
-
-if [ -z "$TARGET" ]; then
-  echo "The TARGET env var should be equal to a \`cargo build --target <TARGET>\` command value" >&2
-  exit 111
-fi
+#!/bin/bash -eu
 
 tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
 
-if [ "$BUILD_CMD" == "cross" ]; then
-  if [ -z "$CROSS_RUNNER" ]; then
-    echo "The CROSS_RUNNER env var should be set to a CARGO_TARGET_*_RUNNER env var value" >&2
-    echo "that is available in a docker image used by the cross tool under the hood" >&2
-    exit 111
-  fi
-
-  cross.sh $CROSS_RUNNER "$tree_sitter" "$@"
+if [[ $BUILD_CMD == cross ]]; then
+  cross.sh "$CROSS_RUNNER" "$tree_sitter" "$@"
 else
-  "$tree_sitter" "$@"
+  exec "$tree_sitter" "$@"
 fi
```
.github/workflows/backport.yml (19 lines changed, vendored)

```diff
@@ -1,26 +1,29 @@
-name: backport
+name: Backport Pull Request
 
 on:
   pull_request_target:
     types: [closed, labeled]
 
-permissions:
-  contents: write
-  pull-requests: write
-
 jobs:
   backport:
+    permissions:
+      contents: write
+      pull-requests: write
     name: Backport Pull Request
     if: github.event.pull_request.merged
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4
 
-      - uses: actions/create-github-app-token@v1
+      - name: Create app token
+        uses: actions/create-github-app-token@v1
         id: app-token
         with:
           app-id: ${{ vars.BACKPORT_APP }}
           private-key: ${{ secrets.BACKPORT_KEY }}
 
       - name: Create backport PR
         id: backport
         uses: korthout/backport-action@v3
         with:
           pull_title: "${pull_title}"
```
.github/workflows/build.yml (234 lines changed, vendored)

```diff
@@ -8,7 +8,7 @@ env:
 on:
   workflow_call:
     inputs:
-      run_test:
+      run-test:
         default: true
         type: boolean
@@ -21,92 +21,93 @@ jobs:
       fail-fast: false
       matrix:
         platform:
-          - linux-arm64 #
-          - linux-arm #
-          - linux-x64 #
-          - linux-x86 #
-          - linux-powerpc64 #
-          - windows-arm64 #
-          - windows-x64 # <-- No C library build - requires an additional adapted Makefile for `cl.exe` compiler
-          - windows-x86 # -- // --
-          - macos-arm64 #
-          - macos-x64 #
+          - linux-arm64
+          - linux-arm
+          - linux-x64
+          - linux-x86
+          - linux-powerpc64
+          - windows-arm64
+          - windows-x64
+          - windows-x86
+          - macos-arm64
+          - macos-x64
 
         include:
           # When adding a new `target`:
           # 1. Define a new platform alias above
-          # 2. Add a new record to a matrix map in `cli/npm/install.js`
-          - { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
-          - { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
-          - { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-20.04 , cli_features: wasm } #2272
-          - { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
-          - { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
-          - { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
-          - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , cli_features: wasm }
-          - { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
-          - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-14 , cli_features: wasm }
-          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-12 , cli_features: wasm }
+          # 2. Add a new record to the matrix map in `cli/npm/install.js`
+          - { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+          - { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
+          - { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-20.04 , features: wasm } # See #2272
+          - { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+          - { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+          - { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
+          - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , features: wasm }
+          - { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
+          - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-14 , features: wasm }
+          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-13 , features: wasm }
 
           # Cross compilers for C library
           - { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar }
           - { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar }
           - { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar }
           - { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar }
 
-          # See #2041 tree-sitter issue
+          # Prevent race condition (see #2041)
           - { platform: windows-x64 , rust-test-threads: 1 }
           - { platform: windows-x86 , rust-test-threads: 1 }
 
-          # CLI only build
-          - { platform: windows-arm64 , cli-only: true }
+          # Can't natively run CLI on Github runner's host
+          - { platform: windows-arm64 , no-run: true }
 
     env:
       BUILD_CMD: cargo
-      EXE: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}
+      SUFFIX: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}
 
     defaults:
       run:
         shell: bash
 
     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4
 
       - name: Read Emscripten version
-        run: echo "EMSCRIPTEN_VERSION=$(cat cli/loader/emscripten-version)" >> $GITHUB_ENV
+        run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<cli/loader/emscripten-version)" >> $GITHUB_ENV
 
       - name: Install Emscripten
-        if: ${{ !matrix.cli-only && !matrix.use-cross }}
+        if: ${{ !matrix.no-run && !matrix.use-cross }}
         uses: mymindstorm/setup-emsdk@v14
         with:
           version: ${{ env.EMSCRIPTEN_VERSION }}
 
-      - run: rustup toolchain install stable --profile minimal
-      - run: rustup target add ${{ matrix.target }}
-      - uses: Swatinem/rust-cache@v2
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          target: ${{ matrix.target }}
 
       - name: Install cross
         if: ${{ matrix.use-cross }}
-        uses: taiki-e/install-action@v2
-        with:
-          tool: cross
+        run: RUSTFLAGS="" cargo install cross --git https://github.com/cross-rs/cross
 
-      - name: Build custom cross image
-        if: ${{ matrix.use-cross && matrix.os == 'ubuntu-latest' }}
+      - name: Configure cross
+        if: ${{ matrix.use-cross }}
         run: |
-          target="${{ matrix.target }}"
-          image=ghcr.io/cross-rs/$target:custom
-          echo "CROSS_IMAGE=$image" >> $GITHUB_ENV
-
-          echo "[target.$target]" >> Cross.toml
-          echo "image = \"$image\"" >> Cross.toml
-          echo "CROSS_CONFIG=$PWD/Cross.toml" >> $GITHUB_ENV
-
-          echo "FROM ghcr.io/cross-rs/$target:edge" >> Dockerfile
-          echo "RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -" >> Dockerfile
-          echo "RUN apt-get update && apt-get -y install nodejs" >> Dockerfile
-          docker build -t $image .
+          printf '%s\n' > Cross.toml \
+            '[target.${{ matrix.target }}]' \
+            'image = "ghcr.io/cross-rs/${{ matrix.target }}:edge"' \
+            '[build]' \
+            'pre-build = [' \
+            '  "dpkg --add-architecture $CROSS_DEB_ARCH",' \
+            '  "curl -fsSL https://deb.nodesource.com/setup_22.x | bash -",' \
+            '  "apt-get update && apt-get -y install libssl-dev nodejs"' \
+            ']'
+          cat - Cross.toml <<< 'Cross.toml:'
+          printf '%s\n' >> $GITHUB_ENV \
+            "CROSS_CONFIG=$PWD/Cross.toml" \
+            "CROSS_IMAGE=ghcr.io/cross-rs/${{ matrix.target }}:edge"
 
-      - name: Setup env extras
+      - name: Set up environment
         env:
           RUST_TEST_THREADS: ${{ matrix.rust-test-threads }}
           USE_CROSS: ${{ matrix.use-cross }}
@@ -115,68 +116,117 @@ jobs:
           AR: ${{ matrix.ar }}
         run: |
-          PATH="$PWD/.github/scripts:$PATH"
-          echo "$PWD/.github/scripts" >> $GITHUB_PATH
+          printf '%s/.github/scripts\n' "$PWD" >> $GITHUB_PATH
 
-          echo "TREE_SITTER=tree-sitter.sh" >> $GITHUB_ENV
-          echo "TARGET=$TARGET" >> $GITHUB_ENV
-          echo "ROOT=$PWD" >> $GITHUB_ENV
+          printf '%s\n' >> $GITHUB_ENV \
+            'TREE_SITTER=tree-sitter.sh' \
+            "TARGET=$TARGET" \
+            "ROOT=$PWD"
 
-          [ -n "$RUST_TEST_THREADS" ] && \
-            echo "RUST_TEST_THREADS=$RUST_TEST_THREADS" >> $GITHUB_ENV
+          [[ -n $RUST_TEST_THREADS ]] && \
+            printf 'RUST_TEST_THREADS=%s\n' "$RUST_TEST_THREADS" >> $GITHUB_ENV
 
-          [ -n "$CC" ] && echo "CC=$CC" >> $GITHUB_ENV
-          [ -n "$AR" ] && echo "AR=$AR" >> $GITHUB_ENV
+          [[ -n $CC ]] && printf 'CC=%s\n' "$CC" >> $GITHUB_ENV
+          [[ -n $AR ]] && printf 'AR=%s\n' "$AR" >> $GITHUB_ENV
 
-          if [ "$USE_CROSS" == "true" ]; then
-            echo "BUILD_CMD=cross" >> $GITHUB_ENV
-            runner=$(BUILD_CMD=cross cross.sh bash -c "env | sed -nr '/^CARGO_TARGET_.*_RUNNER=/s///p'")
-            [ -n "$runner" ] && echo "CROSS_RUNNER=$runner" >> $GITHUB_ENV
+          if [[ $USE_CROSS == true ]]; then
+            printf 'BUILD_CMD=cross\n' >> $GITHUB_ENV
+            runner=$(cross.sh bash -c "env | sed -n 's/^CARGO_TARGET_.*_RUNNER=//p'")
+            [[ -n $runner ]] && printf 'CROSS_RUNNER=%s\n' "$runner" >> $GITHUB_ENV
           fi
 
-      - name: Build C library
-        if: ${{ !contains(matrix.os, 'windows') }} # Requires an additional adapted Makefile for `cl.exe` compiler
-        run: make.sh -j CFLAGS="-Werror"
+      - name: Build wasmtime library
+        if: ${{ !matrix.use-cross && contains(matrix.features, 'wasm') }}
+        run: |
+          mkdir -p target
+          WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
+            jq -r '.packages[] | select(.name == "wasmtime-c-api-impl") | .version')
+          curl -LSs "$WASMTIME_REPO/archive/refs/tags/v${WASMTIME_VERSION}.tar.gz" | tar xzf - -C target
+          cd target/wasmtime-${WASMTIME_VERSION}
+          cmake -S crates/c-api -B target/c-api \
+            -DCMAKE_INSTALL_PREFIX="$PWD/artifacts" \
+            -DWASMTIME_DISABLE_ALL_FEATURES=ON \
+            -DWASMTIME_FEATURE_CRANELIFT=ON \
+            -DWASMTIME_TARGET='${{ matrix.target }}'
+          cmake --build target/c-api && cmake --install target/c-api
+          printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
+        env:
+          WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
+          RUSTFLAGS: ""
+
+      - name: Build C library (make)
+        if: ${{ runner.os != 'Windows' }}
+        run: make.sh -j CFLAGS="$CFLAGS"
+        env:
+          CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
+
+      - name: Build C library (CMake)
+        if: ${{ !matrix.use-cross }}
+        run: |
+          cmake -S lib -B build/static \
+            -DBUILD_SHARED_LIBS=OFF \
+            -DCMAKE_BUILD_TYPE=Debug \
+            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
+            -DTREE_SITTER_FEATURE_WASM=$WASM
+          cmake --build build/static --verbose
+
+          cmake -S lib -B build/shared \
+            -DBUILD_SHARED_LIBS=ON \
+            -DCMAKE_BUILD_TYPE=Debug \
+            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
+            -DTREE_SITTER_FEATURE_WASM=$WASM
+          cmake --build build/shared --verbose
+        env:
+          CC: ${{ contains(matrix.target, 'linux') && 'clang' || '' }}
+          WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
 
       - name: Build wasm library
-        if: ${{ !matrix.cli-only && !matrix.use-cross }} # No sense to build on the same Github runner hosts many times
-        run: script/build-wasm
+        # No reason to build on the same Github runner hosts many times
+        if: ${{ !matrix.no-run && !matrix.use-cross }}
+        run: $BUILD_CMD run -p xtask -- build-wasm
 
-      - run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.cli_features }}
+      - name: Build target
+        run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.features }}
 
-      - run: script/fetch-fixtures
+      - name: Cache fixtures
+        id: cache
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        uses: ./.github/actions/cache
 
-      - uses: ./.github/actions/cache
-        id: cache
+      - name: Fetch fixtures
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        run: $BUILD_CMD run -p xtask -- fetch-fixtures
 
       - name: Generate fixtures
-        if: ${{ !matrix.cli-only && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # Can't natively run CLI on Github runner's host
-        run: script/generate-fixtures
+        if: ${{ !matrix.no-run && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+        run: $BUILD_CMD run -p xtask -- generate-fixtures
 
-      - name: Generate WASM fixtures
-        if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # See comment for the "Build wasm library" step
-        run: script/generate-fixtures-wasm
+      - name: Generate Wasm fixtures
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+        run: $BUILD_CMD run -p xtask -- generate-fixtures --wasm
 
       - name: Run main tests
-        if: ${{ !matrix.cli-only && inputs.run_test }} # Can't natively run CLI on Github runner's host
-        run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.cli_features }}
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.features }}
 
       - name: Run wasm tests
-        if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # See comment for the "Build wasm library" step
-        run: script/test-wasm
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
+        run: $BUILD_CMD run -p xtask -- test-wasm
 
       - name: Run benchmarks
-        if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # Cross-compiled benchmarks make no sense
+        # Cross-compiled benchmarks are pointless
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
         run: $BUILD_CMD bench benchmark -p tree-sitter-cli --target=${{ matrix.target }}
 
       - name: Upload CLI artifact
         uses: actions/upload-artifact@v4
         with:
           name: tree-sitter.${{ matrix.platform }}
-          path: target/${{ matrix.target }}/release/tree-sitter${{ env.EXE }}
+          path: target/${{ matrix.target }}/release/tree-sitter${{ env.SUFFIX }}
           if-no-files-found: error
           retention-days: 7
 
-      - name: Upload WASM artifacts
+      - name: Upload Wasm artifacts
         if: ${{ matrix.platform == 'linux-x64' }}
         uses: actions/upload-artifact@v4
         with:
```
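Several steps above replace repeated `echo ... >> $GITHUB_ENV` lines with a single batched `printf`. The trick is that `printf` reuses its format string once per remaining argument; a sketch with a temp file standing in for the runner-provided `$GITHUB_ENV` (an assumption for local testing):

```bash
# Temp file stands in for $GITHUB_ENV outside of a real runner.
GITHUB_ENV=$(mktemp)
TARGET=x86_64-unknown-linux-gnu   # illustrative value

# One printf emits one KEY=VALUE line per argument, because the format
# string '%s\n' is applied repeatedly until the arguments run out.
printf '%s\n' >> "$GITHUB_ENV" \
  'TREE_SITTER=tree-sitter.sh' \
  "TARGET=$TARGET" \
  "ROOT=$PWD"

cat "$GITHUB_ENV"
rm -f "$GITHUB_ENV"
```

The same step also simplifies the cross-runner discovery: `sed -n 's/^CARGO_TARGET_.*_RUNNER=//p'` prints only the value of any `CARGO_TARGET_*_RUNNER` variable, replacing the less common `sed -nr '/.../s///p'` address form.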
.github/workflows/ci.yml (26 lines changed, vendored)

```diff
@@ -1,9 +1,9 @@
 name: CI
 
 on:
   pull_request:
   push:
-    branches:
-      - 'master'
+    branches: [master]
 
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -13,12 +13,22 @@ jobs:
   checks:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - run: rustup toolchain install stable --profile minimal
-      - run: rustup toolchain install nightly --profile minimal
-      - run: rustup component add --toolchain nightly rustfmt
-      - uses: Swatinem/rust-cache@v2
-      - run: make lint
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up stable Rust toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          toolchain: stable
+
+      - name: Set up nightly Rust toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          toolchain: nightly
+          components: clippy, rustfmt
+
+      - name: Lint files
+        run: make lint
 
   sanitize:
     uses: ./.github/workflows/sanitize.yml
```
.github/workflows/release.yml (68 lines changed, vendored)

```diff
@@ -1,4 +1,5 @@
 name: Release
+
 on:
   workflow_dispatch:
   push:
@@ -9,16 +10,17 @@ jobs:
   build:
     uses: ./.github/workflows/build.yml
     with:
-      run_test: false
+      run-test: false
 
   release:
-    name: Release
+    name: Release on GitHub
     runs-on: ubuntu-latest
     needs: build
     permissions:
       contents: write
     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4
 
       - name: Download build artifacts
         uses: actions/download-artifact@v4
@@ -42,29 +44,24 @@ jobs:
           ls -l target/
 
       - name: Create release
-        uses: softprops/action-gh-release@v2
-        with:
-          name: ${{ github.ref_name }}
-          tag_name: ${{ github.ref_name }}
-          fail_on_unmatched_files: true
-          files: |
-            target/tree-sitter-*.gz
-            target/tree-sitter.wasm
+        run: |-
+          gh release create \
+            target/tree-sitter-*.gz \
+            target/tree-sitter.wasm \
+            target/tree-sitter.js
+        env:
+          GH_TOKEN: ${{ github.token }}
 
   crates_io:
-    name: Publish CLI to Crates.io
+    name: Publish packages to Crates.io
     runs-on: ubuntu-latest
     needs: release
     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4
 
-      - name: Setup Rust
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1
 
       - name: Publish crates to Crates.io
         uses: katyo/publish-crates@v2
@@ -72,29 +69,32 @@ jobs:
           registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
 
   npm:
-    name: Publish lib to npmjs.com
+    name: Publish packages to npmjs.com
     runs-on: ubuntu-latest
     needs: release
     strategy:
       fail-fast: false
       matrix:
-        directory: ["cli/npm", "lib/binding_web"]
+        directory: [cli/npm, lib/binding_web]
     steps:
-      - uses: actions/checkout@v4
+      - name: CHeckout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          registry-url: https://registry.npmjs.org
+
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1
 
       - name: Build wasm
         if: matrix.directory == 'lib/binding_web'
-        run: ./script/build-wasm
+        run: cargo xtask build-wasm
 
-      - name: Setup Node
-        uses: actions/setup-node@v4
-        with:
-          node-version: 18
-          registry-url: "https://registry.npmjs.org"
-
-      - name: Publish lib to npmjs.com
+      - name: Publish to npmjs.com
+        working-directory: ${{ matrix.directory }}
+        run: npm publish
         env:
-          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
-        run: |
-          cd ${{ matrix.directory }}
-          npm publish
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```
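The release step drops the third-party `softprops/action-gh-release` action in favor of the `gh` CLI, which is preinstalled on hosted runners and authenticates through the `GH_TOKEN` environment variable. A hedged sketch of typical usage; the tag name, file list, and flags here are illustrative, not taken from the workflow above:

```bash
# Illustrative only: tag and asset names are placeholders.
# In a workflow, GH_TOKEN is usually set to ${{ github.token }}.
export GH_TOKEN="<token-with-contents-write>"

gh release create v0.0.0-example \
  target/tree-sitter-*.gz \
  target/tree-sitter.wasm \
  --title "v0.0.0-example" \
  --notes "Example release"
```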
.github/workflows/response.yml (37 lines changed, vendored)

```diff
@@ -1,34 +1,47 @@
-name: no_response
+name: No response
 
 on:
   schedule:
-    - cron: '30 1 * * *' # Run every day at 01:30
+    - cron: "30 1 * * *" # Run every day at 01:30
   workflow_dispatch:
   issue_comment:
 
-permissions:
-  issues: write
-  pull-requests: write
-
 jobs:
   close:
+    name: Close issues with no response
     if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     runs-on: ubuntu-latest
+    permissions:
+      issues: write
+      pull-requests: write
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/github-script@v7
+      - name: Checkout script
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: .github/scripts/close_unresponsive.js
+          sparse-checkout-cone-mode: false
+
+      - name: Run script
+        uses: actions/github-script@v7
         with:
           script: |
             const script = require('./.github/scripts/close_unresponsive.js')
             await script({github, context})
 
   remove_label:
+    name: Remove response label
     if: github.event_name == 'issue_comment'
     runs-on: ubuntu-latest
+    permissions:
+      issues: write
+      pull-requests: write
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/github-script@v7
+      - name: Checkout script
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: .github/scripts/remove_response_label.js
+          sparse-checkout-cone-mode: false
+
+      - name: Run script
+        uses: actions/github-script@v7
         with:
           script: |
             const script = require('./.github/scripts/remove_response_label.js')
```
.github/workflows/reviewers_remove.yml (18 lines changed, vendored)

```diff
@@ -1,15 +1,23 @@
-name: "reviewers: remove"
+name: Remove Reviewers
 
 on:
   pull_request_target:
     types: [converted_to_draft, closed]
 
-permissions:
-  pull-requests: write
-
 jobs:
   remove-reviewers:
     runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
     steps:
-      - uses: actions/checkout@v4
-      - name: 'Remove reviewers'
+      - name: Checkout script
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: .github/scripts/reviewers_remove.js
+          sparse-checkout-cone-mode: false
+
+      - name: Run script
         uses: actions/github-script@v7
         with:
           script: |
```
.github/workflows/sanitize.yml (55 lines changed, vendored)

```diff
@@ -8,39 +8,44 @@ on:
   workflow_call:
 
 jobs:
-  check_undefined_behaviour:
-    name: Sanitizer checks
+  check-undefined-behaviour:
     runs-on: ubuntu-latest
     timeout-minutes: 20
     env:
       TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
     steps:
-      - name: Checkout source code
+      - name: Checkout repository
         uses: actions/checkout@v4
 
       - name: Install UBSAN library
         run: sudo apt-get update -y && sudo apt-get install -y libubsan1
 
-      - run: rustup toolchain install stable --profile minimal
-      - uses: Swatinem/rust-cache@v2
-      - run: cargo build --release
-      - run: script/fetch-fixtures
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1
 
-      - uses: ./.github/actions/cache
-        id: cache
+      - name: Build project
+        run: cargo build --release
 
-      - if: ${{ steps.cache.outputs.cache-hit != 'true' }}
-        run: script/generate-fixtures
+      - name: Cache fixtures
+        uses: ./.github/actions/cache
+        id: cache
 
-      - name: Run main tests with undefined behaviour sanitizer (UBSAN)
-        env:
-          CFLAGS: -fsanitize=undefined
-          RUSTFLAGS: ${{ env.RUSTFLAGS }} -lubsan
-        run: cargo test -- --test-threads 1
+      - name: Fetch fixtures
+        run: cargo xtask fetch-fixtures
 
-      - name: Run main tests with address sanitizer (ASAN)
-        env:
-          ASAN_OPTIONS: verify_asan_link_order=0
-          CFLAGS: -fsanitize=address
-          RUSTFLAGS: ${{ env.RUSTFLAGS }} -lasan --cfg sanitizing
-        run: cargo test -- --test-threads 1
+      - name: Generate fixtures
+        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        run: cargo xtask generate-fixtures
+
+      - name: Run main tests with undefined behaviour sanitizer (UBSAN)
+        run: cargo test -- --test-threads 1
+        env:
+          CFLAGS: -fsanitize=undefined
+          RUSTFLAGS: ${{ env.RUSTFLAGS }} -lubsan
+
+      - name: Run main tests with address sanitizer (ASAN)
+        run: cargo test -- --test-threads 1
+        env:
+          ASAN_OPTIONS: verify_asan_link_order=0
+          CFLAGS: -fsanitize=address
+          RUSTFLAGS: ${{ env.RUSTFLAGS }} -lasan --cfg sanitizing
```
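The job instruments the C parts of the build by injecting `-fsanitize=undefined`/`-fsanitize=address` through `CFLAGS` and linking the matching runtime via `RUSTFLAGS`. For a standalone feel of what UBSAN catches, here is a sketch assuming a gcc or clang toolchain with sanitizer support installed:

```bash
# Assumes cc is gcc/clang with UBSAN support.
cat > overflow.c <<'EOF'
#include <limits.h>
#include <stdio.h>

int main(void) {
    int x = INT_MAX;
    x += 1;                /* signed overflow: undefined behaviour */
    printf("%d\n", x);
    return 0;
}
EOF

cc -fsanitize=undefined -g overflow.c -o overflow
./overflow   # prints a runtime error report pointing at the x += 1 line
```

The `-- --test-threads 1` in the cargo invocations presumably keeps the sanitizer reports readable by preventing interleaved output from parallel tests.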
Cargo.lock (159 lines changed, generated)

```diff
@@ -156,9 +156,9 @@ checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3"
 
 [[package]]
 name = "cc"
-version = "1.1.24"
+version = "1.1.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938"
+checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1"
 dependencies = [
  "jobserver",
  "libc",
@@ -311,18 +311,18 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
 
 [[package]]
 name = "cranelift-bforest"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6e376bd92bddd03dcfc443b14382611cae5d10012aa0b1628bbf18bb73f12f7"
+checksum = "7b765ed4349e66bedd9b88c7691da42e24c7f62067a6be17ddffa949367b6e17"
 dependencies = [
  "cranelift-entity",
 ]
 
 [[package]]
 name = "cranelift-bitset"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45ecbe07f25a8100e5077933516200e97808f1d7196b5a073edb85fa08fde32e"
+checksum = "9eaa2aece6237198afd32bff57699e08d4dccb8d3902c214fc1e6ba907247ca4"
 dependencies = [
  "serde",
  "serde_derive",
@@ -330,9 +330,9 @@ dependencies = [
 
 [[package]]
 name = "cranelift-codegen"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc60913f32c1de18538c28bef74b8c87cf16de7841a1b0956fcf01b23237853a"
+checksum = "351824439e59d42f0e4fa5aac1d13deded155120043565769e55cd4ad3ca8ed9"
 dependencies = [
  "bumpalo",
  "cranelift-bforest",
@@ -353,33 +353,33 @@ dependencies = [
 
 [[package]]
 name = "cranelift-codegen-meta"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bae009e7822f47aa55e7dcef846ccf3aa4eb102ca6b4bcb8a44b36f3f49aa85c"
+checksum = "5a0ce0273d7a493ef8f31f606849a4e931c19187a4923f5f87fc1f2b13109981"
 dependencies = [
  "cranelift-codegen-shared",
 ]
 
 [[package]]
 name = "cranelift-codegen-shared"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c78f01a852536c68e34444450f845ed6e0782a1f047f85397fe460b8fbce8f1"
+checksum = "0f72016ac35579051913f4f07f6b36c509ed69412d852fd44c8e1d7b7fa6d92a"
 
 [[package]]
 name = "cranelift-control"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a061b22e00a9e36b31f2660dfb05a9617b7775bd54b79754d3bb75a990dac06"
+checksum = "db28951d21512c4fd0554ef179bfb11e4eb6815062957a9173824eee5de0c46c"
 dependencies = [
  "arbitrary",
 ]
 
 [[package]]
 name = "cranelift-entity"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95e2b261a3e74ae42f4e606906d5ffa44ee2684e8b1ae23bdf75d21908dc9233"
+checksum = "14ebe592a2f81af9237cf9be29dd3854ecb72108cfffa59e85ef12389bf939e3"
 dependencies = [
  "cranelift-bitset",
  "serde",
@@ -388,9 +388,9 @@ dependencies = [
 
 [[package]]
 name = "cranelift-frontend"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe14abba0e6bab42aca0f9ce757f96880f9187e88bc6cb975ed6acd8a42f7770"
+checksum = "4437db9d60c7053ac91ded0802740c2ccf123ee6d6898dd906c34f8c530cd119"
 dependencies = [
  "cranelift-codegen",
  "log",
@@ -400,15 +400,15 @@ dependencies = [
 
 [[package]]
 name = "cranelift-isle"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "311d91ae72b37d4262b51217baf8c9e01f1afd5148931468da1fdb7e9d011347"
+checksum = "230cb33572b9926e210f2ca28145f2bc87f389e1456560932168e2591feb65c1"
 
 [[package]]
 name = "cranelift-native"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a3f84c75e578189ff7a716c24ad83740b553bf583f2510b323bfe4c1a74bb93"
+checksum = "364524ac7aef7070b1141478724abebeec297d4ea1e87ad8b8986465e91146d9"
 dependencies = [
  "cranelift-codegen",
  "libc",
@@ -417,9 +417,9 @@ dependencies = [
 
 [[package]]
 name = "cranelift-wasm"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f56b7b2476c47b2091eee5a20bc54a80fbb29ca5313ae2bd0dea52621abcfca1"
+checksum = "0572cbd9d136a62c0f39837b6bce3b0978b96b8586794042bec0c214668fd6f5"
 dependencies = [
  "cranelift-codegen",
  "cranelift-entity",
@@ -442,9 +442,9 @@ dependencies = [
 
 [[package]]
 name = "ctor"
-version = "0.2.8"
+version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
+checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
 dependencies = [
  "quote",
  "syn",
@@ -565,6 +565,12 @@ dependencies = [
  "windows-sys 0.59.0",
 ]
 
+[[package]]
+name = "foldhash"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2"
+
 [[package]]
 name = "form_urlencoded"
 version = "1.2.1"
@@ -651,6 +657,9 @@ name = "hashbrown"
 version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
+dependencies = [
+ "foldhash",
+]
 
 [[package]]
 name = "heck"
@@ -982,24 +991,21 @@ dependencies = [
 
 [[package]]
 name = "object"
-version = "0.36.4"
+version = "0.36.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a"
+checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e"
 dependencies = [
  "crc32fast",
- "hashbrown 0.14.5",
+ "hashbrown 0.15.0",
  "indexmap",
  "memchr",
 ]
 
 [[package]]
 name = "once_cell"
-version = "1.20.1"
+version = "1.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1"
-dependencies = [
- "portable-atomic",
-]
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"
 
 [[package]]
 name = "openssl-probe"
@@ -1055,12 +1061,6 @@ version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"
 
-[[package]]
-name = "portable-atomic"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2"
-
 [[package]]
 name = "postcard"
 version = "1.0.10"
@@ -1104,9 +1104,9 @@ dependencies = [
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.86"
+version = "1.0.87"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
+checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a"
 dependencies = [
  "unicode-ident",
 ]
@@ -1538,7 +1538,7 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "bindgen",
  "cc",
@@ -1551,7 +1551,7 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter-cli"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anstyle",
  "anyhow",
@@ -1601,7 +1601,7 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter-config"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anyhow",
  "dirs",
@@ -1611,7 +1611,7 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter-generate"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anyhow",
  "heck 0.5.0",
@@ -1632,7 +1632,7 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter-highlight"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "lazy_static",
  "regex",
@@ -1643,17 +1643,18 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter-language"
-version = "0.1.2"
+version = "0.1.3"
 
 [[package]]
 name = "tree-sitter-loader"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anyhow",
  "cc",
  "dirs",
  "fs4",
+ "indoc",
  "lazy_static",
  "libloading",
  "once_cell",
  "path-slash",
@@ -1670,7 +1671,7 @@ dependencies = [
 
 [[package]]
 name = "tree-sitter-tags"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "memchr",
  "regex",
@@ -1871,9 +1872,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03601559991d459a228236a49135364eac85ac00dc07b65fb95ae61a957793af"
+checksum = "ef01f9cb9636ed42a7ec5a09d785c0643590199dc7372dc22c7e2ba7a31a97d4"
 dependencies = [
  "anyhow",
  "bitflags",
@@ -1911,18 +1912,18 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-asm-macros"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e453b3bde07312874c0c6703e2de9281daab46646172c1b71fa59a97226f858e"
+checksum = "ba5b20797419d6baf2296db2354f864e8bb3447cacca9d151ce7700ae08b4460"
 dependencies = [
  "cfg-if",
 ]
 
 [[package]]
 name = "wasmtime-c-api-impl"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4def1c38f8981c88d92e10acc7efb01da5b5775897fca2ab81caad76e930bd6d"
+checksum = "2852f09a087c740683a32a33b8f34268d1d33c1298b4f707d25f4bee158ccd75"
 dependencies = [
  "anyhow",
  "log",
@@ -1934,9 +1935,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-c-api-macros"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c3feb5a461c52a376e80ef7ce7cee37a3a8395cb1794ac8eb340c0cd0b5d715"
+checksum = "fa52cecfad085e7a9725bcbf3c2b15a900e5dc14f5ddcc305c9779c19936618b"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1944,9 +1945,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-component-macro"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a6faeabbdbfd27e24e8d5204207ba9c247a13cf84181ea721b5f209f281fe01"
+checksum = "26593c4b18c76ca3c3fbdd813d6692256537b639b851d8a6fe827e3d6966fc01"
 dependencies = [
  "anyhow",
  "proc-macro2",
@@ -1959,15 +1960,15 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-component-util"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b1b24db4aa3dc7c0d3181d1833b4fe9ec0cd3f08780b746415c84c0a9ec9011"
+checksum = "a2ed562fbb0cbed20a56c369c8de146c1de06a48c19e26ed9aa45f073514ee60"
 
 [[package]]
 name = "wasmtime-cranelift"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c737bef9ea94aab874e29ac6a8688b89ceb43c7b51f047079c43387972c07ee3"
+checksum = "f389b789cbcb53a8499131182135dea21d7d97ad77e7fb66830f69479ef0e68c"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -1990,9 +1991,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-environ"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "817bfa9ea878ec37aa24f85fd6912844e8d87d321662824cf920d561b698cdfd"
+checksum = "84b72debe8899f19bedf66f7071310f06ef62de943a1369ba9b373613e77dd3d"
 dependencies = [
  "anyhow",
  "cranelift-bitset",
@@ -2013,9 +2014,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-jit-icache-coherence"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48011232c0da424f89c3752a378d0b7f512fae321ea414a43e1e7a302a6a1f7e"
+checksum = "1d930bc1325bc0448be6a11754156d770f56f6c3a61f440e9567f36cd2ea3065"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -2025,15 +2026,15 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-slab"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9858a22e656ae8574631221b474b8bebf63f1367fcac3f179873833eabc2ced"
+checksum = "055a181b8d03998511294faea14798df436503f14d7fd20edcf7370ec583e80a"
 
 [[package]]
 name = "wasmtime-types"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d14b8a9206fe94485a03edb1654cd530dbd2a859a85a43502cb4e99653a568c"
+checksum = "c8340d976673ac3fdacac781f2afdc4933920c1adc738c3409e825dab3955399"
 dependencies = [
  "anyhow",
  "cranelift-entity",
@@ -2045,9 +2046,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-versioned-export-macros"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9bb1f01efb8b542eadfda511e8ea1cc54309451aba97b69969e5b1a59cb7ded"
+checksum = "a4b0c1f76891f778db9602ee3fbb4eb7e9a3f511847d1fb1b69eddbcea28303c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2056,9 +2057,9 @@ dependencies = [
 
 [[package]]
 name = "wasmtime-wit-bindgen"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb1596caa67b31ac675fd3da61685c4260f8b10832021db42c85d227b7ba8133"
+checksum = "b2fca2cbb5bb390f65d4434c19bf8d9873dfc60f10802918ebcd6f819a38d703"
 dependencies = [
  "anyhow",
  "heck 0.4.1",
@@ -2348,8 +2349,14 @@
 name = "xtask"
 version = "0.1.0"
 dependencies = [
 "anstyle",
 "anyhow",
 "bindgen",
 "cc",
 "clap",
 "git2",
 "indoc",
 "regex",
 "semver",
 "serde",
 "serde_json",
```
Cargo.toml (65 lines changed)

```diff
@@ -13,7 +13,7 @@ members = [
 resolver = "2"
 
 [workspace.package]
-version = "0.24.1"
+version = "0.24.7"
 authors = ["Max Brunsfeld <maxbrunsfeld@gmail.com>"]
 edition = "2021"
 rust-version = "1.74.1"
@@ -23,6 +23,55 @@ license = "MIT"
 keywords = ["incremental", "parsing"]
 categories = ["command-line-utilities", "parsing"]
 
+[workspace.lints.clippy]
+dbg_macro = "deny"
+todo = "deny"
+pedantic = { level = "warn", priority = -1 }
+nursery = { level = "warn", priority = -1 }
+cargo = { level = "warn", priority = -1 }
+
+# The lints below are a specific subset of the pedantic+nursery lints
+# that we explicitly allow in the tree-sitter codebase because they either:
+#
+# 1. Contain false positives,
+# 2. Are unnecessary, or
+# 3. Worsen the code
+
+branches_sharing_code = "allow"
+cast_lossless = "allow"
+cast_possible_truncation = "allow"
+cast_possible_wrap = "allow"
+cast_precision_loss = "allow"
+cast_sign_loss = "allow"
+checked_conversions = "allow"
+cognitive_complexity = "allow"
+collection_is_never_read = "allow"
+fallible_impl_from = "allow"
+fn_params_excessive_bools = "allow"
+inline_always = "allow"
+if_not_else = "allow"
+items_after_statements = "allow"
+match_wildcard_for_single_variants = "allow"
+missing_errors_doc = "allow"
+missing_panics_doc = "allow"
+module_name_repetitions = "allow"
+multiple_crate_versions = "allow"
+option_if_let_else = "allow"
+or_fun_call = "allow"
+range_plus_one = "allow"
+redundant_clone = "allow"
+redundant_closure_for_method_calls = "allow"
+ref_option = "allow"
+similar_names = "allow"
+string_lit_as_bytes = "allow"
+struct_excessive_bools = "allow"
+struct_field_names = "allow"
+transmute_undefined_repr = "allow"
+too_many_lines = "allow"
+unnecessary_wraps = "allow"
+unused_self = "allow"
+used_underscore_items = "allow"
+
 [profile.optimize]
 inherits = "release"
 strip = true # Automatically strip symbols from the binary.
@@ -56,7 +105,7 @@ clap = { version = "4.5.18", features = [
   "unstable-styles",
 ] }
 clap_complete = "4.5.29"
-ctor = "0.2.8"
+ctor = "0.2.9"
 ctrlc = { version = "3.4.5", features = ["termination"] }
 dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
 dirs = "5.0.1"
@@ -96,9 +145,9 @@ walkdir = "2.5.0"
 wasmparser = "0.217.0"
 webbrowser = "1.0.2"
 
-tree-sitter = { version = "0.24.0", path = "./lib" }
-tree-sitter-generate = { version = "0.24.0", path = "./cli/generate" }
-tree-sitter-loader = { version = "0.24.0", path = "./cli/loader" }
-tree-sitter-config = { version = "0.24.0", path = "./cli/config" }
-tree-sitter-highlight = { version = "0.24.0", path = "./highlight" }
-tree-sitter-tags = { version = "0.24.0", path = "./tags" }
+tree-sitter = { version = "0.24.5", path = "./lib" }
+tree-sitter-generate = { version = "0.24.5", path = "./cli/generate" }
+tree-sitter-loader = { version = "0.24.5", path = "./cli/loader" }
+tree-sitter-config = { version = "0.24.5", path = "./cli/config" }
+tree-sitter-highlight = { version = "0.24.5", path = "./highlight" }
+tree-sitter-tags = { version = "0.24.5", path = "./tags" }
```
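The new `[workspace.lints.clippy]` table only takes effect in member crates that opt in with `[lints] workspace = true`, which is exactly what the per-crate `Cargo.toml` diffs below add. To try the inherited lint set locally, something like the following should work (assuming the clippy component is installed):

```bash
# Install clippy if needed, then lint every workspace member.
rustup component add clippy
cargo clippy --workspace --all-targets
```

Adding `-- -D warnings` would additionally promote the warn-level pedantic/nursery groups to hard errors, which is stricter than the severities the table itself declares.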
Makefile (18 lines changed)

```diff
@@ -2,7 +2,7 @@ ifeq ($(OS),Windows_NT)
 $(error Windows is not supported)
 endif
 
-VERSION := 0.24.1
+VERSION := 0.24.7
 DESCRIPTION := An incremental parsing system for programming tools
 HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
 
@@ -25,7 +25,7 @@ OBJ := $(SRC:.c=.o)
 
 # define default flags, and override to append mandatory flags
 ARFLAGS := rcs
-CFLAGS ?= -O3 -Wall -Wextra -Wshadow -pedantic
+CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
 override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include
 
@@ -62,8 +62,8 @@ endif
 
 tree-sitter.pc: lib/tree-sitter.pc.in
 	sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
-	    -e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR)|' \
-	    -e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR)|' \
+	    -e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
+	    -e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR:$(PREFIX)/%=%)|' \
 	    -e 's|@PROJECT_DESCRIPTION@|$(DESCRIPTION)|' \
 	    -e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
 	    -e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
@@ -94,13 +94,13 @@ uninstall:
 ##### Dev targets #####
 
 test:
-	script/fetch-fixtures
-	script/generate-fixtures
-	script/test
+	cargo xtask fetch-fixtures
+	cargo xtask generate-fixtures
+	cargo xtask test
 
 test_wasm:
-	script/generate-fixtures-wasm
-	script/test-wasm
+	cargo xtask generate-fixtures-wasm
+	cargo xtask test-wasm
 
 lint:
 	cargo update --workspace --locked --quiet
```
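The pkg-config fix relies on GNU make's substitution reference `$(LIBDIR:$(PREFIX)/%=%)`, which strips the installation prefix so the generated `.pc` file carries prefix-relative `libdir`/`includedir` values. For readers more at home in shell, a bash analogue of the same prefix-stripping (the variable values are illustrative):

```bash
# Bash analogue of make's $(LIBDIR:$(PREFIX)/%=%); paths are examples.
PREFIX=/usr/local
LIBDIR=$PREFIX/lib
INCLUDEDIR=$PREFIX/include

# ${var#pattern} removes the shortest matching prefix.
echo "${LIBDIR#"$PREFIX"/}"       # -> lib
echo "${INCLUDEDIR#"$PREFIX"/}"   # -> include
```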
build.zig.zon

```diff
@@ -1,6 +1,6 @@
 .{
     .name = "tree-sitter",
-    .version = "0.24.1",
+    .version = "0.24.7",
     .paths = .{
         "build.zig",
         "build.zig.zon",
```
```diff
@@ -12,6 +12,9 @@ license.workspace = true
 keywords.workspace = true
 categories.workspace = true
 
+[lints]
+workspace = true
+
 [[bin]]
 name = "tree-sitter"
 path = "src/main.rs"
```
```diff
@@ -12,6 +12,9 @@ license.workspace = true
 keywords.workspace = true
 categories.workspace = true
 
+[lints]
+workspace = true
+
 [dependencies]
 anyhow.workspace = true
 dirs.workspace = true
```
```diff
@@ -12,6 +12,9 @@ license.workspace = true
 keywords.workspace = true
 categories.workspace = true
 
+[lints]
+workspace = true
+
 [dependencies]
 anyhow.workspace = true
 heck.workspace = true
```
```diff
@@ -70,18 +70,17 @@ impl<'a> Minimizer<'a> {
                     production_id: 0,
                     symbol,
                     ..
-                } => {
-                    if !self.simple_aliases.contains_key(symbol)
-                        && !self.syntax_grammar.supertype_symbols.contains(symbol)
-                        && !aliased_symbols.contains(symbol)
-                        && self.syntax_grammar.variables[symbol.index].kind
-                            != VariableType::Named
-                        && (unit_reduction_symbol.is_none()
-                            || unit_reduction_symbol == Some(symbol))
-                    {
-                        unit_reduction_symbol = Some(symbol);
-                        continue;
-                    }
+                } if !self.simple_aliases.contains_key(symbol)
+                    && !self.syntax_grammar.supertype_symbols.contains(symbol)
+                    && !self.syntax_grammar.extra_symbols.contains(symbol)
+                    && !aliased_symbols.contains(symbol)
+                    && self.syntax_grammar.variables[symbol.index].kind
+                        != VariableType::Named
+                    && (unit_reduction_symbol.is_none()
+                        || unit_reduction_symbol == Some(symbol)) =>
+                {
+                    unit_reduction_symbol = Some(symbol);
+                    continue;
                 }
                 _ => {}
             }
```
```diff
@@ -149,7 +149,7 @@ impl<'a> Interner<'a> {
     fn check_single(&self, elements: &[Rule], name: Option<&str>) {
         if elements.len() == 1 && matches!(elements[0], Rule::String(_) | Rule::Pattern(_, _)) {
             eprintln!(
-                "Warning: rule {} is just a `seq` or `choice` rule with a single element. This is unnecessary.",
+                "Warning: rule {} contains a `seq` or `choice` rule with a single element. This is unnecessary.",
                 name.unwrap_or_default()
             );
         }
```
```diff
@@ -14,6 +14,7 @@ extern "C" {
 #include <string.h>
 
 #ifdef _MSC_VER
+#pragma warning(push)
 #pragma warning(disable : 4101)
 #elif defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic push
@@ -278,7 +279,7 @@ static inline void _array__splice(Array *self, size_t element_size,
 #define _compare_int(a, b) ((int)*(a) - (int)(b))
 
 #ifdef _MSC_VER
-#pragma warning(default : 4101)
+#pragma warning(pop)
 #elif defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic pop
 #endif
```
```diff
@@ -12,6 +12,9 @@ license.workspace = true
 keywords.workspace = true
 categories.workspace = true
 
+[lints]
+workspace = true
+
 [features]
 wasm = ["tree-sitter/wasm"]
 # TODO: For backward compatibility these must be enabled by default,
@@ -24,6 +27,7 @@ cc.workspace = true
 dirs.workspace = true
 fs4.workspace = true
+indoc.workspace = true
 lazy_static.workspace = true
 libloading.workspace = true
 once_cell.workspace = true
 path-slash.workspace = true
```
|
|
@ -1 +1 @@
|
|||
3.1.64
|
||||
3.1.64
|
||||
|
|
@ -21,6 +21,7 @@ use anyhow::Error;
|
|||
use anyhow::{anyhow, Context, Result};
|
||||
use fs4::fs_std::FileExt;
|
||||
use indoc::indoc;
|
||||
use lazy_static::lazy_static;
|
||||
use libloading::{Library, Symbol};
|
||||
use once_cell::unsync::OnceCell;
|
||||
use path_slash::PathBufExt as _;
|
||||
|
|
@ -38,6 +39,10 @@ use tree_sitter_highlight::HighlightConfiguration;
|
|||
use tree_sitter_tags::{Error as TagsError, TagsConfiguration};
|
||||
use url::Url;
|
||||
|
||||
lazy_static! {
|
||||
static ref GRAMMAR_NAME_REGEX: Regex = Regex::new(r#""name":\s*"(.*?)""#).unwrap();
|
||||
}
|
||||
|
||||
pub const EMSCRIPTEN_TAG: &str = concat!("docker.io/emscripten/emsdk:", env!("EMSCRIPTEN_VERSION"));
|
||||
|
||||
#[derive(Default, Deserialize, Serialize)]
|
||||
|
|
@@ -141,12 +146,10 @@ pub struct TreeSitterJSON {
 }
 
 impl TreeSitterJSON {
-    pub fn from_file(path: &Path) -> Option<Self> {
-        if let Ok(file) = fs::File::open(path.join("tree-sitter.json")) {
-            Some(serde_json::from_reader(file).ok()?)
-        } else {
-            None
-        }
+    pub fn from_file(path: &Path) -> Result<Self> {
+        Ok(serde_json::from_str(&fs::read_to_string(
+            path.join("tree-sitter.json"),
+        )?)?)
     }
 
     pub fn has_multiple_language_configs(&self) -> bool {
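With `from_file` now returning `Result`, a caller can tell a missing `tree-sitter.json` apart from a malformed one. A minimal sketch of that distinction, assuming the public `tree_sitter_loader::TreeSitterJSON` type and anyhow's `downcast_ref` (the same pattern this diff uses further down):

```rust
use std::path::Path;

use anyhow::Result;
use tree_sitter_loader::TreeSitterJSON;

/// Returns Ok(None) when tree-sitter.json is simply absent, but propagates
/// real I/O and JSON errors instead of swallowing them the way the old
/// Option-returning version did.
fn try_read_config(repo: &Path) -> Result<Option<TreeSitterJSON>> {
    match TreeSitterJSON::from_file(repo) {
        Ok(config) => Ok(Some(config)),
        Err(e) => match e.downcast_ref::<std::io::Error>() {
            Some(io) if io.kind() == std::io::ErrorKind::NotFound => Ok(None),
            _ => Err(e),
        },
    }
}
```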
@@ -161,7 +164,8 @@ pub struct Grammar {
     #[serde(skip_serializing_if = "Option::is_none")]
     pub camelcase: Option<String>,
     pub scope: String,
-    pub path: PathBuf,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub path: Option<PathBuf>,
     #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
     pub external_files: PathsJSON,
     pub file_types: Option<Vec<String>>,
@@ -192,7 +196,6 @@ pub struct Metadata {
     pub authors: Option<Vec<Author>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub links: Option<Links>,
-    // #[serde(skip_serializing_if = "Option::is_none")]
     #[serde(skip)]
     pub namespace: Option<String>,
 }
@@ -600,6 +603,13 @@ impl Loader {
         }
     }
 
+    pub fn language_for_configuration(
+        &self,
+        configuration: &LanguageConfiguration,
+    ) -> Result<Language> {
+        self.language_for_id(configuration.language_id)
+    }
+
     fn language_for_id(&self, id: usize) -> Result<Language> {
         let (path, language, externals) = &self.languages_by_id[id];
         language
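A rough usage sketch for the new public accessor. The `get_all_language_configurations` helper used here is an assumption drawn from the loader's existing API, not something this diff touches:

```rust
use std::path::Path;

use anyhow::{Context, Result};
use tree_sitter::Language;
use tree_sitter_loader::Loader;

fn first_language_in(loader: &mut Loader, dir: &Path) -> Result<Language> {
    // Populate the loader's configuration list from the directory first.
    loader.find_language_configurations_at_path(dir, false)?;
    let (config, _path) = loader
        .get_all_language_configurations()
        .into_iter()
        .next()
        .context("no language configuration found")?;
    // The new accessor resolves a configuration to its compiled Language.
    loader.language_for_configuration(config)
}
```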
@@ -628,27 +638,7 @@ impl Loader {
 
     pub fn load_language_at_path(&self, mut config: CompileConfig) -> Result<Language> {
         let grammar_path = config.src_path.join("grammar.json");
 
-        #[derive(Deserialize)]
-        struct GrammarJSON {
-            name: String,
-        }
-        let mut grammar_file = fs::File::open(&grammar_path).with_context(|| {
-            format!(
-                "Failed to read grammar.json file at the following path:\n{:?}",
-                &grammar_path
-            )
-        })?;
-        let grammar_json: GrammarJSON = serde_json::from_reader(BufReader::new(&mut grammar_file))
-            .with_context(|| {
-                format!(
-                    "Failed to parse grammar.json file at the following path:\n{:?}",
-                    &grammar_path
-                )
-            })?;
-
-        config.name = grammar_json.name;
-
+        config.name = Self::grammar_json_name(&grammar_path)?;
         self.load_language_at_path_with_name(config)
     }
 
@@ -856,7 +846,7 @@ impl Loader {
             format!("Failed to execute the C compiler with the following command:\n{command:?}")
         })?;
 
-        lock_file.unlock()?;
+        FileExt::unlock(lock_file)?;
         fs::remove_file(lock_path)?;
 
         if output.status.success() {
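The switch from method syntax to a fully qualified `FileExt::unlock` call pairs with the new `use fs4::fs_std::FileExt` import above; presumably it avoids ambiguity now that `std::fs::File` is gaining inherent locking methods. A minimal sketch of the locking pattern, using only calls from fs4's std backend:

```rust
use std::fs::File;

use fs4::fs_std::FileExt;

fn with_exclusive_lock(lock_file: &File) -> std::io::Result<()> {
    // Take the lock through the trait...
    FileExt::lock_exclusive(lock_file)?;
    // ...do the work that must not run concurrently (e.g. invoking the C
    // compiler), then release it with the fully qualified form so the call
    // cannot be confused with an inherent `File::unlock`.
    FileExt::unlock(lock_file)
}
```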
@@ -1125,27 +1115,16 @@ impl Loader {
         parser_path: &Path,
         set_current_path_config: bool,
     ) -> Result<&[LanguageConfiguration]> {
-        #[derive(Deserialize)]
-        struct GrammarJSON {
-            name: String,
-        }
-
         let initial_language_configuration_count = self.language_configurations.len();
 
-        if let Some(config) = TreeSitterJSON::from_file(parser_path) {
+        let ts_json = TreeSitterJSON::from_file(parser_path);
+        if let Ok(config) = ts_json {
             let language_count = self.languages_by_id.len();
             for grammar in config.grammars {
                 // Determine the path to the parser directory. This can be specified in
-                // the package.json, but defaults to the directory containing the
-                // package.json.
-                let language_path = parser_path.join(grammar.path);
-
-                let grammar_path = language_path.join("src").join("grammar.json");
-                let mut grammar_file =
-                    fs::File::open(grammar_path).with_context(|| "Failed to read grammar.json")?;
-                let grammar_json: GrammarJSON =
-                    serde_json::from_reader(BufReader::new(&mut grammar_file))
-                        .with_context(|| "Failed to parse grammar.json")?;
+                // the tree-sitter.json, but defaults to the directory containing the
+                // tree-sitter.json.
+                let language_path = parser_path.join(grammar.path.unwrap_or(PathBuf::from(".")));
 
                 // Determine if a previous language configuration in this package.json file
                 // already uses the same language.
@@ -1184,7 +1163,7 @@ impl Loader {
 
                 let configuration = LanguageConfiguration {
                     root_path: parser_path.to_path_buf(),
-                    language_name: grammar_json.name,
+                    language_name: grammar.name,
                     scope: Some(grammar.scope),
                     language_id,
                     file_types: grammar.file_types.unwrap_or_default(),
@@ -1230,20 +1209,30 @@ impl Loader {
                         Some(self.language_configurations.len() - 1);
                 }
             }
-        }
+        } else if let Err(e) = ts_json {
+            match e.downcast_ref::<std::io::Error>() {
+                // This is noisy, and not really an issue.
+                Some(e) if e.kind() == std::io::ErrorKind::NotFound => {}
+                _ => {
+                    eprintln!(
+                        "Warning: Failed to parse {} -- {e}",
+                        parser_path.join("tree-sitter.json").display()
+                    );
+                }
+            }
+        }
 
         // If we didn't find any language configurations in the tree-sitter.json file,
         // but there is a grammar.json file, then use the grammar file to form a simple
         // language configuration.
         if self.language_configurations.len() == initial_language_configuration_count
             && parser_path.join("src").join("grammar.json").exists()
         {
             let grammar_path = parser_path.join("src").join("grammar.json");
-            let mut grammar_file =
-                fs::File::open(grammar_path).with_context(|| "Failed to read grammar.json")?;
-            let grammar_json: GrammarJSON =
-                serde_json::from_reader(BufReader::new(&mut grammar_file))
-                    .with_context(|| "Failed to parse grammar.json")?;
+            let language_name = Self::grammar_json_name(&grammar_path)?;
             let configuration = LanguageConfiguration {
                 root_path: parser_path.to_owned(),
-                language_name: grammar_json.name,
+                language_name,
                 language_id: self.languages_by_id.len(),
                 file_types: Vec::new(),
                 scope: None,
@@ -1279,6 +1268,36 @@ impl Loader {
         pattern.and_then(|r| RegexBuilder::new(r).multi_line(true).build().ok())
     }
 
+    fn grammar_json_name(grammar_path: &Path) -> Result<String> {
+        let file = fs::File::open(grammar_path).with_context(|| {
+            format!("Failed to open grammar.json at {}", grammar_path.display())
+        })?;
+
+        let first_three_lines = BufReader::new(file)
+            .lines()
+            .take(3)
+            .collect::<Result<Vec<_>, _>>()
+            .with_context(|| {
+                format!(
+                    "Failed to read the first three lines of grammar.json at {}",
+                    grammar_path.display()
+                )
+            })?
+            .join("\n");
+
+        let name = GRAMMAR_NAME_REGEX
+            .captures(&first_three_lines)
+            .and_then(|c| c.get(1))
+            .ok_or_else(|| {
+                anyhow!(
+                    "Failed to parse the language name from grammar.json at {}",
+                    grammar_path.display()
+                )
+            })?;
+
+        Ok(name.as_str().to_string())
+    }
+
     pub fn select_language(
         &mut self,
         path: &Path,
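`grammar_json_name` leans on the `GRAMMAR_NAME_REGEX` added near the top of this file, and on the assumption that generated `grammar.json` files always place the `"name"` key within the first three lines, so the potentially large file never has to be fully parsed. A small self-contained check of that extraction:

```rust
use regex::Regex;

fn main() {
    // `tree-sitter generate` emits grammar.json with "name" right at the top,
    // which is what makes the three-line prefix scan sufficient.
    let head = "{\n  \"name\": \"javascript\",\n  \"word\": \"identifier\",";
    let re = Regex::new(r#""name":\s*"(.*?)""#).unwrap();
    let name = re
        .captures(head)
        .and_then(|c| c.get(1))
        .map(|m| m.as_str());
    assert_eq!(name, Some("javascript"));
}
```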
@@ -1,6 +1,6 @@
 {
   "name": "tree-sitter-cli",
-  "version": "0.24.1",
+  "version": "0.24.7",
   "author": "Max Brunsfeld",
   "license": "MIT",
   "repository": {
426 cli/src/init.rs
@@ -1,17 +1,15 @@
 use std::{
-    fs::{self, File},
-    io::BufReader,
+    fs,
     path::{Path, PathBuf},
     str::{self, FromStr},
 };
 
 use anyhow::{anyhow, Context, Result};
 use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
 use indoc::indoc;
 use regex::Regex;
 use semver::Version;
 use serde::{Deserialize, Serialize};
-use serde_json::{json, Map, Value};
+use serde_json::{Map, Value};
 use tree_sitter_generate::write_file;
 use tree_sitter_loader::{
     Author, Bindings, Grammar, Links, Metadata, PackageJSON, PackageJSONAuthor,
@@ -77,7 +75,7 @@ const BINDING_GYP_TEMPLATE: &str = include_str!("./templates/binding.gyp");
 const BINDING_TEST_JS_TEMPLATE: &str = include_str!("./templates/binding_test.js");
 
 const MAKEFILE_TEMPLATE: &str = include_str!("./templates/makefile");
-const CMAKELISTS_TXT_TEMPLATE: &str = include_str!("./templates/cmakelists.txt");
+const CMAKELISTS_TXT_TEMPLATE: &str = include_str!("./templates/cmakelists.cmake");
 const PARSER_NAME_H_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.h");
 const PARSER_NAME_PC_IN_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.pc.in");
@@ -112,22 +110,6 @@ pub fn path_in_ignore(repo_path: &Path) -> bool {
         .any(|dir| repo_path.ends_with(dir))
 }
 
-fn insert_after(
-    map: Map<String, Value>,
-    after: &str,
-    key: &str,
-    value: Value,
-) -> Map<String, Value> {
-    let mut entries = map.into_iter().collect::<Vec<_>>();
-    let after_index = entries
-        .iter()
-        .position(|(k, _)| k == after)
-        .unwrap_or(entries.len() - 1)
-        + 1;
-    entries.insert(after_index, (key.to_string(), value));
-    entries.into_iter().collect()
-}
-
 #[derive(Serialize, Deserialize, Clone)]
 pub struct JsonConfigOpts {
     pub name: String,
@@ -153,9 +135,9 @@ impl JsonConfigOpts {
             name: self.name.clone(),
             camelcase: Some(self.camelcase),
             scope: self.scope,
-            path: PathBuf::from("."),
+            path: None,
             external_files: PathsJSON::Empty,
-            file_types: None,
+            file_types: Some(self.file_types),
             highlights: PathsJSON::Empty,
             injections: PathsJSON::Empty,
             locals: PathsJSON::Empty,
@@ -171,7 +153,7 @@ impl JsonConfigOpts {
             authors: Some(vec![Author {
                 name: self.author,
                 email: self.email,
-                url: None,
+                url: self.url.map(|url| url.to_string()),
             }]),
             links: Some(Links {
                 repository: self.repository.unwrap_or_else(|| {
@@ -216,6 +198,7 @@ struct GenerateOpts<'a> {
     description: Option<&'a str>,
     repository: Option<&'a str>,
     version: &'a Version,
+    camel_parser_name: &'a str,
 }
 
 // TODO: remove in 0.25
@@ -228,9 +211,9 @@ pub fn migrate_package_json(repo_path: &Path) -> Result<bool> {
         root_path.join("tree-sitter.json"),
     );
 
-    let old_config = serde_json::from_reader::<_, PackageJSON>(
-        File::open(&package_json_path)
-            .with_context(|| format!("Failed to open package.json in {}", root_path.display()))?,
+    let old_config = serde_json::from_str::<PackageJSON>(
+        &fs::read_to_string(&package_json_path)
+            .with_context(|| format!("Failed to read package.json in {}", root_path.display()))?,
     )?;
 
     if old_config.tree_sitter.is_none() {
@@ -249,7 +232,7 @@ pub fn migrate_package_json(repo_path: &Path) -> Result<bool> {
             name: name.clone(),
             camelcase: Some(name.to_upper_camel_case()),
             scope: l.scope.unwrap_or_else(|| format!("source.{name}")),
-            path: l.path,
+            path: Some(l.path),
             external_files: l.external_files,
             file_types: l.file_types,
             highlights: l.highlights,
@@ -352,19 +335,19 @@ pub fn migrate_package_json(repo_path: &Path) -> Result<bool> {
 
     write_file(
         &tree_sitter_json_path,
-        serde_json::to_string_pretty(&new_config)?,
+        serde_json::to_string_pretty(&new_config)? + "\n",
     )?;
 
     // Remove the `tree-sitter` field in-place
-    let mut package_json = serde_json::from_reader::<_, Map<String, Value>>(
-        File::open(&package_json_path)
-            .with_context(|| format!("Failed to open package.json in {}", root_path.display()))?,
+    let mut package_json = serde_json::from_str::<Map<String, Value>>(
+        &fs::read_to_string(&package_json_path)
+            .with_context(|| format!("Failed to read package.json in {}", root_path.display()))?,
     )
     .unwrap();
     package_json.remove("tree-sitter");
     write_file(
         &root_path.join("package.json"),
-        serde_json::to_string_pretty(&package_json)?,
+        serde_json::to_string_pretty(&package_json)? + "\n",
     )?;
 
     println!("Warning: your package.json's `tree-sitter` field has been automatically migrated to the new `tree-sitter.json` config file");
@@ -383,8 +366,6 @@ pub fn generate_grammar_files(
 ) -> Result<()> {
     let dashed_language_name = language_name.to_kebab_case();
 
-    // TODO: remove legacy code updates in v0.24.0
-
     let tree_sitter_config = missing_path_else(
         repo_path.join("tree-sitter.json"),
         true,
@@ -407,12 +388,16 @@ pub fn generate_grammar_files(
         },
     )?;
 
-    let tree_sitter_config = serde_json::from_reader::<_, TreeSitterJSON>(
-        File::open(tree_sitter_config.as_path())
-            .with_context(|| "Failed to open tree-sitter.json")?,
+    let tree_sitter_config = serde_json::from_str::<TreeSitterJSON>(
+        &fs::read_to_string(tree_sitter_config.as_path())
+            .with_context(|| "Failed to read tree-sitter.json")?,
     )?;
 
     let authors = tree_sitter_config.metadata.authors.as_ref();
+    let camel_name = tree_sitter_config.grammars[0]
+        .camelcase
+        .clone()
+        .unwrap_or_else(|| language_name.to_upper_camel_case());
 
     let generate_opts = GenerateOpts {
         author_name: authors
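The `camel_name` fallback converts the language name with heck's `ToUpperCamelCase` when `grammars[0].camelcase` is absent from tree-sitter.json; for example:

```rust
use heck::ToUpperCamelCase;

fn main() {
    // What the fallback produces when no explicit `camelcase` is configured.
    assert_eq!("embedded_template".to_upper_camel_case(), "EmbeddedTemplate");
    assert_eq!("php".to_upper_camel_case(), "Php");
}
```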
@@ -432,126 +417,18 @@ pub fn generate_grammar_files(
         .as_ref()
         .map(|l| l.repository.as_str()),
         version: &tree_sitter_config.metadata.version,
+        camel_parser_name: &camel_name,
     };
 
-    // Create or update package.json
-    missing_path_else(
-        repo_path.join("package.json"),
-        allow_update,
-        |path| {
-            generate_file(
-                path,
-                PACKAGE_JSON_TEMPLATE,
-                dashed_language_name.as_str(),
-                &generate_opts,
-            )
-        },
-        |path| {
-            let package_json_str =
-                fs::read_to_string(path).with_context(|| "Failed to read package.json")?;
-            let mut package_json = serde_json::from_str::<Map<String, Value>>(&package_json_str)
-                .with_context(|| "Failed to parse package.json")?;
-            let mut updated = false;
-
-            let dependencies = package_json
-                .entry("dependencies".to_string())
-                .or_insert_with(|| Value::Object(Map::new()))
-                .as_object_mut()
-                .unwrap();
-            if dependencies.remove("nan").is_some() {
-                eprintln!("Replacing nan dependency with node-addon-api in package.json");
-                dependencies.insert("node-addon-api".to_string(), "^8.0.0".into());
-                updated = true;
-            }
-            if !dependencies.contains_key("node-gyp-build") {
-                eprintln!("Adding node-gyp-build dependency to package.json");
-                dependencies.insert("node-gyp-build".to_string(), "^4.8.1".into());
-                updated = true;
-            }
-
-            let dev_dependencies = package_json
-                .entry("devDependencies".to_string())
-                .or_insert_with(|| Value::Object(Map::new()))
-                .as_object_mut()
-                .unwrap();
-            if !dev_dependencies.contains_key("prebuildify") {
-                eprintln!("Adding prebuildify devDependency to package.json");
-                dev_dependencies.insert("prebuildify".to_string(), "^6.0.1".into());
-                updated = true;
-            }
-
-            let node_test = "node --test bindings/node/*_test.js";
-            let scripts = package_json
-                .entry("scripts".to_string())
-                .or_insert_with(|| Value::Object(Map::new()))
-                .as_object_mut()
-                .unwrap();
-            if !scripts.get("test").is_some_and(|v| v == node_test) {
-                eprintln!("Updating package.json scripts");
-                *scripts = Map::from_iter([
-                    ("install".to_string(), "node-gyp-build".into()),
-                    ("prestart".to_string(), "tree-sitter build --wasm".into()),
-                    ("start".to_string(), "tree-sitter playground".into()),
-                    ("test".to_string(), node_test.into()),
-                ]);
-                updated = true;
-            }
-
-            // insert `peerDependencies` after `dependencies`
-            if !package_json.contains_key("peerDependencies") {
-                eprintln!("Adding peerDependencies to package.json");
-                package_json = insert_after(
-                    package_json,
-                    "dependencies",
-                    "peerDependencies",
-                    json!({"tree-sitter": "^0.21.1"}),
-                );
-
-                package_json = insert_after(
-                    package_json,
-                    "peerDependencies",
-                    "peerDependenciesMeta",
-                    json!({"tree_sitter": {"optional": true}}),
-                );
-                updated = true;
-            }
-
-            // insert `types` right after `main`
-            if !package_json.contains_key("types") {
-                eprintln!("Adding types to package.json");
-                package_json = insert_after(package_json, "main", "types", "bindings/node".into());
-                updated = true;
-            }
-
-            // insert `files` right after `keywords`
-            if !package_json.contains_key("files") {
-                eprintln!("Adding files to package.json");
-                package_json = insert_after(
-                    package_json,
-                    "keywords",
-                    "files",
-                    json!([
-                        "grammar.js",
-                        "binding.gyp",
-                        "prebuilds/**",
-                        "bindings/node/*",
-                        "queries/*",
-                        "src/**",
-                        "*.wasm"
-                    ]),
-                );
-                updated = true;
-            }
-
-            if updated {
-                let mut package_json_str = serde_json::to_string_pretty(&package_json)?;
-                package_json_str.push('\n');
-                write_file(path, package_json_str)?;
-            }
-
-            Ok(())
-        },
-    )?;
+    // Create package.json
+    missing_path(repo_path.join("package.json"), |path| {
+        generate_file(
+            path,
+            PACKAGE_JSON_TEMPLATE,
+            dashed_language_name.as_str(),
+            &generate_opts,
+        )
+    })?;
 
     // Do not create a grammar.js file in a repo with multiple language configs
     if !tree_sitter_config.has_multiple_language_configs() {
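The hunks above and below repeatedly swap `missing_path_else` for `missing_path`. Neither helper's definition appears in this diff; the following is only a sketch of their apparent contracts, inferred from the call sites (the real helpers also wrap the path in a type with an `apply` method):

```rust
use std::path::{Path, PathBuf};

use anyhow::Result;

// Run `action` only when `path` does not exist yet.
fn missing_path(path: PathBuf, action: impl Fn(&Path) -> Result<()>) -> Result<PathBuf> {
    if !path.exists() {
        action(&path)?;
    }
    Ok(path)
}

// Additionally run `update` on files that already exist, when updates are allowed.
fn missing_path_else(
    path: PathBuf,
    allow_update: bool,
    create: impl Fn(&Path) -> Result<()>,
    update: impl Fn(&Path) -> Result<()>,
) -> Result<PathBuf> {
    if !path.exists() {
        create(&path)?;
    } else if allow_update {
        update(&path)?;
    }
    Ok(path)
}
```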
@@ -580,83 +457,22 @@ pub fn generate_grammar_files(
     // Generate Rust bindings
     if tree_sitter_config.bindings.rust {
         missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
-            missing_path_else(
-                path.join("lib.rs"),
-                allow_update,
-                |path| generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts),
-                |path| {
-                    let lib_rs =
-                        fs::read_to_string(path).with_context(|| "Failed to read lib.rs")?;
-                    if !lib_rs.contains("tree_sitter_language") {
-                        generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts)?;
-                        eprintln!("Updated lib.rs with `tree_sitter_language` dependency");
-                    }
-                    Ok(())
-                },
-            )?;
+            missing_path(path.join("lib.rs"), |path| {
+                generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts)
+            })?;
 
-            missing_path_else(
-                path.join("build.rs"),
-                allow_update,
-                |path| generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts),
-                |path| {
-                    let build_rs =
-                        fs::read_to_string(path).with_context(|| "Failed to read build.rs")?;
-                    if !build_rs.contains("-utf-8") {
-                        let index = build_rs
-                            .find(" let parser_path = src_dir.join(\"parser.c\")")
-                            .ok_or_else(|| anyhow!(indoc!{
-                                "Failed to auto-update build.rs with the `/utf-8` flag for windows.
-                                To fix this, remove `bindings/rust/build.rs` and re-run `tree-sitter generate`"}))?;
-
-                        let build_rs = format!(
-                            "{}{}{}\n{}",
-                            &build_rs[..index],
-                            "    #[cfg(target_env = \"msvc\")]\n",
-                            "    c_config.flag(\"-utf-8\");\n",
-                            &build_rs[index..]
-                        );
-
-                        write_file(path, build_rs)?;
-                        eprintln!("Updated build.rs with the /utf-8 flag for Windows compilation");
-                    }
-                    Ok(())
-                },
-            )?;
+            missing_path(path.join("build.rs"), |path| {
+                generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts)
+            })?;
 
-            missing_path_else(
-                repo_path.join("Cargo.toml"),
-                allow_update,
-                |path| generate_file(path, CARGO_TOML_TEMPLATE, dashed_language_name.as_str(), &generate_opts),
-                |path| {
-                    let cargo_toml =
-                        fs::read_to_string(path).with_context(|| "Failed to read Cargo.toml")?;
-                    if !cargo_toml.contains("tree-sitter-language") {
-                        let start_index = cargo_toml
-                            .find("tree-sitter = \"")
-                            .ok_or_else(|| anyhow!("Failed to find the `tree-sitter` dependency in Cargo.toml"))?;
-
-                        let version_start_index = start_index + "tree-sitter = \"".len();
-                        let version_end_index = cargo_toml[version_start_index..]
-                            .find('\"')
-                            .map(|i| i + version_start_index)
-                            .ok_or_else(|| anyhow!("Failed to find the end of the `tree-sitter` version in Cargo.toml"))?;
-
-                        let cargo_toml = format!(
-                            "{}{}{}\n{}\n{}",
-                            &cargo_toml[..start_index],
-                            "tree-sitter-language = \"0.1.0\"",
-                            &cargo_toml[version_end_index + 1..],
-                            "[dev-dependencies]",
-                            "tree-sitter = \"0.23\"",
-                        );
-
-                        write_file(path, cargo_toml)?;
-                        eprintln!("Updated Cargo.toml with the `tree-sitter-language` dependency");
-                    }
-                    Ok(())
-                },
-            )?;
+            missing_path(repo_path.join("Cargo.toml"), |path| {
+                generate_file(
+                    path,
+                    CARGO_TOML_TEMPLATE,
+                    dashed_language_name.as_str(),
+                    &generate_opts,
+                )
+            })?;
 
             Ok(())
         })?;
@@ -670,10 +486,8 @@ pub fn generate_grammar_files(
             allow_update,
             |path| generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts),
             |path| {
-                let index_js =
-                    fs::read_to_string(path).with_context(|| "Failed to read index.js")?;
-                if index_js.contains("../../build/Release") {
-                    eprintln!("Replacing index.js with new binding API");
+                let contents = fs::read_to_string(path)?;
+                if !contents.contains("bun") {
                     generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts)?;
                 }
                 Ok(())
@@ -693,36 +507,13 @@ pub fn generate_grammar_files(
             )
         })?;
 
-        missing_path_else(
-            path.join("binding.cc"),
-            allow_update,
-            |path| generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts),
-            |path| {
-                let binding_cc =
-                    fs::read_to_string(path).with_context(|| "Failed to read binding.cc")?;
-                if binding_cc.contains("NAN_METHOD(New) {}") {
-                    eprintln!("Replacing binding.cc with new binding API");
-                    generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts)?;
-                }
-                Ok(())
-            },
-        )?;
+        missing_path(path.join("binding.cc"), |path| {
+            generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts)
+        })?;
 
         // Create binding.gyp, or update it with new binding API.
-        missing_path_else(
-            repo_path.join("binding.gyp"),
-            allow_update,
-            |path| generate_file(path, BINDING_GYP_TEMPLATE, language_name, &generate_opts),
-            |path| {
-                let binding_gyp =
-                    fs::read_to_string(path).with_context(|| "Failed to read binding.gyp")?;
-                if binding_gyp.contains("require('nan')") {
-                    eprintln!("Replacing binding.gyp with new binding API");
-                    generate_file(path, BINDING_GYP_TEMPLATE, language_name, &generate_opts)?;
-                }
-                Ok(())
-            },
-        )?;
+        missing_path(repo_path.join("binding.gyp"), |path| {
+            generate_file(path, BINDING_GYP_TEMPLATE, language_name, &generate_opts)
+        })?;
 
         Ok(())
     })?;
@@ -752,9 +543,20 @@ pub fn generate_grammar_files(
             generate_file(path, MAKEFILE_TEMPLATE, language_name, &generate_opts)
         })?;
 
-        missing_path(repo_path.join("CMakeLists.txt"), |path| {
-            generate_file(path, CMAKELISTS_TXT_TEMPLATE, language_name, &generate_opts)
-        })?;
+        missing_path_else(
+            repo_path.join("CMakeLists.txt"),
+            allow_update,
+            |path| generate_file(path, CMAKELISTS_TXT_TEMPLATE, language_name, &generate_opts),
+            |path| {
+                let contents = fs::read_to_string(path)?;
+                let old = "add_custom_target(test";
+                if contents.contains(old) {
+                    write_file(path, contents.replace(old, "add_custom_target(ts-test"))
+                } else {
+                    Ok(())
+                }
+            },
+        )?;
 
         Ok(())
     })?;
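One likely reason for patching existing `CMakeLists.txt` files here: `test` is a target name CMake reserves for CTest, so the generated file now names its custom target `ts-test` (see the `cmakelists.cmake` template change further down), and the update closure above rewrites `add_custom_target(test` to match in already-generated files.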
@@ -767,39 +569,14 @@ pub fn generate_grammar_files(
             generate_file(path, BINDING_GO_TEMPLATE, language_name, &generate_opts)
         })?;
 
-        missing_path_else(
-            path.join("binding_test.go"),
-            allow_update,
-            |path| {
-                generate_file(
-                    path,
-                    BINDING_TEST_GO_TEMPLATE,
-                    language_name,
-                    &generate_opts,
-                )
-            },
-            |path| {
-                let binding_test_go = fs::read_to_string(path)
-                    .with_context(|| "Failed to read binding_test.go")?;
-                if binding_test_go.contains("smacker") {
-                    eprintln!("Replacing binding_test.go with new binding API");
-                    generate_file(
-                        path,
-                        BINDING_TEST_GO_TEMPLATE,
-                        language_name,
-                        &generate_opts,
-                    )?;
-                }
-                Ok(())
-            },
-        )?;
-
-        // Delete the old go.mod file that lives inside bindings/go, it now lives in the root
-        // dir
-        let go_mod_path = path.join("go.mod");
-        if allow_update && go_mod_path.exists() {
-            fs::remove_file(go_mod_path).with_context(|| "Failed to remove old go.mod file")?;
-        }
+        missing_path(path.join("binding_test.go"), |path| {
+            generate_file(
+                path,
+                BINDING_TEST_GO_TEMPLATE,
+                language_name,
+                &generate_opts,
+            )
+        })?;
 
         missing_path(repo_path.join("go.mod"), |path| {
             generate_file(path, GO_MOD_TEMPLATE, language_name, &generate_opts)
@@ -815,20 +592,9 @@ pub fn generate_grammar_files(
         let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
         missing_path(&lang_path, create_dir)?;
 
-        missing_path_else(
-            lang_path.join("binding.c"),
-            allow_update,
-            |path| generate_file(path, PY_BINDING_C_TEMPLATE, language_name, &generate_opts),
-            |path| {
-                let binding_c = fs::read_to_string(path)
-                    .with_context(|| "Failed to read bindings/python/binding.c")?;
-                if !binding_c.contains("PyCapsule_New") {
-                    eprintln!("Replacing bindings/python/binding.c with new binding API");
-                    generate_file(path, PY_BINDING_C_TEMPLATE, language_name, &generate_opts)?;
-                }
-                Ok(())
-            },
-        )?;
+        missing_path(lang_path.join("binding.c"), |path| {
+            generate_file(path, PY_BINDING_C_TEMPLATE, language_name, &generate_opts)
+        })?;
 
         missing_path(lang_path.join("__init__.py"), |path| {
             generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)
@@ -874,7 +640,7 @@ pub fn generate_grammar_files(
     // Generate Swift bindings
     if tree_sitter_config.bindings.swift {
         missing_path(bindings_dir.join("swift"), create_dir)?.apply(|path| {
-            let lang_path = path.join(format!("TreeSitter{}", language_name.to_upper_camel_case()));
+            let lang_path = path.join(format!("TreeSitter{camel_name}",));
             missing_path(&lang_path, create_dir)?;
 
             missing_path(lang_path.join(format!("{language_name}.h")), |path| {
@@ -882,18 +648,12 @@ pub fn generate_grammar_files(
             })?;
 
             missing_path(
-                path.join(format!(
-                    "TreeSitter{}Tests",
-                    language_name.to_upper_camel_case()
-                )),
+                path.join(format!("TreeSitter{camel_name}Tests",)),
                 create_dir,
             )?
             .apply(|path| {
                 missing_path(
-                    path.join(format!(
-                        "TreeSitter{}Tests.swift",
-                        language_name.to_upper_camel_case()
-                    )),
+                    path.join(format!("TreeSitter{camel_name}Tests.swift")),
                     |path| generate_file(path, TESTS_SWIFT_TEMPLATE, language_name, &generate_opts),
                 )?;
@@ -919,15 +679,14 @@ pub fn get_root_path(path: &Path) -> Result<PathBuf> {
     let json = pathbuf
         .exists()
         .then(|| {
-            let file = File::open(pathbuf.as_path())
-                .with_context(|| format!("Failed to open {filename}"))?;
-            let reader = BufReader::new(file);
+            let contents = fs::read_to_string(pathbuf.as_path())
+                .with_context(|| format!("Failed to read {filename}"))?;
             if is_package_json {
-                serde_json::from_reader::<_, Map<String, Value>>(reader)
+                serde_json::from_str::<Map<String, Value>>(&contents)
                     .context(format!("Failed to parse {filename}"))
                     .map(|v| v.contains_key("tree-sitter"))
             } else {
-                serde_json::from_reader::<_, TreeSitterJSON>(reader)
+                serde_json::from_str::<TreeSitterJSON>(&contents)
                     .context(format!("Failed to parse {filename}"))
                     .map(|_| true)
             }
@@ -961,7 +720,7 @@ fn generate_file(
     let mut replacement = template
         .replace(
             CAMEL_PARSER_NAME_PLACEHOLDER,
-            &language_name.to_upper_camel_case(),
+            generate_opts.camel_parser_name,
         )
         .replace(
             UPPER_PARSER_NAME_PLACEHOLDER,
@@ -1001,7 +760,12 @@ fn generate_file(
     }
 
     if let Some(email) = generate_opts.author_email {
-        replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, email);
+        replacement = match filename {
+            "Cargo.toml" | "grammar.js" => {
+                replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, &format!("<{email}>"))
+            }
+            _ => replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, email),
+        }
     } else {
         match filename {
             "package.json" => {
@@ -1099,7 +863,7 @@ fn generate_file(
                 PARSER_DESCRIPTION_PLACEHOLDER,
                 &format!(
                     "{} grammar for tree-sitter",
-                    language_name.to_upper_camel_case()
+                    generate_opts.camel_parser_name,
                 ),
             )
         }
@@ -653,11 +653,11 @@ impl Init {
 
         (opts.name.clone(), Some(opts))
     } else {
-        let json = serde_json::from_reader::<_, TreeSitterJSON>(
-            fs::File::open(current_dir.join("tree-sitter.json"))
-                .with_context(|| "Failed to open tree-sitter.json")?,
+        let mut json = serde_json::from_str::<TreeSitterJSON>(
+            &fs::read_to_string(current_dir.join("tree-sitter.json"))
+                .with_context(|| "Failed to read tree-sitter.json")?,
         )?;
-        (json.grammars[0].name.clone(), None)
+        (json.grammars.swap_remove(0).name, None)
     };
 
     generate_grammar_files(current_dir, &language_name, self.update, json_config_opts)?;
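Binding `json` mutably lets the grammar name be moved out of the vector instead of cloned: `Vec::swap_remove(0)` replaces slot 0 with the last element in O(1) and returns the original first element by value. A tiny standalone illustration:

```rust
fn main() {
    let mut grammars = vec!["first".to_string(), "second".to_string()];
    // Moves the String out; no clone, constant time.
    let name = grammars.swap_remove(0);
    assert_eq!(name, "first");
    assert_eq!(grammars, ["second"]);
}
```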
@@ -19,6 +19,11 @@
       <input id="logging-checkbox" type="checkbox"></input>
     </div>
 
+    <div class=header-item>
+      <label for="anonymous-nodes-checkbox">show anonymous nodes</label>
+      <input id="anonymous-nodes-checkbox" type="checkbox"></input>
+    </div>
+
     <div class=header-item>
       <label for="query-checkbox">query</label>
       <input id="query-checkbox" type="checkbox"></input>

@@ -67,6 +72,12 @@
 <script src=tree-sitter.js></script>
 <script src=playground.js></script>
 
+<script>
+  setTimeout(() => {
+    window.initializePlayground({local: true});
+  }, 1000);
+</script>
+
 <style>
   body {
     margin: 0;
@@ -41,3 +41,6 @@ indent_size = 8
 
 [parser.c]
 indent_size = 2
+
+[{alloc,array,parser}.h]
+indent_size = 2
@@ -6,6 +6,5 @@ Name: tree-sitter-PARSER_NAME
 Description: @PROJECT_DESCRIPTION@
 URL: @PROJECT_HOMEPAGE_URL@
 Version: @PROJECT_VERSION@
-Requires: @TS_REQUIRES@
 Libs: -L${libdir} -ltree-sitter-PARSER_NAME
 Cflags: -I${includedir}
@@ -12,7 +12,7 @@ edition = "2021"
 autoexamples = false
 
 build = "bindings/rust/build.rs"
-include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*"]
+include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*", "tree-sitter.json"]
 
 [lib]
 path = "bindings/rust/lib.rs"
@@ -1,9 +1,9 @@
 /// <reference types="node" />
 
 const assert = require("node:assert");
 const { test } = require("node:test");
 
+const Parser = require("tree-sitter");
+
 test("can load grammar", () => {
-  const parser = new (require("tree-sitter"))();
+  const parser = new Parser();
   assert.doesNotThrow(() => parser.setLanguage(require(".")));
 });
@@ -1,12 +1,13 @@
 cmake_minimum_required(VERSION 3.13)
 
 project(tree-sitter-PARSER_NAME
-        VERSION "0.0.1"
-        DESCRIPTION "CAMEL_PARSER_NAME grammar for tree-sitter"
-        HOMEPAGE_URL "https://github.com/tree-sitter/tree-sitter-PARSER_NAME"
+        VERSION "PARSER_VERSION"
+        DESCRIPTION "PARSER_DESCRIPTION"
+        HOMEPAGE_URL "PARSER_URL"
         LANGUAGES C)
 
 option(BUILD_SHARED_LIBS "Build using shared libraries" ON)
 option(TREE_SITTER_REUSE_ALLOCATOR "Reuse the library allocator" OFF)
 
 set(TREE_SITTER_ABI_VERSION ABI_VERSION_MAX CACHE STRING "Tree-sitter ABI version")
 if(NOT ${TREE_SITTER_ABI_VERSION} MATCHES "^[0-9]+$")

@@ -24,16 +25,21 @@ add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
                    COMMENT "Generating parser.c")
 
 add_library(tree-sitter-PARSER_NAME src/parser.c)
-if(EXISTS src/scanner.c)
+if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/src/scanner.c)
   target_sources(tree-sitter-PARSER_NAME PRIVATE src/scanner.c)
 endif()
 target_include_directories(tree-sitter-PARSER_NAME PRIVATE src)
 
+target_compile_definitions(tree-sitter-PARSER_NAME PRIVATE
+                           $<$<BOOL:${TREE_SITTER_REUSE_ALLOCATOR}>:TREE_SITTER_REUSE_ALLOCATOR>
+                           $<$<CONFIG:Debug>:TREE_SITTER_DEBUG>)
+
 set_target_properties(tree-sitter-PARSER_NAME
                       PROPERTIES
                       C_STANDARD 11
                       POSITION_INDEPENDENT_CODE ON
-                      SOVERSION "${TREE_SITTER_ABI_VERSION}.${PROJECT_VERSION_MAJOR}")
+                      SOVERSION "${TREE_SITTER_ABI_VERSION}.${PROJECT_VERSION_MAJOR}"
+                      DEFINE_SYMBOL "")
 
 configure_file(bindings/c/tree-sitter-PARSER_NAME.pc.in
                "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-PARSER_NAME.pc" @ONLY)

@@ -47,8 +53,6 @@ install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-PARSER_NAME.pc"
 install(TARGETS tree-sitter-PARSER_NAME
         LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")
 
-add_custom_target(test "${TREE_SITTER_CLI}" test
+add_custom_target(ts-test "${TREE_SITTER_CLI}" test
                   WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
                   COMMENT "tree-sitter test")
 
 # vim:ft=cmake:
@@ -1,13 +1,37 @@
 * text=auto eol=lf
 
 # Generated source files
 src/*.json linguist-generated
 src/parser.c linguist-generated
 src/tree_sitter/* linguist-generated
 
-bindings/** linguist-generated
-binding.gyp linguist-generated
-setup.py linguist-generated
-Makefile linguist-generated
-CMakeLists.txt linguist-generated
-Package.swift linguist-generated
+# C bindings
+bindings/c/* linguist-generated
+CMakeLists.txt linguist-generated
+Makefile linguist-generated
+
+# Rust bindings
+bindings/rust/* linguist-generated
+Cargo.toml linguist-generated
+Cargo.lock linguist-generated
+
+# Node.js bindings
+bindings/node/* linguist-generated
+binding.gyp linguist-generated
+package.json linguist-generated
+package-lock.json linguist-generated
+
+# Python bindings
+bindings/python/** linguist-generated
+setup.py linguist-generated
+pyproject.toml linguist-generated
+
+# Go bindings
+bindings/go/* linguist-generated
+go.mod linguist-generated
+go.sum linguist-generated
+
+# Swift bindings
+bindings/swift/** linguist-generated
+Package.swift linguist-generated
+Package.resolved linguist-generated
@@ -5,11 +5,9 @@ target/
 build/
 prebuilds/
 node_modules/
-*.tgz
 
 # Swift artifacts
 .build/
-Package.resolved
 
 # Go artifacts
 _obj/

@@ -35,3 +33,8 @@ dist/
 *.wasm
 *.obj
 *.o
+
+# Archives
+*.tar.gz
+*.tgz
+*.zip
@@ -2,4 +2,4 @@ module PARSER_URL_STRIPPED
 
 go 1.22
 
-require github.com/tree-sitter/go-tree-sitter v0.23.1
+require github.com/tree-sitter/go-tree-sitter v0.24.0
@@ -1,6 +1,10 @@
 const root = require("path").join(__dirname, "..", "..");
 
-module.exports = require("node-gyp-build")(root);
+module.exports =
+  typeof process.versions.bun === "string"
+    // Support `bun build --compile` by being statically analyzable enough to find the .node file at build-time
+    ? require(`../../prebuilds/${process.platform}-${process.arch}/tree-sitter-PARSER_NAME.node`)
+    : require("node-gyp-build")(root);
 
 try {
   module.exports.nodeTypeInfo = require("../../src/node-types.json");
@@ -59,8 +59,8 @@ endif
 
 $(LANGUAGE_NAME).pc: bindings/c/$(LANGUAGE_NAME).pc.in
 	sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
-		-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR)|' \
-		-e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR)|' \
+		-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
+		-e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR:$(PREFIX)/%=%)|' \
 		-e 's|@PROJECT_DESCRIPTION@|$(DESCRIPTION)|' \
 		-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
 		-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
@@ -2,7 +2,7 @@
   "name": "tree-sitter-PARSER_NAME",
   "version": "PARSER_VERSION",
   "description": "PARSER_DESCRIPTION",
-  "repository": "github:tree-sitter/tree-sitter-PARSER_NAME",
+  "repository": "PARSER_URL",
   "license": "PARSER_LICENSE",
   "author": {
     "name": "PARSER_AUTHOR_NAME",

@@ -19,6 +19,7 @@
   ],
   "files": [
     "grammar.js",
+    "tree-sitter.json",
     "binding.gyp",
     "prebuilds/**",
     "bindings/node/*",

@@ -27,8 +28,8 @@
     "*.wasm"
   ],
   "dependencies": {
-    "node-addon-api": "^8.0.0",
-    "node-gyp-build": "^4.8.1"
+    "node-addon-api": "^8.2.1",
+    "node-gyp-build": "^4.8.2"
   },
   "devDependencies": {
     "prebuildify": "^6.0.1",

@@ -47,11 +48,5 @@
     "prestart": "tree-sitter build --wasm",
     "start": "tree-sitter playground",
     "test": "node --test bindings/node/*_test.js"
-  },
-  "tree-sitter": [
-    {
-      "scope": "source.LOWER_PARSER_NAME",
-      "injection-regex": "^LOWER_PARSER_NAME$"
-    }
-  ]
+  }
 }
@@ -9,7 +9,6 @@ version = "PARSER_VERSION"
 keywords = ["incremental", "parsing", "tree-sitter", "PARSER_NAME"]
 classifiers = [
     "Intended Audience :: Developers",
-    "License :: OSI Approved :: MIT License",
     "Topic :: Software Development :: Compilers",
     "Topic :: Text Processing :: Linguistic",
     "Typing :: Typed",
@@ -32,7 +32,7 @@ fn detect_language_by_first_line_regex() {
         .find_language_configurations_at_path(strace_dir.path(), false)
         .unwrap();
 
-    // this is just to validate that we can read the package.json correctly
+    // this is just to validate that we can read the tree-sitter.json correctly
     assert_eq!(config[0].scope.as_ref().unwrap(), "source.strace");
 
     let file_name = strace_dir.path().join("strace.log");
@@ -306,6 +306,33 @@ fn test_parent_of_zero_width_node() {
     assert_eq!(parent, script_element);
 }
 
+#[test]
+fn test_next_sibling_of_zero_width_node() {
+    let grammar_json = load_grammar_file(
+        &fixtures_dir()
+            .join("test_grammars")
+            .join("next_sibling_from_zwt")
+            .join("grammar.js"),
+        None,
+    )
+    .unwrap();
+
+    let (parser_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
+
+    let mut parser = Parser::new();
+    let language = get_test_language(&parser_name, &parser_code, None);
+    parser.set_language(&language).unwrap();
+
+    let tree = parser.parse("abdef", None).unwrap();
+
+    let root_node = tree.root_node();
+    let missing_c = root_node.child(2).unwrap();
+    assert!(missing_c.is_missing());
+    assert_eq!(missing_c.kind(), "c");
+    let node_d = root_node.child(3).unwrap();
+    assert_eq!(missing_c.next_sibling().unwrap(), node_d);
+}
+
 #[test]
 fn test_node_field_name_for_child() {
     let mut parser = Parser::new();

@@ -1026,6 +1053,31 @@ fn test_node_numeric_symbols_respect_simple_aliases() {
     assert_eq!(unary_minus_node.kind_id(), binary_minus_node.kind_id());
 }
 
+#[test]
+fn test_hidden_zero_width_node_with_visible_child() {
+    let code = r"
+    class Foo {
+        std::
+    private:
+        std::string s;
+    };
+    ";
+
+    let mut parser = Parser::new();
+    parser.set_language(&get_language("cpp")).unwrap();
+    let tree = parser.parse(code, None).unwrap();
+    let root = tree.root_node();
+
+    let class_specifier = root.child(0).unwrap();
+    let field_decl_list = class_specifier.child_by_field_name("body").unwrap();
+    let field_decl = field_decl_list.named_child(0).unwrap();
+    let field_ident = field_decl.child_by_field_name("declarator").unwrap();
+    assert_eq!(
+        field_decl.child_with_descendant(field_ident).unwrap(),
+        field_ident
+    );
+}
+
 fn get_all_nodes(tree: &Tree) -> Vec<Node> {
     let mut result = Vec::new();
     let mut visited_children = false;
@@ -1507,6 +1507,20 @@ fn test_parsing_with_scanner_logging() {
     assert!(found);
 }
 
+#[test]
+fn test_parsing_get_column_at_eof() {
+    let dir = fixtures_dir().join("test_grammars").join("get_col_eof");
+    let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap();
+    let (grammar_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
+
+    let mut parser = Parser::new();
+    parser
+        .set_language(&get_test_language(&grammar_name, &parser_code, Some(&dir)))
+        .unwrap();
+
+    parser.parse("a", None).unwrap();
+}
+
 const fn simple_range(start: usize, end: usize) -> Range {
     Range {
         start_byte: start,
@@ -5,6 +5,9 @@ edition.workspace = true
 rust-version.workspace = true
 publish = false
 
+[lints]
+workspace = true
+
 [lib]
 proc-macro = true
@@ -1,33 +1,116 @@
-let tree;
-
-(async () => {
-  const CAPTURE_REGEX = /@\s*([\w._-]+)/g;
-  const COLORS_BY_INDEX = [
-    'blue',
-    'chocolate',
-    'darkblue',
-    'darkcyan',
-    'darkgreen',
-    'darkred',
-    'darkslategray',
-    'dimgray',
-    'green',
-    'indigo',
-    'navy',
-    'red',
-    'sienna',
-  ];
-
-  const codeInput = document.getElementById('code-input');
-  const languageSelect = document.getElementById('language-select');
-  const loggingCheckbox = document.getElementById('logging-checkbox');
-  const outputContainer = document.getElementById('output-container');
-  const outputContainerScroll = document.getElementById('output-container-scroll');
-  const playgroundContainer = document.getElementById('playground-container');
-  const queryCheckbox = document.getElementById('query-checkbox');
-  const queryContainer = document.getElementById('query-container');
-  const queryInput = document.getElementById('query-input');
-  const updateTimeSpan = document.getElementById('update-time');
+function initializeLocalTheme() {
+  const themeToggle = document.getElementById('theme-toggle');
+  if (!themeToggle) return;
+
+  // Load saved theme or use system preference
+  const savedTheme = localStorage.getItem('theme');
+  const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
+  const initialTheme = savedTheme || (prefersDark ? 'dark' : 'light');
+
+  // Set initial theme
+  document.documentElement.setAttribute('data-theme', initialTheme);
+
+  themeToggle.addEventListener('click', () => {
+    const currentTheme = document.documentElement.getAttribute('data-theme');
+    const newTheme = currentTheme === 'light' ? 'dark' : 'light';
+    document.documentElement.setAttribute('data-theme', newTheme);
+    localStorage.setItem('theme', newTheme);
+  });
+}
+
+function initializeCustomSelect({ initialValue = null, addListeners = false }) {
+  const button = document.getElementById('language-button');
+  const select = document.getElementById('language-select');
+  if (!button || !select) return;
+
+  const dropdown = button.nextElementSibling;
+  const selectedValue = button.querySelector('.selected-value');
+
+  if (initialValue) {
+    select.value = initialValue;
+  }
+  selectedValue.textContent = select.options[select.selectedIndex].text;
+
+  if (addListeners) {
+    button.addEventListener('click', (e) => {
+      e.preventDefault(); // Prevent form submission
+      dropdown.classList.toggle('show');
+    });
+
+    document.addEventListener('click', (e) => {
+      if (!button.contains(e.target)) {
+        dropdown.classList.remove('show');
+      }
+    });
+
+    dropdown.querySelectorAll('.option').forEach(option => {
+      option.addEventListener('click', () => {
+        selectedValue.textContent = option.textContent;
+        select.value = option.dataset.value;
+        dropdown.classList.remove('show');
+
+        const event = new Event('change');
+        select.dispatchEvent(event);
+      });
+    });
+  }
+}
+
+window.initializePlayground = async function initializePlayground(opts) {
+  const { local } = opts;
+  if (local) {
+    initializeLocalTheme();
+  }
+  initializeCustomSelect({ addListeners: true });
+
+  let tree;
+
+  const CAPTURE_REGEX = /@\s*([\w\._-]+)/g;
+  const LIGHT_COLORS = [
+    "#0550ae", // blue
+    "#ab5000", // rust brown
+    "#116329", // forest green
+    "#844708", // warm brown
+    "#6639ba", // purple
+    "#7d4e00", // orange brown
+    "#0969da", // bright blue
+    "#1a7f37", // green
+    "#cf222e", // red
+    "#8250df", // violet
+    "#6e7781", // gray
+    "#953800", // dark orange
+    "#1b7c83" // teal
+  ];
+
+  const DARK_COLORS = [
+    "#79c0ff", // light blue
+    "#ffa657", // orange
+    "#7ee787", // light green
+    "#ff7b72", // salmon
+    "#d2a8ff", // light purple
+    "#ffa198", // pink
+    "#a5d6ff", // pale blue
+    "#56d364", // bright green
+    "#ff9492", // light red
+    "#e0b8ff", // pale purple
+    "#9ca3af", // gray
+    "#ffb757", // yellow orange
+    "#80cbc4" // light teal
+  ];
+
+  const codeInput = document.getElementById("code-input");
+  const languageSelect = document.getElementById("language-select");
+  const loggingCheckbox = document.getElementById("logging-checkbox");
+  const anonymousNodes = document.getElementById('anonymous-nodes-checkbox');
+  const outputContainer = document.getElementById("output-container");
+  const outputContainerScroll = document.getElementById(
+    "output-container-scroll",
+  );
+  const playgroundContainer = document.getElementById("playground-container");
+  const queryCheckbox = document.getElementById("query-checkbox");
+  const queryContainer = document.getElementById("query-container");
+  const queryInput = document.getElementById("query-input");
+  const updateTimeSpan = document.getElementById("update-time");
   const languagesByName = {};
 
   loadState();

@@ -35,21 +118,36 @@ let tree;
   await TreeSitter.init();
 
   const parser = new TreeSitter();
 
+  console.log(parser, codeInput, queryInput);
+
   const codeEditor = CodeMirror.fromTextArea(codeInput, {
     lineNumbers: true,
     showCursorWhenSelecting: true
   });
 
+  codeEditor.on('keydown', (_, event) => {
+    if (event.key === 'ArrowLeft' || event.key === 'ArrowRight') {
+      event.stopPropagation(); // Prevent mdBook from going back/forward
+    }
+  });
+
   const queryEditor = CodeMirror.fromTextArea(queryInput, {
     lineNumbers: true,
-    showCursorWhenSelecting: true
+    showCursorWhenSelecting: true,
   });
 
+  queryEditor.on('keydown', (_, event) => {
+    if (event.key === 'ArrowLeft' || event.key === 'ArrowRight') {
+      event.stopPropagation(); // Prevent mdBook from going back/forward
+    }
+  });
+
   const cluster = new Clusterize({
     rows: [],
     noDataText: null,
     contentElem: outputContainer,
-    scrollElem: outputContainerScroll
+    scrollElem: outputContainerScroll,
   });
   const renderTreeOnCodeChange = debounce(renderTree, 50);
   const saveStateOnChange = debounce(saveState, 2000);

@@ -62,32 +160,33 @@ let tree;
   let isRendering = 0;
   let query;
 
-  codeEditor.on('changes', handleCodeChange);
-  codeEditor.on('viewportChange', runTreeQueryOnChange);
-  codeEditor.on('cursorActivity', debounce(handleCursorMovement, 150));
-  queryEditor.on('changes', debounce(handleQueryChange, 150));
+  codeEditor.on("changes", handleCodeChange);
+  codeEditor.on("viewportChange", runTreeQueryOnChange);
+  codeEditor.on("cursorActivity", debounce(handleCursorMovement, 150));
+  queryEditor.on("changes", debounce(handleQueryChange, 150));
 
-  loggingCheckbox.addEventListener('change', handleLoggingChange);
-  queryCheckbox.addEventListener('change', handleQueryEnableChange);
-  languageSelect.addEventListener('change', handleLanguageChange);
-  outputContainer.addEventListener('click', handleTreeClick);
+  loggingCheckbox.addEventListener("change", handleLoggingChange);
+  anonymousNodes.addEventListener('change', renderTree);
+  queryCheckbox.addEventListener("change", handleQueryEnableChange);
+  languageSelect.addEventListener("change", handleLanguageChange);
+  outputContainer.addEventListener("click", handleTreeClick);
 
   handleQueryEnableChange();
-  await handleLanguageChange()
+  await handleLanguageChange();
 
-  playgroundContainer.style.visibility = 'visible';
+  playgroundContainer.style.visibility = "visible";
 
   async function handleLanguageChange() {
     const newLanguageName = languageSelect.value;
     if (!languagesByName[newLanguageName]) {
-      const url = `${LANGUAGE_BASE_URL}/tree-sitter-${newLanguageName}.wasm`
+      const url = `${LANGUAGE_BASE_URL}/tree-sitter-${newLanguageName}.wasm`;
       languageSelect.disabled = true;
       try {
         languagesByName[newLanguageName] = await TreeSitter.Language.load(url);
       } catch (e) {
         console.error(e);
         languageSelect.value = languageName;
-        return
+        return;
       } finally {
         languageSelect.disabled = false;
       }

@@ -100,8 +199,8 @@ let tree;
     handleQueryChange();
   }
 
-  async function handleCodeChange(_editor, changes) {
-    const newText = `${codeEditor.getValue()}\n`;
+  async function handleCodeChange(editor, changes) {
+    const newText = codeEditor.getValue() + "\n";
     const edits = tree && changes && changes.map(treeEditForEditorChange);
 
     const start = performance.now();

@@ -126,16 +225,16 @@ let tree;
     isRendering++;
    const cursor = tree.walk();
 
-    const currentRenderCount = parseCount;
-    let row = '';
-    const rows = [];
+    let currentRenderCount = parseCount;
+    let row = "";
+    let rows = [];
     let finishedRow = false;
     let visitedChildren = false;
     let indentLevel = 0;
 
-    for (let i = 0;; i++) {
+    for (let i = 0; ; i++) {
       if (i > 0 && i % 10000 === 0) {
-        await new Promise(r => setTimeout(r, 0));
+        await new Promise((r) => setTimeout(r, 0));
         if (parseCount !== currentRenderCount) {
           cursor.delete();
           isRendering--;

@@ -145,9 +244,12 @@ let tree;
 
       let displayName;
       if (cursor.nodeIsMissing) {
-        displayName = `MISSING ${cursor.nodeType}`
+        const nodeTypeText = cursor.nodeIsNamed ? cursor.nodeType : `"${cursor.nodeType}"`;
+        displayName = `MISSING ${nodeTypeText}`;
       } else if (cursor.nodeIsNamed) {
         displayName = cursor.nodeType;
+      } else if (anonymousNodes.checked) {
+        displayName = cursor.nodeType
       }
 
       if (visitedChildren) {

@@ -166,7 +268,7 @@ let tree;
       } else {
         if (displayName) {
           if (finishedRow) {
-            row += '</div>';
+            row += "</div>";
             rows.push(row);
             finishedRow = false;
           }

@@ -175,11 +277,23 @@ let tree;
           const id = cursor.nodeId;
           let fieldName = cursor.currentFieldName;
           if (fieldName) {
-            fieldName += ': ';
+            fieldName += ": ";
           } else {
-            fieldName = '';
+            fieldName = "";
           }
-          row = `<div>${'  '.repeat(indentLevel)}${fieldName}<a class='plain' href="#" data-id=${id} data-range="${start.row},${start.column},${end.row},${end.column}">${displayName}</a> [${start.row}, ${start.column}] - [${end.row}, ${end.column}]`;
+
+          const nodeClass =
+            displayName === 'ERROR' || displayName.startsWith('MISSING')
+              ? 'node-link error'
+              : cursor.nodeIsNamed
+                ? 'node-link named'
+                : 'node-link anonymous';
+
+          row = `<div class="tree-row">${"  ".repeat(indentLevel)}${fieldName}` +
+            `<a class='${nodeClass}' href="#" data-id=${id} ` +
+            `data-range="${start.row},${start.column},${end.row},${end.column}">` +
+            `${displayName}</a> <span class="position-info">` +
+            `[${start.row}, ${start.column}] - [${end.row}, ${end.column}]</span>`;
           finishedRow = true;
         }

@@ -192,7 +306,7 @@ let tree;
         }
       }
       if (finishedRow) {
-        row += '</div>';
+        row += "</div>";
         rows.push(row);
       }

@@ -212,33 +326,48 @@ let tree;
 
     codeEditor.operation(() => {
       const marks = codeEditor.getAllMarks();
-      marks.forEach(m => m.clear());
+      marks.forEach((m) => m.clear());
 
       if (tree && query) {
         const captures = query.captures(
           tree.rootNode,
-          {row: startRow, column: 0},
-          {row: endRow, column: 0},
+          { row: startRow, column: 0 },
+          { row: endRow, column: 0 },
         );
         let lastNodeId;
-        for (const {name, node} of captures) {
+        for (const { name, node } of captures) {
           if (node.id === lastNodeId) continue;
           lastNodeId = node.id;
-          const {startPosition, endPosition} = node;
+          const { startPosition, endPosition } = node;
           codeEditor.markText(
-            {line: startPosition.row, ch: startPosition.column},
-            {line: endPosition.row, ch: endPosition.column},
+            { line: startPosition.row, ch: startPosition.column },
+            { line: endPosition.row, ch: endPosition.column },
             {
               inclusiveLeft: true,
              inclusiveRight: true,
-              css: `color: ${colorForCaptureName(name)}`
-            }
+              css: `color: ${colorForCaptureName(name)}`,
+            },
           );
         }
       }
     });
   }
 
+  // When we change from a dark theme to a light theme (and vice versa), the colors of the
+  // captures need to be updated.
+  const observer = new MutationObserver((mutations) => {
+    mutations.forEach((mutation) => {
+      if (mutation.attributeName === 'class') {
+        handleQueryChange();
+      }
+    });
+  });
+
+  observer.observe(document.documentElement, {
+    attributes: true,
+    attributeFilter: ['class']
+  });
+
   function handleQueryChange() {
     if (query) {
       query.delete();

@@ -247,7 +376,7 @@ let tree;
     }
 
     queryEditor.operation(() => {
-      queryEditor.getAllMarks().forEach(m => m.clear());
+      queryEditor.getAllMarks().forEach((m) => m.clear());
       if (!queryCheckbox.checked) return;
 
       const queryText = queryEditor.getValue();

@@ -258,15 +387,15 @@ let tree;
 
       let row = 0;
       queryEditor.eachLine((line) => {
-        while (match = CAPTURE_REGEX.exec(line.text)) {
+        while ((match = CAPTURE_REGEX.exec(line.text))) {
           queryEditor.markText(
-            {line: row, ch: match.index},
-            {line: row, ch: match.index + match[0].length},
+            { line: row, ch: match.index },
+            { line: row, ch: match.index + match[0].length },
             {
               inclusiveLeft: true,
               inclusiveRight: true,
-              css: `color: ${colorForCaptureName(match[1])}`
-            }
+              css: `color: ${colorForCaptureName(match[1])}`,
+            },
           );
         }
         row++;

@@ -275,7 +404,7 @@ let tree;
       const startPosition = queryEditor.posFromIndex(error.index);
       const endPosition = {
         line: startPosition.line,
-        ch: startPosition.ch + (error.length || Infinity)
+        ch: startPosition.ch + (error.length || Infinity),
       };
 
       if (error.index === queryText.length) {

@@ -287,16 +416,12 @@ let tree;
         }
       }
 
-      queryEditor.markText(
-        startPosition,
-        endPosition,
-        {
-          className: 'query-error',
-          inclusiveLeft: true,
-          inclusiveRight: true,
-          attributes: {title: error.message}
-        }
-      );
+      queryEditor.markText(startPosition, endPosition, {
+        className: "query-error",
+        inclusiveLeft: true,
+        inclusiveRight: true,
+        attributes: { title: error.message },
+      });
     }
   });

@@ -308,16 +433,13 @@ let tree;
     if (isRendering) return;
 
     const selection = codeEditor.getDoc().listSelections()[0];
-    let start = {row: selection.anchor.line, column: selection.anchor.ch};
-    let end = {row: selection.head.line, column: selection.head.ch};
+    let start = { row: selection.anchor.line, column: selection.anchor.ch };
+    let end = { row: selection.head.line, column: selection.head.ch };
     if (
       start.row > end.row ||
-      (
-        start.row === end.row &&
-        start.column > end.column
-      )
+      (start.row === end.row && start.column > end.column)
     ) {
-      const swap = end;
+      let swap = end;
       end = start;
       start = swap;
     }

@@ -325,12 +447,22 @@ let tree;
     if (treeRows) {
       if (treeRowHighlightedIndex !== -1) {
         const row = treeRows[treeRowHighlightedIndex];
-        if (row) treeRows[treeRowHighlightedIndex] = row.replace('highlighted', 'plain');
+        if (row)
+          treeRows[treeRowHighlightedIndex] = row.replace(
+            "highlighted",
+            "plain",
+          );
       }
-      treeRowHighlightedIndex = treeRows.findIndex(row => row.includes(`data-id=${node.id}`));
+      treeRowHighlightedIndex = treeRows.findIndex((row) =>
|
||||
row.includes(`data-id=${node.id}`),
|
||||
);
|
||||
if (treeRowHighlightedIndex !== -1) {
|
||||
const row = treeRows[treeRowHighlightedIndex];
|
||||
if (row) treeRows[treeRowHighlightedIndex] = row.replace('plain', 'highlighted');
|
||||
if (row)
|
||||
treeRows[treeRowHighlightedIndex] = row.replace(
|
||||
"plain",
|
||||
"highlighted",
|
||||
);
|
||||
}
|
||||
cluster.update(treeRows);
|
||||
const lineHeight = cluster.options.item_height;
|
||||
|
|
@ -338,26 +470,25 @@ let tree;
|
|||
const containerHeight = outputContainerScroll.clientHeight;
|
||||
const offset = treeRowHighlightedIndex * lineHeight;
|
||||
if (scrollTop > offset - 20) {
|
||||
$(outputContainerScroll).animate({scrollTop: offset - 20}, 150);
|
||||
$(outputContainerScroll).animate({ scrollTop: offset - 20 }, 150);
|
||||
} else if (scrollTop < offset + lineHeight + 40 - containerHeight) {
|
||||
$(outputContainerScroll).animate({scrollTop: offset - containerHeight + 40}, 150);
|
||||
$(outputContainerScroll).animate(
|
||||
{ scrollTop: offset - containerHeight + 40 },
|
||||
150,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function handleTreeClick(event) {
|
||||
if (event.target.tagName === 'A') {
|
||||
if (event.target.tagName === "A") {
|
||||
event.preventDefault();
|
||||
const [startRow, startColumn, endRow, endColumn] = event
|
||||
.target
|
||||
.dataset
|
||||
.range
|
||||
.split(',')
|
||||
.map(n => parseInt(n));
|
||||
const [startRow, startColumn, endRow, endColumn] =
|
||||
event.target.dataset.range.split(",").map((n) => parseInt(n));
|
||||
codeEditor.focus();
|
||||
codeEditor.setSelection(
|
||||
{line: startRow, ch: startColumn},
|
||||
{line: endRow, ch: endColumn}
|
||||
{ line: startRow, ch: startColumn },
|
||||
{ line: endRow, ch: endColumn },
|
||||
);
|
||||
}
|
||||
}
|
||||
|
|
@ -366,9 +497,9 @@ let tree;
|
|||
if (loggingCheckbox.checked) {
|
||||
parser.setLogger((message, lexing) => {
|
||||
if (lexing) {
|
||||
console.log(" ", message)
|
||||
console.log(" ", message);
|
||||
} else {
|
||||
console.log(message)
|
||||
console.log(message);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
|
|
@ -378,11 +509,11 @@ let tree;
|
|||
|
||||
function handleQueryEnableChange() {
|
||||
if (queryCheckbox.checked) {
|
||||
queryContainer.style.visibility = '';
|
||||
queryContainer.style.position = '';
|
||||
queryContainer.style.visibility = "";
|
||||
queryContainer.style.position = "";
|
||||
} else {
|
||||
queryContainer.style.visibility = 'hidden';
|
||||
queryContainer.style.position = 'absolute';
|
||||
queryContainer.style.visibility = "hidden";
|
||||
queryContainer.style.position = "absolute";
|
||||
}
|
||||
handleQueryChange();
|
||||
}
|
||||
|
|
@ -392,48 +523,63 @@ let tree;
|
|||
const newLineCount = change.text.length;
|
||||
const lastLineLength = change.text[newLineCount - 1].length;
|
||||
|
||||
const startPosition = {row: change.from.line, column: change.from.ch};
|
||||
const oldEndPosition = {row: change.to.line, column: change.to.ch};
|
||||
const startPosition = { row: change.from.line, column: change.from.ch };
|
||||
const oldEndPosition = { row: change.to.line, column: change.to.ch };
|
||||
const newEndPosition = {
|
||||
row: startPosition.row + newLineCount - 1,
|
||||
column: newLineCount === 1
|
||||
? startPosition.column + lastLineLength
|
||||
: lastLineLength
|
||||
column:
|
||||
newLineCount === 1
|
||||
? startPosition.column + lastLineLength
|
||||
: lastLineLength,
|
||||
};
|
||||
|
||||
const startIndex = codeEditor.indexFromPos(change.from);
|
||||
let newEndIndex = startIndex + newLineCount - 1;
|
||||
let oldEndIndex = startIndex + oldLineCount - 1;
|
||||
for (let i = 0; i < newLineCount; i++) newEndIndex += change.text[i].length;
|
||||
for (let i = 0; i < oldLineCount; i++) oldEndIndex += change.removed[i].length;
|
||||
for (let i = 0; i < oldLineCount; i++)
|
||||
oldEndIndex += change.removed[i].length;
|
||||
|
||||
return {
|
||||
startIndex, oldEndIndex, newEndIndex,
|
||||
startPosition, oldEndPosition, newEndPosition
|
||||
startIndex,
|
||||
oldEndIndex,
|
||||
newEndIndex,
|
||||
startPosition,
|
||||
oldEndPosition,
|
||||
newEndPosition,
|
||||
};
|
||||
}
|
||||
|
||||
function colorForCaptureName(capture) {
|
||||
const id = query.captureNames.indexOf(capture);
|
||||
return COLORS_BY_INDEX[id % COLORS_BY_INDEX.length];
|
||||
const isDark = document.querySelector('html').classList.contains('ayu') ||
|
||||
document.querySelector('html').classList.contains('coal') ||
|
||||
document.querySelector('html').classList.contains('navy');
|
||||
|
||||
const colors = isDark ? DARK_COLORS : LIGHT_COLORS;
|
||||
return colors[id % colors.length];
|
||||
}
|
||||
|
||||
function loadState() {
|
||||
const language = localStorage.getItem("language");
|
||||
const sourceCode = localStorage.getItem("sourceCode");
|
||||
const anonNodes = localStorage.getItem("anonymousNodes");
|
||||
const query = localStorage.getItem("query");
|
||||
const queryEnabled = localStorage.getItem("queryEnabled");
|
||||
if (language != null && sourceCode != null && query != null) {
|
||||
queryInput.value = query;
|
||||
codeInput.value = sourceCode;
|
||||
languageSelect.value = language;
|
||||
queryCheckbox.checked = (queryEnabled === 'true');
|
||||
initializeCustomSelect({ initialValue: language });
|
||||
anonymousNodes.checked = anonNodes === "true";
|
||||
queryCheckbox.checked = queryEnabled === "true";
|
||||
}
|
||||
}
|
||||
|
||||
function saveState() {
|
||||
localStorage.setItem("language", languageSelect.value);
|
||||
localStorage.setItem("sourceCode", codeEditor.getValue());
|
||||
localStorage.setItem("anonymousNodes", anonymousNodes.checked);
|
||||
saveQueryState();
|
||||
}
|
||||
|
||||
|
|
@ -443,17 +589,18 @@ let tree;
|
|||
}
|
||||
|
||||
function debounce(func, wait, immediate) {
|
||||
let timeout;
|
||||
return function() {
|
||||
const context = this, args = arguments;
|
||||
const later = function() {
|
||||
var timeout;
|
||||
return function () {
|
||||
var context = this,
|
||||
args = arguments;
|
||||
var later = function () {
|
||||
timeout = null;
|
||||
if (!immediate) func.apply(context, args);
|
||||
};
|
||||
const callNow = immediate && !timeout;
|
||||
var callNow = immediate && !timeout;
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(later, wait);
|
||||
if (callNow) func.apply(context, args);
|
||||
};
|
||||
}
|
||||
})();
|
||||
};
|
||||
|
|
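The playground hunks above restrict capture queries to the visible rows before highlighting. A minimal sketch of the same range-restricted capture query through the Rust API (the grammar crate `tree_sitter_javascript`, the query string, and the source text are illustrative assumptions; the `streaming-iterator` dependency is the one added to the crate manifest in this comparison):

```rust
use streaming_iterator::StreamingIterator;
use tree_sitter::{Parser, Point, Query, QueryCursor};

fn main() {
    let language: tree_sitter::Language = tree_sitter_javascript::LANGUAGE.into();
    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();

    let source = "function add(a, b) { return a + b; }\n";
    let tree = parser.parse(source, None).unwrap();

    let query = Query::new(&language, "(identifier) @id").unwrap();
    let mut cursor = QueryCursor::new();
    // Restrict captures to a row range, as the playground does for the
    // visible portion of the editor.
    cursor.set_point_range(Point::new(0, 0)..Point::new(1, 0));

    // `captures` is a streaming iterator: each item borrows from the cursor.
    let mut captures = cursor.captures(&query, tree.root_node(), source.as_bytes());
    while let Some((m, capture_index)) = captures.next() {
        let node = m.captures[*capture_index].node;
        println!("{} {:?}", node.kind(), node.start_position());
    }
}
```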
@@ -15,6 +15,9 @@ license.workspace = true
keywords = ["incremental", "parsing", "syntax", "highlighting"]
categories = ["parsing", "text-editors"]

[lints]
workspace = true

[lib]
crate-type = ["lib", "staticlib"]
@@ -1,60 +1,74 @@
cmake_minimum_required(VERSION 3.13)

project(tree-sitter
VERSION "0.24.1"
VERSION "0.24.7"
DESCRIPTION "An incremental parsing system for programming tools"
HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/"
LANGUAGES C)

option(BUILD_SHARED_LIBS "Build using shared libraries" ON)
option(TREE_SITTER_FEATURE_WASM "Enable the Wasm feature" OFF)
option(AMALGAMATED "Build using an amalgamated source" OFF)

file(GLOB TS_SOURCE_FILES src/*.c)
list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
if(AMALGAMATED)
set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
else()
file(GLOB TS_SOURCE_FILES src/*.c)
list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
endif()

add_library(tree-sitter ${TS_SOURCE_FILES})

target_include_directories(tree-sitter PRIVATE src src/wasm include)

if(NOT MSVC)
target_compile_options(tree-sitter PRIVATE -Wall -Wextra -Wshadow -Wno-unused-parameter -pedantic)
endif()

if(NOT BUILD_SHARED_LIBS)
if(WIN32)
set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a)
else()
set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
endif()
endif()
if(MSVC)
target_compile_options(tree-sitter PRIVATE
/wd4018 # disable 'signed/unsigned mismatch'
/wd4232 # disable 'nonstandard extension used'
/wd4244 # disable 'possible loss of data'
/wd4267 # disable 'possible loss of data (size_t)'
/wd4701 # disable 'potentially uninitialized local variable'
/we4022 # treat 'incompatible types' as an error
/W4)
else()
target_compile_options(tree-sitter PRIVATE
-Wall -Wextra -Wshadow -Wpedantic
-Werror=incompatible-pointer-types)
endif()

if(TREE_SITTER_FEATURE_WASM)
if(NOT DEFINED CACHE{WASMTIME_INCLUDE_DIR})
message(CHECK_START "Looking for wasmtime headers")
find_path(WASMTIME_INCLUDE_DIR wasmtime.h
PATHS ENV DEP_WASMTIME_C_API_INCLUDE
REQUIRED)
PATHS ENV DEP_WASMTIME_C_API_INCLUDE)
if(NOT WASMTIME_INCLUDE_DIR)
unset(WASMTIME_INCLUDE_DIR CACHE)
message(FATAL_ERROR "Could not find wasmtime headers.\nDid you forget to set CMAKE_INCLUDE_PATH?")
endif()
message(CHECK_PASS "found")
endif()

if(NOT DEFINED CACHE{WASMTIME_LIBRARY})
message(CHECK_START "Looking for wasmtime library")
find_library(WASMTIME_LIBRARY wasmtime
REQUIRED)
find_library(WASMTIME_LIBRARY wasmtime)
if(NOT WASMTIME_LIBRARY)
unset(WASMTIME_LIBRARY CACHE)
message(FATAL_ERROR "Could not find wasmtime library.\nDid you forget to set CMAKE_LIBRARY_PATH?")
endif()
message(CHECK_PASS "found")
endif()

target_compile_definitions(tree-sitter PUBLIC TREE_SITTER_FEATURE_WASM)
target_include_directories(tree-sitter SYSTEM PRIVATE "${WASMTIME_INCLUDE_DIR}")
target_link_libraries(tree-sitter PRIVATE "${WASMTIME_LIBRARY}")
target_link_libraries(tree-sitter PUBLIC "${WASMTIME_LIBRARY}")
set_property(TARGET tree-sitter PROPERTY C_STANDARD_REQUIRED ON)

if(NOT BUILD_SHARED_LIBS)
if(WIN32)
target_compile_definitions(tree-sitter PRIVATE WASM_API_EXTERN= WASI_API_EXTERN=)
target_link_libraries(tree-sitter PRIVATE ws2_32 advapi32 userenv ntdll shell32 ole32 bcrypt)
target_link_libraries(tree-sitter INTERFACE ws2_32 advapi32 userenv ntdll shell32 ole32 bcrypt)
elseif(NOT APPLE)
target_link_libraries(tree-sitter PRIVATE pthread dl m)
target_link_libraries(tree-sitter INTERFACE pthread dl m)
endif()
endif()
endif()

@@ -64,7 +78,10 @@ set_target_properties(tree-sitter
C_STANDARD 11
C_VISIBILITY_PRESET hidden
POSITION_INDEPENDENT_CODE ON
SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}")
SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}"
DEFINE_SYMBOL "")

target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE)

configure_file(tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)
@@ -25,6 +25,9 @@ include = [
"/include/tree_sitter/api.h",
]

[lints]
workspace = true

[features]
default = ["std"]
std = ["regex/std", "regex/perf", "regex-syntax/unicode"]

@@ -37,7 +40,7 @@ tree-sitter-language = { version = "0.1", path = "language" }
streaming-iterator = "0.1.9"

[dependencies.wasmtime-c-api]
version = "25.0.1"
version = "25.0.2"
optional = true
package = "wasmtime-c-api-impl"
default-features = false
@@ -8,7 +8,6 @@ extern crate alloc;
#[cfg(not(feature = "std"))]
use alloc::{boxed::Box, format, string::String, string::ToString, vec::Vec};
use core::{
char,
ffi::{c_char, c_void, CStr},
fmt::{self, Write},
hash, iter,

@@ -489,9 +488,9 @@ impl Parser {
/// Get the parser's current language.
#[doc(alias = "ts_parser_language")]
#[must_use]
pub fn language(&self) -> Option<Language> {
pub fn language(&self) -> Option<LanguageRef<'_>> {
let ptr = unsafe { ffi::ts_parser_language(self.0.as_ptr()) };
(!ptr.is_null()).then(|| Language(ptr))
(!ptr.is_null()).then_some(LanguageRef(ptr, PhantomData))
}

/// Get the parser's current logger.

@@ -1854,9 +1853,28 @@ impl Query {
// Error types that report names
ffi::TSQueryErrorNodeType | ffi::TSQueryErrorField | ffi::TSQueryErrorCapture => {
let suffix = source.split_at(offset).1;
let end_offset = suffix
.find(|c| !char::is_alphanumeric(c) && c != '_' && c != '-')
.unwrap_or(suffix.len());
let in_quotes = source.as_bytes()[offset - 1] == b'"';
let mut end_offset = suffix.len();
if let Some(pos) = suffix
.char_indices()
.take_while(|(_, c)| *c != '\n')
.find_map(|(i, c)| match c {
'"' if in_quotes
&& i > 0
&& suffix.chars().nth(i - 1) != Some('\\') =>
{
Some(i)
}
c if !in_quotes
&& (c.is_whitespace() || c == '(' || c == ')' || c == ':') =>
{
Some(i)
}
_ => None,
})
{
end_offset = pos;
}
message = suffix.split_at(end_offset).0.to_string();
kind = match error_type {
ffi::TSQueryErrorNodeType => QueryErrorKind::NodeType,
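The `Parser::language` hunk above changes the return type from an owned `Language` to a borrowed `LanguageRef<'_>`, so inspecting the current language no longer clones or bumps a reference count. A small sketch of the resulting call pattern (the grammar crate `tree_sitter_javascript` is an assumption):

```rust
use tree_sitter::Parser;

fn main() {
    let mut parser = Parser::new();
    // No language set yet, so `language()` returns None.
    assert!(parser.language().is_none());

    let language: tree_sitter::Language = tree_sitter_javascript::LANGUAGE.into();
    parser.set_language(&language).expect("incompatible language version");

    // The returned `LanguageRef` borrows from the parser, which is why the
    // new signature carries the `'_` lifetime.
    assert!(parser.language().is_some());
}
```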
@@ -1,6 +1,6 @@
{
"name": "web-tree-sitter",
"version": "0.24.1",
"version": "0.24.7",
"description": "Tree-sitter bindings for the web",
"main": "tree-sitter.js",
"types": "tree-sitter-web.d.ts",
@@ -1,14 +1,19 @@
[package]
name = "tree-sitter-language"
description = "The tree-sitter Language type, used by the library and by language implementations"
version = "0.1.2"
version = "0.1.3"
authors.workspace = true
edition.workspace = true
rust-version.workspace = true
readme = "README.md"
homepage.workspace = true
repository.workspace = true
license.workspace = true
keywords.workspace = true
categories = ["api-bindings", "development-tools::ffi", "parsing"]

[lints]
workspace = true

[lib]
path = "language.rs"
4 lib/language/README.md Normal file

@@ -0,0 +1,4 @@
# Tree-sitter Language

This crate provides a `LanguageFn` type for grammars to create `Language` instances from a parser,
without having to worry about the `tree-sitter` crate version not matching.
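To make the README concrete: a grammar crate wraps its raw C language function in a `LanguageFn`, and downstream code converts it into a `tree_sitter::Language` with `.into()`. A sketch of that pattern, with a hypothetical grammar name:

```rust
use tree_sitter_language::LanguageFn;

extern "C" {
    // Hypothetical grammar function; real bindings use the grammar's own
    // name, e.g. `tree_sitter_rust`.
    fn tree_sitter_mylang() -> *const ();
}

/// The wrapped language function. Because `tree-sitter-language` is a tiny,
/// stable crate, a grammar compiled against one `tree-sitter` version can
/// still hand this value to another: callers just write `LANGUAGE.into()`.
pub const LANGUAGE: LanguageFn = unsafe { LanguageFn::from_raw(tree_sitter_mylang) };
```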
@@ -14,6 +14,7 @@ extern "C" {
#include <string.h>

#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable : 4101)
#elif defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic push

@@ -278,7 +279,7 @@ static inline void _array__splice(Array *self, size_t element_size,
#define _compare_int(a, b) ((int)*(a) - (int)(b))

#ifdef _MSC_VER
#pragma warning(default : 4101)
#pragma warning(pop)
#elif defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic pop
#endif
@@ -49,9 +49,9 @@ static inline bool clock_is_gt(TSClock self, TSClock other) {
return self > other;
}

#elif defined(CLOCK_MONOTONIC) && !defined(__APPLE__)
#elif defined(CLOCK_MONOTONIC)

// POSIX with monotonic clock support (Linux)
// POSIX with monotonic clock support (Linux, macOS)
// * Represent a time as a monotonic (seconds, nanoseconds) pair.
// * Represent a duration as a number of microseconds.
//

@@ -102,7 +102,7 @@ static inline bool clock_is_gt(TSClock self, TSClock other) {

#else

// macOS or POSIX without monotonic clock support
// POSIX without monotonic clock support
// * Represent a time as a process clock value.
// * Represent a duration as a number of process clock ticks.
//
@@ -252,12 +252,12 @@ static uint32_t ts_lexer__get_column(TSLexer *_self) {
uint32_t goal_byte = self->current_position.bytes;

self->did_get_column = true;
self->current_position.bytes -= self->current_position.extent.column;
self->current_position.extent.column = 0;

if (self->current_position.bytes < self->chunk_start) {
ts_lexer__get_chunk(self);
}
Length start_of_col = {
self->current_position.bytes - self->current_position.extent.column,
{self->current_position.extent.row, 0},
};
ts_lexer_goto(self, start_of_col);
ts_lexer__get_chunk(self);

uint32_t result = 0;
if (!ts_lexer__eof(_self)) {
@@ -103,21 +103,6 @@ static inline bool ts_node_child_iterator_next(
return true;
}

// This will return true if the next sibling is a zero-width token that is adjacent to the current node and is relevant
static inline bool ts_node_child_iterator_next_sibling_is_empty_adjacent(NodeChildIterator *self, TSNode previous) {
if (!self->parent.ptr || ts_node_child_iterator_done(self)) return false;
if (self->child_index == 0) return false;
const Subtree *child = &ts_subtree_children(self->parent)[self->child_index];
TSSymbol alias = 0;
if (!ts_subtree_extra(*child)) {
if (self->alias_sequence) {
alias = self->alias_sequence[self->structural_child_index];
}
}
TSNode next = ts_node_new(self->tree, child, self->position, alias);
return ts_node_end_byte(previous) == ts_node_end_byte(next) && ts_node__is_relevant(next, true);
}

// TSNode - private

static inline bool ts_node__is_relevant(TSNode self, bool include_anonymous) {

@@ -277,8 +262,16 @@ static inline TSNode ts_node__next_sibling(TSNode self, bool include_anonymous)
TSNode child;
NodeChildIterator iterator = ts_node_iterate_children(&node);
while (ts_node_child_iterator_next(&iterator, &child)) {
if (iterator.position.bytes < target_end_byte) continue;
if (ts_node_start_byte(child) <= ts_node_start_byte(self)) {
if (iterator.position.bytes <= target_end_byte) continue;
uint32_t start_byte = ts_node_start_byte(self);
uint32_t child_start_byte = ts_node_start_byte(child);

bool is_empty = start_byte == target_end_byte;
bool contains_target = is_empty ?
child_start_byte < start_byte :
child_start_byte <= start_byte;

if (contains_target) {
if (ts_node__subtree(child).ptr != ts_node__subtree(self).ptr) {
child_containing_target = child;
}

@@ -549,9 +542,9 @@ TSNode ts_node_parent(TSNode self) {
if (node.id == self.id) return ts_node__null();

while (true) {
TSNode next_node = ts_node_child_containing_descendant(node, self);
if (ts_node_is_null(next_node)) break;
node = next_node;
TSNode next_node = ts_node_child_with_descendant(node, self);
if (next_node.id == self.id || ts_node_is_null(next_node)) break;
node = next_node;
}

return node;

@@ -560,6 +553,7 @@ TSNode ts_node_parent(TSNode self) {
TSNode ts_node_child_containing_descendant(TSNode self, TSNode descendant) {
uint32_t start_byte = ts_node_start_byte(descendant);
uint32_t end_byte = ts_node_end_byte(descendant);
bool is_empty = start_byte == end_byte;

do {
NodeChildIterator iter = ts_node_iterate_children(&self);

@@ -572,24 +566,16 @@ TSNode ts_node_child_containing_descendant(TSNode self, TSNode descendant) {
return ts_node__null();
}

// Here we check the current self node and *all* of its zero-width token siblings that follow.
// If any of these nodes contain the target subnode, we return that node. Otherwise, we restore the node we started at
// for the loop condition, and that will continue with the next *non-zero-width* sibling.
TSNode old = self;
// While the next sibling is a zero-width token
while (ts_node_child_iterator_next_sibling_is_empty_adjacent(&iter, self)) {
TSNode current_node = ts_node_child_containing_descendant(self, descendant);
// If the target child is in self, return it
if (!ts_node_is_null(current_node)) {
return current_node;
}
ts_node_child_iterator_next(&iter, &self);
if (self.id == descendant.id) {
return ts_node__null();
// If the descendant is empty, and the end byte is within `self`,
// we check whether `self` contains it or not.
if (is_empty && iter.position.bytes >= end_byte && ts_node_child_count(self) > 0) {
TSNode child = ts_node_child_with_descendant(self, descendant);
// If the child is not null, return self if it's relevant, else return the child
if (!ts_node_is_null(child)) {
return ts_node__is_relevant(self, true) ? self : child;
}
}
self = old;
} while (iter.position.bytes < end_byte || ts_node_child_count(self) == 0);
} while ((is_empty ? iter.position.bytes <= end_byte : iter.position.bytes < end_byte) || ts_node_child_count(self) == 0);
} while (!ts_node__is_relevant(self, true));

return self;

@@ -598,6 +584,7 @@ TSNode ts_node_child_containing_descendant(TSNode self, TSNode descendant) {
TSNode ts_node_child_with_descendant(TSNode self, TSNode descendant) {
uint32_t start_byte = ts_node_start_byte(descendant);
uint32_t end_byte = ts_node_end_byte(descendant);
bool is_empty = start_byte == end_byte;

do {
NodeChildIterator iter = ts_node_iterate_children(&self);

@@ -612,24 +599,16 @@ TSNode ts_node_child_with_descendant(TSNode self, TSNode descendant) {
return self;
}

// Here we check the current self node and *all* of its zero-width token siblings that follow.
// If any of these nodes contain the target subnode, we return that node. Otherwise, we restore the node we started at
// for the loop condition, and that will continue with the next *non-zero-width* sibling.
TSNode old = self;
// While the next sibling is a zero-width token
while (ts_node_child_iterator_next_sibling_is_empty_adjacent(&iter, self)) {
TSNode current_node = ts_node_child_with_descendant(self, descendant);
// If the target child is in self, return it
if (!ts_node_is_null(current_node)) {
return current_node;
}
ts_node_child_iterator_next(&iter, &self);
if (self.id == descendant.id) {
return self;
// If the descendant is empty, and the end byte is within `self`,
// we check whether `self` contains it or not.
if (is_empty && iter.position.bytes >= end_byte && ts_node_child_count(self) > 0) {
TSNode child = ts_node_child_with_descendant(self, descendant);
// If the child is not null, return self if it's relevant, else return the child
if (!ts_node_is_null(child)) {
return ts_node__is_relevant(self, true) ? self : child;
}
}
self = old;
} while (iter.position.bytes < end_byte || ts_node_child_count(self) == 0);
} while ((is_empty ? iter.position.bytes <= end_byte : iter.position.bytes < end_byte) || ts_node_child_count(self) == 0);
} while (!ts_node__is_relevant(self, true));

return self;
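These node.c hunks rework how the parent of a node is found: `ts_node_parent` now descends with `ts_node_child_with_descendant`, which, unlike the older `ts_node_child_containing_descendant`, may return the descendant itself, and both functions now handle zero-width (empty) descendants explicitly. A sketch of the corresponding descent through the Rust binding's `Node::child_with_descendant` (the grammar crate and source text are assumptions):

```rust
use tree_sitter::Parser;

fn main() {
    let language: tree_sitter::Language = tree_sitter_javascript::LANGUAGE.into();
    let mut parser = Parser::new();
    parser.set_language(&language).unwrap();

    let tree = parser.parse("let x = 1;", None).unwrap();
    let root = tree.root_node();
    // The identifier `x` occupies byte range 4..5 in this source.
    let descendant = root.descendant_for_byte_range(4, 5).unwrap();

    // Descend one level at a time: each call yields the child whose span
    // covers the descendant, and the walk ends at the descendant itself.
    let mut node = root;
    while node.id() != descendant.id() {
        node = node.child_with_descendant(descendant).unwrap();
        println!("{}", node.kind());
    }
}
```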
@@ -350,7 +350,7 @@ static bool ts_parser__call_main_lex_fn(TSParser *self, TSLexMode lex_mode) {
}
}

static bool ts_parser__call_keyword_lex_fn(TSParser *self, TSLexMode lex_mode) {
static bool ts_parser__call_keyword_lex_fn(TSParser *self) {
if (ts_language_is_wasm(self->language)) {
return ts_wasm_store_call_lex_keyword(self->wasm_store, 0);
} else {

@@ -553,27 +553,29 @@ static Subtree ts_parser__lex(
external_scanner_state_len
);

// When recovering from an error, ignore any zero-length external tokens
// unless they have changed the external scanner's state. This helps to
// avoid infinite loops which could otherwise occur, because the lexer is
// looking for any possible token, instead of looking for the specific set of
// tokens that are valid in some parse state.
// Avoid infinite loops caused by the external scanner returning empty tokens.
// Empty tokens are needed in some circumstances, e.g. indent/dedent tokens
// in Python. Ignore the following classes of empty tokens:
//
// Note that it's possible that the token end position may be *before* the
// original position of the lexer because of the way that tokens are positioned
// at included range boundaries: when a token is terminated at the start of
// an included range, it is marked as ending at the *end* of the preceding
// included range.
// * Tokens produced during error recovery. When recovering from an error,
// all tokens are allowed, so it's easy to accidentally return unwanted
// empty tokens.
// * Tokens that are marked as 'extra' in the grammar. These don't change
// the parse state, so they would definitely cause an infinite loop.
if (
self->lexer.token_end_position.bytes <= current_position.bytes &&
(error_mode || !ts_stack_has_advanced_since_error(self->stack, version)) &&
!external_scanner_state_changed
) {
LOG(
"ignore_empty_external_token symbol:%s",
SYM_NAME(self->language->external_scanner.symbol_map[self->lexer.data.result_symbol])
)
found_token = false;
TSSymbol symbol = self->language->external_scanner.symbol_map[self->lexer.data.result_symbol];
TSStateId next_parse_state = ts_language_next_state(self->language, parse_state, symbol);
bool token_is_extra = (next_parse_state == parse_state);
if (error_mode || !ts_stack_has_advanced_since_error(self->stack, version) || token_is_extra) {
LOG(
"ignore_empty_external_token symbol:%s",
SYM_NAME(self->language->external_scanner.symbol_map[self->lexer.data.result_symbol])
);
found_token = false;
}
}
}

@@ -651,7 +653,7 @@ static Subtree ts_parser__lex(
ts_lexer_reset(&self->lexer, self->lexer.token_start_position);
ts_lexer_start(&self->lexer);

is_keyword = ts_parser__call_keyword_lex_fn(self, lex_mode);
is_keyword = ts_parser__call_keyword_lex_fn(self);

if (
is_keyword &&
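The rewritten comment and condition above detect 'extra' tokens by asking the language whether shifting the token would change the parse state. The same probe can be expressed through the public Rust API; a sketch under the assumption that the caller already has a parse state and token symbol id in hand:

```rust
use tree_sitter::Language;

/// True when shifting `symbol` in `state` leaves the parse state unchanged,
/// which is the property of grammar-level 'extra' tokens that made the
/// infinite loop on empty external tokens possible.
fn is_extra_like(language: &Language, state: u16, symbol: u16) -> bool {
    language.next_state(state, symbol) == state
}
```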
@@ -1,5 +1,4 @@
#include "tree_sitter/api.h"
#include "./alloc.h"
#include "./tree_cursor.h"
#include "./language.h"
#include "./tree.h"

@@ -212,7 +211,6 @@ bool ts_tree_cursor_goto_first_child(TSTreeCursor *self) {
return false;
}
}
return false;
}

TreeCursorStep ts_tree_cursor_goto_last_child_internal(TSTreeCursor *_self) {

@@ -253,7 +251,6 @@ bool ts_tree_cursor_goto_last_child(TSTreeCursor *self) {
return false;
}
}
return false;
}

static inline int64_t ts_tree_cursor_goto_first_child_for_byte_and_point(
@@ -16,6 +16,14 @@
#include <wasm.h>
#include <wasmtime.h>

#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable : 4100)
#elif defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#endif

#define array_len(a) (sizeof(a) / sizeof(a[0]))

// The following symbols from the C and C++ standard libraries are available

@@ -159,8 +167,6 @@ typedef struct {
int32_t eof;
} LexerInWasmMemory;

static volatile uint32_t NEXT_LANGUAGE_ID;

// Linear memory layout:
// [ <-- stack | stdlib statics | lexer | language statics --> | serialization_buffer | heap --> ]
#define MAX_MEMORY_SIZE (128 * 1024 * 1024 / MEMORY_PAGE_SIZE)

@@ -169,7 +175,7 @@ static volatile uint32_t NEXT_LANGUAGE_ID;
* WasmDylinkMemoryInfo
***********************/

static uint8_t read_u8(const uint8_t **p, const uint8_t *end) {
static uint8_t read_u8(const uint8_t **p) {
return *(*p)++;
}

@@ -204,7 +210,7 @@ static bool wasm_dylink_info__parse(
p += 4;

while (p < end) {
uint8_t section_id = read_u8(&p, end);
uint8_t section_id = read_u8(&p);
uint32_t section_length = read_uleb128(&p, end);
const uint8_t *section_end = p + section_length;
if (section_end > end) return false;

@@ -217,7 +223,7 @@ static bool wasm_dylink_info__parse(
if (name_length == 8 && memcmp(p, "dylink.0", 8) == 0) {
p = name_end;
while (p < section_end) {
uint8_t subsection_type = read_u8(&p, section_end);
uint8_t subsection_type = read_u8(&p);
uint32_t subsection_size = read_uleb128(&p, section_end);
const uint8_t *subsection_end = p + subsection_size;
if (subsection_end > section_end) return false;

@@ -545,6 +551,7 @@ TSWasmStore *ts_wasm_store_new(TSWasmEngine *engine, TSWasmError *wasm_error) {
wasm_trap_t *trap = NULL;
wasm_message_t message = WASM_EMPTY_VEC;
wasm_exporttype_vec_t export_types = WASM_EMPTY_VEC;
wasm_importtype_vec_t import_types = WASM_EMPTY_VEC;
wasmtime_extern_t *imports = NULL;
wasmtime_module_t *stdlib_module = NULL;
wasm_memorytype_t *memory_type = NULL;

@@ -660,11 +667,10 @@ TSWasmStore *ts_wasm_store_new(TSWasmEngine *engine, TSWasmError *wasm_error) {
}

// Retrieve the stdlib module's imports.
wasm_importtype_vec_t import_types = WASM_EMPTY_VEC;
wasmtime_module_imports(stdlib_module, &import_types);

// Find the initial number of memory pages needed by the stdlib.
const wasm_memorytype_t *stdlib_memory_type;
const wasm_memorytype_t *stdlib_memory_type = NULL;
for (unsigned i = 0; i < import_types.size; i++) {
wasm_importtype_t *import_type = import_types.data[i];
const wasm_name_t *import_name = wasm_importtype_name(import_type);

@@ -1545,13 +1551,22 @@ static void ts_wasm_store__call(
}
}

// The data fields of TSLexer, without the function pointers.
//
// This portion of the struct needs to be copied in and out
// of wasm memory before and after calling a scan function.
typedef struct {
int32_t lookahead;
TSSymbol result_symbol;
} TSLexerDataPrefix;

static bool ts_wasm_store__call_lex_function(TSWasmStore *self, unsigned function_index, TSStateId state) {
wasmtime_context_t *context = wasmtime_store_context(self->store);
uint8_t *memory_data = wasmtime_memory_data(context, &self->memory);
memcpy(
&memory_data[self->lexer_address],
&self->current_lexer->lookahead,
sizeof(self->current_lexer->lookahead)
self->current_lexer,
sizeof(TSLexerDataPrefix)
);

wasmtime_val_raw_t args[2] = {

@@ -1563,9 +1578,9 @@ static bool ts_wasm_store__call_lex_function(TSWasmStore *self, unsigned functio
bool result = args[0].i32;

memcpy(
&self->current_lexer->lookahead,
self->current_lexer,
&memory_data[self->lexer_address],
sizeof(self->current_lexer->lookahead) + sizeof(self->current_lexer->result_symbol)
sizeof(TSLexerDataPrefix)
);
return result;
}

@@ -1610,8 +1625,8 @@ bool ts_wasm_store_call_scanner_scan(

memcpy(
&memory_data[self->lexer_address],
&self->current_lexer->lookahead,
sizeof(self->current_lexer->lookahead)
self->current_lexer,
sizeof(TSLexerDataPrefix)
);

uint32_t valid_tokens_address =

@@ -1626,9 +1641,9 @@ bool ts_wasm_store_call_scanner_scan(
if (self->has_error) return false;

memcpy(
&self->current_lexer->lookahead,
self->current_lexer,
&memory_data[self->lexer_address],
sizeof(self->current_lexer->lookahead) + sizeof(self->current_lexer->result_symbol)
sizeof(TSLexerDataPrefix)
);
return args[0].i32;
}

@@ -1743,6 +1758,12 @@ void ts_wasm_language_release(const TSLanguage *self) {
}
}

#ifdef _MSC_VER
#pragma warning(pop)
#elif defined(__GNUC__) || defined(__clang__)
#pragma GCC diagnostic pop
#endif

#else

// If the WASM feature is not enabled, define dummy versions of all of the
@@ -1,62 +0,0 @@
#!/usr/bin/env bash

set -e

function usage {
cat <<EOF
USAGE

$0 [-h] [-l language-name] [-e example-file-name] [-r repetition-count]

OPTIONS

-h print this message

-l run only the benchmarks for the given language

-e run only the benchmarks that parse the example file with the given name

-r parse each sample the given number of times (default 5)

-g debug

EOF
}

mode=normal

while getopts "hgl:e:r:" option; do
case ${option} in
h)
usage
exit
;;
g)
mode=debug
;;
e)
export TREE_SITTER_BENCHMARK_EXAMPLE_FILTER=${OPTARG}
;;
l)
export TREE_SITTER_BENCHMARK_LANGUAGE_FILTER=${OPTARG}
;;
r)
export TREE_SITTER_BENCHMARK_REPETITION_COUNT=${OPTARG}
;;
*)
usage
exit 1
;;
esac
done

if [[ $mode == debug ]]; then
test_binary=$(
cargo bench benchmark -p tree-sitter-cli --no-run --message-format=json 2> /dev/null |
jq -rs 'map(select(.target.name == "benchmark" and .executable))[0].executable'
)
env | grep TREE_SITTER
echo "$test_binary"
else
exec cargo bench benchmark -p tree-sitter-cli
fi
@@ -1,4 +0,0 @@
@echo off

cargo bench benchmark -p tree-sitter-cli
exit /b %errorlevel%
@@ -1,76 +0,0 @@
#!/usr/bin/env bash

# shellcheck disable=SC2086

set -e

if [[ $(uname -s) != Linux ]]; then
printf 'Fuzzing is only supported on Linux\n' >&2
exit 1
fi

CC=${CC:-clang}
CXX=${CXX:-clang++}

default_fuzz_flags=-fsanitize=fuzzer,address,undefined

export CFLAGS="$default_fuzz_flags $CFLAGS"
export CXXFLAGS="$default_fuzz_flags $CXXFLAGS"

make CC="$CC" CXX="$CXX" libtree-sitter.a

if [[ -z $* ]]; then
mapfile -t languages < <(ls test/fixtures/grammars)
else
languages=("$@")
fi

mkdir -p test/fuzz/out

for lang in "${languages[@]}"; do
# skip typescript & php
if [[ $lang == typescript || $lang == php ]]; then
continue
fi
printf 'Building %s fuzzer...\n' "$lang"
lang_dir="test/fixtures/grammars/$lang"
lang_grammar="${lang_dir}/src/grammar.json"

# The following assumes each language is implemented as src/parser.c plus an
# optional scanner in src/scanner.c
objects=()

lang_scanner="${lang_dir}/src/scanner"
if [[ -f "${lang_scanner}.c" ]]; then
$CC $CFLAGS -std=c11 -g -O1 -I "${lang_dir}/src" -c "${lang_scanner}.c" -o "${lang_scanner}.o"
objects+=("${lang_scanner}.o")
fi

# Compiling with -O0 speeds up the build dramatically
$CC $CFLAGS -g -O0 -I "${lang_dir}/src" "${lang_dir}/src/parser.c" -c -o "${lang_dir}/src/parser.o"
objects+=("${lang_dir}/src/parser.o")

highlights_filename="${lang_dir}/queries/highlights.scm"
if [[ -f "${highlights_filename}" ]]; then
ts_lang_query_filename="${lang}.scm"
cp "${highlights_filename}" "test/fuzz/out/${ts_lang_query_filename}"
else
ts_lang_query_filename=""
fi

ts_lang="tree_sitter_$(jq -r .name "$lang_grammar")"
$CXX $CXXFLAGS -std=c++11 -Ilib/include \
-D TS_LANG="$ts_lang" \
-D TS_LANG_QUERY_FILENAME="\"${ts_lang_query_filename}\"" \
test/fuzz/fuzzer.cc \
"${objects[@]}" \
libtree-sitter.a \
-o "test/fuzz/out/${lang}_fuzzer"

jq '
[ ..
| if .type? == "STRING" or (.type? == "ALIAS" and .named? == false) then .value else empty end
| select(test("\\S") and length == utf8bytelength)
] | unique | .[]
' "$lang_grammar" | sort > "test/fuzz/out/${lang}.dict"
done
@@ -1,28 +0,0 @@
#!/usr/bin/env bash

set -e

declare -a EXPORT_FLAGS
while read -r -d, function; do
EXPORT_FLAGS+=("-Wl,--export=${function:1:-1}")
done < lib/src/wasm/stdlib-symbols.txt

target/wasi-sdk-21.0/bin/clang-17 \
-o stdlib.wasm \
-Os \
-fPIC \
-Wl,--no-entry \
-Wl,--stack-first \
-Wl,-z -Wl,stack-size=65536 \
-Wl,--import-undefined \
-Wl,--import-memory \
-Wl,--import-table \
-Wl,--strip-debug \
-Wl,--export=reset_heap \
-Wl,--export=__wasm_call_ctors \
-Wl,--export=__stack_pointer \
"${EXPORT_FLAGS[@]}" \
lib/src/wasm/stdlib.c

xxd -C -i stdlib.wasm > lib/src/wasm/wasm-stdlib.h
mv stdlib.wasm target/
@@ -1,12 +0,0 @@
#!/usr/bin/env bash

src_dir=lib/src
allocation_functions=(malloc calloc realloc free)

for function in "${allocation_functions[@]}"; do
usages=$(grep -n -E "\b${function}\(" -r $src_dir --exclude alloc.c --exclude stdlib.c)
if [[ -n $usages ]]; then
printf 'The %s function should not be called directly, but is called here:\n%s\n' "$function" "$usages" >&2
exit 1
fi
done
@@ -1,26 +0,0 @@
#!/usr/bin/env bash

set -e

EMSDK_DIR=target/emsdk
EMSCRIPTEN_VERSION=$(< cli/loader/emscripten-version)

{
if [[ ! -f $EMSDK_DIR/emsdk ]]; then
printf 'Downloading emscripten SDK...\n'
git clone https://github.com/emscripten-core/emsdk.git $EMSDK_DIR
fi

cd $EMSDK_DIR

printf 'Updating emscripten SDK...\n'
git reset --hard
git pull
./emsdk list

printf 'Installing emscripten...\n'
./emsdk install "$EMSCRIPTEN_VERSION"

printf 'Activating emscripten...\n'
./emsdk activate "$EMSCRIPTEN_VERSION"
} >&2
@@ -1,37 +0,0 @@
#!/usr/bin/env bash

set -e

GRAMMARS_DIR="$PWD/test/fixtures/grammars"

fetch_grammar() {
local grammar=$1
local ref=$2
local grammar_dir="${GRAMMARS_DIR}/${grammar}"
local grammar_url=https://github.com/tree-sitter/tree-sitter-${grammar}

printf 'Updating %s grammar...\n' "$grammar"

if [[ ! -d "$grammar_dir" ]]; then
git clone "$grammar_url" "$grammar_dir" --depth=1
fi

git -C "$grammar_dir" fetch origin "$ref" --depth=1
git -C "$grammar_dir" reset --hard FETCH_HEAD
}

fetch_grammar bash master
fetch_grammar c master
fetch_grammar cpp master
fetch_grammar embedded-template master
fetch_grammar go master
fetch_grammar html master
fetch_grammar java master
fetch_grammar javascript master
fetch_grammar jsdoc master
fetch_grammar json master
fetch_grammar php master
fetch_grammar python master
fetch_grammar ruby master
fetch_grammar rust master
fetch_grammar typescript master
@@ -1,32 +0,0 @@
@echo off

call:fetch_grammar bash master
call:fetch_grammar c master
call:fetch_grammar cpp master
call:fetch_grammar embedded-template master
call:fetch_grammar go master
call:fetch_grammar html master
call:fetch_grammar java master
call:fetch_grammar javascript master
call:fetch_grammar jsdoc master
call:fetch_grammar json master
call:fetch_grammar php master
call:fetch_grammar python master
call:fetch_grammar ruby master
call:fetch_grammar rust master
call:fetch_grammar typescript master
exit /B 0

:fetch_grammar
setlocal
set grammar_dir=test\fixtures\grammars\%~1
set grammar_url=https://github.com/tree-sitter/tree-sitter-%~1
set grammar_branch=%~2
@if not exist %grammar_dir% (
git clone %grammar_url% %grammar_dir% --depth=1
)
pushd %grammar_dir%
git fetch origin %2 --depth=1
git reset --hard FETCH_HEAD
popd
exit /B 0
@@ -1,44 +0,0 @@
#!/bin/bash

output_path=lib/binding_rust/bindings.rs
header_path=lib/include/tree_sitter/api.h
no_derive_copy=(
TSInput
TSLanguage
TSLogger
TSLookaheadIterator
TSParser
TSTree
TSQuery
TSQueryCursor
TSQueryCapture
TSQueryMatch
TSQueryPredicateStep
)
no_copy=$(IFS='|'; echo "${no_derive_copy[*]}")

file_version=$(head -n1 "$output_path" | cut -d' ' -f6)
tool_version=$(bindgen --version | cut -d' ' -f2)
higher_version=$(printf '%s\n' "$file_version" "$tool_version" | sort -V | tail -n1)

if [[ "$higher_version" != "$tool_version" ]]; then
printf 'Latest used bindgen version was %s\n' "$file_version" >&2
printf 'Currently installed bindgen CLI version is %s\n\n' "$tool_version" >&2
# shellcheck disable=SC2016
printf 'You must upgrade bindgen CLI first with `cargo install bindgen-cli`\n' >&2
exit 1
fi

bindgen \
--no-layout-tests \
--allowlist-type '^TS.*' \
--allowlist-function '^ts_.*' \
--allowlist-var '^TREE_SITTER.*' \
--blocklist-type '^__.*' \
--no-prepend-enum-name \
--no-copy "$no_copy" \
--use-core \
"$header_path" \
-- \
-D TREE_SITTER_FEATURE_WASM \
> "$output_path"
@@ -1,27 +0,0 @@
#!/usr/bin/env bash

set -e

ROOT_DIR="$PWD"
GRAMMARS_DIR="$ROOT_DIR/test/fixtures/grammars"

if [[ $CI == true ]]; then
set -x
else
cargo build --release
TREE_SITTER="$ROOT_DIR/target/release/tree-sitter"
fi

filter_grammar_name="$1"

while read -r grammar_file; do
grammar_dir="${grammar_file%/*}"
grammar_name="${grammar_dir##*/}"

if [[ -n $filter_grammar_name && "$filter_grammar_name" != "$grammar_name" ]]; then
continue
fi

printf 'Regenerating %s parser\n' "$grammar_name"
(cd "$grammar_dir" && "$TREE_SITTER" generate src/grammar.json --abi=latest)
done < <(find "$GRAMMARS_DIR" -name grammar.js -not -path '*/node_modules/*')
@@ -1,33 +0,0 @@
#!/usr/bin/env bash

set -e

ROOT_DIR="$PWD"
GRAMMARS_DIR="$ROOT_DIR/test/fixtures/grammars"

if [[ $CI == true ]]; then
set -x
else
cargo build --release
TREE_SITTER="$ROOT_DIR/target/release/tree-sitter"
fi

build_wasm_args=
if [[ $1 == --docker ]]; then
build_wasm_args=--docker
shift
fi

filter_grammar_name="$1"

while read -r grammar_file; do
grammar_dir="${grammar_file%/*}"
grammar_name="${grammar_dir##*/}"

if [[ -n $filter_grammar_name && "$filter_grammar_name" != "$grammar_name" ]]; then
continue
fi

printf 'Compiling %s parser to wasm\n' "$grammar_name"
"$TREE_SITTER" build --wasm $build_wasm_args -o "target/release/tree-sitter-${grammar_name}.wasm" "$grammar_dir"
done < <(find "$GRAMMARS_DIR" -name grammar.js -not -path '*/node_modules/*')
@@ -1,13 +0,0 @@
@echo off

setlocal EnableDelayedExpansion
set tree_sitter="%cd%\target\release\tree-sitter"

for /f "tokens=*" %%f in ('dir test\fixtures\grammars\grammar.js /b/s') do (
pushd "%%f\.."
echo Regenerating parser !cd!
%tree_sitter% generate src\grammar.json --abi=latest
popd
)

exit /B 0
@@ -1,36 +0,0 @@
#!/usr/bin/env bash

# Usage:
# script/heap-profile
#
# Parse an example source file and record memory usage
#
# Dependencies:
# * `pprof` executable: https://github.com/google/pprof
# * `gperftools` package: https://github.com/gperftools/gperftools

set -e

GRAMMARS_DIR="$PWD/test/fixtures/grammars"

# Build the library
make libtree-sitter.a

# Build the heap-profiling harness
clang++ \
-Wno-reorder-init-list \
-Wno-c99-designator \
-I lib/include \
-I "$GRAMMARS_DIR" \
-D GRAMMARS_DIR="\"${GRAMMARS_DIR}/\"" \
test/profile/heap.cc \
-l tcmalloc \
libtree-sitter.a \
-o target/heap-profile

# Run the harness with heap profiling enabled.
export HEAPPROFILE="$PWD/profile"
target/heap-profile "$@"

# Extract statistics using pprof.
pprof -top -cum profile.0001.heap
@@ -1,35 +0,0 @@
#!/bin/bash

if (($# < 3)); then
echo "usage: $0 <language> <halt|recover> <testcase> [libFuzzer args...]" >&2
exit 1
fi

set -eu

export ASAN_OPTIONS=quarantine_size_mb=10:detect_leaks=1:symbolize=1
export UBSAN=print_stacktrace=1:halt_on_error=1:symbolize=1

# check if CI env var exists
if [[ -z ${CI:-} ]]; then
declare -A mode_config=(
[halt]='-timeout=1 -rss_limit_mb=2048'
[recover]='-timeout=10 -rss_limit_mb=2048'
)
else
declare -A mode_config=(
[halt]='-max_total_time=120 -timeout=1 -rss_limit_mb=2048'
[recover]='-time=120 -timeout=10 -rss_limit_mb=2048'
)
fi

lang="$1"
shift
mode="$1"
shift
testcase="$1"
shift
# Treat remainder of arguments as libFuzzer arguments

# shellcheck disable=SC2086
test/fuzz/out/${lang}_fuzzer ${mode_config[$mode]} -runs=1 "$testcase" "$@"
@@ -1,42 +0,0 @@
#!/usr/bin/env bash

if (($# < 2)); then
echo "usage: $0 <language> <halt|recover> [libFuzzer args...]" >&2
exit 1
fi

set -eu

export ASAN_OPTIONS=quarantine_size_mb=10:detect_leaks=1:symbolize=1
export UBSAN=print_stacktrace=1:halt_on_error=1:symbolize=1

# check if CI env var exists
if [[ -z ${CI:-} ]]; then
declare -A mode_config=(
[halt]='-timeout=1 -rss_limit_mb=2048'
[recover]='-timeout=10 -rss_limit_mb=2048'
)
else
declare -A mode_config=(
[halt]='-max_total_time=120 -timeout=1 -rss_limit_mb=2048'
[recover]='-time=120 -timeout=10 -rss_limit_mb=2048'
)
fi

lang="$1"
shift
mode="$1"
shift
# Treat remainder of arguments as libFuzzer arguments

# Fuzzing logs and testcases are always written to `pwd`, so `cd` there first
results="$PWD/test/fuzz/out/fuzz-results/${lang}"
mkdir -p "${results}"
cd "${results}"

# Create a corpus directory, so new discoveries are stored on disk. These will
# then be loaded on subsequent fuzzing runs
mkdir -p corpus

# shellcheck disable=SC2086
../../${lang}_fuzzer -dict="../../${lang}.dict" -artifact_prefix=${lang}_ -max_len=2048 ${mode_config[$mode]} corpus "$@"
@@ -1,29 +0,0 @@
#!/bin/bash

root=$PWD
cd docs

bundle exec jekyll serve "$@" &

bundle exec ruby <<RUBY &
require "listen"

def copy_wasm_files
`cp $root/lib/binding_web/tree-sitter.{js,wasm} $root/docs/assets/js/`
`cp $root/target/release/*.wasm $root/docs/assets/js/`
end

puts "Copying WASM files to docs folder..."
copy_wasm_files

puts "Watching release directory"
listener = Listen.to("$root/lib/binding_web", only: /^tree-sitter\.(js|wasm)$/, wait_for_delay: 2) do
puts "WASM files updated. Copying new files to docs folder..."
copy_wasm_files
end

listener.start
sleep
RUBY

wait
@@ -1,50 +0,0 @@
#!/usr/bin/env node

const {statSync} = require('fs');
const {execFileSync} = require('child_process');
const libPath = process.argv[2];

if (!libPath || libPath === '--help') {
console.log(`Usage: ${process.argv[1]} <dylib-path>`);
process.exit(0)
}

// Get total file size
const totalSize = statSync(libPath).size

// Dump symbols with addresses
const output = execFileSync(
'nm',
['-t', 'd', libPath],
{encoding: 'utf8'}
);

// Parse addresses
const addressEntries = [];
for (const line of output.split('\n')) {
const [address, _, name] = line.split(/\s+/);
if (address && name) {
addressEntries.push({name, address: parseInt(address)})
}
}

// Compute sizes by subtracting addresses
addressEntries.sort((a, b) => a.address - b.address);
const sizeEntries = addressEntries.map(({name, address}, i) => {
const next = addressEntries[i + 1] ? addressEntries[i + 1].address : totalSize;
const size = next - address;
return {name, size}
})

function formatSize(sizeInBytes) {
return sizeInBytes > 1024
? `${(sizeInBytes / 1024).toFixed(1)} kb`
: `${sizeInBytes} b`
}

// Display sizes
sizeEntries.sort((a, b) => b.size - a.size);
console.log('total'.padEnd(64, ' '), '\t', formatSize(totalSize));
for (const entry of sizeEntries) {
console.log(entry.name.padEnd(64, ' '), '\t', formatSize(entry.size));
}
101 script/test

@@ -1,101 +0,0 @@
#!/usr/bin/env bash

set -e

function usage {
cat <<EOF
USAGE

$0 [-adDg] [-s SEED] [-l LANGUAGE] [-e EXAMPLE]

OPTIONS

-h Print this message

-a Compile C code with the Clang address sanitizer

-e Run only the corpus tests whose name contain the given string

-i Run the given number of iterations of randomized tests (default 10)

-s Set the seed used to control random behavior

-d Print parsing log to stderr

-D Generate an SVG graph of parsing logs

-g Run the tests with a debugger

EOF
}

export RUST_BACKTRACE=full

mode=normal
test_flags=()

while getopts "adDghl:e:s:i:" option; do
case ${option} in
h)
usage
exit
;;
a)
export CFLAGS=-fsanitize=undefined,address

# When the Tree-sitter C library is compiled with the address sanitizer, the address sanitizer
# runtime library needs to be linked into the final test executable. When using Xcode clang,
# the Rust linker doesn't know where to find that library, so we need to specify linker flags directly.
runtime_dir=$(cc -print-runtime-dir)
if [[ $runtime_dir == */Xcode.app/* ]]; then
export RUSTFLAGS="-C link-arg=-L${runtime_dir} -C link-arg=-lclang_rt.asan_osx_dynamic -C link-arg=-Wl,-rpath,${runtime_dir}"
fi

# Specify a `--target` explicitly. This is required for address sanitizer support.
toolchain=$(rustup show active-toolchain)
toolchain_regex='(stable|beta|nightly)-([_a-z0-9-]+).*'
if [[ $toolchain =~ $toolchain_regex ]]; then
release=${BASH_REMATCH[1]}
current_target=${BASH_REMATCH[2]}
else
printf "Failed to parse toolchain '%s'\n" "$toolchain" >&2
fi

test_flags+=("--target=$current_target")
;;
e)
export TREE_SITTER_EXAMPLE=${OPTARG}
;;
s)
export TREE_SITTER_SEED=${OPTARG}
;;
i)
export TREE_SITTER_ITERATIONS=${OPTARG}
;;
d)
export TREE_SITTER_LOG=1
;;
D)
export TREE_SITTER_LOG_GRAPHS=1
;;
g)
mode=debug
;;
*)
usage
exit 1
;;
esac
done

shift $((OPTIND - 1))

if [[ ${mode} == debug ]]; then
test_binary=$(
cargo test "${test_flags[@]}" --no-run --message-format=json 2> /dev/null |
jq -rs 'map(select(.target.name == "tree-sitter-cli" and .executable))[0].executable'
)
lldb "${test_binary}" -- "$1"
else
cargo test "${test_flags[@]}" "$1" -- --nocapture
fi
@ -1,12 +0,0 @@
#!/usr/bin/env bash

set -e

cd lib/binding_web

if [[ ! -d node_modules/chai ]] || [[ ! -d node_modules/mocha ]]; then
  printf 'Installing test dependencies...\n'
  npm install
fi

node_modules/.bin/mocha
@ -1,10 +0,0 @@
@echo off

setlocal
set RUST_TEST_THREADS=1
set RUST_BACKTRACE=full
cargo test "%~1"
if %errorlevel% NEQ 0 (
  exit /b %errorlevel%
)
endlocal
@ -1,24 +0,0 @@
function scan_build {
  extra_args=()

  # AFAICT, in the trusty travis container the scan-build tool is from the 3.4
  # installation. Therefore, by default it will use clang-3.4 when analysing code,
  # which doesn't support '-std=c++14' (it is only available via '-std=c++1y').
  # Use the system-wide installed clang instead, which is 3.5 and does support
  # '-std=c++14'.
  extra_args+=("--use-analyzer=$(command -v clang)")

  # scan-build will try to guess which CXX should be used to compile the actual
  # code, which is usually g++ but we need g++5 in the CI. Explicitly pass
  # $CC/$CXX to scan-build if they are set in the environment.

  if [[ -n $CC ]]; then
    extra_args+=("--use-cc=$CC")
  fi

  if [[ -n $CXX ]]; then
    extra_args+=("--use-c++=$CXX")
  fi

  scan-build "${extra_args[@]}" --status-bugs -disable-checker deadcode.DeadStores "$@"
}
@ -1,256 +0,0 @@
# Errors

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
   fun:_ZN6option6Parser5parseEbPKNS_10DescriptorEiPPKcPNS_6OptionES8_ibi
   fun:_ZN6option6ParserC1EPKNS_10DescriptorEiPPcPNS_6OptionES7_ibi
   fun:_ZN6bandit6detail7optionsC1EiPPc
   fun:_ZN6bandit3runEiPPc
   fun:main
}

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
   fun:_ZN6option5Stats3addEbPKNS_10DescriptorEiPPKcib
   fun:_ZN6option5StatsC1EPKNS_10DescriptorEiPPcib
   fun:_ZN6bandit6detail7optionsC1EiPPc
   fun:_ZN6bandit3runEiPPc
   fun:main
}

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
   fun:_ZN6bandit6detail7optionsC2EiPPc
   fun:_ZN6bandit3runEiPPc
   fun:main
}

{
   <insert_a_suppression_name_here>
   Memcheck:Value8
   fun:_platform_memcmp
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}

{
   <insert_a_suppression_name_here>
   Memcheck:Addr1
   fun:_platform_memcmp
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE3_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE3_NS_9allocatorIS4_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}

{
   <insert_a_suppression_name_here>
   Memcheck:Value8
   fun:_platform_memcmp
   fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE3_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE3_NS_9allocatorIS4_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE1_NS_9allocatorIS4_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcI3$_0NS_9allocatorIS2_EEFvvEEclEv
   fun:_ZN6bandit3runERKNS_6detail7optionsERKNSt3__14listINS4_8functionIFvvEEENS4_9allocatorIS8_EEEERNS4_5dequeIPNS0_7contextENS9_ISG_EEEERNS0_8listenerE
   fun:_ZN6bandit3runEiPPc
}

{
   <insert_a_suppression_name_here>
   Memcheck:Value8
   fun:_platform_memcmp
   fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE1_NS_9allocatorIS4_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcI3$_0NS_9allocatorIS2_EEFvvEEclEv
}

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE4_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE4_NS_9allocatorIS4_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}

{
   <insert_a_suppression_name_here>
   Memcheck:Value8
   fun:_platform_memcmp
   fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE4_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE4_NS_9allocatorIS4_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}

{
   <insert_a_suppression_name_here>
   Memcheck:Cond
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}

{
   <insert_a_suppression_name_here>
   Memcheck:Addr1
   fun:_platform_memcmp
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}

{
   <insert_a_suppression_name_here>
   Memcheck:Value8
   fun:_platform_memcmp
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}

# Leaks

{
   <insert_a_suppression_name_here>
   Memcheck:Leak
   match-leak-kinds: possible
   fun:malloc_zone_malloc
   fun:_objc_copyClassNamesForImage
   fun:_ZL9protocolsv
   fun:_Z9readClassP10objc_classbb
   fun:gc_init
   fun:_ZL33objc_initializeClassPair_internalP10objc_classPKcS0_S0_
   fun:layout_string_create
   fun:_ZL12realizeClassP10objc_class
   fun:_ZL22copySwiftV1MangledNamePKcb
   fun:_ZL22copySwiftV1MangledNamePKcb
   fun:_ZL22copySwiftV1MangledNamePKcb
   fun:_ZL22copySwiftV1MangledNamePKcb
}
@ -15,6 +15,9 @@ license.workspace = true
keywords = ["incremental", "parsing", "syntax", "tagging"]
categories = ["parsing", "text-editors"]

[lints]
workspace = true

[lib]
crate-type = ["lib", "staticlib"]
9 test/fixtures/test_grammars/epsilon_external_extra_tokens/corpus.txt vendored Normal file
@ -0,0 +1,9 @@
==========================
A document
==========================

a b

---

(document)
11 test/fixtures/test_grammars/epsilon_external_extra_tokens/grammar.js vendored Normal file
@ -0,0 +1,11 @@
module.exports = grammar({
  name: 'epsilon_external_extra_tokens',

  extras: $ => [/\s/, $.comment],

  externals: $ => [$.comment],

  rules: {
    document: $ => seq('a', 'b'),
  }
});
33 test/fixtures/test_grammars/epsilon_external_extra_tokens/scanner.c vendored Normal file
@ -0,0 +1,33 @@
#include "tree_sitter/parser.h"

enum TokenType {
  COMMENT
};

void *tree_sitter_epsilon_external_extra_tokens_external_scanner_create(void) {
  return NULL;
}

bool tree_sitter_epsilon_external_extra_tokens_external_scanner_scan(
  void *payload,
  TSLexer *lexer,
  const bool *valid_symbols
) {
  lexer->result_symbol = COMMENT;
  return true;
}

unsigned tree_sitter_epsilon_external_extra_tokens_external_scanner_serialize(
  void *payload,
  char *buffer
) {
  return 0;
}

void tree_sitter_epsilon_external_extra_tokens_external_scanner_deserialize(
  void *payload,
  const char *buffer,
  unsigned length
) {}

void tree_sitter_epsilon_external_extra_tokens_external_scanner_destroy(void *payload) {}
0 test/fixtures/test_grammars/get_col_eof/corpus.txt vendored Normal file
11 test/fixtures/test_grammars/get_col_eof/grammar.js vendored Normal file
@ -0,0 +1,11 @@
module.exports = grammar({
  name: "get_col_eof",

  externals: $ => [
    $.char
  ],

  rules: {
    source_file: $ => repeat($.char),
  }
});
34 test/fixtures/test_grammars/get_col_eof/scanner.c vendored Normal file
@ -0,0 +1,34 @@
#include "tree_sitter/parser.h"

enum TokenType { CHAR };

void *tree_sitter_get_col_eof_external_scanner_create(void) { return NULL; }

void tree_sitter_get_col_eof_external_scanner_destroy(void *scanner) {}

unsigned tree_sitter_get_col_eof_external_scanner_serialize(void *scanner,
                                                            char *buffer) {
  return 0;
}

void tree_sitter_get_col_eof_external_scanner_deserialize(void *scanner,
                                                          const char *buffer,
                                                          unsigned length) {}

bool tree_sitter_get_col_eof_external_scanner_scan(void *scanner,
                                                   TSLexer *lexer,
                                                   const bool *valid_symbols) {
  if (lexer->eof(lexer)) {
    return false;
  }

  if (valid_symbols[CHAR]) {
    lexer->advance(lexer, false);
    lexer->get_column(lexer);
    lexer->result_symbol = CHAR;
    lexer->mark_end(lexer);
    return true;
  }

  return false;
}
10 test/fixtures/test_grammars/next_sibling_from_zwt/corpus.txt vendored Normal file
@ -0,0 +1,10 @@
===========================
missing c node
===========================

abdef

---

(source
  (MISSING "c"))
22 test/fixtures/test_grammars/next_sibling_from_zwt/grammar.js vendored Normal file
@ -0,0 +1,22 @@
module.exports = grammar({
  name: "next_sibling_from_zwt",
  extras: $ => [
    /\s|\\\r?\n/,
  ],

  rules: {
    source: $ => seq(
      'a',
      $._bc,
      'd',
      'e',
      'f',
    ),

    _bc: $ => seq(
      'b',
      'c',
    ),
  }
});
@ -1,43 +0,0 @@
# Fuzzing tree-sitter

The tree-sitter fuzzing support requires 1) the `libFuzzer` runtime library and 2) a recent version of clang.

## libFuzzer

The main fuzzing logic is implemented by `libFuzzer`, which is part of the compiler-rt project but is not shipped by distros. `libFuzzer` will need to be built from source, e.g.:

```
cd ~/src
git clone https://github.com/llvm-mirror/compiler-rt
cd compiler-rt/lib/fuzzer
./build.sh
```

## clang

Using libFuzzer requires at least version 7 of `clang` and may _not_ work with your system-installed version. If your system-installed version is too old, the easiest way to get started is to use the version provided by the Chromium team. Instructions are available at [libFuzzer.info](http://libfuzzer.info).

The fuzzers can then be built with:
```
export CLANG_DIR=$HOME/src/third_party/llvm-build/Release+Asserts/bin
CC="$CLANG_DIR/clang" CXX="$CLANG_DIR/clang++" LINK="$CLANG_DIR/clang++" \
  LIB_FUZZER_PATH=$HOME/src/compiler-rt/lib/fuzzer/libFuzzer.a \
  ./script/build-fuzzers
```

This will generate a separate fuzzer for each grammar defined in `test/fixtures/grammars`, each instrumented with [AddressSanitizer](https://clang.llvm.org/docs/AddressSanitizer.html) and [UndefinedBehaviorSanitizer](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html). Individual fuzzers can be built with, for example, `./script/build-fuzzers python ruby`.

The `run-fuzzer` script handles running an individual fuzzer with a sensible default set of arguments:
```
./script/run-fuzzer <grammar-name> (halt|recover) <extra libFuzzer arguments...>
```

which will log information to stdout. Failing testcases and a fuzz corpus will be saved to `fuzz-results/<grammar-name>`. The most important extra `libFuzzer` options are `-jobs` and `-workers`, which allow parallel fuzzing. This can be done with, e.g.:
```
./script/run-fuzzer <grammar-name> halt -jobs=32 -workers=32
```

A failing testcase can be used to reproduce the crash by running:
```
./script/reproduce <grammar-name> (halt|recover) <path-to-testcase>
```
@ -1,79 +0,0 @@
#include <cassert>
#include <fstream>
#include "tree_sitter/api.h"

extern "C" const TSLanguage *TS_LANG();

static TSQuery *lang_query;

extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv) {
  if (TS_LANG_QUERY_FILENAME[0]) {
    // The query filename is relative to the fuzzing binary. Convert it
    // to an absolute path first
    auto binary_filename = std::string((*argv)[0]);
    auto binary_directory = binary_filename.substr(0, binary_filename.find_last_of("\\/"));
    auto lang_query_filename = binary_directory + "/" + TS_LANG_QUERY_FILENAME;

    auto f = std::ifstream(lang_query_filename);
    assert(f.good());
    std::string lang_query_source((std::istreambuf_iterator<char>(f)), std::istreambuf_iterator<char>());

    uint32_t error_offset = 0;
    TSQueryError error_type = TSQueryErrorNone;

    lang_query = ts_query_new(
      TS_LANG(),
      lang_query_source.c_str(),
      lang_query_source.size(),
      &error_offset,
      &error_type
    );

    assert(lang_query);
  }

  return 0;
}

extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
  const char *str = reinterpret_cast<const char *>(data);

  TSParser *parser = ts_parser_new();

  // This can fail if the language version doesn't match the runtime version
  bool language_ok = ts_parser_set_language(parser, TS_LANG());
  assert(language_ok);

  TSTree *tree = ts_parser_parse_string(parser, NULL, str, size);
  TSNode root_node = ts_tree_root_node(tree);

  if (lang_query != nullptr) {
    {
      TSQueryCursor *cursor = ts_query_cursor_new();

      ts_query_cursor_exec(cursor, lang_query, root_node);
      TSQueryMatch match;
      while (ts_query_cursor_next_match(cursor, &match)) {
      }

      ts_query_cursor_delete(cursor);
    }

    {
      TSQueryCursor *cursor = ts_query_cursor_new();

      ts_query_cursor_exec(cursor, lang_query, root_node);
      TSQueryMatch match;
      uint32_t capture_index;
      while (ts_query_cursor_next_capture(cursor, &match, &capture_index)) {
      }

      ts_query_cursor_delete(cursor);
    }
  }

  ts_tree_delete(tree);
  ts_parser_delete(parser);

  return 0;
}
@ -1,42 +0,0 @@
#include <fstream>
#include <string>
#include <cstdlib>
#include <tree_sitter/api.h>

extern "C" {
#include "javascript/src/parser.c"
#include "javascript/src/scanner.c"
}

#define LANGUAGE tree_sitter_javascript
#define SOURCE_PATH "javascript/examples/jquery.js"

int main() {
  TSParser *parser = ts_parser_new();
  if (!ts_parser_set_language(parser, LANGUAGE())) {
    fprintf(stderr, "Invalid language\n");
    exit(1);
  }

  const char *source_path = GRAMMARS_DIR SOURCE_PATH;

  printf("Parsing %s\n", source_path);

  std::ifstream source_file(source_path);
  if (!source_file.good()) {
    fprintf(stderr, "Invalid source path %s\n", source_path);
    exit(1);
  }

  std::string source_code(
    (std::istreambuf_iterator<char>(source_file)),
    std::istreambuf_iterator<char>()
  );

  TSTree *tree = ts_parser_parse_string(
    parser,
    NULL,
    source_code.c_str(),
    source_code.size()
  );
}
@ -11,10 +11,19 @@ keywords.workspace = true
categories.workspace = true
publish = false

[lints]
workspace = true

[dependencies]
anstyle.workspace = true
anyhow.workspace = true
bindgen = { version = "0.70.1" }
cc.workspace = true
clap.workspace = true
git2.workspace = true
indoc.workspace = true
toml.workspace = true
regex.workspace = true
semver.workspace = true
serde.workspace = true
serde_json.workspace = true
75 xtask/src/benchmark.rs Normal file
@ -0,0 +1,75 @@
use anyhow::Result;

use crate::{bail_on_err, Benchmark};

pub fn run(args: &Benchmark) -> Result<()> {
    if let Some(ref example) = args.example_file_name {
        std::env::set_var("TREE_SITTER_BENCHMARK_EXAMPLE_FILTER", example);
    }

    if let Some(ref language) = args.language {
        std::env::set_var("TREE_SITTER_BENCHMARK_LANGUAGE_FILTER", language);
    }

    if args.repetition_count != 5 {
        std::env::set_var(
            "TREE_SITTER_BENCHMARK_REPETITION_COUNT",
            args.repetition_count.to_string(),
        );
    }

    if args.debug {
        let output = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .arg("--no-run")
            .arg("--message-format=json")
            .spawn()?
            .wait_with_output()?;

        bail_on_err(&output, "Failed to run `cargo bench`")?;

        let json_output = serde_json::from_slice::<serde_json::Value>(&output.stdout)?;

        let test_binary = json_output
            .as_array()
            .ok_or_else(|| anyhow::anyhow!("Invalid JSON output"))?
            .iter()
            .find_map(|message| {
                if message
                    .get("target")
                    .and_then(|target| target.get("name"))
                    .and_then(|name| name.as_str())
                    .is_some_and(|name| name == "benchmark")
                    && message
                        .get("executable")
                        .and_then(|executable| executable.as_str())
                        .is_some()
                {
                    message
                        .get("executable")
                        .and_then(|executable| executable.as_str())
                } else {
                    None
                }
            })
            .ok_or_else(|| anyhow::anyhow!("Failed to find benchmark executable"))?;

        println!("{test_binary}");
    } else {
        let status = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .status()?;

        if !status.success() {
            anyhow::bail!("Failed to run `cargo bench`");
        }
    }

    Ok(())
}
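The `--message-format=json` stream scanned above is a sequence of JSON objects in which compiler-artifact messages carry a `target.name` and, for binaries, an `executable` path; the code picks the first message whose target is named `benchmark` and has an executable. A standalone sketch of that extraction against a simplified, hypothetical message:

```rust
use serde_json::json;

fn main() {
    // Simplified stand-in for one cargo JSON message; the real stream
    // contains many such objects with additional fields.
    let message = json!({
        "target": { "name": "benchmark" },
        "executable": "target/release/deps/benchmark-abc123"
    });

    // Mirror the chained lookups used above: match the target name first,
    // then read the executable path.
    let executable = message
        .get("target")
        .and_then(|target| target.get("name"))
        .and_then(|name| name.as_str())
        .filter(|&name| name == "benchmark")
        .and_then(|_| message.get("executable"))
        .and_then(|executable| executable.as_str());

    assert_eq!(executable, Some("target/release/deps/benchmark-abc123"));
}
```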
228 xtask/src/build_wasm.rs Normal file
@ -0,0 +1,228 @@
use std::{
    ffi::{OsStr, OsString},
    fmt::Write,
    fs,
    process::Command,
};

use anyhow::{anyhow, Result};

use crate::{bail_on_err, BuildWasm, EMSCRIPTEN_TAG};

#[derive(PartialEq, Eq)]
enum EmccSource {
    Native,
    Docker,
    Podman,
}

pub fn run_wasm(args: &BuildWasm) -> Result<()> {
    let mut emscripten_flags = vec!["-O3", "--minify", "0"];

    if args.debug {
        emscripten_flags.extend(["-s", "ASSERTIONS=1", "-s", "SAFE_HEAP=1", "-O0", "-g"]);
    }

    if args.verbose {
        emscripten_flags.extend(["-s", "VERBOSE=1", "-v"]);
    }

    let emcc_name = if cfg!(windows) { "emcc.bat" } else { "emcc" };

    // Order of preference: emscripten > docker > podman > error
    let source = if !args.docker && Command::new(emcc_name).output().is_ok() {
        EmccSource::Native
    } else if Command::new("docker")
        .arg("info")
        .output()
        .map_or(false, |out| out.status.success())
    {
        EmccSource::Docker
    } else if Command::new("podman")
        .arg("--version")
        .output()
        .map_or(false, |out| out.status.success())
    {
        EmccSource::Podman
    } else {
        return Err(anyhow!(
            "You must have either emcc, docker, or podman on your PATH to run this command"
        ));
    };

    let mut command = match source {
        EmccSource::Native => Command::new(emcc_name),
        EmccSource::Docker | EmccSource::Podman => {
            let mut command = match source {
                EmccSource::Docker => Command::new("docker"),
                EmccSource::Podman => Command::new("podman"),
                _ => unreachable!(),
            };
            command.args(["run", "--rm"]);

            // Mount the root directory as a volume, which is the repo root
            let mut volume_string = OsString::from(std::env::current_dir().unwrap());
            volume_string.push(":/src:Z");
            command.args([OsStr::new("--volume"), &volume_string]);

            // In case `docker` is an alias to `podman`, ensure that podman
            // mounts the current directory as writable by the container
            // user which has the same uid as the host user. Setting the
            // podman-specific variable is more reliable than attempting to
            // detect whether `docker` is an alias for `podman`.
            // see https://docs.podman.io/en/latest/markdown/podman-run.1.html#userns-mode
            command.env("PODMAN_USERNS", "keep-id");

            // Get the current user id so that files created in the docker container will have
            // the same owner.
            #[cfg(unix)]
            {
                #[link(name = "c")]
                extern "C" {
                    fn getuid() -> u32;
                }
                // don't need to set user for podman since PODMAN_USERNS=keep-id is already set
                if source == EmccSource::Docker {
                    let user_id = unsafe { getuid() };
                    command.args(["--user", &user_id.to_string()]);
                }
            };

            // Run `emcc` in a container using the `emscripten-slim` image
            command.args([EMSCRIPTEN_TAG, "emcc"]);
            command
        }
    };

    fs::create_dir_all("target/scratch").unwrap();

    let exported_functions = concat!(
        include_str!("../../lib/src/wasm/stdlib-symbols.txt"),
        include_str!("../../lib/binding_web/exports.txt")
    )
    .replace('"', "")
    .lines()
    .fold(String::new(), |mut output, line| {
        let _ = write!(output, "_{line}");
        output
    })
    .trim_end_matches(',')
    .to_string();

    let exported_functions = format!("EXPORTED_FUNCTIONS={exported_functions}");
    let exported_runtime_methods = "EXPORTED_RUNTIME_METHODS=stringToUTF16,AsciiToString";

    emscripten_flags.extend([
        "-s",
        "WASM=1",
        "-s",
        "INITIAL_MEMORY=33554432",
        "-s",
        "ALLOW_MEMORY_GROWTH=1",
        "-s",
        "SUPPORT_BIG_ENDIAN=1",
        "-s",
        "MAIN_MODULE=2",
        "-s",
        "FILESYSTEM=0",
        "-s",
        "NODEJS_CATCH_EXIT=0",
        "-s",
        "NODEJS_CATCH_REJECTION=0",
        "-s",
        &exported_functions,
        "-s",
        exported_runtime_methods,
        "-fno-exceptions",
        "-std=c11",
        "-D",
        "fprintf(...)=",
        "-D",
        "NDEBUG=",
        "-D",
        "_POSIX_C_SOURCE=200112L",
        "-D",
        "_DEFAULT_SOURCE=",
        "-I",
        "lib/src",
        "-I",
        "lib/include",
        "--js-library",
        "lib/binding_web/imports.js",
        "--pre-js",
        "lib/binding_web/prefix.js",
        "--post-js",
        "lib/binding_web/binding.js",
        "--post-js",
        "lib/binding_web/suffix.js",
        "lib/src/lib.c",
        "lib/binding_web/binding.c",
        "-o",
        "target/scratch/tree-sitter.js",
    ]);

    bail_on_err(
        &command.args(emscripten_flags).spawn()?.wait_with_output()?,
        "Failed to compile the Tree-sitter WASM library",
    )?;

    fs::rename(
        "target/scratch/tree-sitter.js",
        "lib/binding_web/tree-sitter.js",
    )?;

    fs::rename(
        "target/scratch/tree-sitter.wasm",
        "lib/binding_web/tree-sitter.wasm",
    )?;

    Ok(())
}

pub fn run_wasm_stdlib() -> Result<()> {
    let export_flags = include_str!("../../lib/src/wasm/stdlib-symbols.txt")
        .lines()
        .map(|line| format!("-Wl,--export={}", &line[1..line.len() - 1]))
        .collect::<Vec<String>>();

    let mut command = Command::new("target/wasi-sdk-21.0/bin/clang-17");

    let output = command
        .args([
            "-o",
            "stdlib.wasm",
            "-Os",
            "-fPIC",
            "-Wl,--no-entry",
            "-Wl,--stack-first",
            "-Wl,-z",
            "-Wl,stack-size=65536",
            "-Wl,--import-undefined",
            "-Wl,--import-memory",
            "-Wl,--import-table",
            "-Wl,--strip-debug",
            "-Wl,--export=reset_heap",
            "-Wl,--export=__wasm_call_ctors",
            "-Wl,--export=__stack_pointer",
        ])
        .args(export_flags)
        .arg("lib/src/wasm/stdlib.c")
        .output()?;

    bail_on_err(&output, "Failed to compile the Tree-sitter WASM stdlib")?;

    let xxd = Command::new("xxd")
        .args(["-C", "-i", "stdlib.wasm"])
        .output()?;

    bail_on_err(
        &xxd,
        "Failed to run xxd on the compiled Tree-sitter WASM stdlib",
    )?;

    fs::write("lib/src/wasm/wasm-stdlib.h", xxd.stdout)?;

    fs::rename("stdlib.wasm", "target/stdlib.wasm")?;

    Ok(())
}
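The `fold` in `run_wasm` above builds the `EXPORTED_FUNCTIONS` value emcc expects: the symbol files list one quoted, comma-terminated name per line, so stripping quotes, prefixing each line with `_`, and concatenating yields an underscore-prefixed, comma-separated list. A standalone sketch of just that transformation:

```rust
use std::fmt::Write;

// Turn lines like `"malloc",` into a `_malloc,_free` style list; each source
// line keeps its trailing comma, so concatenation alone joins the names.
fn exported_functions(symbols: &str) -> String {
    symbols
        .replace('"', "")
        .lines()
        .fold(String::new(), |mut output, line| {
            let _ = write!(output, "_{line}");
            output
        })
        .trim_end_matches(',')
        .to_string()
}

fn main() {
    assert_eq!(exported_functions("\"malloc\",\n\"free\",\n"), "_malloc,_free");
}
```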
@ -1,11 +1,14 @@
use std::{cmp::Ordering, path::Path};

use anyhow::{anyhow, Result};
use git2::{DiffOptions, Repository};
use indoc::indoc;
use semver::{BuildMetadata, Prerelease, Version};
use toml::Value;

pub fn get_latest_tag(repo: &Repository) -> Result<String, Box<dyn std::error::Error>> {
use crate::BumpVersion;

pub fn get_latest_tag(repo: &Repository) -> Result<String> {
    let mut tags = repo
        .tag_names(None)?
        .into_iter()
@ -23,10 +26,10 @@ pub fn get_latest_tag(repo: &Repository) -> Result<String, Box<dyn std::error::E
    tags.last()
        .map(std::string::ToString::to_string)
        .ok_or_else(|| "No tags found".into())
        .ok_or_else(|| anyhow!("No tags found"))
}

pub fn bump_versions() -> Result<(), Box<dyn std::error::Error>> {
pub fn run(args: BumpVersion) -> Result<()> {
    let repo = Repository::open(".")?;
    let latest_tag = get_latest_tag(&repo)?;
    let current_version = Version::parse(&latest_tag)?;
@ -104,35 +107,39 @@ pub fn bump_versions() -> Result<(), Box<dyn std::error::Error>> {
        }
    }

    let mut version = current_version.clone();
    if should_increment_minor {
        version.minor += 1;
        version.patch = 0;
        version.pre = Prerelease::EMPTY;
        version.build = BuildMetadata::EMPTY;
    } else if should_increment_patch {
        version.patch += 1;
        version.pre = Prerelease::EMPTY;
        version.build = BuildMetadata::EMPTY;
    let next_version = if let Some(version) = args.version {
        version
    } else {
        return Err(format!("No source code changed since {current_version}").into());
    }
        let mut next_version = current_version.clone();
        if should_increment_minor {
            next_version.minor += 1;
            next_version.patch = 0;
            next_version.pre = Prerelease::EMPTY;
            next_version.build = BuildMetadata::EMPTY;
        } else if should_increment_patch {
            next_version.patch += 1;
            next_version.pre = Prerelease::EMPTY;
            next_version.build = BuildMetadata::EMPTY;
        } else {
            return Err(anyhow!(format!(
                "No source code changed since {current_version}"
            )));
        }
        next_version
    };

    println!("Bumping from {current_version} to {version}");
    update_crates(&current_version, &version)?;
    update_makefile(&version)?;
    update_cmake(&version)?;
    update_npm(&version)?;
    update_zig(&version)?;
    tag_next_version(&repo, &version)?;
    println!("Bumping from {current_version} to {next_version}");
    update_crates(&current_version, &next_version)?;
    update_makefile(&next_version)?;
    update_cmake(&next_version)?;
    update_npm(&next_version)?;
    update_zig(&next_version)?;
    tag_next_version(&repo, &next_version)?;

    Ok(())
}

fn tag_next_version(
    repo: &Repository,
    next_version: &Version,
) -> Result<(), Box<dyn std::error::Error>> {
fn tag_next_version(repo: &Repository, next_version: &Version) -> Result<()> {
    // first add the manifests

    let mut index = repo.index()?;
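For reference, the bump rule in the new code above follows semver conventions via the `semver` crate: a minor bump resets the patch number and clears any pre-release and build metadata. A minimal, self-contained sketch of that rule (the version string here is hypothetical):

```rust
use semver::{BuildMetadata, Prerelease, Version};

// A minor bump as performed above: increment the minor number, reset patch,
// and drop pre-release/build metadata.
fn bump_minor(current: &Version) -> Version {
    let mut next = current.clone();
    next.minor += 1;
    next.patch = 0;
    next.pre = Prerelease::EMPTY;
    next.build = BuildMetadata::EMPTY;
    next
}

fn main() {
    let current = Version::parse("0.22.6-pre.4").unwrap();
    assert_eq!(bump_minor(&current).to_string(), "0.23.0");
}
```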
@ -184,7 +191,7 @@ fn tag_next_version(
    Ok(())
}

fn update_makefile(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
fn update_makefile(next_version: &Version) -> Result<()> {
    let makefile = std::fs::read_to_string("Makefile")?;
    let makefile = makefile
        .lines()
@ -204,7 +211,7 @@ fn update_makefile(next_version: &Version) -> Result<(), Box<dyn std::error::Err
    Ok(())
}

fn update_cmake(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
fn update_cmake(next_version: &Version) -> Result<()> {
    let cmake = std::fs::read_to_string("lib/CMakeLists.txt")?;
    let cmake = cmake
        .lines()
@ -230,10 +237,7 @@ fn update_cmake(next_version: &Version) -> Result<(), Box<dyn std::error::Error>
    Ok(())
}

fn update_crates(
    current_version: &Version,
    next_version: &Version,
) -> Result<(), Box<dyn std::error::Error>> {
fn update_crates(current_version: &Version, next_version: &Version) -> Result<()> {
    let mut cmd = std::process::Command::new("cargo");
    cmd.arg("workspaces").arg("version");

@ -253,20 +257,20 @@ fn update_crates(
    let status = cmd.status()?;

    if !status.success() {
        return Err("Failed to update crates".into());
        return Err(anyhow!("Failed to update crates"));
    }

    Ok(())
}

fn update_npm(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
fn update_npm(next_version: &Version) -> Result<()> {
    for path in ["lib/binding_web/package.json", "cli/npm/package.json"] {
        let package_json =
            serde_json::from_str::<serde_json::Value>(&std::fs::read_to_string(path)?)?;

        let mut package_json = package_json
            .as_object()
            .ok_or("Invalid package.json")?
            .ok_or_else(|| anyhow!("Invalid package.json"))?
            .clone();
        package_json.insert(
            "version".to_string(),
@ -281,7 +285,7 @@ fn update_npm(next_version: &Version) -> Result<(), Box<dyn std::error::Error>>
    Ok(())
}

fn update_zig(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
fn update_zig(next_version: &Version) -> Result<()> {
    let zig = std::fs::read_to_string("build.zig.zon")?;

    let zig = zig
@ -303,7 +307,7 @@ fn update_zig(next_version: &Version) -> Result<(), Box<dyn std::error::Error>>
}

/// read Cargo.toml and get the version
fn fetch_workspace_version() -> Result<String, Box<dyn std::error::Error>> {
fn fetch_workspace_version() -> Result<String> {
    let cargo_toml = toml::from_str::<Value>(&std::fs::read_to_string("Cargo.toml")?)?;

    Ok(cargo_toml["workspace"]["package"]["version"]
33 xtask/src/clippy.rs Normal file
@ -0,0 +1,33 @@
use std::process::Command;

use anyhow::Result;

use crate::{bail_on_err, Clippy};

pub fn run(args: &Clippy) -> Result<()> {
    let mut clippy_command = Command::new("cargo");
    clippy_command.arg("+nightly").arg("clippy");

    if let Some(package) = args.package.as_ref() {
        clippy_command.args(["--package", package]);
    } else {
        clippy_command.arg("--workspace");
    }

    clippy_command
        .arg("--release")
        .arg("--all-targets")
        .arg("--all-features")
        .arg("--")
        .arg("-D")
        .arg("warnings");

    if args.fix {
        clippy_command.arg("--fix");
    }

    bail_on_err(
        &clippy_command.spawn()?.wait_with_output()?,
        "Clippy failed",
    )
}
119 xtask/src/fetch.rs Normal file
@ -0,0 +1,119 @@
use std::{path::Path, process::Command};

use anyhow::Result;

use crate::{bail_on_err, EMSCRIPTEN_VERSION};

pub fn run_fixtures() -> Result<()> {
    let grammars_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .join("test")
        .join("fixtures")
        .join("grammars");

    [
        ("bash", "master"),
        ("c", "master"),
        ("cpp", "master"),
        ("embedded-template", "master"),
        ("go", "master"),
        ("html", "master"),
        ("java", "master"),
        ("javascript", "master"),
        ("jsdoc", "master"),
        ("json", "master"),
        ("php", "master"),
        ("python", "master"),
        ("ruby", "master"),
        ("rust", "master"),
        ("typescript", "master"),
    ]
    .iter()
    .try_for_each(|(grammar, r#ref)| {
        let grammar_dir = grammars_dir.join(grammar);
        let grammar_url = format!("https://github.com/tree-sitter/tree-sitter-{grammar}");

        println!("Updating the {grammar} grammar...");

        if !grammar_dir.exists() {
            let mut command = Command::new("git");
            command.args([
                "clone",
                "--depth",
                "1",
                &grammar_url,
                &grammar_dir.to_string_lossy(),
            ]);
            bail_on_err(
                &command.spawn()?.wait_with_output()?,
                "Failed to clone the {grammar} grammar",
            )?;
        }

        std::env::set_current_dir(&grammar_dir)?;

        let mut command = Command::new("git");
        command.args(["fetch", "origin", r#ref, "--depth", "1"]);
        bail_on_err(
            &command.spawn()?.wait_with_output()?,
            "Failed to fetch the {grammar} grammar",
        )?;

        let mut command = Command::new("git");
        command.args(["reset", "--hard", "FETCH_HEAD"]);
        bail_on_err(
            &command.spawn()?.wait_with_output()?,
            "Failed to reset the {grammar} grammar",
        )?;

        Ok(())
    })
}

pub fn run_emscripten() -> Result<()> {
    let emscripten_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .join("target")
        .join("emsdk");

    if emscripten_dir.exists() {
        println!("Emscripten SDK already exists");
        return Ok(());
    }
    println!("Cloning the Emscripten SDK...");

    let mut command = Command::new("git");
    command.args([
        "clone",
        "https://github.com/emscripten-core/emsdk.git",
        &emscripten_dir.to_string_lossy(),
    ]);
    bail_on_err(
        &command.spawn()?.wait_with_output()?,
        "Failed to clone the Emscripten SDK",
    )?;

    std::env::set_current_dir(&emscripten_dir)?;

    let emsdk = if cfg!(windows) {
        "emsdk.bat"
    } else {
        "./emsdk"
    };

    let mut command = Command::new(emsdk);
    command.args(["install", EMSCRIPTEN_VERSION]);
    bail_on_err(
        &command.spawn()?.wait_with_output()?,
        "Failed to install Emscripten",
    )?;

    let mut command = Command::new(emsdk);
    command.args(["activate", EMSCRIPTEN_VERSION]);
    bail_on_err(
        &command.spawn()?.wait_with_output()?,
        "Failed to activate Emscripten",
    )
}
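`run_fixtures` above pins each grammar to a ref without downloading history: a shallow clone, then `git fetch origin <ref> --depth 1` followed by `git reset --hard FETCH_HEAD`. A minimal sketch of that pinning step in isolation (the directory path here is hypothetical):

```rust
use std::process::Command;

// Shallow-pin an existing checkout to a ref, as run_fixtures does above:
// fetch only the requested ref, then hard-reset the working tree to it.
fn pin_to_ref(dir: &str, git_ref: &str) -> std::io::Result<()> {
    let fetch = Command::new("git")
        .current_dir(dir)
        .args(["fetch", "origin", git_ref, "--depth", "1"])
        .status()?;
    assert!(fetch.success(), "git fetch failed");

    let reset = Command::new("git")
        .current_dir(dir)
        .args(["reset", "--hard", "FETCH_HEAD"])
        .status()?;
    assert!(reset.success(), "git reset failed");
    Ok(())
}

fn main() -> std::io::Result<()> {
    pin_to_ref("test/fixtures/grammars/javascript", "master")
}
```

Using `current_dir` per command, as in this sketch, also avoids mutating the process-wide working directory the way `set_current_dir` does.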
118 xtask/src/generate.rs Normal file
@ -0,0 +1,118 @@
use std::{ffi::OsStr, fs, process::Command};

use anyhow::{Context, Result};

use crate::{bail_on_err, GenerateFixtures};

const HEADER_PATH: &str = "include/tree_sitter/api.h";

pub fn run_fixtures(args: &GenerateFixtures) -> Result<()> {
    let output = std::process::Command::new("cargo")
        .args(["build", "--release"])
        .spawn()?
        .wait_with_output()?;
    bail_on_err(&output, "Failed to run cargo build")?;

    let tree_sitter_binary = std::env::current_dir()?
        .join("target")
        .join("release")
        .join("tree-sitter");

    let grammars_dir = std::env::current_dir()?
        .join("test")
        .join("fixtures")
        .join("grammars");

    for grammar_file in find_grammar_files(grammars_dir.to_str().unwrap()).flatten() {
        let grammar_dir = grammar_file.parent().unwrap();
        let grammar_name = grammar_dir.file_name().and_then(OsStr::to_str).unwrap();

        println!(
            "Regenerating {grammar_name} parser{}",
            if args.wasm { " to wasm" } else { "" }
        );

        if args.wasm {
            let mut cmd = Command::new(&tree_sitter_binary);
            let cmd = cmd.args([
                "build",
                "--wasm",
                "-o",
                &format!("target/release/tree-sitter-{grammar_name}.wasm"),
                grammar_dir.to_str().unwrap(),
            ]);
            if args.docker {
                cmd.arg("--docker");
            }
            bail_on_err(
                &cmd.spawn()?.wait_with_output()?,
                &format!("Failed to regenerate {grammar_name} parser to wasm"),
            )?;
        } else {
            let output = Command::new(&tree_sitter_binary)
                .arg("generate")
                .arg("src/grammar.json")
                .arg("--abi=latest")
                .current_dir(grammar_dir)
                .spawn()?
                .wait_with_output()?;
            bail_on_err(
                &output,
                &format!("Failed to regenerate {grammar_name} parser"),
            )?;
        }
    }

    Ok(())
}

pub fn run_bindings() -> Result<()> {
    let no_copy = [
        "TSInput",
        "TSLanguage",
        "TSLogger",
        "TSLookaheadIterator",
        "TSParser",
        "TSTree",
        "TSQuery",
        "TSQueryCursor",
        "TSQueryCapture",
        "TSQueryMatch",
        "TSQueryPredicateStep",
    ];

    let bindings = bindgen::Builder::default()
        .header(HEADER_PATH)
        .layout_tests(false)
        .allowlist_type("^TS.*")
        .allowlist_function("^ts_.*")
        .allowlist_var("^TREE_SITTER.*")
        .no_copy(no_copy.join("|"))
        .prepend_enum_name(false)
        .use_core()
        .clang_arg("-D TREE_SITTER_FEATURE_WASM")
        .generate()
        .expect("Failed to generate bindings");

    bindings
        .write_to_file("lib/binding_rust/bindings.rs")
        .with_context(|| "Failed to write bindings")
}

fn find_grammar_files(
    dir: &str,
) -> impl Iterator<Item = Result<std::path::PathBuf, std::io::Error>> {
    fs::read_dir(dir)
        .expect("Failed to read directory")
        .filter_map(Result::ok)
        .flat_map(|entry| {
            let path = entry.path();
            if path.is_dir() && !path.to_string_lossy().contains("node_modules") {
                Box::new(find_grammar_files(path.to_str().unwrap())) as Box<dyn Iterator<Item = _>>
            } else if path.is_file() && path.file_name() == Some(OsStr::new("grammar.js")) {
                Box::new(std::iter::once(Ok(path))) as _
            } else {
                Box::new(std::iter::empty()) as _
            }
        })
}
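`find_grammar_files` above recurses while returning `impl Iterator`, boxing each branch as `Box<dyn Iterator>` so the three differently-typed iterators (recursive walk, single item, empty) unify behind one interface. A generic sketch of the same pattern, with the target file name made a parameter (this helper and its name are illustrative, not part of the codebase):

```rust
use std::{ffi::OsStr, fs, io, path::PathBuf};

// The boxed-iterator recursion used above: each match arm yields a different
// iterator type, unified as `Box<dyn Iterator>` so the function can recurse.
fn find_files(dir: &str, file_name: &'static str) -> impl Iterator<Item = io::Result<PathBuf>> {
    fs::read_dir(dir)
        .expect("Failed to read directory")
        .filter_map(Result::ok)
        .flat_map(move |entry| {
            let path = entry.path();
            if path.is_dir() {
                Box::new(find_files(path.to_str().unwrap(), file_name))
                    as Box<dyn Iterator<Item = _>>
            } else if path.file_name() == Some(OsStr::new(file_name)) {
                Box::new(std::iter::once(Ok(path))) as _
            } else {
                Box::new(std::iter::empty()) as _
            }
        })
}

fn main() {
    for path in find_files(".", "grammar.js").flatten() {
        println!("{}", path.display());
    }
}
```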
Some files were not shown because too many files have changed in this diff.