Compare commits

61 commits: master...release-0.
| SHA1 |
|---|
| 6183b59f51 |
| 38223b9871 |
| 5e8760bf46 |
| 05beba8137 |
| 21a517c423 |
| 62feed0715 |
| 1a983b7e2c |
| ede1960eef |
| b79f31da80 |
| 62ce991e84 |
| 29263440c7 |
| 6919d8406d |
| 3473ca9a6b |
| bc4186776a |
| 4e870b9207 |
| 8d466ead30 |
| 9be7dda48f |
| bb8eb83f50 |
| 6c4c522724 |
| 5e645f11b2 |
| e3c8263338 |
| fc8c1863e2 |
| 2507aabc04 |
| d25a5e4886 |
| 2f6583aae2 |
| 340d3eeb41 |
| 28cbc771f1 |
| 70fd2c02f1 |
| 35f119db03 |
| 0258a41e15 |
| 32c1466224 |
| 54299d3d87 |
| 8745e5d2ce |
| 1fd07fd619 |
| 2303b7d2c5 |
| 0358feda05 |
| 1aa6567c7b |
| 9d0313af0f |
| 85a42f48be |
| aac741dfd1 |
| 4cf96126d4 |
| 61d67adbf7 |
| 5e93499f20 |
| b1493f9b35 |
| 89bd9e302e |
| 12bc174205 |
| a498790a82 |
| f629fd4aed |
| 657c7b548e |
| bdfe32402e |
| 8c45b79808 |
| cd1abd9351 |
| 46bdc14e20 |
| 0b167b0782 |
| 76fffb0f2d |
| b759a5fac5 |
| b3f808cc01 |
| 0d8f143a9d |
| c639d547f9 |
| 75d2915f48 |
| c21db9fea9 |
102 changed files with 2341 additions and 2248 deletions
**.github/actions/cache/action.yml** (23 changed lines)

```diff
@@ -1,24 +1,23 @@
-name: 'Cache'
+name: Cache

-description: "This action caches fixtures"
+description: This action caches fixtures

 outputs:
   cache-hit:
-    description: 'Cache hit'
-    value: ${{ steps.cache_output.outputs.cache-hit }}
+    description: Cache hit
+    value: ${{ steps.cache.outputs.cache-hit }}

 runs:
-  using: "composite"
+  using: composite
   steps:
     - uses: actions/cache@v4
-      id: cache_fixtures
+      id: cache
       with:
         path: |
           test/fixtures/grammars
           target/release/tree-sitter-*.wasm
         key: fixtures-${{ join(matrix.*, '_') }}-${{ hashFiles(
-          'cli/src/generate/**',
-          'script/generate-fixtures*',
+          'cli/generate/src/**',
+          'xtask/src/*',
           'test/fixtures/grammars/*/**/src/*.c',
           '.github/actions/cache/action.yml') }}
-
-    - run: echo "cache-hit=${{ steps.cache_fixtures.outputs.cache-hit }}" >> $GITHUB_OUTPUT
-      shell: bash
-      id: cache_output
```
**.github/scripts/cross.sh** (18 changed lines)

```diff
@@ -1,17 +1,3 @@
-#!/bin/bash
-
-# set -x
-set -e
-
-if [ "$BUILD_CMD" != "cross" ]; then
-  echo "cross.sh - is a helper to assist only in cross compiling environments" >&2
-  echo "To use this tool set the BUILD_CMD env var to the \"cross\" value" >&2
-  exit 111
-fi
-
-if [ -z "$CROSS_IMAGE" ]; then
-  echo "The CROSS_IMAGE env var should be provided" >&2
-  exit 111
-fi
-
-docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
+#!/bin/bash -eu
+
+exec docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
```
||||||
20
.github/scripts/make.sh
vendored
20
.github/scripts/make.sh
vendored
|
|
@ -1,19 +1,9 @@
|
||||||
#!/bin/bash
|
#!/bin/bash -eu
|
||||||
|
|
||||||
# set -x
|
tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
|
||||||
set -e
|
|
||||||
|
|
||||||
if [ "$BUILD_CMD" == "cross" ]; then
|
if [[ $BUILD_CMD == cross ]]; then
|
||||||
if [ -z "$CC" ]; then
|
cross.sh make CC="$CC" AR="$AR" "$@"
|
||||||
echo "make.sh: CC is not set" >&2
|
|
||||||
exit 111
|
|
||||||
fi
|
|
||||||
if [ -z "$AR" ]; then
|
|
||||||
echo "make.sh: AR is not set" >&2
|
|
||||||
exit 111
|
|
||||||
fi
|
|
||||||
|
|
||||||
cross.sh make CC=$CC AR=$AR "$@"
|
|
||||||
else
|
else
|
||||||
make "$@"
|
exec make "$@"
|
||||||
fi
|
fi
|
||||||
|
|
|
||||||
**.github/scripts/tree-sitter.sh** (27 changed lines)

```diff
@@ -1,28 +1,9 @@
-#!/bin/bash
-
-# set -x
-set -e
-
-if [ -z "$ROOT" ]; then
-  echo "The ROOT env var should be set to absolute path of a repo root folder" >&2
-  exit 111
-fi
-
-if [ -z "$TARGET" ]; then
-  echo "The TARGET env var should be equal to a \`cargo build --target <TARGET>\` command value" >&2
-  exit 111
-fi
-
-tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
-
-if [ "$BUILD_CMD" == "cross" ]; then
-  if [ -z "$CROSS_RUNNER" ]; then
-    echo "The CROSS_RUNNER env var should be set to a CARGO_TARGET_*_RUNNER env var value" >&2
-    echo "that is available in a docker image used by the cross tool under the hood" >&2
-    exit 111
-  fi
-
-  cross.sh $CROSS_RUNNER "$tree_sitter" "$@"
-else
-  "$tree_sitter" "$@"
-fi
+#!/bin/bash -eu
+
+tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
+
+if [[ $BUILD_CMD == cross ]]; then
+  cross.sh "$CROSS_RUNNER" "$tree_sitter" "$@"
+else
+  exec "$tree_sitter" "$@"
+fi
```
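After this rewrite, the three helper scripts no longer validate their inputs; `bash -eu` turns any missing variable into a hard failure, and the workflow is expected to have exported everything beforehand. A rough sketch of that contract, with illustrative values borrowed from the linux-arm64 matrix entry in build.yml below (not literal workflow output):

```bash
# Hypothetical local stand-in for what build.yml's "Set up environment"
# step writes to $GITHUB_ENV; under `bash -eu`, an unset var aborts the script.
export ROOT="$PWD"                                  # repo root
export TARGET=aarch64-unknown-linux-gnu             # `cargo build --target` value
export BUILD_CMD=cross                              # "cargo" for native builds
export CROSS_IMAGE="ghcr.io/cross-rs/$TARGET:edge"  # image cross.sh docker-runs
export CROSS_RUNNER=qemu-aarch64                    # illustrative runner value
export CC=aarch64-linux-gnu-gcc AR=aarch64-linux-gnu-ar
export PATH="$PWD/.github/scripts:$PATH"

make.sh -j                  # delegates to `cross.sh make` when BUILD_CMD=cross
tree-sitter.sh --version    # runs the built CLI through $CROSS_RUNNER
```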
**.github/workflows/backport.yml** (19 changed lines)

```diff
@@ -1,26 +1,29 @@
-name: backport
+name: Backport Pull Request

 on:
   pull_request_target:
     types: [closed, labeled]

+permissions:
+  contents: write
+  pull-requests: write
+
 jobs:
   backport:
-    permissions:
-      contents: write
-      pull-requests: write
-    name: Backport Pull Request
     if: github.event.pull_request.merged
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4

-      - uses: actions/create-github-app-token@v1
+      - name: Create app token
+        uses: actions/create-github-app-token@v1
         id: app-token
         with:
           app-id: ${{ vars.BACKPORT_APP }}
           private-key: ${{ secrets.BACKPORT_KEY }}

       - name: Create backport PR
-        id: backport
         uses: korthout/backport-action@v3
         with:
           pull_title: "${pull_title}"
```
**.github/workflows/build.yml** (234 changed lines)

```diff
@@ -8,7 +8,7 @@ env:
 on:
   workflow_call:
     inputs:
-      run_test:
+      run-test:
         default: true
         type: boolean

@@ -21,92 +21,93 @@ jobs:
       fail-fast: false
       matrix:
         platform:
-          - linux-arm64 #
-          - linux-arm #
-          - linux-x64 #
-          - linux-x86 #
-          - linux-powerpc64 #
-          - windows-arm64 #
-          - windows-x64 # <-- No C library build - requires an additional adapted Makefile for `cl.exe` compiler
-          - windows-x86 # -- // --
-          - macos-arm64 #
-          - macos-x64 #
+          - linux-arm64
+          - linux-arm
+          - linux-x64
+          - linux-x86
+          - linux-powerpc64
+          - windows-arm64
+          - windows-x64
+          - windows-x86
+          - macos-arm64
+          - macos-x64

         include:
           # When adding a new `target`:
           # 1. Define a new platform alias above
-          # 2. Add a new record to a matrix map in `cli/npm/install.js`
+          # 2. Add a new record to the matrix map in `cli/npm/install.js`
           - { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
           - { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
-          - { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-20.04 , cli_features: wasm } #2272
+          - { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-20.04 , features: wasm } # See #2272
           - { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
           - { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
           - { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
-          - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , cli_features: wasm }
+          - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , features: wasm }
           - { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
-          - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-14 , cli_features: wasm }
-          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-12 , cli_features: wasm }
+          - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-14 , features: wasm }
+          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-13 , features: wasm }

           # Cross compilers for C library
           - { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar }
           - { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar }
           - { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar }
           - { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar }

-          # See #2041 tree-sitter issue
+          # Prevent race condition (see #2041)
           - { platform: windows-x64 , rust-test-threads: 1 }
           - { platform: windows-x86 , rust-test-threads: 1 }

-          # CLI only build
-          - { platform: windows-arm64 , cli-only: true }
+          # Can't natively run CLI on Github runner's host
+          - { platform: windows-arm64 , no-run: true }

     env:
       BUILD_CMD: cargo
-      EXE: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}
+      SUFFIX: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}

     defaults:
       run:
         shell: bash

     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4

       - name: Read Emscripten version
-        run: echo "EMSCRIPTEN_VERSION=$(cat cli/loader/emscripten-version)" >> $GITHUB_ENV
+        run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<cli/loader/emscripten-version)" >> $GITHUB_ENV

       - name: Install Emscripten
-        if: ${{ !matrix.cli-only && !matrix.use-cross }}
+        if: ${{ !matrix.no-run && !matrix.use-cross }}
         uses: mymindstorm/setup-emsdk@v14
         with:
           version: ${{ env.EMSCRIPTEN_VERSION }}

-      - run: rustup toolchain install stable --profile minimal
-      - run: rustup target add ${{ matrix.target }}
-      - uses: Swatinem/rust-cache@v2
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          target: ${{ matrix.target }}

       - name: Install cross
         if: ${{ matrix.use-cross }}
-        uses: taiki-e/install-action@v2
-        with:
-          tool: cross
+        run: RUSTFLAGS="" cargo install cross --git https://github.com/cross-rs/cross

-      - name: Build custom cross image
-        if: ${{ matrix.use-cross && matrix.os == 'ubuntu-latest' }}
+      - name: Configure cross
+        if: ${{ matrix.use-cross }}
         run: |
-          target="${{ matrix.target }}"
-          image=ghcr.io/cross-rs/$target:custom
-          echo "CROSS_IMAGE=$image" >> $GITHUB_ENV
-
-          echo "[target.$target]" >> Cross.toml
-          echo "image = \"$image\"" >> Cross.toml
-          echo "CROSS_CONFIG=$PWD/Cross.toml" >> $GITHUB_ENV
-
-          echo "FROM ghcr.io/cross-rs/$target:edge" >> Dockerfile
-          echo "RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -" >> Dockerfile
-          echo "RUN apt-get update && apt-get -y install nodejs" >> Dockerfile
-          docker build -t $image .
+          printf '%s\n' > Cross.toml \
+            '[target.${{ matrix.target }}]' \
+            'image = "ghcr.io/cross-rs/${{ matrix.target }}:edge"' \
+            '[build]' \
+            'pre-build = [' \
+            ' "dpkg --add-architecture $CROSS_DEB_ARCH",' \
+            ' "curl -fsSL https://deb.nodesource.com/setup_22.x | bash -",' \
+            ' "apt-get update && apt-get -y install libssl-dev nodejs"' \
+            ']'
+          cat - Cross.toml <<< 'Cross.toml:'
+          printf '%s\n' >> $GITHUB_ENV \
+            "CROSS_CONFIG=$PWD/Cross.toml" \
+            "CROSS_IMAGE=ghcr.io/cross-rs/${{ matrix.target }}:edge"

-      - name: Setup env extras
+      - name: Set up environment
         env:
           RUST_TEST_THREADS: ${{ matrix.rust-test-threads }}
           USE_CROSS: ${{ matrix.use-cross }}

@@ -115,68 +116,117 @@ jobs:
           AR: ${{ matrix.ar }}
         run: |
           PATH="$PWD/.github/scripts:$PATH"
-          echo "$PWD/.github/scripts" >> $GITHUB_PATH
+          printf '%s/.github/scripts\n' "$PWD" >> $GITHUB_PATH

-          echo "TREE_SITTER=tree-sitter.sh" >> $GITHUB_ENV
-          echo "TARGET=$TARGET" >> $GITHUB_ENV
-          echo "ROOT=$PWD" >> $GITHUB_ENV
+          printf '%s\n' >> $GITHUB_ENV \
+            'TREE_SITTER=tree-sitter.sh' \
+            "TARGET=$TARGET" \
+            "ROOT=$PWD"

-          [ -n "$RUST_TEST_THREADS" ] && \
-            echo "RUST_TEST_THREADS=$RUST_TEST_THREADS" >> $GITHUB_ENV
+          [[ -n $RUST_TEST_THREADS ]] && \
+            printf 'RUST_TEST_THREADS=%s\n' "$RUST_TEST_THREADS" >> $GITHUB_ENV

-          [ -n "$CC" ] && echo "CC=$CC" >> $GITHUB_ENV
-          [ -n "$AR" ] && echo "AR=$AR" >> $GITHUB_ENV
+          [[ -n $CC ]] && printf 'CC=%s\n' "$CC" >> $GITHUB_ENV
+          [[ -n $AR ]] && printf 'AR=%s\n' "$AR" >> $GITHUB_ENV

-          if [ "$USE_CROSS" == "true" ]; then
-            echo "BUILD_CMD=cross" >> $GITHUB_ENV
-            runner=$(BUILD_CMD=cross cross.sh bash -c "env | sed -nr '/^CARGO_TARGET_.*_RUNNER=/s///p'")
-            [ -n "$runner" ] && echo "CROSS_RUNNER=$runner" >> $GITHUB_ENV
+          if [[ $USE_CROSS == true ]]; then
+            printf 'BUILD_CMD=cross\n' >> $GITHUB_ENV
+            runner=$(cross.sh bash -c "env | sed -n 's/^CARGO_TARGET_.*_RUNNER=//p'")
+            [[ -n $runner ]] && printf 'CROSS_RUNNER=%s\n' "$runner" >> $GITHUB_ENV
           fi

-      - name: Build C library
-        if: ${{ !contains(matrix.os, 'windows') }} # Requires an additional adapted Makefile for `cl.exe` compiler
-        run: make.sh -j CFLAGS="-Werror"
+      - name: Build wasmtime library
+        if: ${{ !matrix.use-cross && contains(matrix.features, 'wasm') }}
+        run: |
+          mkdir -p target
+          WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
+            jq -r '.packages[] | select(.name == "wasmtime-c-api-impl") | .version')
+          curl -LSs "$WASMTIME_REPO/archive/refs/tags/v${WASMTIME_VERSION}.tar.gz" | tar xzf - -C target
+          cd target/wasmtime-${WASMTIME_VERSION}
+          cmake -S crates/c-api -B target/c-api \
+            -DCMAKE_INSTALL_PREFIX="$PWD/artifacts" \
+            -DWASMTIME_DISABLE_ALL_FEATURES=ON \
+            -DWASMTIME_FEATURE_CRANELIFT=ON \
+            -DWASMTIME_TARGET='${{ matrix.target }}'
+          cmake --build target/c-api && cmake --install target/c-api
+          printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
+        env:
+          WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
+          RUSTFLAGS: ""
+
+      - name: Build C library (make)
+        if: ${{ runner.os != 'Windows' }}
+        run: make.sh -j CFLAGS="$CFLAGS"
+        env:
+          CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
+
+      - name: Build C library (CMake)
+        if: ${{ !matrix.use-cross }}
+        run: |
+          cmake -S lib -B build/static \
+            -DBUILD_SHARED_LIBS=OFF \
+            -DCMAKE_BUILD_TYPE=Debug \
+            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
+            -DTREE_SITTER_FEATURE_WASM=$WASM
+          cmake --build build/static --verbose
+
+          cmake -S lib -B build/shared \
+            -DBUILD_SHARED_LIBS=ON \
+            -DCMAKE_BUILD_TYPE=Debug \
+            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
+            -DTREE_SITTER_FEATURE_WASM=$WASM
+          cmake --build build/shared --verbose
+        env:
+          CC: ${{ contains(matrix.target, 'linux') && 'clang' || '' }}
+          WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}

       - name: Build wasm library
-        if: ${{ !matrix.cli-only && !matrix.use-cross }} # No sense to build on the same Github runner hosts many times
-        run: script/build-wasm
+        # No reason to build on the same Github runner hosts many times
+        if: ${{ !matrix.no-run && !matrix.use-cross }}
+        run: $BUILD_CMD run -p xtask -- build-wasm

-      - run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.cli_features }}
+      - name: Build target
+        run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.features }}

-      - run: script/fetch-fixtures
-
-      - uses: ./.github/actions/cache
+      - name: Cache fixtures
         id: cache
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        uses: ./.github/actions/cache
+
+      - name: Fetch fixtures
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        run: $BUILD_CMD run -p xtask -- fetch-fixtures

       - name: Generate fixtures
-        if: ${{ !matrix.cli-only && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # Can't natively run CLI on Github runner's host
-        run: script/generate-fixtures
+        if: ${{ !matrix.no-run && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+        run: $BUILD_CMD run -p xtask -- generate-fixtures

-      - name: Generate WASM fixtures
-        if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # See comment for the "Build wasm library" step
-        run: script/generate-fixtures-wasm
+      - name: Generate Wasm fixtures
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+        run: $BUILD_CMD run -p xtask -- generate-fixtures --wasm

       - name: Run main tests
-        if: ${{ !matrix.cli-only && inputs.run_test }} # Can't natively run CLI on Github runner's host
-        run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.cli_features }}
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.features }}

       - name: Run wasm tests
-        if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # See comment for the "Build wasm library" step
-        run: script/test-wasm
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
+        run: $BUILD_CMD run -p xtask -- test-wasm

       - name: Run benchmarks
-        if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # Cross-compiled benchmarks make no sense
+        # Cross-compiled benchmarks are pointless
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
         run: $BUILD_CMD bench benchmark -p tree-sitter-cli --target=${{ matrix.target }}

       - name: Upload CLI artifact
         uses: actions/upload-artifact@v4
         with:
           name: tree-sitter.${{ matrix.platform }}
-          path: target/${{ matrix.target }}/release/tree-sitter${{ env.EXE }}
+          path: target/${{ matrix.target }}/release/tree-sitter${{ env.SUFFIX }}
           if-no-files-found: error
           retention-days: 7

-      - name: Upload WASM artifacts
+      - name: Upload Wasm artifacts
         if: ${{ matrix.platform == 'linux-x64' }}
         uses: actions/upload-artifact@v4
         with:
```
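One detail worth spelling out: the `printf` in the new "Configure cross" step is just a compact way of writing a config file. Expanded by hand for one concrete target (aarch64-unknown-linux-gnu, chosen for illustration), it produces roughly this:

```bash
# Sketch: the Cross.toml that "Configure cross" generates, with
# ${{ matrix.target }} already substituted (illustrative target choice).
cat > Cross.toml <<'EOF'
[target.aarch64-unknown-linux-gnu]
image = "ghcr.io/cross-rs/aarch64-unknown-linux-gnu:edge"
[build]
pre-build = [
 "dpkg --add-architecture $CROSS_DEB_ARCH",
 "curl -fsSL https://deb.nodesource.com/setup_22.x | bash -",
 "apt-get update && apt-get -y install libssl-dev nodejs"
]
EOF
```

The `pre-build` hooks replace the old hand-built `:custom` Docker image: instead of baking Node into a derived image, cross now installs Node 22 and libssl into the stock `:edge` image as part of each build.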
**.github/workflows/ci.yml** (26 changed lines)

```diff
@@ -1,9 +1,9 @@
 name: CI

 on:
   pull_request:
   push:
-    branches:
-      - 'master'
+    branches: [master]

 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
@@ -13,12 +13,22 @@ jobs:
   checks:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - run: rustup toolchain install stable --profile minimal
-      - run: rustup toolchain install nightly --profile minimal
-      - run: rustup component add --toolchain nightly rustfmt
-      - uses: Swatinem/rust-cache@v2
-      - run: make lint
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up stable Rust toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          toolchain: stable
+
+      - name: Set up nightly Rust toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+        with:
+          toolchain: nightly
+          components: clippy, rustfmt
+
+      - name: Lint files
+        run: make lint

   sanitize:
     uses: ./.github/workflows/sanitize.yml
```
**.github/workflows/release.yml** (68 changed lines)

```diff
@@ -1,4 +1,5 @@
 name: Release

 on:
   workflow_dispatch:
   push:
@@ -9,16 +10,17 @@
   build:
     uses: ./.github/workflows/build.yml
     with:
-      run_test: false
+      run-test: false

   release:
-    name: Release
+    name: Release on GitHub
     runs-on: ubuntu-latest
     needs: build
     permissions:
       contents: write
     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4

       - name: Download build artifacts
         uses: actions/download-artifact@v4
@@ -42,29 +44,24 @@
           ls -l target/

       - name: Create release
-        uses: softprops/action-gh-release@v2
-        with:
-          name: ${{ github.ref_name }}
-          tag_name: ${{ github.ref_name }}
-          fail_on_unmatched_files: true
-          files: |
-            target/tree-sitter-*.gz
-            target/tree-sitter.wasm
-            target/tree-sitter.js
+        run: |-
+          gh release create \
+            target/tree-sitter-*.gz \
+            target/tree-sitter.wasm \
+            target/tree-sitter.js
+        env:
+          GH_TOKEN: ${{ github.token }}

   crates_io:
-    name: Publish CLI to Crates.io
+    name: Publish packages to Crates.io
     runs-on: ubuntu-latest
     needs: release
     steps:
-      - uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4

-      - name: Setup Rust
-        uses: actions-rs/toolchain@v1
-        with:
-          profile: minimal
-          toolchain: stable
-          override: true
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1

       - name: Publish crates to Crates.io
         uses: katyo/publish-crates@v2
@@ -72,29 +69,32 @@
         registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}

   npm:
-    name: Publish lib to npmjs.com
+    name: Publish packages to npmjs.com
     runs-on: ubuntu-latest
     needs: release
     strategy:
       fail-fast: false
       matrix:
-        directory: ["cli/npm", "lib/binding_web"]
+        directory: [cli/npm, lib/binding_web]
     steps:
-      - uses: actions/checkout@v4
+      - name: CHeckout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Node
+        uses: actions/setup-node@v4
+        with:
+          node-version: 20
+          registry-url: https://registry.npmjs.org
+
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1

       - name: Build wasm
         if: matrix.directory == 'lib/binding_web'
-        run: ./script/build-wasm
+        run: cargo xtask build-wasm

-      - name: Setup Node
-        uses: actions/setup-node@v4
-        with:
-          node-version: 18
-          registry-url: "https://registry.npmjs.org"
-
-      - name: Publish lib to npmjs.com
-        env:
-          NODE_AUTH_TOKEN: ${{secrets.NPM_TOKEN}}
-        run: |
-          cd ${{ matrix.directory }}
-          npm publish
+      - name: Publish to npmjs.com
+        working-directory: ${{ matrix.directory }}
+        run: npm publish
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
```
**.github/workflows/response.yml** (37 changed lines)

```diff
@@ -1,34 +1,47 @@
-name: no_response
+name: No response

 on:
   schedule:
-    - cron: '30 1 * * *' # Run every day at 01:30
+    - cron: "30 1 * * *" # Run every day at 01:30
   workflow_dispatch:
   issue_comment:

+permissions:
+  issues: write
+  pull-requests: write
+
 jobs:
   close:
+    name: Close issues with no response
     if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
     runs-on: ubuntu-latest
-    permissions:
-      issues: write
-      pull-requests: write
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/github-script@v7
+      - name: Checkout script
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: .github/scripts/close_unresponsive.js
+          sparse-checkout-cone-mode: false
+
+      - name: Run script
+        uses: actions/github-script@v7
         with:
           script: |
             const script = require('./.github/scripts/close_unresponsive.js')
             await script({github, context})

   remove_label:
+    name: Remove response label
     if: github.event_name == 'issue_comment'
     runs-on: ubuntu-latest
-    permissions:
-      issues: write
-      pull-requests: write
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/github-script@v7
+      - name: Checkout script
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: .github/scripts/remove_response_label.js
+          sparse-checkout-cone-mode: false
+
+      - name: Run script
+        uses: actions/github-script@v7
         with:
           script: |
             const script = require('./.github/scripts/remove_response_label.js')
```
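The renamed checkout steps also switch to a sparse checkout, so each job fetches only the one script it runs rather than the whole repository. Roughly the same effect with plain git would be the following (a sketch; actions/checkout's internals differ):

```bash
# Approximate git equivalent of actions/checkout with the
# sparse-checkout and sparse-checkout-cone-mode: false inputs.
git clone --filter=blob:none --no-checkout \
    https://github.com/tree-sitter/tree-sitter.git
cd tree-sitter
git sparse-checkout set --no-cone .github/scripts/close_unresponsive.js
git checkout
```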
**.github/workflows/reviewers_remove.yml** (18 changed lines)

```diff
@@ -1,15 +1,23 @@
-name: "reviewers: remove"
+name: Remove Reviewers

 on:
   pull_request_target:
     types: [converted_to_draft, closed]

+permissions:
+  pull-requests: write
+
 jobs:
   remove-reviewers:
     runs-on: ubuntu-latest
-    permissions:
-      pull-requests: write
     steps:
-      - uses: actions/checkout@v4
-      - name: 'Remove reviewers'
+      - name: Checkout script
+        uses: actions/checkout@v4
+        with:
+          sparse-checkout: .github/scripts/reviewers_remove.js
+          sparse-checkout-cone-mode: false
+
+      - name: Run script
         uses: actions/github-script@v7
         with:
           script: |
```
**.github/workflows/sanitize.yml** (55 changed lines)

```diff
@@ -8,39 +8,44 @@ on:
   workflow_call:

 jobs:
-  check_undefined_behaviour:
-    name: Sanitizer checks
+  check-undefined-behaviour:
     runs-on: ubuntu-latest
     timeout-minutes: 20
     env:
       TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
     steps:
-      - name: Checkout source code
+      - name: Checkout repository
         uses: actions/checkout@v4

       - name: Install UBSAN library
         run: sudo apt-get update -y && sudo apt-get install -y libubsan1

-      - run: rustup toolchain install stable --profile minimal
-      - uses: Swatinem/rust-cache@v2
-      - run: cargo build --release
-      - run: script/fetch-fixtures
+      - name: Set up Rust
+        uses: actions-rust-lang/setup-rust-toolchain@v1

-      - uses: ./.github/actions/cache
-        id: cache
+      - name: Build project
+        run: cargo build --release

-      - if: ${{ steps.cache.outputs.cache-hit != 'true' }}
-        run: script/generate-fixtures
+      - name: Cache fixtures
+        uses: ./.github/actions/cache
+        id: cache
+
+      - name: Fetch fixtures
+        run: cargo xtask fetch-fixtures
+
+      - name: Generate fixtures
+        if: ${{ steps.cache.outputs.cache-hit != 'true' }}
+        run: cargo xtask generate-fixtures

       - name: Run main tests with undefined behaviour sanitizer (UBSAN)
+        run: cargo test -- --test-threads 1
         env:
           CFLAGS: -fsanitize=undefined
           RUSTFLAGS: ${{ env.RUSTFLAGS }} -lubsan
-        run: cargo test -- --test-threads 1

       - name: Run main tests with address sanitizer (ASAN)
+        run: cargo test -- --test-threads 1
         env:
           ASAN_OPTIONS: verify_asan_link_order=0
           CFLAGS: -fsanitize=address
           RUSTFLAGS: ${{ env.RUSTFLAGS }} -lasan --cfg sanitizing
-        run: cargo test -- --test-threads 1
```
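Both sanitizer steps remain plain `cargo test` runs wrapped in compiler and linker flags, so they can be reproduced outside CI; a minimal sketch for a Linux host with the GCC sanitizer runtime libraries installed:

```bash
# UBSAN pass: C objects built with -fsanitize=undefined, Rust links libubsan.
CFLAGS=-fsanitize=undefined RUSTFLAGS='-lubsan' \
    cargo test -- --test-threads 1

# ASAN pass: same shape, plus the `--cfg sanitizing` flag the workflow sets.
ASAN_OPTIONS=verify_asan_link_order=0 \
CFLAGS=-fsanitize=address \
RUSTFLAGS='-lasan --cfg sanitizing' \
    cargo test -- --test-threads 1
```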
**Cargo.lock** (159 changed lines, generated)

```diff
@@ -156,9 +156,9 @@ checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3"

 [[package]]
 name = "cc"
-version = "1.1.24"
+version = "1.1.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "812acba72f0a070b003d3697490d2b55b837230ae7c6c6497f05cc2ddbb8d938"
+checksum = "2e80e3b6a3ab07840e1cae9b0666a63970dc28e8ed5ffbcdacbfc760c281bfc1"
 dependencies = [
  "jobserver",
  "libc",
@@ -311,18 +311,18 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"

 [[package]]
 name = "cranelift-bforest"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6e376bd92bddd03dcfc443b14382611cae5d10012aa0b1628bbf18bb73f12f7"
+checksum = "7b765ed4349e66bedd9b88c7691da42e24c7f62067a6be17ddffa949367b6e17"
 dependencies = [
  "cranelift-entity",
 ]

 [[package]]
 name = "cranelift-bitset"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "45ecbe07f25a8100e5077933516200e97808f1d7196b5a073edb85fa08fde32e"
+checksum = "9eaa2aece6237198afd32bff57699e08d4dccb8d3902c214fc1e6ba907247ca4"
 dependencies = [
  "serde",
  "serde_derive",
@@ -330,9 +330,9 @@ dependencies = [

 [[package]]
 name = "cranelift-codegen"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc60913f32c1de18538c28bef74b8c87cf16de7841a1b0956fcf01b23237853a"
+checksum = "351824439e59d42f0e4fa5aac1d13deded155120043565769e55cd4ad3ca8ed9"
 dependencies = [
  "bumpalo",
  "cranelift-bforest",
@@ -353,33 +353,33 @@ dependencies = [

 [[package]]
 name = "cranelift-codegen-meta"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bae009e7822f47aa55e7dcef846ccf3aa4eb102ca6b4bcb8a44b36f3f49aa85c"
+checksum = "5a0ce0273d7a493ef8f31f606849a4e931c19187a4923f5f87fc1f2b13109981"
 dependencies = [
  "cranelift-codegen-shared",
 ]

 [[package]]
 name = "cranelift-codegen-shared"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c78f01a852536c68e34444450f845ed6e0782a1f047f85397fe460b8fbce8f1"
+checksum = "0f72016ac35579051913f4f07f6b36c509ed69412d852fd44c8e1d7b7fa6d92a"

 [[package]]
 name = "cranelift-control"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7a061b22e00a9e36b31f2660dfb05a9617b7775bd54b79754d3bb75a990dac06"
+checksum = "db28951d21512c4fd0554ef179bfb11e4eb6815062957a9173824eee5de0c46c"
 dependencies = [
  "arbitrary",
 ]

 [[package]]
 name = "cranelift-entity"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95e2b261a3e74ae42f4e606906d5ffa44ee2684e8b1ae23bdf75d21908dc9233"
+checksum = "14ebe592a2f81af9237cf9be29dd3854ecb72108cfffa59e85ef12389bf939e3"
 dependencies = [
  "cranelift-bitset",
  "serde",
@@ -388,9 +388,9 @@ dependencies = [

 [[package]]
 name = "cranelift-frontend"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe14abba0e6bab42aca0f9ce757f96880f9187e88bc6cb975ed6acd8a42f7770"
+checksum = "4437db9d60c7053ac91ded0802740c2ccf123ee6d6898dd906c34f8c530cd119"
 dependencies = [
  "cranelift-codegen",
  "log",
@@ -400,15 +400,15 @@ dependencies = [

 [[package]]
 name = "cranelift-isle"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "311d91ae72b37d4262b51217baf8c9e01f1afd5148931468da1fdb7e9d011347"
+checksum = "230cb33572b9926e210f2ca28145f2bc87f389e1456560932168e2591feb65c1"

 [[package]]
 name = "cranelift-native"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a3f84c75e578189ff7a716c24ad83740b553bf583f2510b323bfe4c1a74bb93"
+checksum = "364524ac7aef7070b1141478724abebeec297d4ea1e87ad8b8986465e91146d9"
 dependencies = [
  "cranelift-codegen",
  "libc",
@@ -417,9 +417,9 @@ dependencies = [

 [[package]]
 name = "cranelift-wasm"
-version = "0.112.1"
+version = "0.112.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f56b7b2476c47b2091eee5a20bc54a80fbb29ca5313ae2bd0dea52621abcfca1"
+checksum = "0572cbd9d136a62c0f39837b6bce3b0978b96b8586794042bec0c214668fd6f5"
 dependencies = [
  "cranelift-codegen",
  "cranelift-entity",
@@ -442,9 +442,9 @@ dependencies = [

 [[package]]
 name = "ctor"
-version = "0.2.8"
+version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
+checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501"
 dependencies = [
  "quote",
  "syn",
@@ -565,6 +565,12 @@ dependencies = [
  "windows-sys 0.59.0",
 ]

+[[package]]
+name = "foldhash"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f81ec6369c545a7d40e4589b5597581fa1c441fe1cce96dd1de43159910a36a2"
+
 [[package]]
 name = "form_urlencoded"
 version = "1.2.1"
@@ -651,6 +657,9 @@ name = "hashbrown"
 version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1e087f84d4f86bf4b218b927129862374b72199ae7d8657835f1e89000eea4fb"
+dependencies = [
+ "foldhash",
+]

 [[package]]
 name = "heck"
@@ -982,24 +991,21 @@ dependencies = [

 [[package]]
 name = "object"
-version = "0.36.4"
+version = "0.36.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a"
+checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e"
 dependencies = [
  "crc32fast",
- "hashbrown 0.14.5",
+ "hashbrown 0.15.0",
  "indexmap",
  "memchr",
 ]

 [[package]]
 name = "once_cell"
-version = "1.20.1"
+version = "1.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82881c4be219ab5faaf2ad5e5e5ecdff8c66bd7402ca3160975c93b24961afd1"
-dependencies = [
- "portable-atomic",
-]
+checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775"

 [[package]]
 name = "openssl-probe"
@@ -1055,12 +1061,6 @@ version = "0.3.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2"

-[[package]]
-name = "portable-atomic"
-version = "1.9.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc9c68a3f6da06753e9335d63e27f6b9754dd1920d941135b7ea8224f141adb2"
-
 [[package]]
 name = "postcard"
 version = "1.0.10"
@@ -1104,9 +1104,9 @@ dependencies = [

 [[package]]
 name = "proc-macro2"
-version = "1.0.86"
+version = "1.0.87"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
+checksum = "b3e4daa0dcf6feba26f985457cdf104d4b4256fc5a09547140f3631bb076b19a"
 dependencies = [
  "unicode-ident",
 ]
@@ -1538,7 +1538,7 @@ dependencies = [

 [[package]]
 name = "tree-sitter"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "bindgen",
  "cc",
@@ -1551,7 +1551,7 @@ dependencies = [

 [[package]]
 name = "tree-sitter-cli"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anstyle",
  "anyhow",
@@ -1601,7 +1601,7 @@ dependencies = [

 [[package]]
 name = "tree-sitter-config"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anyhow",
  "dirs",
@@ -1611,7 +1611,7 @@ dependencies = [

 [[package]]
 name = "tree-sitter-generate"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anyhow",
  "heck 0.5.0",
@@ -1632,7 +1632,7 @@ dependencies = [

 [[package]]
 name = "tree-sitter-highlight"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "lazy_static",
  "regex",
@@ -1643,17 +1643,18 @@ dependencies = [

 [[package]]
 name = "tree-sitter-language"
-version = "0.1.2"
+version = "0.1.3"

 [[package]]
 name = "tree-sitter-loader"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "anyhow",
  "cc",
  "dirs",
  "fs4",
  "indoc",
+ "lazy_static",
  "libloading",
  "once_cell",
  "path-slash",
@@ -1670,7 +1671,7 @@ dependencies = [

 [[package]]
 name = "tree-sitter-tags"
-version = "0.24.1"
+version = "0.24.7"
 dependencies = [
  "memchr",
  "regex",
@@ -1871,9 +1872,9 @@ dependencies = [

 [[package]]
 name = "wasmtime"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03601559991d459a228236a49135364eac85ac00dc07b65fb95ae61a957793af"
+checksum = "ef01f9cb9636ed42a7ec5a09d785c0643590199dc7372dc22c7e2ba7a31a97d4"
 dependencies = [
  "anyhow",
  "bitflags",
@@ -1911,18 +1912,18 @@ dependencies = [

 [[package]]
 name = "wasmtime-asm-macros"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e453b3bde07312874c0c6703e2de9281daab46646172c1b71fa59a97226f858e"
+checksum = "ba5b20797419d6baf2296db2354f864e8bb3447cacca9d151ce7700ae08b4460"
 dependencies = [
  "cfg-if",
 ]

 [[package]]
 name = "wasmtime-c-api-impl"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4def1c38f8981c88d92e10acc7efb01da5b5775897fca2ab81caad76e930bd6d"
+checksum = "2852f09a087c740683a32a33b8f34268d1d33c1298b4f707d25f4bee158ccd75"
 dependencies = [
  "anyhow",
  "log",
@@ -1934,9 +1935,9 @@ dependencies = [

 [[package]]
 name = "wasmtime-c-api-macros"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c3feb5a461c52a376e80ef7ce7cee37a3a8395cb1794ac8eb340c0cd0b5d715"
+checksum = "fa52cecfad085e7a9725bcbf3c2b15a900e5dc14f5ddcc305c9779c19936618b"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1944,9 +1945,9 @@ dependencies = [

 [[package]]
 name = "wasmtime-component-macro"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a6faeabbdbfd27e24e8d5204207ba9c247a13cf84181ea721b5f209f281fe01"
+checksum = "26593c4b18c76ca3c3fbdd813d6692256537b639b851d8a6fe827e3d6966fc01"
 dependencies = [
  "anyhow",
  "proc-macro2",
@@ -1959,15 +1960,15 @@ dependencies = [

 [[package]]
 name = "wasmtime-component-util"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6b1b24db4aa3dc7c0d3181d1833b4fe9ec0cd3f08780b746415c84c0a9ec9011"
+checksum = "a2ed562fbb0cbed20a56c369c8de146c1de06a48c19e26ed9aa45f073514ee60"

 [[package]]
 name = "wasmtime-cranelift"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c737bef9ea94aab874e29ac6a8688b89ceb43c7b51f047079c43387972c07ee3"
+checksum = "f389b789cbcb53a8499131182135dea21d7d97ad77e7fb66830f69479ef0e68c"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -1990,9 +1991,9 @@ dependencies = [

 [[package]]
 name = "wasmtime-environ"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "817bfa9ea878ec37aa24f85fd6912844e8d87d321662824cf920d561b698cdfd"
+checksum = "84b72debe8899f19bedf66f7071310f06ef62de943a1369ba9b373613e77dd3d"
 dependencies = [
  "anyhow",
  "cranelift-bitset",
@@ -2013,9 +2014,9 @@ dependencies = [

 [[package]]
 name = "wasmtime-jit-icache-coherence"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48011232c0da424f89c3752a378d0b7f512fae321ea414a43e1e7a302a6a1f7e"
+checksum = "1d930bc1325bc0448be6a11754156d770f56f6c3a61f440e9567f36cd2ea3065"
 dependencies = [
  "anyhow",
  "cfg-if",
@@ -2025,15 +2026,15 @@ dependencies = [

 [[package]]
 name = "wasmtime-slab"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9858a22e656ae8574631221b474b8bebf63f1367fcac3f179873833eabc2ced"
+checksum = "055a181b8d03998511294faea14798df436503f14d7fd20edcf7370ec583e80a"

 [[package]]
 name = "wasmtime-types"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4d14b8a9206fe94485a03edb1654cd530dbd2a859a85a43502cb4e99653a568c"
+checksum = "c8340d976673ac3fdacac781f2afdc4933920c1adc738c3409e825dab3955399"
 dependencies = [
  "anyhow",
  "cranelift-entity",
@@ -2045,9 +2046,9 @@ dependencies = [

 [[package]]
 name = "wasmtime-versioned-export-macros"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e9bb1f01efb8b542eadfda511e8ea1cc54309451aba97b69969e5b1a59cb7ded"
+checksum = "a4b0c1f76891f778db9602ee3fbb4eb7e9a3f511847d1fb1b69eddbcea28303c"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2056,9 +2057,9 @@ dependencies = [

 [[package]]
 name = "wasmtime-wit-bindgen"
-version = "25.0.1"
+version = "25.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eb1596caa67b31ac675fd3da61685c4260f8b10832021db42c85d227b7ba8133"
+checksum = "b2fca2cbb5bb390f65d4434c19bf8d9873dfc60f10802918ebcd6f819a38d703"
 dependencies = [
  "anyhow",
  "heck 0.4.1",
@@ -2348,8 +2349,14 @@ dependencies = [
 name = "xtask"
 version = "0.1.0"
 dependencies = [
+ "anstyle",
+ "anyhow",
+ "bindgen",
+ "cc",
+ "clap",
  "git2",
  "indoc",
+ "regex",
  "semver",
  "serde",
  "serde_json",
```
65
Cargo.toml
65
Cargo.toml
|
|
@ -13,7 +13,7 @@ members = [
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
version = "0.24.1"
|
version = "0.24.7"
|
||||||
authors = ["Max Brunsfeld <maxbrunsfeld@gmail.com>"]
|
authors = ["Max Brunsfeld <maxbrunsfeld@gmail.com>"]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
rust-version = "1.74.1"
|
rust-version = "1.74.1"
|
||||||
|
|
@ -23,6 +23,55 @@ license = "MIT"
|
||||||
keywords = ["incremental", "parsing"]
|
keywords = ["incremental", "parsing"]
|
||||||
categories = ["command-line-utilities", "parsing"]
|
categories = ["command-line-utilities", "parsing"]
|
||||||
|
|
||||||
|
[workspace.lints.clippy]
|
||||||
|
dbg_macro = "deny"
|
||||||
|
todo = "deny"
|
||||||
|
pedantic = { level = "warn", priority = -1 }
|
||||||
|
nursery = { level = "warn", priority = -1 }
|
||||||
|
cargo = { level = "warn", priority = -1 }
|
||||||
|
|
||||||
|
# The lints below are a specific subset of the pedantic+nursery lints
|
||||||
|
# that we explicitly allow in the tree-sitter codebase because they either:
|
||||||
|
#
|
||||||
|
# 1. Contain false positives,
|
||||||
|
# 2. Are unnecessary, or
|
||||||
|
# 3. Worsen the code
|
||||||
|
|
||||||
|
branches_sharing_code = "allow"
|
||||||
|
cast_lossless = "allow"
|
||||||
|
cast_possible_truncation = "allow"
|
||||||
|
cast_possible_wrap = "allow"
|
||||||
|
cast_precision_loss = "allow"
|
||||||
|
cast_sign_loss = "allow"
|
||||||
|
checked_conversions = "allow"
|
||||||
|
cognitive_complexity = "allow"
|
||||||
|
collection_is_never_read = "allow"
|
||||||
|
fallible_impl_from = "allow"
|
||||||
|
fn_params_excessive_bools = "allow"
|
||||||
|
inline_always = "allow"
|
||||||
|
if_not_else = "allow"
|
||||||
|
items_after_statements = "allow"
|
||||||
|
match_wildcard_for_single_variants = "allow"
|
||||||
|
missing_errors_doc = "allow"
|
||||||
|
missing_panics_doc = "allow"
|
||||||
|
module_name_repetitions = "allow"
|
||||||
|
multiple_crate_versions = "allow"
|
||||||
|
option_if_let_else = "allow"
|
||||||
|
or_fun_call = "allow"
|
||||||
|
range_plus_one = "allow"
|
||||||
|
redundant_clone = "allow"
|
||||||
|
redundant_closure_for_method_calls = "allow"
|
||||||
|
ref_option = "allow"
|
||||||
|
similar_names = "allow"
|
||||||
|
string_lit_as_bytes = "allow"
|
||||||
|
struct_excessive_bools = "allow"
|
||||||
|
struct_field_names = "allow"
|
||||||
|
transmute_undefined_repr = "allow"
|
||||||
|
too_many_lines = "allow"
|
||||||
|
unnecessary_wraps = "allow"
|
||||||
|
unused_self = "allow"
|
||||||
|
used_underscore_items = "allow"
|
||||||
|
|
||||||
[profile.optimize]
|
[profile.optimize]
|
||||||
inherits = "release"
|
inherits = "release"
|
||||||
strip = true # Automatically strip symbols from the binary.
|
strip = true # Automatically strip symbols from the binary.
|
||||||
|
|
@ -56,7 +105,7 @@ clap = { version = "4.5.18", features = [
|
||||||
"unstable-styles",
|
"unstable-styles",
|
||||||
] }
|
] }
|
||||||
clap_complete = "4.5.29"
|
clap_complete = "4.5.29"
|
||||||
ctor = "0.2.8"
|
ctor = "0.2.9"
|
||||||
ctrlc = { version = "3.4.5", features = ["termination"] }
|
ctrlc = { version = "3.4.5", features = ["termination"] }
|
||||||
dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
|
dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
|
||||||
dirs = "5.0.1"
|
dirs = "5.0.1"
|
||||||
|
|
@ -96,9 +145,9 @@ walkdir = "2.5.0"
|
||||||
wasmparser = "0.217.0"
|
wasmparser = "0.217.0"
|
||||||
webbrowser = "1.0.2"
|
webbrowser = "1.0.2"
|
||||||
|
|
||||||
tree-sitter = { version = "0.24.0", path = "./lib" }
|
tree-sitter = { version = "0.24.5", path = "./lib" }
|
||||||
tree-sitter-generate = { version = "0.24.0", path = "./cli/generate" }
|
tree-sitter-generate = { version = "0.24.5", path = "./cli/generate" }
|
||||||
tree-sitter-loader = { version = "0.24.0", path = "./cli/loader" }
|
tree-sitter-loader = { version = "0.24.5", path = "./cli/loader" }
|
||||||
tree-sitter-config = { version = "0.24.0", path = "./cli/config" }
|
tree-sitter-config = { version = "0.24.5", path = "./cli/config" }
|
||||||
tree-sitter-highlight = { version = "0.24.0", path = "./highlight" }
|
tree-sitter-highlight = { version = "0.24.5", path = "./highlight" }
|
||||||
tree-sitter-tags = { version = "0.24.0", path = "./tags" }
|
tree-sitter-tags = { version = "0.24.5", path = "./tags" }
|
||||||
|
|
|
||||||
18
Makefile
18
Makefile
|
|
@ -2,7 +2,7 @@ ifeq ($(OS),Windows_NT)
|
||||||
$(error Windows is not supported)
|
$(error Windows is not supported)
|
||||||
endif
|
endif
|
||||||
|
|
||||||
VERSION := 0.24.1
|
VERSION := 0.24.7
|
||||||
DESCRIPTION := An incremental parsing system for programming tools
|
DESCRIPTION := An incremental parsing system for programming tools
|
||||||
HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
|
HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
|
||||||
|
|
||||||
|
|
@ -25,7 +25,7 @@ OBJ := $(SRC:.c=.o)
|
||||||
|
|
||||||
# define default flags, and override to append mandatory flags
|
# define default flags, and override to append mandatory flags
|
||||||
ARFLAGS := rcs
|
ARFLAGS := rcs
|
||||||
CFLAGS ?= -O3 -Wall -Wextra -Wshadow -pedantic
|
CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
|
||||||
override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
|
override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
|
||||||
override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include
|
override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include
|
||||||
|
|
||||||
|
|
@ -62,8 +62,8 @@ endif
|
||||||
|
|
||||||
tree-sitter.pc: lib/tree-sitter.pc.in
|
tree-sitter.pc: lib/tree-sitter.pc.in
|
||||||
sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
|
sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
|
||||||
-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR)|' \
|
-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
|
||||||
-e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR)|' \
|
-e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR:$(PREFIX)/%=%)|' \
|
||||||
-e 's|@PROJECT_DESCRIPTION@|$(DESCRIPTION)|' \
|
-e 's|@PROJECT_DESCRIPTION@|$(DESCRIPTION)|' \
|
||||||
-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
|
-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
|
||||||
-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
|
-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
|
||||||
|
|
@ -94,13 +94,13 @@ uninstall:
|
||||||
##### Dev targets #####
|
##### Dev targets #####
|
||||||
|
|
||||||
test:
|
test:
|
||||||
script/fetch-fixtures
|
cargo xtask fetch-fixtures
|
||||||
script/generate-fixtures
|
cargo xtask generate-fixtures
|
||||||
script/test
|
cargo xtask test
|
||||||
|
|
||||||
test_wasm:
|
test_wasm:
|
||||||
script/generate-fixtures-wasm
|
cargo xtask generate-fixtures-wasm
|
||||||
script/test-wasm
|
cargo xtask test-wasm
|
||||||
|
|
||||||
lint:
|
lint:
|
||||||
cargo update --workspace --locked --quiet
|
cargo update --workspace --locked --quiet
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
.{
|
.{
|
||||||
.name = "tree-sitter",
|
.name = "tree-sitter",
|
||||||
.version = "0.24.1",
|
.version = "0.24.7",
|
||||||
.paths = .{
|
.paths = .{
|
||||||
"build.zig",
|
"build.zig",
|
||||||
"build.zig.zon",
|
"build.zig.zon",
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,9 @@ license.workspace = true
|
||||||
keywords.workspace = true
|
keywords.workspace = true
|
||||||
categories.workspace = true
|
categories.workspace = true
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[[bin]]
|
[[bin]]
|
||||||
name = "tree-sitter"
|
name = "tree-sitter"
|
||||||
path = "src/main.rs"
|
path = "src/main.rs"
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,9 @@ license.workspace = true
|
||||||
keywords.workspace = true
|
keywords.workspace = true
|
||||||
categories.workspace = true
|
categories.workspace = true
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
dirs.workspace = true
|
dirs.workspace = true
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,9 @@ license.workspace = true
|
||||||
keywords.workspace = true
|
keywords.workspace = true
|
||||||
categories.workspace = true
|
categories.workspace = true
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
anyhow.workspace = true
|
anyhow.workspace = true
|
||||||
heck.workspace = true
|
heck.workspace = true
|
||||||
|
|
|
||||||
|
|
@ -70,18 +70,17 @@ impl<'a> Minimizer<'a> {
|
||||||
production_id: 0,
|
production_id: 0,
|
||||||
symbol,
|
symbol,
|
||||||
..
|
..
|
||||||
} => {
|
} if !self.simple_aliases.contains_key(symbol)
|
||||||
if !self.simple_aliases.contains_key(symbol)
|
&& !self.syntax_grammar.supertype_symbols.contains(symbol)
|
||||||
&& !self.syntax_grammar.supertype_symbols.contains(symbol)
|
&& !self.syntax_grammar.extra_symbols.contains(symbol)
|
||||||
&& !aliased_symbols.contains(symbol)
|
&& !aliased_symbols.contains(symbol)
|
||||||
&& self.syntax_grammar.variables[symbol.index].kind
|
&& self.syntax_grammar.variables[symbol.index].kind
|
||||||
!= VariableType::Named
|
!= VariableType::Named
|
||||||
&& (unit_reduction_symbol.is_none()
|
&& (unit_reduction_symbol.is_none()
|
||||||
|| unit_reduction_symbol == Some(symbol))
|
|| unit_reduction_symbol == Some(symbol)) =>
|
||||||
{
|
{
|
||||||
unit_reduction_symbol = Some(symbol);
|
unit_reduction_symbol = Some(symbol);
|
||||||
continue;
|
continue;
|
||||||
}
|
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -149,7 +149,7 @@ impl<'a> Interner<'a> {
|
||||||
fn check_single(&self, elements: &[Rule], name: Option<&str>) {
|
fn check_single(&self, elements: &[Rule], name: Option<&str>) {
|
||||||
if elements.len() == 1 && matches!(elements[0], Rule::String(_) | Rule::Pattern(_, _)) {
|
if elements.len() == 1 && matches!(elements[0], Rule::String(_) | Rule::Pattern(_, _)) {
|
||||||
eprintln!(
|
eprintln!(
|
||||||
"Warning: rule {} is just a `seq` or `choice` rule with a single element. This is unnecessary.",
|
"Warning: rule {} contains a `seq` or `choice` rule with a single element. This is unnecessary.",
|
||||||
name.unwrap_or_default()
|
name.unwrap_or_default()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -14,6 +14,7 @@ extern "C" {
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
#ifdef _MSC_VER
|
#ifdef _MSC_VER
|
||||||
|
#pragma warning(push)
|
||||||
#pragma warning(disable : 4101)
|
#pragma warning(disable : 4101)
|
||||||
#elif defined(__GNUC__) || defined(__clang__)
|
#elif defined(__GNUC__) || defined(__clang__)
|
||||||
#pragma GCC diagnostic push
|
#pragma GCC diagnostic push
|
||||||
|
|
@ -278,7 +279,7 @@ static inline void _array__splice(Array *self, size_t element_size,
|
||||||
#define _compare_int(a, b) ((int)*(a) - (int)(b))
|
#define _compare_int(a, b) ((int)*(a) - (int)(b))
|
||||||
|
|
||||||
#ifdef _MSC_VER
|
#ifdef _MSC_VER
|
||||||
#pragma warning(default : 4101)
|
#pragma warning(pop)
|
||||||
#elif defined(__GNUC__) || defined(__clang__)
|
#elif defined(__GNUC__) || defined(__clang__)
|
||||||
#pragma GCC diagnostic pop
|
#pragma GCC diagnostic pop
|
||||||
#endif
|
#endif
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,9 @@ license.workspace = true
|
||||||
keywords.workspace = true
|
keywords.workspace = true
|
||||||
categories.workspace = true
|
categories.workspace = true
|
||||||
|
|
||||||
|
[lints]
|
||||||
|
workspace = true
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
wasm = ["tree-sitter/wasm"]
|
wasm = ["tree-sitter/wasm"]
|
||||||
# TODO: For backward compatibility these must be enabled by default,
|
# TODO: For backward compatibility these must be enabled by default,
|
||||||
|
|
@ -24,6 +27,7 @@ cc.workspace = true
|
||||||
dirs.workspace = true
|
dirs.workspace = true
|
||||||
fs4.workspace = true
|
fs4.workspace = true
|
||||||
indoc.workspace = true
|
indoc.workspace = true
|
||||||
|
lazy_static.workspace = true
|
||||||
libloading.workspace = true
|
libloading.workspace = true
|
||||||
once_cell.workspace = true
|
once_cell.workspace = true
|
||||||
path-slash.workspace = true
|
path-slash.workspace = true
|
||||||
|
|
|
||||||
|
|
@ -1 +1 @@
|
||||||
3.1.64
|
3.1.64
|
||||||
|
|
@ -21,6 +21,7 @@ use anyhow::Error;
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use fs4::fs_std::FileExt;
|
use fs4::fs_std::FileExt;
|
||||||
use indoc::indoc;
|
use indoc::indoc;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
use libloading::{Library, Symbol};
|
use libloading::{Library, Symbol};
|
||||||
use once_cell::unsync::OnceCell;
|
use once_cell::unsync::OnceCell;
|
||||||
use path_slash::PathBufExt as _;
|
use path_slash::PathBufExt as _;
|
||||||
|
|
@ -38,6 +39,10 @@ use tree_sitter_highlight::HighlightConfiguration;
|
||||||
use tree_sitter_tags::{Error as TagsError, TagsConfiguration};
|
use tree_sitter_tags::{Error as TagsError, TagsConfiguration};
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref GRAMMAR_NAME_REGEX: Regex = Regex::new(r#""name":\s*"(.*?)""#).unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
pub const EMSCRIPTEN_TAG: &str = concat!("docker.io/emscripten/emsdk:", env!("EMSCRIPTEN_VERSION"));
|
pub const EMSCRIPTEN_TAG: &str = concat!("docker.io/emscripten/emsdk:", env!("EMSCRIPTEN_VERSION"));
|
||||||
|
|
||||||
#[derive(Default, Deserialize, Serialize)]
|
#[derive(Default, Deserialize, Serialize)]
|
||||||
|
|
@ -141,12 +146,10 @@ pub struct TreeSitterJSON {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TreeSitterJSON {
|
impl TreeSitterJSON {
|
||||||
pub fn from_file(path: &Path) -> Option<Self> {
|
pub fn from_file(path: &Path) -> Result<Self> {
|
||||||
if let Ok(file) = fs::File::open(path.join("tree-sitter.json")) {
|
Ok(serde_json::from_str(&fs::read_to_string(
|
||||||
Some(serde_json::from_reader(file).ok()?)
|
path.join("tree-sitter.json"),
|
||||||
} else {
|
)?)?)
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn has_multiple_language_configs(&self) -> bool {
|
pub fn has_multiple_language_configs(&self) -> bool {
|
||||||
|
|
@ -161,7 +164,8 @@ pub struct Grammar {
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
pub camelcase: Option<String>,
|
pub camelcase: Option<String>,
|
||||||
pub scope: String,
|
pub scope: String,
|
||||||
pub path: PathBuf,
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub path: Option<PathBuf>,
|
||||||
#[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
|
#[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
|
||||||
pub external_files: PathsJSON,
|
pub external_files: PathsJSON,
|
||||||
pub file_types: Option<Vec<String>>,
|
pub file_types: Option<Vec<String>>,
|
||||||
|
|
@ -192,7 +196,6 @@ pub struct Metadata {
|
||||||
pub authors: Option<Vec<Author>>,
|
pub authors: Option<Vec<Author>>,
|
||||||
#[serde(skip_serializing_if = "Option::is_none")]
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
pub links: Option<Links>,
|
pub links: Option<Links>,
|
||||||
// #[serde(skip_serializing_if = "Option::is_none")]
|
|
||||||
#[serde(skip)]
|
#[serde(skip)]
|
||||||
pub namespace: Option<String>,
|
pub namespace: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
@ -600,6 +603,13 @@ impl Loader {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn language_for_configuration(
|
||||||
|
&self,
|
||||||
|
configuration: &LanguageConfiguration,
|
||||||
|
) -> Result<Language> {
|
||||||
|
self.language_for_id(configuration.language_id)
|
||||||
|
}
|
||||||
|
|
||||||
fn language_for_id(&self, id: usize) -> Result<Language> {
|
fn language_for_id(&self, id: usize) -> Result<Language> {
|
||||||
let (path, language, externals) = &self.languages_by_id[id];
|
let (path, language, externals) = &self.languages_by_id[id];
|
||||||
language
|
language
|
||||||
|
|
@ -628,27 +638,7 @@ impl Loader {
|
||||||
|
|
||||||
pub fn load_language_at_path(&self, mut config: CompileConfig) -> Result<Language> {
|
pub fn load_language_at_path(&self, mut config: CompileConfig) -> Result<Language> {
|
||||||
let grammar_path = config.src_path.join("grammar.json");
|
let grammar_path = config.src_path.join("grammar.json");
|
||||||
|
config.name = Self::grammar_json_name(&grammar_path)?;
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct GrammarJSON {
|
|
||||||
name: String,
|
|
||||||
}
|
|
||||||
let mut grammar_file = fs::File::open(&grammar_path).with_context(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to read grammar.json file at the following path:\n{:?}",
|
|
||||||
&grammar_path
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
let grammar_json: GrammarJSON = serde_json::from_reader(BufReader::new(&mut grammar_file))
|
|
||||||
.with_context(|| {
|
|
||||||
format!(
|
|
||||||
"Failed to parse grammar.json file at the following path:\n{:?}",
|
|
||||||
&grammar_path
|
|
||||||
)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
config.name = grammar_json.name;
|
|
||||||
|
|
||||||
self.load_language_at_path_with_name(config)
|
self.load_language_at_path_with_name(config)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -856,7 +846,7 @@ impl Loader {
|
||||||
format!("Failed to execute the C compiler with the following command:\n{command:?}")
|
format!("Failed to execute the C compiler with the following command:\n{command:?}")
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
lock_file.unlock()?;
|
FileExt::unlock(lock_file)?;
|
||||||
fs::remove_file(lock_path)?;
|
fs::remove_file(lock_path)?;
|
||||||
|
|
||||||
if output.status.success() {
|
if output.status.success() {
|
||||||
|
|
@ -1125,27 +1115,16 @@ impl Loader {
|
||||||
parser_path: &Path,
|
parser_path: &Path,
|
||||||
set_current_path_config: bool,
|
set_current_path_config: bool,
|
||||||
) -> Result<&[LanguageConfiguration]> {
|
) -> Result<&[LanguageConfiguration]> {
|
||||||
#[derive(Deserialize)]
|
|
||||||
struct GrammarJSON {
|
|
||||||
name: String,
|
|
||||||
}
|
|
||||||
|
|
||||||
let initial_language_configuration_count = self.language_configurations.len();
|
let initial_language_configuration_count = self.language_configurations.len();
|
||||||
|
|
||||||
if let Some(config) = TreeSitterJSON::from_file(parser_path) {
|
let ts_json = TreeSitterJSON::from_file(parser_path);
|
||||||
|
if let Ok(config) = ts_json {
|
||||||
let language_count = self.languages_by_id.len();
|
let language_count = self.languages_by_id.len();
|
||||||
for grammar in config.grammars {
|
for grammar in config.grammars {
|
||||||
// Determine the path to the parser directory. This can be specified in
|
// Determine the path to the parser directory. This can be specified in
|
||||||
// the package.json, but defaults to the directory containing the
|
// the tree-sitter.json, but defaults to the directory containing the
|
||||||
// package.json.
|
// tree-sitter.json.
|
||||||
let language_path = parser_path.join(grammar.path);
|
let language_path = parser_path.join(grammar.path.unwrap_or(PathBuf::from(".")));
|
||||||
|
|
||||||
let grammar_path = language_path.join("src").join("grammar.json");
|
|
||||||
let mut grammar_file =
|
|
||||||
fs::File::open(grammar_path).with_context(|| "Failed to read grammar.json")?;
|
|
||||||
let grammar_json: GrammarJSON =
|
|
||||||
serde_json::from_reader(BufReader::new(&mut grammar_file))
|
|
||||||
.with_context(|| "Failed to parse grammar.json")?;
|
|
||||||
|
|
||||||
// Determine if a previous language configuration in this package.json file
|
// Determine if a previous language configuration in this package.json file
|
||||||
// already uses the same language.
|
// already uses the same language.
|
||||||
|
|
@ -1184,7 +1163,7 @@ impl Loader {
|
||||||
|
|
||||||
let configuration = LanguageConfiguration {
|
let configuration = LanguageConfiguration {
|
||||||
root_path: parser_path.to_path_buf(),
|
root_path: parser_path.to_path_buf(),
|
||||||
language_name: grammar_json.name,
|
language_name: grammar.name,
|
||||||
scope: Some(grammar.scope),
|
scope: Some(grammar.scope),
|
||||||
language_id,
|
language_id,
|
||||||
file_types: grammar.file_types.unwrap_or_default(),
|
file_types: grammar.file_types.unwrap_or_default(),
|
||||||
|
|
@ -1230,20 +1209,30 @@ impl Loader {
|
||||||
Some(self.language_configurations.len() - 1);
|
Some(self.language_configurations.len() - 1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
} else if let Err(e) = ts_json {
|
||||||
|
match e.downcast_ref::<std::io::Error>() {
|
||||||
|
// This is noisy, and not really an issue.
|
||||||
|
Some(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||||
|
_ => {
|
||||||
|
eprintln!(
|
||||||
|
"Warning: Failed to parse {} -- {e}",
|
||||||
|
parser_path.join("tree-sitter.json").display()
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If we didn't find any language configurations in the tree-sitter.json file,
|
||||||
|
// but there is a grammar.json file, then use the grammar file to form a simple
|
||||||
|
// language configuration.
|
||||||
if self.language_configurations.len() == initial_language_configuration_count
|
if self.language_configurations.len() == initial_language_configuration_count
|
||||||
&& parser_path.join("src").join("grammar.json").exists()
|
&& parser_path.join("src").join("grammar.json").exists()
|
||||||
{
|
{
|
||||||
let grammar_path = parser_path.join("src").join("grammar.json");
|
let grammar_path = parser_path.join("src").join("grammar.json");
|
||||||
let mut grammar_file =
|
let language_name = Self::grammar_json_name(&grammar_path)?;
|
||||||
fs::File::open(grammar_path).with_context(|| "Failed to read grammar.json")?;
|
|
||||||
let grammar_json: GrammarJSON =
|
|
||||||
serde_json::from_reader(BufReader::new(&mut grammar_file))
|
|
||||||
.with_context(|| "Failed to parse grammar.json")?;
|
|
||||||
let configuration = LanguageConfiguration {
|
let configuration = LanguageConfiguration {
|
||||||
root_path: parser_path.to_owned(),
|
root_path: parser_path.to_owned(),
|
||||||
language_name: grammar_json.name,
|
language_name,
|
||||||
language_id: self.languages_by_id.len(),
|
language_id: self.languages_by_id.len(),
|
||||||
file_types: Vec::new(),
|
file_types: Vec::new(),
|
||||||
scope: None,
|
scope: None,
|
||||||
|
|
@ -1279,6 +1268,36 @@ impl Loader {
|
||||||
pattern.and_then(|r| RegexBuilder::new(r).multi_line(true).build().ok())
|
pattern.and_then(|r| RegexBuilder::new(r).multi_line(true).build().ok())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn grammar_json_name(grammar_path: &Path) -> Result<String> {
|
||||||
|
let file = fs::File::open(grammar_path).with_context(|| {
|
||||||
|
format!("Failed to open grammar.json at {}", grammar_path.display())
|
||||||
|
})?;
|
||||||
|
|
||||||
|
let first_three_lines = BufReader::new(file)
|
||||||
|
.lines()
|
||||||
|
.take(3)
|
||||||
|
.collect::<Result<Vec<_>, _>>()
|
||||||
|
.with_context(|| {
|
||||||
|
format!(
|
||||||
|
"Failed to read the first three lines of grammar.json at {}",
|
||||||
|
grammar_path.display()
|
||||||
|
)
|
||||||
|
})?
|
||||||
|
.join("\n");
|
||||||
|
|
||||||
|
let name = GRAMMAR_NAME_REGEX
|
||||||
|
.captures(&first_three_lines)
|
||||||
|
.and_then(|c| c.get(1))
|
||||||
|
.ok_or_else(|| {
|
||||||
|
anyhow!(
|
||||||
|
"Failed to parse the language name from grammar.json at {}",
|
||||||
|
grammar_path.display()
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
Ok(name.as_str().to_string())
|
||||||
|
}
|
||||||
|
|
||||||
pub fn select_language(
|
pub fn select_language(
|
||||||
&mut self,
|
&mut self,
|
||||||
path: &Path,
|
path: &Path,
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
{
|
{
|
||||||
"name": "tree-sitter-cli",
|
"name": "tree-sitter-cli",
|
||||||
"version": "0.24.1",
|
"version": "0.24.7",
|
||||||
"author": "Max Brunsfeld",
|
"author": "Max Brunsfeld",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
|
|
|
||||||
426
cli/src/init.rs
426
cli/src/init.rs
|
|
@ -1,17 +1,15 @@
|
||||||
use std::{
|
use std::{
|
||||||
fs::{self, File},
|
fs,
|
||||||
io::BufReader,
|
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
str::{self, FromStr},
|
str::{self, FromStr},
|
||||||
};
|
};
|
||||||
|
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
|
use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
|
||||||
use indoc::indoc;
|
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
use semver::Version;
|
use semver::Version;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use serde_json::{json, Map, Value};
|
use serde_json::{Map, Value};
|
||||||
use tree_sitter_generate::write_file;
|
use tree_sitter_generate::write_file;
|
||||||
use tree_sitter_loader::{
|
use tree_sitter_loader::{
|
||||||
Author, Bindings, Grammar, Links, Metadata, PackageJSON, PackageJSONAuthor,
|
Author, Bindings, Grammar, Links, Metadata, PackageJSON, PackageJSONAuthor,
|
||||||
|
|
@ -77,7 +75,7 @@ const BINDING_GYP_TEMPLATE: &str = include_str!("./templates/binding.gyp");
|
||||||
const BINDING_TEST_JS_TEMPLATE: &str = include_str!("./templates/binding_test.js");
|
const BINDING_TEST_JS_TEMPLATE: &str = include_str!("./templates/binding_test.js");
|
||||||
|
|
||||||
const MAKEFILE_TEMPLATE: &str = include_str!("./templates/makefile");
|
const MAKEFILE_TEMPLATE: &str = include_str!("./templates/makefile");
|
||||||
const CMAKELISTS_TXT_TEMPLATE: &str = include_str!("./templates/cmakelists.txt");
|
const CMAKELISTS_TXT_TEMPLATE: &str = include_str!("./templates/cmakelists.cmake");
|
||||||
const PARSER_NAME_H_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.h");
|
const PARSER_NAME_H_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.h");
|
||||||
const PARSER_NAME_PC_IN_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.pc.in");
|
const PARSER_NAME_PC_IN_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.pc.in");
|
||||||
|
|
||||||
|
|
@ -112,22 +110,6 @@ pub fn path_in_ignore(repo_path: &Path) -> bool {
|
||||||
.any(|dir| repo_path.ends_with(dir))
|
.any(|dir| repo_path.ends_with(dir))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert_after(
|
|
||||||
map: Map<String, Value>,
|
|
||||||
after: &str,
|
|
||||||
key: &str,
|
|
||||||
value: Value,
|
|
||||||
) -> Map<String, Value> {
|
|
||||||
let mut entries = map.into_iter().collect::<Vec<_>>();
|
|
||||||
let after_index = entries
|
|
||||||
.iter()
|
|
||||||
.position(|(k, _)| k == after)
|
|
||||||
.unwrap_or(entries.len() - 1)
|
|
||||||
+ 1;
|
|
||||||
entries.insert(after_index, (key.to_string(), value));
|
|
||||||
entries.into_iter().collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Serialize, Deserialize, Clone)]
|
#[derive(Serialize, Deserialize, Clone)]
|
||||||
pub struct JsonConfigOpts {
|
pub struct JsonConfigOpts {
|
||||||
pub name: String,
|
pub name: String,
|
||||||
|
|
@ -153,9 +135,9 @@ impl JsonConfigOpts {
|
||||||
name: self.name.clone(),
|
name: self.name.clone(),
|
||||||
camelcase: Some(self.camelcase),
|
camelcase: Some(self.camelcase),
|
||||||
scope: self.scope,
|
scope: self.scope,
|
||||||
path: PathBuf::from("."),
|
path: None,
|
||||||
external_files: PathsJSON::Empty,
|
external_files: PathsJSON::Empty,
|
||||||
file_types: None,
|
file_types: Some(self.file_types),
|
||||||
highlights: PathsJSON::Empty,
|
highlights: PathsJSON::Empty,
|
||||||
injections: PathsJSON::Empty,
|
injections: PathsJSON::Empty,
|
||||||
locals: PathsJSON::Empty,
|
locals: PathsJSON::Empty,
|
||||||
|
|
@ -171,7 +153,7 @@ impl JsonConfigOpts {
|
||||||
authors: Some(vec![Author {
|
authors: Some(vec![Author {
|
||||||
name: self.author,
|
name: self.author,
|
||||||
email: self.email,
|
email: self.email,
|
||||||
url: None,
|
url: self.url.map(|url| url.to_string()),
|
||||||
}]),
|
}]),
|
||||||
links: Some(Links {
|
links: Some(Links {
|
||||||
repository: self.repository.unwrap_or_else(|| {
|
repository: self.repository.unwrap_or_else(|| {
|
||||||
|
|
@ -216,6 +198,7 @@ struct GenerateOpts<'a> {
|
||||||
description: Option<&'a str>,
|
description: Option<&'a str>,
|
||||||
repository: Option<&'a str>,
|
repository: Option<&'a str>,
|
||||||
version: &'a Version,
|
version: &'a Version,
|
||||||
|
camel_parser_name: &'a str,
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: remove in 0.25
|
// TODO: remove in 0.25
|
||||||
|
|
@ -228,9 +211,9 @@ pub fn migrate_package_json(repo_path: &Path) -> Result<bool> {
|
||||||
root_path.join("tree-sitter.json"),
|
root_path.join("tree-sitter.json"),
|
||||||
);
|
);
|
||||||
|
|
||||||
let old_config = serde_json::from_reader::<_, PackageJSON>(
|
let old_config = serde_json::from_str::<PackageJSON>(
|
||||||
File::open(&package_json_path)
|
&fs::read_to_string(&package_json_path)
|
||||||
.with_context(|| format!("Failed to open package.json in {}", root_path.display()))?,
|
.with_context(|| format!("Failed to read package.json in {}", root_path.display()))?,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
if old_config.tree_sitter.is_none() {
|
if old_config.tree_sitter.is_none() {
|
||||||
|
|
@ -249,7 +232,7 @@ pub fn migrate_package_json(repo_path: &Path) -> Result<bool> {
|
||||||
name: name.clone(),
|
name: name.clone(),
|
||||||
camelcase: Some(name.to_upper_camel_case()),
|
camelcase: Some(name.to_upper_camel_case()),
|
||||||
scope: l.scope.unwrap_or_else(|| format!("source.{name}")),
|
scope: l.scope.unwrap_or_else(|| format!("source.{name}")),
|
||||||
path: l.path,
|
path: Some(l.path),
|
||||||
external_files: l.external_files,
|
external_files: l.external_files,
|
||||||
file_types: l.file_types,
|
file_types: l.file_types,
|
||||||
highlights: l.highlights,
|
highlights: l.highlights,
|
||||||
|
|
@ -352,19 +335,19 @@ pub fn migrate_package_json(repo_path: &Path) -> Result<bool> {
|
||||||
|
|
||||||
write_file(
|
write_file(
|
||||||
&tree_sitter_json_path,
|
&tree_sitter_json_path,
|
||||||
serde_json::to_string_pretty(&new_config)?,
|
serde_json::to_string_pretty(&new_config)? + "\n",
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
// Remove the `tree-sitter` field in-place
|
// Remove the `tree-sitter` field in-place
|
||||||
let mut package_json = serde_json::from_reader::<_, Map<String, Value>>(
|
let mut package_json = serde_json::from_str::<Map<String, Value>>(
|
||||||
File::open(&package_json_path)
|
&fs::read_to_string(&package_json_path)
|
||||||
.with_context(|| format!("Failed to open package.json in {}", root_path.display()))?,
|
.with_context(|| format!("Failed to read package.json in {}", root_path.display()))?,
|
||||||
)
|
)
|
||||||
.unwrap();
|
.unwrap();
|
||||||
package_json.remove("tree-sitter");
|
package_json.remove("tree-sitter");
|
||||||
write_file(
|
write_file(
|
||||||
&root_path.join("package.json"),
|
&root_path.join("package.json"),
|
||||||
serde_json::to_string_pretty(&package_json)?,
|
serde_json::to_string_pretty(&package_json)? + "\n",
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
println!("Warning: your package.json's `tree-sitter` field has been automatically migrated to the new `tree-sitter.json` config file");
|
println!("Warning: your package.json's `tree-sitter` field has been automatically migrated to the new `tree-sitter.json` config file");
|
||||||
|
|
@ -383,8 +366,6 @@ pub fn generate_grammar_files(
|
||||||
) -> Result<()> {
|
) -> Result<()> {
|
||||||
let dashed_language_name = language_name.to_kebab_case();
|
let dashed_language_name = language_name.to_kebab_case();
|
||||||
|
|
||||||
// TODO: remove legacy code updates in v0.24.0
|
|
||||||
|
|
||||||
let tree_sitter_config = missing_path_else(
|
let tree_sitter_config = missing_path_else(
|
||||||
repo_path.join("tree-sitter.json"),
|
repo_path.join("tree-sitter.json"),
|
||||||
true,
|
true,
|
||||||
|
|
@ -407,12 +388,16 @@ pub fn generate_grammar_files(
|
||||||
},
|
},
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let tree_sitter_config = serde_json::from_reader::<_, TreeSitterJSON>(
|
let tree_sitter_config = serde_json::from_str::<TreeSitterJSON>(
|
||||||
File::open(tree_sitter_config.as_path())
|
&fs::read_to_string(tree_sitter_config.as_path())
|
||||||
.with_context(|| "Failed to open tree-sitter.json")?,
|
.with_context(|| "Failed to read tree-sitter.json")?,
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
let authors = tree_sitter_config.metadata.authors.as_ref();
|
let authors = tree_sitter_config.metadata.authors.as_ref();
|
||||||
|
let camel_name = tree_sitter_config.grammars[0]
|
||||||
|
.camelcase
|
||||||
|
.clone()
|
||||||
|
.unwrap_or_else(|| language_name.to_upper_camel_case());
|
||||||
|
|
||||||
let generate_opts = GenerateOpts {
|
let generate_opts = GenerateOpts {
|
||||||
author_name: authors
|
author_name: authors
|
||||||
|
|
@ -432,126 +417,18 @@ pub fn generate_grammar_files(
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.map(|l| l.repository.as_str()),
|
.map(|l| l.repository.as_str()),
|
||||||
version: &tree_sitter_config.metadata.version,
|
version: &tree_sitter_config.metadata.version,
|
||||||
|
camel_parser_name: &camel_name,
|
||||||
};
|
};
|
||||||
|
|
||||||
// Create or update package.json
|
// Create package.json
|
||||||
missing_path_else(
|
missing_path(repo_path.join("package.json"), |path| {
|
||||||
repo_path.join("package.json"),
|
generate_file(
|
||||||
allow_update,
|
path,
|
||||||
|path| {
|
PACKAGE_JSON_TEMPLATE,
|
||||||
generate_file(
|
dashed_language_name.as_str(),
|
||||||
path,
|
&generate_opts,
|
||||||
PACKAGE_JSON_TEMPLATE,
|
)
|
||||||
dashed_language_name.as_str(),
|
})?;
|
||||||
&generate_opts,
|
|
||||||
)
|
|
||||||
},
|
|
||||||
|path| {
|
|
||||||
let package_json_str =
|
|
||||||
fs::read_to_string(path).with_context(|| "Failed to read package.json")?;
|
|
||||||
let mut package_json = serde_json::from_str::<Map<String, Value>>(&package_json_str)
|
|
||||||
.with_context(|| "Failed to parse package.json")?;
|
|
||||||
let mut updated = false;
|
|
||||||
|
|
||||||
let dependencies = package_json
|
|
||||||
.entry("dependencies".to_string())
|
|
||||||
.or_insert_with(|| Value::Object(Map::new()))
|
|
||||||
.as_object_mut()
|
|
||||||
.unwrap();
|
|
||||||
if dependencies.remove("nan").is_some() {
|
|
||||||
eprintln!("Replacing nan dependency with node-addon-api in package.json");
|
|
||||||
dependencies.insert("node-addon-api".to_string(), "^8.0.0".into());
|
|
||||||
updated = true;
|
|
||||||
}
|
|
||||||
if !dependencies.contains_key("node-gyp-build") {
|
|
||||||
eprintln!("Adding node-gyp-build dependency to package.json");
|
|
||||||
dependencies.insert("node-gyp-build".to_string(), "^4.8.1".into());
|
|
||||||
updated = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
let dev_dependencies = package_json
|
|
||||||
.entry("devDependencies".to_string())
|
|
||||||
.or_insert_with(|| Value::Object(Map::new()))
|
|
||||||
.as_object_mut()
|
|
||||||
.unwrap();
|
|
||||||
if !dev_dependencies.contains_key("prebuildify") {
|
|
||||||
eprintln!("Adding prebuildify devDependency to package.json");
|
|
||||||
dev_dependencies.insert("prebuildify".to_string(), "^6.0.1".into());
|
|
||||||
updated = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
let node_test = "node --test bindings/node/*_test.js";
|
|
||||||
let scripts = package_json
|
|
||||||
.entry("scripts".to_string())
|
|
||||||
.or_insert_with(|| Value::Object(Map::new()))
|
|
||||||
.as_object_mut()
|
|
||||||
.unwrap();
|
|
||||||
if !scripts.get("test").is_some_and(|v| v == node_test) {
|
|
||||||
eprintln!("Updating package.json scripts");
|
|
||||||
*scripts = Map::from_iter([
|
|
||||||
("install".to_string(), "node-gyp-build".into()),
|
|
||||||
("prestart".to_string(), "tree-sitter build --wasm".into()),
|
|
||||||
("start".to_string(), "tree-sitter playground".into()),
|
|
||||||
("test".to_string(), node_test.into()),
|
|
||||||
]);
|
|
||||||
updated = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// insert `peerDependencies` after `dependencies`
|
|
||||||
if !package_json.contains_key("peerDependencies") {
|
|
||||||
eprintln!("Adding peerDependencies to package.json");
|
|
||||||
package_json = insert_after(
|
|
||||||
package_json,
|
|
||||||
"dependencies",
|
|
||||||
"peerDependencies",
|
|
||||||
json!({"tree-sitter": "^0.21.1"}),
|
|
||||||
);
|
|
||||||
|
|
||||||
package_json = insert_after(
|
|
||||||
package_json,
|
|
||||||
"peerDependencies",
|
|
||||||
"peerDependenciesMeta",
|
|
||||||
json!({"tree_sitter": {"optional": true}}),
|
|
||||||
);
|
|
||||||
updated = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// insert `types` right after `main`
|
|
||||||
if !package_json.contains_key("types") {
|
|
||||||
eprintln!("Adding types to package.json");
|
|
||||||
package_json = insert_after(package_json, "main", "types", "bindings/node".into());
|
|
||||||
updated = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
// insert `files` right after `keywords`
|
|
||||||
if !package_json.contains_key("files") {
|
|
||||||
eprintln!("Adding files to package.json");
|
|
||||||
package_json = insert_after(
|
|
||||||
package_json,
|
|
||||||
"keywords",
|
|
||||||
"files",
|
|
||||||
json!([
|
|
||||||
"grammar.js",
|
|
||||||
"binding.gyp",
|
|
||||||
"prebuilds/**",
|
|
||||||
"bindings/node/*",
|
|
||||||
"queries/*",
|
|
||||||
"src/**",
|
|
||||||
"*.wasm"
|
|
||||||
]),
|
|
||||||
);
|
|
||||||
updated = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if updated {
|
|
||||||
let mut package_json_str = serde_json::to_string_pretty(&package_json)?;
|
|
||||||
package_json_str.push('\n');
|
|
||||||
write_file(path, package_json_str)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// Do not create a grammar.js file in a repo with multiple language configs
|
// Do not create a grammar.js file in a repo with multiple language configs
|
||||||
if !tree_sitter_config.has_multiple_language_configs() {
|
if !tree_sitter_config.has_multiple_language_configs() {
|
||||||
|
|
@ -580,83 +457,22 @@ pub fn generate_grammar_files(
|
||||||
// Generate Rust bindings
|
// Generate Rust bindings
|
||||||
if tree_sitter_config.bindings.rust {
|
if tree_sitter_config.bindings.rust {
|
||||||
missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
|
missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
|
||||||
missing_path_else(
|
missing_path(path.join("lib.rs"), |path| {
|
||||||
path.join("lib.rs"),
|
generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts)
|
||||||
allow_update,
|
})?;
|
||||||
|path| generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts),
|
|
||||||
|path| {
|
|
||||||
let lib_rs =
|
|
||||||
fs::read_to_string(path).with_context(|| "Failed to read lib.rs")?;
|
|
||||||
if !lib_rs.contains("tree_sitter_language") {
|
|
||||||
generate_file(path, LIB_RS_TEMPLATE, language_name, &generate_opts)?;
|
|
||||||
eprintln!("Updated lib.rs with `tree_sitter_language` dependency");
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
missing_path_else(
|
missing_path(path.join("build.rs"), |path| {
|
||||||
path.join("build.rs"),
|
generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts)
|
||||||
allow_update,
|
})?;
|
||||||
|path| generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts),
|
|
||||||
|path| {
|
|
||||||
let build_rs =
|
|
||||||
fs::read_to_string(path).with_context(|| "Failed to read build.rs")?;
|
|
||||||
if !build_rs.contains("-utf-8") {
|
|
||||||
let index = build_rs
|
|
||||||
.find(" let parser_path = src_dir.join(\"parser.c\")")
|
|
||||||
.ok_or_else(|| anyhow!(indoc!{
|
|
||||||
"Failed to auto-update build.rs with the `/utf-8` flag for windows.
|
|
||||||
To fix this, remove `bindings/rust/build.rs` and re-run `tree-sitter generate`"}))?;
|
|
||||||
|
|
||||||
let build_rs = format!(
|
missing_path(repo_path.join("Cargo.toml"), |path| {
|
||||||
"{}{}{}\n{}",
|
generate_file(
|
||||||
&build_rs[..index],
|
path,
|
||||||
" #[cfg(target_env = \"msvc\")]\n",
|
CARGO_TOML_TEMPLATE,
|
||||||
" c_config.flag(\"-utf-8\");\n",
|
dashed_language_name.as_str(),
|
||||||
&build_rs[index..]
|
&generate_opts,
|
||||||
);
|
)
|
||||||
|
})?;
|
||||||
write_file(path, build_rs)?;
|
|
||||||
eprintln!("Updated build.rs with the /utf-8 flag for Windows compilation");
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
missing_path_else(
|
|
||||||
repo_path.join("Cargo.toml"),
|
|
||||||
allow_update,
|
|
||||||
|path| generate_file(path, CARGO_TOML_TEMPLATE, dashed_language_name.as_str(), &generate_opts),
|
|
||||||
|path| {
|
|
||||||
let cargo_toml =
|
|
||||||
fs::read_to_string(path).with_context(|| "Failed to read Cargo.toml")?;
|
|
||||||
if !cargo_toml.contains("tree-sitter-language") {
|
|
||||||
let start_index = cargo_toml
|
|
||||||
.find("tree-sitter = \"")
|
|
||||||
.ok_or_else(|| anyhow!("Failed to find the `tree-sitter` dependency in Cargo.toml"))?;
|
|
||||||
|
|
||||||
let version_start_index = start_index + "tree-sitter = \"".len();
|
|
||||||
let version_end_index = cargo_toml[version_start_index..]
|
|
||||||
.find('\"')
|
|
||||||
.map(|i| i + version_start_index)
|
|
||||||
.ok_or_else(|| anyhow!("Failed to find the end of the `tree-sitter` version in Cargo.toml"))?;
|
|
||||||
|
|
||||||
let cargo_toml = format!(
|
|
||||||
"{}{}{}\n{}\n{}",
|
|
||||||
&cargo_toml[..start_index],
|
|
||||||
"tree-sitter-language = \"0.1.0\"",
|
|
||||||
&cargo_toml[version_end_index + 1..],
|
|
||||||
"[dev-dependencies]",
|
|
||||||
"tree-sitter = \"0.23\"",
|
|
||||||
);
|
|
||||||
|
|
||||||
write_file(path, cargo_toml)?;
|
|
||||||
eprintln!("Updated Cargo.toml with the `tree-sitter-language` dependency");
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
})?;
|
})?;
|
||||||
|
|
@ -670,10 +486,8 @@ pub fn generate_grammar_files(
|
||||||
allow_update,
|
allow_update,
|
||||||
|path| generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts),
|
|path| generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts),
|
||||||
|path| {
|
|path| {
|
||||||
let index_js =
|
let contents = fs::read_to_string(path)?;
|
||||||
fs::read_to_string(path).with_context(|| "Failed to read index.js")?;
|
if !contents.contains("bun") {
|
||||||
if index_js.contains("../../build/Release") {
|
|
||||||
eprintln!("Replacing index.js with new binding API");
|
|
||||||
generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts)?;
|
generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts)?;
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
|
@ -693,36 +507,13 @@ pub fn generate_grammar_files(
|
||||||
)
|
)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
missing_path_else(
|
missing_path(path.join("binding.cc"), |path| {
|
||||||
path.join("binding.cc"),
|
generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts)
|
||||||
allow_update,
|
})?;
|
||||||
|path| generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts),
|
|
||||||
|path| {
|
|
||||||
let binding_cc =
|
|
||||||
fs::read_to_string(path).with_context(|| "Failed to read binding.cc")?;
|
|
||||||
if binding_cc.contains("NAN_METHOD(New) {}") {
|
|
||||||
eprintln!("Replacing binding.cc with new binding API");
|
|
||||||
generate_file(path, JS_BINDING_CC_TEMPLATE, language_name, &generate_opts)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// Create binding.gyp, or update it with new binding API.
|
missing_path(repo_path.join("binding.gyp"), |path| {
|
||||||
missing_path_else(
|
generate_file(path, BINDING_GYP_TEMPLATE, language_name, &generate_opts)
|
||||||
repo_path.join("binding.gyp"),
|
})?;
|
||||||
allow_update,
|
|
||||||
|path| generate_file(path, BINDING_GYP_TEMPLATE, language_name, &generate_opts),
|
|
||||||
|path| {
|
|
||||||
let binding_gyp =
|
|
||||||
fs::read_to_string(path).with_context(|| "Failed to read binding.gyp")?;
|
|
||||||
if binding_gyp.contains("require('nan')") {
|
|
||||||
eprintln!("Replacing binding.gyp with new binding API");
|
|
||||||
generate_file(path, BINDING_GYP_TEMPLATE, language_name, &generate_opts)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
})?;
|
})?;
|
||||||
|
|
@ -752,9 +543,20 @@ pub fn generate_grammar_files(
|
||||||
generate_file(path, MAKEFILE_TEMPLATE, language_name, &generate_opts)
|
generate_file(path, MAKEFILE_TEMPLATE, language_name, &generate_opts)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
missing_path(repo_path.join("CMakeLists.txt"), |path| {
|
missing_path_else(
|
||||||
generate_file(path, CMAKELISTS_TXT_TEMPLATE, language_name, &generate_opts)
|
repo_path.join("CMakeLists.txt"),
|
||||||
})?;
|
allow_update,
|
||||||
|
|path| generate_file(path, CMAKELISTS_TXT_TEMPLATE, language_name, &generate_opts),
|
||||||
|
|path| {
|
||||||
|
let contents = fs::read_to_string(path)?;
|
||||||
|
let old = "add_custom_target(test";
|
||||||
|
if contents.contains(old) {
|
||||||
|
write_file(path, contents.replace(old, "add_custom_target(ts-test"))
|
||||||
|
} else {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
})?;
|
})?;
|
||||||
|
|
@ -767,39 +569,14 @@ pub fn generate_grammar_files(
|
||||||
generate_file(path, BINDING_GO_TEMPLATE, language_name, &generate_opts)
|
generate_file(path, BINDING_GO_TEMPLATE, language_name, &generate_opts)
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
missing_path_else(
|
missing_path(path.join("binding_test.go"), |path| {
|
||||||
path.join("binding_test.go"),
|
generate_file(
|
||||||
allow_update,
|
path,
|
||||||
|path| {
|
BINDING_TEST_GO_TEMPLATE,
|
||||||
generate_file(
|
language_name,
|
||||||
path,
|
&generate_opts,
|
||||||
BINDING_TEST_GO_TEMPLATE,
|
)
|
||||||
language_name,
|
})?;
|
||||||
&generate_opts,
|
|
||||||
)
|
|
||||||
},
|
|
||||||
|path| {
|
|
||||||
let binding_test_go = fs::read_to_string(path)
|
|
||||||
.with_context(|| "Failed to read binding_test.go")?;
|
|
||||||
if binding_test_go.contains("smacker") {
|
|
||||||
eprintln!("Replacing binding_test.go with new binding API");
|
|
||||||
generate_file(
|
|
||||||
path,
|
|
||||||
BINDING_TEST_GO_TEMPLATE,
|
|
||||||
language_name,
|
|
||||||
&generate_opts,
|
|
||||||
)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
// Delete the old go.mod file that lives inside bindings/go, it now lives in the root
|
|
||||||
// dir
|
|
||||||
let go_mod_path = path.join("go.mod");
|
|
||||||
if allow_update && go_mod_path.exists() {
|
|
||||||
fs::remove_file(go_mod_path).with_context(|| "Failed to remove old go.mod file")?;
|
|
||||||
}
|
|
||||||
|
|
||||||
missing_path(repo_path.join("go.mod"), |path| {
|
missing_path(repo_path.join("go.mod"), |path| {
|
||||||
generate_file(path, GO_MOD_TEMPLATE, language_name, &generate_opts)
|
generate_file(path, GO_MOD_TEMPLATE, language_name, &generate_opts)
|
||||||
|
|
@ -815,20 +592,9 @@ pub fn generate_grammar_files(
|
||||||
let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
|
let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
|
||||||
missing_path(&lang_path, create_dir)?;
|
missing_path(&lang_path, create_dir)?;
|
||||||
|
|
||||||
missing_path_else(
|
missing_path(lang_path.join("binding.c"), |path| {
|
||||||
lang_path.join("binding.c"),
|
generate_file(path, PY_BINDING_C_TEMPLATE, language_name, &generate_opts)
|
||||||
allow_update,
|
})?;
|
||||||
|path| generate_file(path, PY_BINDING_C_TEMPLATE, language_name, &generate_opts),
|
|
||||||
|path| {
|
|
||||||
let binding_c = fs::read_to_string(path)
|
|
||||||
.with_context(|| "Failed to read bindings/python/binding.c")?;
|
|
||||||
if !binding_c.contains("PyCapsule_New") {
|
|
||||||
eprintln!("Replacing bindings/python/binding.c with new binding API");
|
|
||||||
generate_file(path, PY_BINDING_C_TEMPLATE, language_name, &generate_opts)?;
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
},
|
|
||||||
)?;
|
|
||||||
|
|
||||||
missing_path(lang_path.join("__init__.py"), |path| {
|
missing_path(lang_path.join("__init__.py"), |path| {
|
||||||
generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)
|
generate_file(path, INIT_PY_TEMPLATE, language_name, &generate_opts)
|
||||||
|
|
@ -874,7 +640,7 @@ pub fn generate_grammar_files(
|
||||||
// Generate Swift bindings
|
// Generate Swift bindings
|
||||||
if tree_sitter_config.bindings.swift {
|
if tree_sitter_config.bindings.swift {
|
||||||
missing_path(bindings_dir.join("swift"), create_dir)?.apply(|path| {
|
missing_path(bindings_dir.join("swift"), create_dir)?.apply(|path| {
|
||||||
let lang_path = path.join(format!("TreeSitter{}", language_name.to_upper_camel_case()));
|
let lang_path = path.join(format!("TreeSitter{camel_name}",));
|
||||||
missing_path(&lang_path, create_dir)?;
|
missing_path(&lang_path, create_dir)?;
|
||||||
|
|
||||||
missing_path(lang_path.join(format!("{language_name}.h")), |path| {
|
missing_path(lang_path.join(format!("{language_name}.h")), |path| {
|
||||||
|
|
@ -882,18 +648,12 @@ pub fn generate_grammar_files(
|
||||||
})?;
|
})?;
|
||||||
|
|
||||||
missing_path(
|
missing_path(
|
||||||
path.join(format!(
|
path.join(format!("TreeSitter{camel_name}Tests",)),
|
||||||
"TreeSitter{}Tests",
|
|
||||||
language_name.to_upper_camel_case()
|
|
||||||
)),
|
|
||||||
create_dir,
|
create_dir,
|
||||||
)?
|
)?
|
||||||
.apply(|path| {
|
.apply(|path| {
|
||||||
missing_path(
|
missing_path(
|
||||||
path.join(format!(
|
path.join(format!("TreeSitter{camel_name}Tests.swift")),
|
||||||
"TreeSitter{}Tests.swift",
|
|
||||||
language_name.to_upper_camel_case()
|
|
||||||
)),
|
|
||||||
|path| generate_file(path, TESTS_SWIFT_TEMPLATE, language_name, &generate_opts),
|
|path| generate_file(path, TESTS_SWIFT_TEMPLATE, language_name, &generate_opts),
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
|
@ -919,15 +679,14 @@ pub fn get_root_path(path: &Path) -> Result<PathBuf> {
|
||||||
let json = pathbuf
|
let json = pathbuf
|
||||||
.exists()
|
.exists()
|
||||||
.then(|| {
|
.then(|| {
|
||||||
let file = File::open(pathbuf.as_path())
|
let contents = fs::read_to_string(pathbuf.as_path())
|
||||||
.with_context(|| format!("Failed to open {filename}"))?;
|
.with_context(|| format!("Failed to read {filename}"))?;
|
||||||
let reader = BufReader::new(file);
|
|
||||||
if is_package_json {
|
if is_package_json {
|
||||||
serde_json::from_reader::<_, Map<String, Value>>(reader)
|
serde_json::from_str::<Map<String, Value>>(&contents)
|
||||||
.context(format!("Failed to parse {filename}"))
|
.context(format!("Failed to parse {filename}"))
|
||||||
.map(|v| v.contains_key("tree-sitter"))
|
.map(|v| v.contains_key("tree-sitter"))
|
||||||
} else {
|
} else {
|
||||||
serde_json::from_reader::<_, TreeSitterJSON>(reader)
|
serde_json::from_str::<TreeSitterJSON>(&contents)
|
||||||
.context(format!("Failed to parse {filename}"))
|
.context(format!("Failed to parse {filename}"))
|
||||||
.map(|_| true)
|
.map(|_| true)
|
||||||
}
|
}
|
||||||
|
|
@ -961,7 +720,7 @@ fn generate_file(
|
||||||
let mut replacement = template
|
let mut replacement = template
|
||||||
.replace(
|
.replace(
|
||||||
CAMEL_PARSER_NAME_PLACEHOLDER,
|
CAMEL_PARSER_NAME_PLACEHOLDER,
|
||||||
&language_name.to_upper_camel_case(),
|
generate_opts.camel_parser_name,
|
||||||
)
|
)
|
||||||
.replace(
|
.replace(
|
||||||
UPPER_PARSER_NAME_PLACEHOLDER,
|
UPPER_PARSER_NAME_PLACEHOLDER,
|
||||||
|
|
@ -1001,7 +760,12 @@ fn generate_file(
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(email) = generate_opts.author_email {
|
if let Some(email) = generate_opts.author_email {
|
||||||
replacement = replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, email);
|
replacement = match filename {
|
||||||
|
"Cargo.toml" | "grammar.js" => {
|
||||||
|
replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, &format!("<{email}>"))
|
||||||
|
}
|
||||||
|
_ => replacement.replace(AUTHOR_EMAIL_PLACEHOLDER, email),
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
match filename {
|
match filename {
|
||||||
"package.json" => {
|
"package.json" => {
|
||||||
|
|
@ -1099,7 +863,7 @@ fn generate_file(
|
||||||
PARSER_DESCRIPTION_PLACEHOLDER,
|
PARSER_DESCRIPTION_PLACEHOLDER,
|
||||||
&format!(
|
&format!(
|
||||||
"{} grammar for tree-sitter",
|
"{} grammar for tree-sitter",
|
||||||
language_name.to_upper_camel_case()
|
generate_opts.camel_parser_name,
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -653,11 +653,11 @@ impl Init {
|
||||||
|
|
||||||
(opts.name.clone(), Some(opts))
|
(opts.name.clone(), Some(opts))
|
||||||
} else {
|
} else {
|
||||||
let json = serde_json::from_reader::<_, TreeSitterJSON>(
|
let mut json = serde_json::from_str::<TreeSitterJSON>(
|
||||||
fs::File::open(current_dir.join("tree-sitter.json"))
|
&fs::read_to_string(current_dir.join("tree-sitter.json"))
|
||||||
.with_context(|| "Failed to open tree-sitter.json")?,
|
.with_context(|| "Failed to read tree-sitter.json")?,
|
||||||
)?;
|
)?;
|
||||||
(json.grammars[0].name.clone(), None)
|
(json.grammars.swap_remove(0).name, None)
|
||||||
};
|
};
|
||||||
|
|
||||||
generate_grammar_files(current_dir, &language_name, self.update, json_config_opts)?;
|
generate_grammar_files(current_dir, &language_name, self.update, json_config_opts)?;
|
||||||
|
|
|
||||||
|
|
@ -19,6 +19,11 @@
|
||||||
<input id="logging-checkbox" type="checkbox"></input>
|
<input id="logging-checkbox" type="checkbox"></input>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<div class=header-item>
|
||||||
|
<label for="anonymous-nodes-checkbox">show anonymous nodes</label>
|
||||||
|
<input id="anonymous-nodes-checkbox" type="checkbox"></input>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div class=header-item>
|
<div class=header-item>
|
||||||
<label for="query-checkbox">query</label>
|
<label for="query-checkbox">query</label>
|
||||||
<input id="query-checkbox" type="checkbox"></input>
|
<input id="query-checkbox" type="checkbox"></input>
|
||||||
|
|
@ -67,6 +72,12 @@
|
||||||
<script src=tree-sitter.js></script>
|
<script src=tree-sitter.js></script>
|
||||||
<script src=playground.js></script>
|
<script src=playground.js></script>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
setTimeout(() => {
|
||||||
|
window.initializePlayground({local: true});
|
||||||
|
}, 1000);
|
||||||
|
</script>
|
||||||
|
|
||||||
<style>
|
<style>
|
||||||
body {
|
body {
|
||||||
margin: 0;
|
margin: 0;
|
||||||
|
|
|
||||||
|
|
@ -41,3 +41,6 @@ indent_size = 8
|
||||||
|
|
||||||
[parser.c]
|
[parser.c]
|
||||||
indent_size = 2
|
indent_size = 2
|
||||||
|
|
||||||
|
[{alloc,array,parser}.h]
|
||||||
|
indent_size = 2
|
||||||
|
|
|
||||||
|
|
@ -6,6 +6,5 @@ Name: tree-sitter-PARSER_NAME
|
||||||
Description: @PROJECT_DESCRIPTION@
|
Description: @PROJECT_DESCRIPTION@
|
||||||
URL: @PROJECT_HOMEPAGE_URL@
|
URL: @PROJECT_HOMEPAGE_URL@
|
||||||
Version: @PROJECT_VERSION@
|
Version: @PROJECT_VERSION@
|
||||||
Requires: @TS_REQUIRES@
|
|
||||||
Libs: -L${libdir} -ltree-sitter-PARSER_NAME
|
Libs: -L${libdir} -ltree-sitter-PARSER_NAME
|
||||||
Cflags: -I${includedir}
|
Cflags: -I${includedir}
|
||||||
|
|
|
||||||
|
|
@ -12,7 +12,7 @@ edition = "2021"
|
||||||
autoexamples = false
|
autoexamples = false
|
||||||
|
|
||||||
build = "bindings/rust/build.rs"
|
build = "bindings/rust/build.rs"
|
||||||
include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*"]
|
include = ["bindings/rust/*", "grammar.js", "queries/*", "src/*", "tree-sitter.json"]
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
path = "bindings/rust/lib.rs"
|
path = "bindings/rust/lib.rs"
|
||||||
|
|
|
||||||
|
|
@@ -1,9 +1,9 @@
-/// <reference types="node" />
-
 const assert = require("node:assert");
 const { test } = require("node:test");

+const Parser = require("tree-sitter");
+
 test("can load grammar", () => {
-  const parser = new (require("tree-sitter"))();
+  const parser = new Parser();
   assert.doesNotThrow(() => parser.setLanguage(require(".")));
 });
@@ -1,12 +1,13 @@
 cmake_minimum_required(VERSION 3.13)

 project(tree-sitter-PARSER_NAME
-        VERSION "0.0.1"
-        DESCRIPTION "CAMEL_PARSER_NAME grammar for tree-sitter"
-        HOMEPAGE_URL "https://github.com/tree-sitter/tree-sitter-PARSER_NAME"
+        VERSION "PARSER_VERSION"
+        DESCRIPTION "PARSER_DESCRIPTION"
+        HOMEPAGE_URL "PARSER_URL"
         LANGUAGES C)

 option(BUILD_SHARED_LIBS "Build using shared libraries" ON)
+option(TREE_SITTER_REUSE_ALLOCATOR "Reuse the library allocator" OFF)

 set(TREE_SITTER_ABI_VERSION ABI_VERSION_MAX CACHE STRING "Tree-sitter ABI version")
 if(NOT ${TREE_SITTER_ABI_VERSION} MATCHES "^[0-9]+$")

@@ -24,16 +25,21 @@ add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c"
                    COMMENT "Generating parser.c")

 add_library(tree-sitter-PARSER_NAME src/parser.c)
-if(EXISTS src/scanner.c)
+if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/src/scanner.c)
   target_sources(tree-sitter-PARSER_NAME PRIVATE src/scanner.c)
 endif()
 target_include_directories(tree-sitter-PARSER_NAME PRIVATE src)

+target_compile_definitions(tree-sitter-PARSER_NAME PRIVATE
+                           $<$<BOOL:${TREE_SITTER_REUSE_ALLOCATOR}>:TREE_SITTER_REUSE_ALLOCATOR>
+                           $<$<CONFIG:Debug>:TREE_SITTER_DEBUG>)
+
 set_target_properties(tree-sitter-PARSER_NAME
                       PROPERTIES
                       C_STANDARD 11
                       POSITION_INDEPENDENT_CODE ON
-                      SOVERSION "${TREE_SITTER_ABI_VERSION}.${PROJECT_VERSION_MAJOR}")
+                      SOVERSION "${TREE_SITTER_ABI_VERSION}.${PROJECT_VERSION_MAJOR}"
+                      DEFINE_SYMBOL "")

 configure_file(bindings/c/tree-sitter-PARSER_NAME.pc.in
                "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-PARSER_NAME.pc" @ONLY)

@@ -47,8 +53,6 @@ install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-PARSER_NAME.pc"
 install(TARGETS tree-sitter-PARSER_NAME
         LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}")

-add_custom_target(test "${TREE_SITTER_CLI}" test
+add_custom_target(ts-test "${TREE_SITTER_CLI}" test
                   WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
                   COMMENT "tree-sitter test")
-
-# vim:ft=cmake:
@@ -1,13 +1,37 @@
 * text=auto eol=lf

+# Generated source files
 src/*.json linguist-generated
 src/parser.c linguist-generated
 src/tree_sitter/* linguist-generated

-bindings/** linguist-generated
-binding.gyp linguist-generated
-setup.py linguist-generated
-Makefile linguist-generated
+# C bindings
+bindings/c/* linguist-generated
 CMakeLists.txt linguist-generated
-Package.swift linguist-generated
+Makefile linguist-generated
+
+# Rust bindings
+bindings/rust/* linguist-generated
+Cargo.toml linguist-generated
+Cargo.lock linguist-generated
+
+# Node.js bindings
+bindings/node/* linguist-generated
+binding.gyp linguist-generated
+package.json linguist-generated
+package-lock.json linguist-generated
+
+# Python bindings
+bindings/python/** linguist-generated
+setup.py linguist-generated
+pyproject.toml linguist-generated
+
+# Go bindings
+bindings/go/* linguist-generated
 go.mod linguist-generated
+go.sum linguist-generated
+
+# Swift bindings
+bindings/swift/** linguist-generated
+Package.swift linguist-generated
+Package.resolved linguist-generated
@@ -5,11 +5,9 @@ target/
 build/
 prebuilds/
 node_modules/
-*.tgz

 # Swift artifacts
 .build/
-Package.resolved

 # Go artifacts
 _obj/

@@ -35,3 +33,8 @@ dist/
 *.wasm
 *.obj
 *.o
+
+# Archives
+*.tar.gz
+*.tgz
+*.zip
@@ -2,4 +2,4 @@ module PARSER_URL_STRIPPED

 go 1.22

-require github.com/tree-sitter/go-tree-sitter v0.23.1
+require github.com/tree-sitter/go-tree-sitter v0.24.0
@@ -1,6 +1,10 @@
 const root = require("path").join(__dirname, "..", "..");

-module.exports = require("node-gyp-build")(root);
+module.exports =
+  typeof process.versions.bun === "string"
+    // Support `bun build --compile` by being statically analyzable enough to find the .node file at build-time
+    ? require(`../../prebuilds/${process.platform}-${process.arch}/tree-sitter-PARSER_NAME.node`)
+    : require("node-gyp-build")(root);

 try {
   module.exports.nodeTypeInfo = require("../../src/node-types.json");
@@ -59,8 +59,8 @@ endif

 $(LANGUAGE_NAME).pc: bindings/c/$(LANGUAGE_NAME).pc.in
 	sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
-		-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR)|' \
+		-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
-		-e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR)|' \
+		-e 's|@CMAKE_INSTALL_INCLUDEDIR@|$(INCLUDEDIR:$(PREFIX)/%=%)|' \
 		-e 's|@PROJECT_DESCRIPTION@|$(DESCRIPTION)|' \
 		-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
 		-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
@@ -2,7 +2,7 @@
   "name": "tree-sitter-PARSER_NAME",
   "version": "PARSER_VERSION",
   "description": "PARSER_DESCRIPTION",
-  "repository": "github:tree-sitter/tree-sitter-PARSER_NAME",
+  "repository": "PARSER_URL",
   "license": "PARSER_LICENSE",
   "author": {
     "name": "PARSER_AUTHOR_NAME",

@@ -19,6 +19,7 @@
   ],
   "files": [
     "grammar.js",
+    "tree-sitter.json",
     "binding.gyp",
     "prebuilds/**",
     "bindings/node/*",

@@ -27,8 +28,8 @@
     "*.wasm"
   ],
   "dependencies": {
-    "node-addon-api": "^8.0.0",
-    "node-gyp-build": "^4.8.1"
+    "node-addon-api": "^8.2.1",
+    "node-gyp-build": "^4.8.2"
   },
   "devDependencies": {
     "prebuildify": "^6.0.1",

@@ -47,11 +48,5 @@
     "prestart": "tree-sitter build --wasm",
     "start": "tree-sitter playground",
     "test": "node --test bindings/node/*_test.js"
-  },
-  "tree-sitter": [
-    {
-      "scope": "source.LOWER_PARSER_NAME",
-      "injection-regex": "^LOWER_PARSER_NAME$"
-    }
-  ]
+  }
 }
@@ -9,7 +9,6 @@ version = "PARSER_VERSION"
 keywords = ["incremental", "parsing", "tree-sitter", "PARSER_NAME"]
 classifiers = [
     "Intended Audience :: Developers",
-    "License :: OSI Approved :: MIT License",
     "Topic :: Software Development :: Compilers",
     "Topic :: Text Processing :: Linguistic",
     "Typing :: Typed",
@@ -32,7 +32,7 @@ fn detect_language_by_first_line_regex() {
         .find_language_configurations_at_path(strace_dir.path(), false)
         .unwrap();

-    // this is just to validate that we can read the package.json correctly
+    // this is just to validate that we can read the tree-sitter.json correctly
     assert_eq!(config[0].scope.as_ref().unwrap(), "source.strace");

     let file_name = strace_dir.path().join("strace.log");
@@ -306,6 +306,33 @@ fn test_parent_of_zero_width_node() {
     assert_eq!(parent, script_element);
 }

+#[test]
+fn test_next_sibling_of_zero_width_node() {
+    let grammar_json = load_grammar_file(
+        &fixtures_dir()
+            .join("test_grammars")
+            .join("next_sibling_from_zwt")
+            .join("grammar.js"),
+        None,
+    )
+    .unwrap();
+
+    let (parser_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
+
+    let mut parser = Parser::new();
+    let language = get_test_language(&parser_name, &parser_code, None);
+    parser.set_language(&language).unwrap();
+
+    let tree = parser.parse("abdef", None).unwrap();
+
+    let root_node = tree.root_node();
+    let missing_c = root_node.child(2).unwrap();
+    assert!(missing_c.is_missing());
+    assert_eq!(missing_c.kind(), "c");
+    let node_d = root_node.child(3).unwrap();
+    assert_eq!(missing_c.next_sibling().unwrap(), node_d);
+}
+
 #[test]
 fn test_node_field_name_for_child() {
     let mut parser = Parser::new();
@@ -1026,6 +1053,31 @@ fn test_node_numeric_symbols_respect_simple_aliases() {
     assert_eq!(unary_minus_node.kind_id(), binary_minus_node.kind_id());
 }

+#[test]
+fn test_hidden_zero_width_node_with_visible_child() {
+    let code = r"
+    class Foo {
+        std::
+    private:
+        std::string s;
+    };
+    ";
+
+    let mut parser = Parser::new();
+    parser.set_language(&get_language("cpp")).unwrap();
+    let tree = parser.parse(code, None).unwrap();
+    let root = tree.root_node();
+
+    let class_specifier = root.child(0).unwrap();
+    let field_decl_list = class_specifier.child_by_field_name("body").unwrap();
+    let field_decl = field_decl_list.named_child(0).unwrap();
+    let field_ident = field_decl.child_by_field_name("declarator").unwrap();
+    assert_eq!(
+        field_decl.child_with_descendant(field_ident).unwrap(),
+        field_ident
+    );
+}
+
 fn get_all_nodes(tree: &Tree) -> Vec<Node> {
     let mut result = Vec::new();
     let mut visited_children = false;
@@ -1507,6 +1507,20 @@ fn test_parsing_with_scanner_logging() {
     assert!(found);
 }

+#[test]
+fn test_parsing_get_column_at_eof() {
+    let dir = fixtures_dir().join("test_grammars").join("get_col_eof");
+    let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap();
+    let (grammar_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
+
+    let mut parser = Parser::new();
+    parser
+        .set_language(&get_test_language(&grammar_name, &parser_code, Some(&dir)))
+        .unwrap();
+
+    parser.parse("a", None).unwrap();
+}
+
 const fn simple_range(start: usize, end: usize) -> Range {
     Range {
         start_byte: start,
@@ -5,6 +5,9 @@ edition.workspace = true
 rust-version.workspace = true
 publish = false

+[lints]
+workspace = true
+
 [lib]
 proc-macro = true
@@ -1,33 +1,116 @@
-let tree;
-
-(async () => {
-  const CAPTURE_REGEX = /@\s*([\w._-]+)/g;
-  const COLORS_BY_INDEX = [
-    'blue',
-    'chocolate',
-    'darkblue',
-    'darkcyan',
-    'darkgreen',
-    'darkred',
-    'darkslategray',
-    'dimgray',
-    'green',
-    'indigo',
-    'navy',
-    'red',
-    'sienna',
-  ];
-
-  const codeInput = document.getElementById('code-input');
-  const languageSelect = document.getElementById('language-select');
-  const loggingCheckbox = document.getElementById('logging-checkbox');
-  const outputContainer = document.getElementById('output-container');
-  const outputContainerScroll = document.getElementById('output-container-scroll');
-  const playgroundContainer = document.getElementById('playground-container');
-  const queryCheckbox = document.getElementById('query-checkbox');
-  const queryContainer = document.getElementById('query-container');
-  const queryInput = document.getElementById('query-input');
-  const updateTimeSpan = document.getElementById('update-time');
+function initializeLocalTheme() {
+  const themeToggle = document.getElementById('theme-toggle');
+  if (!themeToggle) return;
+
+  // Load saved theme or use system preference
+  const savedTheme = localStorage.getItem('theme');
+  const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
+  const initialTheme = savedTheme || (prefersDark ? 'dark' : 'light');
+
+  // Set initial theme
+  document.documentElement.setAttribute('data-theme', initialTheme);
+
+  themeToggle.addEventListener('click', () => {
+    const currentTheme = document.documentElement.getAttribute('data-theme');
+    const newTheme = currentTheme === 'light' ? 'dark' : 'light';
+    document.documentElement.setAttribute('data-theme', newTheme);
+    localStorage.setItem('theme', newTheme);
+  });
+}
+
+function initializeCustomSelect({ initialValue = null, addListeners = false }) {
+  const button = document.getElementById('language-button');
+  const select = document.getElementById('language-select');
+  if (!button || !select) return;
+
+  const dropdown = button.nextElementSibling;
+  const selectedValue = button.querySelector('.selected-value');
+
+  if (initialValue) {
+    select.value = initialValue;
+  }
+  selectedValue.textContent = select.options[select.selectedIndex].text;
+
+  if (addListeners) {
+    button.addEventListener('click', (e) => {
+      e.preventDefault(); // Prevent form submission
+      dropdown.classList.toggle('show');
+    });
+
+    document.addEventListener('click', (e) => {
+      if (!button.contains(e.target)) {
+        dropdown.classList.remove('show');
+      }
+    });
+
+    dropdown.querySelectorAll('.option').forEach(option => {
+      option.addEventListener('click', () => {
+        selectedValue.textContent = option.textContent;
+        select.value = option.dataset.value;
+        dropdown.classList.remove('show');
+
+        const event = new Event('change');
+        select.dispatchEvent(event);
+      });
+    });
+  }
+}
+
+window.initializePlayground = async function initializePlayground(opts) {
+  const { local } = opts;
+  if (local) {
+    initializeLocalTheme();
+  }
+  initializeCustomSelect({ addListeners: true });
+
+  let tree;
+
+  const CAPTURE_REGEX = /@\s*([\w\._-]+)/g;
+  const LIGHT_COLORS = [
+    "#0550ae", // blue
+    "#ab5000", // rust brown
+    "#116329", // forest green
+    "#844708", // warm brown
+    "#6639ba", // purple
+    "#7d4e00", // orange brown
+    "#0969da", // bright blue
+    "#1a7f37", // green
+    "#cf222e", // red
+    "#8250df", // violet
+    "#6e7781", // gray
+    "#953800", // dark orange
+    "#1b7c83" // teal
+  ];
+
+  const DARK_COLORS = [
+    "#79c0ff", // light blue
+    "#ffa657", // orange
+    "#7ee787", // light green
+    "#ff7b72", // salmon
+    "#d2a8ff", // light purple
+    "#ffa198", // pink
+    "#a5d6ff", // pale blue
+    "#56d364", // bright green
+    "#ff9492", // light red
+    "#e0b8ff", // pale purple
+    "#9ca3af", // gray
+    "#ffb757", // yellow orange
+    "#80cbc4" // light teal
+  ];
+
+  const codeInput = document.getElementById("code-input");
+  const languageSelect = document.getElementById("language-select");
+  const loggingCheckbox = document.getElementById("logging-checkbox");
+  const anonymousNodes = document.getElementById('anonymous-nodes-checkbox');
+  const outputContainer = document.getElementById("output-container");
+  const outputContainerScroll = document.getElementById(
+    "output-container-scroll",
+  );
+  const playgroundContainer = document.getElementById("playground-container");
+  const queryCheckbox = document.getElementById("query-checkbox");
+  const queryContainer = document.getElementById("query-container");
+  const queryInput = document.getElementById("query-input");
+  const updateTimeSpan = document.getElementById("update-time");
   const languagesByName = {};

   loadState();

@@ -35,21 +118,36 @@ let tree;
   await TreeSitter.init();

   const parser = new TreeSitter();

+  console.log(parser, codeInput, queryInput);
+
   const codeEditor = CodeMirror.fromTextArea(codeInput, {
     lineNumbers: true,
     showCursorWhenSelecting: true
   });

+  codeEditor.on('keydown', (_, event) => {
+    if (event.key === 'ArrowLeft' || event.key === 'ArrowRight') {
+      event.stopPropagation(); // Prevent mdBook from going back/forward
+    }
+  });
+
   const queryEditor = CodeMirror.fromTextArea(queryInput, {
     lineNumbers: true,
-    showCursorWhenSelecting: true
+    showCursorWhenSelecting: true,
+  });
+
+  queryEditor.on('keydown', (_, event) => {
+    if (event.key === 'ArrowLeft' || event.key === 'ArrowRight') {
+      event.stopPropagation(); // Prevent mdBook from going back/forward
+    }
   });

   const cluster = new Clusterize({
     rows: [],
     noDataText: null,
     contentElem: outputContainer,
-    scrollElem: outputContainerScroll
+    scrollElem: outputContainerScroll,
   });
   const renderTreeOnCodeChange = debounce(renderTree, 50);
   const saveStateOnChange = debounce(saveState, 2000);

@@ -62,32 +160,33 @@ let tree;
   let isRendering = 0;
   let query;

-  codeEditor.on('changes', handleCodeChange);
-  codeEditor.on('viewportChange', runTreeQueryOnChange);
-  codeEditor.on('cursorActivity', debounce(handleCursorMovement, 150));
-  queryEditor.on('changes', debounce(handleQueryChange, 150));
+  codeEditor.on("changes", handleCodeChange);
+  codeEditor.on("viewportChange", runTreeQueryOnChange);
+  codeEditor.on("cursorActivity", debounce(handleCursorMovement, 150));
+  queryEditor.on("changes", debounce(handleQueryChange, 150));

-  loggingCheckbox.addEventListener('change', handleLoggingChange);
-  queryCheckbox.addEventListener('change', handleQueryEnableChange);
-  languageSelect.addEventListener('change', handleLanguageChange);
-  outputContainer.addEventListener('click', handleTreeClick);
+  loggingCheckbox.addEventListener("change", handleLoggingChange);
+  anonymousNodes.addEventListener('change', renderTree);
+  queryCheckbox.addEventListener("change", handleQueryEnableChange);
+  languageSelect.addEventListener("change", handleLanguageChange);
+  outputContainer.addEventListener("click", handleTreeClick);

   handleQueryEnableChange();
-  await handleLanguageChange()
+  await handleLanguageChange();

-  playgroundContainer.style.visibility = 'visible';
+  playgroundContainer.style.visibility = "visible";

   async function handleLanguageChange() {
     const newLanguageName = languageSelect.value;
     if (!languagesByName[newLanguageName]) {
-      const url = `${LANGUAGE_BASE_URL}/tree-sitter-${newLanguageName}.wasm`
+      const url = `${LANGUAGE_BASE_URL}/tree-sitter-${newLanguageName}.wasm`;
       languageSelect.disabled = true;
       try {
         languagesByName[newLanguageName] = await TreeSitter.Language.load(url);
       } catch (e) {
         console.error(e);
         languageSelect.value = languageName;
-        return
+        return;
       } finally {
         languageSelect.disabled = false;
       }

@@ -100,8 +199,8 @@ let tree;
     handleQueryChange();
   }

-  async function handleCodeChange(_editor, changes) {
-    const newText = `${codeEditor.getValue()}\n`;
+  async function handleCodeChange(editor, changes) {
+    const newText = codeEditor.getValue() + "\n";
     const edits = tree && changes && changes.map(treeEditForEditorChange);

     const start = performance.now();

@@ -126,16 +225,16 @@ let tree;
    isRendering++;
     const cursor = tree.walk();

-    const currentRenderCount = parseCount;
-    let row = '';
-    const rows = [];
+    let currentRenderCount = parseCount;
+    let row = "";
+    let rows = [];
     let finishedRow = false;
     let visitedChildren = false;
     let indentLevel = 0;

-    for (let i = 0;; i++) {
+    for (let i = 0; ; i++) {
       if (i > 0 && i % 10000 === 0) {
-        await new Promise(r => setTimeout(r, 0));
+        await new Promise((r) => setTimeout(r, 0));
         if (parseCount !== currentRenderCount) {
           cursor.delete();
           isRendering--;

@@ -145,9 +244,12 @@ let tree;

       let displayName;
       if (cursor.nodeIsMissing) {
-        displayName = `MISSING ${cursor.nodeType}`
+        const nodeTypeText = cursor.nodeIsNamed ? cursor.nodeType : `"${cursor.nodeType}"`;
+        displayName = `MISSING ${nodeTypeText}`;
       } else if (cursor.nodeIsNamed) {
         displayName = cursor.nodeType;
+      } else if (anonymousNodes.checked) {
+        displayName = cursor.nodeType
       }

       if (visitedChildren) {

@@ -166,7 +268,7 @@ let tree;
       } else {
         if (displayName) {
           if (finishedRow) {
-            row += '</div>';
+            row += "</div>";
             rows.push(row);
             finishedRow = false;
           }

@@ -175,11 +277,23 @@ let tree;
           const id = cursor.nodeId;
           let fieldName = cursor.currentFieldName;
           if (fieldName) {
-            fieldName += ': ';
+            fieldName += ": ";
           } else {
-            fieldName = '';
+            fieldName = "";
           }
-          row = `<div>${' '.repeat(indentLevel)}${fieldName}<a class='plain' href="#" data-id=${id} data-range="${start.row},${start.column},${end.row},${end.column}">${displayName}</a> [${start.row}, ${start.column}] - [${end.row}, ${end.column}]`;
+
+          const nodeClass =
+            displayName === 'ERROR' || displayName.startsWith('MISSING')
+              ? 'node-link error'
+              : cursor.nodeIsNamed
+                ? 'node-link named'
+                : 'node-link anonymous';
+
+          row = `<div class="tree-row">${" ".repeat(indentLevel)}${fieldName}` +
+            `<a class='${nodeClass}' href="#" data-id=${id} ` +
+            `data-range="${start.row},${start.column},${end.row},${end.column}">` +
+            `${displayName}</a> <span class="position-info">` +
+            `[${start.row}, ${start.column}] - [${end.row}, ${end.column}]</span>`;
           finishedRow = true;
         }

@@ -192,7 +306,7 @@ let tree;
         }
       }
       if (finishedRow) {
-        row += '</div>';
+        row += "</div>";
         rows.push(row);
       }

@@ -212,33 +326,48 @@ let tree;

     codeEditor.operation(() => {
       const marks = codeEditor.getAllMarks();
-      marks.forEach(m => m.clear());
+      marks.forEach((m) => m.clear());

       if (tree && query) {
         const captures = query.captures(
           tree.rootNode,
-          {row: startRow, column: 0},
-          {row: endRow, column: 0},
+          { row: startRow, column: 0 },
+          { row: endRow, column: 0 },
         );
         let lastNodeId;
-        for (const {name, node} of captures) {
+        for (const { name, node } of captures) {
           if (node.id === lastNodeId) continue;
           lastNodeId = node.id;
-          const {startPosition, endPosition} = node;
+          const { startPosition, endPosition } = node;
           codeEditor.markText(
-            {line: startPosition.row, ch: startPosition.column},
-            {line: endPosition.row, ch: endPosition.column},
+            { line: startPosition.row, ch: startPosition.column },
+            { line: endPosition.row, ch: endPosition.column },
             {
               inclusiveLeft: true,
               inclusiveRight: true,
-              css: `color: ${colorForCaptureName(name)}`
-            }
+              css: `color: ${colorForCaptureName(name)}`,
+            },
           );
         }
       }
     });
   }

+  // When we change from a dark theme to a light theme (and vice versa), the colors of the
+  // captures need to be updated.
+  const observer = new MutationObserver((mutations) => {
+    mutations.forEach((mutation) => {
+      if (mutation.attributeName === 'class') {
+        handleQueryChange();
+      }
+    });
+  });
+
+  observer.observe(document.documentElement, {
+    attributes: true,
+    attributeFilter: ['class']
+  });
+
   function handleQueryChange() {
     if (query) {
       query.delete();

@@ -247,7 +376,7 @@ let tree;
     }

     queryEditor.operation(() => {
-      queryEditor.getAllMarks().forEach(m => m.clear());
+      queryEditor.getAllMarks().forEach((m) => m.clear());
       if (!queryCheckbox.checked) return;

       const queryText = queryEditor.getValue();

@@ -258,15 +387,15 @@ let tree;

         let row = 0;
         queryEditor.eachLine((line) => {
-          while (match = CAPTURE_REGEX.exec(line.text)) {
+          while ((match = CAPTURE_REGEX.exec(line.text))) {
             queryEditor.markText(
-              {line: row, ch: match.index},
-              {line: row, ch: match.index + match[0].length},
+              { line: row, ch: match.index },
+              { line: row, ch: match.index + match[0].length },
               {
                 inclusiveLeft: true,
                 inclusiveRight: true,
-                css: `color: ${colorForCaptureName(match[1])}`
-              }
+                css: `color: ${colorForCaptureName(match[1])}`,
+              },
             );
           }
           row++;

@@ -275,7 +404,7 @@ let tree;
         const startPosition = queryEditor.posFromIndex(error.index);
         const endPosition = {
           line: startPosition.line,
-          ch: startPosition.ch + (error.length || Infinity)
+          ch: startPosition.ch + (error.length || Infinity),
         };

         if (error.index === queryText.length) {

@@ -287,16 +416,12 @@ let tree;
           }
         }

-        queryEditor.markText(
-          startPosition,
-          endPosition,
-          {
-            className: 'query-error',
-            inclusiveLeft: true,
-            inclusiveRight: true,
-            attributes: {title: error.message}
-          }
-        );
+        queryEditor.markText(startPosition, endPosition, {
+          className: "query-error",
+          inclusiveLeft: true,
+          inclusiveRight: true,
+          attributes: { title: error.message },
+        });
       }
     });

@@ -308,16 +433,13 @@ let tree;
     if (isRendering) return;

     const selection = codeEditor.getDoc().listSelections()[0];
-    let start = {row: selection.anchor.line, column: selection.anchor.ch};
-    let end = {row: selection.head.line, column: selection.head.ch};
+    let start = { row: selection.anchor.line, column: selection.anchor.ch };
+    let end = { row: selection.head.line, column: selection.head.ch };
     if (
       start.row > end.row ||
-      (
-        start.row === end.row &&
-        start.column > end.column
-      )
+      (start.row === end.row && start.column > end.column)
     ) {
-      const swap = end;
+      let swap = end;
       end = start;
       start = swap;
     }

@@ -325,12 +447,22 @@ let tree;
     if (treeRows) {
       if (treeRowHighlightedIndex !== -1) {
         const row = treeRows[treeRowHighlightedIndex];
-        if (row) treeRows[treeRowHighlightedIndex] = row.replace('highlighted', 'plain');
+        if (row)
+          treeRows[treeRowHighlightedIndex] = row.replace(
+            "highlighted",
+            "plain",
+          );
       }
-      treeRowHighlightedIndex = treeRows.findIndex(row => row.includes(`data-id=${node.id}`));
+      treeRowHighlightedIndex = treeRows.findIndex((row) =>
+        row.includes(`data-id=${node.id}`),
+      );
       if (treeRowHighlightedIndex !== -1) {
         const row = treeRows[treeRowHighlightedIndex];
-        if (row) treeRows[treeRowHighlightedIndex] = row.replace('plain', 'highlighted');
+        if (row)
+          treeRows[treeRowHighlightedIndex] = row.replace(
+            "plain",
+            "highlighted",
+          );
       }
       cluster.update(treeRows);
       const lineHeight = cluster.options.item_height;

@@ -338,26 +470,25 @@ let tree;
       const containerHeight = outputContainerScroll.clientHeight;
       const offset = treeRowHighlightedIndex * lineHeight;
       if (scrollTop > offset - 20) {
-        $(outputContainerScroll).animate({scrollTop: offset - 20}, 150);
+        $(outputContainerScroll).animate({ scrollTop: offset - 20 }, 150);
       } else if (scrollTop < offset + lineHeight + 40 - containerHeight) {
-        $(outputContainerScroll).animate({scrollTop: offset - containerHeight + 40}, 150);
+        $(outputContainerScroll).animate(
+          { scrollTop: offset - containerHeight + 40 },
+          150,
+        );
       }
     }
   }

   function handleTreeClick(event) {
-    if (event.target.tagName === 'A') {
+    if (event.target.tagName === "A") {
       event.preventDefault();
-      const [startRow, startColumn, endRow, endColumn] = event
-        .target
-        .dataset
-        .range
-        .split(',')
-        .map(n => parseInt(n));
+      const [startRow, startColumn, endRow, endColumn] =
+        event.target.dataset.range.split(",").map((n) => parseInt(n));
       codeEditor.focus();
       codeEditor.setSelection(
-        {line: startRow, ch: startColumn},
-        {line: endRow, ch: endColumn}
+        { line: startRow, ch: startColumn },
+        { line: endRow, ch: endColumn },
       );
     }
   }

@@ -366,9 +497,9 @@ let tree;
     if (loggingCheckbox.checked) {
       parser.setLogger((message, lexing) => {
         if (lexing) {
-          console.log("  ", message)
+          console.log("  ", message);
         } else {
-          console.log(message)
+          console.log(message);
         }
       });
     } else {

@@ -378,11 +509,11 @@ let tree;

   function handleQueryEnableChange() {
     if (queryCheckbox.checked) {
-      queryContainer.style.visibility = '';
-      queryContainer.style.position = '';
+      queryContainer.style.visibility = "";
+      queryContainer.style.position = "";
     } else {
-      queryContainer.style.visibility = 'hidden';
-      queryContainer.style.position = 'absolute';
+      queryContainer.style.visibility = "hidden";
+      queryContainer.style.position = "absolute";
     }
     handleQueryChange();
   }

@@ -392,48 +523,63 @@ let tree;
     const newLineCount = change.text.length;
     const lastLineLength = change.text[newLineCount - 1].length;

-    const startPosition = {row: change.from.line, column: change.from.ch};
-    const oldEndPosition = {row: change.to.line, column: change.to.ch};
+    const startPosition = { row: change.from.line, column: change.from.ch };
+    const oldEndPosition = { row: change.to.line, column: change.to.ch };
     const newEndPosition = {
       row: startPosition.row + newLineCount - 1,
-      column: newLineCount === 1
-        ? startPosition.column + lastLineLength
-        : lastLineLength
+      column:
+        newLineCount === 1
+          ? startPosition.column + lastLineLength
+          : lastLineLength,
     };

     const startIndex = codeEditor.indexFromPos(change.from);
     let newEndIndex = startIndex + newLineCount - 1;
     let oldEndIndex = startIndex + oldLineCount - 1;
     for (let i = 0; i < newLineCount; i++) newEndIndex += change.text[i].length;
-    for (let i = 0; i < oldLineCount; i++) oldEndIndex += change.removed[i].length;
+    for (let i = 0; i < oldLineCount; i++)
+      oldEndIndex += change.removed[i].length;

     return {
-      startIndex, oldEndIndex, newEndIndex,
-      startPosition, oldEndPosition, newEndPosition
+      startIndex,
+      oldEndIndex,
+      newEndIndex,
+      startPosition,
+      oldEndPosition,
+      newEndPosition,
     };
   }

   function colorForCaptureName(capture) {
     const id = query.captureNames.indexOf(capture);
-    return COLORS_BY_INDEX[id % COLORS_BY_INDEX.length];
+    const isDark = document.querySelector('html').classList.contains('ayu') ||
+      document.querySelector('html').classList.contains('coal') ||
+      document.querySelector('html').classList.contains('navy');
+
+    const colors = isDark ? DARK_COLORS : LIGHT_COLORS;
+    return colors[id % colors.length];
   }

   function loadState() {
     const language = localStorage.getItem("language");
     const sourceCode = localStorage.getItem("sourceCode");
+    const anonNodes = localStorage.getItem("anonymousNodes");
     const query = localStorage.getItem("query");
     const queryEnabled = localStorage.getItem("queryEnabled");
     if (language != null && sourceCode != null && query != null) {
       queryInput.value = query;
       codeInput.value = sourceCode;
       languageSelect.value = language;
-      queryCheckbox.checked = (queryEnabled === 'true');
+      initializeCustomSelect({ initialValue: language });
+      anonymousNodes.checked = anonNodes === "true";
+      queryCheckbox.checked = queryEnabled === "true";
     }
   }

   function saveState() {
     localStorage.setItem("language", languageSelect.value);
     localStorage.setItem("sourceCode", codeEditor.getValue());
+    localStorage.setItem("anonymousNodes", anonymousNodes.checked);
     saveQueryState();
   }

@@ -443,17 +589,18 @@ let tree;
   }

   function debounce(func, wait, immediate) {
-    let timeout;
-    return function() {
-      const context = this, args = arguments;
-      const later = function() {
+    var timeout;
+    return function () {
+      var context = this,
+        args = arguments;
+      var later = function () {
        timeout = null;
         if (!immediate) func.apply(context, args);
       };
-      const callNow = immediate && !timeout;
+      var callNow = immediate && !timeout;
       clearTimeout(timeout);
       timeout = setTimeout(later, wait);
       if (callNow) func.apply(context, args);
     };
   }
-})();
+};
@@ -15,6 +15,9 @@ license.workspace = true
 keywords = ["incremental", "parsing", "syntax", "highlighting"]
 categories = ["parsing", "text-editors"]

+[lints]
+workspace = true
+
 [lib]
 crate-type = ["lib", "staticlib"]
@@ -1,60 +1,74 @@
 cmake_minimum_required(VERSION 3.13)

 project(tree-sitter
-        VERSION "0.24.1"
+        VERSION "0.24.7"
         DESCRIPTION "An incremental parsing system for programming tools"
         HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/"
         LANGUAGES C)

 option(BUILD_SHARED_LIBS "Build using shared libraries" ON)
 option(TREE_SITTER_FEATURE_WASM "Enable the Wasm feature" OFF)
+option(AMALGAMATED "Build using an amalgamated source" OFF)

-file(GLOB TS_SOURCE_FILES src/*.c)
-list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
+if(AMALGAMATED)
+  set(TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
+else()
+  file(GLOB TS_SOURCE_FILES src/*.c)
+  list(REMOVE_ITEM TS_SOURCE_FILES "${PROJECT_SOURCE_DIR}/src/lib.c")
+endif()

 add_library(tree-sitter ${TS_SOURCE_FILES})

 target_include_directories(tree-sitter PRIVATE src src/wasm include)

-if(NOT MSVC)
-  target_compile_options(tree-sitter PRIVATE -Wall -Wextra -Wshadow -Wno-unused-parameter -pedantic)
-endif()
-
-if(NOT BUILD_SHARED_LIBS)
-  if(WIN32)
-    set(CMAKE_FIND_LIBRARY_SUFFIXES .lib .a)
-  else()
-    set(CMAKE_FIND_LIBRARY_SUFFIXES .a)
-  endif()
+if(MSVC)
+  target_compile_options(tree-sitter PRIVATE
+                         /wd4018 # disable 'signed/unsigned mismatch'
+                         /wd4232 # disable 'nonstandard extension used'
+                         /wd4244 # disable 'possible loss of data'
+                         /wd4267 # disable 'possible loss of data (size_t)'
+                         /wd4701 # disable 'potentially uninitialized local variable'
+                         /we4022 # treat 'incompatible types' as an error
+                         /W4)
+else()
+  target_compile_options(tree-sitter PRIVATE
+                         -Wall -Wextra -Wshadow -Wpedantic
+                         -Werror=incompatible-pointer-types)
 endif()

 if(TREE_SITTER_FEATURE_WASM)
   if(NOT DEFINED CACHE{WASMTIME_INCLUDE_DIR})
     message(CHECK_START "Looking for wasmtime headers")
     find_path(WASMTIME_INCLUDE_DIR wasmtime.h
-              PATHS ENV DEP_WASMTIME_C_API_INCLUDE
-              REQUIRED)
+              PATHS ENV DEP_WASMTIME_C_API_INCLUDE)
+    if(NOT WASMTIME_INCLUDE_DIR)
+      unset(WASMTIME_INCLUDE_DIR CACHE)
+      message(FATAL_ERROR "Could not find wasmtime headers.\nDid you forget to set CMAKE_INCLUDE_PATH?")
+    endif()
     message(CHECK_PASS "found")
   endif()

   if(NOT DEFINED CACHE{WASMTIME_LIBRARY})
     message(CHECK_START "Looking for wasmtime library")
-    find_library(WASMTIME_LIBRARY wasmtime
-                 REQUIRED)
+    find_library(WASMTIME_LIBRARY wasmtime)
+    if(NOT WASMTIME_LIBRARY)
+      unset(WASMTIME_LIBRARY CACHE)
+      message(FATAL_ERROR "Could not find wasmtime library.\nDid you forget to set CMAKE_LIBRARY_PATH?")
+    endif()
     message(CHECK_PASS "found")
   endif()

   target_compile_definitions(tree-sitter PUBLIC TREE_SITTER_FEATURE_WASM)
   target_include_directories(tree-sitter SYSTEM PRIVATE "${WASMTIME_INCLUDE_DIR}")
-  target_link_libraries(tree-sitter PRIVATE "${WASMTIME_LIBRARY}")
+  target_link_libraries(tree-sitter PUBLIC "${WASMTIME_LIBRARY}")
   set_property(TARGET tree-sitter PROPERTY C_STANDARD_REQUIRED ON)

   if(NOT BUILD_SHARED_LIBS)
     if(WIN32)
       target_compile_definitions(tree-sitter PRIVATE WASM_API_EXTERN= WASI_API_EXTERN=)
-      target_link_libraries(tree-sitter PRIVATE ws2_32 advapi32 userenv ntdll shell32 ole32 bcrypt)
+      target_link_libraries(tree-sitter INTERFACE ws2_32 advapi32 userenv ntdll shell32 ole32 bcrypt)
     elseif(NOT APPLE)
-      target_link_libraries(tree-sitter PRIVATE pthread dl m)
+      target_link_libraries(tree-sitter INTERFACE pthread dl m)
     endif()
   endif()
 endif()

@@ -64,7 +78,10 @@ set_target_properties(tree-sitter
                       C_STANDARD 11
                       C_VISIBILITY_PRESET hidden
                       POSITION_INDEPENDENT_CODE ON
-                      SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}")
+                      SOVERSION "${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}"
+                      DEFINE_SYMBOL "")
+
+target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE)

 configure_file(tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY)
@@ -25,6 +25,9 @@ include = [
   "/include/tree_sitter/api.h",
 ]

+[lints]
+workspace = true
+
 [features]
 default = ["std"]
 std = ["regex/std", "regex/perf", "regex-syntax/unicode"]

@@ -37,7 +40,7 @@ tree-sitter-language = { version = "0.1", path = "language" }
 streaming-iterator = "0.1.9"

 [dependencies.wasmtime-c-api]
-version = "25.0.1"
+version = "25.0.2"
 optional = true
 package = "wasmtime-c-api-impl"
 default-features = false
@@ -8,7 +8,6 @@ extern crate alloc;
 #[cfg(not(feature = "std"))]
 use alloc::{boxed::Box, format, string::String, string::ToString, vec::Vec};
 use core::{
-    char,
     ffi::{c_char, c_void, CStr},
     fmt::{self, Write},
     hash, iter,

@@ -489,9 +488,9 @@ impl Parser {
     /// Get the parser's current language.
     #[doc(alias = "ts_parser_language")]
     #[must_use]
-    pub fn language(&self) -> Option<Language> {
+    pub fn language(&self) -> Option<LanguageRef<'_>> {
         let ptr = unsafe { ffi::ts_parser_language(self.0.as_ptr()) };
-        (!ptr.is_null()).then(|| Language(ptr))
+        (!ptr.is_null()).then_some(LanguageRef(ptr, PhantomData))
     }

     /// Get the parser's current logger.
@@ -1854,9 +1853,28 @@ impl Query {
             // Error types that report names
             ffi::TSQueryErrorNodeType | ffi::TSQueryErrorField | ffi::TSQueryErrorCapture => {
                 let suffix = source.split_at(offset).1;
-                let end_offset = suffix
-                    .find(|c| !char::is_alphanumeric(c) && c != '_' && c != '-')
-                    .unwrap_or(suffix.len());
+                let in_quotes = source.as_bytes()[offset - 1] == b'"';
+                let mut end_offset = suffix.len();
+                if let Some(pos) = suffix
+                    .char_indices()
+                    .take_while(|(_, c)| *c != '\n')
+                    .find_map(|(i, c)| match c {
+                        '"' if in_quotes
+                            && i > 0
+                            && suffix.chars().nth(i - 1) != Some('\\') =>
+                        {
+                            Some(i)
+                        }
+                        c if !in_quotes
+                            && (c.is_whitespace() || c == '(' || c == ')' || c == ':') =>
+                        {
+                            Some(i)
+                        }
+                        _ => None,
+                    })
+                {
+                    end_offset = pos;
+                }
                 message = suffix.split_at(end_offset).0.to_string();
                 kind = match error_type {
                     ffi::TSQueryErrorNodeType => QueryErrorKind::NodeType,
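The new scanning logic above is compact but dense; a standalone sketch of the same idea may help (names and inputs assumed, not the library's API): given the query source and the byte offset where the error begins, walk the suffix up to the first newline, stopping at an unescaped closing quote when the name started inside quotes, or at whitespace and query punctuation otherwise.

    // Standalone sketch of the end-offset scan (assumed helper function).
    fn error_token_end(source: &str, offset: usize) -> usize {
        let suffix = &source[offset..];
        let in_quotes = offset > 0 && source.as_bytes()[offset - 1] == b'"';
        let mut prev = None;
        for (i, c) in suffix.char_indices().take_while(|&(_, c)| c != '\n') {
            match c {
                // An unescaped '"' terminates a quoted name.
                '"' if in_quotes && i > 0 && prev != Some('\\') => return i,
                // Whitespace, parentheses, or ':' terminate a bare name.
                c if !in_quotes && (c.is_whitespace() || "():".contains(c)) => return i,
                _ => {}
            }
            prev = Some(c);
        }
        suffix.len()
    }

    fn main() {
        // The offending node name runs up to the closing parenthesis.
        assert_eq!(error_token_end("(unknown_node) @x", 1), 12);
        // Inside quotes, spaces are part of the name.
        assert_eq!(error_token_end("\"anon token\" @x", 1), 10);
    }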
@@ -1,6 +1,6 @@
 {
   "name": "web-tree-sitter",
-  "version": "0.24.1",
+  "version": "0.24.7",
   "description": "Tree-sitter bindings for the web",
   "main": "tree-sitter.js",
   "types": "tree-sitter-web.d.ts",
@@ -1,14 +1,19 @@
 [package]
 name = "tree-sitter-language"
 description = "The tree-sitter Language type, used by the library and by language implementations"
-version = "0.1.2"
+version = "0.1.3"
 authors.workspace = true
 edition.workspace = true
 rust-version.workspace = true
+readme = "README.md"
 homepage.workspace = true
 repository.workspace = true
 license.workspace = true
 keywords.workspace = true
+categories = ["api-bindings", "development-tools::ffi", "parsing"]
+
+[lints]
+workspace = true

 [lib]
 path = "language.rs"

lib/language/README.md (new file, 4 lines)

@@ -0,0 +1,4 @@
+# Tree-sitter Language
+
+This crate provides a `LanguageFn` type for grammars to create `Language` instances from a parser,
+without having to worry about the `tree-sitter` crate version not matching.
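For context, the pattern this crate enables looks roughly like the following; `tree_sitter_mylang` is a placeholder for a grammar's generated C entry point, and the stub here exists only so the sketch compiles on its own:

    use tree_sitter_language::LanguageFn;

    // Placeholder for the C symbol a generated grammar exports; a real
    // grammar crate declares it with `extern "C"` and links src/parser.c.
    unsafe extern "C" fn tree_sitter_mylang() -> *const () {
        core::ptr::null()
    }

    // The grammar crate exposes a version-independent constant...
    pub const LANGUAGE: LanguageFn = unsafe { LanguageFn::from_raw(tree_sitter_mylang) };

    fn main() {
        // ...and a consumer converts it into its own `tree_sitter::Language`,
        // e.g. `parser.set_language(&LANGUAGE.into())`.
        let _ = LANGUAGE;
    }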
@@ -14,6 +14,7 @@ extern "C" {
 #include <string.h>

 #ifdef _MSC_VER
+#pragma warning(push)
 #pragma warning(disable : 4101)
 #elif defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic push

@@ -278,7 +279,7 @@ static inline void _array__splice(Array *self, size_t element_size,
 #define _compare_int(a, b) ((int)*(a) - (int)(b))

 #ifdef _MSC_VER
-#pragma warning(default : 4101)
+#pragma warning(pop)
 #elif defined(__GNUC__) || defined(__clang__)
 #pragma GCC diagnostic pop
 #endif
@@ -49,9 +49,9 @@ static inline bool clock_is_gt(TSClock self, TSClock other) {
   return self > other;
 }
 
-#elif defined(CLOCK_MONOTONIC) && !defined(__APPLE__)
+#elif defined(CLOCK_MONOTONIC)
 
-// POSIX with monotonic clock support (Linux)
+// POSIX with monotonic clock support (Linux, macOS)
 // * Represent a time as a monotonic (seconds, nanoseconds) pair.
 // * Represent a duration as a number of microseconds.
 //
@@ -102,7 +102,7 @@ static inline bool clock_is_gt(TSClock self, TSClock other) {
 
 #else
 
-// macOS or POSIX without monotonic clock support
+// POSIX without monotonic clock support
 // * Represent a time as a process clock value.
 // * Represent a duration as a number of process clock ticks.
 //
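The platform note above is the substance of this change: modern macOS provides `CLOCK_MONOTONIC`, so the `__APPLE__` exclusion is no longer needed. For reference, a minimal sketch of the representation these comments describe, assuming a POSIX environment (`clock_now` and `duration_us` are hypothetical helper names, not the library's API):

```c
#include <stdint.h>
#include <time.h>

// Represent a time as a monotonic (seconds, nanoseconds) pair.
static struct timespec clock_now(void) {
  struct timespec result;
  clock_gettime(CLOCK_MONOTONIC, &result);
  return result;
}

// Represent a duration as a number of microseconds (assumes end >= start).
static uint64_t duration_us(struct timespec start, struct timespec end) {
  int64_t seconds = (int64_t)end.tv_sec - (int64_t)start.tv_sec;
  int64_t nanoseconds = (int64_t)end.tv_nsec - (int64_t)start.tv_nsec;
  return (uint64_t)(seconds * 1000000 + nanoseconds / 1000);
}
```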
@@ -252,12 +252,12 @@ static uint32_t ts_lexer__get_column(TSLexer *_self) {
   uint32_t goal_byte = self->current_position.bytes;
 
   self->did_get_column = true;
-  self->current_position.bytes -= self->current_position.extent.column;
-  self->current_position.extent.column = 0;
-
-  if (self->current_position.bytes < self->chunk_start) {
-    ts_lexer__get_chunk(self);
-  }
+  Length start_of_col = {
+    self->current_position.bytes - self->current_position.extent.column,
+    {self->current_position.extent.row, 0},
+  };
+  ts_lexer_goto(self, start_of_col);
+  ts_lexer__get_chunk(self);
 
   uint32_t result = 0;
   if (!ts_lexer__eof(_self)) {
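The rewrite packages "jump back to the first byte of the current row" into a single `Length` value passed to `ts_lexer_goto`, and then re-fetches the chunk unconditionally. The column itself is recomputed by re-reading the row. A standalone sketch of that recomputation over a plain UTF-8 buffer (hypothetical helper, assuming the stored column is a byte offset into the row):

```c
#include <stdint.h>

// Recompute the column in characters by re-reading the current row:
// in UTF-8, continuation bytes (0b10xxxxxx) do not start a new character.
static uint32_t column_at(const uint8_t *text, uint32_t goal_byte, uint32_t byte_column) {
  uint32_t start_of_row = goal_byte - byte_column;  // first byte of the row
  uint32_t column = 0;
  for (uint32_t i = start_of_row; i < goal_byte; i++) {
    if ((text[i] & 0xC0) != 0x80) column++;
  }
  return column;
}
```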
@@ -103,21 +103,6 @@ static inline bool ts_node_child_iterator_next(
   return true;
 }
 
-// This will return true if the next sibling is a zero-width token that is adjacent to the current node and is relevant
-static inline bool ts_node_child_iterator_next_sibling_is_empty_adjacent(NodeChildIterator *self, TSNode previous) {
-  if (!self->parent.ptr || ts_node_child_iterator_done(self)) return false;
-  if (self->child_index == 0) return false;
-  const Subtree *child = &ts_subtree_children(self->parent)[self->child_index];
-  TSSymbol alias = 0;
-  if (!ts_subtree_extra(*child)) {
-    if (self->alias_sequence) {
-      alias = self->alias_sequence[self->structural_child_index];
-    }
-  }
-  TSNode next = ts_node_new(self->tree, child, self->position, alias);
-  return ts_node_end_byte(previous) == ts_node_end_byte(next) && ts_node__is_relevant(next, true);
-}
-
 // TSNode - private
 
 static inline bool ts_node__is_relevant(TSNode self, bool include_anonymous) {
@@ -277,8 +262,16 @@ static inline TSNode ts_node__next_sibling(TSNode self, bool include_anonymous)
     TSNode child;
     NodeChildIterator iterator = ts_node_iterate_children(&node);
     while (ts_node_child_iterator_next(&iterator, &child)) {
-      if (iterator.position.bytes < target_end_byte) continue;
-      if (ts_node_start_byte(child) <= ts_node_start_byte(self)) {
+      if (iterator.position.bytes <= target_end_byte) continue;
+      uint32_t start_byte = ts_node_start_byte(self);
+      uint32_t child_start_byte = ts_node_start_byte(child);
+
+      bool is_empty = start_byte == target_end_byte;
+      bool contains_target = is_empty ?
+        child_start_byte < start_byte :
+        child_start_byte <= start_byte;
+
+      if (contains_target) {
         if (ts_node__subtree(child).ptr != ts_node__subtree(self).ptr) {
           child_containing_target = child;
         }
@@ -549,9 +542,9 @@ TSNode ts_node_parent(TSNode self) {
   if (node.id == self.id) return ts_node__null();
 
   while (true) {
-    TSNode next_node = ts_node_child_containing_descendant(node, self);
-    if (ts_node_is_null(next_node)) break;
+    TSNode next_node = ts_node_child_with_descendant(node, self);
+    if (next_node.id == self.id || ts_node_is_null(next_node)) break;
     node = next_node;
   }
 
   return node;
@@ -560,6 +553,7 @@ TSNode ts_node_parent(TSNode self) {
 TSNode ts_node_child_containing_descendant(TSNode self, TSNode descendant) {
   uint32_t start_byte = ts_node_start_byte(descendant);
   uint32_t end_byte = ts_node_end_byte(descendant);
+  bool is_empty = start_byte == end_byte;
 
   do {
     NodeChildIterator iter = ts_node_iterate_children(&self);
@@ -572,24 +566,16 @@ TSNode ts_node_child_containing_descendant(TSNode self, TSNode descendant) {
         return ts_node__null();
       }
 
-      // Here we check the current self node and *all* of its zero-width token siblings that follow.
-      // If any of these nodes contain the target subnode, we return that node. Otherwise, we restore the node we started at
-      // for the loop condition, and that will continue with the next *non-zero-width* sibling.
-      TSNode old = self;
-      // While the next sibling is a zero-width token
-      while (ts_node_child_iterator_next_sibling_is_empty_adjacent(&iter, self)) {
-        TSNode current_node = ts_node_child_containing_descendant(self, descendant);
-        // If the target child is in self, return it
-        if (!ts_node_is_null(current_node)) {
-          return current_node;
-        }
-        ts_node_child_iterator_next(&iter, &self);
-        if (self.id == descendant.id) {
-          return ts_node__null();
+      // If the descendant is empty, and the end byte is within `self`,
+      // we check whether `self` contains it or not.
+      if (is_empty && iter.position.bytes >= end_byte && ts_node_child_count(self) > 0) {
+        TSNode child = ts_node_child_with_descendant(self, descendant);
+        // If the child is not null, return self if it's relevant, else return the child
+        if (!ts_node_is_null(child)) {
+          return ts_node__is_relevant(self, true) ? self : child;
         }
       }
-      self = old;
-    } while (iter.position.bytes < end_byte || ts_node_child_count(self) == 0);
+    } while ((is_empty ? iter.position.bytes <= end_byte : iter.position.bytes < end_byte) || ts_node_child_count(self) == 0);
   } while (!ts_node__is_relevant(self, true));
 
   return self;
@@ -598,6 +584,7 @@ TSNode ts_node_child_containing_descendant(TSNode self, TSNode descendant) {
 TSNode ts_node_child_with_descendant(TSNode self, TSNode descendant) {
   uint32_t start_byte = ts_node_start_byte(descendant);
   uint32_t end_byte = ts_node_end_byte(descendant);
+  bool is_empty = start_byte == end_byte;
 
   do {
     NodeChildIterator iter = ts_node_iterate_children(&self);
@@ -612,24 +599,16 @@ TSNode ts_node_child_with_descendant(TSNode self, TSNode descendant) {
         return self;
       }
 
-      // Here we check the current self node and *all* of its zero-width token siblings that follow.
-      // If any of these nodes contain the target subnode, we return that node. Otherwise, we restore the node we started at
-      // for the loop condition, and that will continue with the next *non-zero-width* sibling.
-      TSNode old = self;
-      // While the next sibling is a zero-width token
-      while (ts_node_child_iterator_next_sibling_is_empty_adjacent(&iter, self)) {
-        TSNode current_node = ts_node_child_with_descendant(self, descendant);
-        // If the target child is in self, return it
-        if (!ts_node_is_null(current_node)) {
-          return current_node;
-        }
-        ts_node_child_iterator_next(&iter, &self);
-        if (self.id == descendant.id) {
-          return self;
+      // If the descendant is empty, and the end byte is within `self`,
+      // we check whether `self` contains it or not.
+      if (is_empty && iter.position.bytes >= end_byte && ts_node_child_count(self) > 0) {
+        TSNode child = ts_node_child_with_descendant(self, descendant);
+        // If the child is not null, return self if it's relevant, else return the child
+        if (!ts_node_is_null(child)) {
+          return ts_node__is_relevant(self, true) ? self : child;
         }
       }
-      self = old;
-    } while (iter.position.bytes < end_byte || ts_node_child_count(self) == 0);
+    } while ((is_empty ? iter.position.bytes <= end_byte : iter.position.bytes < end_byte) || ts_node_child_count(self) == 0);
   } while (!ts_node__is_relevant(self, true));
 
   return self;
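Both rewritten functions hinge on the same containment rule, spelled out in the `contains_target` computation earlier in this diff: a zero-width descendant that starts exactly where a child starts is only attributed to a child that begins strictly before it. Isolated as a predicate (a sketch with hypothetical names, not the library's API):

```c
#include <stdbool.h>
#include <stdint.h>

// Should a child whose span begins at `child_start` be searched for a
// descendant spanning [start, end)? For empty descendants (start == end),
// require strict containment, so that a zero-width node sitting on the
// boundary between two siblings is attributed to exactly one of them.
static bool child_contains_descendant(uint32_t child_start, uint32_t start, uint32_t end) {
  bool is_empty = start == end;
  return is_empty ? child_start < start : child_start <= start;
}
```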
@@ -350,7 +350,7 @@ static bool ts_parser__call_main_lex_fn(TSParser *self, TSLexMode lex_mode) {
   }
 }
 
-static bool ts_parser__call_keyword_lex_fn(TSParser *self, TSLexMode lex_mode) {
+static bool ts_parser__call_keyword_lex_fn(TSParser *self) {
   if (ts_language_is_wasm(self->language)) {
     return ts_wasm_store_call_lex_keyword(self->wasm_store, 0);
   } else {
@@ -553,27 +553,29 @@ static Subtree ts_parser__lex(
       external_scanner_state_len
     );
 
-    // When recovering from an error, ignore any zero-length external tokens
-    // unless they have changed the external scanner's state. This helps to
-    // avoid infinite loops which could otherwise occur, because the lexer is
-    // looking for any possible token, instead of looking for the specific set of
-    // tokens that are valid in some parse state.
+    // Avoid infinite loops caused by the external scanner returning empty tokens.
+    // Empty tokens are needed in some circumstances, e.g. indent/dedent tokens
+    // in Python. Ignore the following classes of empty tokens:
     //
-    // Note that it's possible that the token end position may be *before* the
-    // original position of the lexer because of the way that tokens are positioned
-    // at included range boundaries: when a token is terminated at the start of
-    // an included range, it is marked as ending at the *end* of the preceding
-    // included range.
+    // * Tokens produced during error recovery. When recovering from an error,
+    //   all tokens are allowed, so it's easy to accidentally return unwanted
+    //   empty tokens.
+    // * Tokens that are marked as 'extra' in the grammar. These don't change
+    //   the parse state, so they would definitely cause an infinite loop.
     if (
       self->lexer.token_end_position.bytes <= current_position.bytes &&
-      (error_mode || !ts_stack_has_advanced_since_error(self->stack, version)) &&
       !external_scanner_state_changed
     ) {
-      LOG(
-        "ignore_empty_external_token symbol:%s",
-        SYM_NAME(self->language->external_scanner.symbol_map[self->lexer.data.result_symbol])
-      )
-      found_token = false;
+      TSSymbol symbol = self->language->external_scanner.symbol_map[self->lexer.data.result_symbol];
+      TSStateId next_parse_state = ts_language_next_state(self->language, parse_state, symbol);
+      bool token_is_extra = (next_parse_state == parse_state);
+      if (error_mode || !ts_stack_has_advanced_since_error(self->stack, version) || token_is_extra) {
+        LOG(
+          "ignore_empty_external_token symbol:%s",
+          SYM_NAME(self->language->external_scanner.symbol_map[self->lexer.data.result_symbol])
+        );
+        found_token = false;
+      }
     }
   }
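The new 'extra' test relies on a property of extra tokens: shifting one never changes the parse state. A sketch of that check in isolation, using the public `ts_language_next_state` function (the wrapper name here is hypothetical):

```c
#include <stdbool.h>
#include <tree_sitter/api.h>

// An empty token that leaves the parse state unchanged would be lexed
// again at the same position forever, so it must be discarded.
static bool empty_token_would_loop(
  const TSLanguage *language,
  TSStateId parse_state,
  TSSymbol symbol
) {
  TSStateId next_parse_state = ts_language_next_state(language, parse_state, symbol);
  return next_parse_state == parse_state;
}
```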
@@ -651,7 +653,7 @@ static Subtree ts_parser__lex(
     ts_lexer_reset(&self->lexer, self->lexer.token_start_position);
     ts_lexer_start(&self->lexer);
 
-    is_keyword = ts_parser__call_keyword_lex_fn(self, lex_mode);
+    is_keyword = ts_parser__call_keyword_lex_fn(self);
 
     if (
       is_keyword &&
@@ -1,5 +1,4 @@
 #include "tree_sitter/api.h"
-#include "./alloc.h"
 #include "./tree_cursor.h"
 #include "./language.h"
 #include "./tree.h"
@@ -212,7 +211,6 @@ bool ts_tree_cursor_goto_first_child(TSTreeCursor *self) {
       return false;
     }
   }
-  return false;
 }
 
 TreeCursorStep ts_tree_cursor_goto_last_child_internal(TSTreeCursor *_self) {
@@ -253,7 +251,6 @@ bool ts_tree_cursor_goto_last_child(TSTreeCursor *self) {
      return false;
    }
  }
-  return false;
 }
 
 static inline int64_t ts_tree_cursor_goto_first_child_for_byte_and_point(
@@ -16,6 +16,14 @@
 #include <wasm.h>
 #include <wasmtime.h>
 
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable : 4100)
+#elif defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wunused-parameter"
+#endif
+
 #define array_len(a) (sizeof(a) / sizeof(a[0]))
 
 // The following symbols from the C and C++ standard libraries are available
@@ -159,8 +167,6 @@ typedef struct {
   int32_t eof;
 } LexerInWasmMemory;
 
-static volatile uint32_t NEXT_LANGUAGE_ID;
-
 // Linear memory layout:
 // [ <-- stack | stdlib statics | lexer | language statics --> | serialization_buffer | heap --> ]
 #define MAX_MEMORY_SIZE (128 * 1024 * 1024 / MEMORY_PAGE_SIZE)
@@ -169,7 +175,7 @@ static volatile uint32_t NEXT_LANGUAGE_ID;
  * WasmDylinkMemoryInfo
  ***********************/
 
-static uint8_t read_u8(const uint8_t **p, const uint8_t *end) {
+static uint8_t read_u8(const uint8_t **p) {
   return *(*p)++;
 }
@@ -204,7 +210,7 @@ static bool wasm_dylink_info__parse(
   p += 4;
 
   while (p < end) {
-    uint8_t section_id = read_u8(&p, end);
+    uint8_t section_id = read_u8(&p);
     uint32_t section_length = read_uleb128(&p, end);
     const uint8_t *section_end = p + section_length;
     if (section_end > end) return false;
@@ -217,7 +223,7 @@ static bool wasm_dylink_info__parse(
     if (name_length == 8 && memcmp(p, "dylink.0", 8) == 0) {
       p = name_end;
       while (p < section_end) {
-        uint8_t subsection_type = read_u8(&p, section_end);
+        uint8_t subsection_type = read_u8(&p);
         uint32_t subsection_size = read_uleb128(&p, section_end);
         const uint8_t *subsection_end = p + subsection_size;
         if (subsection_end > section_end) return false;
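`read_u8` no longer takes an unused `end` argument; bounds are enforced by the surrounding `while (p < end)` loops and the section-length checks instead. Its companion `read_uleb128` keeps the bound because a multi-byte value can run off the end. A sketch of what such a decoder typically looks like (the real implementation may differ in details):

```c
#include <stdint.h>

// Decode an unsigned LEB128 integer: seven payload bits per byte,
// least-significant group first; a set high bit means "more bytes follow".
static uint32_t read_uleb128(const uint8_t **p, const uint8_t *end) {
  uint32_t value = 0;
  unsigned shift = 0;
  while (*p < end && shift < 32) {
    uint8_t byte = *(*p)++;
    value |= (uint32_t)(byte & 0x7F) << shift;
    if ((byte & 0x80) == 0) break;
    shift += 7;
  }
  return value;
}
```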
@@ -545,6 +551,7 @@ TSWasmStore *ts_wasm_store_new(TSWasmEngine *engine, TSWasmError *wasm_error) {
   wasm_trap_t *trap = NULL;
   wasm_message_t message = WASM_EMPTY_VEC;
   wasm_exporttype_vec_t export_types = WASM_EMPTY_VEC;
+  wasm_importtype_vec_t import_types = WASM_EMPTY_VEC;
   wasmtime_extern_t *imports = NULL;
   wasmtime_module_t *stdlib_module = NULL;
   wasm_memorytype_t *memory_type = NULL;
@@ -660,11 +667,10 @@ TSWasmStore *ts_wasm_store_new(TSWasmEngine *engine, TSWasmError *wasm_error) {
   }
 
   // Retrieve the stdlib module's imports.
-  wasm_importtype_vec_t import_types = WASM_EMPTY_VEC;
   wasmtime_module_imports(stdlib_module, &import_types);
 
   // Find the initial number of memory pages needed by the stdlib.
-  const wasm_memorytype_t *stdlib_memory_type;
+  const wasm_memorytype_t *stdlib_memory_type = NULL;
   for (unsigned i = 0; i < import_types.size; i++) {
     wasm_importtype_t *import_type = import_types.data[i];
     const wasm_name_t *import_name = wasm_importtype_name(import_type);
@@ -1545,13 +1551,22 @@ static void ts_wasm_store__call(
   }
 }
 
+// The data fields of TSLexer, without the function pointers.
+//
+// This portion of the struct needs to be copied in and out
+// of wasm memory before and after calling a scan function.
+typedef struct {
+  int32_t lookahead;
+  TSSymbol result_symbol;
+} TSLexerDataPrefix;
+
 static bool ts_wasm_store__call_lex_function(TSWasmStore *self, unsigned function_index, TSStateId state) {
   wasmtime_context_t *context = wasmtime_store_context(self->store);
   uint8_t *memory_data = wasmtime_memory_data(context, &self->memory);
   memcpy(
     &memory_data[self->lexer_address],
-    &self->current_lexer->lookahead,
-    sizeof(self->current_lexer->lookahead)
+    self->current_lexer,
+    sizeof(TSLexerDataPrefix)
   );
 
   wasmtime_val_raw_t args[2] = {
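Naming the mirrored region gives both directions of the transfer a single size to agree on; previously the copy in used `sizeof(lookahead)` while the copy out hand-summed two field sizes. A minimal sketch of the pattern (hedged; the `TSSymbol` width shown here is illustrative):

```c
#include <stdint.h>
#include <string.h>

typedef uint16_t TSSymbol;  // illustrative; matches the library's typedef

// The prefix of the lexer struct that is mirrored into wasm linear memory.
typedef struct {
  int32_t lookahead;
  TSSymbol result_symbol;
} TSLexerDataPrefix;

// Copy the data prefix of `lexer` into wasm memory before a scan...
static void prefix_in(uint8_t *wasm_memory, uint32_t lexer_address, const void *lexer) {
  memcpy(&wasm_memory[lexer_address], lexer, sizeof(TSLexerDataPrefix));
}

// ...and back out afterwards, using the same struct size in both directions.
static void prefix_out(void *lexer, const uint8_t *wasm_memory, uint32_t lexer_address) {
  memcpy(lexer, &wasm_memory[lexer_address], sizeof(TSLexerDataPrefix));
}
```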
@@ -1563,9 +1578,9 @@ static bool ts_wasm_store__call_lex_function(TSWasmStore *self, unsigned functio
   bool result = args[0].i32;
 
   memcpy(
-    &self->current_lexer->lookahead,
+    self->current_lexer,
     &memory_data[self->lexer_address],
-    sizeof(self->current_lexer->lookahead) + sizeof(self->current_lexer->result_symbol)
+    sizeof(TSLexerDataPrefix)
   );
   return result;
 }
@@ -1610,8 +1625,8 @@ bool ts_wasm_store_call_scanner_scan(
 
   memcpy(
     &memory_data[self->lexer_address],
-    &self->current_lexer->lookahead,
-    sizeof(self->current_lexer->lookahead)
+    self->current_lexer,
+    sizeof(TSLexerDataPrefix)
   );
 
   uint32_t valid_tokens_address =
@@ -1626,9 +1641,9 @@ bool ts_wasm_store_call_scanner_scan(
   if (self->has_error) return false;
 
   memcpy(
-    &self->current_lexer->lookahead,
+    self->current_lexer,
     &memory_data[self->lexer_address],
-    sizeof(self->current_lexer->lookahead) + sizeof(self->current_lexer->result_symbol)
+    sizeof(TSLexerDataPrefix)
   );
   return args[0].i32;
 }
@@ -1743,6 +1758,12 @@ void ts_wasm_language_release(const TSLanguage *self) {
   }
 }
 
+#ifdef _MSC_VER
+#pragma warning(pop)
+#elif defined(__GNUC__) || defined(__clang__)
+#pragma GCC diagnostic pop
+#endif
+
 #else
 
 // If the WASM feature is not enabled, define dummy versions of all of the
@@ -1,62 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-function usage {
-  cat <<EOF
-USAGE
-
-  $0 [-h] [-l language-name] [-e example-file-name] [-r repetition-count]
-
-OPTIONS
-
-  -h  print this message
-
-  -l  run only the benchmarks for the given language
-
-  -e  run only the benchmarks that parse the example file with the given name
-
-  -r  parse each sample the given number of times (default 5)
-
-  -g  debug
-
-EOF
-}
-
-mode=normal
-
-while getopts "hgl:e:r:" option; do
-  case ${option} in
-    h)
-      usage
-      exit
-      ;;
-    g)
-      mode=debug
-      ;;
-    e)
-      export TREE_SITTER_BENCHMARK_EXAMPLE_FILTER=${OPTARG}
-      ;;
-    l)
-      export TREE_SITTER_BENCHMARK_LANGUAGE_FILTER=${OPTARG}
-      ;;
-    r)
-      export TREE_SITTER_BENCHMARK_REPETITION_COUNT=${OPTARG}
-      ;;
-    *)
-      usage
-      exit 1
-      ;;
-  esac
-done
-
-if [[ $mode == debug ]]; then
-  test_binary=$(
-    cargo bench benchmark -p tree-sitter-cli --no-run --message-format=json 2> /dev/null |
-      jq -rs 'map(select(.target.name == "benchmark" and .executable))[0].executable'
-  )
-  env | grep TREE_SITTER
-  echo "$test_binary"
-else
-  exec cargo bench benchmark -p tree-sitter-cli
-fi
@@ -1,4 +0,0 @@
-@echo off
-
-cargo bench benchmark -p tree-sitter-cli
-exit /b %errorlevel%
@@ -1,76 +0,0 @@
-#!/usr/bin/env bash
-
-# shellcheck disable=SC2086
-
-set -e
-
-if [[ $(uname -s) != Linux ]]; then
-  printf 'Fuzzing is only supported on Linux\n' >&2
-  exit 1
-fi
-
-CC=${CC:-clang}
-CXX=${CXX:-clang++}
-
-default_fuzz_flags=-fsanitize=fuzzer,address,undefined
-
-export CFLAGS="$default_fuzz_flags $CFLAGS"
-export CXXFLAGS="$default_fuzz_flags $CXXFLAGS"
-
-make CC="$CC" CXX="$CXX" libtree-sitter.a
-
-if [[ -z $* ]]; then
-  mapfile -t languages < <(ls test/fixtures/grammars)
-else
-  languages=("$@")
-fi
-
-mkdir -p test/fuzz/out
-
-for lang in "${languages[@]}"; do
-  # skip typescript & php
-  if [[ $lang == typescript || $lang == php ]]; then
-    continue
-  fi
-  printf 'Building %s fuzzer...\n' "$lang"
-  lang_dir="test/fixtures/grammars/$lang"
-  lang_grammar="${lang_dir}/src/grammar.json"
-
-  # The following assumes each language is implemented as src/parser.c plus an
-  # optional scanner in src/scanner.c
-  objects=()
-
-  lang_scanner="${lang_dir}/src/scanner"
-  if [[ -f "${lang_scanner}.c" ]]; then
-    $CC $CFLAGS -std=c11 -g -O1 -I "${lang_dir}/src" -c "${lang_scanner}.c" -o "${lang_scanner}.o"
-    objects+=("${lang_scanner}.o")
-  fi
-
-  # Compiling with -O0 speeds up the build dramatically
-  $CC $CFLAGS -g -O0 -I "${lang_dir}/src" "${lang_dir}/src/parser.c" -c -o "${lang_dir}/src/parser.o"
-  objects+=("${lang_dir}/src/parser.o")
-
-  highlights_filename="${lang_dir}/queries/highlights.scm"
-  if [[ -f "${highlights_filename}" ]]; then
-    ts_lang_query_filename="${lang}.scm"
-    cp "${highlights_filename}" "test/fuzz/out/${ts_lang_query_filename}"
-  else
-    ts_lang_query_filename=""
-  fi
-
-  ts_lang="tree_sitter_$(jq -r .name "$lang_grammar")"
-  $CXX $CXXFLAGS -std=c++11 -Ilib/include \
-    -D TS_LANG="$ts_lang" \
-    -D TS_LANG_QUERY_FILENAME="\"${ts_lang_query_filename}\"" \
-    test/fuzz/fuzzer.cc \
-    "${objects[@]}" \
-    libtree-sitter.a \
-    -o "test/fuzz/out/${lang}_fuzzer"
-
-  jq '
-    [ ..
-      | if .type? == "STRING" or (.type? == "ALIAS" and .named? == false) then .value else empty end
-      | select(test("\\S") and length == utf8bytelength)
-    ] | unique | .[]
-  ' "$lang_grammar" | sort > "test/fuzz/out/${lang}.dict"
-done
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-declare -a EXPORT_FLAGS
-while read -r -d, function; do
-  EXPORT_FLAGS+=("-Wl,--export=${function:1:-1}")
-done < lib/src/wasm/stdlib-symbols.txt
-
-target/wasi-sdk-21.0/bin/clang-17 \
-  -o stdlib.wasm \
-  -Os \
-  -fPIC \
-  -Wl,--no-entry \
-  -Wl,--stack-first \
-  -Wl,-z -Wl,stack-size=65536 \
-  -Wl,--import-undefined \
-  -Wl,--import-memory \
-  -Wl,--import-table \
-  -Wl,--strip-debug \
-  -Wl,--export=reset_heap \
-  -Wl,--export=__wasm_call_ctors \
-  -Wl,--export=__stack_pointer \
-  "${EXPORT_FLAGS[@]}" \
-  lib/src/wasm/stdlib.c
-
-xxd -C -i stdlib.wasm > lib/src/wasm/wasm-stdlib.h
-mv stdlib.wasm target/
@@ -1,12 +0,0 @@
-#!/usr/bin/env bash
-
-src_dir=lib/src
-allocation_functions=(malloc calloc realloc free)
-
-for function in "${allocation_functions[@]}"; do
-  usages=$(grep -n -E "\b${function}\(" -r $src_dir --exclude alloc.c --exclude stdlib.c)
-  if [[ -n $usages ]]; then
-    printf 'The %s function should not be called directly, but is called here:\n%s\n' "$function" "$usages" >&2
-    exit 1
-  fi
-done
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-EMSDK_DIR=target/emsdk
-EMSCRIPTEN_VERSION=$(< cli/loader/emscripten-version)
-
-{
-  if [[ ! -f $EMSDK_DIR/emsdk ]]; then
-    printf 'Downloading emscripten SDK...\n'
-    git clone https://github.com/emscripten-core/emsdk.git $EMSDK_DIR
-  fi
-
-  cd $EMSDK_DIR
-
-  printf 'Updating emscripten SDK...\n'
-  git reset --hard
-  git pull
-  ./emsdk list
-
-  printf 'Installing emscripten...\n'
-  ./emsdk install "$EMSCRIPTEN_VERSION"
-
-  printf 'Activating emscripten...\n'
-  ./emsdk activate "$EMSCRIPTEN_VERSION"
-} >&2
@@ -1,37 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-GRAMMARS_DIR="$PWD/test/fixtures/grammars"
-
-fetch_grammar() {
-  local grammar=$1
-  local ref=$2
-  local grammar_dir="${GRAMMARS_DIR}/${grammar}"
-  local grammar_url=https://github.com/tree-sitter/tree-sitter-${grammar}
-
-  printf 'Updating %s grammar...\n' "$grammar"
-
-  if [[ ! -d "$grammar_dir" ]]; then
-    git clone "$grammar_url" "$grammar_dir" --depth=1
-  fi
-
-  git -C "$grammar_dir" fetch origin "$ref" --depth=1
-  git -C "$grammar_dir" reset --hard FETCH_HEAD
-}
-
-fetch_grammar bash master
-fetch_grammar c master
-fetch_grammar cpp master
-fetch_grammar embedded-template master
-fetch_grammar go master
-fetch_grammar html master
-fetch_grammar java master
-fetch_grammar javascript master
-fetch_grammar jsdoc master
-fetch_grammar json master
-fetch_grammar php master
-fetch_grammar python master
-fetch_grammar ruby master
-fetch_grammar rust master
-fetch_grammar typescript master
@@ -1,32 +0,0 @@
-@echo off
-
-call:fetch_grammar bash master
-call:fetch_grammar c master
-call:fetch_grammar cpp master
-call:fetch_grammar embedded-template master
-call:fetch_grammar go master
-call:fetch_grammar html master
-call:fetch_grammar java master
-call:fetch_grammar javascript master
-call:fetch_grammar jsdoc master
-call:fetch_grammar json master
-call:fetch_grammar php master
-call:fetch_grammar python master
-call:fetch_grammar ruby master
-call:fetch_grammar rust master
-call:fetch_grammar typescript master
-exit /B 0
-
-:fetch_grammar
-setlocal
-set grammar_dir=test\fixtures\grammars\%~1
-set grammar_url=https://github.com/tree-sitter/tree-sitter-%~1
-set grammar_branch=%~2
-@if not exist %grammar_dir% (
-  git clone %grammar_url% %grammar_dir% --depth=1
-)
-pushd %grammar_dir%
-git fetch origin %2 --depth=1
-git reset --hard FETCH_HEAD
-popd
-exit /B 0
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-output_path=lib/binding_rust/bindings.rs
-header_path=lib/include/tree_sitter/api.h
-no_derive_copy=(
-  TSInput
-  TSLanguage
-  TSLogger
-  TSLookaheadIterator
-  TSParser
-  TSTree
-  TSQuery
-  TSQueryCursor
-  TSQueryCapture
-  TSQueryMatch
-  TSQueryPredicateStep
-)
-no_copy=$(IFS='|'; echo "${no_derive_copy[*]}")
-
-file_version=$(head -n1 "$output_path" | cut -d' ' -f6)
-tool_version=$(bindgen --version | cut -d' ' -f2)
-higher_version=$(printf '%s\n' "$file_version" "$tool_version" | sort -V | tail -n1)
-
-if [[ "$higher_version" != "$tool_version" ]]; then
-  printf 'Latest used bindgen version was %s\n' "$file_version" >&2
-  printf 'Currently installed bindgen CLI version is %s\n\n' "$tool_version" >&2
-  # shellcheck disable=SC2016
-  printf 'You must upgrade bindgen CLI first with `cargo install bindgen-cli`\n' >&2
-  exit 1
-fi
-
-bindgen \
-  --no-layout-tests \
-  --allowlist-type '^TS.*' \
-  --allowlist-function '^ts_.*' \
-  --allowlist-var '^TREE_SITTER.*' \
-  --blocklist-type '^__.*' \
-  --no-prepend-enum-name \
-  --no-copy "$no_copy" \
-  --use-core \
-  "$header_path" \
-  -- \
-  -D TREE_SITTER_FEATURE_WASM \
-  > "$output_path"
@@ -1,27 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-ROOT_DIR="$PWD"
-GRAMMARS_DIR="$ROOT_DIR/test/fixtures/grammars"
-
-if [[ $CI == true ]]; then
-  set -x
-else
-  cargo build --release
-  TREE_SITTER="$ROOT_DIR/target/release/tree-sitter"
-fi
-
-filter_grammar_name="$1"
-
-while read -r grammar_file; do
-  grammar_dir="${grammar_file%/*}"
-  grammar_name="${grammar_dir##*/}"
-
-  if [[ -n $filter_grammar_name && "$filter_grammar_name" != "$grammar_name" ]]; then
-    continue
-  fi
-
-  printf 'Regenerating %s parser\n' "$grammar_name"
-  (cd "$grammar_dir" && "$TREE_SITTER" generate src/grammar.json --abi=latest)
-done < <(find "$GRAMMARS_DIR" -name grammar.js -not -path '*/node_modules/*')
@@ -1,33 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-ROOT_DIR="$PWD"
-GRAMMARS_DIR="$ROOT_DIR/test/fixtures/grammars"
-
-if [[ $CI == true ]]; then
-  set -x
-else
-  cargo build --release
-  TREE_SITTER="$ROOT_DIR/target/release/tree-sitter"
-fi
-
-build_wasm_args=
-if [[ $1 == --docker ]]; then
-  build_wasm_args=--docker
-  shift
-fi
-
-filter_grammar_name="$1"
-
-while read -r grammar_file; do
-  grammar_dir="${grammar_file%/*}"
-  grammar_name="${grammar_dir##*/}"
-
-  if [[ -n $filter_grammar_name && "$filter_grammar_name" != "$grammar_name" ]]; then
-    continue
-  fi
-
-  printf 'Compiling %s parser to wasm\n' "$grammar_name"
-  "$TREE_SITTER" build --wasm $build_wasm_args -o "target/release/tree-sitter-${grammar_name}.wasm" "$grammar_dir"
-done < <(find "$GRAMMARS_DIR" -name grammar.js -not -path '*/node_modules/*')
@@ -1,13 +0,0 @@
-@echo off
-
-setlocal EnableDelayedExpansion
-set tree_sitter="%cd%\target\release\tree-sitter"
-
-for /f "tokens=*" %%f in ('dir test\fixtures\grammars\grammar.js /b/s') do (
-  pushd "%%f\.."
-  echo Regenerating parser !cd!
-  %tree_sitter% generate src\grammar.json --abi=latest
-  popd
-)
-
-exit /B 0
@@ -1,36 +0,0 @@
-#!/usr/bin/env bash
-
-# Usage:
-#   script/heap-profile
-#
-# Parse an example source file and record memory usage
-#
-# Dependencies:
-# * `pprof` executable: https://github.com/google/pprof
-# * `gperftools` package: https://github.com/gperftools/gperftools
-
-set -e
-
-GRAMMARS_DIR="$PWD/test/fixtures/grammars"
-
-# Build the library
-make libtree-sitter.a
-
-# Build the heap-profiling harness
-clang++ \
-  -Wno-reorder-init-list \
-  -Wno-c99-designator \
-  -I lib/include \
-  -I "$GRAMMARS_DIR" \
-  -D GRAMMARS_DIR="\"${GRAMMARS_DIR}/\"" \
-  test/profile/heap.cc \
-  -l tcmalloc \
-  libtree-sitter.a \
-  -o target/heap-profile
-
-# Run the harness with heap profiling enabled.
-export HEAPPROFILE="$PWD/profile"
-target/heap-profile "$@"
-
-# Extract statistics using pprof.
-pprof -top -cum profile.0001.heap
@@ -1,35 +0,0 @@
-#!/bin/bash
-
-if (($# < 3)); then
-  echo "usage: $0 <language> <halt|recover> <testcase> [libFuzzer args...]" >&2
-  exit 1
-fi
-
-set -eu
-
-export ASAN_OPTIONS=quarantine_size_mb=10:detect_leaks=1:symbolize=1
-export UBSAN=print_stacktrace=1:halt_on_error=1:symbolize=1
-
-# check if CI env var exists
-if [[ -z ${CI:-} ]]; then
-  declare -A mode_config=(
-    [halt]='-timeout=1 -rss_limit_mb=2048'
-    [recover]='-timeout=10 -rss_limit_mb=2048'
-  )
-else
-  declare -A mode_config=(
-    [halt]='-max_total_time=120 -timeout=1 -rss_limit_mb=2048'
-    [recover]='-time=120 -timeout=10 -rss_limit_mb=2048'
-  )
-fi
-
-lang="$1"
-shift
-mode="$1"
-shift
-testcase="$1"
-shift
-# Treat remainder of arguments as libFuzzer arguments
-
-# shellcheck disable=SC2086
-test/fuzz/out/${lang}_fuzzer ${mode_config[$mode]} -runs=1 "$testcase" "$@"
@@ -1,42 +0,0 @@
-#!/usr/bin/env bash
-
-if (($# < 2)); then
-  echo "usage: $0 <language> <halt|recover> [libFuzzer args...]" >&2
-  exit 1
-fi
-
-set -eu
-
-export ASAN_OPTIONS=quarantine_size_mb=10:detect_leaks=1:symbolize=1
-export UBSAN=print_stacktrace=1:halt_on_error=1:symbolize=1
-
-# check if CI env var exists
-if [[ -z ${CI:-} ]]; then
-  declare -A mode_config=(
-    [halt]='-timeout=1 -rss_limit_mb=2048'
-    [recover]='-timeout=10 -rss_limit_mb=2048'
-  )
-else
-  declare -A mode_config=(
-    [halt]='-max_total_time=120 -timeout=1 -rss_limit_mb=2048'
-    [recover]='-time=120 -timeout=10 -rss_limit_mb=2048'
-  )
-fi
-
-lang="$1"
-shift
-mode="$1"
-shift
-# Treat remainder of arguments as libFuzzer arguments
-
-# Fuzzing logs and testcases are always written to `pwd`, so `cd` there first
-results="$PWD/test/fuzz/out/fuzz-results/${lang}"
-mkdir -p "${results}"
-cd "${results}"
-
-# Create a corpus directory, so new discoveries are stored on disk. These will
-# then be loaded on subsequent fuzzing runs
-mkdir -p corpus
-
-# shellcheck disable=SC2086
-../../${lang}_fuzzer -dict="../../${lang}.dict" -artifact_prefix=${lang}_ -max_len=2048 ${mode_config[$mode]} corpus "$@"
@@ -1,29 +0,0 @@
-#!/bin/bash
-
-root=$PWD
-cd docs
-
-bundle exec jekyll serve "$@" &
-
-bundle exec ruby <<RUBY &
-require "listen"
-
-def copy_wasm_files
-  `cp $root/lib/binding_web/tree-sitter.{js,wasm} $root/docs/assets/js/`
-  `cp $root/target/release/*.wasm $root/docs/assets/js/`
-end
-
-puts "Copying WASM files to docs folder..."
-copy_wasm_files
-
-puts "Watching release directory"
-listener = Listen.to("$root/lib/binding_web", only: /^tree-sitter\.(js|wasm)$/, wait_for_delay: 2) do
-  puts "WASM files updated. Copying new files to docs folder..."
-  copy_wasm_files
-end
-
-listener.start
-sleep
-RUBY
-
-wait
@@ -1,50 +0,0 @@
-#!/usr/bin/env node
-
-const {statSync} = require('fs');
-const {execFileSync} = require('child_process');
-const libPath = process.argv[2];
-
-if (!libPath || libPath === '--help') {
-  console.log(`Usage: ${process.argv[1]} <dylib-path>`);
-  process.exit(0)
-}
-
-// Get total file size
-const totalSize = statSync(libPath).size
-
-// Dump symbols with addresses
-const output = execFileSync(
-  'nm',
-  ['-t', 'd', libPath],
-  {encoding: 'utf8'}
-);
-
-// Parse addresses
-const addressEntries = [];
-for (const line of output.split('\n')) {
-  const [address, _, name] = line.split(/\s+/);
-  if (address && name) {
-    addressEntries.push({name, address: parseInt(address)})
-  }
-}
-
-// Compute sizes by subtracting addresses
-addressEntries.sort((a, b) => a.address - b.address);
-const sizeEntries = addressEntries.map(({name, address}, i) => {
-  const next = addressEntries[i + 1] ? addressEntries[i + 1].address : totalSize;
-  const size = next - address;
-  return {name, size}
-})
-
-function formatSize(sizeInBytes) {
-  return sizeInBytes > 1024
-    ? `${(sizeInBytes / 1024).toFixed(1)} kb`
-    : `${sizeInBytes} b`
-}
-
-// Display sizes
-sizeEntries.sort((a, b) => b.size - a.size);
-console.log('total'.padEnd(64, ' '), '\t', formatSize(totalSize));
-for (const entry of sizeEntries) {
-  console.log(entry.name.padEnd(64, ' '), '\t', formatSize(entry.size));
-}
script/test (101 deletions)
@@ -1,101 +0,0 @@
-#!/usr/bin/env bash
-
-set -e
-
-function usage {
-  cat <<EOF
-USAGE
-
-  $0 [-adDg] [-s SEED] [-l LANGUAGE] [-e EXAMPLE]
-
-OPTIONS
-
-  -h  Print this message
-
-  -a  Compile C code with the Clang address sanitizer
-
-  -e  Run only the corpus tests whose name contain the given string
-
-  -i  Run the given number of iterations of randomized tests (default 10)
-
-  -s  Set the seed used to control random behavior
-
-  -d  Print parsing log to stderr
-
-  -D  Generate an SVG graph of parsing logs
-
-  -g  Run the tests with a debugger
-
-EOF
-}
-
-export RUST_BACKTRACE=full
-
-mode=normal
-test_flags=()
-
-while getopts "adDghl:e:s:i:" option; do
-  case ${option} in
-    h)
-      usage
-      exit
-      ;;
-    a)
-      export CFLAGS=-fsanitize=undefined,address
-
-      # When the Tree-sitter C library is compiled with the address sanitizer, the address sanitizer
-      # runtime library needs to be linked into the final test executable. When using Xcode clang,
-      # the Rust linker doesn't know where to find that library, so we need to specify linker flags directly.
-      runtime_dir=$(cc -print-runtime-dir)
-      if [[ $runtime_dir == */Xcode.app/* ]]; then
-        export RUSTFLAGS="-C link-arg=-L${runtime_dir} -C link-arg=-lclang_rt.asan_osx_dynamic -C link-arg=-Wl,-rpath,${runtime_dir}"
-      fi
-
-      # Specify a `--target` explicitly. This is required for address sanitizer support.
-      toolchain=$(rustup show active-toolchain)
-      toolchain_regex='(stable|beta|nightly)-([_a-z0-9-]+).*'
-      if [[ $toolchain =~ $toolchain_regex ]]; then
-        release=${BASH_REMATCH[1]}
-        current_target=${BASH_REMATCH[2]}
-      else
-        printf "Failed to parse toolchain '%s'\n" "$toolchain" >&2
-      fi
-
-      test_flags+=("--target=$current_target")
-      ;;
-    e)
-      export TREE_SITTER_EXAMPLE=${OPTARG}
-      ;;
-    s)
-      export TREE_SITTER_SEED=${OPTARG}
-      ;;
-    i)
-      export TREE_SITTER_ITERATIONS=${OPTARG}
-      ;;
-    d)
-      export TREE_SITTER_LOG=1
-      ;;
-    D)
-      export TREE_SITTER_LOG_GRAPHS=1
-      ;;
-    g)
-      mode=debug
-      ;;
-    *)
-      usage
-      exit 1
-      ;;
-  esac
-done
-
-shift $((OPTIND - 1))
-
-if [[ ${mode} == debug ]]; then
-  test_binary=$(
-    cargo test "${test_flags[@]}" --no-run --message-format=json 2> /dev/null |
-      jq -rs 'map(select(.target.name == "tree-sitter-cli" and .executable))[0].executable'
-  )
-  lldb "${test_binary}" -- "$1"
-else
-  cargo test "${test_flags[@]}" "$1" -- --nocapture
-fi
|
|
@ -1,12 +0,0 @@
|
||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
cd lib/binding_web
|
|
||||||
|
|
||||||
if [[ ! -d node_modules/chai ]] || [[ ! -d node_modules/mocha ]]; then
|
|
||||||
printf 'Installing test dependencies...\n'
|
|
||||||
npm install
|
|
||||||
fi
|
|
||||||
|
|
||||||
node_modules/.bin/mocha
|
|
||||||
|
|
@@ -1,10 +0,0 @@
-@echo off
-
-setlocal
-set RUST_TEST_THREADS=1
-set RUST_BACKTRACE=full
-cargo test "%~1"
-if %errorlevel% NEQ 0 (
-  exit /b %errorlevel%
-)
-endlocal
@@ -1,24 +0,0 @@
-function scan_build {
-  extra_args=()
-
-  # AFAICT, in the trusty travis container the scan-build tool is from the 3.4
-  # installation. Therefore, by default it will use clang-3.4 when analysing code
-  # which doesn't support the '-std=c++14' (it is available via '-std=c++1y').
-  # Use the system-wide installed clang instead which is 3.5 and does support
-  # '-std=c++14'.
-  extra_args+=("--use-analyzer=$(command -v clang)")
-
-  # scan-build will try to guess which CXX should be used to compile the actual
-  # code, which is usually g++ but we need g++5 in the CI. Explicitly pass
-  # $CC/$CXX to scan-build if they are set in the environment.
-
-  if [[ -n $CC ]]; then
-    extra_args+=("--use-cc=$CC")
-  fi
-
-  if [[ -n $CXX ]]; then
-    extra_args+=("--use-c++=$CXX")
-  fi
-
-  scan-build "${extra_args[@]}" --status-bugs -disable-checker deadcode.DeadStores "$@"
-}
@@ -1,256 +0,0 @@
-# Errors
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Cond
-   fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
-   fun:_ZN6option6Parser5parseEbPKNS_10DescriptorEiPPKcPNS_6OptionES8_ibi
-   fun:_ZN6option6ParserC1EPKNS_10DescriptorEiPPcPNS_6OptionES7_ibi
-   fun:_ZN6bandit6detail7optionsC1EiPPc
-   fun:_ZN6bandit3runEiPPc
-   fun:main
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Cond
-   fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
-   fun:_ZN6option5Stats3addEbPKNS_10DescriptorEiPPKcib
-   fun:_ZN6option5StatsC1EPKNS_10DescriptorEiPPcib
-   fun:_ZN6bandit6detail7optionsC1EiPPc
-   fun:_ZN6bandit3runEiPPc
-   fun:main
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Cond
-   fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
-   fun:_ZN6bandit6detail7optionsC2EiPPc
-   fun:_ZN6bandit3runEiPPc
-   fun:main
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Value8
-   fun:_platform_memcmp
-   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
-   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
-   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
-   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
-   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
-   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Addr1
-   fun:_platform_memcmp
-   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
-   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
-   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
-   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
-   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
-   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Cond
-   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
-   fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
-   fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
-   fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
-   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
-   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Cond
-   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
-   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE3_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
-   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
-   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE3_NS_9allocatorIS4_EEFvvEEclEv
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Value8
-   fun:_platform_memcmp
-   fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
-   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
-   fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE3_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
-   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
-   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE3_NS_9allocatorIS4_EEFvvEEclEv
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Cond
-   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
-   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE1_NS_9allocatorIS4_EEFvvEEclEv
-   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
-   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
-   fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcI3$_0NS_9allocatorIS2_EEFvvEEclEv
-   fun:_ZN6bandit3runERKNS_6detail7optionsERKNSt3__14listINS4_8functionIFvvEEENS4_9allocatorIS8_EEEERNS4_5dequeIPNS0_7contextENS9_ISG_EEEERNS0_8listenerE
-   fun:_ZN6bandit3runEiPPc
-}
-
-{
-   <insert_a_suppression_name_here>
-   Memcheck:Value8
-   fun:_platform_memcmp
-   fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
-   fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
-   fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE1_NS_9allocatorIS4_EEFvvEEclEv
-   fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
-   fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
-   fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
-   fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcI3$_0NS_9allocatorIS2_EEFvvEEclEv
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
<insert_a_suppression_name_here>
|
|
||||||
Memcheck:Cond
|
|
||||||
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE4_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
|
|
||||||
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
|
|
||||||
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE4_NS_9allocatorIS4_EEFvvEEclEv
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
<insert_a_suppression_name_here>
|
|
||||||
Memcheck:Value8
|
|
||||||
fun:_platform_memcmp
|
|
||||||
fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
|
|
||||||
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE4_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
|
|
||||||
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
|
|
||||||
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE4_NS_9allocatorIS4_EEFvvEEclEv
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
<insert_a_suppression_name_here>
|
|
||||||
Memcheck:Cond
|
|
||||||
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
|
|
||||||
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
|
|
||||||
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
|
|
||||||
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
|
|
||||||
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
<insert_a_suppression_name_here>
|
|
||||||
Memcheck:Addr1
|
|
||||||
fun:_platform_memcmp
|
|
||||||
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
|
|
||||||
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
|
|
||||||
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
|
|
||||||
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
|
|
||||||
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
|
|
||||||
}
|
|
||||||
|
|
||||||
{
|
|
||||||
<insert_a_suppression_name_here>
|
|
||||||
Memcheck:Value8
|
|
||||||
fun:_platform_memcmp
|
|
||||||
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
|
|
||||||
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
|
|
||||||
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
|
|
||||||
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
|
|
||||||
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
|
|
||||||
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
|
|
||||||
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
|
|
||||||
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
|
|
||||||
}
|
|
||||||
|
|
||||||
# Leaks
|
|
||||||
|
|
||||||
{
|
|
||||||
<insert_a_suppression_name_here>
|
|
||||||
Memcheck:Leak
|
|
||||||
match-leak-kinds: possible
|
|
||||||
fun:malloc_zone_malloc
|
|
||||||
fun:_objc_copyClassNamesForImage
|
|
||||||
fun:_ZL9protocolsv
|
|
||||||
fun:_Z9readClassP10objc_classbb
|
|
||||||
fun:gc_init
|
|
||||||
fun:_ZL33objc_initializeClassPair_internalP10objc_classPKcS0_S0_
|
|
||||||
fun:layout_string_create
|
|
||||||
fun:_ZL12realizeClassP10objc_class
|
|
||||||
fun:_ZL22copySwiftV1MangledNamePKcb
|
|
||||||
fun:_ZL22copySwiftV1MangledNamePKcb
|
|
||||||
fun:_ZL22copySwiftV1MangledNamePKcb
|
|
||||||
fun:_ZL22copySwiftV1MangledNamePKcb
|
|
||||||
}
|
|
||||||
|
|
@@ -15,6 +15,9 @@ license.workspace = true
 keywords = ["incremental", "parsing", "syntax", "tagging"]
 categories = ["parsing", "text-editors"]

+[lints]
+workspace = true
+
 [lib]
 crate-type = ["lib", "staticlib"]

9 test/fixtures/test_grammars/epsilon_external_extra_tokens/corpus.txt vendored Normal file

@@ -0,0 +1,9 @@
==========================
A document
==========================

a b

---

(document)
11 test/fixtures/test_grammars/epsilon_external_extra_tokens/grammar.js vendored Normal file

@@ -0,0 +1,11 @@
module.exports = grammar({
  name: 'epsilon_external_extra_tokens',

  extras: $ => [/\s/, $.comment],

  externals: $ => [$.comment],

  rules: {
    document: $ => seq('a', 'b'),
  }
});
33 test/fixtures/test_grammars/epsilon_external_extra_tokens/scanner.c vendored Normal file

@@ -0,0 +1,33 @@
#include "tree_sitter/parser.h"

enum TokenType {
  COMMENT
};

void *tree_sitter_epsilon_external_extra_tokens_external_scanner_create(void) {
  return NULL;
}

bool tree_sitter_epsilon_external_extra_tokens_external_scanner_scan(
  void *payload,
  TSLexer *lexer,
  const bool *valid_symbols
) {
  lexer->result_symbol = COMMENT;
  return true;
}

unsigned tree_sitter_epsilon_external_extra_tokens_external_scanner_serialize(
  void *payload,
  char *buffer
) {
  return 0;
}

void tree_sitter_epsilon_external_extra_tokens_external_scanner_deserialize(
  void *payload,
  const char *buffer,
  unsigned length
) {}

void tree_sitter_epsilon_external_extra_tokens_external_scanner_destroy(void *payload) {}
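As a minimal sketch of how a fixture like this could be driven through the C API — assuming a parser generated from the grammar above, where `tree_sitter_epsilon_external_extra_tokens` is the conventional language function that `tree-sitter generate` would emit, and the program is linked against the generated `parser.c` plus the `scanner.c` shown here — the interesting property is that parsing terminates even though the external scanner claims a zero-width `comment` extra on every call, and the corpus's `(document)` tree is produced:

```
#include <stdio.h>
#include <stdlib.h>
#include <tree_sitter/api.h>

// Hypothetical declaration: this is the naming convention used by
// `tree-sitter generate` for a grammar named `epsilon_external_extra_tokens`.
const TSLanguage *tree_sitter_epsilon_external_extra_tokens(void);

int main(void) {
  TSParser *parser = ts_parser_new();
  ts_parser_set_language(parser, tree_sitter_epsilon_external_extra_tokens());

  // Parse the corpus input; zero-width external `comment` extras are
  // inserted between tokens, but parsing must still terminate.
  TSTree *tree = ts_parser_parse_string(parser, NULL, "a b", 3);

  char *sexp = ts_node_string(ts_tree_root_node(tree));
  printf("%s\n", sexp);  // expected: (document)

  free(sexp);
  ts_tree_delete(tree);
  ts_parser_delete(parser);
  return 0;
}
```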
0 test/fixtures/test_grammars/get_col_eof/corpus.txt vendored Normal file
11 test/fixtures/test_grammars/get_col_eof/grammar.js vendored Normal file

@@ -0,0 +1,11 @@
module.exports = grammar({
  name: "get_col_eof",

  externals: $ => [
    $.char
  ],

  rules: {
    source_file: $ => repeat($.char),
  }
});
34 test/fixtures/test_grammars/get_col_eof/scanner.c vendored Normal file

@@ -0,0 +1,34 @@
#include "tree_sitter/parser.h"

enum TokenType { CHAR };

void *tree_sitter_get_col_eof_external_scanner_create(void) { return NULL; }

void tree_sitter_get_col_eof_external_scanner_destroy(void *scanner) {}

unsigned tree_sitter_get_col_eof_external_scanner_serialize(void *scanner,
                                                            char *buffer) {
  return 0;
}

void tree_sitter_get_col_eof_external_scanner_deserialize(void *scanner,
                                                          const char *buffer,
                                                          unsigned length) {}

bool tree_sitter_get_col_eof_external_scanner_scan(void *scanner,
                                                   TSLexer *lexer,
                                                   const bool *valid_symbols) {
  if (lexer->eof(lexer)) {
    return false;
  }

  if (valid_symbols[CHAR]) {
    lexer->advance(lexer, false);
    lexer->get_column(lexer);
    lexer->result_symbol = CHAR;
    lexer->mark_end(lexer);
    return true;
  }

  return false;
}
10 test/fixtures/test_grammars/next_sibling_from_zwt/corpus.txt vendored Normal file

@@ -0,0 +1,10 @@
===========================
missing c node
===========================

abdef

---

(source
  (MISSING "c"))
22 test/fixtures/test_grammars/next_sibling_from_zwt/grammar.js vendored Normal file

@@ -0,0 +1,22 @@
module.exports = grammar({
  name: "next_sibling_from_zwt",
  extras: $ => [
    /\s|\\\r?\n/,
  ],

  rules: {
    source: $ => seq(
      'a',
      $._bc,
      'd',
      'e',
      'f',
    ),

    _bc: $ => seq(
      'b',
      'c',
    ),
  }
});
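The corpus above expects error recovery to insert a zero-width `MISSING "c"` node for the hidden `_bc` rule. As a hedged illustration — again assuming the conventional generated language function, here `tree_sitter_next_sibling_from_zwt` — such zero-width nodes can be located with `ts_node_is_missing` while walking the tree:

```
#include <stdio.h>
#include <tree_sitter/api.h>

// Hypothetical declaration emitted by `tree-sitter generate` for this grammar.
const TSLanguage *tree_sitter_next_sibling_from_zwt(void);

// Recursively report zero-width MISSING nodes inserted during error recovery.
static void report_missing(TSNode node) {
  if (ts_node_is_missing(node)) {
    printf("MISSING %s\n", ts_node_type(node));
  }
  for (uint32_t i = 0; i < ts_node_child_count(node); i++) {
    report_missing(ts_node_child(node, i));
  }
}

int main(void) {
  TSParser *parser = ts_parser_new();
  ts_parser_set_language(parser, tree_sitter_next_sibling_from_zwt());

  // "abdef" omits the "c" required by the hidden _bc rule.
  TSTree *tree = ts_parser_parse_string(parser, NULL, "abdef", 5);
  report_missing(ts_tree_root_node(tree));  // expected: MISSING c

  ts_tree_delete(tree);
  ts_parser_delete(parser);
  return 0;
}
```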
@@ -1,43 +0,0 @@
# Fuzzing tree-sitter

The tree-sitter fuzzing support requires 1) the `libFuzzer` runtime library and 2) a recent version of clang.

## libFuzzer

The main fuzzing logic is implemented by `libFuzzer`, which is part of the compiler-rt project but is not shipped by distros. `libFuzzer` will need to be built from source, e.g.:

```
cd ~/src
git clone https://github.com/llvm-mirror/compiler-rt
cd compiler-rt/lib/fuzzer
./build.sh
```

## clang

Using libFuzzer requires at least version 7 of `clang` and may _not_ work with your system-installed version. If your system-installed version is too old, the easiest way to get started is to use the version provided by the Chromium team. Instructions are available at [libFuzzer.info](http://libfuzzer.info).

The fuzzers can then be built with:

```
export CLANG_DIR=$HOME/src/third_party/llvm-build/Release+Asserts/bin
CC="$CLANG_DIR/clang" CXX="$CLANG_DIR/clang++" LINK="$CLANG_DIR/clang++" \
LIB_FUZZER_PATH=$HOME/src/compiler-rt/lib/fuzzer/libFuzzer.a \
./script/build-fuzzers
```

This will generate a separate fuzzer for each grammar defined in `test/fixtures/grammars`, each instrumented with [AddressSanitizer](https://clang.llvm.org/docs/AddressSanitizer.html) and [UndefinedBehaviorSanitizer](https://clang.llvm.org/docs/UndefinedBehaviorSanitizer.html). Individual fuzzers can be built with, for example, `./script/build-fuzzers python ruby`.

The `run-fuzzer` script handles running an individual fuzzer with a sensible default set of arguments:

```
./script/run-fuzzer <grammar-name> (halt|recover) <extra libFuzzer arguments...>
```

This will log information to stdout. Failing testcases and a fuzz corpus will be saved to `fuzz-results/<grammar-name>`. The most important extra `libFuzzer` options are `-jobs` and `-workers`, which allow parallel fuzzing. This can be done with, e.g.:

```
./script/run-fuzzer <grammar-name> halt -jobs=32 -workers=32
```

A failing testcase can be used to reproduce the crash by running:

```
./script/reproduce <grammar-name> (halt|recover) <path-to-testcase>
```
@@ -1,79 +0,0 @@
#include <cassert>
#include <fstream>
#include "tree_sitter/api.h"

extern "C" const TSLanguage *TS_LANG();

static TSQuery *lang_query;

extern "C" int LLVMFuzzerInitialize(int *argc, char ***argv) {
  if(TS_LANG_QUERY_FILENAME[0]) {
    // The query filename is relative to the fuzzing binary. Convert it
    // to an absolute path first
    auto binary_filename = std::string((*argv)[0]);
    auto binary_directory = binary_filename.substr(0, binary_filename.find_last_of("\\/"));
    auto lang_query_filename = binary_directory + "/" + TS_LANG_QUERY_FILENAME;

    auto f = std::ifstream(lang_query_filename);
    assert(f.good());
    std::string lang_query_source((std::istreambuf_iterator<char>(f)), std::istreambuf_iterator<char>());

    uint32_t error_offset = 0;
    TSQueryError error_type = TSQueryErrorNone;

    lang_query = ts_query_new(
      TS_LANG(),
      lang_query_source.c_str(),
      lang_query_source.size(),
      &error_offset,
      &error_type
    );

    assert(lang_query);
  }

  return 0;
}

extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
  const char *str = reinterpret_cast<const char *>(data);

  TSParser *parser = ts_parser_new();

  // This can fail if the language version doesn't match the runtime version
  bool language_ok = ts_parser_set_language(parser, TS_LANG());
  assert(language_ok);

  TSTree *tree = ts_parser_parse_string(parser, NULL, str, size);
  TSNode root_node = ts_tree_root_node(tree);

  if (lang_query != nullptr) {
    {
      TSQueryCursor *cursor = ts_query_cursor_new();

      ts_query_cursor_exec(cursor, lang_query, root_node);
      TSQueryMatch match;
      while (ts_query_cursor_next_match(cursor, &match)) {
      }

      ts_query_cursor_delete(cursor);
    }

    {
      TSQueryCursor *cursor = ts_query_cursor_new();

      ts_query_cursor_exec(cursor, lang_query, root_node);
      TSQueryMatch match;
      uint32_t capture_index;
      while (ts_query_cursor_next_capture(cursor, &match, &capture_index)) {
      }

      ts_query_cursor_delete(cursor);
    }
  }

  ts_tree_delete(tree);
  ts_parser_delete(parser);

  return 0;
}
@@ -1,42 +0,0 @@
#include <fstream>
#include <string>
#include <cstdlib>
#include <tree_sitter/api.h>

extern "C" {
#include "javascript/src/parser.c"
#include "javascript/src/scanner.c"
}

#define LANGUAGE tree_sitter_javascript
#define SOURCE_PATH "javascript/examples/jquery.js"

int main() {
  TSParser *parser = ts_parser_new();
  if (!ts_parser_set_language(parser, LANGUAGE())) {
    fprintf(stderr, "Invalid language\n");
    exit(1);
  }

  const char *source_path = GRAMMARS_DIR SOURCE_PATH;

  printf("Parsing %s\n", source_path);

  std::ifstream source_file(source_path);
  if (!source_file.good()) {
    fprintf(stderr, "Invalid source path %s\n", source_path);
    exit(1);
  }

  std::string source_code(
    (std::istreambuf_iterator<char>(source_file)),
    std::istreambuf_iterator<char>()
  );

  TSTree *tree = ts_parser_parse_string(
    parser,
    NULL,
    source_code.c_str(),
    source_code.size()
  );
}
@@ -11,10 +11,19 @@ keywords.workspace = true
 categories.workspace = true
 publish = false

+[lints]
+workspace = true
+
 [dependencies]
+anstyle.workspace = true
+anyhow.workspace = true
+bindgen = { version = "0.70.1" }
+cc.workspace = true
+clap.workspace = true
 git2.workspace = true
 indoc.workspace = true
 toml.workspace = true
+regex.workspace = true
 semver.workspace = true
 serde.workspace = true
 serde_json.workspace = true
75 xtask/src/benchmark.rs Normal file

@@ -0,0 +1,75 @@
use anyhow::Result;

use crate::{bail_on_err, Benchmark};

pub fn run(args: &Benchmark) -> Result<()> {
    if let Some(ref example) = args.example_file_name {
        std::env::set_var("TREE_SITTER_BENCHMARK_EXAMPLE_FILTER", example);
    }

    if let Some(ref language) = args.language {
        std::env::set_var("TREE_SITTER_BENCHMARK_LANGUAGE_FILTER", language);
    }

    if args.repetition_count != 5 {
        std::env::set_var(
            "TREE_SITTER_BENCHMARK_REPETITION_COUNT",
            args.repetition_count.to_string(),
        );
    }

    if args.debug {
        let output = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .arg("--no-run")
            .arg("--message-format=json")
            .spawn()?
            .wait_with_output()?;

        bail_on_err(&output, "Failed to run `cargo bench`")?;

        let json_output = serde_json::from_slice::<serde_json::Value>(&output.stdout)?;

        let test_binary = json_output
            .as_array()
            .ok_or_else(|| anyhow::anyhow!("Invalid JSON output"))?
            .iter()
            .find_map(|message| {
                if message
                    .get("target")
                    .and_then(|target| target.get("name"))
                    .and_then(|name| name.as_str())
                    .is_some_and(|name| name == "benchmark")
                    && message
                        .get("executable")
                        .and_then(|executable| executable.as_str())
                        .is_some()
                {
                    message
                        .get("executable")
                        .and_then(|executable| executable.as_str())
                } else {
                    None
                }
            })
            .ok_or_else(|| anyhow::anyhow!("Failed to find benchmark executable"))?;

        println!("{test_binary}");
    } else {
        let status = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .status()?;

        if !status.success() {
            anyhow::bail!("Failed to run `cargo bench`");
        }
    }

    Ok(())
}
228 xtask/src/build_wasm.rs Normal file

@@ -0,0 +1,228 @@
use std::{
    ffi::{OsStr, OsString},
    fmt::Write,
    fs,
    process::Command,
};

use anyhow::{anyhow, Result};

use crate::{bail_on_err, BuildWasm, EMSCRIPTEN_TAG};

#[derive(PartialEq, Eq)]
enum EmccSource {
    Native,
    Docker,
    Podman,
}

pub fn run_wasm(args: &BuildWasm) -> Result<()> {
    let mut emscripten_flags = vec!["-O3", "--minify", "0"];

    if args.debug {
        emscripten_flags.extend(["-s", "ASSERTIONS=1", "-s", "SAFE_HEAP=1", "-O0", "-g"]);
    }

    if args.verbose {
        emscripten_flags.extend(["-s", "VERBOSE=1", "-v"]);
    }

    let emcc_name = if cfg!(windows) { "emcc.bat" } else { "emcc" };

    // Order of preference: emscripten > docker > podman > error
    let source = if !args.docker && Command::new(emcc_name).output().is_ok() {
        EmccSource::Native
    } else if Command::new("docker")
        .arg("info")
        .output()
        .map_or(false, |out| out.status.success())
    {
        EmccSource::Docker
    } else if Command::new("podman")
        .arg("--version")
        .output()
        .map_or(false, |out| out.status.success())
    {
        EmccSource::Podman
    } else {
        return Err(anyhow!(
            "You must have either emcc, docker, or podman on your PATH to run this command"
        ));
    };

    let mut command = match source {
        EmccSource::Native => Command::new(emcc_name),
        EmccSource::Docker | EmccSource::Podman => {
            let mut command = match source {
                EmccSource::Docker => Command::new("docker"),
                EmccSource::Podman => Command::new("podman"),
                _ => unreachable!(),
            };
            command.args(["run", "--rm"]);

            // Mount the root directory as a volume, which is the repo root
            let mut volume_string = OsString::from(std::env::current_dir().unwrap());
            volume_string.push(":/src:Z");
            command.args([OsStr::new("--volume"), &volume_string]);

            // In case `docker` is an alias to `podman`, ensure that podman
            // mounts the current directory as writable by the container
            // user which has the same uid as the host user. Setting the
            // podman-specific variable is more reliable than attempting to
            // detect whether `docker` is an alias for `podman`.
            // see https://docs.podman.io/en/latest/markdown/podman-run.1.html#userns-mode
            command.env("PODMAN_USERNS", "keep-id");

            // Get the current user id so that files created in the docker container will have
            // the same owner.
            #[cfg(unix)]
            {
                #[link(name = "c")]
                extern "C" {
                    fn getuid() -> u32;
                }
                // don't need to set user for podman since PODMAN_USERNS=keep-id is already set
                if source == EmccSource::Docker {
                    let user_id = unsafe { getuid() };
                    command.args(["--user", &user_id.to_string()]);
                }
            };

            // Run `emcc` in a container using the `emscripten-slim` image
            command.args([EMSCRIPTEN_TAG, "emcc"]);
            command
        }
    };

    fs::create_dir_all("target/scratch").unwrap();

    let exported_functions = concat!(
        include_str!("../../lib/src/wasm/stdlib-symbols.txt"),
        include_str!("../../lib/binding_web/exports.txt")
    )
    .replace('"', "")
    .lines()
    .fold(String::new(), |mut output, line| {
        let _ = write!(output, "_{line}");
        output
    })
    .trim_end_matches(',')
    .to_string();

    let exported_functions = format!("EXPORTED_FUNCTIONS={exported_functions}");
    let exported_runtime_methods = "EXPORTED_RUNTIME_METHODS=stringToUTF16,AsciiToString";

    emscripten_flags.extend([
        "-s",
        "WASM=1",
        "-s",
        "INITIAL_MEMORY=33554432",
        "-s",
        "ALLOW_MEMORY_GROWTH=1",
        "-s",
        "SUPPORT_BIG_ENDIAN=1",
        "-s",
        "MAIN_MODULE=2",
        "-s",
        "FILESYSTEM=0",
        "-s",
        "NODEJS_CATCH_EXIT=0",
        "-s",
        "NODEJS_CATCH_REJECTION=0",
        "-s",
        &exported_functions,
        "-s",
        exported_runtime_methods,
        "-fno-exceptions",
        "-std=c11",
        "-D",
        "fprintf(...)=",
        "-D",
        "NDEBUG=",
        "-D",
        "_POSIX_C_SOURCE=200112L",
        "-D",
        "_DEFAULT_SOURCE=",
        "-I",
        "lib/src",
        "-I",
        "lib/include",
        "--js-library",
        "lib/binding_web/imports.js",
        "--pre-js",
        "lib/binding_web/prefix.js",
        "--post-js",
        "lib/binding_web/binding.js",
        "--post-js",
        "lib/binding_web/suffix.js",
        "lib/src/lib.c",
        "lib/binding_web/binding.c",
        "-o",
        "target/scratch/tree-sitter.js",
    ]);

    bail_on_err(
        &command.args(emscripten_flags).spawn()?.wait_with_output()?,
        "Failed to compile the Tree-sitter WASM library",
    )?;

    fs::rename(
        "target/scratch/tree-sitter.js",
        "lib/binding_web/tree-sitter.js",
    )?;

    fs::rename(
        "target/scratch/tree-sitter.wasm",
        "lib/binding_web/tree-sitter.wasm",
    )?;

    Ok(())
}

pub fn run_wasm_stdlib() -> Result<()> {
    let export_flags = include_str!("../../lib/src/wasm/stdlib-symbols.txt")
        .lines()
        .map(|line| format!("-Wl,--export={}", &line[1..line.len() - 1]))
        .collect::<Vec<String>>();

    let mut command = Command::new("target/wasi-sdk-21.0/bin/clang-17");

    let output = command
        .args([
            "-o",
            "stdlib.wasm",
            "-Os",
            "-fPIC",
            "-Wl,--no-entry",
            "-Wl,--stack-first",
            "-Wl,-z",
            "-Wl,stack-size=65536",
            "-Wl,--import-undefined",
            "-Wl,--import-memory",
            "-Wl,--import-table",
            "-Wl,--strip-debug",
            "-Wl,--export=reset_heap",
            "-Wl,--export=__wasm_call_ctors",
            "-Wl,--export=__stack_pointer",
        ])
        .args(export_flags)
        .arg("lib/src/wasm/stdlib.c")
        .output()?;

    bail_on_err(&output, "Failed to compile the Tree-sitter WASM stdlib")?;

    let xxd = Command::new("xxd")
        .args(["-C", "-i", "stdlib.wasm"])
        .output()?;

    bail_on_err(
        &xxd,
        "Failed to run xxd on the compiled Tree-sitter WASM stdlib",
    )?;

    fs::write("lib/src/wasm/wasm-stdlib.h", xxd.stdout)?;

    fs::rename("stdlib.wasm", "target/stdlib.wasm")?;

    Ok(())
}
@@ -1,11 +1,14 @@
 use std::{cmp::Ordering, path::Path};

+use anyhow::{anyhow, Result};
 use git2::{DiffOptions, Repository};
 use indoc::indoc;
 use semver::{BuildMetadata, Prerelease, Version};
 use toml::Value;

-pub fn get_latest_tag(repo: &Repository) -> Result<String, Box<dyn std::error::Error>> {
+use crate::BumpVersion;
+
+pub fn get_latest_tag(repo: &Repository) -> Result<String> {
     let mut tags = repo
         .tag_names(None)?
         .into_iter()

@@ -23,10 +26,10 @@ pub fn get_latest_tag(repo: &Repository) -> Result<String, Box<dyn std::error::E
     tags.last()
         .map(std::string::ToString::to_string)
-        .ok_or_else(|| "No tags found".into())
+        .ok_or_else(|| anyhow!("No tags found"))
 }

-pub fn bump_versions() -> Result<(), Box<dyn std::error::Error>> {
+pub fn run(args: BumpVersion) -> Result<()> {
     let repo = Repository::open(".")?;
     let latest_tag = get_latest_tag(&repo)?;
     let current_version = Version::parse(&latest_tag)?;

@@ -104,35 +107,39 @@ pub fn bump_versions() -> Result<(), Box<dyn std::error::Err
         }
     }

-    let mut version = current_version.clone();
-    if should_increment_minor {
-        version.minor += 1;
-        version.patch = 0;
-        version.pre = Prerelease::EMPTY;
-        version.build = BuildMetadata::EMPTY;
-    } else if should_increment_patch {
-        version.patch += 1;
-        version.pre = Prerelease::EMPTY;
-        version.build = BuildMetadata::EMPTY;
+    let next_version = if let Some(version) = args.version {
+        version
     } else {
-        return Err(format!("No source code changed since {current_version}").into());
-    }
+        let mut next_version = current_version.clone();
+        if should_increment_minor {
+            next_version.minor += 1;
+            next_version.patch = 0;
+            next_version.pre = Prerelease::EMPTY;
+            next_version.build = BuildMetadata::EMPTY;
+        } else if should_increment_patch {
+            next_version.patch += 1;
+            next_version.pre = Prerelease::EMPTY;
+            next_version.build = BuildMetadata::EMPTY;
+        } else {
+            return Err(anyhow!(format!(
+                "No source code changed since {current_version}"
+            )));
+        }
+        next_version
+    };

-    println!("Bumping from {current_version} to {version}");
-    update_crates(&current_version, &version)?;
-    update_makefile(&version)?;
-    update_cmake(&version)?;
-    update_npm(&version)?;
-    update_zig(&version)?;
-    tag_next_version(&repo, &version)?;
+    println!("Bumping from {current_version} to {next_version}");
+    update_crates(&current_version, &next_version)?;
+    update_makefile(&next_version)?;
+    update_cmake(&next_version)?;
+    update_npm(&next_version)?;
+    update_zig(&next_version)?;
+    tag_next_version(&repo, &next_version)?;

     Ok(())
 }

-fn tag_next_version(
-    repo: &Repository,
-    next_version: &Version,
-) -> Result<(), Box<dyn std::error::Error>> {
+fn tag_next_version(repo: &Repository, next_version: &Version) -> Result<()> {
     // first add the manifests

     let mut index = repo.index()?;

@@ -184,7 +191,7 @@ fn tag_next_version(
     Ok(())
 }

-fn update_makefile(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
+fn update_makefile(next_version: &Version) -> Result<()> {
     let makefile = std::fs::read_to_string("Makefile")?;
     let makefile = makefile
         .lines()

@@ -204,7 +211,7 @@ fn update_makefile(next_version: &Version) -> Result<(), Box<dyn std::error::Err
     Ok(())
 }

-fn update_cmake(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
+fn update_cmake(next_version: &Version) -> Result<()> {
     let cmake = std::fs::read_to_string("lib/CMakeLists.txt")?;
     let cmake = cmake
         .lines()

@@ -230,10 +237,7 @@ fn update_cmake(next_version: &Version) -> Result<(), Box<dyn std::error::Error>
     Ok(())
 }

-fn update_crates(
-    current_version: &Version,
-    next_version: &Version,
-) -> Result<(), Box<dyn std::error::Error>> {
+fn update_crates(current_version: &Version, next_version: &Version) -> Result<()> {
     let mut cmd = std::process::Command::new("cargo");
     cmd.arg("workspaces").arg("version");

@@ -253,20 +257,20 @@
     let status = cmd.status()?;

     if !status.success() {
-        return Err("Failed to update crates".into());
+        return Err(anyhow!("Failed to update crates"));
     }

     Ok(())
 }

-fn update_npm(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
+fn update_npm(next_version: &Version) -> Result<()> {
     for path in ["lib/binding_web/package.json", "cli/npm/package.json"] {
         let package_json =
             serde_json::from_str::<serde_json::Value>(&std::fs::read_to_string(path)?)?;

         let mut package_json = package_json
             .as_object()
-            .ok_or("Invalid package.json")?
+            .ok_or_else(|| anyhow!("Invalid package.json"))?
             .clone();
         package_json.insert(
             "version".to_string(),

@@ -281,7 +285,7 @@ fn update_npm(next_version: &Version) -> Result<(), Box<dyn std::error::Error>>
     Ok(())
 }

-fn update_zig(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> {
+fn update_zig(next_version: &Version) -> Result<()> {
     let zig = std::fs::read_to_string("build.zig.zon")?;

     let zig = zig

@@ -303,7 +307,7 @@ fn update_zig(next_version: &Version) -> Result<(), Box<dyn std::error::Error>>
 }

 /// read Cargo.toml and get the version
-fn fetch_workspace_version() -> Result<String, Box<dyn std::error::Error>> {
+fn fetch_workspace_version() -> Result<String> {
     let cargo_toml = toml::from_str::<Value>(&std::fs::read_to_string("Cargo.toml")?)?;

     Ok(cargo_toml["workspace"]["package"]["version"]
33 xtask/src/clippy.rs Normal file

@@ -0,0 +1,33 @@
use std::process::Command;

use anyhow::Result;

use crate::{bail_on_err, Clippy};

pub fn run(args: &Clippy) -> Result<()> {
    let mut clippy_command = Command::new("cargo");
    clippy_command.arg("+nightly").arg("clippy");

    if let Some(package) = args.package.as_ref() {
        clippy_command.args(["--package", package]);
    } else {
        clippy_command.arg("--workspace");
    }

    clippy_command
        .arg("--release")
        .arg("--all-targets")
        .arg("--all-features")
        .arg("--")
        .arg("-D")
        .arg("warnings");

    if args.fix {
        clippy_command.arg("--fix");
    }

    bail_on_err(
        &clippy_command.spawn()?.wait_with_output()?,
        "Clippy failed",
    )
}
119 xtask/src/fetch.rs Normal file

@@ -0,0 +1,119 @@
use std::{path::Path, process::Command};

use anyhow::Result;

use crate::{bail_on_err, EMSCRIPTEN_VERSION};

pub fn run_fixtures() -> Result<()> {
    let grammars_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .join("test")
        .join("fixtures")
        .join("grammars");

    [
        ("bash", "master"),
        ("c", "master"),
        ("cpp", "master"),
        ("embedded-template", "master"),
        ("go", "master"),
        ("html", "master"),
        ("java", "master"),
        ("javascript", "master"),
        ("jsdoc", "master"),
        ("json", "master"),
        ("php", "master"),
        ("python", "master"),
        ("ruby", "master"),
        ("rust", "master"),
        ("typescript", "master"),
    ]
    .iter()
    .try_for_each(|(grammar, r#ref)| {
        let grammar_dir = grammars_dir.join(grammar);
        let grammar_url = format!("https://github.com/tree-sitter/tree-sitter-{grammar}");

        println!("Updating the {grammar} grammar...");

        if !grammar_dir.exists() {
            let mut command = Command::new("git");
            command.args([
                "clone",
                "--depth",
                "1",
                &grammar_url,
                &grammar_dir.to_string_lossy(),
            ]);
            bail_on_err(
                &command.spawn()?.wait_with_output()?,
                "Failed to clone the {grammar} grammar",
            )?;
        }

        std::env::set_current_dir(&grammar_dir)?;

        let mut command = Command::new("git");
        command.args(["fetch", "origin", r#ref, "--depth", "1"]);
        bail_on_err(
            &command.spawn()?.wait_with_output()?,
            "Failed to fetch the {grammar} grammar",
        )?;

        let mut command = Command::new("git");
        command.args(["reset", "--hard", "FETCH_HEAD"]);
        bail_on_err(
            &command.spawn()?.wait_with_output()?,
            "Failed to reset the {grammar} grammar",
        )?;

        Ok(())
    })
}

pub fn run_emscripten() -> Result<()> {
    let emscripten_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .join("target")
        .join("emsdk");

    if emscripten_dir.exists() {
        println!("Emscripten SDK already exists");
        return Ok(());
    }
    println!("Cloning the Emscripten SDK...");

    let mut command = Command::new("git");
    command.args([
        "clone",
        "https://github.com/emscripten-core/emsdk.git",
        &emscripten_dir.to_string_lossy(),
    ]);
    bail_on_err(
        &command.spawn()?.wait_with_output()?,
        "Failed to clone the Emscripten SDK",
    )?;

    std::env::set_current_dir(&emscripten_dir)?;

    let emsdk = if cfg!(windows) {
        "emsdk.bat"
    } else {
        "./emsdk"
    };

    let mut command = Command::new(emsdk);
    command.args(["install", EMSCRIPTEN_VERSION]);
    bail_on_err(
        &command.spawn()?.wait_with_output()?,
        "Failed to install Emscripten",
    )?;

    let mut command = Command::new(emsdk);
    command.args(["activate", EMSCRIPTEN_VERSION]);
    bail_on_err(
        &command.spawn()?.wait_with_output()?,
        "Failed to activate Emscripten",
    )
}
118 xtask/src/generate.rs Normal file

@@ -0,0 +1,118 @@
use std::{ffi::OsStr, fs, process::Command};

use anyhow::{Context, Result};

use crate::{bail_on_err, GenerateFixtures};

const HEADER_PATH: &str = "include/tree_sitter/api.h";

pub fn run_fixtures(args: &GenerateFixtures) -> Result<()> {
    let output = std::process::Command::new("cargo")
        .args(["build", "--release"])
        .spawn()?
        .wait_with_output()?;
    bail_on_err(&output, "Failed to run cargo build")?;

    let tree_sitter_binary = std::env::current_dir()?
        .join("target")
        .join("release")
        .join("tree-sitter");

    let grammars_dir = std::env::current_dir()?
        .join("test")
        .join("fixtures")
        .join("grammars");

    for grammar_file in find_grammar_files(grammars_dir.to_str().unwrap()).flatten() {
        let grammar_dir = grammar_file.parent().unwrap();
        let grammar_name = grammar_dir.file_name().and_then(OsStr::to_str).unwrap();

        println!(
            "Regenerating {grammar_name} parser{}",
            if args.wasm { " to wasm" } else { "" }
        );

        if args.wasm {
            let mut cmd = Command::new(&tree_sitter_binary);
            let cmd = cmd.args([
                "build",
                "--wasm",
                "-o",
                &format!("target/release/tree-sitter-{grammar_name}.wasm"),
                grammar_dir.to_str().unwrap(),
            ]);
            if args.docker {
                cmd.arg("--docker");
            }
            bail_on_err(
                &cmd.spawn()?.wait_with_output()?,
                &format!("Failed to regenerate {grammar_name} parser to wasm"),
            )?;
        } else {
            let output = Command::new(&tree_sitter_binary)
                .arg("generate")
                .arg("src/grammar.json")
                .arg("--abi=latest")
                .current_dir(grammar_dir)
                .spawn()?
                .wait_with_output()?;
            bail_on_err(
                &output,
                &format!("Failed to regenerate {grammar_name} parser"),
            )?;
        }
    }

    Ok(())
}

pub fn run_bindings() -> Result<()> {
    let no_copy = [
        "TSInput",
        "TSLanguage",
        "TSLogger",
        "TSLookaheadIterator",
        "TSParser",
        "TSTree",
        "TSQuery",
        "TSQueryCursor",
        "TSQueryCapture",
        "TSQueryMatch",
        "TSQueryPredicateStep",
    ];

    let bindings = bindgen::Builder::default()
        .header(HEADER_PATH)
        .layout_tests(false)
        .allowlist_type("^TS.*")
        .allowlist_function("^ts_.*")
        .allowlist_var("^TREE_SITTER.*")
        .no_copy(no_copy.join("|"))
        .prepend_enum_name(false)
        .use_core()
        .clang_arg("-D TREE_SITTER_FEATURE_WASM")
        .generate()
        .expect("Failed to generate bindings");

    bindings
        .write_to_file("lib/binding_rust/bindings.rs")
        .with_context(|| "Failed to write bindings")
}

fn find_grammar_files(
    dir: &str,
) -> impl Iterator<Item = Result<std::path::PathBuf, std::io::Error>> {
    fs::read_dir(dir)
        .expect("Failed to read directory")
        .filter_map(Result::ok)
        .flat_map(|entry| {
            let path = entry.path();
            if path.is_dir() && !path.to_string_lossy().contains("node_modules") {
                Box::new(find_grammar_files(path.to_str().unwrap())) as Box<dyn Iterator<Item = _>>
            } else if path.is_file() && path.file_name() == Some(OsStr::new("grammar.js")) {
                Box::new(std::iter::once(Ok(path))) as _
            } else {
                Box::new(std::iter::empty()) as _
            }
        })
}
Some files were not shown because too many files have changed in this diff.