feat: move scripts to xtasks

This commit is contained in:
Amaan Qureshi 2024-10-06 13:41:47 -04:00
parent 5c6445edea
commit dbe8bbf480
37 changed files with 1013 additions and 1234 deletions

View file

@ -36,22 +36,22 @@ jobs:
# When adding a new `target`: # When adding a new `target`:
# 1. Define a new platform alias above # 1. Define a new platform alias above
# 2. Add a new record to a matrix map in `cli/npm/install.js` # 2. Add a new record to a matrix map in `cli/npm/install.js`
- { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true } - { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
- { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true } - { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
- { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-20.04 , cli_features: wasm } #2272 - { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-20.04 , cli_features: wasm } #2272
- { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true } - { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
- { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true } - { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
- { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest } - { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
- { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , cli_features: wasm } - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , cli_features: wasm }
- { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest } - { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
- { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-14 , cli_features: wasm } - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-14 , cli_features: wasm }
- { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-12 , cli_features: wasm } - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-12 , cli_features: wasm }
# Cross compilers for C library # Cross compilers for C library
- { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar } - { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar }
- { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar } - { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar }
- { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar } - { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar }
- { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar } - { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar }
# See #2041 tree-sitter issue # See #2041 tree-sitter issue
- { platform: windows-x64 , rust-test-threads: 1 } - { platform: windows-x64 , rust-test-threads: 1 }
@ -86,23 +86,30 @@ jobs:
- name: Install cross - name: Install cross
if: ${{ matrix.use-cross }} if: ${{ matrix.use-cross }}
uses: taiki-e/install-action@v2 run: cargo install cross --git https://github.com/cross-rs/cross
with:
tool: cross
- name: Build custom cross image - name: Build custom cross image
if: ${{ matrix.use-cross && matrix.os == 'ubuntu-latest' }} if: ${{ matrix.use-cross }}
run: | run: |
target="${{ matrix.target }}" target="${{ matrix.target }}"
image=ghcr.io/cross-rs/$target:custom image=ghcr.io/cross-rs/$target:custom
echo "CROSS_IMAGE=$image" >> $GITHUB_ENV
echo "[target.$target]" >> Cross.toml echo "[target.$target]" >> Cross.toml
echo "image = \"$image\"" >> Cross.toml echo "image = \"$image\"" >> Cross.toml
echo "CROSS_CONFIG=$PWD/Cross.toml" >> $GITHUB_ENV echo "[build]" >> Cross.toml
echo "pre-build = [" >> Cross.toml
echo " \"dpkg --add-architecture \$CROSS_DEB_ARCH\"," >> Cross.toml
echo " \"apt-get update && apt-get -y install libssl-dev\"" >> Cross.toml
echo "]" >> Cross.toml
echo "Cross.toml:"
cat Cross.toml
echo "CROSS_IMAGE=$image" >> $GITHUB_ENV
echo "CROSS_CONFIG=$PWD/Cross.toml" >> $GITHUB_ENV
echo "FROM ghcr.io/cross-rs/$target:edge" >> Dockerfile echo "FROM ghcr.io/cross-rs/$target:edge" >> Dockerfile
echo "RUN curl -fsSL https://deb.nodesource.com/setup_16.x | bash -" >> Dockerfile echo "RUN curl -fsSL https://deb.nodesource.com/setup_22.x | bash -" >> Dockerfile
echo "RUN apt-get update && apt-get -y install nodejs" >> Dockerfile echo "RUN apt-get update && apt-get -y install nodejs" >> Dockerfile
docker build -t $image . docker build -t $image .
@ -139,22 +146,22 @@ jobs:
- name: Build wasm library - name: Build wasm library
if: ${{ !matrix.cli-only && !matrix.use-cross }} # No sense to build on the same Github runner hosts many times if: ${{ !matrix.cli-only && !matrix.use-cross }} # No sense to build on the same Github runner hosts many times
run: script/build-wasm run: $BUILD_CMD run --package xtask -- build-wasm
- run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.cli_features }} - run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.cli_features }}
- run: script/fetch-fixtures - run: $BUILD_CMD run --package xtask -- fetch-fixtures
- uses: ./.github/actions/cache - uses: ./.github/actions/cache
id: cache id: cache
- name: Generate fixtures - name: Generate fixtures
if: ${{ !matrix.cli-only && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # Can't natively run CLI on Github runner's host if: ${{ !matrix.cli-only && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # Can't natively run CLI on Github runner's host
run: script/generate-fixtures run: $BUILD_CMD run --package xtask -- generate-fixtures
- name: Generate WASM fixtures - name: Generate WASM fixtures
if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # See comment for the "Build wasm library" step if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test && steps.cache.outputs.cache-hit != 'true' }} # See comment for the "Build wasm library" step
run: script/generate-fixtures-wasm run: $BUILD_CMD run --package xtask -- generate-fixtures --wasm
- name: Run main tests - name: Run main tests
if: ${{ !matrix.cli-only && inputs.run_test }} # Can't natively run CLI on Github runner's host if: ${{ !matrix.cli-only && inputs.run_test }} # Can't natively run CLI on Github runner's host
@ -162,7 +169,7 @@ jobs:
- name: Run wasm tests - name: Run wasm tests
if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # See comment for the "Build wasm library" step if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # See comment for the "Build wasm library" step
run: script/test-wasm run: $BUILD_CMD run --package xtask -- test-wasm
- name: Run benchmarks - name: Run benchmarks
if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # Cross-compiled benchmarks make no sense if: ${{ !matrix.cli-only && !matrix.use-cross && inputs.run_test }} # Cross-compiled benchmarks make no sense

View file

@ -84,12 +84,12 @@ jobs:
- name: Build wasm - name: Build wasm
if: matrix.directory == 'lib/binding_web' if: matrix.directory == 'lib/binding_web'
run: ./script/build-wasm run: cargo xtask build-wasm
- name: Setup Node - name: Setup Node
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
node-version: 18 node-version: 20
registry-url: "https://registry.npmjs.org" registry-url: "https://registry.npmjs.org"
- name: Publish lib to npmjs.com - name: Publish lib to npmjs.com

View file

@ -24,13 +24,13 @@ jobs:
- run: rustup toolchain install stable --profile minimal - run: rustup toolchain install stable --profile minimal
- uses: Swatinem/rust-cache@v2 - uses: Swatinem/rust-cache@v2
- run: cargo build --release - run: cargo build --release
- run: script/fetch-fixtures - run: cargo xtask fetch-fixtures
- uses: ./.github/actions/cache - uses: ./.github/actions/cache
id: cache id: cache
- if: ${{ steps.cache.outputs.cache-hit != 'true' }} - if: ${{ steps.cache.outputs.cache-hit != 'true' }}
run: script/generate-fixtures run: cargo xtask generate-fixtures
- name: Run main tests with undefined behaviour sanitizer (UBSAN) - name: Run main tests with undefined behaviour sanitizer (UBSAN)
env: env:

View file

@ -95,13 +95,13 @@ uninstall:
##### Dev targets ##### ##### Dev targets #####
test: test:
script/fetch-fixtures cargo xtask fetch-fixtures
script/generate-fixtures cargo xtask generate-fixtures
script/test cargo xtask test
test_wasm: test_wasm:
script/generate-fixtures-wasm cargo xtask generate-fixtures-wasm
script/test-wasm cargo xtask test-wasm
lint: lint:
cargo update --workspace --locked --quiet cargo update --workspace --locked --quiet

View file

@ -1,62 +0,0 @@
#!/usr/bin/env bash
# Run the tree-sitter parsing benchmarks, optionally filtered by language or
# example file. With -g, build the benchmark binary without running it so it
# can be launched under a debugger by hand.
set -e
function usage {
cat <<EOF
USAGE
$0 [-h] [-l language-name] [-e example-file-name] [-r repetition-count]
OPTIONS
-h print this message
-l run only the benchmarks for the given language
-e run only the benchmarks that parse the example file with the given name
-r parse each sample the given number of times (default 5)
-g debug
EOF
}
mode=normal
# Filters and repetition count are passed to the benchmark binary through
# TREE_SITTER_BENCHMARK_* environment variables, not CLI flags.
while getopts "hgl:e:r:" option; do
case ${option} in
h)
usage
exit
;;
g)
mode=debug
;;
e)
export TREE_SITTER_BENCHMARK_EXAMPLE_FILTER=${OPTARG}
;;
l)
export TREE_SITTER_BENCHMARK_LANGUAGE_FILTER=${OPTARG}
;;
r)
export TREE_SITTER_BENCHMARK_REPETITION_COUNT=${OPTARG}
;;
*)
usage
exit 1
;;
esac
done
if [[ $mode == debug ]]; then
# Compile the benchmark binary (--no-run) and extract its path from cargo's
# JSON output, then print the relevant env vars and the path so the user
# can attach a debugger themselves.
test_binary=$(
cargo bench benchmark -p tree-sitter-cli --no-run --message-format=json 2> /dev/null |
jq -rs 'map(select(.target.name == "benchmark" and .executable))[0].executable'
)
env | grep TREE_SITTER
echo "$test_binary"
else
exec cargo bench benchmark -p tree-sitter-cli
fi

View file

@ -1,4 +0,0 @@
@echo off
:: Windows counterpart of the benchmark script: run the tree-sitter-cli
:: benchmarks and propagate cargo's exit code.
cargo bench benchmark -p tree-sitter-cli
exit /b %errorlevel%

View file

@ -1,76 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2086
# Build a libFuzzer fuzz target for each fixture grammar (Linux only).
# For every grammar: compile its parser (and optional scanner) with
# fuzzer/address/undefined sanitizers, link against libtree-sitter.a and
# test/fuzz/fuzzer.cc, and emit a fuzzing dictionary of the grammar's
# literal tokens into test/fuzz/out/.
set -e
if [[ $(uname -s) != Linux ]]; then
printf 'Fuzzing is only supported on Linux\n' >&2
exit 1
fi
CC=${CC:-clang}
CXX=${CXX:-clang++}
default_fuzz_flags=-fsanitize=fuzzer,address,undefined
export CFLAGS="$default_fuzz_flags $CFLAGS"
export CXXFLAGS="$default_fuzz_flags $CXXFLAGS"
make CC="$CC" CXX="$CXX" libtree-sitter.a
# With no arguments, fuzz every fixture grammar; otherwise only the named ones.
if [[ -z $* ]]; then
mapfile -t languages < <(ls test/fixtures/grammars)
else
languages=("$@")
fi
mkdir -p test/fuzz/out
for lang in "${languages[@]}"; do
# skip typescript & php
if [[ $lang == typescript || $lang == php ]]; then
continue
fi
printf 'Building %s fuzzer...\n' "$lang"
lang_dir="test/fixtures/grammars/$lang"
lang_grammar="${lang_dir}/src/grammar.json"
# The following assumes each language is implemented as src/parser.c plus an
# optional scanner in src/scanner.c
objects=()
lang_scanner="${lang_dir}/src/scanner"
if [[ -f "${lang_scanner}.c" ]]; then
$CC $CFLAGS -std=c11 -g -O1 -I "${lang_dir}/src" -c "${lang_scanner}.c" -o "${lang_scanner}.o"
objects+=("${lang_scanner}.o")
fi
# Compiling with -O0 speeds up the build dramatically
$CC $CFLAGS -g -O0 -I "${lang_dir}/src" "${lang_dir}/src/parser.c" -c -o "${lang_dir}/src/parser.o"
objects+=("${lang_dir}/src/parser.o")
# Ship the grammar's highlight query next to the fuzzer if one exists,
# so the fuzzer can also exercise query execution.
highlights_filename="${lang_dir}/queries/highlights.scm"
if [[ -f "${highlights_filename}" ]]; then
ts_lang_query_filename="${lang}.scm"
cp "${highlights_filename}" "test/fuzz/out/${ts_lang_query_filename}"
else
ts_lang_query_filename=""
fi
# The language entry point symbol, e.g. tree_sitter_javascript.
ts_lang="tree_sitter_$(jq -r .name "$lang_grammar")"
$CXX $CXXFLAGS -std=c++11 -Ilib/include \
-D TS_LANG="$ts_lang" \
-D TS_LANG_QUERY_FILENAME="\"${ts_lang_query_filename}\"" \
test/fuzz/fuzzer.cc \
"${objects[@]}" \
libtree-sitter.a \
-o "test/fuzz/out/${lang}_fuzzer"
# Build a libFuzzer dictionary from the grammar's literal string tokens
# (STRING rules and unnamed ALIASes); the select() filters out whitespace
# and multi-byte tokens.
jq '
[ ..
| if .type? == "STRING" or (.type? == "ALIAS" and .named? == false) then .value else empty end
| select(test("\\S") and length == utf8bytelength)
] | unique | .[]
' "$lang_grammar" | sort > "test/fuzz/out/${lang}.dict"
done

View file

@ -1,147 +0,0 @@
#!/usr/bin/env bash
# Compile the tree-sitter WASM library with emscripten, producing
# lib/binding_web/tree-sitter.js and tree-sitter.wasm. Uses a local `emcc`
# when available, otherwise falls back to the emscripten docker/podman image.
usage() {
cat <<EOF
USAGE
$0 [--help] [--debug] [--docker]
SUMMARY
Compile the Tree-sitter WASM library. This will create two files in the
\`lib/binding_web\` directory: \`tree-sitter.js\` and \`tree-sitter.wasm\`.
REQUIREMENTS
You must have either the \`emcc\` command or the \`docker\` command
on your PATH for this to work.
OPTIONS
--help: Display this message.
--debug: Compile the library more quickly, with fewer optimizations
and more runtime assertions.
--docker: Run emscripten using docker, even if \`emcc\` is installed.
By default, \`emcc\` will be run directly when available.
EOF
}
set -e
WEB_DIR=lib/binding_web
SRC_DIR=lib/src
# Pin the emscripten version to the one recorded for the CLI's loader.
EMSCRIPTEN_VERSION=$(< cli/loader/emscripten-version)
verbose=0
force_docker=0
emscripten_flags=(-O3 --minify 0)
while (($# > 0)); do
case "$1" in
--debug)
emscripten_flags=(-s ASSERTIONS=1 -s SAFE_HEAP=1 -O0 -g)
;;
--help)
usage
exit 0
;;
--docker)
force_docker=1
;;
-v|--verbose)
verbose=1
;;
*)
usage
printf "Unrecognized argument '%s'\n" "$1" >&2
exit 1
;;
esac
shift
done
if [[ $verbose == 1 ]]; then
emscripten_flags+=(-s VERBOSE=1 -v)
fi
# Pick a compiler: local emcc unless --docker was given, otherwise a
# containerized emcc via docker or podman.
emcc=
docker=
if [[ $force_docker == 0 ]] && command -v emcc > /dev/null; then
emcc=emcc
elif command -v docker > /dev/null; then
# detect which one to use
docker=docker
elif command -v podman > /dev /null; then
docker=podman
fi
if [[ -z $emcc ]] && [[ -n $docker ]]; then
if [[ $docker == podman ]]; then
export PODMAN_USERNS=keep-id
fi
# Run emcc inside the pinned emsdk image, mounting the repo at /src and
# matching the host UID so output files aren't root-owned.
emcc="$docker run \
--rm \
-v $PWD:/src:Z \
-u $UID \
emscripten/emsdk:$EMSCRIPTEN_VERSION \
emcc"
fi
if [[ -z $emcc ]]; then
if [[ $force_docker == 1 ]]; then
# shellcheck disable=SC2016
printf 'You must have `docker` or `podman` in your PATH to run this script with --docker\n' >&2
else
# shellcheck disable=SC2016
printf 'You must have either `docker`, `podman`, or `emcc` in your PATH to run this script\n' >&2
fi
exit 1
fi
mkdir -p target/scratch
runtime_methods=stringToUTF16,AsciiToString
# Remove quotes, add leading underscores, remove newlines, remove trailing comma.
exported_functions=$(
cat ${SRC_DIR}/wasm/stdlib-symbols.txt ${WEB_DIR}/exports.txt |
sed -e 's/"//g;s/^/_/g' | tr -d '\n' | sed -e 's/,$//'
)
# Use emscripten to generate `tree-sitter.js` and `tree-sitter.wasm`
# in the `target/scratch` directory
$emcc \
-s WASM=1 \
-s INITIAL_MEMORY=33554432 \
-s ALLOW_MEMORY_GROWTH=1 \
-s SUPPORT_BIG_ENDIAN=1 \
-s MAIN_MODULE=2 \
-s FILESYSTEM=0 \
-s NODEJS_CATCH_EXIT=0 \
-s NODEJS_CATCH_REJECTION=0 \
-s EXPORTED_FUNCTIONS="${exported_functions}" \
-s EXPORTED_RUNTIME_METHODS=$runtime_methods \
"${emscripten_flags[@]}" \
-fno-exceptions \
-std=c11 \
-D 'fprintf(...)=' \
-D NDEBUG= \
-D _POSIX_C_SOURCE=200112L \
-D _DEFAULT_SOURCE= \
-I ${SRC_DIR} \
-I lib/include \
--js-library ${WEB_DIR}/imports.js \
--pre-js ${WEB_DIR}/prefix.js \
--post-js ${WEB_DIR}/binding.js \
--post-js ${WEB_DIR}/suffix.js \
lib/src/lib.c \
${WEB_DIR}/binding.c \
-o target/scratch/tree-sitter.js
# Move the finished artifacts into the web binding directory.
mv target/scratch/tree-sitter.js ${WEB_DIR}/tree-sitter.js
mv target/scratch/tree-sitter.wasm ${WEB_DIR}/tree-sitter.wasm

View file

@ -1,28 +0,0 @@
#!/usr/bin/env bash
# Build the wasm stdlib used by tree-sitter's wasm feature: compile
# lib/src/wasm/stdlib.c to a standalone wasm module with wasi-sdk clang,
# exporting the symbols listed in stdlib-symbols.txt, then embed the binary
# as a C header (wasm-stdlib.h) via xxd.
set -e
declare -a EXPORT_FLAGS
# stdlib-symbols.txt is a comma-separated list of quoted names; strip the
# surrounding quotes (${function:1:-1}) and turn each into a linker export.
while read -r -d, function; do
EXPORT_FLAGS+=("-Wl,--export=${function:1:-1}")
done < lib/src/wasm/stdlib-symbols.txt
# NOTE(review): assumes wasi-sdk 21.0 is already unpacked under target/ —
# confirm against whatever provisions it.
target/wasi-sdk-21.0/bin/clang-17 \
-o stdlib.wasm \
-Os \
-fPIC \
-Wl,--no-entry \
-Wl,--stack-first \
-Wl,-z -Wl,stack-size=65536 \
-Wl,--import-undefined \
-Wl,--import-memory \
-Wl,--import-table \
-Wl,--strip-debug \
-Wl,--export=reset_heap \
-Wl,--export=__wasm_call_ctors \
-Wl,--export=__stack_pointer \
"${EXPORT_FLAGS[@]}" \
lib/src/wasm/stdlib.c
# Embed the module as a byte array header, then keep the raw binary too.
xxd -C -i stdlib.wasm > lib/src/wasm/wasm-stdlib.h
mv stdlib.wasm target/

View file

@ -1,12 +0,0 @@
#!/usr/bin/env bash
# Guard against direct calls to libc allocation functions inside lib/src.
# Allocations must go through the wrappers (alloc.c) so embedders can
# override the allocator; stdlib.c is exempt because it implements the
# wasm stdlib itself.
#
# Improvement over the original: report ALL offending functions before
# exiting (the original stopped at the first one), and quote $src_dir.
src_dir=lib/src
allocation_functions=(malloc calloc realloc free)
status=0
for function in "${allocation_functions[@]}"; do
# -n reports file:line; \b keeps wrapped names like ts_malloc from matching.
usages=$(grep -n -E "\b${function}\(" -r "$src_dir" --exclude alloc.c --exclude stdlib.c)
if [[ -n $usages ]]; then
printf 'The %s function should not be called directly, but is called here:\n%s\n' "$function" "$usages" >&2
status=1
fi
done
exit $status

View file

@ -1,26 +0,0 @@
#!/usr/bin/env bash
# Install (or update) the emscripten SDK under target/emsdk, pinned to the
# version recorded in cli/loader/emscripten-version.
set -e
EMSDK_DIR=target/emsdk
EMSCRIPTEN_VERSION=$(< cli/loader/emscripten-version)
# The whole group's output is redirected to stderr so stdout stays clean
# for any caller that captures it.
{
if [[ ! -f $EMSDK_DIR/emsdk ]]; then
printf 'Downloading emscripten SDK...\n'
git clone https://github.com/emscripten-core/emsdk.git $EMSDK_DIR
fi
cd $EMSDK_DIR
printf 'Updating emscripten SDK...\n'
git reset --hard
git pull
./emsdk list
printf 'Installing emscripten...\n'
./emsdk install "$EMSCRIPTEN_VERSION"
printf 'Activating emscripten...\n'
./emsdk activate "$EMSCRIPTEN_VERSION"
} >&2

View file

@ -1,37 +0,0 @@
#!/usr/bin/env bash
# Clone or update the grammar fixtures used by the test suite.
set -e

GRAMMARS_DIR="$PWD/test/fixtures/grammars"

# Shallow-clone a tree-sitter grammar repo (if absent) and pin its working
# tree to the given ref.
fetch_grammar() {
local name=$1 ref=$2
local dir="${GRAMMARS_DIR}/${name}"
local url="https://github.com/tree-sitter/tree-sitter-${name}"
printf 'Updating %s grammar...\n' "$name"
[[ -d "$dir" ]] || git clone "$url" "$dir" --depth=1
git -C "$dir" fetch origin "$ref" --depth=1
git -C "$dir" reset --hard FETCH_HEAD
}

# Every fixture grammar tracks its `master` branch.
for grammar in \
bash c cpp embedded-template go html java javascript \
jsdoc json php python ruby rust typescript
do
fetch_grammar "$grammar" master
done

View file

@ -1,32 +0,0 @@
@echo off
:: Windows counterpart of the fixture-fetching script: shallow-clone each
:: tree-sitter grammar repo (if absent) and pin it to the given branch.
call:fetch_grammar bash master
call:fetch_grammar c master
call:fetch_grammar cpp master
call:fetch_grammar embedded-template master
call:fetch_grammar go master
call:fetch_grammar html master
call:fetch_grammar java master
call:fetch_grammar javascript master
call:fetch_grammar jsdoc master
call:fetch_grammar json master
call:fetch_grammar php master
call:fetch_grammar python master
call:fetch_grammar ruby master
call:fetch_grammar rust master
call:fetch_grammar typescript master
exit /B 0
:: Subroutine: %1 = grammar name, %2 = branch/ref to fetch.
:: NOTE(review): grammar_branch is assigned but the fetch below uses %2
:: directly — the variable appears unused; confirm before relying on it.
:fetch_grammar
setlocal
set grammar_dir=test\fixtures\grammars\%~1
set grammar_url=https://github.com/tree-sitter/tree-sitter-%~1
set grammar_branch=%~2
@if not exist %grammar_dir% (
git clone %grammar_url% %grammar_dir% --depth=1
)
pushd %grammar_dir%
git fetch origin %2 --depth=1
git reset --hard FETCH_HEAD
popd
exit /B 0

View file

@ -1,44 +0,0 @@
#!/bin/bash
# Regenerate the Rust FFI bindings (lib/binding_rust/bindings.rs) from the
# public C header using bindgen, refusing to run with a bindgen older than
# the one that produced the current bindings file.
output_path=lib/binding_rust/bindings.rs
header_path=lib/include/tree_sitter/api.h
# Types that must NOT derive Copy in the generated bindings.
no_derive_copy=(
TSInput
TSLanguage
TSLogger
TSLookaheadIterator
TSParser
TSTree
TSQuery
TSQueryCursor
TSQueryCapture
TSQueryMatch
TSQueryPredicateStep
)
# Join the array with '|' to form bindgen's --no-copy regex.
no_copy=$(IFS='|'; echo "${no_derive_copy[*]}")
# The bindgen version that generated the existing file is recorded in its
# first line (6th word); compare it with the installed CLI via `sort -V`.
file_version=$(head -n1 "$output_path" | cut -d' ' -f6)
tool_version=$(bindgen --version | cut -d' ' -f2)
higher_version=$(printf '%s\n' "$file_version" "$tool_version" | sort -V | tail -n1)
if [[ "$higher_version" != "$tool_version" ]]; then
printf 'Latest used bindgen version was %s\n' "$file_version" >&2
printf 'Currently installed bindgen CLI version is %s\n\n' "$tool_version" >&2
# shellcheck disable=SC2016
printf 'You must upgrade bindgen CLI first with `cargo install bindgen-cli`\n' >&2
exit 1
fi
# Only expose tree-sitter's own symbols; the trailing -D enables the wasm
# feature so its declarations are included too.
bindgen \
--no-layout-tests \
--allowlist-type '^TS.*' \
--allowlist-function '^ts_.*' \
--allowlist-var '^TREE_SITTER.*' \
--blocklist-type '^__.*' \
--no-prepend-enum-name \
--no-copy "$no_copy" \
--use-core \
"$header_path" \
-- \
-D TREE_SITTER_FEATURE_WASM \
> "$output_path"

View file

@ -1,27 +0,0 @@
#!/usr/bin/env bash
# Regenerate parser.c for every fixture grammar with the local tree-sitter
# CLI, so the fixtures match the current ABI. An optional first argument
# restricts regeneration to a single grammar by name.
set -e
ROOT_DIR="$PWD"
GRAMMARS_DIR="$ROOT_DIR/test/fixtures/grammars"
if [[ $CI == true ]]; then
# NOTE(review): in CI this branch only enables tracing; TREE_SITTER is not
# assigned here, so it is presumably provided by the CI environment —
# confirm against the workflow definitions.
set -x
else
cargo build --release
TREE_SITTER="$ROOT_DIR/target/release/tree-sitter"
fi
filter_grammar_name="$1"
# Visit every grammar.js outside node_modules; regenerate from the adjacent
# src/grammar.json in a subshell so the cwd change doesn't leak.
while read -r grammar_file; do
grammar_dir="${grammar_file%/*}"
grammar_name="${grammar_dir##*/}"
if [[ -n $filter_grammar_name && "$filter_grammar_name" != "$grammar_name" ]]; then
continue
fi
printf 'Regenerating %s parser\n' "$grammar_name"
(cd "$grammar_dir" && "$TREE_SITTER" generate src/grammar.json --abi=latest)
done < <(find "$GRAMMARS_DIR" -name grammar.js -not -path '*/node_modules/*')

View file

@ -1,33 +0,0 @@
#!/usr/bin/env bash
# Compile every fixture grammar to wasm with the local tree-sitter CLI.
# Optional leading --docker flag is forwarded to `tree-sitter build --wasm`;
# an optional grammar name restricts the build to that grammar.
set -e
ROOT_DIR="$PWD"
GRAMMARS_DIR="$ROOT_DIR/test/fixtures/grammars"
if [[ $CI == true ]]; then
# NOTE(review): in CI only tracing is enabled; TREE_SITTER is not assigned
# in this branch, so it is presumably supplied by the CI environment —
# confirm against the workflow definitions.
set -x
else
cargo build --release
TREE_SITTER="$ROOT_DIR/target/release/tree-sitter"
fi
build_wasm_args=
if [[ $1 == --docker ]]; then
build_wasm_args=--docker
shift
fi
filter_grammar_name="$1"
while read -r grammar_file; do
grammar_dir="${grammar_file%/*}"
grammar_name="${grammar_dir##*/}"
if [[ -n $filter_grammar_name && "$filter_grammar_name" != "$grammar_name" ]]; then
continue
fi
printf 'Compiling %s parser to wasm\n' "$grammar_name"
"$TREE_SITTER" build --wasm $build_wasm_args -o "target/release/tree-sitter-${grammar_name}.wasm" "$grammar_dir"
done < <(find "$GRAMMARS_DIR" -name grammar.js -not -path '*/node_modules/*')

View file

@ -1,13 +0,0 @@
@echo off
:: Windows counterpart of the fixture-regeneration script: run the release
:: tree-sitter CLI against every fixture grammar's src\grammar.json.
setlocal EnableDelayedExpansion
set tree_sitter="%cd%\target\release\tree-sitter"
for /f "tokens=*" %%f in ('dir test\fixtures\grammars\grammar.js /b/s') do (
pushd "%%f\.."
echo Regenerating parser !cd!
%tree_sitter% generate src\grammar.json --abi=latest
popd
)
exit /B 0

View file

@ -1,36 +0,0 @@
#!/usr/bin/env bash
# Usage:
# script/heap-profile
#
# Parse an example source file and record memory usage
#
# Dependencies:
# * `pprof` executable: https://github.com/google/pprof
# * `gperftools` package: https://github.com/gperftools/gperftools
set -e
GRAMMARS_DIR="$PWD/test/fixtures/grammars"
# Build the library
make libtree-sitter.a
# Build the heap-profiling harness
clang++ \
-Wno-reorder-init-list \
-Wno-c99-designator \
-I lib/include \
-I "$GRAMMARS_DIR" \
-D GRAMMARS_DIR="\"${GRAMMARS_DIR}/\"" \
test/profile/heap.cc \
-l tcmalloc \
libtree-sitter.a \
-o target/heap-profile
# Run the harness with heap profiling enabled.
# HEAPPROFILE is the dump-file prefix used by tcmalloc's profiler.
export HEAPPROFILE="$PWD/profile"
target/heap-profile "$@"
# Extract statistics using pprof.
# profile.0001.heap is the first dump written under the prefix above.
pprof -top -cum profile.0001.heap

View file

@ -1,35 +0,0 @@
#!/bin/bash
# Replay a single fuzzer testcase (-runs=1) against a previously built
# grammar fuzzer, with sanitizer diagnostics enabled. Mode selects the
# timeout profile: `halt` for crash-on-error runs, `recover` for error
# recovery runs.
if (($# < 3)); then
echo "usage: $0 <language> <halt|recover> <testcase> [libFuzzer args...]" >&2
exit 1
fi
set -eu
export ASAN_OPTIONS=quarantine_size_mb=10:detect_leaks=1:symbolize=1
export UBSAN=print_stacktrace=1:halt_on_error=1:symbolize=1
# check if CI env var exists
if [[ -z ${CI:-} ]]; then
declare -A mode_config=(
[halt]='-timeout=1 -rss_limit_mb=2048'
[recover]='-timeout=10 -rss_limit_mb=2048'
)
else
declare -A mode_config=(
[halt]='-max_total_time=120 -timeout=1 -rss_limit_mb=2048'
[recover]='-time=120 -timeout=10 -rss_limit_mb=2048'
)
fi
lang="$1"
shift
mode="$1"
shift
testcase="$1"
shift
# Treat remainder of arguments as libFuzzer arguments
# shellcheck disable=SC2086
test/fuzz/out/${lang}_fuzzer ${mode_config[$mode]} -runs=1 "$testcase" "$@"

View file

@ -1,42 +0,0 @@
#!/usr/bin/env bash
# Run a previously built grammar fuzzer with its dictionary and a persistent
# on-disk corpus, under sanitizers. Mode selects the timeout profile:
# `halt` for crash-on-error runs, `recover` for error recovery runs.
if (($# < 2)); then
echo "usage: $0 <language> <halt|recover> [libFuzzer args...]" >&2
exit 1
fi
set -eu
export ASAN_OPTIONS=quarantine_size_mb=10:detect_leaks=1:symbolize=1
export UBSAN=print_stacktrace=1:halt_on_error=1:symbolize=1
# check if CI env var exists
if [[ -z ${CI:-} ]]; then
declare -A mode_config=(
[halt]='-timeout=1 -rss_limit_mb=2048'
[recover]='-timeout=10 -rss_limit_mb=2048'
)
else
declare -A mode_config=(
[halt]='-max_total_time=120 -timeout=1 -rss_limit_mb=2048'
[recover]='-time=120 -timeout=10 -rss_limit_mb=2048'
)
fi
lang="$1"
shift
mode="$1"
shift
# Treat remainder of arguments as libFuzzer arguments
# Fuzzing logs and testcases are always written to `pwd`, so `cd` there first
results="$PWD/test/fuzz/out/fuzz-results/${lang}"
mkdir -p "${results}"
cd "${results}"
# Create a corpus directory, so new discoveries are stored on disk. These will
# then be loaded on subsequent fuzzing runs
mkdir -p corpus
# shellcheck disable=SC2086
../../${lang}_fuzzer -dict="../../${lang}.dict" -artifact_prefix=${lang}_ -max_len=2048 ${mode_config[$mode]} corpus "$@"

View file

@ -1,29 +0,0 @@
#!/bin/bash
# Serve the docs site locally with jekyll while a background ruby watcher
# copies freshly built WASM artifacts into docs/assets/js whenever
# lib/binding_web's tree-sitter.{js,wasm} change.
root=$PWD
cd docs
# Background job 1: the jekyll dev server (extra args are forwarded to it).
bundle exec jekyll serve "$@" &
# Background job 2: a ruby file-watcher. NOTE: the heredoc is unquoted, so
# $root is expanded by bash before ruby sees the program text.
bundle exec ruby <<RUBY &
require "listen"
def copy_wasm_files
`cp $root/lib/binding_web/tree-sitter.{js,wasm} $root/docs/assets/js/`
`cp $root/target/release/*.wasm $root/docs/assets/js/`
end
puts "Copying WASM files to docs folder..."
copy_wasm_files
puts "Watching release directory"
listener = Listen.to("$root/lib/binding_web", only: /^tree-sitter\.(js|wasm)$/, wait_for_delay: 2) do
puts "WASM files updated. Copying new files to docs folder..."
copy_wasm_files
end
listener.start
sleep
RUBY
# Block until both background jobs exit.
wait

View file

@ -1,50 +0,0 @@
#!/usr/bin/env node

// Report the approximate size contribution of each symbol in a dynamic
// library: dump symbol addresses with `nm`, sort them, and attribute to
// each symbol the gap between its address and the next symbol's address
// (the last symbol extends to the end of the file).
//
// Fixes over the original: explicit radix for parseInt, no unused `_`
// destructuring binding, consistent semicolons.

const {statSync} = require('fs');
const {execFileSync} = require('child_process');

const libPath = process.argv[2];
if (!libPath || libPath === '--help') {
  console.log(`Usage: ${process.argv[1]} <dylib-path>`);
  process.exit(0);
}

// Get total file size
const totalSize = statSync(libPath).size;

// Dump symbols with decimal addresses (`-t d`).
const output = execFileSync(
  'nm',
  ['-t', 'd', libPath],
  {encoding: 'utf8'}
);

// Parse addresses. Lines without a leading address (e.g. undefined
// symbols) yield a falsy first field and are skipped.
const addressEntries = [];
for (const line of output.split('\n')) {
  const [address, , name] = line.split(/\s+/);
  if (address && name) {
    // Radix 10: addresses are decimal because of `nm -t d`.
    addressEntries.push({name, address: parseInt(address, 10)});
  }
}

// Compute sizes by subtracting consecutive addresses.
addressEntries.sort((a, b) => a.address - b.address);
const sizeEntries = addressEntries.map(({name, address}, i) => {
  const next = addressEntries[i + 1] ? addressEntries[i + 1].address : totalSize;
  return {name, size: next - address};
});

// Human-readable size: kilobytes above 1024 bytes, bytes otherwise.
function formatSize(sizeInBytes) {
  return sizeInBytes > 1024
    ? `${(sizeInBytes / 1024).toFixed(1)} kb`
    : `${sizeInBytes} b`;
}

// Display sizes, largest first.
sizeEntries.sort((a, b) => b.size - a.size);
console.log('total'.padEnd(64, ' '), '\t', formatSize(totalSize));
for (const entry of sizeEntries) {
  console.log(entry.name.padEnd(64, ' '), '\t', formatSize(entry.size));
}

View file

@ -1,101 +0,0 @@
#!/usr/bin/env bash
# Run the Rust test suite with optional filters, randomized-test controls,
# sanitizers (-a), parse logging (-d/-D), or under lldb (-g). Most options
# are communicated to the test binary through TREE_SITTER_* env vars.
set -e
function usage {
cat <<EOF
USAGE
$0 [-adDg] [-s SEED] [-l LANGUAGE] [-e EXAMPLE]
OPTIONS
-h Print this message
-a Compile C code with the Clang address sanitizer
-e Run only the corpus tests whose name contain the given string
-i Run the given number of iterations of randomized tests (default 10)
-s Set the seed used to control random behavior
-d Print parsing log to stderr
-D Generate an SVG graph of parsing logs
-g Run the tests with a debugger
EOF
}
export RUST_BACKTRACE=full
mode=normal
test_flags=()
while getopts "adDghl:e:s:i:" option; do
case ${option} in
h)
usage
exit
;;
a)
export CFLAGS=-fsanitize=undefined,address
# When the Tree-sitter C library is compiled with the address sanitizer, the address sanitizer
# runtime library needs to be linked into the final test executable. When using Xcode clang,
# the Rust linker doesn't know where to find that library, so we need to specify linker flags directly.
runtime_dir=$(cc -print-runtime-dir)
if [[ $runtime_dir == */Xcode.app/* ]]; then
export RUSTFLAGS="-C link-arg=-L${runtime_dir} -C link-arg=-lclang_rt.asan_osx_dynamic -C link-arg=-Wl,-rpath,${runtime_dir}"
fi
# Specify a `--target` explicitly. This is required for address sanitizer support.
toolchain=$(rustup show active-toolchain)
toolchain_regex='(stable|beta|nightly)-([_a-z0-9-]+).*'
if [[ $toolchain =~ $toolchain_regex ]]; then
release=${BASH_REMATCH[1]}
current_target=${BASH_REMATCH[2]}
else
printf "Failed to parse toolchain '%s'\n" "$toolchain" >&2
fi
test_flags+=("--target=$current_target")
;;
e)
export TREE_SITTER_EXAMPLE=${OPTARG}
;;
s)
export TREE_SITTER_SEED=${OPTARG}
;;
i)
export TREE_SITTER_ITERATIONS=${OPTARG}
;;
d)
export TREE_SITTER_LOG=1
;;
D)
export TREE_SITTER_LOG_GRAPHS=1
;;
g)
mode=debug
;;
*)
usage
exit 1
;;
esac
done
shift $((OPTIND - 1))
if [[ ${mode} == debug ]]; then
# Build the test binary without running it, locate it via cargo's JSON
# output, and hand it to lldb with the remaining filter argument.
test_binary=$(
cargo test "${test_flags[@]}" --no-run --message-format=json 2> /dev/null |
jq -rs 'map(select(.target.name == "tree-sitter-cli" and .executable))[0].executable'
)
lldb "${test_binary}" -- "$1"
else
cargo test "${test_flags[@]}" "$1" -- --nocapture
fi

View file

@ -1,12 +0,0 @@
#!/usr/bin/env bash
# Run the web-binding test suite with mocha, installing the test
# dependencies first if either of them is missing.
set -e
cd lib/binding_web
for dep in chai mocha; do
if [[ ! -d "node_modules/$dep" ]]; then
printf 'Installing test dependencies...\n'
npm install
break
fi
done
node_modules/.bin/mocha

View file

@ -1,10 +0,0 @@
@echo off
:: Windows test runner: force single-threaded tests with full backtraces,
:: forward an optional filter argument to cargo, and propagate failures.
setlocal
set RUST_TEST_THREADS=1
set RUST_BACKTRACE=full
cargo test "%~1"
if %errorlevel% NEQ 0 (
exit /b %errorlevel%
)
endlocal

View file

@ -1,24 +0,0 @@
# Wrapper around clang's scan-build static analyzer: pins the analyzer and
# C/C++ compilers explicitly, fails the build on reported bugs
# (--status-bugs), and disables the dead-store checker.
function scan_build {
extra_args=()
# AFAICT, in the trusty travis container the scan-build tool is from the 3.4
# installation. Therefore, by default it will use clang-3.4 when analysing code
# which doesn't support the '-std=c++14' (it is available via '-std=c++1y').
# Use the system-wide installed clang instead which is 3.5 and does support
# '-std=c++14'.
extra_args+=("--use-analyzer=$(command -v clang)")
# scan-build will try to guess which CXX should be used to compile the actual
# code, which is usually g++ but we need g++5 in the CI. Explicitly pass
# $CC/$CXX to scan-build if they are set in the environment.
if [[ -n $CC ]]; then
extra_args+=("--use-cc=$CC")
fi
if [[ -n $CXX ]]; then
extra_args+=("--use-c++=$CXX")
fi
scan-build "${extra_args[@]}" --status-bugs -disable-checker deadcode.DeadStores "$@"
}

View file

@ -1,256 +0,0 @@
# Errors
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
fun:_ZN6option6Parser5parseEbPKNS_10DescriptorEiPPKcPNS_6OptionES8_ibi
fun:_ZN6option6ParserC1EPKNS_10DescriptorEiPPcPNS_6OptionES7_ibi
fun:_ZN6bandit6detail7optionsC1EiPPc
fun:_ZN6bandit3runEiPPc
fun:main
}
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
fun:_ZN6option5Stats3addEbPKNS_10DescriptorEiPPKcib
fun:_ZN6option5StatsC1EPKNS_10DescriptorEiPPcib
fun:_ZN6bandit6detail7optionsC1EiPPc
fun:_ZN6bandit3runEiPPc
fun:main
}
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZN6option6Parser9workhorseEbPKNS_10DescriptorEiPPKcRNS0_6ActionEbbi
fun:_ZN6bandit6detail7optionsC2EiPPc
fun:_ZN6bandit3runEiPPc
fun:main
}
{
<insert_a_suppression_name_here>
Memcheck:Value8
fun:_platform_memcmp
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}
{
<insert_a_suppression_name_here>
Memcheck:Addr1
fun:_platform_memcmp
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_ENKUlvE_clEvEUlvE0_NS7_ISD_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEE9ts_parserE_clES9_SA_EUlvE_NS7_ISC_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE3_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE3_NS_9allocatorIS4_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}
{
<insert_a_suppression_name_here>
Memcheck:Value8
fun:_platform_memcmp
fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE3_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE3_NS_9allocatorIS4_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE1_NS_9allocatorIS4_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcI3$_0NS_9allocatorIS2_EEFvvEEclEv
fun:_ZN6bandit3runERKNS_6detail7optionsERKNSt3__14listINS4_8functionIFvvEEENS4_9allocatorIS8_EEEERNS4_5dequeIPNS0_7contextENS9_ISG_EEEERNS0_8listenerE
fun:_ZN6bandit3runEiPPc
}
{
<insert_a_suppression_name_here>
Memcheck:Value8
fun:_platform_memcmp
fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE1_NS_9allocatorIS4_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcI3$_0NS_9allocatorIS2_EEFvvEEclEv
}
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE4_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE4_NS_9allocatorIS4_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}
{
<insert_a_suppression_name_here>
Memcheck:Value8
fun:_platform_memcmp
fun:_ZNK9snowhouse16EqualsConstraintINSt3__112basic_stringIcNS1_11char_traitsIcEENS1_9allocatorIcEEEEEclIS7_EEbRKT_
fun:_ZN9snowhouse6Assert4ThatINSt3__112basic_stringIcNS2_11char_traitsIcEENS2_9allocatorIcEEEENS_16EqualsConstraintIS8_EEEEvRKT_RKT0_PKci
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlvE4_clEvEUlvE0_NS_9allocatorIS5_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZNK3$_0clEvENKUlvE_clEvEUlvE4_NS_9allocatorIS4_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}
{
<insert_a_suppression_name_here>
Memcheck:Cond
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZNK3$_0clEvEUlvE_NS_9allocatorIS3_EEFvvEEclEv
}
{
<insert_a_suppression_name_here>
Memcheck:Addr1
fun:_platform_memcmp
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}
{
<insert_a_suppression_name_here>
Memcheck:Value8
fun:_platform_memcmp
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEmmPKcm
fun:_ZNKSt3__112basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEE7compareEPKc
fun:_ZN9snowhouse6Assert4ThatIPKcNS_16EqualsConstraintINSt3__112basic_stringIcNS5_11char_traitsIcEENS5_9allocatorIcEEEEEEEEvRKT_RKT0_S3_i
fun:_ZNSt3__110__function6__funcIZZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_ENKUlvE_clEvEUlvE0_NS7_ISF_EEFvvEEclEv
fun:_ZNSt3__110__function6__funcIZN6bandit2itEPKcNS_8functionIFvvEEERNS2_6detail8listenerERNS_5dequeIPNS8_7contextENS_9allocatorISD_EEEERNS2_8adapters17assertion_adapterERKNS8_10run_policyEEUlvE1_NSE_ISO_EES6_EclEv
fun:_ZN6bandit8adapters17snowhouse_adapter16adapt_exceptionsENSt3__18functionIFvvEEE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEERNS_8adapters17assertion_adapterERKNS6_10run_policyE
fun:_ZN6bandit2itEPKcNSt3__18functionIFvvEEE
fun:_ZNSt3__110__function6__funcIZZZNK3$_0clEvENKUlvE_clEvENKUlNS_12basic_stringIcNS_11char_traitsIcEENS_9allocatorIcEEEEPF9ts_parservEE_clES9_SC_EUlvE_NS7_ISE_EEFvvEEclEv
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEERNS_6detail8listenerERNS2_5dequeIPNS6_7contextENS2_9allocatorISB_EEEEb
fun:_ZN6bandit8describeEPKcNSt3__18functionIFvvEEE
}
# Leaks
{
<insert_a_suppression_name_here>
Memcheck:Leak
match-leak-kinds: possible
fun:malloc_zone_malloc
fun:_objc_copyClassNamesForImage
fun:_ZL9protocolsv
fun:_Z9readClassP10objc_classbb
fun:gc_init
fun:_ZL33objc_initializeClassPair_internalP10objc_classPKcS0_S0_
fun:layout_string_create
fun:_ZL12realizeClassP10objc_class
fun:_ZL22copySwiftV1MangledNamePKcb
fun:_ZL22copySwiftV1MangledNamePKcb
fun:_ZL22copySwiftV1MangledNamePKcb
fun:_ZL22copySwiftV1MangledNamePKcb
}

View file

@ -11,10 +11,19 @@ keywords.workspace = true
categories.workspace = true categories.workspace = true
publish = false publish = false
[lints]
workspace = true
[dependencies] [dependencies]
anstyle.workspace = true
anyhow.workspace = true
bindgen = { version = "0.70.1" }
cc.workspace = true
clap.workspace = true
git2.workspace = true git2.workspace = true
indoc.workspace = true indoc.workspace = true
toml.workspace = true toml.workspace = true
regex.workspace = true
semver.workspace = true semver.workspace = true
serde.workspace = true serde.workspace = true
serde_json.workspace = true serde_json.workspace = true

75
xtask/src/benchmark.rs Normal file
View file

@ -0,0 +1,75 @@
use anyhow::Result;
use crate::{bail_on_err, Benchmark};
/// Runs the `benchmark` bench target of `tree-sitter-cli`, forwarding the
/// CLI filters to the benchmark through environment variables.
///
/// With `--debug`, the bench binary is only built (`--no-run`) and its path
/// is printed so the user can launch it under a debugger.
pub fn run(args: &Benchmark) -> Result<()> {
    if let Some(ref example) = args.example_file_name {
        std::env::set_var("TREE_SITTER_BENCHMARK_EXAMPLE_FILTER", example);
    }

    if let Some(ref language) = args.language {
        std::env::set_var("TREE_SITTER_BENCHMARK_LANGUAGE_FILTER", language);
    }

    // Only override the repetition count when it differs from the default (5).
    if args.repetition_count != 5 {
        std::env::set_var(
            "TREE_SITTER_BENCHMARK_REPETITION_COUNT",
            args.repetition_count.to_string(),
        );
    }

    if args.debug {
        // `output()` (not `spawn()` + `wait_with_output()`) is required here:
        // a plain `spawn()` inherits the parent's stdout, so the JSON
        // messages would never be captured and parsing below would fail.
        let output = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .arg("--no-run")
            .arg("--message-format=json")
            .output()?;
        bail_on_err(&output, "Failed to run `cargo bench`")?;

        // `--message-format=json` emits newline-delimited JSON (one object
        // per line), not a single JSON array — parse line by line and find
        // the artifact whose target is the `benchmark` bench.
        let test_binary = output
            .stdout
            .split(|&byte| byte == b'\n')
            .filter(|line| !line.is_empty())
            .filter_map(|line| serde_json::from_slice::<serde_json::Value>(line).ok())
            .find_map(|message| {
                let is_benchmark_target = message
                    .get("target")
                    .and_then(|target| target.get("name"))
                    .and_then(|name| name.as_str())
                    .is_some_and(|name| name == "benchmark");
                if is_benchmark_target {
                    message
                        .get("executable")
                        .and_then(|executable| executable.as_str())
                        .map(String::from)
                } else {
                    None
                }
            })
            .ok_or_else(|| anyhow::anyhow!("Failed to find benchmark executable"))?;
        println!("{test_binary}");
    } else {
        let status = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .status()?;
        if !status.success() {
            anyhow::bail!("Failed to run `cargo bench`");
        }
    }
    Ok(())
}

228
xtask/src/build_wasm.rs Normal file
View file

@ -0,0 +1,228 @@
use std::{
ffi::{OsStr, OsString},
fmt::Write,
fs,
process::Command,
};
use anyhow::{anyhow, Result};
use crate::{bail_on_err, BuildWasm, EMSCRIPTEN_TAG};
/// Where the `emcc` compiler comes from: a native install on PATH, or the
/// emscripten container image run via docker or podman.
#[derive(PartialEq, Eq)]
enum EmccSource {
    Native,
    Docker,
    Podman,
}
/// Builds the Tree-sitter web library (`tree-sitter.js` / `tree-sitter.wasm`)
/// with emscripten, preferring a native `emcc` and falling back to running
/// the emscripten container image via docker or podman.
pub fn run_wasm(args: &BuildWasm) -> Result<()> {
    let mut emscripten_flags = vec!["-O3", "--minify", "0"];

    if args.debug {
        emscripten_flags.extend(["-s", "ASSERTIONS=1", "-s", "SAFE_HEAP=1", "-O0", "-g"]);
    }

    if args.verbose {
        emscripten_flags.extend(["-s", "VERBOSE=1", "-v"]);
    }

    let emcc_name = if cfg!(windows) { "emcc.bat" } else { "emcc" };

    // Order of preference: emscripten > docker > podman > error
    let source = if !args.docker && Command::new(emcc_name).output().is_ok() {
        EmccSource::Native
    } else if Command::new("docker")
        .arg("info")
        .output()
        .map_or(false, |out| out.status.success())
    {
        EmccSource::Docker
    } else if Command::new("podman")
        .arg("--version")
        .output()
        .map_or(false, |out| out.status.success())
    {
        EmccSource::Podman
    } else {
        return Err(anyhow!(
            "You must have either emcc, docker, or podman on your PATH to run this command"
        ));
    };

    let mut command = match source {
        EmccSource::Native => Command::new(emcc_name),
        EmccSource::Docker | EmccSource::Podman => {
            let mut command = match source {
                EmccSource::Docker => Command::new("docker"),
                EmccSource::Podman => Command::new("podman"),
                _ => unreachable!(),
            };
            command.args(["run", "--rm"]);

            // Mount the root directory as a volume, which is the repo root
            let mut volume_string = OsString::from(std::env::current_dir().unwrap());
            volume_string.push(":/src:Z");
            command.args([OsStr::new("--volume"), &volume_string]);

            // In case `docker` is an alias to `podman`, ensure that podman
            // mounts the current directory as writable by the container
            // user which has the same uid as the host user. Setting the
            // podman-specific variable is more reliable than attempting to
            // detect whether `docker` is an alias for `podman`.
            // see https://docs.podman.io/en/latest/markdown/podman-run.1.html#userns-mode
            command.env("PODMAN_USERNS", "keep-id");

            // Get the current user id so that files created in the docker container will have
            // the same owner.
            #[cfg(unix)]
            {
                #[link(name = "c")]
                extern "C" {
                    fn getuid() -> u32;
                }
                // don't need to set user for podman since PODMAN_USERNS=keep-id is already set
                if source == EmccSource::Docker {
                    // SAFETY: getuid takes no arguments, has no preconditions,
                    // and cannot fail.
                    let user_id = unsafe { getuid() };
                    command.args(["--user", &user_id.to_string()]);
                }
            };

            // Run `emcc` in a container using the `emscripten-slim` image
            command.args([EMSCRIPTEN_TAG, "emcc"]);
            command
        }
    };

    fs::create_dir_all("target/scratch").unwrap();

    // Both symbol lists contain quoted names with trailing commas; strip the
    // quotes, prefix each name with `_` (the wasm export convention), and
    // drop the final trailing comma to form the EXPORTED_FUNCTIONS value.
    let exported_functions = concat!(
        include_str!("../../lib/src/wasm/stdlib-symbols.txt"),
        include_str!("../../lib/binding_web/exports.txt")
    )
    .replace('"', "")
    .lines()
    .fold(String::new(), |mut output, line| {
        let _ = write!(output, "_{line}");
        output
    })
    .trim_end_matches(',')
    .to_string();
    let exported_functions = format!("EXPORTED_FUNCTIONS={exported_functions}");
    let exported_runtime_methods = "EXPORTED_RUNTIME_METHODS=stringToUTF16,AsciiToString";

    // NOTE: flag order matters to emcc — the input sources and `-o` output
    // path must come after the `-s` settings and `-D`/`-I` options.
    emscripten_flags.extend([
        "-s",
        "WASM=1",
        "-s",
        "INITIAL_MEMORY=33554432",
        "-s",
        "ALLOW_MEMORY_GROWTH=1",
        "-s",
        "SUPPORT_BIG_ENDIAN=1",
        "-s",
        "MAIN_MODULE=2",
        "-s",
        "FILESYSTEM=0",
        "-s",
        "NODEJS_CATCH_EXIT=0",
        "-s",
        "NODEJS_CATCH_REJECTION=0",
        "-s",
        &exported_functions,
        "-s",
        exported_runtime_methods,
        "-fno-exceptions",
        "-std=c11",
        "-D",
        "fprintf(...)=",
        "-D",
        "NDEBUG=",
        "-D",
        "_POSIX_C_SOURCE=200112L",
        "-D",
        "_DEFAULT_SOURCE=",
        "-I",
        "lib/src",
        "-I",
        "lib/include",
        "--js-library",
        "lib/binding_web/imports.js",
        "--pre-js",
        "lib/binding_web/prefix.js",
        "--post-js",
        "lib/binding_web/binding.js",
        "--post-js",
        "lib/binding_web/suffix.js",
        "lib/src/lib.c",
        "lib/binding_web/binding.c",
        "-o",
        "target/scratch/tree-sitter.js",
    ]);

    bail_on_err(
        &command.args(emscripten_flags).spawn()?.wait_with_output()?,
        "Failed to compile the Tree-sitter WASM library",
    )?;

    // Build into a scratch directory first, then move the artifacts into
    // place so a failed build never clobbers the checked-in outputs.
    fs::rename(
        "target/scratch/tree-sitter.js",
        "lib/binding_web/tree-sitter.js",
    )?;
    fs::rename(
        "target/scratch/tree-sitter.wasm",
        "lib/binding_web/tree-sitter.wasm",
    )?;
    Ok(())
}
/// Compiles the Tree-sitter wasm stdlib with the wasi-sdk clang and embeds
/// the resulting binary into `lib/src/wasm/wasm-stdlib.h` via `xxd -i`.
pub fn run_wasm_stdlib() -> Result<()> {
    // Each line of stdlib-symbols.txt is a quoted symbol name; strip the
    // surrounding quotes and turn each one into a linker export flag.
    let symbol_exports: Vec<String> = include_str!("../../lib/src/wasm/stdlib-symbols.txt")
        .lines()
        .map(|symbol| format!("-Wl,--export={}", &symbol[1..symbol.len() - 1]))
        .collect();

    let mut clang = Command::new("target/wasi-sdk-21.0/bin/clang-17");
    clang.args([
        "-o",
        "stdlib.wasm",
        "-Os",
        "-fPIC",
        "-Wl,--no-entry",
        "-Wl,--stack-first",
        "-Wl,-z",
        "-Wl,stack-size=65536",
        "-Wl,--import-undefined",
        "-Wl,--import-memory",
        "-Wl,--import-table",
        "-Wl,--strip-debug",
        "-Wl,--export=reset_heap",
        "-Wl,--export=__wasm_call_ctors",
        "-Wl,--export=__stack_pointer",
    ]);
    clang.args(&symbol_exports);
    clang.arg("lib/src/wasm/stdlib.c");
    let compile_output = clang.output()?;
    bail_on_err(&compile_output, "Failed to compile the Tree-sitter WASM stdlib")?;

    // Convert the wasm binary into a C byte-array header.
    let xxd_output = Command::new("xxd")
        .args(["-C", "-i", "stdlib.wasm"])
        .output()?;
    bail_on_err(
        &xxd_output,
        "Failed to run xxd on the compiled Tree-sitter WASM stdlib",
    )?;

    fs::write("lib/src/wasm/wasm-stdlib.h", xxd_output.stdout)?;
    fs::rename("stdlib.wasm", "target/stdlib.wasm")?;
    Ok(())
}

View file

@ -1,11 +1,14 @@
use std::{cmp::Ordering, path::Path}; use std::{cmp::Ordering, path::Path};
use anyhow::{anyhow, Result};
use git2::{DiffOptions, Repository}; use git2::{DiffOptions, Repository};
use indoc::indoc; use indoc::indoc;
use semver::{BuildMetadata, Prerelease, Version}; use semver::{BuildMetadata, Prerelease, Version};
use toml::Value; use toml::Value;
pub fn get_latest_tag(repo: &Repository) -> Result<String, Box<dyn std::error::Error>> { use crate::BumpVersion;
pub fn get_latest_tag(repo: &Repository) -> Result<String> {
let mut tags = repo let mut tags = repo
.tag_names(None)? .tag_names(None)?
.into_iter() .into_iter()
@ -23,10 +26,10 @@ pub fn get_latest_tag(repo: &Repository) -> Result<String, Box<dyn std::error::E
tags.last() tags.last()
.map(std::string::ToString::to_string) .map(std::string::ToString::to_string)
.ok_or_else(|| "No tags found".into()) .ok_or_else(|| anyhow!("No tags found"))
} }
pub fn bump_versions() -> Result<(), Box<dyn std::error::Error>> { pub fn run(args: BumpVersion) -> Result<()> {
let repo = Repository::open(".")?; let repo = Repository::open(".")?;
let latest_tag = get_latest_tag(&repo)?; let latest_tag = get_latest_tag(&repo)?;
let current_version = Version::parse(&latest_tag)?; let current_version = Version::parse(&latest_tag)?;
@ -104,35 +107,39 @@ pub fn bump_versions() -> Result<(), Box<dyn std::error::Error>> {
} }
} }
let mut version = current_version.clone(); let next_version = if let Some(version) = args.version {
if should_increment_minor { version
version.minor += 1;
version.patch = 0;
version.pre = Prerelease::EMPTY;
version.build = BuildMetadata::EMPTY;
} else if should_increment_patch {
version.patch += 1;
version.pre = Prerelease::EMPTY;
version.build = BuildMetadata::EMPTY;
} else { } else {
return Err(format!("No source code changed since {current_version}").into()); let mut next_version = current_version.clone();
} if should_increment_minor {
next_version.minor += 1;
next_version.patch = 0;
next_version.pre = Prerelease::EMPTY;
next_version.build = BuildMetadata::EMPTY;
} else if should_increment_patch {
next_version.patch += 1;
next_version.pre = Prerelease::EMPTY;
next_version.build = BuildMetadata::EMPTY;
} else {
return Err(anyhow!(format!(
"No source code changed since {current_version}"
)));
}
next_version
};
println!("Bumping from {current_version} to {version}"); println!("Bumping from {current_version} to {next_version}");
update_crates(&current_version, &version)?; update_crates(&current_version, &next_version)?;
update_makefile(&version)?; update_makefile(&next_version)?;
update_cmake(&version)?; update_cmake(&next_version)?;
update_npm(&version)?; update_npm(&next_version)?;
update_zig(&version)?; update_zig(&next_version)?;
tag_next_version(&repo, &version)?; tag_next_version(&repo, &next_version)?;
Ok(()) Ok(())
} }
fn tag_next_version( fn tag_next_version(repo: &Repository, next_version: &Version) -> Result<()> {
repo: &Repository,
next_version: &Version,
) -> Result<(), Box<dyn std::error::Error>> {
// first add the manifests // first add the manifests
let mut index = repo.index()?; let mut index = repo.index()?;
@ -184,7 +191,7 @@ fn tag_next_version(
Ok(()) Ok(())
} }
fn update_makefile(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> { fn update_makefile(next_version: &Version) -> Result<()> {
let makefile = std::fs::read_to_string("Makefile")?; let makefile = std::fs::read_to_string("Makefile")?;
let makefile = makefile let makefile = makefile
.lines() .lines()
@ -204,7 +211,7 @@ fn update_makefile(next_version: &Version) -> Result<(), Box<dyn std::error::Err
Ok(()) Ok(())
} }
fn update_cmake(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> { fn update_cmake(next_version: &Version) -> Result<()> {
let cmake = std::fs::read_to_string("lib/CMakeLists.txt")?; let cmake = std::fs::read_to_string("lib/CMakeLists.txt")?;
let cmake = cmake let cmake = cmake
.lines() .lines()
@ -230,10 +237,7 @@ fn update_cmake(next_version: &Version) -> Result<(), Box<dyn std::error::Error>
Ok(()) Ok(())
} }
fn update_crates( fn update_crates(current_version: &Version, next_version: &Version) -> Result<()> {
current_version: &Version,
next_version: &Version,
) -> Result<(), Box<dyn std::error::Error>> {
let mut cmd = std::process::Command::new("cargo"); let mut cmd = std::process::Command::new("cargo");
cmd.arg("workspaces").arg("version"); cmd.arg("workspaces").arg("version");
@ -253,20 +257,20 @@ fn update_crates(
let status = cmd.status()?; let status = cmd.status()?;
if !status.success() { if !status.success() {
return Err("Failed to update crates".into()); return Err(anyhow!("Failed to update crates"));
} }
Ok(()) Ok(())
} }
fn update_npm(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> { fn update_npm(next_version: &Version) -> Result<()> {
for path in ["lib/binding_web/package.json", "cli/npm/package.json"] { for path in ["lib/binding_web/package.json", "cli/npm/package.json"] {
let package_json = let package_json =
serde_json::from_str::<serde_json::Value>(&std::fs::read_to_string(path)?)?; serde_json::from_str::<serde_json::Value>(&std::fs::read_to_string(path)?)?;
let mut package_json = package_json let mut package_json = package_json
.as_object() .as_object()
.ok_or("Invalid package.json")? .ok_or_else(|| anyhow!("Invalid package.json"))?
.clone(); .clone();
package_json.insert( package_json.insert(
"version".to_string(), "version".to_string(),
@ -281,7 +285,7 @@ fn update_npm(next_version: &Version) -> Result<(), Box<dyn std::error::Error>>
Ok(()) Ok(())
} }
fn update_zig(next_version: &Version) -> Result<(), Box<dyn std::error::Error>> { fn update_zig(next_version: &Version) -> Result<()> {
let zig = std::fs::read_to_string("build.zig.zon")?; let zig = std::fs::read_to_string("build.zig.zon")?;
let zig = zig let zig = zig
@ -303,7 +307,7 @@ fn update_zig(next_version: &Version) -> Result<(), Box<dyn std::error::Error>>
} }
/// read Cargo.toml and get the version /// read Cargo.toml and get the version
fn fetch_workspace_version() -> Result<String, Box<dyn std::error::Error>> { fn fetch_workspace_version() -> Result<String> {
let cargo_toml = toml::from_str::<Value>(&std::fs::read_to_string("Cargo.toml")?)?; let cargo_toml = toml::from_str::<Value>(&std::fs::read_to_string("Cargo.toml")?)?;
Ok(cargo_toml["workspace"]["package"]["version"] Ok(cargo_toml["workspace"]["package"]["version"]

33
xtask/src/clippy.rs Normal file
View file

@ -0,0 +1,33 @@
use std::process::Command;
use anyhow::Result;
use crate::{bail_on_err, Clippy};
/// Runs `cargo +nightly clippy` over the workspace (or a single package when
/// `args.package` is set), denying all warnings. With `args.fix`, clippy is
/// asked to apply its suggestions automatically.
pub fn run(args: &Clippy) -> Result<()> {
    let mut clippy_command = Command::new("cargo");
    clippy_command.arg("+nightly").arg("clippy");

    if let Some(package) = args.package.as_ref() {
        clippy_command.args(["--package", package]);
    } else {
        clippy_command.arg("--workspace");
    }

    clippy_command
        .arg("--release")
        .arg("--all-targets")
        .arg("--all-features");

    // `--fix` is a cargo-level flag, so it must appear *before* the `--`
    // separator; everything after `--` is forwarded to the lint driver and
    // would make the invocation fail.
    if args.fix {
        clippy_command.arg("--fix");
    }

    clippy_command.args(["--", "-D", "warnings"]);

    bail_on_err(
        &clippy_command.spawn()?.wait_with_output()?,
        "Clippy failed",
    )
}

119
xtask/src/fetch.rs Normal file
View file

@ -0,0 +1,119 @@
use std::{path::Path, process::Command};
use anyhow::Result;
use crate::{bail_on_err, EMSCRIPTEN_VERSION};
/// Clone or update the grammar fixtures used by the test suite.
///
/// Each grammar is shallow-cloned into `test/fixtures/grammars/<name>` on the
/// first run, then fetched and hard-reset to the pinned ref on every run.
pub fn run_fixtures() -> Result<()> {
    let grammars_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
        .parent()
        .unwrap()
        .join("test")
        .join("fixtures")
        .join("grammars");

    [
        ("bash", "master"),
        ("c", "master"),
        ("cpp", "master"),
        ("embedded-template", "master"),
        ("go", "master"),
        ("html", "master"),
        ("java", "master"),
        ("javascript", "master"),
        ("jsdoc", "master"),
        ("json", "master"),
        ("php", "master"),
        ("python", "master"),
        ("ruby", "master"),
        ("rust", "master"),
        ("typescript", "master"),
    ]
    .iter()
    .try_for_each(|(grammar, r#ref)| {
        let grammar_dir = grammars_dir.join(grammar);
        let grammar_url = format!("https://github.com/tree-sitter/tree-sitter-{grammar}");

        println!("Updating the {grammar} grammar...");

        if !grammar_dir.exists() {
            let mut command = Command::new("git");
            command.args([
                "clone",
                "--depth",
                "1",
                &grammar_url,
                &grammar_dir.to_string_lossy(),
            ]);
            // Bug fix: these three messages were plain string literals, so the
            // literal text `{grammar}` was printed instead of the grammar name.
            bail_on_err(
                &command.spawn()?.wait_with_output()?,
                &format!("Failed to clone the {grammar} grammar"),
            )?;
        }

        // `grammar_dir` is absolute (rooted at the manifest dir), so changing
        // the cwd each iteration is safe.
        std::env::set_current_dir(&grammar_dir)?;

        let mut command = Command::new("git");
        command.args(["fetch", "origin", r#ref, "--depth", "1"]);
        bail_on_err(
            &command.spawn()?.wait_with_output()?,
            &format!("Failed to fetch the {grammar} grammar"),
        )?;

        let mut command = Command::new("git");
        command.args(["reset", "--hard", "FETCH_HEAD"]);
        bail_on_err(
            &command.spawn()?.wait_with_output()?,
            &format!("Failed to reset the {grammar} grammar"),
        )?;

        Ok(())
    })
}
pub fn run_emscripten() -> Result<()> {
let emscripten_dir = Path::new(env!("CARGO_MANIFEST_DIR"))
.parent()
.unwrap()
.join("target")
.join("emsdk");
if emscripten_dir.exists() {
println!("Emscripten SDK already exists");
return Ok(());
}
println!("Cloning the Emscripten SDK...");
let mut command = Command::new("git");
command.args([
"clone",
"https://github.com/emscripten-core/emsdk.git",
&emscripten_dir.to_string_lossy(),
]);
bail_on_err(
&command.spawn()?.wait_with_output()?,
"Failed to clone the Emscripten SDK",
)?;
std::env::set_current_dir(&emscripten_dir)?;
let emsdk = if cfg!(windows) {
"emsdk.bat"
} else {
"./emsdk"
};
let mut command = Command::new(emsdk);
command.args(["install", EMSCRIPTEN_VERSION]);
bail_on_err(
&command.spawn()?.wait_with_output()?,
"Failed to install Emscripten",
)?;
let mut command = Command::new(emsdk);
command.args(["activate", EMSCRIPTEN_VERSION]);
bail_on_err(
&command.spawn()?.wait_with_output()?,
"Failed to activate Emscripten",
)
}

118
xtask/src/generate.rs Normal file
View file

@ -0,0 +1,118 @@
use std::{ffi::OsStr, fs, process::Command};
use anyhow::{Context, Result};
use crate::{bail_on_err, GenerateFixtures};
const HEADER_PATH: &str = "include/tree_sitter/api.h";
/// Regenerate every fixture grammar's parser with the freshly-built CLI,
/// either as C source (`tree-sitter generate`) or as a wasm module when
/// `args.wasm` is set.
pub fn run_fixtures(args: &GenerateFixtures) -> Result<()> {
    // Build the CLI in release mode first so we can invoke the binary below.
    let build_output = std::process::Command::new("cargo")
        .args(["build", "--release"])
        .spawn()?
        .wait_with_output()?;
    bail_on_err(&build_output, "Failed to run cargo build")?;

    let cwd = std::env::current_dir()?;
    let tree_sitter_binary = cwd.join("target").join("release").join("tree-sitter");
    let grammars_dir = cwd.join("test").join("fixtures").join("grammars");

    for grammar_file in find_grammar_files(grammars_dir.to_str().unwrap()).flatten() {
        let grammar_dir = grammar_file.parent().unwrap();
        let grammar_name = grammar_dir.file_name().and_then(OsStr::to_str).unwrap();
        println!(
            "Regenerating {grammar_name} parser{}",
            if args.wasm { " to wasm" } else { "" }
        );

        if args.wasm {
            // Compile the grammar to a wasm module, optionally via docker.
            let mut wasm_cmd = Command::new(&tree_sitter_binary);
            wasm_cmd.args([
                "build",
                "--wasm",
                "-o",
                &format!("target/release/tree-sitter-{grammar_name}.wasm"),
                grammar_dir.to_str().unwrap(),
            ]);
            if args.docker {
                wasm_cmd.arg("--docker");
            }
            bail_on_err(
                &wasm_cmd.spawn()?.wait_with_output()?,
                &format!("Failed to regenerate {grammar_name} parser to wasm"),
            )?;
        } else {
            // Regenerate the parser source from grammar.json with the latest ABI.
            let generate_output = Command::new(&tree_sitter_binary)
                .arg("generate")
                .arg("src/grammar.json")
                .arg("--abi=latest")
                .current_dir(grammar_dir)
                .spawn()?
                .wait_with_output()?;
            bail_on_err(
                &generate_output,
                &format!("Failed to regenerate {grammar_name} parser"),
            )?;
        }
    }
    Ok(())
}
/// Generate the Rust FFI bindings for the C library header and write them to
/// `lib/binding_rust/bindings.rs`.
pub fn run_bindings() -> Result<()> {
    // Opaque handle types that must not receive a `Copy` implementation.
    let no_copy = [
        "TSInput",
        "TSLanguage",
        "TSLogger",
        "TSLookaheadIterator",
        "TSParser",
        "TSTree",
        "TSQuery",
        "TSQueryCursor",
        "TSQueryCapture",
        "TSQueryMatch",
        "TSQueryPredicateStep",
    ];

    let bindings = bindgen::Builder::default()
        .header(HEADER_PATH)
        .layout_tests(false)
        .allowlist_type("^TS.*")
        .allowlist_function("^ts_.*")
        .allowlist_var("^TREE_SITTER.*")
        .no_copy(no_copy.join("|"))
        .prepend_enum_name(false)
        .use_core()
        .clang_arg("-D TREE_SITTER_FEATURE_WASM")
        .generate()
        // Return an error instead of panicking: the function already returns
        // `Result`, and the write below reports failures the same way.
        .with_context(|| "Failed to generate bindings")?;

    bindings
        .write_to_file("lib/binding_rust/bindings.rs")
        .with_context(|| "Failed to write bindings")
}
/// Recursively find every `grammar.js` under `dir`, pruning any path that
/// contains `node_modules`. Unreadable directory entries are skipped; a
/// directory that cannot be opened at all panics (as in a dev-only task).
fn find_grammar_files(
    dir: &str,
) -> impl Iterator<Item = Result<std::path::PathBuf, std::io::Error>> {
    // Walk the tree depth-first into a Vec, then hand back an iterator.
    fn walk(dir: &str, found: &mut Vec<std::path::PathBuf>) {
        let entries = fs::read_dir(dir).expect("Failed to read directory");
        for entry in entries.filter_map(Result::ok) {
            let path = entry.path();
            if path.is_dir() && !path.to_string_lossy().contains("node_modules") {
                walk(path.to_str().unwrap(), found);
            } else if path.is_file() && path.file_name() == Some(OsStr::new("grammar.js")) {
                found.push(path);
            }
        }
    }

    let mut found = Vec::new();
    walk(dir, &mut found);
    found.into_iter().map(Ok)
}

View file

@ -1,35 +1,235 @@
mod benchmark;
mod build_wasm;
mod bump; mod bump;
mod clippy;
mod fetch;
mod generate;
mod test;
use bump::bump_versions; use anstyle::{AnsiColor, Color, Style};
use anyhow::Result;
use clap::{crate_authors, Args, Command, FromArgMatches as _, Subcommand};
use semver::Version;
fn print_help() { #[derive(Subcommand)]
println!( #[command(about="Run various tasks", author=crate_authors!("\n"), styles=get_styles())]
" enum Commands {
xtask must specify a task to run. /// Runs `cargo benchmark` with some optional environment variables set.
Benchmark(Benchmark),
Usage: `cargo xtask <task>` /// Compile the Tree-sitter WASM library. This will create two files in the
/// `lib/binding_web` directory: `tree-sitter.js` and `tree-sitter.wasm`.
Tasks: BuildWasm(BuildWasm),
bump-version /// Compile the Tree-sitter WASM standard library.
" BuildWasmStdlib,
); /// Bumps the version of the workspace.
BumpVersion(BumpVersion),
/// Runs `cargo clippy`.
Clippy(Clippy),
/// Fetches emscripten.
FetchEmscripten,
/// Fetches the fixtures for testing tree-sitter.
FetchFixtures,
/// Generate the Rust bindings from the C library.
GenerateBindings,
/// Generates the fixtures for testing tree-sitter.
GenerateFixtures(GenerateFixtures),
/// Run the test suite
Test(Test),
/// Run the WASM test suite
TestWasm,
} }
fn main() -> Result<(), Box<dyn std::error::Error>> { #[derive(Args)]
let Some(task) = std::env::args().nth(1) else { struct Benchmark {
print_help(); /// The language to run the benchmarks for.
std::process::exit(0); #[arg(long, short)]
}; language: Option<String>,
/// The example file to run the benchmarks for.
#[arg(long, short)]
example_file_name: Option<String>,
/// The number of times to parse each sample (default is 5).
#[arg(long, short, default_value = "5")]
repetition_count: u32,
/// Whether to run the benchmarks in debug mode.
#[arg(long, short = 'g')]
debug: bool,
}
match task.as_str() { #[derive(Args)]
"bump-version" => { struct BuildWasm {
bump_versions()?; /// Compile the library more quickly, with fewer optimizations
/// and more runtime assertions.
#[arg(long, short = '0')]
debug: bool,
/// Run emscripten using docker, even if \`emcc\` is installed.
/// By default, \`emcc\` will be run directly when available.
#[arg(long, short)]
docker: bool,
/// Run emscripten with verbose output.
#[arg(long, short)]
verbose: bool,
}
// CLI arguments for `cargo xtask bump-version`. The `///` doc comments below
// are rendered as `--help` text by clap, so they must not be reworded lightly.
#[derive(Args)]
struct BumpVersion {
    /// The version to bump to.
    // Optional — presumably the task derives the next version itself when
    // omitted; confirm against `bump::run`.
    #[arg(long, short)]
    version: Option<Version>,
}
// CLI arguments for `cargo xtask clippy`. The `///` doc comments below are
// rendered as `--help` text by clap, so they must not be reworded lightly.
#[derive(Args)]
struct Clippy {
    /// Automatically apply lint suggestions (`clippy --fix`).
    #[arg(long, short)]
    fix: bool,
    /// The package to run Clippy against (`cargo -p <PACKAGE> clippy`).
    // When absent, `clippy::run` lints the whole workspace.
    #[arg(long, short)]
    package: Option<String>,
}
// CLI arguments for `cargo xtask generate-fixtures`. The `///` doc comments
// below are rendered as `--help` text by clap.
#[derive(Args)]
struct GenerateFixtures {
    /// Generates the parser to WASM
    #[arg(long, short)]
    wasm: bool,
    /// Run emscripten via docker even if it is installed locally.
    // `requires = "wasm"`: clap rejects `--docker` unless `--wasm` is given.
    #[arg(long, short, requires = "wasm")]
    docker: bool,
}
// CLI arguments for `cargo xtask test`. The `///` doc comments below are
// rendered as `--help` text by clap. Most of these are translated into
// TREE_SITTER_* environment variables by `test::run`.
#[derive(Args)]
struct Test {
    /// Compile C code with the Clang address sanitizer.
    #[arg(long, short)]
    address_sanitizer: bool,
    /// Run only the corpus tests whose name contain the given string.
    #[arg(long, short)]
    example: Option<String>,
    /// Run the given number of iterations of randomized tests (default 10).
    #[arg(long, short)]
    iterations: Option<u32>,
    /// Set the seed used to control random behavior.
    #[arg(long, short)]
    seed: Option<u32>,
    /// Print parsing log to stderr.
    #[arg(long, short)]
    debug: bool,
    /// Generate an SVG graph of parsing logs.
    #[arg(long, short = 'D')]
    debug_graph: bool,
    /// Run the tests with a debugger.
    // Short-only flag (`-g`); `test::run` builds the test binary and launches
    // it under lldb when this is set.
    #[arg(short)]
    g: bool,
    // Everything after the recognized flags is forwarded verbatim to
    // `cargo test` (or to the test binary under lldb).
    #[arg(trailing_var_arg = true)]
    args: Vec<String>,
    /// Don't capture the output
    #[arg(long)]
    nocapture: bool,
}
// Crate version baked in at compile time.
const BUILD_VERSION: &str = env!("CARGO_PKG_VERSION");
// Optional commit SHA supplied via the `BUILD_SHA` build-time env var
// (presumably set by CI — confirm in the release workflow).
const BUILD_SHA: Option<&str> = option_env!("BUILD_SHA");
// Emscripten version pinned by the repo, shared with the CLI loader.
const EMSCRIPTEN_VERSION: &str = include_str!("../../cli/loader/emscripten-version");
// Docker image tag matching the pinned emscripten version above.
const EMSCRIPTEN_TAG: &str = concat!(
    "docker.io/emscripten/emsdk:",
    include_str!("../../cli/loader/emscripten-version")
);
fn main() {
let result = run();
if let Err(err) = &result {
// Ignore BrokenPipe errors
if let Some(error) = err.downcast_ref::<std::io::Error>() {
if error.kind() == std::io::ErrorKind::BrokenPipe {
return;
}
} }
_ => { if !err.to_string().is_empty() {
println!("invalid task: {task}"); eprintln!("{err:?}");
std::process::exit(1);
} }
std::process::exit(1);
}
}
fn run() -> Result<()> {
let version = BUILD_SHA.map_or_else(
|| BUILD_VERSION.to_string(),
|build_sha| format!("{BUILD_VERSION} ({build_sha})"),
);
let version: &'static str = Box::leak(version.into_boxed_str());
let cli = Command::new("xtask")
.help_template(
"\
{before-help}{name} {version}
{author-with-newline}{about-with-newline}
{usage-heading} {usage}
{all-args}{after-help}
",
)
.version(version)
.subcommand_required(true)
.arg_required_else_help(true)
.disable_help_subcommand(true)
.disable_colored_help(false);
let command = Commands::from_arg_matches(&Commands::augment_subcommands(cli).get_matches())?;
match command {
Commands::Benchmark(benchmark_options) => benchmark::run(&benchmark_options)?,
Commands::BuildWasm(build_wasm_options) => build_wasm::run_wasm(&build_wasm_options)?,
Commands::BuildWasmStdlib => build_wasm::run_wasm_stdlib()?,
Commands::BumpVersion(bump_options) => bump::run(bump_options)?,
Commands::Clippy(clippy_options) => clippy::run(&clippy_options)?,
Commands::FetchEmscripten => fetch::run_emscripten()?,
Commands::FetchFixtures => fetch::run_fixtures()?,
Commands::GenerateBindings => generate::run_bindings()?,
Commands::GenerateFixtures(generate_fixtures_options) => {
generate::run_fixtures(&generate_fixtures_options)?;
}
Commands::Test(test_options) => test::run(&test_options)?,
Commands::TestWasm => test::run_wasm()?,
} }
Ok(()) Ok(())
} }
/// Turn a child process's `Output` into a `Result`: `Ok(())` on a zero exit
/// status, otherwise an error carrying `prefix` plus the child's stderr
/// (lossily decoded).
fn bail_on_err(output: &std::process::Output, prefix: &str) -> Result<()> {
    if output.status.success() {
        Ok(())
    } else {
        anyhow::bail!("{prefix}:\n{}", String::from_utf8_lossy(&output.stderr))
    }
}
#[must_use]
const fn get_styles() -> clap::builder::Styles {
clap::builder::Styles::styled()
.usage(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Yellow))),
)
.header(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Yellow))),
)
.literal(Style::new().fg_color(Some(Color::Ansi(AnsiColor::Green))))
.invalid(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Red))),
)
.error(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Red))),
)
.valid(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Green))),
)
.placeholder(Style::new().fg_color(Some(Color::Ansi(AnsiColor::White))))
}

122
xtask/src/test.rs Normal file
View file

@ -0,0 +1,122 @@
use std::{
env,
path::Path,
process::{Command, Stdio},
};
use anyhow::{anyhow, Result};
use regex::Regex;
use crate::{bail_on_err, Test};
/// Run the Rust test suite, optionally with the address sanitizer enabled or
/// under lldb (`-g`). Test knobs are communicated to the suite through
/// `TREE_SITTER_*` environment variables.
pub fn run(args: &Test) -> Result<()> {
    // Extra flags appended to every `cargo test` invocation below.
    // Bug fix: this used to be a single String that was passed to `.arg()`
    // even when empty, handing cargo a bogus `""` positional argument; a Vec
    // adds flags only when there are any.
    let mut test_flags: Vec<String> = Vec::new();

    if args.address_sanitizer {
        env::set_var("CFLAGS", "-fsanitize=undefined,address");

        // When the Tree-sitter C library is compiled with the address sanitizer, the address
        // sanitizer runtime library needs to be linked into the final test executable. When
        // using Xcode clang, the Rust linker doesn't know where to find that library, so we
        // need to specify linker flags directly.
        let output = Command::new("cc").arg("-print-runtime-dir").output()?;
        bail_on_err(&output, "Failed to get clang runtime dir")?;
        let runtime_dir = String::from_utf8(output.stdout)?;
        if runtime_dir.contains("/Xcode.app/") {
            env::set_var(
                "RUSTFLAGS",
                format!(
                    "-C link-arg=-L{runtime_dir} -C link-arg=-lclang_rt.asan_osx_dynamic -C link-arg=-Wl,-rpath,{runtime_dir}"
                ),
            );
        }

        // Specify a `--target` explicitly. This is required for address sanitizer support.
        let output = Command::new("rustup")
            .arg("show")
            .arg("active-toolchain")
            .output()?;
        bail_on_err(&output, "Failed to get active Rust toolchain")?;
        let toolchain = String::from_utf8(output.stdout)?;
        let re = Regex::new(r"(stable|beta|nightly)-([_a-z0-9-]+).*")?;
        let captures = re
            .captures(&toolchain)
            .ok_or_else(|| anyhow!("Failed to parse toolchain '{toolchain}'"))?;
        let current_target = captures.get(2).unwrap().as_str();
        test_flags.push(format!("--target={current_target}"));
    }

    // Forward the test-suite knobs via environment variables.
    if let Some(example) = &args.example {
        env::set_var("TREE_SITTER_EXAMPLE", example);
    }
    if let Some(seed) = args.seed {
        env::set_var("TREE_SITTER_SEED", seed.to_string());
    }
    if let Some(iterations) = args.iterations {
        env::set_var("TREE_SITTER_ITERATIONS", iterations.to_string());
    }
    if args.debug {
        env::set_var("TREE_SITTER_LOG", "1");
    }
    if args.debug_graph {
        env::set_var("TREE_SITTER_LOG_GRAPHS", "1");
    }

    if args.g {
        // Build the tests without running them, locate the CLI test binary in
        // cargo's JSON build output with `jq`, then launch it under lldb.
        let cargo_cmd = Command::new("cargo")
            .arg("test")
            .args(&test_flags)
            .arg("--no-run")
            .arg("--message-format=json")
            .stdout(Stdio::piped())
            .spawn()?;
        let jq_cmd = Command::new("jq")
            .arg("-rs")
            .arg(r#"map(select(.target.name == "tree_sitter_cli" and .executable))[0].executable"#)
            .stdin(cargo_cmd.stdout.unwrap())
            .output()?;
        let test_binary = String::from_utf8(jq_cmd.stdout)?;
        let mut lldb_cmd = Command::new("lldb");
        lldb_cmd.arg(test_binary.trim()).arg("--").args(&args.args);
        bail_on_err(
            &lldb_cmd.spawn()?.wait_with_output()?,
            &format!("Failed to run {lldb_cmd:?}"),
        )?;
    } else {
        let mut cargo_cmd = Command::new("cargo");
        cargo_cmd.arg("test").args(&test_flags).args(&args.args);
        if args.nocapture {
            cargo_cmd.arg("--").arg("--nocapture");
        }
        bail_on_err(
            &cargo_cmd.spawn()?.wait_with_output()?,
            &format!("Failed to run {cargo_cmd:?}"),
        )?;
    }
    Ok(())
}
/// Run the WASM test suite in `lib/binding_web` via npm, installing the JS
/// test dependencies (chai, mocha) first if they are missing.
pub fn run_wasm() -> Result<()> {
    std::env::set_current_dir("lib/binding_web")?;

    // npm's launcher has a platform-specific name on Windows.
    let npm = if cfg!(target_os = "windows") {
        "npm.cmd"
    } else {
        "npm"
    };

    // Install dependencies on first run (or after a clean).
    let deps_missing = ["chai", "mocha"]
        .iter()
        .any(|dep| !Path::new("node_modules").join(dep).exists());
    if deps_missing {
        println!("Installing test dependencies...");
        let install_output = Command::new(npm).arg("install").output()?;
        bail_on_err(&install_output, "Failed to install test dependencies")?;
    }

    let test_output = Command::new(npm).arg("test").output()?;
    bail_on_err(&test_output, &format!("Failed to run {npm} test"))?;
    Ok(())
}