Compare commits

123 commits

| SHA1 |
|---|
| 2a1a33d649 |
| a467ea8502 |
| 6cd25aadd5 |
| 027136c98a |
| 14c4d2f8ca |
| 8e2b5ad2a4 |
| bb82b94ded |
| 59f3cb91c2 |
| a80cd86d47 |
| 253003ccf8 |
| e61407cc36 |
| cd503e803d |
| 77e5c1c8aa |
| 22fa144016 |
| 1083795af6 |
| dc0b5530b3 |
| 910b3c738c |
| f764f485d2 |
| d5b8c19d0b |
| 9504c247d6 |
| 17cb10a677 |
| 25d63ab7ab |
| 629093d2c3 |
| dc4e5b5999 |
| a53058b84d |
| de141362d5 |
| 8f7539af72 |
| c70d6c2dfd |
| 1b2fc42e45 |
| dbbe8c642d |
| 362419836e |
| 0c83a5d03e |
| 05bfeb5b69 |
| e7f4dfcd4a |
| d507a2defb |
| 3c0088f037 |
| e920009d60 |
| b4fd46fdc0 |
| 81e7410b78 |
| 58edb3a11c |
| ad95b2b906 |
| d991edf074 |
| f2f197b6b2 |
| 8bb33f7d8c |
| 6f944de32f |
| c15938532d |
| 94b55bfcdc |
| bcb30f7951 |
| 3bd8f7df8e |
| d7529c3265 |
| bf4217f0ff |
| bb7b339ae2 |
| 9184a32b4b |
| 78a040d78a |
| ab6c98eed7 |
| 6b84118e33 |
| 2bc8aa939f |
| 462fcd7c30 |
| ffbe504242 |
| 4fcf78cfec |
| 415a657d08 |
| a293dcc1c5 |
| b890e8bea0 |
| bf655c0bea |
| 8ef6f0685b |
| 057c6ad2ba |
| c44110c29f |
| baf222f772 |
| 4cac30b54a |
| 460118b4c8 |
| 42ca484b6b |
| 75550c8e2c |
| 02f9c1502b |
| d6701c68d3 |
| 726dcd1e87 |
| b0a6bde2fb |
| 69723ca40e |
| 97131b4a73 |
| a3f86b1fa9 |
| 41413e7a71 |
| d7d0d9fef3 |
| a876fff5ba |
| 7ddcc7b20b |
| 779d613941 |
| 0d360a1831 |
| d44d0f94da |
| 69e857b387 |
| 42624511cf |
| 20a5d46b50 |
| 62cc419262 |
| 264684d31d |
| e295c99eca |
| 9fda3e417e |
| d2914ca243 |
| 4619261da0 |
| 14d930d131 |
| ff8bf05def |
| 150cd12b66 |
| fae24b6da6 |
| ed69a74463 |
| acc9cafc7c |
| d25e5d48ea |
| 774eebdf6b |
| 979e5ecec0 |
| b1a9a827d6 |
| e413947cc5 |
| c313be63b2 |
| 4adcebe284 |
| 2a835ee029 |
| 3ad1c7d4e1 |
| b1a7074010 |
| 6f2dbaab5f |
| 781dc0570d |
| 1f64036d87 |
| 4eb46b493f |
| d73126d582 |
| 637a3e111b |
| 8b5c63bffa |
| 6e0618704a |
| 64665ec462 |
| 1925a70f7e |
| 02625fc959 |
| d799b78663 |
400 changed files with 11906 additions and 17646 deletions
.envrc (1 change)

@@ -1 +0,0 @@
-use flake
.gitattributes (vendored, 1 change)

@@ -3,4 +3,5 @@
 /lib/src/unicode/*.h linguist-vendored
 /lib/src/unicode/LICENSE linguist-vendored
 
+/cli/src/generate/prepare_grammar/*.json -diff
 Cargo.lock -diff
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 2 changes)

@@ -1,6 +1,6 @@
 name: Bug Report
 description: Report a problem
-type: Bug
+labels: [bug]
 body:
   - type: textarea
     attributes:
.github/ISSUE_TEMPLATE/feature_request.yml (vendored, 2 changes)

@@ -1,6 +1,6 @@
 name: Feature request
 description: Request an enhancement
-type: Feature
+labels: [enhancement]
 body:
   - type: markdown
     attributes:
.github/actions/cache/action.yml (vendored, 3 changes)

@@ -17,9 +17,10 @@ runs:
           test/fixtures/grammars
           target/release/tree-sitter-*.wasm
         key: fixtures-${{ join(matrix.*, '_') }}-${{ hashFiles(
-          'crates/generate/src/**',
+          'cli/generate/src/**',
           'lib/src/parser.h',
           'lib/src/array.h',
           'lib/src/alloc.h',
+          'xtask/src/*',
           'test/fixtures/grammars/*/**/src/*.c',
           '.github/actions/cache/action.yml') }}
.github/dependabot.yml (vendored, 27 changes)

@@ -4,8 +4,6 @@ updates:
     directory: "/"
     schedule:
       interval: "weekly"
-    cooldown:
-      default-days: 3
     commit-message:
       prefix: "build(deps)"
     labels:
@@ -14,16 +12,10 @@ updates:
     groups:
       cargo:
        patterns: ["*"]
-    ignore:
-      - dependency-name: "*"
-        update-types: ["version-update:semver-major", "version-update:semver-minor"]
-
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
       interval: "weekly"
-    cooldown:
-      default-days: 3
     commit-message:
       prefix: "ci"
     labels:
@@ -32,22 +24,3 @@ updates:
     groups:
       actions:
         patterns: ["*"]
-
-  - package-ecosystem: "npm"
-    versioning-strategy: increase
-    directories:
-      - "/crates/npm"
-      - "/crates/eslint"
-      - "/lib/binding_web"
-    schedule:
-      interval: "weekly"
-    cooldown:
-      default-days: 3
-    commit-message:
-      prefix: "build(deps)"
-    labels:
-      - "dependencies"
-      - "npm"
-    groups:
-      npm:
-        patterns: ["*"]
.github/scripts/close_spam.js (vendored, 29 changes)

@@ -1,29 +0,0 @@
-module.exports = async ({ github, context }) => {
-  let target = context.payload.issue;
-  if (target) {
-    await github.rest.issues.update({
-      ...context.repo,
-      issue_number: target.number,
-      state: "closed",
-      state_reason: "not_planned",
-      title: "[spam]",
-      body: "",
-      type: null,
-    });
-  } else {
-    target = context.payload.pull_request;
-    await github.rest.pulls.update({
-      ...context.repo,
-      pull_number: target.number,
-      state: "closed",
-      title: "[spam]",
-      body: "",
-    });
-  }
-
-  await github.rest.issues.lock({
-    ...context.repo,
-    issue_number: target.number,
-    lock_reason: "spam",
-  });
-};
.github/scripts/cross.sh (vendored, executable file, 3 changes)

@@ -0,0 +1,3 @@
+#!/bin/bash -eu
+
+exec docker run --rm -v /home/runner:/home/runner -w "$PWD" "$CROSS_IMAGE" "$@"
.github/scripts/make.sh (vendored, executable file, 9 changes)

@@ -0,0 +1,9 @@
+#!/bin/bash -eu
+
+tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
+
+if [[ $BUILD_CMD == cross ]]; then
+  cross.sh make CC="$CC" AR="$AR" "$@"
+else
+  exec make "$@"
+fi
.github/scripts/tree-sitter.sh (vendored, executable file, 9 changes)

@@ -0,0 +1,9 @@
+#!/bin/bash -eu
+
+tree_sitter="$ROOT"/target/"$TARGET"/release/tree-sitter
+
+if [[ $BUILD_CMD == cross ]]; then
+  cross.sh "$CROSS_RUNNER" "$tree_sitter" "$@"
+else
+  exec "$tree_sitter" "$@"
+fi
.github/scripts/wasm_stdlib.js (vendored, 25 changes)

@@ -1,25 +0,0 @@
-module.exports = async ({ github, context, core }) => {
-  if (context.eventName !== 'pull_request') return;
-
-  const prNumber = context.payload.pull_request.number;
-  const owner = context.repo.owner;
-  const repo = context.repo.repo;
-
-  const { data: files } = await github.rest.pulls.listFiles({
-    owner,
-    repo,
-    pull_number: prNumber
-  });
-
-  const changedFiles = files.map(file => file.filename);
-
-  const wasmStdLibSrc = 'crates/language/wasm/';
-  const dirChanged = changedFiles.some(file => file.startsWith(wasmStdLibSrc));
-
-  if (!dirChanged) return;
-
-  const wasmStdLibHeader = 'lib/src/wasm/wasm-stdlib.h';
-  const requiredChanged = changedFiles.includes(wasmStdLibHeader);
-
-  if (!requiredChanged) core.setFailed(`Changes detected in ${wasmStdLibSrc} but ${wasmStdLibHeader} was not modified.`);
-};
.github/workflows/backport.yml (vendored, 6 changes)

@@ -14,17 +14,17 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Create app token
-        uses: actions/create-github-app-token@v2
+        uses: actions/create-github-app-token@v1
         id: app-token
         with:
           app-id: ${{ vars.BACKPORT_APP }}
           private-key: ${{ secrets.BACKPORT_KEY }}
 
       - name: Create backport PR
-        uses: korthout/backport-action@v4
+        uses: korthout/backport-action@v3
         with:
           pull_title: "${pull_title}"
           label_pattern: "^ci:backport ([^ ]+)$"
.github/workflows/bindgen.yml (vendored, 2 changes)

@@ -16,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Set up stable Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
.github/workflows/build.yml (vendored, 295 changes)

@@ -1,5 +1,10 @@
 name: Build & Test
 
+env:
+  CARGO_TERM_COLOR: always
+  RUSTFLAGS: "-D warnings"
+  CROSS_DEBUG: 1
+
 on:
   workflow_call:
     inputs:
@@ -26,41 +31,38 @@ jobs:
           - windows-x86
           - macos-arm64
          - macos-x64
-          - wasm32
 
         include:
           # When adding a new `target`:
           # 1. Define a new platform alias above
-          # 2. Add a new record to the matrix map in `crates/cli/npm/install.js`
-          - { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-24.04-arm }
-          - { platform: linux-arm , target: armv7-unknown-linux-gnueabihf , os: ubuntu-24.04-arm }
-          - { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-24.04 }
-          - { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-24.04 }
-          - { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-24.04 }
-          - { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-11-arm }
-          - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-2025 }
-          - { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-2025 }
-          - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-15 }
-          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-15-intel }
-          - { platform: wasm32 , target: wasm32-unknown-unknown , os: ubuntu-24.04 }
+          # 2. Add a new record to the matrix map in `cli/npm/install.js`
+          - { platform: linux-arm64 , target: aarch64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+          - { platform: linux-arm , target: arm-unknown-linux-gnueabi , os: ubuntu-latest , use-cross: true }
+          - { platform: linux-x64 , target: x86_64-unknown-linux-gnu , os: ubuntu-22.04 , features: wasm }
+          - { platform: linux-x86 , target: i686-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+          - { platform: linux-powerpc64 , target: powerpc64-unknown-linux-gnu , os: ubuntu-latest , use-cross: true }
+          - { platform: windows-arm64 , target: aarch64-pc-windows-msvc , os: windows-latest }
+          - { platform: windows-x64 , target: x86_64-pc-windows-msvc , os: windows-latest , features: wasm }
+          - { platform: windows-x86 , target: i686-pc-windows-msvc , os: windows-latest }
+          - { platform: macos-arm64 , target: aarch64-apple-darwin , os: macos-latest , features: wasm }
+          - { platform: macos-x64 , target: x86_64-apple-darwin , os: macos-13 , features: wasm }
 
-          # Extra features
-          - { platform: linux-arm64 , features: wasm }
-          - { platform: linux-x64 , features: wasm }
-          - { platform: macos-arm64 , features: wasm }
-          - { platform: macos-x64 , features: wasm }
+          # Cross compilers for C library
+          - { platform: linux-arm64 , cc: aarch64-linux-gnu-gcc , ar: aarch64-linux-gnu-ar }
+          - { platform: linux-arm , cc: arm-linux-gnueabi-gcc , ar: arm-linux-gnueabi-ar }
+          - { platform: linux-x86 , cc: i686-linux-gnu-gcc , ar: i686-linux-gnu-ar }
+          - { platform: linux-powerpc64 , cc: powerpc64-linux-gnu-gcc , ar: powerpc64-linux-gnu-ar }
 
-          # Cross-compilation
-          - { platform: linux-arm , cross: true }
-          - { platform: linux-x86 , cross: true }
-          - { platform: linux-powerpc64 , cross: true }
+          # Prevent race condition (see #2041)
+          - { platform: windows-x64 , rust-test-threads: 1 }
+          - { platform: windows-x86 , rust-test-threads: 1 }
 
-          # Compile-only
-          - { platform: wasm32 , no-run: true }
+          # Can't natively run CLI on Github runner's host
+          - { platform: windows-arm64 , no-run: true }
 
     env:
-      CARGO_TERM_COLOR: always
-      RUSTFLAGS: -D warnings
+      BUILD_CMD: cargo
+      SUFFIX: ${{ contains(matrix.target, 'windows') && '.exe' || '' }}
 
     defaults:
       run:
@@ -68,28 +70,13 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
-      - name: Set up cross-compilation
-        if: matrix.cross
-        run: |
-          for target in armv7-unknown-linux-gnueabihf i686-unknown-linux-gnu powerpc64-unknown-linux-gnu; do
-            camel_target=${target//-/_}; target_cc=${target/-unknown/}
-            printf 'CC_%s=%s\n' "$camel_target" "${target_cc/v7/}-gcc"
-            printf 'AR_%s=%s\n' "$camel_target" "${target_cc/v7/}-ar"
-            printf 'CARGO_TARGET_%s_LINKER=%s\n' "${camel_target^^}" "${target_cc/v7/}-gcc"
-          done >> $GITHUB_ENV
-          {
-            printf 'CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_RUNNER=qemu-arm -L /usr/arm-linux-gnueabihf\n'
-            printf 'CARGO_TARGET_POWERPC64_UNKNOWN_LINUX_GNU_RUNNER=qemu-ppc64 -L /usr/powerpc64-linux-gnu\n'
-          } >> $GITHUB_ENV
-
-      - name: Get emscripten version
-        if: contains(matrix.features, 'wasm')
-        run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<crates/loader/emscripten-version)" >> $GITHUB_ENV
+      - name: Read Emscripten version
+        run: printf 'EMSCRIPTEN_VERSION=%s\n' "$(<cli/loader/emscripten-version)" >> $GITHUB_ENV
 
       - name: Install Emscripten
-        if: contains(matrix.features, 'wasm')
+        if: ${{ !matrix.no-run && !matrix.use-cross }}
         uses: mymindstorm/setup-emsdk@v14
         with:
           version: ${{ env.EMSCRIPTEN_VERSION }}
@@ -99,82 +86,63 @@ jobs:
         with:
           target: ${{ matrix.target }}
 
-      - name: Install cross-compilation toolchain
-        if: matrix.cross
+      - name: Install cross
+        if: ${{ matrix.use-cross }}
         run: |
-          sudo apt-get update -qy
-          if [[ $PLATFORM == linux-arm ]]; then
-            sudo apt-get install -qy {binutils,gcc}-arm-linux-gnueabihf qemu-user
-          elif [[ $PLATFORM == linux-x86 ]]; then
-            sudo apt-get install -qy {binutils,gcc}-i686-linux-gnu
-          elif [[ $PLATFORM == linux-powerpc64 ]]; then
-            sudo apt-get install -qy {binutils,gcc}-powerpc64-linux-gnu qemu-user
+          if [ ! -x "$(command -v cross)" ]; then
+            # TODO: Remove 'RUSTFLAGS=""' once https://github.com/cross-rs/cross/issues/1561 is resolved
+            RUSTFLAGS="" cargo install cross --git https://github.com/cross-rs/cross
           fi
-        env:
-          PLATFORM: ${{ matrix.platform }}
 
-      - name: Install MinGW and Clang (Windows x64 MSYS2)
-        if: matrix.platform == 'windows-x64'
-        uses: msys2/setup-msys2@v2
-        with:
-          update: true
-          install: |
-            mingw-w64-x86_64-toolchain
-            mingw-w64-x86_64-clang
-            mingw-w64-x86_64-make
-            mingw-w64-x86_64-cmake
-
-      # TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
-      # the `mismatched-lifetime-syntaxes` lint
-      - name: Build wasmtime library (Windows x64 MSYS2)
-        if: contains(matrix.features, 'wasm') && matrix.platform == 'windows-x64'
+      - name: Configure cross
+        if: ${{ matrix.use-cross }}
         run: |
-          mkdir -p target
-          WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
-            jq -r '.packages[] | select(.name == "wasmtime-c-api-impl") | .version')
-          curl -LSs "$WASMTIME_REPO/archive/refs/tags/v${WASMTIME_VERSION}.tar.gz" | tar xzf - -C target
-          cd target/wasmtime-${WASMTIME_VERSION}
-          cmake -S crates/c-api -B target/c-api \
-            -DCMAKE_INSTALL_PREFIX="$PWD/artifacts" \
-            -DWASMTIME_DISABLE_ALL_FEATURES=ON \
-            -DWASMTIME_FEATURE_CRANELIFT=ON \
-            -DWASMTIME_TARGET='x86_64-pc-windows-gnu'
-          cmake --build target/c-api && cmake --install target/c-api
-          printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
-        env:
-          WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
-          RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow
+          printf '%s\n' > Cross.toml \
+            '[target.${{ matrix.target }}]' \
+            'image = "ghcr.io/cross-rs/${{ matrix.target }}:edge"' \
+            '[build]' \
+            'pre-build = [' \
+            ' "dpkg --add-architecture $CROSS_DEB_ARCH",' \
+            ' "curl -fsSL https://deb.nodesource.com/setup_22.x | bash -",' \
+            ' "apt-get update && apt-get -y install libssl-dev nodejs"' \
+            ']'
+          cat - Cross.toml <<< 'Cross.toml:'
+          printf '%s\n' >> $GITHUB_ENV \
+            "CROSS_CONFIG=$PWD/Cross.toml" \
+            "CROSS_IMAGE=ghcr.io/cross-rs/${{ matrix.target }}:edge"
 
-      - name: Build C library (Windows x64 MSYS2 CMake)
-        if: matrix.platform == 'windows-x64'
-        shell: msys2 {0}
+      - name: Set up environment
+        env:
+          RUST_TEST_THREADS: ${{ matrix.rust-test-threads }}
+          USE_CROSS: ${{ matrix.use-cross }}
+          TARGET: ${{ matrix.target }}
+          CC: ${{ matrix.cc }}
+          AR: ${{ matrix.ar }}
         run: |
-          cmake -G Ninja -S . -B build/static \
-            -DBUILD_SHARED_LIBS=OFF \
-            -DCMAKE_BUILD_TYPE=Debug \
-            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
-            -DTREE_SITTER_FEATURE_WASM=$WASM \
-            -DCMAKE_C_COMPILER=clang
-          cmake --build build/static
+          PATH="$PWD/.github/scripts:$PATH"
+          printf '%s/.github/scripts\n' "$PWD" >> $GITHUB_PATH
 
-          cmake -G Ninja -S . -B build/shared \
-            -DBUILD_SHARED_LIBS=ON \
-            -DCMAKE_BUILD_TYPE=Debug \
-            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
-            -DTREE_SITTER_FEATURE_WASM=$WASM \
-            -DCMAKE_C_COMPILER=clang
-          cmake --build build/shared
-          rm -rf \
-            build/{static,shared} \
-            "${CMAKE_PREFIX_PATH}/artifacts" \
-            target/wasmtime-${WASMTIME_VERSION}
-        env:
-          WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
+          printf '%s\n' >> $GITHUB_ENV \
+            'TREE_SITTER=tree-sitter.sh' \
+            "TARGET=$TARGET" \
+            "ROOT=$PWD"
+
+          [[ -n $RUST_TEST_THREADS ]] && \
+            printf 'RUST_TEST_THREADS=%s\n' "$RUST_TEST_THREADS" >> $GITHUB_ENV
+
+          [[ -n $CC ]] && printf 'CC=%s\n' "$CC" >> $GITHUB_ENV
+          [[ -n $AR ]] && printf 'AR=%s\n' "$AR" >> $GITHUB_ENV
+
+          if [[ $USE_CROSS == true ]]; then
+            printf 'BUILD_CMD=cross\n' >> $GITHUB_ENV
+            runner=$(cross.sh bash -c "env | sed -n 's/^CARGO_TARGET_.*_RUNNER=//p'")
+            [[ -n $runner ]] && printf 'CROSS_RUNNER=%s\n' "$runner" >> $GITHUB_ENV
+          fi
 
       # TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses
       # the `mismatched-lifetime-syntaxes` lint
       - name: Build wasmtime library
-        if: contains(matrix.features, 'wasm')
+        if: ${{ !matrix.use-cross && contains(matrix.features, 'wasm') }}
         run: |
           mkdir -p target
           WASMTIME_VERSION=$(cargo metadata --format-version=1 --locked --features wasm | \
@@ -190,47 +158,37 @@ jobs:
           printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV
         env:
           WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime
-          RUSTFLAGS: ${{ env.RUSTFLAGS }} --cap-lints allow
+          RUSTFLAGS: "--cap-lints allow"
 
       - name: Build C library (make)
-        if: runner.os != 'Windows'
-        run: |
-          if [[ $PLATFORM == linux-arm ]]; then
-            CC=arm-linux-gnueabihf-gcc; AR=arm-linux-gnueabihf-ar
-          elif [[ $PLATFORM == linux-x86 ]]; then
-            CC=i686-linux-gnu-gcc; AR=i686-linux-gnu-ar
-          elif [[ $PLATFORM == linux-powerpc64 ]]; then
-            CC=powerpc64-linux-gnu-gcc; AR=powerpc64-linux-gnu-ar
-          else
-            CC=gcc; AR=ar
-          fi
-          make -j CFLAGS="$CFLAGS" CC=$CC AR=$AR
+        if: ${{ runner.os != 'Windows' }}
+        run: make.sh -j CFLAGS="$CFLAGS"
         env:
-          PLATFORM: ${{ matrix.platform }}
           CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 
       - name: Build C library (CMake)
-        if: "!matrix.cross"
+        if: ${{ !matrix.use-cross }}
         run: |
-          cmake -S . -B build/static \
+          cmake -S lib -B build/static \
            -DBUILD_SHARED_LIBS=OFF \
            -DCMAKE_BUILD_TYPE=Debug \
            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
            -DTREE_SITTER_FEATURE_WASM=$WASM
          cmake --build build/static --verbose
 
-          cmake -S . -B build/shared \
+          cmake -S lib -B build/shared \
            -DBUILD_SHARED_LIBS=ON \
            -DCMAKE_BUILD_TYPE=Debug \
            -DCMAKE_COMPILE_WARNING_AS_ERROR=ON \
            -DTREE_SITTER_FEATURE_WASM=$WASM
          cmake --build build/shared --verbose
        env:
-          CC: ${{ contains(matrix.platform, 'linux') && 'clang' || '' }}
+          CC: ${{ contains(matrix.target, 'linux') && 'clang' || '' }}
          WASM: ${{ contains(matrix.features, 'wasm') && 'ON' || 'OFF' }}
 
-      - name: Build Wasm library
-        if: contains(matrix.features, 'wasm')
+      - name: Build wasm library
+        # No reason to build on the same Github runner hosts many times
+        if: ${{ !matrix.no-run && !matrix.use-cross }}
         shell: bash
         run: |
           cd lib/binding_web
@@ -241,71 +199,70 @@ jobs:
           npm run build:debug
 
       - name: Check no_std builds
-        if: inputs.run-test && !matrix.no-run
-        working-directory: lib
+        if: ${{ !matrix.no-run && inputs.run-test }}
         shell: bash
-        run: cargo check --no-default-features --target='${{ matrix.target }}'
+        run: |
+          cd lib
+          $BUILD_CMD check --no-default-features
 
       - name: Build target
-        run: cargo build --release --target='${{ matrix.target }}' --features='${{ matrix.features }}' $PACKAGE
-        env:
-          PACKAGE: ${{ matrix.platform == 'wasm32' && '-p tree-sitter' || '' }}
+        run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.features }}
 
       - name: Cache fixtures
         id: cache
-        if: inputs.run-test && !matrix.no-run
+        if: ${{ !matrix.no-run && inputs.run-test }}
         uses: ./.github/actions/cache
 
       - name: Fetch fixtures
-        if: inputs.run-test && !matrix.no-run
-        run: cargo run -p xtask --target='${{ matrix.target }}' -- fetch-fixtures
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        run: $BUILD_CMD run -p xtask -- fetch-fixtures
 
       - name: Generate fixtures
-        if: inputs.run-test && !matrix.no-run && steps.cache.outputs.cache-hit != 'true'
-        run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures
+        if: ${{ !matrix.no-run && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+        run: $BUILD_CMD run -p xtask -- generate-fixtures
 
       - name: Generate Wasm fixtures
-        if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm') && steps.cache.outputs.cache-hit != 'true'
-        run: cargo run -p xtask --target='${{ matrix.target }}' -- generate-fixtures --wasm
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test && steps.cache.outputs.cache-hit != 'true' }}
+        run: $BUILD_CMD run -p xtask -- generate-fixtures --wasm
 
       - name: Run main tests
-        if: inputs.run-test && !matrix.no-run
-        run: cargo test --target='${{ matrix.target }}' --features='${{ matrix.features }}'
+        if: ${{ !matrix.no-run && inputs.run-test }}
+        run: $BUILD_CMD test --target=${{ matrix.target }} --features=${{ matrix.features }}
 
-      - name: Run Wasm tests
-        if: inputs.run-test && !matrix.no-run && contains(matrix.features, 'wasm')
-        run: cargo run -p xtask --target='${{ matrix.target }}' -- test-wasm
+      - name: Run wasm tests
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
+        run: $BUILD_CMD run -p xtask -- test-wasm
+
+      - name: Run benchmarks
+        # Cross-compiled benchmarks are pointless
+        if: ${{ !matrix.no-run && !matrix.use-cross && inputs.run-test }}
+        run: $BUILD_CMD bench benchmark -p tree-sitter-cli --target=${{ matrix.target }}
 
       - name: Upload CLI artifact
-        if: "!matrix.no-run"
-        uses: actions/upload-artifact@v6
+        uses: actions/upload-artifact@v4
         with:
           name: tree-sitter.${{ matrix.platform }}
-          path: target/${{ matrix.target }}/release/tree-sitter${{ contains(matrix.target, 'windows') && '.exe' || '' }}
+          path: target/${{ matrix.target }}/release/tree-sitter${{ env.SUFFIX }}
           if-no-files-found: error
           retention-days: 7
 
       - name: Upload Wasm artifacts
-        if: matrix.platform == 'linux-x64'
-        uses: actions/upload-artifact@v6
+        if: ${{ matrix.platform == 'linux-x64' }}
+        uses: actions/upload-artifact@v4
         with:
           name: tree-sitter.wasm
           path: |
-            lib/binding_web/web-tree-sitter.js
-            lib/binding_web/web-tree-sitter.js.map
-            lib/binding_web/web-tree-sitter.cjs
-            lib/binding_web/web-tree-sitter.cjs.map
-            lib/binding_web/web-tree-sitter.wasm
-            lib/binding_web/web-tree-sitter.wasm.map
-            lib/binding_web/debug/web-tree-sitter.cjs
-            lib/binding_web/debug/web-tree-sitter.cjs.map
-            lib/binding_web/debug/web-tree-sitter.js
-            lib/binding_web/debug/web-tree-sitter.js.map
-            lib/binding_web/debug/web-tree-sitter.wasm
-            lib/binding_web/debug/web-tree-sitter.wasm.map
-            lib/binding_web/lib/*.c
-            lib/binding_web/lib/*.h
-            lib/binding_web/lib/*.ts
-            lib/binding_web/src/*.ts
+            lib/binding_web/tree-sitter.js
+            lib/binding_web/tree-sitter.js.map
+            lib/binding_web/tree-sitter.cjs
+            lib/binding_web/tree-sitter.cjs.map
+            lib/binding_web/tree-sitter.wasm
+            lib/binding_web/tree-sitter.wasm.map
+            lib/binding_web/debug/tree-sitter.cjs
+            lib/binding_web/debug/tree-sitter.cjs.map
+            lib/binding_web/debug/tree-sitter.js
+            lib/binding_web/debug/tree-sitter.js.map
+            lib/binding_web/debug/tree-sitter.wasm
+            lib/binding_web/debug/tree-sitter.wasm.map
           if-no-files-found: error
           retention-days: 7
.github/workflows/ci.yml (vendored, 5 changes)

@@ -26,7 +26,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Set up stable Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -44,6 +44,3 @@ jobs:
 
   build:
     uses: ./.github/workflows/build.yml
-
-  check-wasm-stdlib:
-    uses: ./.github/workflows/wasm_stdlib.yml
.github/workflows/docs.yml (vendored, 7 changes)

@@ -3,7 +3,6 @@ on:
   push:
     branches: [master]
     paths: [docs/**]
-  workflow_dispatch:
 
 jobs:
   deploy-docs:
@@ -16,7 +15,7 @@ jobs:
 
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Set up Rust
         uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -26,7 +25,7 @@ jobs:
           GH_TOKEN: ${{ github.token }}
         run: |
           jq_expr='.assets[] | select(.name | contains("x86_64-unknown-linux-gnu")) | .browser_download_url'
-          url=$(gh api repos/rust-lang/mdbook/releases/tags/v0.4.52 --jq "$jq_expr")
+          url=$(gh api repos/rust-lang/mdbook/releases/latest --jq "$jq_expr")
           mkdir mdbook
           curl -sSL "$url" | tar -xz -C mdbook
           printf '%s/mdbook\n' "$PWD" >> "$GITHUB_PATH"
@@ -41,7 +40,7 @@ jobs:
         uses: actions/configure-pages@v5
 
       - name: Upload artifact
-        uses: actions/upload-pages-artifact@v4
+        uses: actions/upload-pages-artifact@v3
         with:
           path: docs/book
 
.github/workflows/emscripten.yml (vendored, new file, 30 changes)

@@ -0,0 +1,30 @@
+name: Update Emscripten
+
+on:
+  pull_request:
+    types: [opened, synchronize]
+
+permissions:
+  contents: write
+  pull-requests: read
+
+jobs:
+  update-emscripten:
+    if: github.actor == 'dependabot[bot]'
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+
+      - name: Set up stable Rust toolchain
+        uses: actions-rust-lang/setup-rust-toolchain@v1
+
+      - name: Run emscripten update xtask
+        run: |
+          git config --global user.name "dependabot[bot]"
+          git config --global user.email "49699333+dependabot[bot]@users.noreply.github.com"
+          cargo xtask upgrade-emscripten
+
+      - name: Push updated version
+        run: git push origin HEAD:$GITHUB_HEAD_REF
.github/workflows/nvim_ts.yml (vendored, 13 changes)

@@ -3,10 +3,7 @@ name: nvim-treesitter parser tests
 on:
   pull_request:
     paths:
-      - 'crates/cli/**'
-      - 'crates/config/**'
-      - 'crates/generate/**'
-      - 'crates/loader/**'
+      - 'cli/**'
       - '.github/workflows/nvim_ts.yml'
   workflow_dispatch:
 
@@ -16,7 +13,7 @@ concurrency:
 
 jobs:
   check_compilation:
-    timeout-minutes: 30
+    timeout-minutes: 20
     strategy:
       fail-fast: false
       matrix:
@@ -28,9 +25,9 @@ jobs:
       NVIM: ${{ matrix.os == 'windows-latest' && 'nvim-win64\\bin\\nvim.exe' || 'nvim' }}
       NVIM_TS_DIR: nvim-treesitter
     steps:
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
 
-      - uses: actions/checkout@v6
+      - uses: actions/checkout@v4
         with:
           repository: nvim-treesitter/nvim-treesitter
           path: ${{ env.NVIM_TS_DIR }}
@@ -58,7 +55,7 @@ jobs:
 
       - if: matrix.type == 'build'
         name: Compile parsers
-        run: $NVIM -l ./scripts/install-parsers.lua --max-jobs=10
+        run: $NVIM -l ./scripts/install-parsers.lua
         working-directory: ${{ env.NVIM_TS_DIR }}
         shell: bash
 
.github/workflows/release.yml (vendored, 76 changes)

@@ -17,15 +17,13 @@ jobs:
     runs-on: ubuntu-latest
     needs: build
     permissions:
-      id-token: write
-      attestations: write
       contents: write
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Download build artifacts
-        uses: actions/download-artifact@v7
+        uses: actions/download-artifact@v4
         with:
           path: artifacts
 
@@ -35,13 +33,26 @@ jobs:
 
       - name: Prepare release artifacts
         run: |
-          mkdir -p target web
-          mv artifacts/tree-sitter.wasm/* web/
+          mkdir -p target
+          mv artifacts/tree-sitter.wasm/* target/
 
-          tar -czf target/web-tree-sitter.tar.gz -C web .
+          # Rename files
+          mv target/tree-sitter.js target/web-tree-sitter.js
+          mv target/tree-sitter.js.map target/web-tree-sitter.js.map
+          mv target/tree-sitter.cjs target/web-tree-sitter.cjs
+          mv target/tree-sitter.cjs.map target/web-tree-sitter.cjs.map
+          mv target/tree-sitter.wasm target/web-tree-sitter.wasm
+          mv target/tree-sitter.wasm.map target/web-tree-sitter.wasm.map
 
+          mv target/debug/tree-sitter.js target/web-tree-sitter-debug.js
+          mv target/debug/tree-sitter.js.map target/web-tree-sitter-debug.js.map
+          mv target/debug/tree-sitter.cjs target/web-tree-sitter-debug.cjs
+          mv target/debug/tree-sitter.cjs.map target/web-tree-sitter-debug.cjs.map
+          mv target/debug/tree-sitter.wasm target/web-tree-sitter-debug.wasm
+          mv target/debug/tree-sitter.wasm.map target/web-tree-sitter-debug.wasm.map
+          rm -rf target/debug
+
           rm -r artifacts/tree-sitter.wasm
-
           for platform in $(cd artifacts; ls | sed 's/^tree-sitter\.//'); do
             exe=$(ls artifacts/tree-sitter.$platform/tree-sitter*)
             gzip --stdout --name $exe > target/tree-sitter-$platform.gz
@@ -49,65 +60,57 @@ jobs:
           rm -rf artifacts
           ls -l target/
 
-      - name: Generate attestations
-        uses: actions/attest-build-provenance@v3
-        with:
-          subject-path: |
-            target/tree-sitter-*.gz
-            target/web-tree-sitter.tar.gz
-
       - name: Create release
         run: |-
-          gh release create $GITHUB_REF_NAME \
+          gh release create ${{ github.ref_name }} \
             target/tree-sitter-*.gz \
-            target/web-tree-sitter.tar.gz
+            target/web-tree-sitter.js \
+            target/web-tree-sitter.js.map \
+            target/web-tree-sitter.cjs \
+            target/web-tree-sitter.cjs.map \
+            target/web-tree-sitter.wasm \
+            target/web-tree-sitter.wasm.map \
+            target/web-tree-sitter-debug.js \
+            target/web-tree-sitter-debug.js.map \
+            target/web-tree-sitter-debug.cjs \
+            target/web-tree-sitter-debug.cjs.map \
+            target/web-tree-sitter-debug.wasm \
+            target/web-tree-sitter-debug.wasm.map
         env:
           GH_TOKEN: ${{ github.token }}
 
   crates_io:
     name: Publish packages to Crates.io
     runs-on: ubuntu-latest
-    environment: crates
-    permissions:
-      id-token: write
-      contents: read
     needs: release
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Set up Rust
         uses: actions-rust-lang/setup-rust-toolchain@v1
 
-      - name: Set up registry token
-        id: auth
-        uses: rust-lang/crates-io-auth-action@v1
-
       - name: Publish crates to Crates.io
         uses: katyo/publish-crates@v2
         with:
-          registry-token: ${{ steps.auth.outputs.token }}
+          registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
 
   npm:
     name: Publish packages to npmjs.com
     runs-on: ubuntu-latest
-    environment: npm
-    permissions:
-      id-token: write
-      contents: read
     needs: release
     strategy:
       fail-fast: false
       matrix:
-        directory: [crates/cli/npm, lib/binding_web]
+        directory: [cli/npm, lib/binding_web]
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Set up Node
-        uses: actions/setup-node@v6
+        uses: actions/setup-node@v4
         with:
-          node-version: 24
+          node-version: 20
           registry-url: https://registry.npmjs.org
 
       - name: Set up Rust
@@ -122,8 +125,9 @@ jobs:
           npm run build:debug
           CJS=true npm run build
           CJS=true npm run build:debug
-          npm run build:dts
 
       - name: Publish to npmjs.com
         working-directory: ${{ matrix.directory }}
         run: npm publish
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
.github/workflows/response.yml (vendored, 8 changes)

@@ -17,13 +17,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
         with:
           sparse-checkout: .github/scripts/close_unresponsive.js
           sparse-checkout-cone-mode: false
 
       - name: Run script
-        uses: actions/github-script@v8
+        uses: actions/github-script@v7
         with:
           script: |
             const script = require('./.github/scripts/close_unresponsive.js')
@@ -35,13 +35,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
         with:
           sparse-checkout: .github/scripts/remove_response_label.js
           sparse-checkout-cone-mode: false
 
       - name: Run script
-        uses: actions/github-script@v8
+        uses: actions/github-script@v7
         with:
           script: |
             const script = require('./.github/scripts/remove_response_label.js')
.github/workflows/reviewers_remove.yml (vendored, 4 changes)

@@ -12,13 +12,13 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout script
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
         with:
           sparse-checkout: .github/scripts/reviewers_remove.js
           sparse-checkout-cone-mode: false
 
       - name: Run script
-        uses: actions/github-script@v8
+        uses: actions/github-script@v7
         with:
           script: |
             const script = require('./.github/scripts/reviewers_remove.js')
.github/workflows/sanitize.yml (vendored, 2 changes)

@@ -15,7 +15,7 @@ jobs:
       TREE_SITTER: ${{ github.workspace }}/target/release/tree-sitter
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Install UBSAN library
         run: sudo apt-get update -y && sudo apt-get install -y libubsan1
.github/workflows/spam.yml (vendored, 29 changes)

@@ -1,29 +0,0 @@
-name: Close as spam
-
-on:
-  issues:
-    types: [labeled]
-  pull_request_target:
-    types: [labeled]
-
-permissions:
-  issues: write
-  pull-requests: write
-
-jobs:
-  spam:
-    runs-on: ubuntu-latest
-    if: github.event.label.name == 'spam'
-    steps:
-      - name: Checkout script
-        uses: actions/checkout@v6
-        with:
-          sparse-checkout: .github/scripts/close_spam.js
-          sparse-checkout-cone-mode: false
-
-      - name: Run script
-        uses: actions/github-script@v8
-        with:
-          script: |
-            const script = require('./.github/scripts/close_spam.js')
-            await script({github, context})
.github/workflows/wasm_exports.yml (vendored, 11 changes)

@@ -1,24 +1,23 @@
-name: Check Wasm Exports
+name: Check WASM Exports
 
 on:
   pull_request:
     paths:
       - lib/include/tree_sitter/api.h
       - lib/binding_web/**
-      - xtask/src/**
   push:
     branches: [master]
     paths:
       - lib/include/tree_sitter/api.h
       - lib/binding_rust/bindings.rs
-      - CMakeLists.txt
+      - lib/CMakeLists.txt
 
 jobs:
   check-wasm-exports:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v6
+        uses: actions/checkout@v4
 
       - name: Set up stable Rust toolchain
         uses: actions-rust-lang/setup-rust-toolchain@v1
@@ -33,9 +32,9 @@ jobs:
         env:
           CFLAGS: -g -Werror -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 
-      - name: Build Wasm Library
+      - name: Build WASM Library
         working-directory: lib/binding_web
         run: npm ci && npm run build:debug
 
-      - name: Check Wasm exports
+      - name: Check WASM exports
         run: cargo xtask check-wasm-exports
.github/workflows/wasm_stdlib.yml (vendored, 19 changes)

@@ -1,19 +0,0 @@
-name: Check Wasm Stdlib build
-
-on:
-  workflow_call:
-
-jobs:
-  check:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout repository
-        uses: actions/checkout@v6
-
-      - name: Check directory changes
-        uses: actions/github-script@v8
-        with:
-          script: |
-            const scriptPath = `${process.env.GITHUB_WORKSPACE}/.github/scripts/wasm_stdlib.js`;
-            const script = require(scriptPath);
-            return script({ github, context, core });
.gitignore (vendored, 5 changes)

@@ -1,12 +1,10 @@
 log*.html
-.direnv
 
 .idea
 *.xcodeproj
 .vscode
 .cache
 .zig-cache
-.direnv
 
 profile*
 fuzz-results
@@ -26,7 +24,6 @@ docs/assets/js/tree-sitter.js
 *.dylib
 *.so
 *.so.[0-9]*
-*.dll
 *.o
 *.obj
 *.exp
@@ -36,5 +33,3 @@ docs/assets/js/tree-sitter.js
 .build
 build
 zig-*
-
-/result
(unnamed file)

@@ -1,11 +0,0 @@
-{
-  "lsp": {
-    "rust-analyzer": {
-      "initialization_options": {
-        "cargo": {
-          "features": "all"
-        }
-      }
-    }
-  }
-}
Cargo.lock (generated, 1934 changes)

File diff suppressed because it is too large.
Cargo.toml (101 changes)

@@ -1,26 +1,26 @@
 [workspace]
-default-members = ["crates/cli"]
+default-members = ["cli"]
 members = [
-  "crates/cli",
-  "crates/config",
-  "crates/generate",
-  "crates/highlight",
-  "crates/loader",
-  "crates/tags",
-  "crates/xtask",
-  "crates/language",
+  "cli",
+  "cli/config",
+  "cli/generate",
+  "cli/loader",
   "lib",
+  "lib/language",
+  "tags",
+  "highlight",
+  "xtask",
 ]
 resolver = "2"
 
 [workspace.package]
-version = "0.27.0"
+version = "0.25.9"
 authors = [
   "Max Brunsfeld <maxbrunsfeld@gmail.com>",
   "Amaan Qureshi <amaanq12@gmail.com>",
 ]
 edition = "2021"
-rust-version = "1.85"
+rust-version = "1.82"
 homepage = "https://tree-sitter.github.io/tree-sitter"
 repository = "https://github.com/tree-sitter/tree-sitter"
 license = "MIT"
@@ -103,61 +103,62 @@ codegen-units = 256
 
 [workspace.dependencies]
 ansi_colours = "1.2.3"
-anstyle = "1.0.13"
-anyhow = "1.0.100"
-bstr = "1.12.0"
-cc = "1.2.53"
-clap = { version = "4.5.54", features = [
+anstyle = "1.0.10"
+anyhow = "1.0.95"
+bstr = "1.11.3"
+cc = "1.2.10"
+clap = { version = "4.5.27", features = [
   "cargo",
   "derive",
   "env",
   "help",
-  "string",
   "unstable-styles",
 ] }
-clap_complete = "4.5.65"
-clap_complete_nushell = "4.5.10"
-crc32fast = "1.5.0"
+clap_complete = "4.5.42"
+clap_complete_nushell = "4.5.5"
 ctor = "0.2.9"
-ctrlc = { version = "3.5.0", features = ["termination"] }
+ctrlc = { version = "3.4.5", features = ["termination"] }
 dialoguer = { version = "0.11.0", features = ["fuzzy-select"] }
-etcetera = "0.11.0"
+etcetera = "0.8.0"
+filetime = "0.2.25"
 fs4 = "0.12.0"
-glob = "0.3.3"
+git2 = "0.20.0"
+glob = "0.3.2"
 heck = "0.5.0"
 html-escape = "0.2.13"
-indexmap = "2.12.1"
-indoc = "2.0.6"
-libloading = "0.9.0"
-log = { version = "0.4.28", features = ["std"] }
-memchr = "2.7.6"
-once_cell = "1.21.3"
+indexmap = "2.7.1"
+indoc = "2.0.5"
+libloading = "0.8.6"
+log = { version = "0.4.25", features = ["std"] }
+memchr = "2.7.4"
+once_cell = "1.20.2"
+path-slash = "0.2.1"
 pretty_assertions = "1.4.1"
 rand = "0.8.5"
-regex = "1.11.3"
-regex-syntax = "0.8.6"
-rustc-hash = "2.1.1"
-schemars = "1.0.5"
-semver = { version = "1.0.27", features = ["serde"] }
-serde = { version = "1.0.219", features = ["derive"] }
-serde_json = { version = "1.0.149", features = ["preserve_order"] }
+regex = "1.11.1"
+regex-syntax = "0.8.5"
+rustc-hash = "2.1.0"
+semver = { version = "1.0.25", features = ["serde"] }
+serde = { version = "1.0.217", features = ["derive"] }
+serde_derive = "1.0.217"
+serde_json = { version = "1.0.137", features = ["preserve_order"] }
 similar = "2.7.0"
-smallbitvec = "2.6.0"
+smallbitvec = "2.5.3"
 streaming-iterator = "0.1.9"
-tempfile = "3.23.0"
-thiserror = "2.0.17"
+tempfile = "3.15.0"
+thiserror = "2.0.11"
 tiny_http = "0.12.0"
+toml = "0.8.19"
 topological-sort = "0.2.2"
-unindent = "0.2.4"
+unindent = "0.2.3"
+url = { version = "2.5.4", features = ["serde"] }
 walkdir = "2.5.0"
-wasmparser = "0.243.0"
-webbrowser = "1.0.5"
+wasmparser = "0.224.0"
+webbrowser = "1.0.3"
 
-tree-sitter = { version = "0.27.0", path = "./lib" }
-tree-sitter-generate = { version = "0.27.0", path = "./crates/generate" }
-tree-sitter-loader = { version = "0.27.0", path = "./crates/loader" }
-tree-sitter-config = { version = "0.27.0", path = "./crates/config" }
-tree-sitter-highlight = { version = "0.27.0", path = "./crates/highlight" }
-tree-sitter-tags = { version = "0.27.0", path = "./crates/tags" }
-
-tree-sitter-language = { version = "0.1", path = "./crates/language" }
+tree-sitter = { version = "0.25.9", path = "./lib" }
+tree-sitter-generate = { version = "0.25.9", path = "./cli/generate" }
+tree-sitter-loader = { version = "0.25.9", path = "./cli/loader" }
+tree-sitter-config = { version = "0.25.9", path = "./cli/config" }
+tree-sitter-highlight = { version = "0.25.9", path = "./highlight" }
+tree-sitter-tags = { version = "0.25.9", path = "./tags" }
2 LICENSE

@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2018 Max Brunsfeld
+Copyright (c) 2018-2024 Max Brunsfeld
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
41 Makefile

@@ -1,4 +1,8 @@
-VERSION := 0.27.0
+ifeq ($(OS),Windows_NT)
+$(error Windows is not supported)
+endif
+
+VERSION := 0.25.9
 DESCRIPTION := An incremental parsing system for programming tools
 HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
@@ -6,7 +10,6 @@ HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/
 PREFIX ?= /usr/local
 INCLUDEDIR ?= $(PREFIX)/include
 LIBDIR ?= $(PREFIX)/lib
-BINDIR ?= $(PREFIX)/bin
 PCLIBDIR ?= $(LIBDIR)/pkgconfig
 
 # collect sources
@@ -24,7 +27,7 @@ OBJ := $(SRC:.c=.o)
 ARFLAGS := rcs
 CFLAGS ?= -O3 -Wall -Wextra -Wshadow -Wpedantic -Werror=incompatible-pointer-types
 override CFLAGS += -std=c11 -fPIC -fvisibility=hidden
-override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE -D_BSD_SOURCE -D_DARWIN_C_SOURCE
+override CFLAGS += -D_POSIX_C_SOURCE=200112L -D_DEFAULT_SOURCE
 override CFLAGS += -Ilib/src -Ilib/src/wasm -Ilib/include
 
 # ABI versioning
@@ -32,25 +35,20 @@ SONAME_MAJOR := $(word 1,$(subst ., ,$(VERSION)))
 SONAME_MINOR := $(word 2,$(subst ., ,$(VERSION)))
 
 # OS-specific bits
-MACHINE := $(shell $(CC) -dumpmachine)
-
-ifneq ($(findstring darwin,$(MACHINE)),)
+ifneq ($(findstring darwin,$(shell $(CC) -dumpmachine)),)
 	SOEXT = dylib
 	SOEXTVER_MAJOR = $(SONAME_MAJOR).$(SOEXT)
 	SOEXTVER = $(SONAME_MAJOR).$(SONAME_MINOR).$(SOEXT)
 	LINKSHARED += -dynamiclib -Wl,-install_name,$(LIBDIR)/libtree-sitter.$(SOEXTVER)
-else ifneq ($(findstring mingw32,$(MACHINE)),)
-	SOEXT = dll
-	LINKSHARED += -s -shared -Wl,--out-implib,libtree-sitter.dll.a
 else
 	SOEXT = so
 	SOEXTVER_MAJOR = $(SOEXT).$(SONAME_MAJOR)
 	SOEXTVER = $(SOEXT).$(SONAME_MAJOR).$(SONAME_MINOR)
 	LINKSHARED += -shared -Wl,-soname,libtree-sitter.$(SOEXTVER)
+endif
 ifneq ($(filter $(shell uname),FreeBSD NetBSD DragonFly),)
 	PCLIBDIR := $(PREFIX)/libdata/pkgconfig
 endif
-endif
 
 all: libtree-sitter.a libtree-sitter.$(SOEXT) tree-sitter.pc
@@ -63,10 +61,6 @@ ifneq ($(STRIP),)
 	$(STRIP) $@
 endif
 
-ifneq ($(findstring mingw32,$(MACHINE)),)
-libtree-sitter.dll.a: libtree-sitter.$(SOEXT)
-endif
-
 tree-sitter.pc: lib/tree-sitter.pc.in
 	sed -e 's|@PROJECT_VERSION@|$(VERSION)|' \
 		-e 's|@CMAKE_INSTALL_LIBDIR@|$(LIBDIR:$(PREFIX)/%=%)|' \
@@ -75,27 +69,17 @@ tree-sitter.pc: lib/tree-sitter.pc.in
 		-e 's|@PROJECT_HOMEPAGE_URL@|$(HOMEPAGE_URL)|' \
 		-e 's|@CMAKE_INSTALL_PREFIX@|$(PREFIX)|' $< > $@
 
-shared: libtree-sitter.$(SOEXT)
-
-static: libtree-sitter.a
-
 clean:
-	$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT) libtree-stitter.dll.a
+	$(RM) $(OBJ) tree-sitter.pc libtree-sitter.a libtree-sitter.$(SOEXT)
 
 install: all
 	install -d '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter '$(DESTDIR)$(PCLIBDIR)' '$(DESTDIR)$(LIBDIR)'
 	install -m644 lib/include/tree_sitter/api.h '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h
 	install -m644 tree-sitter.pc '$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
 	install -m644 libtree-sitter.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a
-ifneq ($(findstring mingw32,$(MACHINE)),)
-	install -d '$(DESTDIR)$(BINDIR)'
-	install -m755 libtree-sitter.dll '$(DESTDIR)$(BINDIR)'/libtree-sitter.dll
-	install -m755 libtree-sitter.dll.a '$(DESTDIR)$(LIBDIR)'/libtree-sitter.dll.a
-else
 	install -m755 libtree-sitter.$(SOEXT) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER)
-	cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER) libtree-sitter.$(SOEXTVER_MAJOR)
-	cd '$(DESTDIR)$(LIBDIR)' && ln -sf libtree-sitter.$(SOEXTVER_MAJOR) libtree-sitter.$(SOEXT)
-endif
+	ln -sf libtree-sitter.$(SOEXTVER) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXTVER_MAJOR)
+	ln -sf libtree-sitter.$(SOEXTVER_MAJOR) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT)
 
 uninstall:
 	$(RM) '$(DESTDIR)$(LIBDIR)'/libtree-sitter.a \
@@ -104,9 +88,8 @@ uninstall:
 		'$(DESTDIR)$(LIBDIR)'/libtree-sitter.$(SOEXT) \
 		'$(DESTDIR)$(INCLUDEDIR)'/tree_sitter/api.h \
 		'$(DESTDIR)$(PCLIBDIR)'/tree-sitter.pc
-	rmdir '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter
 
-.PHONY: all shared static install uninstall clean
+.PHONY: all install uninstall clean
 
 ##### Dev targets #####
Package.swift

@@ -27,8 +27,6 @@ let package = Package(
             .headerSearchPath("src"),
             .define("_POSIX_C_SOURCE", to: "200112L"),
             .define("_DEFAULT_SOURCE"),
-            .define("_BSD_SOURCE"),
-            .define("_DARWIN_C_SOURCE"),
         ]),
     ],
     cLanguageStandard: .c11
README.md

@@ -14,8 +14,8 @@ Tree-sitter is a parser generator tool and an incremental parsing library. It ca
 ## Links
 - [Documentation](https://tree-sitter.github.io)
 - [Rust binding](lib/binding_rust/README.md)
-- [Wasm binding](lib/binding_web/README.md)
-- [Command-line interface](crates/cli/README.md)
+- [WASM binding](lib/binding_web/README.md)
+- [Command-line interface](cli/README.md)
 
 [discord]: https://img.shields.io/discord/1063097320771698699?logo=discord&label=discord
 [matrix]: https://img.shields.io/matrix/tree-sitter-chat%3Amatrix.org?logo=matrix&label=matrix
build.zig

@@ -40,8 +40,6 @@ pub fn build(b: *std.Build) !void {
 
     lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L");
     lib.root_module.addCMacro("_DEFAULT_SOURCE", "");
-    lib.root_module.addCMacro("_BSD_SOURCE", "");
-    lib.root_module.addCMacro("_DARWIN_C_SOURCE", "");
 
     if (wasm) {
         if (b.lazyDependency(wasmtimeDep(target.result), .{})) |wasmtime| {
build.zig.zon

@@ -1,7 +1,7 @@
 .{
     .name = .tree_sitter,
     .fingerprint = 0x841224b447ac0d4f,
-    .version = "0.27.0",
+    .version = "0.25.9",
     .minimum_zig_version = "0.14.1",
     .paths = .{
         "build.zig",
@@ -13,83 +13,63 @@
     },
     .dependencies = .{
         .wasmtime_c_api_aarch64_android = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-android-c-api.tar.xz",
-            .hash = "N-V-__8AAIfPIgdw2YnV3QyiFQ2NHdrxrXzzCdjYJyxJDOta",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-android-c-api.tar.xz",
+            .hash = "N-V-__8AAC3KCQZMd5ea2CkcbjldaVqCT7BT_9_rLMId6V__",
             .lazy = true,
         },
         .wasmtime_c_api_aarch64_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAIt97QZi7Pf7nNJ2mVY6uxA80Klyuvvtop3pLMRK",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAGUY3gU6jj2CNJAYb7HiMNVPV1FIcTCI6RSSYwXu",
             .lazy = true,
         },
         .wasmtime_c_api_aarch64_macos = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-macos-c-api.tar.xz",
-            .hash = "N-V-__8AAAO48QQf91w9RmmUDHTja8DrXZA1n6Bmc8waW3qe",
-            .lazy = true,
-        },
-        .wasmtime_c_api_aarch64_musl = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-musl-c-api.tar.xz",
-            .hash = "N-V-__8AAI196wa9pwADoA2RbCDp5F7bKQg1iOPq6gIh8-FH",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-macos-c-api.tar.xz",
+            .hash = "N-V-__8AAM1GMARD6LGQebhVsSZ0uePUoo3Fw5nEO2L764vf",
             .lazy = true,
         },
         .wasmtime_c_api_aarch64_windows = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-aarch64-windows-c-api.zip",
-            .hash = "N-V-__8AAC9u4wXfqd1Q6XyQaC8_DbQZClXux60Vu5743N05",
-            .lazy = true,
-        },
-        .wasmtime_c_api_armv7_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-armv7-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAHXe8gWs3s83Cc5G6SIq0_jWxj8fGTT5xG4vb6-x",
-            .lazy = true,
-        },
-        .wasmtime_c_api_i686_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAN2pzgUUfulRCYnipSfis9IIYHoTHVlieLRmKuct",
-            .lazy = true,
-        },
-        .wasmtime_c_api_i686_windows = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-i686-windows-c-api.zip",
-            .hash = "N-V-__8AAJu0YAUUTFBLxFIOi-MSQVezA6MMkpoFtuaf2Quf",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-windows-c-api.zip",
+            .hash = "N-V-__8AAH8a_wQ7oAeVVsaJcoOZhKTMkHIBc_XjDyLlHp2x",
             .lazy = true,
         },
         .wasmtime_c_api_riscv64gc_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-riscv64gc-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAG8m-gc3E3AIImtTZ3l1c7HC6HUWazQ9OH5KACX4",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-riscv64gc-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAN2cuQadBwMc8zJxv0sMY99Ae1Nc1dZcZAK9b4DZ",
             .lazy = true,
         },
         .wasmtime_c_api_s390x_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-s390x-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAH314gd-gE4IBp2uvAL3gHeuW1uUZjMiLLeUdXL_",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-s390x-linux-c-api.tar.xz",
+            .hash = "N-V-__8AAPevngYz99mwT0KQY9my2ax1p6APzgLEJeV4II9U",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_android = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-android-c-api.tar.xz",
-            .hash = "N-V-__8AAIPNRwfNkznebrcGb0IKUe7f35bkuZEYOjcx6q3f",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-android-c-api.tar.xz",
+            .hash = "N-V-__8AABHIEgaTyzPfjgnnCy0dwJiXoDiJFblCkYOJsQvy",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_linux = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-linux-c-api.tar.xz",
-            .hash = "N-V-__8AAI8EDwcyTtk_Afhk47SEaqfpoRqGkJeZpGs69ChF",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-linux-c-api.tar.xz",
+            .hash = "N-V-__8AALUN5AWSEDRulL9u-OJJ-l0_GoT5UFDtGWZayEIq",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_macos = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-macos-c-api.tar.xz",
-            .hash = "N-V-__8AAGtGNgVaOpHSxC22IjrampbRIy6lLwscdcAE8nG1",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-macos-c-api.tar.xz",
+            .hash = "N-V-__8AANUeXwSPh13TqJCSSFdi87GEcHs8zK6FqE4v_TjB",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_mingw = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-mingw-c-api.zip",
-            .hash = "N-V-__8AAPS2PAbVix50L6lnddlgazCPTz3whLUFk1qnRtnZ",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-mingw-c-api.zip",
+            .hash = "N-V-__8AALundgW-p1ffOnd7bsYyL8SY5OziDUZu7cXio2EL",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_musl = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-musl-c-api.tar.xz",
-            .hash = "N-V-__8AAF-WEQe0nzvi09PgusM5i46FIuCKJmIDWUleWgQ3",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-musl-c-api.tar.xz",
+            .hash = "N-V-__8AALMZ5wXJWW5qY-3MMjTAYR0MusckvzCsmg-69ALH",
             .lazy = true,
         },
         .wasmtime_c_api_x86_64_windows = .{
-            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v33.0.2/wasmtime-v33.0.2-x86_64-windows-c-api.zip",
-            .hash = "N-V-__8AAKGNXwbpJQsn0_6kwSIVDDWifSg8cBzf7T2RzsC9",
+            .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-windows-c-api.zip",
+            .hash = "N-V-__8AAG-uVQVEDMsB1ymJzxpHcoiXo1_I3TFnPM5Zjy1i",
             .lazy = true,
         },
     },
cli/Cargo.toml

@@ -8,18 +8,14 @@ rust-version.workspace = true
 readme = "README.md"
 homepage.workspace = true
 repository.workspace = true
-documentation = "https://docs.rs/tree-sitter-cli"
 license.workspace = true
 keywords.workspace = true
 categories.workspace = true
-include = ["build.rs", "README.md", "LICENSE", "benches/*", "src/**"]
+include = ["build.rs", "README.md", "benches/*", "src/**"]
 
 [lints]
 workspace = true
 
-[lib]
-path = "src/tree_sitter_cli.rs"
-
 [[bin]]
 name = "tree-sitter"
 path = "src/main.rs"
@@ -30,9 +26,7 @@ name = "benchmark"
 harness = false
 
 [features]
-default = ["qjs-rt"]
 wasm = ["tree-sitter/wasm", "tree-sitter-loader/wasm"]
-qjs-rt = ["tree-sitter-generate/qjs-rt"]
 
 [dependencies]
 ansi_colours.workspace = true
@@ -42,26 +36,31 @@ bstr.workspace = true
 clap.workspace = true
 clap_complete.workspace = true
 clap_complete_nushell.workspace = true
-crc32fast.workspace = true
 ctor.workspace = true
 ctrlc.workspace = true
 dialoguer.workspace = true
+filetime.workspace = true
 glob.workspace = true
 heck.workspace = true
 html-escape.workspace = true
+indexmap.workspace = true
 indoc.workspace = true
 log.workspace = true
 memchr.workspace = true
 rand.workspace = true
 regex.workspace = true
-schemars.workspace = true
+regex-syntax.workspace = true
+rustc-hash.workspace = true
 semver.workspace = true
 serde.workspace = true
+serde_derive.workspace = true
 serde_json.workspace = true
 similar.workspace = true
+smallbitvec.workspace = true
 streaming-iterator.workspace = true
-thiserror.workspace = true
 tiny_http.workspace = true
+topological-sort.workspace = true
+url.workspace = true
 walkdir.workspace = true
 wasmparser.workspace = true
 webbrowser.workspace = true
@@ -75,7 +74,7 @@ tree-sitter-tags.workspace = true
 
 [dev-dependencies]
 encoding_rs = "0.8.35"
-widestring = "1.2.1"
+widestring = "1.1.0"
 tree_sitter_proc_macro = { path = "src/tests/proc_macro", package = "tree-sitter-tests-proc-macro" }
 
 tempfile.workspace = true
cli/README.md

@@ -7,8 +7,7 @@
 [npmjs.com]: https://www.npmjs.org/package/tree-sitter-cli
 [npmjs.com badge]: https://img.shields.io/npm/v/tree-sitter-cli.svg?color=%23BF4A4A
 
-The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`,
-`Linux`, and `Windows`.
+The Tree-sitter CLI allows you to develop, test, and use Tree-sitter grammars from the command line. It works on `MacOS`, `Linux`, and `Windows`.
 
 ### Installation
 
@@ -35,11 +34,9 @@ The `tree-sitter` binary itself has no dependencies, but specific commands have
 
 ### Commands
 
-* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current
-  working directory. See [the documentation] for more information.
+* `generate` - The `tree-sitter generate` command will generate a Tree-sitter parser based on the grammar in the current working directory. See [the documentation] for more information.
 
-* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory.
-  See [the documentation] for more information.
+* `test` - The `tree-sitter test` command will run the unit tests for the Tree-sitter parser in the current working directory. See [the documentation] for more information.
 
 * `parse` - The `tree-sitter parse` command will parse a file (or list of files) using Tree-sitter parsers.
cli/benches/benchmark.rs

@@ -8,7 +8,6 @@ use std::{
 };
 
 use anyhow::Context;
-use log::info;
 use tree_sitter::{Language, Parser, Query};
 use tree_sitter_loader::{CompileConfig, Loader};
 
@@ -72,8 +71,6 @@ static EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR: LazyLock<
 });
 
 fn main() {
-    tree_sitter_cli::logger::init();
-
     let max_path_length = EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR
         .values()
         .flat_map(|(e, q)| {
@@ -84,7 +81,7 @@ fn main() {
         .max()
         .unwrap_or(0);
 
-    info!("Benchmarking with {} repetitions", *REPETITION_COUNT);
+    eprintln!("Benchmarking with {} repetitions", *REPETITION_COUNT);
 
     let mut parser = Parser::new();
     let mut all_normal_speeds = Vec::new();
@@ -101,11 +98,11 @@ fn main() {
             }
        }
 
-        info!("\nLanguage: {language_name}");
+        eprintln!("\nLanguage: {language_name}");
         let language = get_language(language_path);
         parser.set_language(&language).unwrap();
 
-        info!(" Constructing Queries");
+        eprintln!(" Constructing Queries");
         for path in query_paths {
             if let Some(filter) = EXAMPLE_FILTER.as_ref() {
                 if !path.to_str().unwrap().contains(filter.as_str()) {
@@ -120,7 +117,7 @@ fn main() {
             });
         }
 
-        info!(" Parsing Valid Code:");
+        eprintln!(" Parsing Valid Code:");
         let mut normal_speeds = Vec::new();
         for example_path in example_paths {
             if let Some(filter) = EXAMPLE_FILTER.as_ref() {
@@ -134,7 +131,7 @@ fn main() {
             }));
         }
 
-        info!(" Parsing Invalid Code (mismatched languages):");
+        eprintln!(" Parsing Invalid Code (mismatched languages):");
         let mut error_speeds = Vec::new();
         for (other_language_path, (example_paths, _)) in
             EXAMPLE_AND_QUERY_PATHS_BY_LANGUAGE_DIR.iter()
@@ -155,30 +152,30 @@ fn main() {
         }
 
         if let Some((average_normal, worst_normal)) = aggregate(&normal_speeds) {
-            info!(" Average Speed (normal): {average_normal} bytes/ms");
-            info!(" Worst Speed (normal): {worst_normal} bytes/ms");
+            eprintln!(" Average Speed (normal): {average_normal} bytes/ms");
+            eprintln!(" Worst Speed (normal): {worst_normal} bytes/ms");
         }
 
         if let Some((average_error, worst_error)) = aggregate(&error_speeds) {
-            info!(" Average Speed (errors): {average_error} bytes/ms");
-            info!(" Worst Speed (errors): {worst_error} bytes/ms");
        }
 
+            eprintln!(" Average Speed (errors): {average_error} bytes/ms");
+            eprintln!(" Worst Speed (errors): {worst_error} bytes/ms");
+        }
+
         all_normal_speeds.extend(normal_speeds);
         all_error_speeds.extend(error_speeds);
     }
 
-    info!("\n Overall");
+    eprintln!("\n Overall");
     if let Some((average_normal, worst_normal)) = aggregate(&all_normal_speeds) {
-        info!(" Average Speed (normal): {average_normal} bytes/ms");
-        info!(" Worst Speed (normal): {worst_normal} bytes/ms");
+        eprintln!(" Average Speed (normal): {average_normal} bytes/ms");
+        eprintln!(" Worst Speed (normal): {worst_normal} bytes/ms");
    }
 
     if let Some((average_error, worst_error)) = aggregate(&all_error_speeds) {
-        info!(" Average Speed (errors): {average_error} bytes/ms");
-        info!(" Worst Speed (errors): {worst_error} bytes/ms");
+        eprintln!(" Average Speed (errors): {average_error} bytes/ms");
+        eprintln!(" Worst Speed (errors): {worst_error} bytes/ms");
     }
-    info!("");
+    eprintln!();
 }
 
 fn aggregate(speeds: &[usize]) -> Option<(usize, usize)> {
@@ -197,6 +194,12 @@ fn aggregate(speeds: &[usize]) -> Option<(usize, usize)> {
 }
 
 fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) -> usize {
+    eprint!(
+        " {:width$}\t",
+        path.file_name().unwrap().to_str().unwrap(),
+        width = max_path_length
+    );
+
     let source_code = fs::read(path)
         .with_context(|| format!("Failed to read {}", path.display()))
         .unwrap();
@@ -207,9 +210,8 @@ fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) ->
     let duration = time.elapsed() / (*REPETITION_COUNT as u32);
     let duration_ns = duration.as_nanos();
     let speed = ((source_code.len() as u128) * 1_000_000) / duration_ns;
-    info!(
-        " {:max_path_length$}\ttime {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
-        path.file_name().unwrap().to_str().unwrap(),
+    eprintln!(
+        "time {:>7.2} ms\t\tspeed {speed:>6} bytes/ms",
         (duration_ns as f64) / 1e6,
     );
     speed as usize
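
The hunk above only changes how the per-file results are printed; the throughput arithmetic itself is unchanged on both sides. For readers skimming the diff, here is a minimal, self-contained Rust sketch of that computation. This is illustrative code, not code from the repository: `measure_speed` and the dummy workload are invented names.

```rust
use std::time::Instant;

/// Reports throughput in bytes/ms using the same arithmetic as the benchmark
/// above: average the elapsed time over N repetitions, then multiply bytes by
/// 1_000_000 so that dividing by a nanosecond count yields bytes per millisecond.
fn measure_speed(source: &[u8], repetitions: u32, mut parse_once: impl FnMut(&[u8])) -> u128 {
    let start = Instant::now();
    for _ in 0..repetitions {
        parse_once(source);
    }
    // `.max(1)` guards the division for workloads too fast to measure.
    let duration_ns = (start.elapsed() / repetitions).as_nanos().max(1);
    // bytes / ns * 1_000_000 == bytes / ms
    (source.len() as u128) * 1_000_000 / duration_ns
}

fn main() {
    let data = vec![b'a'; 1 << 20];
    let speed = measure_speed(&data, 10, |bytes| {
        // Stand-in for Parser::parse; any work over the input goes here.
        std::hint::black_box(bytes.iter().map(|&b| b as u64).sum::<u64>());
    });
    eprintln!("speed {speed} bytes/ms");
}
```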
cli/build.rs

@@ -52,9 +52,9 @@ fn main() {
 
 fn web_playground_files_present() -> bool {
     let paths = [
-        "../../docs/src/assets/js/playground.js",
-        "../../lib/binding_web/web-tree-sitter.js",
-        "../../lib/binding_web/web-tree-sitter.wasm",
+        "../docs/src/assets/js/playground.js",
+        "../lib/binding_web/tree-sitter.js",
+        "../lib/binding_web/tree-sitter.wasm",
     ];
 
     paths.iter().all(|p| Path::new(p).exists())
cli/config/Cargo.toml

@@ -8,20 +8,15 @@ rust-version.workspace = true
 readme = "README.md"
 homepage.workspace = true
 repository.workspace = true
-documentation = "https://docs.rs/tree-sitter-config"
 license.workspace = true
 keywords.workspace = true
 categories.workspace = true
 
-[lib]
-path = "src/tree_sitter_config.rs"
-
 [lints]
 workspace = true
 
 [dependencies]
+anyhow.workspace = true
 etcetera.workspace = true
-log.workspace = true
 serde.workspace = true
 serde_json.workspace = true
-thiserror.workspace = true
cli/config/src/lib.rs

@@ -1,54 +1,11 @@
-#![cfg_attr(not(any(test, doctest)), doc = include_str!("../README.md"))]
+#![doc = include_str!("../README.md")]
 
-use std::{
-    env, fs,
-    path::{Path, PathBuf},
-};
+use std::{env, fs, path::PathBuf};
 
+use anyhow::{Context, Result};
 use etcetera::BaseStrategy as _;
-use log::warn;
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
-use thiserror::Error;
-
-pub type ConfigResult<T> = Result<T, ConfigError>;
-
-#[derive(Debug, Error)]
-pub enum ConfigError {
-    #[error("Bad JSON config {0} -- {1}")]
-    ConfigRead(String, serde_json::Error),
-    #[error(transparent)]
-    HomeDir(#[from] etcetera::HomeDirError),
-    #[error(transparent)]
-    IO(IoError),
-    #[error(transparent)]
-    Serialization(#[from] serde_json::Error),
-}
-
-#[derive(Debug, Error)]
-pub struct IoError {
-    pub error: std::io::Error,
-    pub path: Option<String>,
-}
-
-impl IoError {
-    fn new(error: std::io::Error, path: Option<&Path>) -> Self {
-        Self {
-            error,
-            path: path.map(|p| p.to_string_lossy().to_string()),
-        }
-    }
-}
-
-impl std::fmt::Display for IoError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.error)?;
-        if let Some(ref path) = self.path {
-            write!(f, " ({path})")?;
-        }
-        Ok(())
-    }
-}
 
 /// Holds the contents of tree-sitter's configuration file.
 ///
@@ -65,7 +22,7 @@ pub struct Config {
 }
 
 impl Config {
-    pub fn find_config_file() -> ConfigResult<Option<PathBuf>> {
+    pub fn find_config_file() -> Result<Option<PathBuf>> {
         if let Ok(path) = env::var("TREE_SITTER_DIR") {
             let mut path = PathBuf::from(path);
             path.push("config.json");
@@ -88,14 +45,10 @@ impl Config {
             .join("tree-sitter")
             .join("config.json");
         if legacy_apple_path.is_file() {
-            let xdg_dir = xdg_path.parent().unwrap();
-            fs::create_dir_all(xdg_dir)
-                .map_err(|e| ConfigError::IO(IoError::new(e, Some(xdg_dir))))?;
-            fs::rename(&legacy_apple_path, &xdg_path).map_err(|e| {
-                ConfigError::IO(IoError::new(e, Some(legacy_apple_path.as_path())))
-            })?;
-            warn!(
-                "Your config.json file has been automatically migrated from \"{}\" to \"{}\"",
+            fs::create_dir_all(xdg_path.parent().unwrap())?;
+            fs::rename(&legacy_apple_path, &xdg_path)?;
+            println!(
+                "Warning: your config.json file has been automatically migrated from \"{}\" to \"{}\"",
                 legacy_apple_path.display(),
                 xdg_path.display()
             );
@@ -113,7 +66,7 @@ impl Config {
         Ok(None)
     }
 
-    fn xdg_config_file() -> ConfigResult<PathBuf> {
+    fn xdg_config_file() -> Result<PathBuf> {
         let xdg_path = etcetera::choose_base_strategy()?
             .config_dir()
             .join("tree-sitter")
@@ -130,7 +83,7 @@ impl Config {
     /// [`etcetera::choose_base_strategy`](https://docs.rs/etcetera/*/etcetera/#basestrategy)
     /// - `$HOME/.tree-sitter/config.json` as a fallback from where tree-sitter _used_ to store
     /// its configuration
-    pub fn load(path: Option<PathBuf>) -> ConfigResult<Self> {
+    pub fn load(path: Option<PathBuf>) -> Result<Self> {
         let location = if let Some(path) = path {
             path
         } else if let Some(path) = Self::find_config_file()? {
@@ -140,9 +93,9 @@ impl Config {
         };
 
         let content = fs::read_to_string(&location)
-            .map_err(|e| ConfigError::IO(IoError::new(e, Some(location.as_path()))))?;
+            .with_context(|| format!("Failed to read {}", location.to_string_lossy()))?;
         let config = serde_json::from_str(&content)
-            .map_err(|e| ConfigError::ConfigRead(location.to_string_lossy().to_string(), e))?;
+            .with_context(|| format!("Bad JSON config {}", location.to_string_lossy()))?;
         Ok(Self { location, config })
     }
 
@@ -152,7 +105,7 @@ impl Config {
     /// disk.
     ///
     /// (Note that this is typically only done by the `tree-sitter init-config` command.)
-    pub fn initial() -> ConfigResult<Self> {
+    pub fn initial() -> Result<Self> {
         let location = if let Ok(path) = env::var("TREE_SITTER_DIR") {
             let mut path = PathBuf::from(path);
             path.push("config.json");
@@ -165,20 +118,17 @@ impl Config {
     }
 
     /// Saves this configuration to the file that it was originally loaded from.
-    pub fn save(&self) -> ConfigResult<()> {
+    pub fn save(&self) -> Result<()> {
         let json = serde_json::to_string_pretty(&self.config)?;
-        let config_dir = self.location.parent().unwrap();
-        fs::create_dir_all(config_dir)
-            .map_err(|e| ConfigError::IO(IoError::new(e, Some(config_dir))))?;
-        fs::write(&self.location, json)
-            .map_err(|e| ConfigError::IO(IoError::new(e, Some(self.location.as_path()))))?;
+        fs::create_dir_all(self.location.parent().unwrap())?;
+        fs::write(&self.location, json)?;
         Ok(())
     }
 
     /// Parses a component-specific configuration from the configuration file. The type `C` must
     /// be [deserializable](https://docs.rs/serde/*/serde/trait.Deserialize.html) from a JSON
     /// object, and must only include the fields relevant to that component.
-    pub fn get<C>(&self) -> ConfigResult<C>
+    pub fn get<C>(&self) -> Result<C>
     where
         C: for<'de> Deserialize<'de>,
     {
@@ -189,7 +139,7 @@ impl Config {
     /// Adds a component-specific configuration to the configuration file. The type `C` must be
     /// [serializable](https://docs.rs/serde/*/serde/trait.Serialize.html) into a JSON object, and
     /// must only include the fields relevant to that component.
-    pub fn add<C>(&mut self, config: C) -> ConfigResult<()>
+    pub fn add<C>(&mut self, config: C) -> Result<()>
    where
         C: Serialize,
     {
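
The base side of this file replaces the crate's `anyhow`-based results with the typed `ConfigError`/`IoError` shown above. As a condensed sketch of that `thiserror` pattern, reduced to two variants (the `load` function and variant shapes here are simplified illustrations, not the crate's exact API):

```rust
use thiserror::Error;

// Each variant carries its own context, and the `#[source]` annotations
// preserve the underlying error for callers that walk the error chain.
#[derive(Debug, Error)]
pub enum ConfigError {
    #[error("Bad JSON config {0} -- {1}")]
    ConfigRead(String, #[source] serde_json::Error),
    #[error("I/O error ({path})")]
    Io {
        #[source]
        source: std::io::Error,
        path: String,
    },
}

fn load(path: &str) -> Result<serde_json::Value, ConfigError> {
    let text = std::fs::read_to_string(path).map_err(|source| ConfigError::Io {
        source,
        path: path.to_string(),
    })?;
    serde_json::from_str(&text).map_err(|e| ConfigError::ConfigRead(path.to_string(), e))
}

fn main() {
    if let Err(e) = load("config.json") {
        eprintln!("{e}");
    }
}
```

The trade-off mirrors the two sides of the diff: `anyhow` keeps call sites short and attaches ad-hoc context strings, while a `thiserror` enum gives downstream crates variants they can match on.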
package-lock.json

@@ -305,9 +305,9 @@
       "peer": true
     },
     "node_modules/brace-expansion": {
-      "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
+      "version": "1.1.11",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
+      "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
       "license": "MIT",
       "peer": true,
       "dependencies": {
@@ -805,9 +805,9 @@
       "peer": true
     },
     "node_modules/js-yaml": {
-      "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz",
-      "integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==",
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
       "license": "MIT",
       "peer": true,
       "dependencies": {

package.json

@@ -21,9 +21,5 @@
   },
   "peerDependencies": {
     "eslint": ">= 9"
-  },
-  "scripts": {
-    "prepack": "cp ../../../LICENSE .",
-    "postpack": "rm LICENSE"
   }
 }
cli/generate/Cargo.toml

@@ -8,44 +8,30 @@ rust-version.workspace = true
 readme = "README.md"
 homepage.workspace = true
 repository.workspace = true
-documentation = "https://docs.rs/tree-sitter-generate"
 license.workspace = true
 keywords.workspace = true
 categories.workspace = true
 
-[lib]
-path = "src/generate.rs"
-
 [lints]
 workspace = true
 
-[features]
-default = ["qjs-rt"]
-load = ["dep:semver"]
-qjs-rt = ["load", "rquickjs", "pathdiff"]
-
 [dependencies]
-bitflags = "2.9.4"
-dunce = "1.0.5"
+anyhow.workspace = true
+heck.workspace = true
 indexmap.workspace = true
 indoc.workspace = true
 log.workspace = true
-pathdiff = { version = "0.2.3", optional = true }
 regex.workspace = true
 regex-syntax.workspace = true
-rquickjs = { version = "0.11.0", optional = true, features = [
-    "bindgen",
-    "loader",
-    "macro",
-    "phf",
-] }
 rustc-hash.workspace = true
-semver = { workspace = true, optional = true }
+semver.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 smallbitvec.workspace = true
 thiserror.workspace = true
 topological-sort.workspace = true
+tree-sitter.workspace = true
 
-[dev-dependencies]
-tempfile.workspace = true
+[target.'cfg(windows)'.dependencies]
+url.workspace = true
cli/generate/src/build_tables/build_lex_table.rs

@@ -3,7 +3,7 @@ use std::{
     mem,
 };
 
-use log::debug;
+use log::info;
 
 use super::{coincident_tokens::CoincidentTokenIndex, token_conflicts::TokenConflictMap};
 use crate::{
@@ -176,7 +176,7 @@ impl<'a> LexTableBuilder<'a> {
         let (state_id, is_new) = self.add_state(nfa_states, eof_valid);
 
         if is_new {
-            debug!(
+            info!(
                 "entry point state: {state_id}, tokens: {:?}",
                 tokens
                     .iter()
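
Both sides of this hunk log through the `log` facade; the change only moves the message from `debug!` to `info!`, which matters because logger setups filter by level. A minimal stderr logger makes the effect concrete. This is an illustrative sketch only: the repository's actual `tree_sitter_cli::logger::init` (referenced elsewhere in this compare) is not shown in this diff and may differ.

```rust
use log::{Level, LevelFilter, Log, Metadata, Record};

// A bare-bones logger for the `log` facade that writes to stderr.
struct StderrLogger;

impl Log for StderrLogger {
    fn enabled(&self, metadata: &Metadata) -> bool {
        // Error < Warn < Info < Debug < Trace, so this admits Info and above.
        metadata.level() <= Level::Info
    }

    fn log(&self, record: &Record) {
        if self.enabled(record.metadata()) {
            eprintln!("[{}] {}", record.level(), record.args());
        }
    }

    fn flush(&self) {}
}

static LOGGER: StderrLogger = StderrLogger;

fn main() {
    log::set_logger(&LOGGER).unwrap();
    log::set_max_level(LevelFilter::Info);
    // With max level Info, `info!` is emitted and `debug!` is filtered out,
    // which is why the debug!-to-info! change above affects what users see.
    log::info!("visible");
    log::debug!("suppressed");
}
```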
cli/generate/src/build_tables/build_parse_table.rs

@@ -5,7 +5,6 @@ use std::{
 };
 
 use indexmap::{map::Entry, IndexMap};
-use log::warn;
 use rustc_hash::FxHasher;
 use serde::Serialize;
 use thiserror::Error;
@@ -77,11 +76,9 @@ pub enum ParseTableBuilderError {
         "The non-terminal rule `{0}` is used in a non-terminal `extra` rule, which is not allowed."
     )]
     ImproperNonTerminalExtra(String),
-    #[error("State count `{0}` exceeds the max value {max}.", max=u16::MAX)]
-    StateCount(usize),
 }
 
-#[derive(Default, Debug, Serialize, Error)]
+#[derive(Default, Debug, Serialize)]
 pub struct ConflictError {
     pub symbol_sequence: Vec<String>,
     pub conflicting_lookahead: String,
@@ -89,7 +86,7 @@ pub struct ConflictError {
     pub possible_resolutions: Vec<Resolution>,
 }
 
-#[derive(Default, Debug, Serialize, Error)]
+#[derive(Default, Debug, Serialize)]
 pub struct Interpretation {
     pub preceding_symbols: Vec<String>,
     pub variable_name: String,
@@ -108,7 +105,7 @@ pub enum Resolution {
     AddConflict { symbols: Vec<String> },
 }
 
-#[derive(Debug, Serialize, Error)]
+#[derive(Debug, Serialize)]
 pub struct AmbiguousExtraError {
     pub parent_symbols: Vec<String>,
 }
@@ -238,6 +235,9 @@ impl std::fmt::Display for AmbiguousExtraError {
     }
 }
 
+impl std::error::Error for ConflictError {}
+impl std::error::Error for AmbiguousExtraError {}
+
 impl<'a> ParseTableBuilder<'a> {
     fn new(
         syntax_grammar: &'a SyntaxGrammar,
@@ -346,21 +346,17 @@ impl<'a> ParseTableBuilder<'a> {
         }
 
         if !self.actual_conflicts.is_empty() {
-            warn!(
-                "unnecessary conflicts:\n {}",
-                &self
-                    .actual_conflicts
-                    .iter()
-                    .map(|conflict| {
-                        conflict
-                            .iter()
-                            .map(|symbol| format!("`{}`", self.symbol_name(symbol)))
-                            .collect::<Vec<_>>()
-                            .join(", ")
-                    })
-                    .collect::<Vec<_>>()
-                    .join("\n ")
-            );
+            println!("Warning: unnecessary conflicts");
+            for conflict in &self.actual_conflicts {
+                println!(
+                    " {}",
+                    conflict
+                        .iter()
+                        .map(|symbol| format!("`{}`", self.symbol_name(symbol)))
+                        .collect::<Vec<_>>()
+                        .join(", ")
+                );
+            }
         }
 
         Ok((self.parse_table, self.parse_state_info_by_id))
@@ -860,9 +856,9 @@ impl<'a> ParseTableBuilder<'a> {
         for symbol in preceding_symbols {
             conflict_error
                 .symbol_sequence
-                .push(self.symbol_name(symbol));
+                .push(self.symbol_name(symbol).to_string());
         }
-        conflict_error.conflicting_lookahead = self.symbol_name(&conflicting_lookahead);
+        conflict_error.conflicting_lookahead = self.symbol_name(&conflicting_lookahead).to_string();
 
         let interpretations = conflicting_items
             .iter()
@@ -870,7 +866,7 @@ impl<'a> ParseTableBuilder<'a> {
                 let preceding_symbols = preceding_symbols
                     .iter()
                     .take(preceding_symbols.len() - item.step_index as usize)
-                    .map(|symbol| self.symbol_name(symbol))
+                    .map(|symbol| self.symbol_name(symbol).to_string())
                     .collect::<Vec<_>>();
 
                 let variable_name = self.syntax_grammar.variables[item.variable_index as usize]
@@ -881,7 +877,7 @@ impl<'a> ParseTableBuilder<'a> {
                     .production
                     .steps
                     .iter()
-                    .map(|step| self.symbol_name(&step.symbol))
+                    .map(|step| self.symbol_name(&step.symbol).to_string())
                     .collect::<Vec<_>>();
 
                 let precedence = match item.precedence() {
@@ -897,7 +893,7 @@ impl<'a> ParseTableBuilder<'a> {
                     production_step_symbols,
                     step_index: item.step_index,
                     done: item.is_done(),
-                    conflicting_lookahead: self.symbol_name(&conflicting_lookahead),
+                    conflicting_lookahead: self.symbol_name(&conflicting_lookahead).to_string(),
                     precedence,
                     associativity,
                 }
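
This file's hunks swap `#[derive(..., Error)]` from thiserror for plain derives plus explicit `impl std::error::Error` blocks. The two routes are equivalent whenever the type already implements `Debug` and `Display`, since every method on the `Error` trait has a default. A small sketch with a pared-down `ConflictError` (the fields and message here are simplified for illustration):

```rust
use std::fmt;

#[derive(Debug, Default)]
pub struct ConflictError {
    pub symbol_sequence: Vec<String>,
    pub conflicting_lookahead: String,
}

impl fmt::Display for ConflictError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "conflict at `{}` after {:?}",
            self.conflicting_lookahead, self.symbol_sequence
        )
    }
}

// All of `Error`'s methods have defaults, so an empty impl block is enough;
// `#[derive(thiserror::Error)]` would generate essentially the same thing.
impl std::error::Error for ConflictError {}

fn main() {
    let err: Box<dyn std::error::Error> = Box::new(ConflictError::default());
    println!("{err}");
}
```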
cli/generate/src/build_tables/item.rs

@@ -204,7 +204,7 @@ impl fmt::Display for ParseItemDisplay<'_> {
             || step.reserved_word_set_id != ReservedWordSetId::default()
         {
             write!(f, " (")?;
-            if !step.precedence.is_none() {
+            if step.precedence.is_none() {
                 write!(f, " {}", step.precedence)?;
             }
             if let Some(associativity) = step.associativity {
cli/generate/src/build_tables/item_set_builder.rs

@@ -81,7 +81,7 @@ impl<'a> ParseItemSetBuilder<'a> {
                 .insert(symbol, ReservedWordSetId::default());
         }
 
-        // The FIRST set of a non-terminal `i` is the union of the FIRST sets
+        // The FIRST set of a non-terminal `i` is the union of the the FIRST sets
         // of all the symbols that appear at the beginnings of i's productions. Some
         // of these symbols may themselves be non-terminals, so this is a recursive
         // definition.
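
The comment touched by this hunk states the recursive definition of FIRST sets. As a reference for that definition, the standard fixpoint computation looks like the following. This is an illustrative sketch with a toy grammar representation (`Sym`, `first_sets`, and the example grammar are all invented here), not the generate crate's implementation, and it omits epsilon productions for brevity.

```rust
use std::collections::{BTreeMap, BTreeSet};

#[derive(Clone, Copy, Debug)]
enum Sym {
    T(char),         // terminal
    N(&'static str), // non-terminal
}

// Iterate until no FIRST set grows: the FIRST set of a non-terminal is the
// union of the FIRST sets of the symbols that can begin its productions.
// The definition is recursive, so a single pass is not enough in general.
fn first_sets(
    grammar: &BTreeMap<&'static str, Vec<Vec<Sym>>>,
) -> BTreeMap<&'static str, BTreeSet<char>> {
    let mut first: BTreeMap<&'static str, BTreeSet<char>> =
        grammar.keys().map(|&nt| (nt, BTreeSet::new())).collect();
    let mut changed = true;
    while changed {
        changed = false;
        for (&nt, productions) in grammar {
            for production in productions {
                let additions: BTreeSet<char> = match production.first() {
                    Some(Sym::T(c)) => [*c].into(),
                    Some(Sym::N(other)) => first[other].clone(),
                    None => BTreeSet::new(),
                };
                let set = first.get_mut(nt).unwrap();
                let before = set.len();
                set.extend(additions);
                changed |= set.len() != before;
            }
        }
    }
    first
}

fn main() {
    // expr -> term '+' expr | term ; term -> '(' expr ')' | 'x'
    let grammar = BTreeMap::from([
        ("expr", vec![
            vec![Sym::N("term"), Sym::T('+'), Sym::N("expr")],
            vec![Sym::N("term")],
        ]),
        ("term", vec![
            vec![Sym::T('('), Sym::N("expr"), Sym::T(')')],
            vec![Sym::T('x')],
        ]),
    ]);
    // Both "expr" and "term" end up with FIRST = {'(', 'x'}.
    println!("{:?}", first_sets(&grammar));
}
```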
@ -3,7 +3,7 @@ use std::{
|
||||||
mem,
|
mem,
|
||||||
};
|
};
|
||||||
|
|
||||||
use log::debug;
|
use log::info;
|
||||||
|
|
||||||
use super::token_conflicts::TokenConflictMap;
|
use super::token_conflicts::TokenConflictMap;
|
||||||
use crate::{
|
use crate::{
|
||||||
|
|
@ -11,7 +11,6 @@ use crate::{
|
||||||
grammars::{LexicalGrammar, SyntaxGrammar, VariableType},
|
grammars::{LexicalGrammar, SyntaxGrammar, VariableType},
|
||||||
rules::{AliasMap, Symbol, TokenSet},
|
rules::{AliasMap, Symbol, TokenSet},
|
||||||
tables::{GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry},
|
tables::{GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry},
|
||||||
OptLevel,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
pub fn minimize_parse_table(
|
pub fn minimize_parse_table(
|
||||||
|
|
@ -21,7 +20,6 @@ pub fn minimize_parse_table(
|
||||||
simple_aliases: &AliasMap,
|
simple_aliases: &AliasMap,
|
||||||
token_conflict_map: &TokenConflictMap,
|
token_conflict_map: &TokenConflictMap,
|
||||||
keywords: &TokenSet,
|
keywords: &TokenSet,
|
||||||
optimizations: OptLevel,
|
|
||||||
) {
|
) {
|
||||||
let mut minimizer = Minimizer {
|
let mut minimizer = Minimizer {
|
||||||
parse_table,
|
parse_table,
|
||||||
|
|
@ -31,9 +29,7 @@ pub fn minimize_parse_table(
|
||||||
keywords,
|
keywords,
|
||||||
simple_aliases,
|
simple_aliases,
|
||||||
};
|
};
|
||||||
if optimizations.contains(OptLevel::MergeStates) {
|
minimizer.merge_compatible_states();
|
||||||
minimizer.merge_compatible_states();
|
|
||||||
}
|
|
||||||
minimizer.remove_unit_reductions();
|
minimizer.remove_unit_reductions();
|
||||||
minimizer.remove_unused_states();
|
minimizer.remove_unused_states();
|
||||||
minimizer.reorder_states_by_descending_size();
|
minimizer.reorder_states_by_descending_size();
|
||||||
|
|
@ -248,7 +244,7 @@ impl Minimizer<'_> {
|
||||||
let group1 = group_ids_by_state_id[*s1];
|
let group1 = group_ids_by_state_id[*s1];
|
||||||
let group2 = group_ids_by_state_id[*s2];
|
let group2 = group_ids_by_state_id[*s2];
|
||||||
if group1 != group2 {
|
if group1 != group2 {
|
||||||
debug!(
|
info!(
|
||||||
"split states {} {} - successors for {} are split: {s1} {s2}",
|
"split states {} {} - successors for {} are split: {s1} {s2}",
|
||||||
state1.id,
|
state1.id,
|
||||||
state2.id,
|
state2.id,
|
||||||
|
|
@ -269,7 +265,7 @@ impl Minimizer<'_> {
|
||||||
let group1 = group_ids_by_state_id[*s1];
|
let group1 = group_ids_by_state_id[*s1];
|
||||||
let group2 = group_ids_by_state_id[*s2];
|
let group2 = group_ids_by_state_id[*s2];
|
||||||
if group1 != group2 {
|
if group1 != group2 {
|
||||||
debug!(
|
info!(
|
||||||
"split states {} {} - successors for {} are split: {s1} {s2}",
|
"split states {} {} - successors for {} are split: {s1} {s2}",
|
||||||
state1.id,
|
state1.id,
|
||||||
state2.id,
|
state2.id,
|
||||||
|
|
@ -299,14 +295,16 @@ impl Minimizer<'_> {
|
||||||
let actions1 = &entry1.actions;
|
let actions1 = &entry1.actions;
|
||||||
let actions2 = &entry2.actions;
|
let actions2 = &entry2.actions;
|
||||||
if actions1.len() != actions2.len() {
|
if actions1.len() != actions2.len() {
|
||||||
debug!(
|
info!(
|
||||||
"split states {state_id1} {state_id2} - differing action counts for token {}",
|
"split states {state_id1} {state_id2} - differing action counts for token {}",
|
||||||
self.symbol_name(token)
|
self.symbol_name(token)
|
||||||
);
|
);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
for (action1, action2) in actions1.iter().zip(actions2.iter()) {
|
for (i, action1) in actions1.iter().enumerate() {
|
||||||
|
let action2 = &actions2[i];
|
||||||
|
|
||||||
// Two shift actions are equivalent if their destinations are in the same group.
|
// Two shift actions are equivalent if their destinations are in the same group.
|
||||||
if let (
|
if let (
|
||||||
ParseAction::Shift {
|
ParseAction::Shift {
|
||||||
|
|
@@ -324,13 +322,13 @@ impl Minimizer<'_> {
                    if group1 == group2 && is_repetition1 == is_repetition2 {
                        continue;
                    }
-                    debug!(
+                    info!(
                        "split states {state_id1} {state_id2} - successors for {} are split: {s1} {s2}",
                        self.symbol_name(token),
                    );
                    return true;
                } else if action1 != action2 {
-                    debug!(
+                    info!(
                        "split states {state_id1} {state_id2} - unequal actions for {}",
                        self.symbol_name(token),
                    );

@@ -349,14 +347,14 @@ impl Minimizer<'_> {
        new_token: Symbol,
    ) -> bool {
        if new_token == Symbol::end_of_nonterminal_extra() {
-            debug!("split states {left_id} {right_id} - end of non-terminal extra",);
+            info!("split states {left_id} {right_id} - end of non-terminal extra",);
            return true;
        }

        // Do not add external tokens; they could conflict lexically with any of the state's
        // existing lookahead tokens.
        if new_token.is_external() {
-            debug!(
+            info!(
                "split states {left_id} {right_id} - external token {}",
                self.symbol_name(&new_token),
            );

@@ -375,7 +373,7 @@ impl Minimizer<'_> {
            .iter()
            .any(|external| external.corresponding_internal_token == Some(new_token))
        {
-            debug!(
+            info!(
                "split states {left_id} {right_id} - internal/external token {}",
                self.symbol_name(&new_token),
            );

@@ -401,7 +399,7 @@ impl Minimizer<'_> {
            .token_conflict_map
            .does_match_same_string(new_token.index, token.index)
        {
-            debug!(
+            info!(
                "split states {} {} - token {} conflicts with {}",
                left_id,
                right_id,
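The hunks above only move messages between log levels. Under the `log` facade the macro chosen decides whether a statement survives the active filter; a minimal sketch of that behavior, assuming the `log` and `env_logger` crates as the backend (the diff itself only swaps which macro is invoked):

```rust
// With a level filter of "info", debug! lines are dropped and info! lines print.
use log::{debug, info};

fn main() {
    // Assumes env_logger; run with RUST_LOG=info to see the difference.
    env_logger::init();
    debug!("split states 12 14 - successors are split"); // filtered out at info level
    info!("split states 12 14 - successors are split");  // emitted at info level
}
```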
@@ -11,7 +11,7 @@ use std::collections::{BTreeSet, HashMap};
 pub use build_lex_table::LARGE_CHARACTER_RANGE_COUNT;
 use build_parse_table::BuildTableResult;
 pub use build_parse_table::ParseTableBuilderError;
-use log::{debug, info};
+use log::info;

 use self::{
    build_lex_table::build_lex_table,

@@ -27,7 +27,6 @@ use crate::{
    node_types::VariableInfo,
    rules::{AliasMap, Symbol, SymbolType, TokenSet},
    tables::{LexTable, ParseAction, ParseTable, ParseTableEntry},
-    OptLevel,
 };

 pub struct Tables {

@@ -44,7 +43,6 @@ pub fn build_tables(
    variable_info: &[VariableInfo],
    inlines: &InlinedProductionMap,
    report_symbol_name: Option<&str>,
-    optimizations: OptLevel,
 ) -> BuildTableResult<Tables> {
    let item_set_builder = ParseItemSetBuilder::new(syntax_grammar, lexical_grammar, inlines);
    let following_tokens =

@@ -80,7 +78,6 @@ pub fn build_tables(
        simple_aliases,
        &token_conflict_map,
        &keywords,
-        optimizations,
    );
    let lex_tables = build_lex_table(
        &mut parse_table,
@@ -103,10 +100,6 @@ pub fn build_tables(
        );
    }

-    if parse_table.states.len() > u16::MAX as usize {
-        Err(ParseTableBuilderError::StateCount(parse_table.states.len()))?;
-    }
-
    Ok(Tables {
        parse_table,
        main_lex_table: lex_tables.main_lex_table,
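The guard removed here rejects grammars whose tables cannot be addressed: generated parse tables store state ids in 16-bit integers, so anything past `u16::MAX` states is unrepresentable. A standalone sketch of the same check, with the error formatting simplified to a `String` since only the diffed code defines the real error type:

```rust
// Standalone sketch of the state-count guard above. The real code wraps this
// in ParseTableBuilderError::StateCount; the String error here is a stand-in.
fn check_state_count(states_len: usize) -> Result<(), String> {
    // State ids are 16-bit, so more than 65535 states cannot be indexed.
    if states_len > u16::MAX as usize {
        return Err(format!(
            "grammar produced {states_len} parse states, exceeding {}",
            u16::MAX
        ));
    }
    Ok(())
}
```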
@@ -179,7 +172,7 @@ fn populate_error_state(
            if conflicts_with_other_tokens {
                None
            } else {
-                debug!(
+                info!(
                    "error recovery - token {} has no conflicts",
                    lexical_grammar.variables[i].name
                );

@@ -205,14 +198,14 @@ fn populate_error_state(
                !coincident_token_index.contains(symbol, *t)
                    && token_conflict_map.does_conflict(symbol.index, t.index)
            }) {
-                debug!(
+                info!(
                    "error recovery - exclude token {} because of conflict with {}",
                    lexical_grammar.variables[i].name, lexical_grammar.variables[t.index].name
                );
                continue;
            }
        }
-        debug!(
+        info!(
            "error recovery - include token {}",
            lexical_grammar.variables[i].name
        );

@@ -345,7 +338,7 @@ fn identify_keywords(
            && token_conflict_map.does_match_same_string(i, word_token.index)
            && !token_conflict_map.does_match_different_string(i, word_token.index)
        {
-            debug!(
+            info!(
                "Keywords - add candidate {}",
                lexical_grammar.variables[i].name
            );

@@ -364,7 +357,7 @@ fn identify_keywords(
            if other_token != *token
                && token_conflict_map.does_match_same_string(other_token.index, token.index)
            {
-                debug!(
+                info!(
                    "Keywords - exclude {} because it matches the same string as {}",
                    lexical_grammar.variables[token.index].name,
                    lexical_grammar.variables[other_token.index].name

@@ -406,7 +399,7 @@ fn identify_keywords(
                    word_token.index,
                    other_index,
                ) {
-                    debug!(
+                    info!(
                        "Keywords - exclude {} because of conflict with {}",
                        lexical_grammar.variables[token.index].name,
                        lexical_grammar.variables[other_index].name

@@ -415,7 +408,7 @@ fn identify_keywords(
            }
        }

-        debug!(
+        info!(
            "Keywords - include {}",
            lexical_grammar.variables[token.index].name,
        );
@@ -487,14 +480,14 @@ fn report_state_info<'a>(
        .max()
        .unwrap();
    for (symbol, states) in &symbols_with_state_indices {
-        info!(
+        eprintln!(
            "{:width$}\t{}",
            syntax_grammar.variables[symbol.index].name,
            states.len(),
            width = max_symbol_name_length
        );
    }
-    info!("");
+    eprintln!();

    let state_indices = if report_symbol_name == "*" {
        Some(&all_state_indices)

@@ -517,25 +510,20 @@ fn report_state_info<'a>(
    for state_index in state_indices {
        let id = parse_table.states[state_index].id;
        let (preceding_symbols, item_set) = &parse_state_info[id];
-        info!("state index: {state_index}");
-        info!("state id: {id}");
-        info!(
-            "symbol sequence: {}",
-            preceding_symbols
-                .iter()
-                .map(|symbol| {
-                    if symbol.is_terminal() {
-                        lexical_grammar.variables[symbol.index].name.clone()
-                    } else if symbol.is_external() {
-                        syntax_grammar.external_tokens[symbol.index].name.clone()
-                    } else {
-                        syntax_grammar.variables[symbol.index].name.clone()
-                    }
-                })
-                .collect::<Vec<_>>()
-                .join(" ")
-        );
-        info!(
+        eprintln!("state index: {state_index}");
+        eprintln!("state id: {id}");
+        eprint!("symbol sequence:");
+        for symbol in preceding_symbols {
+            let name = if symbol.is_terminal() {
+                &lexical_grammar.variables[symbol.index].name
+            } else if symbol.is_external() {
+                &syntax_grammar.external_tokens[symbol.index].name
+            } else {
+                &syntax_grammar.variables[symbol.index].name
+            };
+            eprint!(" {name}");
+        }
+        eprintln!(
            "\nitems:\n{}",
            item::ParseItemSetDisplay(item_set, syntax_grammar, lexical_grammar),
        );
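Beyond the logging switch, the right-hand side of this hunk drops an allocation per state report: the iterator chain clones every symbol name into an intermediate `Vec<String>` before joining, while the loop borrows each name and streams it out. A small sketch contrasting the two shapes, with `names` standing in for the grammar's symbol names:

```rust
// Two ways to print a space-separated sequence of names.
fn print_joined(names: &[String]) {
    // Collect-and-join: clones every name and builds an intermediate Vec.
    eprintln!(
        "symbol sequence: {}",
        names.iter().cloned().collect::<Vec<_>>().join(" ")
    );
}

fn print_streaming(names: &[String]) {
    // Streaming loop: borrows each name, no intermediate allocations.
    eprint!("symbol sequence:");
    for name in names {
        eprint!(" {name}");
    }
    eprintln!();
}
```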
@@ -28,7 +28,7 @@ pub struct TokenConflictMap<'a> {

 impl<'a> TokenConflictMap<'a> {
    /// Create a token conflict map based on a lexical grammar, which describes the structure
-    /// of each token, and a `following_token` map, which indicates which tokens may be appear
+    /// each token, and a `following_token` map, which indicates which tokens may be appear
    /// immediately after each other token.
    ///
    /// This analyzes the possible kinds of overlap between each pair of tokens and stores
@@ -70,7 +70,7 @@ function prec(number, rule) {
  };
 }

-prec.left = function (number, rule) {
+prec.left = function(number, rule) {
  if (rule == null) {
    rule = number;
    number = 0;

@@ -92,7 +92,7 @@ prec.left = function (number, rule) {
  };
 }

-prec.right = function (number, rule) {
+prec.right = function(number, rule) {
  if (rule == null) {
    rule = number;
    number = 0;

@@ -114,7 +114,7 @@ prec.right = function (number, rule) {
  };
 }

-prec.dynamic = function (number, rule) {
+prec.dynamic = function(number, rule) {
  checkPrecedence(number);
  checkArguments(
    arguments,

@@ -184,7 +184,7 @@ function token(value) {
  };
 }

-token.immediate = function (value) {
+token.immediate = function(value) {
  checkArguments(arguments, arguments.length, token.immediate, 'token.immediate', '', 'literal');
  return {
    type: "IMMEDIATE_TOKEN",

@@ -517,7 +517,6 @@ function checkPrecedence(value) {
 }

 function getEnv(name) {
-  if (globalThis.native) return globalThis.__ts_grammar_path;
  if (globalThis.process) return process.env[name]; // Node/Bun
  if (globalThis.Deno) return Deno.env.get(name); // Deno
  throw Error("Unsupported JS runtime");

@@ -538,23 +537,14 @@ globalThis.grammar = grammar;
 globalThis.field = field;
 globalThis.RustRegex = RustRegex;

-const grammarPath = getEnv("TREE_SITTER_GRAMMAR_PATH");
-let result = await import(grammarPath);
-let grammarObj = result.default?.grammar ?? result.grammar;
-
-if (globalThis.native && !grammarObj) {
-  grammarObj = module.exports.grammar;
-}
-
+const result = await import(getEnv("TREE_SITTER_GRAMMAR_PATH"));
 const object = {
  "$schema": "https://tree-sitter.github.io/tree-sitter/assets/schemas/grammar.schema.json",
-  ...grammarObj,
+  ...(result.default?.grammar ?? result.grammar)
 };
 const output = JSON.stringify(object);

-if (globalThis.native) {
-  globalThis.output = output;
-} else if (globalThis.process) { // Node/Bun
+if (globalThis.process) { // Node/Bun
  process.stdout.write(output);
 } else if (globalThis.Deno) { // Deno
  Deno.stdout.writeSync(new TextEncoder().encode(output));
1 cli/generate/src/grammar_files.rs (Normal file)
@@ -0,0 +1 @@
+
@@ -1,40 +1,32 @@
-use std::{collections::BTreeMap, sync::LazyLock};
-#[cfg(feature = "load")]
 use std::{
    env, fs,
    io::Write,
    path::{Path, PathBuf},
    process::{Command, Stdio},
+    sync::LazyLock,
 };

-use bitflags::bitflags;
-use log::warn;
-use node_types::VariableInfo;
+use anyhow::Result;
 use regex::{Regex, RegexBuilder};
-use rules::{Alias, Symbol};
-#[cfg(feature = "load")]
 use semver::Version;
-#[cfg(feature = "load")]
-use serde::Deserialize;
-use serde::Serialize;
+use serde::{Deserialize, Serialize};
 use thiserror::Error;

 mod build_tables;
 mod dedup;
+mod grammar_files;
 mod grammars;
 mod nfa;
 mod node_types;
 pub mod parse_grammar;
 mod prepare_grammar;
-#[cfg(feature = "qjs-rt")]
-mod quickjs;
 mod render;
 mod rules;
 mod tables;

 use build_tables::build_tables;
 pub use build_tables::ParseTableBuilderError;
-use grammars::{InlinedProductionMap, InputGrammar, LexicalGrammar, SyntaxGrammar};
+use grammars::InputGrammar;
 pub use node_types::{SuperTypeCycleError, VariableInfoError};
 use parse_grammar::parse_grammar;
 pub use parse_grammar::ParseGrammarError;
@@ -50,29 +42,13 @@ static JSON_COMMENT_REGEX: LazyLock<Regex> = LazyLock::new(|| {
        .unwrap()
 });

-struct JSONOutput {
-    #[cfg(feature = "load")]
-    node_types_json: String,
-    syntax_grammar: SyntaxGrammar,
-    lexical_grammar: LexicalGrammar,
-    inlines: InlinedProductionMap,
-    simple_aliases: BTreeMap<Symbol, Alias>,
-    variable_info: Vec<VariableInfo>,
-}
-
 struct GeneratedParser {
    c_code: String,
-    #[cfg(feature = "load")]
    node_types_json: String,
 }

-// NOTE: This constant must be kept in sync with the definition of
-// `TREE_SITTER_LANGUAGE_VERSION` in `lib/include/tree_sitter/api.h`.
-const LANGUAGE_VERSION: usize = 15;
-
 pub const ALLOC_HEADER: &str = include_str!("templates/alloc.h");
 pub const ARRAY_HEADER: &str = include_str!("templates/array.h");
-pub const PARSER_HEADER: &str = include_str!("parser.h.inc");

 pub type GenerateResult<T> = Result<T, GenerateError>;
@@ -80,9 +56,8 @@ pub type GenerateResult<T> = Result<T, GenerateError>;
 pub enum GenerateError {
    #[error("Error with specified path -- {0}")]
    GrammarPath(String),
-    #[error(transparent)]
-    IO(IoError),
-    #[cfg(feature = "load")]
+    #[error("{0}")]
+    IO(String),
    #[error(transparent)]
    LoadGrammarFile(#[from] LoadGrammarError),
    #[error(transparent)]
@@ -93,42 +68,20 @@ pub enum GenerateError {
    VariableInfo(#[from] VariableInfoError),
    #[error(transparent)]
    BuildTables(#[from] ParseTableBuilderError),
-    #[cfg(feature = "load")]
    #[error(transparent)]
    ParseVersion(#[from] ParseVersionError),
    #[error(transparent)]
    SuperTypeCycle(#[from] SuperTypeCycleError),
 }

-#[derive(Debug, Error, Serialize)]
-pub struct IoError {
-    pub error: String,
-    pub path: Option<String>,
-}
-
-impl IoError {
-    fn new(error: &std::io::Error, path: Option<&Path>) -> Self {
-        Self {
-            error: error.to_string(),
-            path: path.map(|p| p.to_string_lossy().to_string()),
-        }
-    }
-}
-
-impl std::fmt::Display for IoError {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.error)?;
-        if let Some(ref path) = self.path {
-            write!(f, " ({path})")?;
-        }
-        Ok(())
-    }
-}
-
-#[cfg(feature = "load")]
+impl From<std::io::Error> for GenerateError {
+    fn from(value: std::io::Error) -> Self {
+        Self::IO(value.to_string())
+    }
+}
+
 pub type LoadGrammarFileResult<T> = Result<T, LoadGrammarError>;

-#[cfg(feature = "load")]
 #[derive(Debug, Error, Serialize)]
 pub enum LoadGrammarError {
    #[error("Path to a grammar file with `.js` or `.json` extension is required")]
@@ -136,26 +89,29 @@ pub enum LoadGrammarError {
    #[error("Failed to load grammar.js -- {0}")]
    LoadJSGrammarFile(#[from] JSError),
    #[error("Failed to load grammar.json -- {0}")]
-    IO(IoError),
+    IO(String),
    #[error("Unknown grammar file extension: {0:?}")]
    FileExtension(PathBuf),
 }

-#[cfg(feature = "load")]
+impl From<std::io::Error> for LoadGrammarError {
+    fn from(value: std::io::Error) -> Self {
+        Self::IO(value.to_string())
+    }
+}
+
 #[derive(Debug, Error, Serialize)]
 pub enum ParseVersionError {
    #[error("{0}")]
    Version(String),
    #[error("{0}")]
    JSON(String),
-    #[error(transparent)]
-    IO(IoError),
+    #[error("{0}")]
+    IO(String),
 }

-#[cfg(feature = "load")]
 pub type JSResult<T> = Result<T, JSError>;

-#[cfg(feature = "load")]
 #[derive(Debug, Error, Serialize)]
 pub enum JSError {
    #[error("Failed to run `{runtime}` -- {error}")]
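Both sides of these error-type hunks lean on the same mechanism: a `From` impl is what lets the `?` operator convert a `std::io::Error` into the crate's own error type without an explicit `map_err`. A minimal self-contained sketch of the pattern; the names mirror the diff, but the example stands alone:

```rust
// A manual From impl makes `?` auto-convert io::Error into our error type.
use std::fs;

#[derive(Debug)]
enum LoadGrammarError {
    IO(String),
}

impl From<std::io::Error> for LoadGrammarError {
    fn from(value: std::io::Error) -> Self {
        Self::IO(value.to_string())
    }
}

fn read_grammar(path: &str) -> Result<String, LoadGrammarError> {
    // `?` calls LoadGrammarError::from on any io::Error returned here.
    Ok(fs::read_to_string(path)?)
}
```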
@@ -164,138 +120,85 @@ pub enum JSError {
    JSRuntimeUtf8 { runtime: String, error: String },
    #[error("`{runtime}` process exited with status {code}")]
    JSRuntimeExit { runtime: String, code: i32 },
-    #[error("Failed to open stdin for `{runtime}`")]
-    JSRuntimeStdin { runtime: String },
-    #[error("Failed to write {item} to `{runtime}`'s stdin -- {error}")]
-    JSRuntimeWrite {
-        runtime: String,
-        item: String,
-        error: String,
-    },
-    #[error("Failed to read output from `{runtime}` -- {error}")]
-    JSRuntimeRead { runtime: String, error: String },
-    #[error(transparent)]
-    IO(IoError),
-    #[cfg(feature = "qjs-rt")]
-    #[error("Failed to get relative path")]
-    RelativePath,
+    #[error("{0}")]
+    IO(String),
    #[error("Could not parse this package's version as semver -- {0}")]
    Semver(String),
    #[error("Failed to serialze grammar JSON -- {0}")]
    Serialzation(String),
-    #[cfg(feature = "qjs-rt")]
-    #[error("QuickJS error: {0}")]
-    QuickJS(String),
 }

-#[cfg(feature = "load")]
+impl From<std::io::Error> for JSError {
+    fn from(value: std::io::Error) -> Self {
+        Self::IO(value.to_string())
+    }
+}
+
 impl From<serde_json::Error> for JSError {
    fn from(value: serde_json::Error) -> Self {
        Self::Serialzation(value.to_string())
    }
 }

-#[cfg(feature = "load")]
 impl From<semver::Error> for JSError {
    fn from(value: semver::Error) -> Self {
        Self::Semver(value.to_string())
    }
 }

-#[cfg(feature = "qjs-rt")]
-impl From<rquickjs::Error> for JSError {
-    fn from(value: rquickjs::Error) -> Self {
-        Self::QuickJS(value.to_string())
-    }
-}
-
-bitflags! {
-    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-    pub struct OptLevel: u32 {
-        const MergeStates = 1 << 0;
-    }
-}
-
-impl Default for OptLevel {
-    fn default() -> Self {
-        Self::MergeStates
-    }
-}
-
-#[cfg(feature = "load")]
-#[allow(clippy::too_many_arguments)]
-pub fn generate_parser_in_directory<T, U, V>(
-    repo_path: T,
-    out_path: Option<U>,
-    grammar_path: Option<V>,
+pub fn generate_parser_in_directory(
+    repo_path: &Path,
+    out_path: Option<&str>,
+    grammar_path: Option<&str>,
    mut abi_version: usize,
    report_symbol_name: Option<&str>,
    js_runtime: Option<&str>,
-    generate_parser: bool,
-    optimizations: OptLevel,
-) -> GenerateResult<()>
-where
-    T: Into<PathBuf>,
-    U: Into<PathBuf>,
-    V: Into<PathBuf>,
-{
-    let mut repo_path: PathBuf = repo_path.into();
+) -> GenerateResult<()> {
+    let mut repo_path = repo_path.to_owned();
+    let mut grammar_path = grammar_path;

    // Populate a new empty grammar directory.
-    let grammar_path = if let Some(path) = grammar_path {
-        let path_buf: PathBuf = path.into();
-        if !path_buf
+    if let Some(path) = grammar_path {
+        let path = PathBuf::from(path);
+        if !path
            .try_exists()
            .map_err(|e| GenerateError::GrammarPath(e.to_string()))?
        {
-            fs::create_dir_all(&path_buf)
-                .map_err(|e| GenerateError::IO(IoError::new(&e, Some(path_buf.as_path()))))?;
-            repo_path = path_buf;
-            repo_path.join("grammar.js")
-        } else {
-            path_buf
+            fs::create_dir_all(&path)?;
+            grammar_path = None;
+            repo_path = path;
        }
-    } else {
-        repo_path.join("grammar.js")
-    };
+    }
+
+    let grammar_path = grammar_path.map_or_else(|| repo_path.join("grammar.js"), PathBuf::from);

    // Read the grammar file.
    let grammar_json = load_grammar_file(&grammar_path, js_runtime)?;

-    let src_path = out_path.map_or_else(|| repo_path.join("src"), |p| p.into());
+    let src_path = out_path.map_or_else(|| repo_path.join("src"), PathBuf::from);
    let header_path = src_path.join("tree_sitter");

-    // Ensure that the output directory exists
-    fs::create_dir_all(&src_path)
-        .map_err(|e| GenerateError::IO(IoError::new(&e, Some(src_path.as_path()))))?;
+    // Ensure that the output directories exist.
+    fs::create_dir_all(&src_path)?;
+    fs::create_dir_all(&header_path)?;

    if grammar_path.file_name().unwrap() != "grammar.json" {
-        fs::write(src_path.join("grammar.json"), &grammar_json)
-            .map_err(|e| GenerateError::IO(IoError::new(&e, Some(src_path.as_path()))))?;
+        fs::write(src_path.join("grammar.json"), &grammar_json).map_err(|e| {
+            GenerateError::IO(format!(
+                "Failed to write grammar.json to {} -- {e}",
+                src_path.display()
+            ))
+        })?;
    }

-    // If our job is only to generate `grammar.json` and not `parser.c`, stop here.
+    // Parse and preprocess the grammar.
    let input_grammar = parse_grammar(&grammar_json)?;

-    if !generate_parser {
-        let node_types_json = generate_node_types_from_grammar(&input_grammar)?.node_types_json;
-        write_file(&src_path.join("node-types.json"), node_types_json)?;
-        return Ok(());
-    }
-
    let semantic_version = read_grammar_version(&repo_path)?;

    if semantic_version.is_none() && abi_version > ABI_VERSION_MIN {
-        warn!(
-            concat!(
-                "No `tree-sitter.json` file found in your grammar, ",
-                "this file is required to generate with ABI {}. ",
-                "Using ABI version {} instead.\n",
-                "This file can be set up with `tree-sitter init`. ",
-                "For more information, see https://tree-sitter.github.io/tree-sitter/cli/init."
-            ),
-            abi_version, ABI_VERSION_MIN
-        );
+        println!("Warning: No `tree-sitter.json` file found in your grammar, this file is required to generate with ABI {abi_version}. Using ABI version {ABI_VERSION_MIN} instead.");
+        println!("This file can be set up with `tree-sitter init`. For more information, see https://tree-sitter.github.io/tree-sitter/cli/init.");
        abi_version = ABI_VERSION_MIN;
    }
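The `OptLevel` set that one side of this hunk defines gates optimization passes such as state merging. A sketch of how such a flag set is declared and queried, assuming the `bitflags` crate exactly as the diffed code does:

```rust
// Sketch of the OptLevel flag set from the hunk above, using the bitflags crate.
use bitflags::bitflags;

bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct OptLevel: u32 {
        const MergeStates = 1 << 0;
    }
}

fn main() {
    let opts = OptLevel::MergeStates;
    // Callers gate each optimization pass on its flag:
    if opts.contains(OptLevel::MergeStates) {
        println!("merging compatible parse states");
    }
    // OptLevel::empty() disables every optimization.
    assert!(!OptLevel::empty().contains(OptLevel::MergeStates));
}
```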
@@ -308,16 +211,13 @@ where
        abi_version,
        semantic_version.map(|v| (v.major as u8, v.minor as u8, v.patch as u8)),
        report_symbol_name,
-        optimizations,
    )?;

    write_file(&src_path.join("parser.c"), c_code)?;
    write_file(&src_path.join("node-types.json"), node_types_json)?;
-    fs::create_dir_all(&header_path)
-        .map_err(|e| GenerateError::IO(IoError::new(&e, Some(header_path.as_path()))))?;
    write_file(&header_path.join("alloc.h"), ALLOC_HEADER)?;
    write_file(&header_path.join("array.h"), ARRAY_HEADER)?;
-    write_file(&header_path.join("parser.h"), PARSER_HEADER)?;
+    write_file(&header_path.join("parser.h"), tree_sitter::PARSER_HEADER)?;

    Ok(())
 }
@@ -330,54 +230,29 @@ pub fn generate_parser_for_grammar(
    let input_grammar = parse_grammar(&grammar_json)?;
    let parser = generate_parser_for_grammar_with_opts(
        &input_grammar,
-        LANGUAGE_VERSION,
+        tree_sitter::LANGUAGE_VERSION,
        semantic_version,
        None,
-        OptLevel::empty(),
    )?;
    Ok((input_grammar.name, parser.c_code))
 }

-fn generate_node_types_from_grammar(input_grammar: &InputGrammar) -> GenerateResult<JSONOutput> {
-    let (syntax_grammar, lexical_grammar, inlines, simple_aliases) =
-        prepare_grammar(input_grammar)?;
-    let variable_info =
-        node_types::get_variable_info(&syntax_grammar, &lexical_grammar, &simple_aliases)?;
-
-    #[cfg(feature = "load")]
-    let node_types_json = node_types::generate_node_types_json(
-        &syntax_grammar,
-        &lexical_grammar,
-        &simple_aliases,
-        &variable_info,
-    )?;
-    Ok(JSONOutput {
-        #[cfg(feature = "load")]
-        node_types_json: serde_json::to_string_pretty(&node_types_json).unwrap(),
-        syntax_grammar,
-        lexical_grammar,
-        inlines,
-        simple_aliases,
-        variable_info,
-    })
-}
-
 fn generate_parser_for_grammar_with_opts(
    input_grammar: &InputGrammar,
    abi_version: usize,
    semantic_version: Option<(u8, u8, u8)>,
    report_symbol_name: Option<&str>,
-    optimizations: OptLevel,
 ) -> GenerateResult<GeneratedParser> {
-    let JSONOutput {
-        syntax_grammar,
-        lexical_grammar,
-        inlines,
-        simple_aliases,
-        variable_info,
-        #[cfg(feature = "load")]
-        node_types_json,
-    } = generate_node_types_from_grammar(input_grammar)?;
-
+    let (syntax_grammar, lexical_grammar, inlines, simple_aliases) =
+        prepare_grammar(input_grammar)?;
+    let variable_info =
+        node_types::get_variable_info(&syntax_grammar, &lexical_grammar, &simple_aliases)?;
+    let node_types_json = node_types::generate_node_types_json(
+        &syntax_grammar,
+        &lexical_grammar,
+        &simple_aliases,
+        &variable_info,
+    )?;
    let supertype_symbol_map =
        node_types::get_supertype_symbol_map(&syntax_grammar, &simple_aliases, &variable_info);
    let tables = build_tables(
@@ -387,7 +262,6 @@ fn generate_parser_for_grammar_with_opts(
        &variable_info,
        &inlines,
        report_symbol_name,
-        optimizations,
    )?;
    let c_code = render_c_code(
        &input_grammar.name,
|
||||||
);
|
);
|
||||||
Ok(GeneratedParser {
|
Ok(GeneratedParser {
|
||||||
c_code,
|
c_code,
|
||||||
#[cfg(feature = "load")]
|
node_types_json: serde_json::to_string_pretty(&node_types_json).unwrap(),
|
||||||
node_types_json,
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@@ -411,7 +284,6 @@ fn generate_parser_for_grammar_with_opts(
 /// If the file is not found in the current directory or any of its parent directories, this will
 /// return `None` to maintain backwards compatibility. If the file is found but the version cannot
 /// be parsed as semver, this will return an error.
-#[cfg(feature = "load")]
 fn read_grammar_version(repo_path: &Path) -> Result<Option<Version>, ParseVersionError> {
    #[derive(Deserialize)]
    struct TreeSitterJson {
@@ -430,8 +302,9 @@ fn read_grammar_version(repo_path: &Path) -> Result<Option<Version>, ParseVersio
    let json = path
        .exists()
        .then(|| {
-            let contents = fs::read_to_string(path.as_path())
-                .map_err(|e| ParseVersionError::IO(IoError::new(&e, Some(path.as_path()))))?;
+            let contents = fs::read_to_string(path.as_path()).map_err(|e| {
+                ParseVersionError::IO(format!("Failed to read `{}` -- {e}", path.display()))
+            })?;
            serde_json::from_str::<TreeSitterJson>(&contents).map_err(|e| {
                ParseVersionError::JSON(format!("Failed to parse `{}` -- {e}", path.display()))
            })
@@ -455,7 +328,6 @@ fn read_grammar_version(repo_path: &Path) -> Result<Option<Version>, ParseVersio
    }
 }

-#[cfg(feature = "load")]
 pub fn load_grammar_file(
    grammar_path: &Path,
    js_runtime: Option<&str>,
@@ -465,26 +337,18 @@ pub fn load_grammar_file(
    }
    match grammar_path.extension().and_then(|e| e.to_str()) {
        Some("js") => Ok(load_js_grammar_file(grammar_path, js_runtime)?),
-        Some("json") => Ok(fs::read_to_string(grammar_path)
-            .map_err(|e| LoadGrammarError::IO(IoError::new(&e, Some(grammar_path))))?),
+        Some("json") => Ok(fs::read_to_string(grammar_path)?),
        _ => Err(LoadGrammarError::FileExtension(grammar_path.to_owned()))?,
    }
 }

-#[cfg(feature = "load")]
 fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResult<String> {
-    let grammar_path = dunce::canonicalize(grammar_path)
-        .map_err(|e| JSError::IO(IoError::new(&e, Some(grammar_path))))?;
-
-    #[cfg(feature = "qjs-rt")]
-    if js_runtime == Some("native") {
-        return quickjs::execute_native_runtime(&grammar_path);
-    }
-
-    // The "file:///" prefix is incompatible with the quickjs runtime, but is required
-    // for node and bun
+    let grammar_path = fs::canonicalize(grammar_path)?;

    #[cfg(windows)]
-    let grammar_path = PathBuf::from(format!("file:///{}", grammar_path.display()));
+    let grammar_path = url::Url::from_file_path(grammar_path)
+        .expect("Failed to convert path to URL")
+        .to_string();

    let js_runtime = js_runtime.unwrap_or("node");
@@ -515,9 +379,7 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResu
    let mut js_stdin = js_process
        .stdin
        .take()
-        .ok_or_else(|| JSError::JSRuntimeStdin {
-            runtime: js_runtime.to_string(),
-        })?;
+        .ok_or_else(|| JSError::IO(format!("Failed to open stdin for `{js_runtime}`")))?;

    let cli_version = Version::parse(env!("CARGO_PKG_VERSION"))?;
    write!(
|
|
@ -527,27 +389,23 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResu
|
||||||
globalThis.TREE_SITTER_CLI_VERSION_PATCH = {};",
|
globalThis.TREE_SITTER_CLI_VERSION_PATCH = {};",
|
||||||
cli_version.major, cli_version.minor, cli_version.patch,
|
cli_version.major, cli_version.minor, cli_version.patch,
|
||||||
)
|
)
|
||||||
.map_err(|e| JSError::JSRuntimeWrite {
|
.map_err(|e| {
|
||||||
runtime: js_runtime.to_string(),
|
JSError::IO(format!(
|
||||||
item: "tree-sitter version".to_string(),
|
"Failed to write tree-sitter version to `{js_runtime}`'s stdin -- {e}"
|
||||||
error: e.to_string(),
|
))
|
||||||
|
})?;
|
||||||
|
js_stdin.write(include_bytes!("./dsl.js")).map_err(|e| {
|
||||||
|
JSError::IO(format!(
|
||||||
|
"Failed to write grammar dsl to `{js_runtime}`'s stdin -- {e}"
|
||||||
|
))
|
||||||
})?;
|
})?;
|
||||||
js_stdin
|
|
||||||
.write(include_bytes!("./dsl.js"))
|
|
||||||
.map_err(|e| JSError::JSRuntimeWrite {
|
|
||||||
runtime: js_runtime.to_string(),
|
|
||||||
item: "grammar dsl".to_string(),
|
|
||||||
error: e.to_string(),
|
|
||||||
})?;
|
|
||||||
drop(js_stdin);
|
drop(js_stdin);
|
||||||
|
|
||||||
let output = js_process
|
let output = js_process
|
||||||
.wait_with_output()
|
.wait_with_output()
|
||||||
.map_err(|e| JSError::JSRuntimeRead {
|
.map_err(|e| JSError::IO(format!("Failed to read output from `{js_runtime}` -- {e}")))?;
|
||||||
runtime: js_runtime.to_string(),
|
|
||||||
error: e.to_string(),
|
|
||||||
})?;
|
|
||||||
match output.status.code() {
|
match output.status.code() {
|
||||||
|
None => panic!("`{js_runtime}` process was killed"),
|
||||||
Some(0) => {
|
Some(0) => {
|
||||||
let stdout = String::from_utf8(output.stdout).map_err(|e| JSError::JSRuntimeUtf8 {
|
let stdout = String::from_utf8(output.stdout).map_err(|e| JSError::JSRuntimeUtf8 {
|
||||||
runtime: js_runtime.to_string(),
|
runtime: js_runtime.to_string(),
|
||||||
|
|
@@ -562,15 +420,9 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResu
                grammar_json = &stdout[pos + 1..];

                let mut stdout = std::io::stdout().lock();
-                stdout
-                    .write_all(node_output.as_bytes())
-                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
-                stdout
-                    .write_all(b"\n")
-                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
-                stdout
-                    .flush()
-                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
+                stdout.write_all(node_output.as_bytes())?;
+                stdout.write_all(b"\n")?;
+                stdout.flush()?;
            }

            Ok(serde_json::to_string_pretty(&serde_json::from_str::<
Ok(serde_json::to_string_pretty(&serde_json::from_str::<
|
Ok(serde_json::to_string_pretty(&serde_json::from_str::<
|
||||||
|
|
@ -581,41 +433,10 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResu
|
||||||
runtime: js_runtime.to_string(),
|
runtime: js_runtime.to_string(),
|
||||||
code,
|
code,
|
||||||
}),
|
}),
|
||||||
None => Err(JSError::JSRuntimeExit {
|
|
||||||
runtime: js_runtime.to_string(),
|
|
||||||
code: -1,
|
|
||||||
}),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "load")]
|
|
||||||
pub fn write_file(path: &Path, body: impl AsRef<[u8]>) -> GenerateResult<()> {
|
pub fn write_file(path: &Path, body: impl AsRef<[u8]>) -> GenerateResult<()> {
|
||||||
fs::write(path, body).map_err(|e| GenerateError::IO(IoError::new(&e, Some(path))))
|
fs::write(path, body)
|
||||||
}
|
.map_err(|e| GenerateError::IO(format!("Failed to write {:?} -- {e}", path.file_name())))
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::{LANGUAGE_VERSION, PARSER_HEADER};
|
|
||||||
#[test]
|
|
||||||
fn test_language_versions_are_in_sync() {
|
|
||||||
let api_h = include_str!("../../../lib/include/tree_sitter/api.h");
|
|
||||||
let api_language_version = api_h
|
|
||||||
.lines()
|
|
||||||
.find_map(|line| {
|
|
||||||
line.trim()
|
|
||||||
.strip_prefix("#define TREE_SITTER_LANGUAGE_VERSION ")
|
|
||||||
.and_then(|v| v.parse::<usize>().ok())
|
|
||||||
})
|
|
||||||
.expect("Failed to find TREE_SITTER_LANGUAGE_VERSION definition in api.h");
|
|
||||||
assert_eq!(LANGUAGE_VERSION, api_language_version);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parser_header_in_sync() {
|
|
||||||
let parser_h = include_str!("../../../lib/src/parser.h");
|
|
||||||
assert!(
|
|
||||||
parser_h == PARSER_HEADER,
|
|
||||||
"parser.h.inc is out of sync with lib/src/parser.h. Run: cp lib/src/parser.h crates/generate/src/parser.h.inc"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
@@ -434,7 +434,6 @@ impl Nfa {
    }

    pub fn last_state_id(&self) -> u32 {
-        assert!(!self.states.is_empty());
        self.states.len() as u32 - 1
    }
 }
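The assert in this hunk is not decorative: on an empty `states` vector, `self.states.len() as u32 - 1` panics in debug builds but silently wraps to `u32::MAX` in release builds, producing a nonsense state id. A tiny demonstration of the guarded version:

```rust
// Why the emptiness assert matters before `len() as u32 - 1`.
fn last_state_id(states: &[u32]) -> u32 {
    // Without this, an empty slice yields 0u32 - 1: a debug panic,
    // or u32::MAX under release-mode wrapping arithmetic.
    assert!(!states.is_empty());
    states.len() as u32 - 1
}

fn main() {
    assert_eq!(last_state_id(&[10, 20, 30]), 2);
    // last_state_id(&[]) trips the assert instead of returning u32::MAX.
}
```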
@@ -1,5 +1,6 @@
-use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
+use std::collections::{BTreeMap, HashMap, HashSet};

+use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;
@@ -29,7 +30,6 @@ pub struct VariableInfo {
 }

 #[derive(Debug, Serialize, PartialEq, Eq, Default, PartialOrd, Ord)]
-#[cfg(feature = "load")]
 pub struct NodeInfoJSON {
    #[serde(rename = "type")]
    kind: String,

@@ -47,7 +47,6 @@ pub struct NodeInfoJSON {
 }

 #[derive(Clone, Debug, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
-#[cfg(feature = "load")]
 pub struct NodeTypeJSON {
    #[serde(rename = "type")]
    kind: String,

@@ -55,7 +54,6 @@ pub struct NodeTypeJSON {
 }

 #[derive(Debug, Serialize, PartialEq, Eq, PartialOrd, Ord)]
-#[cfg(feature = "load")]
 pub struct FieldInfoJSON {
    multiple: bool,
    required: bool,

@@ -69,7 +67,6 @@ pub struct ChildQuantity {
    multiple: bool,
 }

-#[cfg(feature = "load")]
 impl Default for FieldInfoJSON {
    fn default() -> Self {
        Self {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const fn append(&mut self, other: Self) {
|
fn append(&mut self, other: Self) {
|
||||||
if other.exists {
|
if other.exists {
|
||||||
if self.exists || other.multiple {
|
if self.exists || other.multiple {
|
||||||
self.multiple = true;
|
self.multiple = true;
|
||||||
|
|
@ -117,7 +114,7 @@ impl ChildQuantity {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const fn union(&mut self, other: Self) -> bool {
|
fn union(&mut self, other: Self) -> bool {
|
||||||
let mut result = false;
|
let mut result = false;
|
||||||
if !self.exists && other.exists {
|
if !self.exists && other.exists {
|
||||||
result = true;
|
result = true;
|
||||||
|
|
@ -377,11 +374,11 @@ pub fn get_variable_info(
|
||||||
fn get_aliases_by_symbol(
|
fn get_aliases_by_symbol(
|
||||||
syntax_grammar: &SyntaxGrammar,
|
syntax_grammar: &SyntaxGrammar,
|
||||||
default_aliases: &AliasMap,
|
default_aliases: &AliasMap,
|
||||||
) -> HashMap<Symbol, BTreeSet<Option<Alias>>> {
|
) -> HashMap<Symbol, HashSet<Option<Alias>>> {
|
||||||
let mut aliases_by_symbol = HashMap::new();
|
let mut aliases_by_symbol = HashMap::new();
|
||||||
for (symbol, alias) in default_aliases {
|
for (symbol, alias) in default_aliases {
|
||||||
aliases_by_symbol.insert(*symbol, {
|
aliases_by_symbol.insert(*symbol, {
|
||||||
let mut aliases = BTreeSet::new();
|
let mut aliases = HashSet::new();
|
||||||
aliases.insert(Some(alias.clone()));
|
aliases.insert(Some(alias.clone()));
|
||||||
aliases
|
aliases
|
||||||
});
|
});
|
||||||
|
|
@ -390,7 +387,7 @@ fn get_aliases_by_symbol(
|
||||||
if !default_aliases.contains_key(extra_symbol) {
|
if !default_aliases.contains_key(extra_symbol) {
|
||||||
aliases_by_symbol
|
aliases_by_symbol
|
||||||
.entry(*extra_symbol)
|
.entry(*extra_symbol)
|
||||||
.or_insert_with(BTreeSet::new)
|
.or_insert_with(HashSet::new)
|
||||||
.insert(None);
|
.insert(None);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -399,7 +396,7 @@ fn get_aliases_by_symbol(
|
||||||
for step in &production.steps {
|
for step in &production.steps {
|
||||||
aliases_by_symbol
|
aliases_by_symbol
|
||||||
.entry(step.symbol)
|
.entry(step.symbol)
|
||||||
.or_insert_with(BTreeSet::new)
|
.or_insert_with(HashSet::new)
|
||||||
.insert(
|
.insert(
|
||||||
step.alias
|
step.alias
|
||||||
.as_ref()
|
.as_ref()
|
||||||
|
|
@ -444,7 +441,6 @@ pub fn get_supertype_symbol_map(
|
||||||
supertype_symbol_map
|
supertype_symbol_map
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "load")]
|
|
||||||
pub type SuperTypeCycleResult<T> = Result<T, SuperTypeCycleError>;
|
pub type SuperTypeCycleResult<T> = Result<T, SuperTypeCycleError>;
|
||||||
|
|
||||||
#[derive(Debug, Error, Serialize)]
|
#[derive(Debug, Error, Serialize)]
|
||||||
|
|
@@ -466,7 +462,6 @@ impl std::fmt::Display for SuperTypeCycleError {
    }
 }

-#[cfg(feature = "load")]
 pub fn generate_node_types_json(
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
|
||||||
|
|
||||||
let aliases_by_symbol = get_aliases_by_symbol(syntax_grammar, default_aliases);
|
let aliases_by_symbol = get_aliases_by_symbol(syntax_grammar, default_aliases);
|
||||||
|
|
||||||
let empty = BTreeSet::new();
|
let empty = HashSet::new();
|
||||||
let extra_names = syntax_grammar
|
let extra_names = syntax_grammar
|
||||||
.extra_symbols
|
.extra_symbols
|
||||||
.iter()
|
.iter()
|
||||||
|
|
@ -589,7 +584,7 @@ pub fn generate_node_types_json(
|
||||||
} else if !syntax_grammar.variables_to_inline.contains(&symbol) {
|
} else if !syntax_grammar.variables_to_inline.contains(&symbol) {
|
||||||
// If a rule is aliased under multiple names, then its information
|
// If a rule is aliased under multiple names, then its information
|
||||||
// contributes to multiple entries in the final JSON.
|
// contributes to multiple entries in the final JSON.
|
||||||
for alias in aliases_by_symbol.get(&symbol).unwrap_or(&BTreeSet::new()) {
|
for alias in aliases_by_symbol.get(&symbol).unwrap_or(&HashSet::new()) {
|
||||||
let kind;
|
let kind;
|
||||||
let is_named;
|
let is_named;
|
||||||
if let Some(alias) = alias {
|
if let Some(alias) = alias {
|
||||||
|
|
@ -783,15 +778,11 @@ pub fn generate_node_types_json(
|
||||||
a_is_leaf.cmp(&b_is_leaf)
|
a_is_leaf.cmp(&b_is_leaf)
|
||||||
})
|
})
|
||||||
.then_with(|| a.kind.cmp(&b.kind))
|
.then_with(|| a.kind.cmp(&b.kind))
|
||||||
.then_with(|| a.named.cmp(&b.named))
|
|
||||||
.then_with(|| a.root.cmp(&b.root))
|
|
||||||
.then_with(|| a.extra.cmp(&b.extra))
|
|
||||||
});
|
});
|
||||||
result.dedup();
|
result.dedup();
|
||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(feature = "load")]
|
|
||||||
fn process_supertypes(info: &mut FieldInfoJSON, subtype_map: &[(NodeTypeJSON, Vec<NodeTypeJSON>)]) {
|
fn process_supertypes(info: &mut FieldInfoJSON, subtype_map: &[(NodeTypeJSON, Vec<NodeTypeJSON>)]) {
|
||||||
for (supertype, subtypes) in subtype_map {
|
for (supertype, subtypes) in subtype_map {
|
||||||
if info.types.contains(supertype) {
|
if info.types.contains(supertype) {
|
||||||
|
|
@ -828,17 +819,17 @@ fn extend_sorted<'a, T>(vec: &mut Vec<T>, values: impl IntoIterator<Item = &'a T
|
||||||
where
|
where
|
||||||
T: 'a + Clone + Eq + Ord,
|
T: 'a + Clone + Eq + Ord,
|
||||||
{
|
{
|
||||||
values.into_iter().fold(false, |acc, value| {
|
values.into_iter().any(|value| {
|
||||||
if let Err(i) = vec.binary_search(value) {
|
if let Err(i) = vec.binary_search(value) {
|
||||||
vec.insert(i, value.clone());
|
vec.insert(i, value.clone());
|
||||||
true
|
true
|
||||||
} else {
|
} else {
|
||||||
acc
|
false
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(all(test, feature = "load"))]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
use crate::{
|
use crate::{
|
||||||
|
|
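The subtlety in the `extend_sorted` hunk is that `Iterator::any` short-circuits on the first `true`, whereas `fold` always drives the iterator to completion. With a side-effecting closure like the insertion here, the `any` form stops inserting after the first new element; the `fold` form inserts them all. A minimal demonstration of just that difference:

```rust
// fold vs any with a side-effecting closure: any stops at the first `true`.
fn main() {
    let mut seen = Vec::new();

    // fold: visits 1, 2, 3 -- all three side effects run.
    [1, 2, 3].iter().fold(false, |acc, v| {
        seen.push(*v);
        acc | true
    });
    assert_eq!(seen, vec![1, 2, 3]);

    seen.clear();

    // any: returns after the first `true`, so only 1 is recorded.
    [1, 2, 3].iter().any(|v| {
        seen.push(*v);
        true
    });
    assert_eq!(seen, vec![1]);
}
```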
@@ -1,15 +1,16 @@
 use std::collections::HashSet;

-use log::warn;
+use anyhow::Result;
 use regex::Regex;
 use serde::{Deserialize, Serialize};
 use serde_json::{Map, Value};
 use thiserror::Error;

-use crate::{
-    grammars::{InputGrammar, PrecedenceEntry, ReservedWordContext, Variable, VariableType},
+use super::{
+    grammars::{InputGrammar, PrecedenceEntry, Variable, VariableType},
    rules::{Precedence, Rule},
 };
+use crate::grammars::ReservedWordContext;

 #[derive(Deserialize)]
 #[serde(tag = "type")]

@@ -17,7 +18,7 @@ use crate::{
 #[allow(clippy::upper_case_acronyms)]
 enum RuleJSON {
    ALIAS {
-        content: Box<Self>,
+        content: Box<RuleJSON>,
        named: bool,
        value: String,
    },
@@ -33,46 +34,46 @@ enum RuleJSON {
        name: String,
    },
    CHOICE {
-        members: Vec<Self>,
+        members: Vec<RuleJSON>,
    },
    FIELD {
        name: String,
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    SEQ {
-        members: Vec<Self>,
+        members: Vec<RuleJSON>,
    },
    REPEAT {
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    REPEAT1 {
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    PREC_DYNAMIC {
        value: i32,
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    PREC_LEFT {
        value: PrecedenceValueJSON,
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    PREC_RIGHT {
        value: PrecedenceValueJSON,
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    PREC {
        value: PrecedenceValueJSON,
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    TOKEN {
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    IMMEDIATE_TOKEN {
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
    RESERVED {
        context_name: String,
-        content: Box<Self>,
+        content: Box<RuleJSON>,
    },
 }
@ -280,13 +281,7 @@ pub(crate) fn parse_grammar(input: &str) -> ParseGrammarResult<InputGrammar> {
|
||||||
_ => false,
|
_ => false,
|
||||||
};
|
};
|
||||||
if matches_empty {
|
if matches_empty {
|
||||||
warn!(
|
eprintln!("Warning: Named extra rule `{name}` matches the empty string. Inline this to avoid infinite loops while parsing.");
|
||||||
concat!(
|
|
||||||
"Named extra rule `{}` matches the empty string. ",
|
|
||||||
"Inline this to avoid infinite loops while parsing."
|
|
||||||
),
|
|
||||||
name
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
variables.push(Variable {
|
variables.push(Variable {
|
||||||
|
|
@ -347,7 +342,7 @@ fn parse_rule(json: RuleJSON, is_token: bool) -> ParseGrammarResult<Rule> {
|
||||||
} else {
|
} else {
|
||||||
// silently ignore unicode flags
|
// silently ignore unicode flags
|
||||||
if c != 'u' && c != 'v' {
|
if c != 'u' && c != 'v' {
|
||||||
warn!("unsupported flag {c}");
|
eprintln!("Warning: unsupported flag {c}");
|
||||||
}
|
}
|
||||||
false
|
false
|
||||||
}
|
}
|
||||||
|
|
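
Note: the `RuleJSON` enum above relies on serde's internally tagged representation (`#[serde(tag = "type")]`), so each node in `grammar.json` selects its variant via a `"type"` field. A minimal sketch of the same pattern, with shortened variant names rather than the real grammar schema:

    use serde::Deserialize;

    // Internally tagged: the "type" field picks the variant, and the
    // remaining JSON fields fill in that variant's members.
    #[derive(Deserialize, Debug)]
    #[serde(tag = "type")]
    enum NodeJSON {
        STRING { value: String },
        REPEAT { content: Box<NodeJSON> },
    }

    fn main() {
        let json = r#"{ "type": "REPEAT", "content": { "type": "STRING", "value": "a" } }"#;
        let node: NodeJSON = serde_json::from_str(json).unwrap();
        println!("{node:?}"); // REPEAT { content: STRING { value: "a" } }
    }
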
@@ -1,3 +1,4 @@
+use anyhow::Result;
 use regex_syntax::{
     hir::{Class, Hir, HirKind},
     ParserBuilder,
@@ -26,7 +27,7 @@ pub enum ExpandTokensError {
 "The rule `{0}` matches the empty string.
 Tree-sitter does not support syntactic rules that match the empty string
 unless they are used only as the grammar's start rule.
 "
     )]
     EmptyString(String),
     #[error(transparent)]
@@ -188,7 +189,7 @@ impl NfaBuilder {
             }
             Rule::String(s) => {
                 for c in s.chars().rev() {
-                    self.push_advance(CharacterSet::from_char(c), next_state_id);
+                    self.push_advance(CharacterSet::empty().add_char(c), next_state_id);
                     next_state_id = self.nfa.last_state_id();
                 }
                 Ok(!s.is_empty())
@@ -69,7 +69,9 @@ pub(super) fn extract_default_aliases(
             SymbolType::External => &mut external_status_list[symbol.index],
             SymbolType::NonTerminal => &mut non_terminal_status_list[symbol.index],
             SymbolType::Terminal => &mut terminal_status_list[symbol.index],
-            SymbolType::End | SymbolType::EndOfNonTerminalExtra => panic!("Unexpected end token"),
+            SymbolType::End | SymbolType::EndOfNonTerminalExtra => {
+                panic!("Unexpected end token")
+            }
         };
         status.appears_unaliased = true;
     }
@@ -1,5 +1,6 @@
 use std::collections::HashMap;
 
+use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;
 
@@ -152,7 +153,7 @@ pub(super) fn extract_tokens(
         }
     }
 
-    let mut external_tokens = Vec::with_capacity(grammar.external_tokens.len());
+    let mut external_tokens = Vec::new();
     for external_token in grammar.external_tokens {
         let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
         if let Rule::Symbol(symbol) = rule {
@@ -180,7 +181,8 @@ pub(super) fn extract_tokens(
         }
     }
 
-    let word_token = if let Some(token) = grammar.word_token {
+    let mut word_token = None;
+    if let Some(token) = grammar.word_token {
         let token = symbol_replacer.replace_symbol(token);
         if token.is_non_terminal() {
             let word_token_variable = &variables[token.index];
@@ -195,10 +197,8 @@ pub(super) fn extract_tokens(
                 conflicting_symbol_name,
             }))?;
         }
-        Some(token)
-    } else {
-        None
-    };
+        word_token = Some(token);
+    }
 
     let mut reserved_word_contexts = Vec::with_capacity(grammar.reserved_word_sets.len());
     for reserved_word_context in grammar.reserved_word_sets {
@@ -212,12 +212,7 @@ pub(super) fn extract_tokens(
             {
                 reserved_words.push(Symbol::terminal(index));
             } else {
-                let rule = if let Rule::Metadata { rule, .. } = &reserved_rule {
-                    rule.as_ref()
-                } else {
-                    &reserved_rule
-                };
-                let token_name = match rule {
+                let token_name = match &reserved_rule {
                     Rule::String(s) => s.clone(),
                     Rule::Pattern(p, _) => p.clone(),
                     _ => "unknown".to_string(),
@@ -285,11 +280,10 @@ impl TokenExtractor {
         let mut params = params.clone();
         params.is_token = false;
 
-        let string_value = if let Rule::String(value) = rule.as_ref() {
-            Some(value)
-        } else {
-            None
-        };
+        let mut string_value = None;
+        if let Rule::String(value) = rule.as_ref() {
+            string_value = Some(value);
+        }
 
         let rule_to_extract = if params == MetadataParams::default() {
             rule.as_ref()
@@ -590,13 +584,14 @@ mod test {
         ]);
         grammar.external_tokens = vec![Variable::named("rule_1", Rule::non_terminal(1))];
 
-        let result = extract_tokens(grammar);
-        assert!(result.is_err(), "Expected an error but got no error");
-        let err = result.err().unwrap();
-        assert_eq!(
-            err.to_string(),
-            "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule"
-        );
+        match extract_tokens(grammar) {
+            Err(e) => {
+                assert_eq!(e.to_string(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
+            }
+            _ => {
+                panic!("Expected an error but got no error");
+            }
+        }
     }
 
     #[test]
@@ -1,5 +1,6 @@
 use std::collections::HashMap;
 
+use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;
 
@@ -1,4 +1,4 @@
-use log::warn;
+use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;
 
@@ -95,15 +95,14 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> InternSymbolsResult<Inte
         }
     }
 
-    let word_token = if let Some(name) = grammar.word_token.as_ref() {
-        Some(
+    let mut word_token = None;
+    if let Some(name) = grammar.word_token.as_ref() {
+        word_token = Some(
             interner
                 .intern_name(name)
                 .ok_or_else(|| InternSymbolsError::UndefinedWordToken(name.clone()))?,
-        )
-    } else {
-        None
-    };
+        );
+    }
 
     for (i, variable) in variables.iter_mut().enumerate() {
         if supertype_symbols.contains(&Symbol::non_terminal(i)) {
@@ -132,7 +131,7 @@ impl Interner<'_> {
     fn intern_rule(&self, rule: &Rule, name: Option<&str>) -> InternSymbolsResult<Rule> {
         match rule {
             Rule::Choice(elements) => {
-                self.check_single(elements, name, "choice");
+                self.check_single(elements, name);
                 let mut result = Vec::with_capacity(elements.len());
                 for element in elements {
                     result.push(self.intern_rule(element, name)?);
@@ -140,7 +139,7 @@ impl Interner<'_> {
                 Ok(Rule::Choice(result))
             }
             Rule::Seq(elements) => {
-                self.check_single(elements, name, "seq");
+                self.check_single(elements, name);
                 let mut result = Vec::with_capacity(elements.len());
                 for element in elements {
                     result.push(self.intern_rule(element, name)?);
@@ -184,10 +183,10 @@ impl Interner<'_> {
 
     // In the case of a seq or choice rule of 1 element in a hidden rule, weird
     // inconsistent behavior with queries can occur. So we should warn the user about it.
-    fn check_single(&self, elements: &[Rule], name: Option<&str>, kind: &str) {
+    fn check_single(&self, elements: &[Rule], name: Option<&str>) {
         if elements.len() == 1 && matches!(elements[0], Rule::String(_) | Rule::Pattern(_, _)) {
-            warn!(
-                "rule {} contains a `{kind}` rule with a single element. This is unnecessary.",
+            eprintln!(
+                "Warning: rule {} contains a `seq` or `choice` rule with a single element. This is unnecessary.",
                 name.unwrap_or_default()
             );
         }
@@ -278,9 +277,10 @@ mod tests {
     fn test_grammar_with_undefined_symbols() {
         let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
 
-        assert!(result.is_err(), "Expected an error but got none");
-        let e = result.err().unwrap();
-        assert_eq!(e.to_string(), "Undefined symbol `y`");
+        match result {
+            Err(e) => assert_eq!(e.to_string(), "Undefined symbol `y`"),
+            _ => panic!("Expected an error but got none"),
+        }
     }
 
     fn build_grammar(variables: Vec<Variable>) -> InputGrammar {
@@ -12,6 +12,7 @@ use std::{
     mem,
 };
 
+use anyhow::Result;
 pub use expand_tokens::ExpandTokensError;
 pub use extract_tokens::ExtractTokensError;
 pub use flatten_grammar::FlattenGrammarError;
@@ -267,7 +268,7 @@ fn validate_precedences(grammar: &InputGrammar) -> ValidatePrecedenceResult<()>
                 if let Precedence::Name(n) = &params.precedence {
                     if !names.contains(n) {
                         Err(UndeclaredPrecedenceError {
-                            precedence: n.clone(),
+                            precedence: n.to_string(),
                             rule: rule_name.to_string(),
                         })?;
                     }
@@ -1,5 +1,6 @@
 use std::collections::HashMap;
 
+use anyhow::Result;
 use serde::Serialize;
 use thiserror::Error;
 
@@ -70,13 +71,12 @@ impl InlinedProductionMapBuilder {
         let production_map = production_indices_by_step_id
             .into_iter()
             .map(|(step_id, production_indices)| {
-                let production =
-                    core::ptr::from_ref::<Production>(step_id.variable_index.map_or_else(
-                        || &productions[step_id.production_index],
-                        |variable_index| {
-                            &grammar.variables[variable_index].productions[step_id.production_index]
-                        },
-                    ));
+                let production = step_id.variable_index.map_or_else(
+                    || &productions[step_id.production_index],
+                    |variable_index| {
+                        &grammar.variables[variable_index].productions[step_id.production_index]
+                    },
+                ) as *const Production;
                 ((production, step_id.step_index as u32), production_indices)
             })
             .collect();
@@ -549,9 +549,10 @@ mod tests {
             ..Default::default()
         };
 
-        let result = process_inlines(&grammar, &lexical_grammar);
-        assert!(result.is_err(), "expected an error, but got none");
-        let err = result.err().unwrap();
-        assert_eq!(err.to_string(), "Token `something` cannot be inlined",);
+        if let Err(error) = process_inlines(&grammar, &lexical_grammar) {
+            assert_eq!(error.to_string(), "Token `something` cannot be inlined");
+        } else {
+            panic!("expected an error, but got none");
+        }
     }
 }
@@ -5,7 +5,6 @@ use std::{
     mem::swap,
 };
 
-use crate::LANGUAGE_VERSION;
 use indoc::indoc;
 
 use super::{
@@ -22,10 +21,10 @@ use super::{
 
 const SMALL_STATE_THRESHOLD: usize = 64;
 pub const ABI_VERSION_MIN: usize = 14;
-pub const ABI_VERSION_MAX: usize = LANGUAGE_VERSION;
+pub const ABI_VERSION_MAX: usize = tree_sitter::LANGUAGE_VERSION;
 const ABI_VERSION_WITH_RESERVED_WORDS: usize = 15;
+const BUILD_VERSION: &str = env!("CARGO_PKG_VERSION");
 
-#[clippy::format_args]
 macro_rules! add {
     ($this: tt, $($arg: tt)*) => {{
         $this.buffer.write_fmt(format_args!($($arg)*)).unwrap();
@@ -34,15 +33,12 @@ macro_rules! add {
 
 macro_rules! add_whitespace {
     ($this:tt) => {{
-        // 4 bytes per char, 2 spaces per indent level
-        $this.buffer.reserve(4 * 2 * $this.indent_level);
         for _ in 0..$this.indent_level {
             write!(&mut $this.buffer, "  ").unwrap();
         }
     }};
 }
 
-#[clippy::format_args]
 macro_rules! add_line {
     ($this: tt, $($arg: tt)*) => {
         add_whitespace!($this);
@@ -325,7 +321,10 @@ impl Generator {
     }
 
     fn add_header(&mut self) {
-        add_line!(self, "/* Automatically @generated by tree-sitter */",);
+        add_line!(
+            self,
+            "/* Automatically @generated by tree-sitter v{BUILD_VERSION} */",
+        );
         add_line!(self, "");
     }
 
@@ -690,14 +689,13 @@ impl Generator {
                         flat_field_map.push((field_name.clone(), *location));
                     }
                 }
-                let field_map_len = flat_field_map.len();
                 field_map_ids.push((
                     self.get_field_map_id(
-                        flat_field_map,
+                        flat_field_map.clone(),
                         &mut flat_field_maps,
                         &mut next_flat_field_map_index,
                     ),
-                    field_map_len,
+                    flat_field_map.len(),
                 ));
             }
         }
@@ -965,7 +963,10 @@ impl Generator {
             large_char_set_ix = Some(char_set_ix);
         }
 
-        let line_break = format!("\n{}", "  ".repeat(self.indent_level + 2));
+        let mut line_break = "\n".to_string();
+        for _ in 0..self.indent_level + 2 {
+            line_break.push_str("  ");
+        }
 
         let has_positive_condition = large_char_set_ix.is_some() || !asserted_chars.is_empty();
         let has_negative_condition = !negated_chars.is_empty();
@@ -1,4 +1,4 @@
-use std::{collections::BTreeMap, fmt};
+use std::{collections::HashMap, fmt};
 
 use serde::Serialize;
 use smallbitvec::SmallBitVec;
@@ -34,7 +34,7 @@ pub enum Precedence {
     Name(String),
 }
 
-pub type AliasMap = BTreeMap<Symbol, Alias>;
+pub type AliasMap = HashMap<Symbol, Alias>;
 
 #[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Serialize)]
 pub struct MetadataParams {
@@ -60,15 +60,15 @@ pub enum Rule {
     Pattern(String, String),
     NamedSymbol(String),
     Symbol(Symbol),
-    Choice(Vec<Self>),
+    Choice(Vec<Rule>),
     Metadata {
         params: MetadataParams,
-        rule: Box<Self>,
+        rule: Box<Rule>,
     },
-    Repeat(Box<Self>),
-    Seq(Vec<Self>),
+    Repeat(Box<Rule>),
+    Seq(Vec<Rule>),
     Reserved {
-        rule: Box<Self>,
+        rule: Box<Rule>,
         context_name: String,
     },
 }
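
Note: one side of the `AliasMap` hunk above keys the map with `BTreeMap`, the other with `HashMap`. The practical difference (stated here as general Rust behavior, not a rationale taken from the diff) is iteration order: `BTreeMap` iterates keys in sorted order, so anything rendered from the map is byte-identical across runs, while `HashMap` order is unspecified. A minimal sketch:

    use std::collections::{BTreeMap, HashMap};

    fn main() {
        // HashMap iteration order is unspecified and can vary between runs.
        let hashed: HashMap<u32, &str> = HashMap::from([(2, "b"), (1, "a"), (3, "c")]);
        // BTreeMap always iterates in key order: 1, 2, 3.
        let sorted: BTreeMap<u32, &str> = BTreeMap::from([(2, "b"), (1, "a"), (3, "c")]);
        println!("{:?}", hashed.keys().collect::<Vec<_>>()); // order varies
        println!("{:?}", sorted.keys().collect::<Vec<_>>()); // [1, 2, 3]
    }
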
@@ -8,7 +8,6 @@ rust-version.workspace = true
 readme = "README.md"
 homepage.workspace = true
 repository.workspace = true
-documentation = "https://docs.rs/tree-sitter-loader"
 license.workspace = true
 keywords.workspace = true
 categories.workspace = true
@@ -17,9 +16,6 @@ categories.workspace = true
 all-features = true
 rustdoc-args = ["--cfg", "docsrs"]
 
-[lib]
-path = "src/loader.rs"
-
 [lints]
 workspace = true
 
@@ -28,19 +24,20 @@ wasm = ["tree-sitter/wasm"]
 default = ["tree-sitter-highlight", "tree-sitter-tags"]
 
 [dependencies]
+anyhow.workspace = true
 cc.workspace = true
 etcetera.workspace = true
 fs4.workspace = true
 indoc.workspace = true
 libloading.workspace = true
-log.workspace = true
 once_cell.workspace = true
+path-slash.workspace = true
 regex.workspace = true
 semver.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 tempfile.workspace = true
-thiserror.workspace = true
+url.workspace = true
 
 tree-sitter = { workspace = true }
 tree-sitter-highlight = { workspace = true, optional = true }
@@ -7,4 +7,7 @@ fn main() {
         "cargo:rustc-env=BUILD_HOST={}",
         std::env::var("HOST").unwrap()
     );
+
+    let emscripten_version = std::fs::read_to_string("emscripten-version").unwrap();
+    println!("cargo:rustc-env=EMSCRIPTEN_VERSION={emscripten_version}");
 }
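
Note: `cargo:rustc-env` directives printed from a build script become compile-time environment variables of the crate being built, readable with the `env!` macro. A minimal sketch of how the value exported above could be consumed (the constant name is illustrative, not taken from the diff):

    // In the crate's source (not the build script): `env!` reads the
    // variable that build.rs exported via `cargo:rustc-env=...`.
    const EMSCRIPTEN_VERSION: &str = env!("EMSCRIPTEN_VERSION");

    fn main() {
        println!("built against emscripten {EMSCRIPTEN_VERSION}");
    }
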
cli/loader/emscripten-version (new file)
@@ -0,0 +1 @@
+4.0.4
File diff suppressed because it is too large
crates/cli/npm/dsl.d.ts → cli/npm/dsl.d.ts (vendored)
@@ -29,7 +29,6 @@ type Rule =
   | PrecRule
   | Repeat1Rule
   | RepeatRule
-  | ReservedRule
   | SeqRule
   | StringRule
   | SymbolRule<string>
@@ -1,6 +1,6 @@
 {
   "name": "tree-sitter-cli",
-  "version": "0.27.0",
+  "version": "0.25.9",
   "author": {
     "name": "Max Brunsfeld",
     "email": "maxbrunsfeld@gmail.com"
@@ -27,7 +27,7 @@
   },
   "scripts": {
     "install": "node install.js",
-    "prepack": "cp ../../../LICENSE ../README.md .",
+    "prepack": "cp ../../LICENSE ../README.md .",
     "postpack": "rm LICENSE README.md"
   },
   "bin": {
@@ -40,11 +40,7 @@ extern "C" {
     fn free(ptr: *mut c_void);
 }
 
-pub fn record<T>(f: impl FnOnce() -> T) -> T {
-    record_checked(f).unwrap()
-}
-
-pub fn record_checked<T>(f: impl FnOnce() -> T) -> Result<T, String> {
+pub fn record<T>(f: impl FnOnce() -> T) -> Result<T, String> {
     RECORDER.with(|recorder| {
         recorder.enabled.store(true, SeqCst);
         recorder.allocation_count.store(0, SeqCst);
@@ -97,34 +93,19 @@ fn record_dealloc(ptr: *mut c_void) {
     });
 }
 
-/// # Safety
-///
-/// The caller must ensure that the returned pointer is eventually
-/// freed by calling `ts_record_free`.
-#[must_use]
-pub unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
+unsafe extern "C" fn ts_record_malloc(size: usize) -> *mut c_void {
     let result = malloc(size);
     record_alloc(result);
     result
 }
 
-/// # Safety
-///
-/// The caller must ensure that the returned pointer is eventually
-/// freed by calling `ts_record_free`.
-#[must_use]
-pub unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
+unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void {
     let result = calloc(count, size);
     record_alloc(result);
     result
 }
 
-/// # Safety
-///
-/// The caller must ensure that the returned pointer is eventually
-/// freed by calling `ts_record_free`.
-#[must_use]
-pub unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
+unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
     let result = realloc(ptr, size);
     if ptr.is_null() {
         record_alloc(result);
@@ -135,11 +116,7 @@ pub unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mu
     result
 }
 
-/// # Safety
-///
-/// The caller must ensure that `ptr` was allocated by a previous call
-/// to `ts_record_malloc`, `ts_record_calloc`, or `ts_record_realloc`.
-pub unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
+unsafe extern "C" fn ts_record_free(ptr: *mut c_void) {
     record_dealloc(ptr);
     free(ptr);
 }
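
Note: both sides wrap a test closure with a thread-local recorder so that the C allocator shims (`ts_record_malloc` and friends) can detect leaks; one side splits this into a panicking `record` plus a `Result`-returning `record_checked`. A self-contained sketch of the underlying pattern — the counter and function names here are illustrative, not the real recorder:

    use std::cell::Cell;

    // Arm a thread-local counter around a closure; report an error if
    // the simulated allocations did not balance out.
    thread_local! {
        static OUTSTANDING: Cell<isize> = Cell::new(0);
    }

    fn record<T>(f: impl FnOnce() -> T) -> Result<T, String> {
        OUTSTANDING.with(|c| c.set(0));
        let value = f();
        let leaked = OUTSTANDING.with(|c| c.get());
        if leaked == 0 {
            Ok(value)
        } else {
            Err(format!("{leaked} allocation(s) were not freed"))
        }
    }

    fn fake_alloc() {
        OUTSTANDING.with(|c| c.set(c.get() + 1));
    }

    fn fake_free() {
        OUTSTANDING.with(|c| c.set(c.get() - 1));
    }

    fn main() {
        assert!(record(|| { fake_alloc(); fake_free(); }).is_ok());
        assert!(record(|| fake_alloc()).is_err());
    }
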
@@ -23,7 +23,7 @@ pub fn check_consistent_sizes(tree: &Tree, input: &[u8]) {
         let mut some_child_has_changes = false;
         let mut actual_named_child_count = 0;
         for i in 0..node.child_count() {
-            let child = node.child(i as u32).unwrap();
+            let child = node.child(i).unwrap();
             assert!(child.start_byte() >= last_child_end_byte);
             assert!(child.start_position() >= last_child_end_point);
             check(child, line_offsets);
@@ -1,11 +1,5 @@
-use std::{
-    collections::HashMap,
-    env, fs,
-    path::{Path, PathBuf},
-    sync::LazyLock,
-};
+use std::{collections::HashMap, env, fs, path::Path, sync::LazyLock};
 
-use log::{error, info};
 use rand::Rng;
 use regex::Regex;
 use tree_sitter::{Language, Parser};
@@ -25,7 +19,7 @@ use crate::{
         random::Rand,
     },
     parse::perform_edit,
-    test::{parse_tests, strip_sexp_fields, DiffKey, TestDiff, TestEntry},
+    test::{parse_tests, print_diff, print_diff_key, strip_sexp_fields, TestEntry},
 };
 
 pub static LOG_ENABLED: LazyLock<bool> = LazyLock::new(|| env::var("TREE_SITTER_LOG").is_ok());
@@ -63,14 +57,14 @@ pub fn new_seed() -> usize {
     int_env_var("TREE_SITTER_SEED").unwrap_or_else(|| {
         let mut rng = rand::thread_rng();
         let seed = rng.gen::<usize>();
-        info!("Seed: {seed}");
+        eprintln!("Seed: {seed}");
         seed
     })
 }
 
 pub struct FuzzOptions {
     pub skipped: Option<Vec<String>>,
-    pub subdir: Option<PathBuf>,
+    pub subdir: Option<String>,
     pub edits: usize,
     pub iterations: usize,
     pub include: Option<Regex>,
@@ -109,12 +103,12 @@ pub fn fuzz_language_corpus(
     let corpus_dir = grammar_dir.join(subdir).join("test").join("corpus");
 
     if !corpus_dir.exists() || !corpus_dir.is_dir() {
-        error!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
+        eprintln!("No corpus directory found, ensure that you have a `test/corpus` directory in your grammar directory with at least one test file.");
         return;
     }
 
     if std::fs::read_dir(&corpus_dir).unwrap().count() == 0 {
-        error!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
+        eprintln!("No corpus files found in `test/corpus`, ensure that you have at least one test file in your corpus directory.");
         return;
     }
 
@@ -150,7 +144,7 @@ pub fn fuzz_language_corpus(
     let dump_edits = env::var("TREE_SITTER_DUMP_EDITS").is_ok();
 
     if log_seed {
-        info!("  start seed: {start_seed}");
+        println!("  start seed: {start_seed}");
     }
 
     println!();
@@ -164,7 +158,7 @@ pub fn fuzz_language_corpus(
 
         println!("  {test_index}. {test_name}");
 
-        let passed = allocations::record_checked(|| {
+        let passed = allocations::record(|| {
            let mut log_session = None;
            let mut parser = get_parser(&mut log_session, "log.html");
            parser.set_language(language).unwrap();
@@ -183,8 +177,8 @@ pub fn fuzz_language_corpus(
 
             if actual_output != test.output {
                 println!("Incorrect initial parse for {test_name}");
-                DiffKey::print();
-                println!("{}", TestDiff::new(&actual_output, &test.output));
+                print_diff_key();
+                print_diff(&actual_output, &test.output, true);
                 println!();
                 return false;
             }
@@ -192,7 +186,7 @@ pub fn fuzz_language_corpus(
             true
         })
         .unwrap_or_else(|e| {
-            error!("{e}");
+            eprintln!("Error: {e}");
             false
         });
 
@@ -208,7 +202,7 @@ pub fn fuzz_language_corpus(
 
         for trial in 0..options.iterations {
             let seed = start_seed + trial;
-            let passed = allocations::record_checked(|| {
+            let passed = allocations::record(|| {
                 let mut rand = Rand::new(seed);
                 let mut log_session = None;
                 let mut parser = get_parser(&mut log_session, "log.html");
@@ -217,7 +211,7 @@ pub fn fuzz_language_corpus(
                 let mut input = test.input.clone();
 
                 if options.log_graphs {
-                    info!("{}\n", String::from_utf8_lossy(&input));
+                    eprintln!("{}\n", String::from_utf8_lossy(&input));
                 }
 
                 // Perform a random series of edits and reparse.
@@ -230,7 +224,7 @@ pub fn fuzz_language_corpus(
                 }
 
                 if log_seed {
-                    info!("  {test_index}.{trial:<2} seed: {seed}");
+                    println!("  {test_index}.{trial:<2} seed: {seed}");
                 }
 
                 if dump_edits {
@@ -244,7 +238,7 @@ pub fn fuzz_language_corpus(
                 }
 
                 if options.log_graphs {
-                    info!("{}\n", String::from_utf8_lossy(&input));
+                    eprintln!("{}\n", String::from_utf8_lossy(&input));
                 }
 
                 set_included_ranges(&mut parser, &input, test.template_delimiters);
@@ -253,7 +247,7 @@ pub fn fuzz_language_corpus(
                 // Check that the new tree is consistent.
                 check_consistent_sizes(&tree2, &input);
                 if let Err(message) = check_changed_ranges(&tree, &tree2, &input) {
-                    error!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
+                    println!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
                     return false;
                 }
 
@@ -262,7 +256,7 @@ pub fn fuzz_language_corpus(
                     perform_edit(&mut tree2, &mut input, &edit).unwrap();
                 }
                 if options.log_graphs {
-                    info!("{}\n", String::from_utf8_lossy(&input));
+                    eprintln!("{}\n", String::from_utf8_lossy(&input));
                 }
 
                 set_included_ranges(&mut parser, &test.input, test.template_delimiters);
@@ -276,8 +270,8 @@ pub fn fuzz_language_corpus(
 
                 if actual_output != test.output && !test.error {
                     println!("Incorrect parse for {test_name} - seed {seed}");
-                    DiffKey::print();
-                    println!("{}", TestDiff::new(&actual_output, &test.output));
+                    print_diff_key();
+                    print_diff(&actual_output, &test.output, true);
                     println!();
                     return false;
                 }
@@ -285,13 +279,13 @@ pub fn fuzz_language_corpus(
                 // Check that the edited tree is consistent.
                 check_consistent_sizes(&tree3, &input);
                 if let Err(message) = check_changed_ranges(&tree2, &tree3, &input) {
-                    error!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
+                    println!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
                     return false;
                 }
 
                 true
             }).unwrap_or_else(|e| {
-                error!("{e}");
+                eprintln!("Error: {e}");
                 false
             });
 
@@ -303,17 +297,17 @@ pub fn fuzz_language_corpus(
     }
 
     if failure_count != 0 {
-        info!("{failure_count} {language_name} corpus tests failed fuzzing");
+        eprintln!("{failure_count} {language_name} corpus tests failed fuzzing");
     }
 
     skipped.retain(|_, v| *v == 0);
 
     if !skipped.is_empty() {
-        info!("Non matchable skip definitions:");
+        println!("Non matchable skip definitions:");
         for k in skipped.keys() {
-            info!("  {k}");
+            println!("  {k}");
        }
-        panic!("Non matchable skip definitions need to be removed");
+        panic!("Non matchable skip definitions needs to be removed");
    }
 }
@@ -12,7 +12,6 @@ use std::{
 use ansi_colours::{ansi256_from_rgb, rgb_from_ansi256};
 use anstyle::{Ansi256Color, AnsiColor, Color, Effects, RgbColor};
 use anyhow::Result;
-use log::{info, warn};
 use serde::{ser::SerializeMap, Deserialize, Deserializer, Serialize, Serializer};
 use serde_json::{json, Value};
 use tree_sitter_highlight::{HighlightConfiguration, HighlightEvent, Highlighter, HtmlRenderer};
@@ -349,17 +348,19 @@ pub fn highlight(
             config.nonconformant_capture_names(&HashSet::new())
         };
         if names.is_empty() {
-            info!("All highlight captures conform to standards.");
+            eprintln!("All highlight captures conform to standards.");
         } else {
-            warn!(
-                "Non-standard highlight {} detected:\n* {}",
+            eprintln!(
+                "Non-standard highlight {} detected:",
                 if names.len() > 1 {
                     "captures"
                 } else {
                     "capture"
-                },
-                names.join("\n* ")
+                }
             );
+            for name in names {
+                eprintln!("* {name}");
+            }
         }
     }
 
@@ -450,7 +451,7 @@ pub fn highlight(
     }
 
     if opts.print_time {
-        info!("Time: {}ms", time.elapsed().as_millis());
+        eprintln!("Time: {}ms", time.elapsed().as_millis());
     }
 
     Ok(())
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
-#![cfg_attr(not(any(test, doctest)), doc = include_str!("../README.md"))]
+#![doc = include_str!("../README.md")]
 
 pub mod fuzz;
 pub mod highlight;
@@ -20,5 +20,6 @@ pub mod wasm;
 #[cfg(test)]
 mod tests;
 
+// To run compile fail tests
 #[cfg(doctest)]
 mod tests;
cli/src/logger.rs (new file, 30 lines)
@@ -0,0 +1,30 @@
+use log::{LevelFilter, Log, Metadata, Record};
+
+#[allow(dead_code)]
+struct Logger {
+    pub filter: Option<String>,
+}
+
+impl Log for Logger {
+    fn enabled(&self, _: &Metadata) -> bool {
+        true
+    }
+
+    fn log(&self, record: &Record) {
+        eprintln!(
+            "[{}] {}",
+            record
+                .module_path()
+                .unwrap_or_default()
+                .trim_start_matches("rust_tree_sitter_cli::"),
+            record.args()
+        );
+    }
+
+    fn flush(&self) {}
+}
+
+pub fn init() {
+    log::set_boxed_logger(Box::new(Logger { filter: None })).unwrap();
+    log::set_max_level(LevelFilter::Info);
+}
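
Note: this logger plugs into the standard `log` facade: `init` installs it as the global logger, after which the `log` macros anywhere in the process route through `Logger::log` and print to stderr. A minimal sketch of the call side, assuming the module is reachable as `logger`:

    fn main() {
        // Install the boxed logger once, early in main; a second call to
        // log::set_boxed_logger would return an error.
        logger::init();

        // These now print to stderr as "[module_path] message".
        log::info!("Seed: {}", 42);
        log::warn!("unsupported flag {}", 'x');
    }
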
File diff suppressed because it is too large
|
|
@ -1,7 +1,6 @@
|
||||||
use std::{
|
use std::{
|
||||||
fmt, fs,
|
fmt, fs,
|
||||||
io::{self, Write},
|
io::{self, StdoutLock, Write},
|
||||||
ops::ControlFlow,
|
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
sync::atomic::{AtomicUsize, Ordering},
|
sync::atomic::{AtomicUsize, Ordering},
|
||||||
time::{Duration, Instant},
|
time::{Duration, Instant},
|
||||||
|
|
@ -10,17 +9,16 @@ use std::{
|
||||||
use anstyle::{AnsiColor, Color, RgbColor};
|
use anstyle::{AnsiColor, Color, RgbColor};
|
||||||
use anyhow::{anyhow, Context, Result};
|
use anyhow::{anyhow, Context, Result};
|
||||||
use clap::ValueEnum;
|
use clap::ValueEnum;
|
||||||
use log::info;
|
|
||||||
use schemars::JsonSchema;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use tree_sitter::{
|
use tree_sitter::{
|
||||||
ffi, InputEdit, Language, LogType, ParseOptions, ParseState, Parser, Point, Range, Tree,
|
ffi, InputEdit, Language, LogType, ParseOptions, ParseState, Parser, Point, Range, Tree,
|
||||||
TreeCursor,
|
TreeCursor,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::{fuzz::edits::Edit, logger::paint, util};
|
use super::util;
|
||||||
|
use crate::{fuzz::edits::Edit, test::paint};
|
||||||
|
|
||||||
#[derive(Debug, Default, Serialize, JsonSchema)]
|
#[derive(Debug, Default, Serialize)]
|
||||||
pub struct Stats {
|
pub struct Stats {
|
||||||
pub successful_parses: usize,
|
pub successful_parses: usize,
|
||||||
pub total_parses: usize,
|
pub total_parses: usize,
|
||||||
|
|
@ -231,21 +229,10 @@ impl ParseSummary {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, Debug)]
|
#[derive(Serialize, Debug, Default)]
|
||||||
pub struct ParseStats {
|
pub struct ParseStats {
|
||||||
pub parse_summaries: Vec<ParseSummary>,
|
pub parse_summaries: Vec<ParseSummary>,
|
||||||
pub cumulative_stats: Stats,
|
pub cumulative_stats: Stats,
|
||||||
pub source_count: usize,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for ParseStats {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self {
|
|
||||||
parse_summaries: Vec::new(),
|
|
||||||
cumulative_stats: Stats::default(),
|
|
||||||
source_count: 1,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, ValueEnum, Debug, Copy, Clone, Default, Eq, PartialEq)]
|
#[derive(Serialize, ValueEnum, Debug, Copy, Clone, Default, Eq, PartialEq)]
|
||||||
|
|
@ -370,15 +357,15 @@ pub fn parse_file_at_path(
|
||||||
let progress_callback = &mut |_: &ParseState| {
|
let progress_callback = &mut |_: &ParseState| {
|
||||||
if let Some(cancellation_flag) = opts.cancellation_flag {
|
if let Some(cancellation_flag) = opts.cancellation_flag {
|
||||||
if cancellation_flag.load(Ordering::SeqCst) != 0 {
|
if cancellation_flag.load(Ordering::SeqCst) != 0 {
|
||||||
return ControlFlow::Break(());
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.timeout > 0 && start_time.elapsed().as_micros() > opts.timeout as u128 {
|
if opts.timeout > 0 && start_time.elapsed().as_micros() > opts.timeout as u128 {
|
||||||
return ControlFlow::Break(());
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
ControlFlow::Continue(())
|
false
|
||||||
};
|
};
|
||||||
|
|
||||||
let parse_opts = ParseOptions::new().progress_callback(progress_callback);
|
let parse_opts = ParseOptions::new().progress_callback(progress_callback);
|
||||||
|
|
@ -437,7 +424,7 @@ pub fn parse_file_at_path(
|
||||||
|
|
||||||
if let Some(mut tree) = tree {
|
if let Some(mut tree) = tree {
|
||||||
if opts.debug_graph && !opts.edits.is_empty() {
|
if opts.debug_graph && !opts.edits.is_empty() {
|
||||||
info!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
|
println!("BEFORE:\n{}", String::from_utf8_lossy(&source_code));
|
||||||
}
|
}
|
||||||
|
|
||||||
let edit_time = Instant::now();
|
let edit_time = Instant::now();
|
||||||
|
|
@ -447,7 +434,7 @@ pub fn parse_file_at_path(
|
||||||
tree = parser.parse(&source_code, Some(&tree)).unwrap();
|
tree = parser.parse(&source_code, Some(&tree)).unwrap();
|
||||||
|
|
||||||
if opts.debug_graph {
|
if opts.debug_graph {
|
||||||
info!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
|
println!("AFTER {i}:\n{}", String::from_utf8_lossy(&source_code));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let edit_duration = edit_time.elapsed();
|
let edit_duration = edit_time.elapsed();
|
||||||
|
|
@ -514,23 +501,63 @@ pub fn parse_file_at_path(
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.output == ParseOutput::Cst {
|
if opts.output == ParseOutput::Cst {
|
||||||
render_cst(&source_code, &tree, &mut cursor, opts, &mut stdout)?;
|
let lossy_source_code = String::from_utf8_lossy(&source_code);
|
||||||
|
let total_width = lossy_source_code
|
||||||
|
.lines()
|
||||||
|
.enumerate()
|
||||||
|
.map(|(row, col)| {
|
||||||
|
(row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1
|
||||||
|
})
|
||||||
|
.max()
|
||||||
|
.unwrap_or(1);
|
||||||
|
let mut indent_level = 1;
|
||||||
|
let mut did_visit_children = false;
|
||||||
|
let mut in_error = false;
|
||||||
|
loop {
|
||||||
|
if did_visit_children {
|
||||||
|
if cursor.goto_next_sibling() {
|
||||||
|
did_visit_children = false;
|
||||||
|
} else if cursor.goto_parent() {
|
||||||
|
did_visit_children = true;
|
||||||
|
indent_level -= 1;
|
||||||
|
if !cursor.node().has_error() {
|
||||||
|
in_error = false;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
cst_render_node(
|
||||||
|
opts,
|
||||||
|
&mut cursor,
|
||||||
|
&source_code,
|
||||||
|
&mut stdout,
|
||||||
|
total_width,
|
||||||
|
indent_level,
|
||||||
|
in_error,
|
||||||
|
)?;
|
||||||
|
if cursor.goto_first_child() {
|
||||||
|
did_visit_children = false;
|
||||||
|
indent_level += 1;
|
||||||
|
if cursor.node().has_error() {
|
||||||
|
in_error = true;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
did_visit_children = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cursor.reset(tree.root_node());
|
||||||
|
println!();
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.output == ParseOutput::Xml {
|
if opts.output == ParseOutput::Xml {
|
||||||
let mut needs_newline = false;
|
let mut needs_newline = false;
|
||||||
let mut indent_level = 2;
|
let mut indent_level = 0;
|
||||||
let mut did_visit_children = false;
|
let mut did_visit_children = false;
|
||||||
let mut had_named_children = false;
|
let mut had_named_children = false;
|
||||||
let mut tags = Vec::<&str>::new();
|
let mut tags = Vec::<&str>::new();
|
||||||
|
writeln!(&mut stdout, "<?xml version=\"1.0\"?>")?;
|
||||||
// If we're parsing the first file, write the header
|
|
||||||
if opts.stats.parse_summaries.is_empty() {
|
|
||||||
writeln!(&mut stdout, "<?xml version=\"1.0\"?>")?;
|
|
||||||
writeln!(&mut stdout, "<sources>")?;
|
|
||||||
}
|
|
||||||
writeln!(&mut stdout, " <source name=\"{}\">", path.display())?;
|
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
let node = cursor.node();
|
let node = cursor.node();
|
||||||
let is_named = node.is_named();
|
let is_named = node.is_named();
|
||||||
|
|
@ -545,7 +572,7 @@ pub fn parse_file_at_path(
|
||||||
write!(&mut stdout, "</{}>", tag.expect("there is a tag"))?;
|
write!(&mut stdout, "</{}>", tag.expect("there is a tag"))?;
|
||||||
// we only write a line in the case where it's the last sibling
|
// we only write a line in the case where it's the last sibling
|
||||||
if let Some(parent) = node.parent() {
|
if let Some(parent) = node.parent() {
|
||||||
if parent.child(parent.child_count() as u32 - 1).unwrap() == node {
|
if parent.child(parent.child_count() - 1).unwrap() == node {
|
||||||
stdout.write_all(b"\n")?;
|
stdout.write_all(b"\n")?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -609,14 +636,8 @@ pub fn parse_file_at_path(
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
writeln!(&mut stdout)?;
|
|
||||||
writeln!(&mut stdout, " </source>")?;
|
|
||||||
|
|
||||||
// If we parsed the last file, write the closing tag for the `sources` header
|
|
||||||
if opts.stats.parse_summaries.len() == opts.stats.source_count - 1 {
|
|
||||||
writeln!(&mut stdout, "</sources>")?;
|
|
||||||
}
|
|
||||||
cursor.reset(tree.root_node());
|
cursor.reset(tree.root_node());
|
||||||
|
println!();
|
||||||
}
|
}
|
||||||
|
|
||||||
if opts.output == ParseOutput::Dot {
|
if opts.output == ParseOutput::Dot {
|
||||||
|
|
@ -674,9 +695,10 @@ pub fn parse_file_at_path(
|
||||||
width = max_path_length
|
width = max_path_length
|
||||||
)?;
|
)?;
|
||||||
if let Some(node) = first_error {
|
if let Some(node) = first_error {
|
||||||
let node_kind = node.kind();
|
let start = node.start_position();
|
||||||
let mut node_text = String::with_capacity(node_kind.len());
|
let end = node.end_position();
|
||||||
for c in node_kind.chars() {
|
let mut node_text = String::new();
|
||||||
|
for c in node.kind().chars() {
|
||||||
if let Some(escaped) = escape_invisible(c) {
|
if let Some(escaped) = escape_invisible(c) {
|
||||||
node_text += escaped;
|
node_text += escaped;
|
||||||
} else {
|
} else {
|
||||||
|
|
@ -693,9 +715,6 @@ pub fn parse_file_at_path(
|
||||||
} else {
|
} else {
|
||||||
write!(&mut stdout, "{node_text}")?;
|
write!(&mut stdout, "{node_text}")?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let start = node.start_position();
|
|
||||||
let end = node.end_position();
|
|
||||||
write!(
|
write!(
|
||||||
&mut stdout,
|
&mut stdout,
|
||||||
" [{}, {}] - [{}, {}])",
|
" [{}, {}] - [{}, {}])",
|
||||||
|
|
@ -762,77 +781,12 @@ const fn escape_invisible(c: char) -> Option<&'static str> {
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
const fn escape_delimiter(c: char) -> Option<&'static str> {
|
|
||||||
Some(match c {
|
|
||||||
'`' => "\\`",
|
|
||||||
'\"' => "\\\"",
|
|
||||||
_ => return None,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn render_cst<'a, 'b: 'a>(
|
|
||||||
source_code: &[u8],
|
|
||||||
tree: &'b Tree,
|
|
||||||
cursor: &mut TreeCursor<'a>,
|
|
||||||
opts: &ParseFileOptions,
|
|
||||||
out: &mut impl Write,
|
|
||||||
) -> Result<()> {
|
|
||||||
let lossy_source_code = String::from_utf8_lossy(source_code);
|
|
||||||
let total_width = lossy_source_code
|
|
||||||
.lines()
|
|
||||||
.enumerate()
|
|
||||||
.map(|(row, col)| (row as f64).log10() as usize + (col.len() as f64).log10() as usize + 1)
|
|
||||||
.max()
|
|
||||||
.unwrap_or(1);
|
|
||||||
let mut indent_level = usize::from(!opts.no_ranges);
|
|
||||||
let mut did_visit_children = false;
|
|
||||||
let mut in_error = false;
|
|
||||||
loop {
|
|
||||||
if did_visit_children {
|
|
||||||
if cursor.goto_next_sibling() {
|
|
||||||
did_visit_children = false;
|
|
||||||
} else if cursor.goto_parent() {
|
|
||||||
did_visit_children = true;
|
|
||||||
indent_level -= 1;
|
|
||||||
if !cursor.node().has_error() {
|
|
||||||
in_error = false;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
cst_render_node(
|
|
||||||
opts,
|
|
||||||
cursor,
|
|
||||||
source_code,
|
|
||||||
out,
|
|
||||||
total_width,
|
|
||||||
indent_level,
|
|
||||||
in_error,
|
|
||||||
)?;
|
|
||||||
if cursor.goto_first_child() {
|
|
||||||
did_visit_children = false;
|
|
||||||
indent_level += 1;
|
|
||||||
if cursor.node().has_error() {
|
|
||||||
in_error = true;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
did_visit_children = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
cursor.reset(tree.root_node());
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
fn render_node_text(source: &str) -> String {
|
fn render_node_text(source: &str) -> String {
|
||||||
source
|
source
|
||||||
.chars()
|
.chars()
|
||||||
.fold(String::with_capacity(source.len()), |mut acc, c| {
|
.fold(String::with_capacity(source.len()), |mut acc, c| {
|
||||||
if let Some(esc) = escape_invisible(c) {
|
if let Some(esc) = escape_invisible(c) {
|
||||||
acc.push_str(esc);
|
acc.push_str(esc);
|
||||||
} else if let Some(esc) = escape_delimiter(c) {
|
|
||||||
acc.push_str(esc);
|
|
||||||
} else {
|
} else {
|
||||||
acc.push(c);
|
acc.push(c);
|
||||||
}
|
}
|
||||||
|
|
@ -842,7 +796,7 @@ fn render_node_text(source: &str) -> String {
|
||||||
|
|
||||||
fn write_node_text(
|
fn write_node_text(
|
||||||
opts: &ParseFileOptions,
|
opts: &ParseFileOptions,
|
||||||
out: &mut impl Write,
|
stdout: &mut StdoutLock<'static>,
|
||||||
cursor: &TreeCursor,
|
cursor: &TreeCursor,
|
||||||
is_named: bool,
|
is_named: bool,
|
||||||
source: &str,
|
source: &str,
|
||||||
|
|
@ -858,7 +812,7 @@ fn write_node_text(
|
||||||
|
|
||||||
if !is_named {
|
if !is_named {
|
||||||
write!(
|
write!(
|
||||||
out,
|
stdout,
|
||||||
"{}{}{}",
|
"{}{}{}",
|
||||||
paint(quote_color, &String::from(quote)),
|
paint(quote_color, &String::from(quote)),
|
||||||
paint(color, &render_node_text(source)),
|
paint(color, &render_node_text(source)),
|
||||||
|
|
@ -882,24 +836,35 @@ fn write_node_text(
|
||||||
0
|
0
|
||||||
};
|
};
|
||||||
let formatted_line = render_line_feed(line, opts);
|
let formatted_line = render_line_feed(line, opts);
|
||||||
write!(
|
if !opts.no_ranges {
|
||||||
out,
|
write!(
|
||||||
"{}{}{}{}{}{}",
|
stdout,
|
||||||
if multiline { "\n" } else { " " },
|
"{}{}{}{}{}{}",
|
||||||
if multiline && !opts.no_ranges {
|
if multiline { "\n" } else { "" },
|
||||||
render_node_range(opts, cursor, is_named, true, total_width, node_range)
|
if multiline {
|
||||||
} else {
|
render_node_range(opts, cursor, is_named, true, total_width, node_range)
|
||||||
String::new()
|
} else {
|
||||||
},
|
String::new()
|
||||||
if multiline {
|
},
|
||||||
" ".repeat(indent_level + 1)
|
if multiline {
-            } else {
-                String::new()
-            },
-            paint(quote_color, &String::from(quote)),
-            paint(color, &render_node_text(&formatted_line)),
-            paint(quote_color, &String::from(quote)),
-        )?;
+                " ".repeat(indent_level + 1)
+            } else {
+                String::new()
+            },
+            paint(quote_color, &String::from(quote)),
+            &paint(color, &render_node_text(&formatted_line)),
+            paint(quote_color, &String::from(quote)),
+        )?;
+    } else {
+        write!(
+            stdout,
+            "\n{}{}{}{}",
+            " ".repeat(indent_level + 1),
+            paint(quote_color, &String::from(quote)),
+            &paint(color, &render_node_text(&formatted_line)),
+            paint(quote_color, &String::from(quote)),
+        )?;
+    }
     }
 }
@@ -953,9 +918,9 @@ fn render_node_range(
 
 fn cst_render_node(
     opts: &ParseFileOptions,
-    cursor: &TreeCursor,
+    cursor: &mut TreeCursor,
     source_code: &[u8],
-    out: &mut impl Write,
+    stdout: &mut StdoutLock<'static>,
     total_width: usize,
     indent_level: usize,
     in_error: bool,
@@ -964,13 +929,13 @@ fn cst_render_node(
     let is_named = node.is_named();
     if !opts.no_ranges {
         write!(
-            out,
+            stdout,
             "{}",
             render_node_range(opts, cursor, is_named, false, total_width, node.range())
         )?;
     }
     write!(
-        out,
+        stdout,
         "{}{}",
         " ".repeat(indent_level),
         if in_error && !node.has_error() {
@@ -982,14 +947,14 @@ fn cst_render_node(
     if is_named {
         if let Some(field_name) = cursor.field_name() {
             write!(
-                out,
+                stdout,
                 "{}",
                 paint(opts.parse_theme.field, &format!("{field_name}: "))
             )?;
         }
 
         if node.has_error() || node.is_error() {
-            write!(out, "{}", paint(opts.parse_theme.error, "•"))?;
+            write!(stdout, "{}", paint(opts.parse_theme.error, "•"))?;
         }
 
         let kind_color = if node.is_error() {
@@ -999,13 +964,13 @@ fn cst_render_node(
         } else {
             opts.parse_theme.node_kind
         };
-        write!(out, "{}", paint(kind_color, node.kind()))?;
+        write!(stdout, "{} ", paint(kind_color, node.kind()))?;
 
         if node.child_count() == 0 {
             // Node text from a pattern or external scanner
             write_node_text(
                 opts,
-                out,
+                stdout,
                 cursor,
                 is_named,
                 &String::from_utf8_lossy(&source_code[node.start_byte()..node.end_byte()]),
@@ -1014,13 +979,17 @@ fn cst_render_node(
             )?;
         }
     } else if node.is_missing() {
-        write!(out, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
-        write!(out, "\"{}\"", paint(opts.parse_theme.missing, node.kind()))?;
+        write!(stdout, "{}: ", paint(opts.parse_theme.missing, "MISSING"))?;
+        write!(
+            stdout,
+            "\"{}\"",
+            paint(opts.parse_theme.missing, node.kind())
+        )?;
     } else {
         // Terminal literals, like "fn"
         write_node_text(
             opts,
-            out,
+            stdout,
             cursor,
             is_named,
             node.kind(),
@@ -1028,7 +997,7 @@ fn cst_render_node(
             (total_width, indent_level),
         )?;
     }
-    writeln!(out)?;
+    writeln!(stdout)?;
 
     Ok(())
 }
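Across these hunks the renderer's writer parameter changes from a generic out: &mut impl Write to a concrete stdout: &mut StdoutLock<'static>, and the tree cursor is now taken as &mut TreeCursor. A minimal sketch of the locked-stdout pattern follows, assuming only the Rust standard library; the render function and node kinds are hypothetical stand-ins, not the CLI's actual API. Locking stdout once and threading the lock through the traversal avoids re-acquiring the lock on every write! call.

use std::io::{self, Write};

// Hypothetical stand-in for the CST renderer's writer plumbing.
fn render(stdout: &mut io::StdoutLock<'static>, depth: usize, kind: &str) -> io::Result<()> {
    writeln!(stdout, "{}{kind}", "  ".repeat(depth))
}

fn main() -> io::Result<()> {
    // Stdout::lock returns StdoutLock<'static> (Rust 1.61+), so one lock
    // can be acquired up front and passed down the whole traversal.
    let mut stdout = io::stdout().lock();
    render(&mut stdout, 0, "source_file")?;
    render(&mut stdout, 1, "function_item")
}

The 'static lifetime on the lock is what lets it be stored and passed freely between functions rather than being tied to a borrow of a local Stdout handle.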
410 cli/src/playground.html Normal file
@@ -0,0 +1,410 @@
<head>
  <meta charset="utf-8">
  <title>tree-sitter THE_LANGUAGE_NAME</title>
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.css">
  <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.css">
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-32x32.png"
    sizes="32x32" />
  <link rel="icon" type="image/png" href="https://tree-sitter.github.io/tree-sitter/assets/images/favicon-16x16.png"
    sizes="16x16" />
</head>

<body>
  <div id="playground-container" style="visibility: hidden;">
    <header>
      <div class="header-item">
        <span class="language-name">Language: THE_LANGUAGE_NAME</span>
      </div>

      <div class="header-item">
        <input id="logging-checkbox" type="checkbox">
        <label for="logging-checkbox">log</label>
      </div>

      <div class="header-item">
        <input id="anonymous-nodes-checkbox" type="checkbox">
        <label for="anonymous-nodes-checkbox">show anonymous nodes</label>
      </div>

      <div class="header-item">
        <input id="query-checkbox" type="checkbox">
        <label for="query-checkbox">query</label>
      </div>

      <div class="header-item">
        <input id="accessibility-checkbox" type="checkbox">
        <label for="accessibility-checkbox">accessibility</label>
      </div>

      <div class="header-item">
        <label for="update-time">parse time: </label>
        <span id="update-time"></span>
      </div>

      <div class="header-item">
        <a href="https://tree-sitter.github.io/tree-sitter/7-playground.html#about">(?)</a>
      </div>

      <select id="language-select" style="display: none;">
        <option value="parser">Parser</option>
      </select>

      <div class="header-item">
        <button id="theme-toggle" class="theme-toggle" aria-label="Toggle theme">
          <svg class="sun-icon" viewBox="0 0 24 24" width="16" height="16">
            <path fill="currentColor"
              d="M12 17.5a5.5 5.5 0 1 0 0-11 5.5 5.5 0 0 0 0 11zm0 1.5a7 7 0 1 1 0-14 7 7 0 0 1 0 14zm0-16a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0V4a1 1 0 0 1 1-1zm0 15a1 1 0 0 1 1 1v2a1 1 0 1 1-2 0v-2a1 1 0 0 1 1-1zm9-9a1 1 0 0 1-1 1h-2a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1zM4 12a1 1 0 0 1-1 1H1a1 1 0 1 1 0-2h2a1 1 0 0 1 1 1z" />
          </svg>
          <svg class="moon-icon" viewBox="0 0 24 24" width="16" height="16">
            <path fill="currentColor"
              d="M12.1 22c-5.5 0-10-4.5-10-10s4.5-10 10-10c.2 0 .3 0 .5.1-1.3 1.4-2 3.2-2 5.2 0 4.1 3.4 7.5 7.5 7.5 2 0 3.8-.7 5.2-2 .1.2.1.3.1.5 0 5.4-4.5 9.7-10 9.7z" />
          </svg>
        </button>
      </div>
    </header>

    <main>
      <div id="input-pane">
        <div class="panel-header">Code</div>
        <div id="code-container">
          <textarea id="code-input"></textarea>
        </div>

        <div id="query-container" style="visibility: hidden; position: absolute;">
          <div class="panel-header">Query</div>
          <textarea id="query-input"></textarea>
        </div>
      </div>

      <div id="output-container-scroll">
        <div class="panel-header">Tree</div>
        <pre id="output-container" class="highlight"></pre>
      </div>
    </main>
  </div>

  <script src="https://code.jquery.com/jquery-3.3.1.min.js" crossorigin="anonymous">
  </script>

  <script src="https://cdnjs.cloudflare.com/ajax/libs/codemirror/6.65.7/codemirror.min.js"></script>
  <script src="https://cdnjs.cloudflare.com/ajax/libs/clusterize.js/0.19.0/clusterize.min.js"></script>

  <script>LANGUAGE_BASE_URL = "";</script>
  <script type="module" src="playground.js"></script>
  <script type="module">
    import * as TreeSitter from './tree-sitter.js';
    window.TreeSitter = TreeSitter;
    setTimeout(() => window.initializePlayground({local: true}), 1)
  </script>

  <style>
    /* Base Variables */
    :root {
      --light-bg: #f9f9f9;
      --light-border: #e0e0e0;
      --light-text: #333;
      --light-hover-border: #c1c1c1;
      --light-scrollbar-track: #f1f1f1;
      --light-scrollbar-thumb: #c1c1c1;
      --light-scrollbar-thumb-hover: #a8a8a8;

      --dark-bg: #1d1f21;
      --dark-border: #2d2d2d;
      --dark-text: #c5c8c6;
      --dark-panel-bg: #252526;
      --dark-code-bg: #1e1e1e;
      --dark-scrollbar-track: #25282c;
      --dark-scrollbar-thumb: #4a4d51;
      --dark-scrollbar-thumb-hover: #5a5d61;

      --primary-color: #0550ae;
      --primary-color-alpha: rgba(5, 80, 174, 0.1);
      --primary-color-alpha-dark: rgba(121, 192, 255, 0.1);
      --selection-color: rgba(39, 95, 255, 0.3);
    }

    /* Theme Colors */
    [data-theme="dark"] {
      --bg-color: var(--dark-bg);
      --border-color: var(--dark-border);
      --text-color: var(--dark-text);
      --panel-bg: var(--dark-panel-bg);
      --code-bg: var(--dark-code-bg);
    }

    [data-theme="light"] {
      --bg-color: var(--light-bg);
      --border-color: var(--light-border);
      --text-color: var(--light-text);
      --panel-bg: white;
      --code-bg: white;
    }

    /* Base Styles */
    body {
      margin: 0;
      padding: 0;
      font-family: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif;
      background-color: var(--bg-color);
      color: var(--text-color);
    }

    /* Layout */
    #playground-container {
      width: 100%;
      height: 100vh;
      display: flex;
      flex-direction: column;
      background-color: var(--bg-color);
    }

    header {
      padding: 16px 24px;
      border-bottom: 1px solid var(--border-color);
      display: flex;
      align-items: center;
      gap: 20px;
      background-color: var(--panel-bg);
      font-size: 14px;
    }

    .header-item {
      display: flex;
      align-items: center;
      gap: 8px;
    }

    .language-name {
      font-weight: 600;
    }

    main {
      flex: 1;
      display: flex;
      overflow: hidden;
    }

    #input-pane {
      width: 50%;
      display: flex;
      flex-direction: column;
      border-right: 1px solid var(--border-color);
      background-color: var(--panel-bg);
      overflow: hidden;
    }

    #code-container {
      flex: 1;
      min-height: 0;
      position: relative;
      border-bottom: 1px solid var(--border-color);
      display: flex;
      flex-direction: column;
    }

    #query-container:not([style*="visibility: hidden"]) {
      flex: 1;
      min-height: 0;
      display: flex;
      flex-direction: column;
    }

    #query-container .panel-header {
      flex: 0 0 auto;
    }

    #query-container .CodeMirror {
      flex: 1;
      position: relative;
      min-height: 0;
    }

    #output-container-scroll {
      width: 50%;
      overflow: auto;
      background-color: var(--panel-bg);
      padding: 0;
      display: flex;
      flex-direction: column;
    }

    #output-container {
      font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
      line-height: 1.5;
      margin: 0;
      padding: 16px;
    }

    .panel-header {
      padding: 8px 16px;
      font-weight: 600;
      font-size: 14px;
      border-bottom: 1px solid var(--border-color);
      background-color: var(--panel-bg);
    }

    .CodeMirror {
      position: absolute;
      top: 0;
      left: 0;
      right: 0;
      bottom: 0;
      height: 100%;
      font-family: ui-monospace, "SF Mono", Menlo, Consolas, monospace;
      font-size: 14px;
      line-height: 1.6;
      background-color: var(--code-bg) !important;
      color: var(--text-color) !important;
    }

    .query-error {
      text-decoration: underline red dashed;
      -webkit-text-decoration: underline red dashed;
    }

    /* Scrollbars */
    ::-webkit-scrollbar {
      width: 8px;
      height: 8px;
    }

    ::-webkit-scrollbar-track {
      border-radius: 4px;
      background: var(--light-scrollbar-track);
    }

    ::-webkit-scrollbar-thumb {
      border-radius: 4px;
      background: var(--light-scrollbar-thumb);
    }

    ::-webkit-scrollbar-thumb:hover {
      background: var(--light-scrollbar-thumb-hover);
    }

    [data-theme="dark"] {
      ::-webkit-scrollbar-track {
        background: var(--dark-scrollbar-track) !important;
      }

      ::-webkit-scrollbar-thumb {
        background: var(--dark-scrollbar-thumb) !important;
      }

      ::-webkit-scrollbar-thumb:hover {
        background: var(--dark-scrollbar-thumb-hover) !important;
      }
    }

    /* Theme Toggle */
    .theme-toggle {
      background: none;
      border: 1px solid var(--border-color);
      border-radius: 4px;
      padding: 6px;
      cursor: pointer;
      color: var(--text-color);
    }

    .theme-toggle:hover {
      background-color: var(--primary-color-alpha);
    }

    [data-theme="light"] .moon-icon,
    [data-theme="dark"] .sun-icon {
      display: none;
    }

    /* Form Elements */
    input[type="checkbox"] {
      margin-right: 6px;
      vertical-align: middle;
    }

    label {
      font-size: 14px;
      margin-right: 16px;
      cursor: pointer;
    }

    #output-container a {
      cursor: pointer;
      text-decoration: none;
      color: #040404;
      padding: 2px;
    }

    #output-container a:hover {
      text-decoration: underline;
    }

    #output-container a.node-link.named {
      color: #0550ae;
    }

    #output-container a.node-link.anonymous {
      color: #116329;
    }

    #output-container a.node-link.anonymous:before {
      content: '"';
    }

    #output-container a.node-link.anonymous:after {
      content: '"';
    }

    #output-container a.node-link.error {
      color: #cf222e;
    }

    #output-container a.highlighted {
      background-color: #d9d9d9;
      color: red;
      border-radius: 3px;
      text-decoration: underline;
    }

    /* Dark Theme Node Colors */
    [data-theme="dark"] {
      & #output-container a {
        color: #d4d4d4;
      }

      & #output-container a.node-link.named {
        color: #79c0ff;
      }

      & #output-container a.node-link.anonymous {
        color: #7ee787;
      }

      & #output-container a.node-link.error {
        color: #ff7b72;
      }

      & #output-container a.highlighted {
        background-color: #373b41;
        color: red;
      }

      & .CodeMirror {
        background-color: var(--dark-code-bg) !important;
        color: var(--dark-text) !important;
      }

      & .CodeMirror-gutters {
        background-color: var(--dark-panel-bg) !important;
        border-color: var(--dark-border) !important;
      }

      & .CodeMirror-cursor {
        border-color: var(--dark-text) !important;
      }

      & .CodeMirror-selected {
        background-color: rgba(255, 255, 255, 0.1) !important;
      }
    }
  </style>
</body>
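Note the literal THE_LANGUAGE_NAME and LANGUAGE_BASE_URL strings above: this page is a template with placeholders rather than a finished document. A minimal sketch of how such a template could be filled in before being served, assuming plain string substitution; instantiate_playground is a hypothetical helper for illustration, not necessarily how the CLI prepares this page.

// Hypothetical helper: fill the template's placeholder before serving it.
fn instantiate_playground(template: &str, language_name: &str) -> String {
    template.replace("THE_LANGUAGE_NAME", language_name)
}

fn main() {
    let template = "<title>tree-sitter THE_LANGUAGE_NAME</title>";
    // Prints: <title>tree-sitter javascript</title>
    println!("{}", instantiate_playground(template, "javascript"));
}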
Some files were not shown because too many files have changed in this diff.