diff --git a/.editorconfig b/.editorconfig index 53780b34..0b70460a 100644 --- a/.editorconfig +++ b/.editorconfig @@ -10,6 +10,9 @@ insert_final_newline = true [*.rs] indent_size = 4 +[*.{zig,zon}] +indent_size = 4 + [Makefile] indent_style = tab indent_size = 8 diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6665c932..e6fed643 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -139,6 +139,8 @@ jobs: [[ -n $runner ]] && printf 'CROSS_RUNNER=%s\n' "$runner" >> $GITHUB_ENV fi + # TODO: Remove RUSTFLAGS="--cap-lints allow" once we use a wasmtime release that addresses + # the `mismatched-lifetime-syntaxes` lint - name: Build wasmtime library if: ${{ !matrix.use-cross && contains(matrix.features, 'wasm') }} run: | @@ -156,6 +158,7 @@ jobs: printf 'CMAKE_PREFIX_PATH=%s\n' "$PWD/artifacts" >> $GITHUB_ENV env: WASMTIME_REPO: https://github.com/bytecodealliance/wasmtime + RUSTFLAGS: "--cap-lints allow" - name: Build C library (make) if: ${{ runner.os != 'Windows' }} @@ -195,6 +198,13 @@ jobs: npm run build npm run build:debug + - name: Check no_std builds + if: ${{ !matrix.no-run && inputs.run-test }} + shell: bash + run: | + cd lib + $BUILD_CMD check --no-default-features + - name: Build target run: $BUILD_CMD build --release --target=${{ matrix.target }} --features=${{ matrix.features }} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1c983757..01fb165b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,11 +32,6 @@ jobs: uses: actions-rust-lang/setup-rust-toolchain@v1 with: toolchain: stable - - - name: Set up nightly Rust toolchain - uses: actions-rust-lang/setup-rust-toolchain@v1 - with: - toolchain: nightly components: clippy, rustfmt - name: Lint files diff --git a/Cargo.lock b/Cargo.lock index a6deb5f4..39cec39c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1143,7 +1143,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ "cfg-if", - "windows-targets 0.52.6", + "windows-targets 0.48.5", ] [[package]] @@ -1615,15 +1615,14 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.8" +version = "0.17.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "70ac5d832aa16abd7d1def883a8545280c20a60f523a370aa3a9617c2b8550ee" dependencies = [ "cc", "cfg-if", "getrandom", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] @@ -1787,12 +1786,6 @@ dependencies = [ "serde", ] -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - [[package]] name = "sptr" version = "0.3.2" @@ -2011,6 +2004,12 @@ dependencies = [ "winnow", ] +[[package]] +name = "topological-sort" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea68304e134ecd095ac6c3574494fc62b909f416c4fca77e440530221e549d3d" + [[package]] name = "tracing" version = "0.1.41" @@ -2044,7 +2043,7 @@ dependencies = [ [[package]] name = "tree-sitter" -version = "0.25.1" +version = "0.25.9" dependencies = [ "bindgen", "cc", @@ -2058,7 +2057,7 @@ dependencies = [ [[package]] name = "tree-sitter-cli" -version = "0.25.1" +version = "0.25.9" dependencies = [ "ansi_colours", "anstyle", @@ -2093,6 +2092,7 @@ dependencies = [ "streaming-iterator", "tempfile", 
"tiny_http", + "topological-sort", "tree-sitter", "tree-sitter-config", "tree-sitter-generate", @@ -2110,7 +2110,7 @@ dependencies = [ [[package]] name = "tree-sitter-config" -version = "0.25.1" +version = "0.25.9" dependencies = [ "anyhow", "etcetera", @@ -2120,7 +2120,7 @@ dependencies = [ [[package]] name = "tree-sitter-generate" -version = "0.25.1" +version = "0.25.9" dependencies = [ "anyhow", "heck", @@ -2135,13 +2135,14 @@ dependencies = [ "serde_json", "smallbitvec", "thiserror 2.0.11", + "topological-sort", "tree-sitter", "url", ] [[package]] name = "tree-sitter-highlight" -version = "0.25.1" +version = "0.25.9" dependencies = [ "regex", "streaming-iterator", @@ -2151,11 +2152,11 @@ dependencies = [ [[package]] name = "tree-sitter-language" -version = "0.1.4" +version = "0.1.5" [[package]] name = "tree-sitter-loader" -version = "0.25.1" +version = "0.25.9" dependencies = [ "anyhow", "cc", @@ -2178,7 +2179,7 @@ dependencies = [ [[package]] name = "tree-sitter-tags" -version = "0.25.1" +version = "0.25.9" dependencies = [ "memchr", "regex", diff --git a/Cargo.toml b/Cargo.toml index de45d7d2..ace5381d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,6 +3,7 @@ default-members = ["cli"] members = [ "cli", "cli/config", + "cli/generate", "cli/loader", "lib", "lib/language", @@ -13,7 +14,7 @@ members = [ resolver = "2" [workspace.package] -version = "0.25.1" +version = "0.25.9" authors = [ "Max Brunsfeld ", "Amaan Qureshi ", @@ -59,6 +60,8 @@ missing_errors_doc = "allow" missing_panics_doc = "allow" module_name_repetitions = "allow" multiple_crate_versions = "allow" +needless_for_each = "allow" +obfuscated_if_else = "allow" option_if_let_else = "allow" or_fun_call = "allow" range_plus_one = "allow" @@ -75,6 +78,9 @@ unnecessary_wraps = "allow" unused_self = "allow" used_underscore_items = "allow" +[workspace.lints.rust] +mismatched_lifetime_syntaxes = "allow" + [profile.optimize] inherits = "release" strip = true # Automatically strip symbols from the binary. 
@@ -143,15 +149,16 @@ tempfile = "3.15.0" thiserror = "2.0.11" tiny_http = "0.12.0" toml = "0.8.19" +topological-sort = "0.2.2" unindent = "0.2.3" url = { version = "2.5.4", features = ["serde"] } walkdir = "2.5.0" wasmparser = "0.224.0" webbrowser = "1.0.3" -tree-sitter = { version = "0.25.1", path = "./lib" } -tree-sitter-generate = { version = "0.25.1", path = "./cli/generate" } -tree-sitter-loader = { version = "0.25.1", path = "./cli/loader" } -tree-sitter-config = { version = "0.25.1", path = "./cli/config" } -tree-sitter-highlight = { version = "0.25.1", path = "./highlight" } -tree-sitter-tags = { version = "0.25.1", path = "./tags" } +tree-sitter = { version = "0.25.9", path = "./lib" } +tree-sitter-generate = { version = "0.25.9", path = "./cli/generate" } +tree-sitter-loader = { version = "0.25.9", path = "./cli/loader" } +tree-sitter-config = { version = "0.25.9", path = "./cli/config" } +tree-sitter-highlight = { version = "0.25.9", path = "./highlight" } +tree-sitter-tags = { version = "0.25.9", path = "./tags" } diff --git a/Makefile b/Makefile index 0c99eeef..3f5f5a4b 100644 --- a/Makefile +++ b/Makefile @@ -2,7 +2,7 @@ ifeq ($(OS),Windows_NT) $(error Windows is not supported) endif -VERSION := 0.25.1 +VERSION := 0.25.9 DESCRIPTION := An incremental parsing system for programming tools HOMEPAGE_URL := https://tree-sitter.github.io/tree-sitter/ @@ -106,15 +106,15 @@ test-wasm: lint: cargo update --workspace --locked --quiet cargo check --workspace --all-targets - cargo +nightly fmt --all --check - cargo +nightly clippy --workspace --all-targets -- -D warnings + cargo fmt --all --check + cargo clippy --workspace --all-targets -- -D warnings lint-web: npm --prefix lib/binding_web ci npm --prefix lib/binding_web run lint format: - cargo +nightly fmt --all + cargo fmt --all changelog: @git-cliff --config .github/cliff.toml --prepend CHANGELOG.md --latest --github-token $(shell gh auth token) diff --git a/build.zig b/build.zig index 7496b4ac..66a448cb 100644 --- a/build.zig +++ b/build.zig @@ -1,116 +1,140 @@ const std = @import("std"); pub fn build(b: *std.Build) !void { - const target = b.standardTargetOptions(.{}); - const optimize = b.standardOptimizeOption(.{}); + const target = b.standardTargetOptions(.{}); + const optimize = b.standardOptimizeOption(.{}); - const wasm = b.option(bool, "enable-wasm", "Enable Wasm support") orelse false; - const shared = b.option(bool, "build-shared", "Build a shared library") orelse false; - const amalgamated = b.option(bool, "amalgamated", "Build using an amalgamated source") orelse false; + const wasm = b.option(bool, "enable-wasm", "Enable Wasm support") orelse false; + const shared = b.option(bool, "build-shared", "Build a shared library") orelse false; + const amalgamated = b.option(bool, "amalgamated", "Build using an amalgamated source") orelse false; - const lib: *std.Build.Step.Compile = if (!shared) b.addStaticLibrary(.{ - .name = "tree-sitter", - .target = target, - .optimize = optimize, - .link_libc = true, - }) else b.addSharedLibrary(.{ - .name = "tree-sitter", - .pic = true, - .target = target, - .optimize = optimize, - .link_libc = true, - }); - - if (amalgamated) { - lib.addCSourceFile(.{ - .file = b.path("lib/src/lib.c"), - .flags = &.{"-std=c11"}, + const lib: *std.Build.Step.Compile = b.addLibrary(.{ + .name = "tree-sitter", + .linkage = if (shared) .dynamic else .static, + .root_module = b.createModule(.{ + .target = target, + .optimize = optimize, + .link_libc = true, + .pic = if (shared) true else null, + }), }); - } 
else { - lib.addCSourceFiles(.{ - .root = b.path("lib/src"), - .files = try findSourceFiles(b), - .flags = &.{"-std=c11"}, - }); - } - lib.addIncludePath(b.path("lib/include")); - lib.addIncludePath(b.path("lib/src")); - lib.addIncludePath(b.path("lib/src/wasm")); - - lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L"); - lib.root_module.addCMacro("_DEFAULT_SOURCE", ""); - - if (wasm) { - if (b.lazyDependency(wasmtimeDep(target.result), .{})) |wasmtime| { - lib.root_module.addCMacro("TREE_SITTER_FEATURE_WASM", ""); - lib.addSystemIncludePath(wasmtime.path("include")); - lib.addLibraryPath(wasmtime.path("lib")); - lib.linkSystemLibrary("wasmtime"); + if (amalgamated) { + lib.addCSourceFile(.{ + .file = b.path("lib/src/lib.c"), + .flags = &.{"-std=c11"}, + }); + } else { + const files = try findSourceFiles(b); + defer b.allocator.free(files); + lib.addCSourceFiles(.{ + .root = b.path("lib/src"), + .files = files, + .flags = &.{"-std=c11"}, + }); } - } - lib.installHeadersDirectory(b.path("lib/include"), ".", .{}); + lib.addIncludePath(b.path("lib/include")); + lib.addIncludePath(b.path("lib/src")); + lib.addIncludePath(b.path("lib/src/wasm")); - b.installArtifact(lib); + lib.root_module.addCMacro("_POSIX_C_SOURCE", "200112L"); + lib.root_module.addCMacro("_DEFAULT_SOURCE", ""); + + if (wasm) { + if (b.lazyDependency(wasmtimeDep(target.result), .{})) |wasmtime| { + lib.root_module.addCMacro("TREE_SITTER_FEATURE_WASM", ""); + lib.addSystemIncludePath(wasmtime.path("include")); + lib.addLibraryPath(wasmtime.path("lib")); + if (shared) lib.linkSystemLibrary("wasmtime"); + } + } + + lib.installHeadersDirectory(b.path("lib/include"), ".", .{}); + + b.installArtifact(lib); } -fn wasmtimeDep(target: std.Target) []const u8 { - const arch = target.cpu.arch; - const os = target.os.tag; - const abi = target.abi; - return switch (os) { - .linux => switch (arch) { - .x86_64 => switch (abi) { - .gnu => "wasmtime_c_api_x86_64_linux", - .musl => "wasmtime_c_api_x86_64_musl", - .android => "wasmtime_c_api_x86_64_android", - else => null - }, - .aarch64 => switch (abi) { - .gnu => "wasmtime_c_api_aarch64_linux", - .android => "wasmtime_c_api_aarch64_android", - else => null - }, - .s390x => "wasmtime_c_api_s390x_linux", - .riscv64 => "wasmtime_c_api_riscv64gc_linux", - else => null - }, - .windows => switch (arch) { - .x86_64 => switch (abi) { - .gnu => "wasmtime_c_api_x86_64_mingw", - .msvc => "wasmtime_c_api_x86_64_windows", - else => null - }, - else => null - }, - .macos => switch (arch) { - .x86_64 => "wasmtime_c_api_x86_64_macos", - .aarch64 => "wasmtime_c_api_aarch64_macos", - else => null - }, - else => null - } orelse std.debug.panic( - "Unsupported target for wasmtime: {s}-{s}-{s}", - .{ @tagName(arch), @tagName(os), @tagName(abi) } - ); +/// Get the name of the wasmtime dependency for this target. 
+pub fn wasmtimeDep(target: std.Target) []const u8 { + const arch = target.cpu.arch; + const os = target.os.tag; + const abi = target.abi; + return @as(?[]const u8, switch (os) { + .linux => switch (arch) { + .x86_64 => switch (abi) { + .gnu => "wasmtime_c_api_x86_64_linux", + .musl => "wasmtime_c_api_x86_64_musl", + .android => "wasmtime_c_api_x86_64_android", + else => null, + }, + .aarch64 => switch (abi) { + .gnu => "wasmtime_c_api_aarch64_linux", + .musl => "wasmtime_c_api_aarch64_musl", + .android => "wasmtime_c_api_aarch64_android", + else => null, + }, + .x86 => switch (abi) { + .gnu => "wasmtime_c_api_i686_linux", + else => null, + }, + .arm => switch (abi) { + .gnueabi => "wasmtime_c_api_armv7_linux", + else => null, + }, + .s390x => switch (abi) { + .gnu => "wasmtime_c_api_s390x_linux", + else => null, + }, + .riscv64 => switch (abi) { + .gnu => "wasmtime_c_api_riscv64gc_linux", + else => null, + }, + else => null, + }, + .windows => switch (arch) { + .x86_64 => switch (abi) { + .gnu => "wasmtime_c_api_x86_64_mingw", + .msvc => "wasmtime_c_api_x86_64_windows", + else => null, + }, + .aarch64 => switch (abi) { + .msvc => "wasmtime_c_api_aarch64_windows", + else => null, + }, + .x86 => switch (abi) { + .msvc => "wasmtime_c_api_i686_windows", + else => null, + }, + else => null, + }, + .macos => switch (arch) { + .x86_64 => "wasmtime_c_api_x86_64_macos", + .aarch64 => "wasmtime_c_api_aarch64_macos", + else => null, + }, + else => null, + }) orelse std.debug.panic( + "Unsupported target for wasmtime: {s}-{s}-{s}", + .{ @tagName(arch), @tagName(os), @tagName(abi) }, + ); } fn findSourceFiles(b: *std.Build) ![]const []const u8 { - var sources = std.ArrayList([]const u8).init(b.allocator); + var sources: std.ArrayListUnmanaged([]const u8) = .empty; - var dir = try b.build_root.handle.openDir("lib/src", .{ .iterate = true }); - var iter = dir.iterate(); - defer dir.close(); + var dir = try b.build_root.handle.openDir("lib/src", .{ .iterate = true }); + var iter = dir.iterate(); + defer dir.close(); - while (try iter.next()) |entry| { - if (entry.kind != .file) continue; - const file = entry.name; - const ext = std.fs.path.extension(file); - if (std.mem.eql(u8, ext, ".c") and !std.mem.eql(u8, file, "lib.c")) { - try sources.append(b.dupe(file)); + while (try iter.next()) |entry| { + if (entry.kind != .file) continue; + const file = entry.name; + const ext = std.fs.path.extension(file); + if (std.mem.eql(u8, ext, ".c") and !std.mem.eql(u8, file, "lib.c")) { + try sources.append(b.allocator, b.dupe(file)); + } } - } - return sources.items; + return sources.toOwnedSlice(b.allocator); } diff --git a/build.zig.zon b/build.zig.zon index 423ed6ea..998a88eb 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -1,69 +1,76 @@ .{ - .name = "tree-sitter", - .version = "0.25.1", - .paths = .{ - "build.zig", - "build.zig.zon", - "lib/src", - "lib/include", - "README.md", - "LICENSE", - }, - .dependencies = .{ - .wasmtime_c_api_aarch64_android = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-android-c-api.tar.xz", - .hash = "12204c77979ad8291c6e395d695a824fb053ffdfeb2cc21de95fffb09f77d77188d1", - .lazy = true, + .name = .tree_sitter, + .fingerprint = 0x841224b447ac0d4f, + .version = "0.25.9", + .minimum_zig_version = "0.14.1", + .paths = .{ + "build.zig", + "build.zig.zon", + "lib/src", + "lib/include", + "README.md", + "LICENSE", }, - .wasmtime_c_api_aarch64_linux = .{ - .url = 
"https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-linux-c-api.tar.xz", - .hash = "12203a8e3d823490186fb1e230d54f575148713088e914926305ee5678790b731bba", - .lazy = true, + .dependencies = .{ + .wasmtime_c_api_aarch64_android = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-android-c-api.tar.xz", + .hash = "N-V-__8AAC3KCQZMd5ea2CkcbjldaVqCT7BT_9_rLMId6V__", + .lazy = true, + }, + .wasmtime_c_api_aarch64_linux = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-linux-c-api.tar.xz", + .hash = "N-V-__8AAGUY3gU6jj2CNJAYb7HiMNVPV1FIcTCI6RSSYwXu", + .lazy = true, + }, + .wasmtime_c_api_aarch64_macos = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-macos-c-api.tar.xz", + .hash = "N-V-__8AAM1GMARD6LGQebhVsSZ0uePUoo3Fw5nEO2L764vf", + .lazy = true, + }, + .wasmtime_c_api_aarch64_windows = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-windows-c-api.zip", + .hash = "N-V-__8AAH8a_wQ7oAeVVsaJcoOZhKTMkHIBc_XjDyLlHp2x", + .lazy = true, + }, + .wasmtime_c_api_riscv64gc_linux = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-riscv64gc-linux-c-api.tar.xz", + .hash = "N-V-__8AAN2cuQadBwMc8zJxv0sMY99Ae1Nc1dZcZAK9b4DZ", + .lazy = true, + }, + .wasmtime_c_api_s390x_linux = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-s390x-linux-c-api.tar.xz", + .hash = "N-V-__8AAPevngYz99mwT0KQY9my2ax1p6APzgLEJeV4II9U", + .lazy = true, + }, + .wasmtime_c_api_x86_64_android = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-android-c-api.tar.xz", + .hash = "N-V-__8AABHIEgaTyzPfjgnnCy0dwJiXoDiJFblCkYOJsQvy", + .lazy = true, + }, + .wasmtime_c_api_x86_64_linux = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-linux-c-api.tar.xz", + .hash = "N-V-__8AALUN5AWSEDRulL9u-OJJ-l0_GoT5UFDtGWZayEIq", + .lazy = true, + }, + .wasmtime_c_api_x86_64_macos = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-macos-c-api.tar.xz", + .hash = "N-V-__8AANUeXwSPh13TqJCSSFdi87GEcHs8zK6FqE4v_TjB", + .lazy = true, + }, + .wasmtime_c_api_x86_64_mingw = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-mingw-c-api.zip", + .hash = "N-V-__8AALundgW-p1ffOnd7bsYyL8SY5OziDUZu7cXio2EL", + .lazy = true, + }, + .wasmtime_c_api_x86_64_musl = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-musl-c-api.tar.xz", + .hash = "N-V-__8AALMZ5wXJWW5qY-3MMjTAYR0MusckvzCsmg-69ALH", + .lazy = true, + }, + .wasmtime_c_api_x86_64_windows = .{ + .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-windows-c-api.zip", + .hash = "N-V-__8AAG-uVQVEDMsB1ymJzxpHcoiXo1_I3TFnPM5Zjy1i", + .lazy = true, + }, }, - .wasmtime_c_api_aarch64_macos = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-aarch64-macos-c-api.tar.xz", - .hash = "122043e8b19079b855b12674b9e3d4a28dc5c399c43b62fbeb8bdf0fdb4ef2d1d38c", - .lazy = true, - }, - .wasmtime_c_api_riscv64gc_linux = .{ - .url = 
"https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-riscv64gc-linux-c-api.tar.xz", - .hash = "12209d07031cf33271bf4b0c63df407b535cd5d65c6402bd6f80d99de439d6feb89b", - .lazy = true, - }, - .wasmtime_c_api_s390x_linux = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-s390x-linux-c-api.tar.xz", - .hash = "122033f7d9b04f429063d9b2d9ac75a7a00fce02c425e578208f54ddc40edaa1e355", - .lazy = true, - }, - .wasmtime_c_api_x86_64_android = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-android-c-api.tar.xz", - .hash = "122093cb33df8e09e70b2d1dc09897a0388915b942918389b10bf23f9684bdb6f047", - .lazy = true, - }, - .wasmtime_c_api_x86_64_linux = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-linux-c-api.tar.xz", - .hash = "12209210346e94bf6ef8e249fa5d3f1a84f95050ed19665ac8422a15b5f2246d83af", - .lazy = true, - }, - .wasmtime_c_api_x86_64_macos = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-macos-c-api.tar.xz", - .hash = "12208f875dd3a89092485762f3b184707b3cccae85a84e2ffd38c138cc3a3fd90447", - .lazy = true, - }, - .wasmtime_c_api_x86_64_mingw = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-mingw-c-api.zip", - .hash = "1220bea757df3a777b6ec6322fc498e4ece20d466eedc5e2a3610b338849553cd94d", - .lazy = true, - }, - .wasmtime_c_api_x86_64_musl = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-musl-c-api.tar.xz", - .hash = "1220c9596e6a63edcc3234c0611d0cbac724bf30ac9a0fbaf402c7da649b278b1322", - .lazy = true, - }, - .wasmtime_c_api_x86_64_windows = .{ - .url = "https://github.com/bytecodealliance/wasmtime/releases/download/v29.0.1/wasmtime-v29.0.1-x86_64-windows-c-api.zip", - .hash = "1220440ccb01d72989cf1a47728897a35fc8dd31673cce598f2d62c58e2c3228b0ed", - .lazy = true, - }, - } } diff --git a/cli/Cargo.toml b/cli/Cargo.toml index a3bdcc13..45aa5833 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -59,6 +59,7 @@ similar.workspace = true smallbitvec.workspace = true streaming-iterator.workspace = true tiny_http.workspace = true +topological-sort.workspace = true url.workspace = true walkdir.workspace = true wasmparser.workspace = true diff --git a/cli/benches/benchmark.rs b/cli/benches/benchmark.rs index 2b0e29ea..943390c6 100644 --- a/cli/benches/benchmark.rs +++ b/cli/benches/benchmark.rs @@ -112,7 +112,7 @@ fn main() { parse(path, max_path_length, |source| { Query::new(&language, str::from_utf8(source).unwrap()) - .with_context(|| format!("Query file path: {path:?}")) + .with_context(|| format!("Query file path: {}", path.display())) .expect("Failed to parse query"); }); } @@ -201,7 +201,7 @@ fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) -> ); let source_code = fs::read(path) - .with_context(|| format!("Failed to read {path:?}")) + .with_context(|| format!("Failed to read {}", path.display())) .unwrap(); let time = Instant::now(); for _ in 0..*REPETITION_COUNT { @@ -221,6 +221,6 @@ fn get_language(path: &Path) -> Language { let src_path = GRAMMARS_DIR.join(path).join("src"); TEST_LOADER .load_language_at_path(CompileConfig::new(&src_path, None, None)) - .with_context(|| format!("Failed to load language at path {src_path:?}")) + .with_context(|| format!("Failed to load language at path {}", 
src_path.display())) .unwrap() } diff --git a/cli/build.rs b/cli/build.rs index 04406487..0081395e 100644 --- a/cli/build.rs +++ b/cli/build.rs @@ -60,8 +60,6 @@ fn web_playground_files_present() -> bool { paths.iter().all(|p| Path::new(p).exists()) } -// When updating this function, don't forget to also update generate/build.rs which has a -// near-identical function. fn read_git_sha() -> Option { let crate_path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); diff --git a/cli/eslint/package.json b/cli/eslint/package.json index d53bf3a4..a3ba26c9 100644 --- a/cli/eslint/package.json +++ b/cli/eslint/package.json @@ -4,7 +4,8 @@ "description": "Eslint configuration for Tree-sitter grammar files", "repository": { "type": "git", - "url": "git+https://github.com/tree-sitter/tree-sitter.git" + "url": "git+https://github.com/tree-sitter/tree-sitter.git", + "directory": "crates/cli/eslint" }, "license": "MIT", "author": "Amaan Qureshi ", diff --git a/cli/generate/Cargo.toml b/cli/generate/Cargo.toml index 7955a79c..d3e6b224 100644 --- a/cli/generate/Cargo.toml +++ b/cli/generate/Cargo.toml @@ -29,6 +29,9 @@ serde.workspace = true serde_json.workspace = true smallbitvec.workspace = true thiserror.workspace = true -url.workspace = true +topological-sort.workspace = true tree-sitter.workspace = true + +[target.'cfg(windows)'.dependencies] +url.workspace = true diff --git a/cli/generate/build.rs b/cli/generate/build.rs deleted file mode 100644 index 6fdbc45b..00000000 --- a/cli/generate/build.rs +++ /dev/null @@ -1,32 +0,0 @@ -use std::{env, path::PathBuf, process::Command}; - -fn main() { - if let Some(git_sha) = read_git_sha() { - println!("cargo:rustc-env=BUILD_SHA={git_sha}"); - } -} - -// This is copied from the build.rs in parent directory. This should be updated if the -// parent build.rs gets fixes. -fn read_git_sha() -> Option { - let crate_path = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); - - if !crate_path - .parent()? - .parent() - .is_some_and(|p| p.join(".git").exists()) - { - return None; - } - - Command::new("git") - .args(["rev-parse", "HEAD"]) - .current_dir(crate_path) - .output() - .map_or(None, |output| { - if !output.status.success() { - return None; - } - Some(String::from_utf8_lossy(&output.stdout).to_string()) - }) -} diff --git a/cli/generate/src/build_tables/build_parse_table.rs b/cli/generate/src/build_tables/build_parse_table.rs index c41c4838..9f43f0bd 100644 --- a/cli/generate/src/build_tables/build_parse_table.rs +++ b/cli/generate/src/build_tables/build_parse_table.rs @@ -312,6 +312,12 @@ impl<'a> ParseTableBuilder<'a> { } } + let non_terminal_sets_len = non_terminal_extra_item_sets_by_first_terminal.len(); + self.non_terminal_extra_states + .reserve(non_terminal_sets_len); + self.parse_state_info_by_id.reserve(non_terminal_sets_len); + self.parse_table.states.reserve(non_terminal_sets_len); + self.parse_state_queue.reserve(non_terminal_sets_len); // Add a state for each starting terminal of a non-terminal extra rule. 
for (terminal, item_set) in non_terminal_extra_item_sets_by_first_terminal { if terminal.is_non_terminal() { @@ -320,9 +326,10 @@ impl<'a> ParseTableBuilder<'a> { ))?; } - self.non_terminal_extra_states - .push((terminal, self.parse_table.states.len())); - self.add_parse_state(&Vec::new(), &Vec::new(), item_set); + // Add the parse state, and *then* push the terminal and the state id into the + // list of nonterminal extra states + let state_id = self.add_parse_state(&Vec::new(), &Vec::new(), item_set); + self.non_terminal_extra_states.push((terminal, state_id)); } while let Some(entry) = self.parse_state_queue.pop_front() { @@ -908,7 +915,7 @@ impl<'a> ParseTableBuilder<'a> { let get_rule_names = |items: &[&ParseItem]| -> Vec { let mut last_rule_id = None; - let mut result = Vec::new(); + let mut result = Vec::with_capacity(items.len()); for item in items { if last_rule_id == Some(item.variable_index) { continue; diff --git a/cli/generate/src/dsl.js b/cli/generate/src/dsl.js index dd59efa6..faaace05 100644 --- a/cli/generate/src/dsl.js +++ b/cli/generate/src/dsl.js @@ -529,7 +529,7 @@ globalThis.optional = optional; globalThis.prec = prec; globalThis.repeat = repeat; globalThis.repeat1 = repeat1; -global.reserved = reserved; +globalThis.reserved = reserved; globalThis.seq = seq; globalThis.sym = sym; globalThis.token = token; diff --git a/cli/generate/src/lib.rs b/cli/generate/src/lib.rs index 56b2cdc6..95bcb0a9 100644 --- a/cli/generate/src/lib.rs +++ b/cli/generate/src/lib.rs @@ -27,7 +27,7 @@ mod tables; use build_tables::build_tables; pub use build_tables::ParseTableBuilderError; use grammars::InputGrammar; -pub use node_types::VariableInfoError; +pub use node_types::{SuperTypeCycleError, VariableInfoError}; use parse_grammar::parse_grammar; pub use parse_grammar::ParseGrammarError; use prepare_grammar::prepare_grammar; @@ -70,6 +70,8 @@ pub enum GenerateError { BuildTables(#[from] ParseTableBuilderError), #[error(transparent)] ParseVersion(#[from] ParseVersionError), + #[error(transparent)] + SuperTypeCycle(#[from] SuperTypeCycleError), } impl From for GenerateError { @@ -183,7 +185,8 @@ pub fn generate_parser_in_directory( if grammar_path.file_name().unwrap() != "grammar.json" { fs::write(src_path.join("grammar.json"), &grammar_json).map_err(|e| { GenerateError::IO(format!( - "Failed to write grammar.json to {src_path:?} -- {e}" + "Failed to write grammar.json to {} -- {e}", + src_path.display() )) })?; } @@ -249,7 +252,7 @@ fn generate_parser_for_grammar_with_opts( &lexical_grammar, &simple_aliases, &variable_info, - ); + )?; let supertype_symbol_map = node_types::get_supertype_symbol_map(&syntax_grammar, &simple_aliases, &variable_info); let tables = build_tables( diff --git a/cli/generate/src/node_types.rs b/cli/generate/src/node_types.rs index 657f5c1f..07be52c6 100644 --- a/cli/generate/src/node_types.rs +++ b/cli/generate/src/node_types.rs @@ -1,7 +1,4 @@ -use std::{ - cmp::Ordering, - collections::{BTreeMap, HashMap, HashSet}, -}; +use std::collections::{BTreeMap, HashMap, HashSet}; use anyhow::Result; use serde::Serialize; @@ -444,12 +441,33 @@ pub fn get_supertype_symbol_map( supertype_symbol_map } +pub type SuperTypeCycleResult = Result; + +#[derive(Debug, Error, Serialize)] +pub struct SuperTypeCycleError { + items: Vec, +} + +impl std::fmt::Display for SuperTypeCycleError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Dependency cycle detected in node types:")?; + for (i, item) in self.items.iter().enumerate() { + write!(f, " 
{item}")?; + if i < self.items.len() - 1 { + write!(f, ",")?; + } + } + + Ok(()) + } +} + pub fn generate_node_types_json( syntax_grammar: &SyntaxGrammar, lexical_grammar: &LexicalGrammar, default_aliases: &AliasMap, variable_info: &[VariableInfo], -) -> Vec { +) -> SuperTypeCycleResult> { let mut node_types_json = BTreeMap::new(); let child_type_to_node_type = |child_type: &ChildType| match child_type { @@ -507,6 +525,31 @@ pub fn generate_node_types_json( let aliases_by_symbol = get_aliases_by_symbol(syntax_grammar, default_aliases); + let empty = HashSet::new(); + let extra_names = syntax_grammar + .extra_symbols + .iter() + .flat_map(|symbol| { + aliases_by_symbol + .get(symbol) + .unwrap_or(&empty) + .iter() + .map(|alias| { + alias.as_ref().map_or( + match symbol.kind { + SymbolType::NonTerminal => &syntax_grammar.variables[symbol.index].name, + SymbolType::Terminal => &lexical_grammar.variables[symbol.index].name, + SymbolType::External => { + &syntax_grammar.external_tokens[symbol.index].name + } + _ => unreachable!(), + }, + |alias| &alias.value, + ) + }) + }) + .collect::>(); + let mut subtype_map = Vec::new(); for (i, info) in variable_info.iter().enumerate() { let symbol = Symbol::non_terminal(i); @@ -519,7 +562,7 @@ pub fn generate_node_types_json( kind: variable.name.clone(), named: true, root: false, - extra: false, + extra: extra_names.contains(&variable.name), fields: None, children: None, subtypes: None, @@ -563,7 +606,7 @@ pub fn generate_node_types_json( kind: kind.clone(), named: is_named, root: i == 0, - extra: false, + extra: extra_names.contains(&kind), fields: Some(BTreeMap::new()), children: None, subtypes: None, @@ -602,15 +645,33 @@ pub fn generate_node_types_json( } } - // Sort the subtype map so that subtypes are listed before their supertypes. - subtype_map.sort_by(|a, b| { - if b.1.contains(&a.0) { - Ordering::Less - } else if a.1.contains(&b.0) { - Ordering::Greater - } else { - Ordering::Equal + // Sort the subtype map topologically so that subtypes are listed before their supertypes. 
+ let mut sorted_kinds = Vec::with_capacity(subtype_map.len()); + let mut top_sort = topological_sort::TopologicalSort::::new(); + for (supertype, subtypes) in &subtype_map { + for subtype in subtypes { + top_sort.add_dependency(subtype.kind.clone(), supertype.kind.clone()); } + } + loop { + let mut next_kinds = top_sort.pop_all(); + match (next_kinds.is_empty(), top_sort.is_empty()) { + (true, true) => break, + (true, false) => { + let mut items = top_sort.collect::>(); + items.sort(); + return Err(SuperTypeCycleError { items }); + } + (false, _) => { + next_kinds.sort(); + sorted_kinds.extend(next_kinds); + } + } + } + subtype_map.sort_by(|a, b| { + let a_idx = sorted_kinds.iter().position(|n| n.eq(&a.0.kind)).unwrap(); + let b_idx = sorted_kinds.iter().position(|n| n.eq(&b.0.kind)).unwrap(); + a_idx.cmp(&b_idx) }); for node_type_json in node_types_json.values_mut() { @@ -634,7 +695,6 @@ pub fn generate_node_types_json( let mut anonymous_node_types = Vec::new(); - let empty = HashSet::new(); let regular_tokens = lexical_grammar .variables .iter() @@ -668,29 +728,6 @@ pub fn generate_node_types_json( }) }) }); - let extra_names = syntax_grammar - .extra_symbols - .iter() - .flat_map(|symbol| { - aliases_by_symbol - .get(symbol) - .unwrap_or(&empty) - .iter() - .map(|alias| { - alias.as_ref().map_or( - match symbol.kind { - SymbolType::NonTerminal => &syntax_grammar.variables[symbol.index].name, - SymbolType::Terminal => &lexical_grammar.variables[symbol.index].name, - SymbolType::External => { - &syntax_grammar.external_tokens[symbol.index].name - } - _ => unreachable!(), - }, - |alias| &alias.value, - ) - }) - }) - .collect::>(); for (name, kind) in regular_tokens.chain(external_tokens) { match kind { @@ -743,7 +780,7 @@ pub fn generate_node_types_json( .then_with(|| a.kind.cmp(&b.kind)) }); result.dedup(); - result + Ok(result) } fn process_supertypes(info: &mut FieldInfoJSON, subtype_map: &[(NodeTypeJSON, Vec)]) { @@ -829,7 +866,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!(node_types.len(), 3); @@ -918,7 +956,9 @@ mod tests { }, // This rule is not reachable from the start symbol, but // it is reachable from the 'extra_symbols' so it - // should be present in the node_types + // should be present in the node_types. + // But because it's only a literal, it will get replaced by + // a lexical variable. Variable { name: "v3".to_string(), kind: VariableType::Named, @@ -926,7 +966,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!(node_types.len(), 4); @@ -1007,6 +1048,118 @@ mod tests { ); } + #[test] + fn test_node_types_deeper_extras() { + let node_types = get_node_types(&InputGrammar { + extra_symbols: vec![Rule::named("v3")], + variables: vec![ + Variable { + name: "v1".to_string(), + kind: VariableType::Named, + rule: Rule::seq(vec![ + Rule::field("f1".to_string(), Rule::named("v2")), + Rule::field("f2".to_string(), Rule::string(";")), + ]), + }, + Variable { + name: "v2".to_string(), + kind: VariableType::Named, + rule: Rule::string("x"), + }, + // This rule is not reachable from the start symbol, but + // it is reachable from the 'extra_symbols' so it + // should be present in the node_types. + // Because it is not just a literal, it won't get replaced + // by a lexical variable. 
+ Variable { + name: "v3".to_string(), + kind: VariableType::Named, + rule: Rule::seq(vec![Rule::string("y"), Rule::repeat(Rule::string("z"))]), + }, + ], + ..Default::default() + }) + .unwrap(); + + assert_eq!(node_types.len(), 6); + + assert_eq!( + node_types[0], + NodeInfoJSON { + kind: "v1".to_string(), + named: true, + root: true, + extra: false, + subtypes: None, + children: None, + fields: Some( + vec![ + ( + "f1".to_string(), + FieldInfoJSON { + multiple: false, + required: true, + types: vec![NodeTypeJSON { + kind: "v2".to_string(), + named: true, + }] + } + ), + ( + "f2".to_string(), + FieldInfoJSON { + multiple: false, + required: true, + types: vec![NodeTypeJSON { + kind: ";".to_string(), + named: false, + }] + } + ), + ] + .into_iter() + .collect() + ) + } + ); + assert_eq!( + node_types[1], + NodeInfoJSON { + kind: "v3".to_string(), + named: true, + root: false, + extra: true, + subtypes: None, + children: None, + fields: Some(BTreeMap::default()) + } + ); + assert_eq!( + node_types[2], + NodeInfoJSON { + kind: ";".to_string(), + named: false, + root: false, + extra: false, + subtypes: None, + children: None, + fields: None + } + ); + assert_eq!( + node_types[3], + NodeInfoJSON { + kind: "v2".to_string(), + named: true, + root: false, + extra: false, + subtypes: None, + children: None, + fields: None + } + ); + } + #[test] fn test_node_types_with_supertypes() { let node_types = get_node_types(&InputGrammar { @@ -1038,7 +1191,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!( node_types[0], @@ -1127,7 +1281,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!( node_types[0], @@ -1212,7 +1367,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!( node_types[0], @@ -1286,7 +1442,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!(node_types.iter().find(|t| t.kind == "foo_identifier"), None); assert_eq!( @@ -1342,7 +1499,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!( node_types[0], @@ -1391,7 +1549,8 @@ mod tests { ]), }], ..Default::default() - }); + }) + .unwrap(); assert_eq!( node_types, @@ -1439,7 +1598,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!( &node_types @@ -1558,7 +1718,8 @@ mod tests { }, ], ..Default::default() - }); + }) + .unwrap(); assert_eq!( node_types.iter().map(|n| &n.kind).collect::>(), @@ -1885,7 +2046,7 @@ mod tests { ); } - fn get_node_types(grammar: &InputGrammar) -> Vec { + fn get_node_types(grammar: &InputGrammar) -> SuperTypeCycleResult> { let (syntax_grammar, lexical_grammar, _, default_aliases) = prepare_grammar(grammar).unwrap(); let variable_info = diff --git a/cli/generate/src/parse_grammar.rs b/cli/generate/src/parse_grammar.rs index 0b8de958..2dbf7701 100644 --- a/cli/generate/src/parse_grammar.rs +++ b/cli/generate/src/parse_grammar.rs @@ -1,6 +1,7 @@ use std::collections::HashSet; use anyhow::Result; +use regex::Regex; use serde::{Deserialize, Serialize}; use serde_json::{Map, Value}; use thiserror::Error; @@ -238,13 +239,14 @@ pub(crate) fn parse_grammar(input: &str) -> ParseGrammarResult { let mut in_progress = HashSet::new(); for (name, rule) in &rules { - if !variable_is_used( - &rules, - &extra_symbols, - &external_tokens, - name, - &mut in_progress, - ) && grammar_json.word.as_ref().is_none_or(|w| w != name) + if grammar_json.word.as_ref().is_none_or(|w| w != name) + && !variable_is_used( + &rules, + &extra_symbols, + &external_tokens, + name, + &mut in_progress, + ) 
{ grammar_json.conflicts.retain(|r| !r.contains(name)); grammar_json.supertypes.retain(|r| r != name); @@ -261,6 +263,27 @@ pub(crate) fn parse_grammar(input: &str) -> ParseGrammarResult { }); continue; } + + if extra_symbols + .iter() + .any(|r| rule_is_referenced(r, name, false)) + { + let inner_rule = if let Rule::Metadata { rule, .. } = rule { + rule + } else { + rule + }; + let matches_empty = match inner_rule { + Rule::String(rule_str) => rule_str.is_empty(), + Rule::Pattern(ref value, _) => Regex::new(value) + .map(|reg| reg.is_match("")) + .unwrap_or(false), + _ => false, + }; + if matches_empty { + eprintln!("Warning: Named extra rule `{name}` matches the empty string. Inline this to avoid infinite loops while parsing."); + } + } variables.push(Variable { name: name.clone(), kind: VariableType::Named, @@ -272,12 +295,11 @@ pub(crate) fn parse_grammar(input: &str) -> ParseGrammarResult { .reserved .into_iter() .map(|(name, rule_values)| { - let mut reserved_words = Vec::new(); - let Value::Array(rule_values) = rule_values else { Err(ParseGrammarError::InvalidReservedWordSet)? }; + let mut reserved_words = Vec::with_capacity(rule_values.len()); for value in rule_values { reserved_words.push(parse_rule(serde_json::from_value(value)?, false)?); } diff --git a/cli/generate/src/prepare_grammar/expand_tokens.rs b/cli/generate/src/prepare_grammar/expand_tokens.rs index ed4774d4..2762b41c 100644 --- a/cli/generate/src/prepare_grammar/expand_tokens.rs +++ b/cli/generate/src/prepare_grammar/expand_tokens.rs @@ -90,7 +90,7 @@ pub fn expand_tokens(mut grammar: ExtractedLexicalGrammar) -> ExpandTokensResult Rule::repeat(Rule::choice(grammar.separators)) }; - let mut variables = Vec::new(); + let mut variables = Vec::with_capacity(grammar.variables.len()); for (i, variable) in grammar.variables.into_iter().enumerate() { if variable.rule.is_empty() { Err(ExpandTokensError::EmptyString(variable.name.clone()))?; @@ -195,7 +195,7 @@ impl NfaBuilder { Ok(!s.is_empty()) } Rule::Choice(elements) => { - let mut alternative_state_ids = Vec::new(); + let mut alternative_state_ids = Vec::with_capacity(elements.len()); for element in elements { if self.expand_rule(element, next_state_id)? { alternative_state_ids.push(self.nfa.last_state_id()); @@ -338,7 +338,7 @@ impl NfaBuilder { Ok(result) } HirKind::Alternation(alternations) => { - let mut alternative_state_ids = Vec::new(); + let mut alternative_state_ids = Vec::with_capacity(alternations.len()); for hir in alternations { if self.expand_regex(hir, next_state_id)? { alternative_state_ids.push(self.nfa.last_state_id()); diff --git a/cli/generate/src/prepare_grammar/extract_tokens.rs b/cli/generate/src/prepare_grammar/extract_tokens.rs index 9276f68e..579ded06 100644 --- a/cli/generate/src/prepare_grammar/extract_tokens.rs +++ b/cli/generate/src/prepare_grammar/extract_tokens.rs @@ -26,10 +26,34 @@ unless they are used only as the grammar's start rule. 
ExternalTokenNonTerminal(String), #[error("Non-symbol rules cannot be used as external tokens")] NonSymbolExternalToken, - #[error("Non-terminal symbol '{0}' cannot be used as the word token, because its rule is duplicated in '{1}'")] - NonTerminalWordToken(String, String), - #[error("Reserved words must be tokens")] - NonTokenReservedWord, + #[error(transparent)] + WordToken(NonTerminalWordTokenError), + #[error("Reserved word '{0}' must be a token")] + NonTokenReservedWord(String), +} + +#[derive(Debug, Error, Serialize)] +pub struct NonTerminalWordTokenError { + pub symbol_name: String, + pub conflicting_symbol_name: Option, +} + +impl std::fmt::Display for NonTerminalWordTokenError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "Non-terminal symbol '{}' cannot be used as the word token", + self.symbol_name + )?; + if let Some(conflicting_name) = &self.conflicting_symbol_name { + writeln!( + f, + ", because its rule is duplicated in '{conflicting_name}'", + ) + } else { + writeln!(f) + } + } } pub(super) fn extract_tokens( @@ -62,7 +86,7 @@ pub(super) fn extract_tokens( // that pointed to that variable will need to be updated to point to the // variable in the lexical grammar. Symbols that pointed to later variables // will need to have their indices decremented. - let mut variables = Vec::new(); + let mut variables = Vec::with_capacity(grammar.variables.len()); let mut symbol_replacer = SymbolReplacer { replacements: HashMap::new(), }; @@ -162,23 +186,23 @@ pub(super) fn extract_tokens( let token = symbol_replacer.replace_symbol(token); if token.is_non_terminal() { let word_token_variable = &variables[token.index]; - let conflicting_variable = variables + let conflicting_symbol_name = variables .iter() .enumerate() .find(|(i, v)| *i != token.index && v.rule == word_token_variable.rule) - .expect("Failed to find a variable with the same rule as the word token"); + .map(|(_, v)| v.name.clone()); - Err(ExtractTokensError::NonTerminalWordToken( - word_token_variable.name.clone(), - conflicting_variable.1.name.clone(), - ))?; + Err(ExtractTokensError::WordToken(NonTerminalWordTokenError { + symbol_name: word_token_variable.name.clone(), + conflicting_symbol_name, + }))?; } word_token = Some(token); } - let mut reserved_word_contexts = Vec::new(); + let mut reserved_word_contexts = Vec::with_capacity(grammar.reserved_word_sets.len()); for reserved_word_context in grammar.reserved_word_sets { - let mut reserved_words = Vec::new(); + let mut reserved_words = Vec::with_capacity(reserved_word_contexts.len()); for reserved_rule in reserved_word_context.reserved_words { if let Rule::Symbol(symbol) = reserved_rule { reserved_words.push(symbol_replacer.replace_symbol(symbol)); @@ -188,7 +212,12 @@ pub(super) fn extract_tokens( { reserved_words.push(Symbol::terminal(index)); } else { - Err(ExtractTokensError::NonTokenReservedWord)?; + let token_name = match &reserved_rule { + Rule::String(s) => s.clone(), + Rule::Pattern(p, _) => p.clone(), + _ => "unknown".to_string(), + }; + Err(ExtractTokensError::NonTokenReservedWord(token_name))?; } } reserved_word_contexts.push(ReservedWordContext { diff --git a/cli/generate/src/prepare_grammar/flatten_grammar.rs b/cli/generate/src/prepare_grammar/flatten_grammar.rs index b8033d5f..cb0f1dae 100644 --- a/cli/generate/src/prepare_grammar/flatten_grammar.rs +++ b/cli/generate/src/prepare_grammar/flatten_grammar.rs @@ -57,8 +57,9 @@ impl RuleFlattener { } fn flatten_variable(&mut self, variable: Variable) -> 
FlattenGrammarResult { - let mut productions = Vec::new(); - for rule in extract_choices(variable.rule) { + let choices = extract_choices(variable.rule); + let mut productions = Vec::with_capacity(choices.len()); + for rule in choices { let production = self.flatten_rule(rule)?; if !productions.contains(&production) { productions.push(production); @@ -195,7 +196,7 @@ fn extract_choices(rule: Rule) -> Vec { let mut result = vec![Rule::Blank]; for element in elements { let extraction = extract_choices(element); - let mut next_result = Vec::new(); + let mut next_result = Vec::with_capacity(result.len()); for entry in result { for extraction_entry in &extraction { next_result.push(Rule::Seq(vec![entry.clone(), extraction_entry.clone()])); @@ -206,7 +207,7 @@ fn extract_choices(rule: Rule) -> Vec { result } Rule::Choice(elements) => { - let mut result = Vec::new(); + let mut result = Vec::with_capacity(elements.len()); for element in elements { for rule in extract_choices(element) { result.push(rule); @@ -262,9 +263,10 @@ pub(super) fn flatten_grammar( for (i, variable) in variables.iter().enumerate() { let symbol = Symbol::non_terminal(i); + let used = symbol_is_used(&variables, symbol); for production in &variable.productions { - if production.steps.is_empty() && symbol_is_used(&variables, symbol) { + if used && production.steps.is_empty() { Err(FlattenGrammarError::EmptyString(variable.name.clone()))?; } @@ -533,7 +535,7 @@ mod tests { assert_eq!( result.unwrap_err().to_string(), - "Rule `test` cannot be inlined because it contains a reference to itself.", + "Rule `test` cannot be inlined because it contains a reference to itself", ); } } diff --git a/cli/generate/src/prepare_grammar/intern_symbols.rs b/cli/generate/src/prepare_grammar/intern_symbols.rs index 6301e462..41a5b56e 100644 --- a/cli/generate/src/prepare_grammar/intern_symbols.rs +++ b/cli/generate/src/prepare_grammar/intern_symbols.rs @@ -65,7 +65,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> InternSymbolsResult InternSymbolsResult = Result; #[error(transparent)] pub enum PrepareGrammarError { ValidatePrecedences(#[from] ValidatePrecedenceError), + ValidateIndirectRecursion(#[from] IndirectRecursionError), InternSymbols(#[from] InternSymbolsError), ExtractTokens(#[from] ExtractTokensError), FlattenGrammar(#[from] FlattenGrammarError), @@ -96,6 +98,22 @@ pub enum ValidatePrecedenceError { Ordering(#[from] ConflictingPrecedenceOrderingError), } +#[derive(Debug, Error, Serialize)] +pub struct IndirectRecursionError(pub Vec); + +impl std::fmt::Display for IndirectRecursionError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Grammar contains an indirectly recursive rule: ")?; + for (i, symbol) in self.0.iter().enumerate() { + if i > 0 { + write!(f, " -> ")?; + } + write!(f, "{symbol}")?; + } + Ok(()) + } +} + #[derive(Debug, Error, Serialize)] pub struct UndeclaredPrecedenceError { pub precedence: String, @@ -141,6 +159,7 @@ pub fn prepare_grammar( AliasMap, )> { validate_precedences(input_grammar)?; + validate_indirect_recursion(input_grammar)?; let interned_grammar = intern_symbols(input_grammar)?; let (syntax_grammar, lexical_grammar) = extract_tokens(interned_grammar)?; @@ -152,6 +171,83 @@ pub fn prepare_grammar( Ok((syntax_grammar, lexical_grammar, inlines, default_aliases)) } +/// Check for indirect recursion cycles in the grammar that can cause infinite loops while +/// parsing. 
An indirect recursion cycle occurs when a non-terminal can derive itself through +/// a chain of single-symbol productions (e.g., A -> B, B -> A). +fn validate_indirect_recursion(grammar: &InputGrammar) -> Result<(), IndirectRecursionError> { + let mut epsilon_transitions: IndexMap<&str, BTreeSet> = IndexMap::new(); + + for variable in &grammar.variables { + let productions = get_single_symbol_productions(&variable.rule); + // Filter out rules that *directly* reference themselves, as this doesn't + // cause a parsing loop. + let filtered: BTreeSet = productions + .into_iter() + .filter(|s| s != &variable.name) + .collect(); + epsilon_transitions.insert(variable.name.as_str(), filtered); + } + + for start_symbol in epsilon_transitions.keys() { + let mut visited = BTreeSet::new(); + let mut path = Vec::new(); + if let Some((start_idx, end_idx)) = + get_cycle(start_symbol, &epsilon_transitions, &mut visited, &mut path) + { + let cycle_symbols = path[start_idx..=end_idx] + .iter() + .map(|s| (*s).to_string()) + .collect(); + return Err(IndirectRecursionError(cycle_symbols)); + } + } + + Ok(()) +} + +fn get_single_symbol_productions(rule: &Rule) -> BTreeSet { + match rule { + Rule::NamedSymbol(name) => BTreeSet::from([name.clone()]), + Rule::Choice(choices) => choices + .iter() + .flat_map(get_single_symbol_productions) + .collect(), + Rule::Metadata { rule, .. } => get_single_symbol_productions(rule), + _ => BTreeSet::new(), + } +} + +/// Perform a depth-first search to detect cycles in single state transitions. +fn get_cycle<'a>( + current: &'a str, + transitions: &'a IndexMap<&'a str, BTreeSet>, + visited: &mut BTreeSet<&'a str>, + path: &mut Vec<&'a str>, +) -> Option<(usize, usize)> { + if let Some(first_idx) = path.iter().position(|s| *s == current) { + path.push(current); + return Some((first_idx, path.len() - 1)); + } + + if visited.contains(current) { + return None; + } + + path.push(current); + visited.insert(current); + + if let Some(next_symbols) = transitions.get(current) { + for next in next_symbols { + if let Some(cycle) = get_cycle(next, transitions, visited, path) { + return Some(cycle); + } + } + } + + path.pop(); + None +} + /// Check that all of the named precedences used in the grammar are declared /// within the `precedences` lists, and also that there are no conflicting /// precedence orderings declared in those lists. diff --git a/cli/generate/src/render.rs b/cli/generate/src/render.rs index dc8f0825..2fd45d82 100644 --- a/cli/generate/src/render.rs +++ b/cli/generate/src/render.rs @@ -24,7 +24,6 @@ pub const ABI_VERSION_MIN: usize = 14; pub const ABI_VERSION_MAX: usize = tree_sitter::LANGUAGE_VERSION; const ABI_VERSION_WITH_RESERVED_WORDS: usize = 15; const BUILD_VERSION: &str = env!("CARGO_PKG_VERSION"); -const BUILD_SHA: Option<&'static str> = option_env!("BUILD_SHA"); macro_rules! 
add { ($this: tt, $($arg: tt)*) => {{ @@ -322,13 +321,9 @@ impl Generator { } fn add_header(&mut self) { - let version = BUILD_SHA.map_or_else( - || BUILD_VERSION.to_string(), - |build_sha| format!("{BUILD_VERSION} ({build_sha})"), - ); add_line!( self, - "/* Automatically generated by tree-sitter v{version} */", + "/* Automatically @generated by tree-sitter v{BUILD_VERSION} */", ); add_line!(self, ""); } @@ -683,12 +678,12 @@ impl Generator { &mut next_flat_field_map_index, ); - let mut field_map_ids = Vec::new(); + let mut field_map_ids = Vec::with_capacity(self.parse_table.production_infos.len()); for production_info in &self.parse_table.production_infos { if production_info.field_map.is_empty() { field_map_ids.push((0, 0)); } else { - let mut flat_field_map = Vec::new(); + let mut flat_field_map = Vec::with_capacity(production_info.field_map.len()); for (field_name, locations) in &production_info.field_map { for location in locations { flat_field_map.push((field_name.clone(), *location)); @@ -1111,7 +1106,11 @@ impl Generator { return; } - add_line!(self, "const TSCharacterRange {}[] = {{", info.constant_name); + add_line!( + self, + "static const TSCharacterRange {}[] = {{", + info.constant_name + ); indent!(self); for (ix, range) in characters.ranges().enumerate() { @@ -1351,7 +1350,12 @@ impl Generator { indent!(self); let mut next_table_index = 0; - let mut small_state_indices = Vec::new(); + let mut small_state_indices = Vec::with_capacity( + self.parse_table + .states + .len() + .saturating_sub(self.large_state_count), + ); let mut symbols_by_value = HashMap::<(usize, SymbolType), Vec>::new(); for state in self.parse_table.states.iter().skip(self.large_state_count) { small_state_indices.push(next_table_index); @@ -1847,11 +1851,11 @@ impl Generator { '\u{007F}' => "DEL", '\u{FEFF}' => "BOM", '\u{0080}'..='\u{FFFF}' => { - result.push_str(&format!("u{:04x}", c as u32)); + write!(result, "u{:04x}", c as u32).unwrap(); break 'special_chars; } '\u{10000}'..='\u{10FFFF}' => { - result.push_str(&format!("U{:08x}", c as u32)); + write!(result, "U{:08x}", c as u32).unwrap(); break 'special_chars; } '0'..='9' | 'a'..='z' | 'A'..='Z' | '_' => unreachable!(), @@ -1882,11 +1886,9 @@ impl Generator { '\r' => result += "\\r", '\t' => result += "\\t", '\0' => result += "\\0", - '\u{0001}'..='\u{001f}' => result += &format!("\\x{:02x}", c as u32), - '\u{007F}'..='\u{FFFF}' => result += &format!("\\u{:04x}", c as u32), - '\u{10000}'..='\u{10FFFF}' => { - result.push_str(&format!("\\U{:08x}", c as u32)); - } + '\u{0001}'..='\u{001f}' => write!(result, "\\x{:02x}", c as u32).unwrap(), + '\u{007F}'..='\u{FFFF}' => write!(result, "\\u{:04x}", c as u32).unwrap(), + '\u{10000}'..='\u{10FFFF}' => write!(result, "\\U{:08x}", c as u32).unwrap(), _ => result.push(c), } } diff --git a/cli/generate/src/rules.rs b/cli/generate/src/rules.rs index aa7d46ab..cd4aa482 100644 --- a/cli/generate/src/rules.rs +++ b/cli/generate/src/rules.rs @@ -306,7 +306,6 @@ impl Symbol { } impl From for Rule { - #[must_use] fn from(symbol: Symbol) -> Self { Self::Symbol(symbol) } diff --git a/cli/loader/emscripten-version b/cli/loader/emscripten-version index cc868b62..7d666cb2 100644 --- a/cli/loader/emscripten-version +++ b/cli/loader/emscripten-version @@ -1 +1 @@ -4.0.1 \ No newline at end of file +4.0.4 \ No newline at end of file diff --git a/cli/loader/src/lib.rs b/cli/loader/src/lib.rs index 7d309265..a2ae3eb1 100644 --- a/cli/loader/src/lib.rs +++ b/cli/loader/src/lib.rs @@ -11,6 +11,7 @@ use std::{ ffi::{OsStr, 
OsString}, fs, io::{BufRead, BufReader}, + marker::PhantomData, mem, path::{Path, PathBuf}, process::Command, @@ -18,9 +19,7 @@ use std::{ time::SystemTime, }; -#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))] -use anyhow::Error; -use anyhow::{anyhow, Context, Result}; +use anyhow::{anyhow, Context, Error, Result}; use etcetera::BaseStrategy as _; use fs4::fs_std::FileExt; use indoc::indoc; @@ -327,6 +326,7 @@ pub struct LanguageConfiguration<'a> { highlight_names: &'a Mutex>, #[cfg(feature = "tree-sitter-highlight")] use_all_highlight_names: bool, + _phantom: PhantomData<&'a ()>, } pub struct Loader { @@ -561,8 +561,8 @@ impl Loader { // If multiple language configurations match, then determine which // one to use by applying the configurations' content regexes. else { - let file_contents = - fs::read(path).with_context(|| format!("Failed to read path {path:?}"))?; + let file_contents = fs::read(path) + .with_context(|| format!("Failed to read path {}", path.display()))?; let file_contents = String::from_utf8_lossy(&file_contents); let mut best_score = -2isize; let mut best_configuration_id = None; @@ -780,8 +780,8 @@ impl Loader { if recompile { fs::create_dir_all(lock_path.parent().unwrap()).with_context(|| { format!( - "Failed to create directory {:?}", - lock_path.parent().unwrap() + "Failed to create directory {}", + lock_path.parent().unwrap().display() ) })?; let lock_file = fs::OpenOptions::new() @@ -799,7 +799,7 @@ impl Loader { } let library = unsafe { Library::new(&output_path) } - .with_context(|| format!("Error opening dynamic library {output_path:?}"))?; + .with_context(|| format!("Error opening dynamic library {}", output_path.display()))?; let language = unsafe { let language_fn = library .get:: Language>>(language_fn_name.as_bytes()) @@ -1214,6 +1214,7 @@ impl Loader { highlight_names: &self.highlight_names, #[cfg(feature = "tree-sitter-highlight")] use_all_highlight_names: self.use_all_highlight_names, + _phantom: PhantomData, }; for file_type in &configuration.file_types { @@ -1283,6 +1284,7 @@ impl Loader { highlight_names: &self.highlight_names, #[cfg(feature = "tree-sitter-highlight")] use_all_highlight_names: self.use_all_highlight_names, + _phantom: PhantomData, }; self.language_configurations.push(unsafe { mem::transmute::, LanguageConfiguration<'static>>( @@ -1564,7 +1566,7 @@ impl LanguageConfiguration<'_> { error.row = source[range.start..offset_within_section] .matches('\n') .count(); - Error::from(error).context(format!("Error in query file {path:?}")) + Error::from(error).context(format!("Error in query file {}", path.display())) } #[allow(clippy::type_complexity)] @@ -1581,7 +1583,7 @@ impl LanguageConfiguration<'_> { let abs_path = self.root_path.join(path); let prev_query_len = query.len(); query += &fs::read_to_string(&abs_path) - .with_context(|| format!("Failed to read query file {path:?}"))?; + .with_context(|| format!("Failed to read query file {}", path.display()))?; path_ranges.push((path.clone(), prev_query_len..query.len())); } } else { @@ -1599,7 +1601,7 @@ impl LanguageConfiguration<'_> { let path = queries_path.join(default_path); if path.exists() { query = fs::read_to_string(&path) - .with_context(|| format!("Failed to read query file {path:?}"))?; + .with_context(|| format!("Failed to read query file {}", path.display()))?; path_ranges.push((PathBuf::from(default_path), 0..query.len())); } } @@ -1612,8 +1614,8 @@ fn needs_recompile(lib_path: &Path, paths_to_check: &[PathBuf]) -> Result if !lib_path.exists() { 
return Ok(true); } - let lib_mtime = - mtime(lib_path).with_context(|| format!("Failed to read mtime of {lib_path:?}"))?; + let lib_mtime = mtime(lib_path) + .with_context(|| format!("Failed to read mtime of {}", lib_path.display()))?; for path in paths_to_check { if mtime(path)? > lib_mtime { return Ok(true); diff --git a/cli/npm/dsl.d.ts b/cli/npm/dsl.d.ts index 9ad40905..3ad9ea2a 100644 --- a/cli/npm/dsl.d.ts +++ b/cli/npm/dsl.d.ts @@ -10,6 +10,7 @@ type PrecRightRule = { type: 'PREC_RIGHT'; content: Rule; value: number }; type PrecRule = { type: 'PREC'; content: Rule; value: number }; type Repeat1Rule = { type: 'REPEAT1'; content: Rule }; type RepeatRule = { type: 'REPEAT'; content: Rule }; +type ReservedRule = { type: 'RESERVED'; content: Rule; context_name: string }; type SeqRule = { type: 'SEQ'; members: Rule[] }; type StringRule = { type: 'STRING'; value: string }; type SymbolRule = { type: 'SYMBOL'; name: Name }; @@ -33,12 +34,10 @@ type Rule = | SymbolRule | TokenRule; -class RustRegex { +declare class RustRegex { value: string; - constructor(pattern: string) { - this.value = pattern; - } + constructor(pattern: string); } type RuleOrLiteral = Rule | RegExp | RustRegex | string; @@ -167,6 +166,17 @@ interface Grammar< * @see https://tree-sitter.github.io/tree-sitter/creating-parsers/3-writing-the-grammar#keyword-extraction */ word?: ($: GrammarSymbols) => RuleOrLiteral; + + + /** + * Mapping of names to reserved word sets. The first reserved word set is the + * global word set, meaning it applies to every rule in every parse state. + * The other word sets can be used with the `reserved` function. + */ + reserved?: Record< + string, + ($: GrammarSymbols) => RuleOrLiteral[] + >; } type GrammarSchema = { @@ -251,7 +261,7 @@ declare function optional(rule: RuleOrLiteral): ChoiceRule; * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html */ declare const prec: { - (value: String | number, rule: RuleOrLiteral): PrecRule; + (value: string | number, rule: RuleOrLiteral): PrecRule; /** * Marks the given rule as left-associative (and optionally applies a @@ -267,7 +277,7 @@ declare const prec: { * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html */ left(rule: RuleOrLiteral): PrecLeftRule; - left(value: String | number, rule: RuleOrLiteral): PrecLeftRule; + left(value: string | number, rule: RuleOrLiteral): PrecLeftRule; /** * Marks the given rule as right-associative (and optionally applies a @@ -283,7 +293,7 @@ declare const prec: { * @see https://docs.oracle.com/cd/E19504-01/802-5880/6i9k05dh3/index.html */ right(rule: RuleOrLiteral): PrecRightRule; - right(value: String | number, rule: RuleOrLiteral): PrecRightRule; + right(value: string | number, rule: RuleOrLiteral): PrecRightRule; /** * Marks the given rule with a numerical precedence which will be used to @@ -300,7 +310,7 @@ declare const prec: { * * @see https://www.gnu.org/software/bison/manual/html_node/Generalized-LR-Parsing.html */ - dynamic(value: String | number, rule: RuleOrLiteral): PrecDynamicRule; + dynamic(value: string | number, rule: RuleOrLiteral): PrecDynamicRule; }; /** @@ -320,6 +330,15 @@ declare function repeat(rule: RuleOrLiteral): RepeatRule; */ declare function repeat1(rule: RuleOrLiteral): Repeat1Rule; +/** + * Overrides the global reserved word set for a given rule. The word set name + * should be defined in the `reserved` field in the grammar. 
+ * + * @param wordset name of the reserved word set + * @param rule rule that will use the reserved word set + */ +declare function reserved(wordset: string, rule: RuleOrLiteral): ReservedRule; + /** * Creates a rule that matches any number of other rules, one after another. * It is analogous to simply writing multiple symbols next to each other @@ -338,7 +357,7 @@ declare function sym(name: Name): SymbolRule; /** * Marks the given rule as producing only a single token. Tree-sitter's - * default is to treat each String or RegExp literal in the grammar as a + * default is to treat each string or RegExp literal in the grammar as a * separate token. Each token is matched separately by the lexer and * returned as its own leaf node in the tree. The token function allows * you to express a complex rule using the DSL functions (rather diff --git a/cli/npm/install.js b/cli/npm/install.js old mode 100755 new mode 100644 index f2a4944d..6d0fbc57 --- a/cli/npm/install.js +++ b/cli/npm/install.js @@ -6,7 +6,8 @@ const http = require('http'); const https = require('https'); const packageJSON = require('./package.json'); -// Look to a results table in https://github.com/tree-sitter/tree-sitter/issues/2196 +https.globalAgent.keepAlive = false; + const matrix = { platform: { 'darwin': { diff --git a/cli/npm/package.json b/cli/npm/package.json index 1f76ebb2..5e4af444 100644 --- a/cli/npm/package.json +++ b/cli/npm/package.json @@ -1,6 +1,6 @@ { "name": "tree-sitter-cli", - "version": "0.25.1", + "version": "0.25.9", "author": { "name": "Max Brunsfeld", "email": "maxbrunsfeld@gmail.com" @@ -14,14 +14,14 @@ "license": "MIT", "repository": { "type": "git", - "url": "https://github.com/tree-sitter/tree-sitter.git" + "url": "git+https://github.com/tree-sitter/tree-sitter.git", + "directory": "crates/cli/npm" }, "description": "CLI for generating fast incremental parsers", "keywords": [ "parser", "lexer" ], - "main": "lib/api/index.js", "engines": { "node": ">=12.0.0" }, diff --git a/cli/src/fuzz/allocations.rs b/cli/src/fuzz/allocations.rs index fed446c6..9d7c91ab 100644 --- a/cli/src/fuzz/allocations.rs +++ b/cli/src/fuzz/allocations.rs @@ -109,7 +109,7 @@ unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_ let result = realloc(ptr, size); if ptr.is_null() { record_alloc(result); - } else if ptr != result { + } else if !core::ptr::eq(ptr, result) { record_dealloc(ptr); record_alloc(result); } diff --git a/cli/src/fuzz/mod.rs b/cli/src/fuzz/mod.rs index 85e219cc..04b4910d 100644 --- a/cli/src/fuzz/mod.rs +++ b/cli/src/fuzz/mod.rs @@ -56,7 +56,9 @@ fn regex_env_var(name: &'static str) -> Option { pub fn new_seed() -> usize { int_env_var("TREE_SITTER_SEED").unwrap_or_else(|| { let mut rng = rand::thread_rng(); - rng.gen::() + let seed = rng.gen::(); + eprintln!("Seed: {seed}"); + seed }) } @@ -213,8 +215,9 @@ pub fn fuzz_language_corpus( } // Perform a random series of edits and reparse. 
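A note on the `reserved` DSL additions above: a grammar's `reserved` field names word sets, and wrapping a rule in `reserved(wordset, rule)` overrides the global set with the named one for that rule. In the serialized grammar this shows up as the RESERVED object described by the `ReservedRule` typing (and by the `reserved-rule` schema definition later in this change). A minimal Rust sketch of that shape using serde_json; the "keywords" set and the `identifier` symbol are made-up examples:

    use serde_json::json;

    fn main() {
        // Hypothetical grammar.json fragment for something like
        // `reserved('keywords', $.identifier)` in grammar.js: the rule
        // records the word-set name and wraps the inner rule.
        let rule = json!({
            "type": "RESERVED",
            "context_name": "keywords",
            "content": { "type": "SYMBOL", "name": "identifier" }
        });
        println!("{rule}");
    }
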
- let mut undo_stack = Vec::new(); - for _ in 0..=rand.unsigned(*EDIT_COUNT) { + let edit_count = rand.unsigned(*EDIT_COUNT); + let mut undo_stack = Vec::with_capacity(edit_count); + for _ in 0..=edit_count { let edit = get_random_edit(&mut rand, &input); undo_stack.push(invert_edit(&input, &edit)); perform_edit(&mut tree, &mut input, &edit).unwrap(); diff --git a/cli/src/fuzz/random.rs b/cli/src/fuzz/random.rs index 8a8410f4..7b2ede62 100644 --- a/cli/src/fuzz/random.rs +++ b/cli/src/fuzz/random.rs @@ -20,8 +20,8 @@ impl Rand { } pub fn words(&mut self, max_count: usize) -> Vec { - let mut result = Vec::new(); let word_count = self.unsigned(max_count); + let mut result = Vec::with_capacity(2 * word_count); for i in 0..word_count { if i > 0 { if self.unsigned(5) == 0 { diff --git a/cli/src/highlight.rs b/cli/src/highlight.rs index 8611374d..abb64020 100644 --- a/cli/src/highlight.rs +++ b/cli/src/highlight.rs @@ -1,5 +1,5 @@ use std::{ - collections::{HashMap, HashSet}, + collections::{BTreeMap, HashSet}, fmt::Write, fs, io::{self, Write as _}, @@ -82,9 +82,9 @@ impl<'de> Deserialize<'de> for Theme { { let mut styles = Vec::new(); let mut highlight_names = Vec::new(); - if let Ok(colors) = HashMap::::deserialize(deserializer) { - highlight_names.reserve(colors.len()); + if let Ok(colors) = BTreeMap::::deserialize(deserializer) { styles.reserve(colors.len()); + highlight_names.reserve(colors.len()); for (name, style_value) in colors { let mut style = Style::default(); parse_style(&mut style, style_value); @@ -127,7 +127,7 @@ impl Serialize for Theme { || effects.contains(Effects::ITALIC) || effects.contains(Effects::UNDERLINE) { - let mut style_json = HashMap::new(); + let mut style_json = BTreeMap::new(); if let Some(color) = color { style_json.insert("color", color); } diff --git a/cli/src/init.rs b/cli/src/init.rs index f935f05b..709612af 100644 --- a/cli/src/init.rs +++ b/cli/src/init.rs @@ -98,6 +98,7 @@ const TESTS_SWIFT_TEMPLATE: &str = include_str!("./templates/tests.swift"); const BUILD_ZIG_TEMPLATE: &str = include_str!("./templates/build.zig"); const BUILD_ZIG_ZON_TEMPLATE: &str = include_str!("./templates/build.zig.zon"); const ROOT_ZIG_TEMPLATE: &str = include_str!("./templates/root.zig"); +const TEST_ZIG_TEMPLATE: &str = include_str!("./templates/test.zig"); const TREE_SITTER_JSON_SCHEMA: &str = "https://tree-sitter.github.io/tree-sitter/assets/schemas/config.schema.json"; @@ -301,14 +302,36 @@ pub fn generate_grammar_files( }; // Create package.json - missing_path(repo_path.join("package.json"), |path| { - generate_file( - path, - PACKAGE_JSON_TEMPLATE, - dashed_language_name.as_str(), - &generate_opts, - ) - })?; + missing_path_else( + repo_path.join("package.json"), + allow_update, + |path| { + generate_file( + path, + PACKAGE_JSON_TEMPLATE, + dashed_language_name.as_str(), + &generate_opts, + ) + }, + |path| { + let contents = fs::read_to_string(path)? + .replace( + r#""node-addon-api": "^8.3.1"#, + r#""node-addon-api": "^8.5.0""#, + ) + .replace( + indoc! {r#" + "prebuildify": "^6.0.1", + "tree-sitter-cli":"#}, + indoc! 
{r#" + "prebuildify": "^6.0.1", + "tree-sitter": "^0.22.4", + "tree-sitter-cli":"#}, + ); + write_file(path, contents)?; + Ok(()) + }, + )?; // Do not create a grammar.js file in a repo with multiple language configs if !tree_sitter_config.has_multiple_language_configs() { @@ -371,14 +394,25 @@ pub fn generate_grammar_files( generate_file(path, BUILD_RS_TEMPLATE, language_name, &generate_opts) })?; - missing_path(repo_path.join("Cargo.toml"), |path| { - generate_file( - path, - CARGO_TOML_TEMPLATE, - dashed_language_name.as_str(), - &generate_opts, - ) - })?; + missing_path_else( + repo_path.join("Cargo.toml"), + allow_update, + |path| { + generate_file( + path, + CARGO_TOML_TEMPLATE, + dashed_language_name.as_str(), + &generate_opts, + ) + }, + |path| { + let contents = fs::read_to_string(path)?; + if contents.contains("\"LICENSE\"") { + write_file(path, contents.replace("\"LICENSE\"", "\"/LICENSE\""))?; + } + Ok(()) + }, + )?; Ok(()) })?; @@ -394,6 +428,7 @@ pub fn generate_grammar_files( |path| { let contents = fs::read_to_string(path)?; if !contents.contains("bun") { + eprintln!("Replacing index.js"); generate_file(path, INDEX_JS_TEMPLATE, language_name, &generate_opts)?; } Ok(()) @@ -597,14 +632,32 @@ pub fn generate_grammar_files( })?; missing_path(path.join("tests"), create_dir)?.apply(|path| { - missing_path(path.join("test_binding.py"), |path| { - generate_file( - path, - TEST_BINDING_PY_TEMPLATE, - language_name, - &generate_opts, - ) - })?; + missing_path_else( + path.join("test_binding.py"), + allow_update, + |path| { + generate_file( + path, + TEST_BINDING_PY_TEMPLATE, + language_name, + &generate_opts, + ) + }, + |path| { + let mut contents = fs::read_to_string(path)?; + if !contents.contains("Parser(Language(") { + contents = contents + .replace("tree_sitter.Language(", "Parser(Language(") + .replace(".language())\n", ".language()))\n") + .replace( + "import tree_sitter\n", + "from tree_sitter import Language, Parser\n", + ); + write_file(path, contents)?; + } + Ok(()) + }, + )?; Ok(()) })?; @@ -614,7 +667,7 @@ pub fn generate_grammar_files( |path| generate_file(path, SETUP_PY_TEMPLATE, language_name, &generate_opts), |path| { let contents = fs::read_to_string(path)?; - if !contents.contains("egg_info") || !contents.contains("Py_GIL_DISABLED") { + if !contents.contains("build_ext") { eprintln!("Replacing setup.py"); generate_file(path, SETUP_PY_TEMPLATE, language_name, &generate_opts)?; } @@ -653,22 +706,17 @@ pub fn generate_grammar_files( // Generate Swift bindings if tree_sitter_config.bindings.swift { missing_path(bindings_dir.join("swift"), create_dir)?.apply(|path| { - let lang_path = path.join(format!("TreeSitter{camel_name}")); + let lang_path = path.join(&class_name); missing_path(&lang_path, create_dir)?; missing_path(lang_path.join(format!("{language_name}.h")), |path| { generate_file(path, PARSER_NAME_H_TEMPLATE, language_name, &generate_opts) })?; - missing_path( - path.join(format!("TreeSitter{camel_name}Tests")), - create_dir, - )? 
- .apply(|path| { - missing_path( - path.join(format!("TreeSitter{camel_name}Tests.swift")), - |path| generate_file(path, TESTS_SWIFT_TEMPLATE, language_name, &generate_opts), - )?; + missing_path(path.join(format!("{class_name}Tests")), create_dir)?.apply(|path| { + missing_path(path.join(format!("{class_name}Tests.swift")), |path| { + generate_file(path, TESTS_SWIFT_TEMPLATE, language_name, &generate_opts) + })?; Ok(()) })?; @@ -679,10 +727,13 @@ pub fn generate_grammar_files( |path| generate_file(path, PACKAGE_SWIFT_TEMPLATE, language_name, &generate_opts), |path| { let mut contents = fs::read_to_string(path)?; - contents = contents.replace( - "https://github.com/ChimeHQ/SwiftTreeSitter", - "https://github.com/tree-sitter/swift-tree-sitter", - ); + contents = contents + .replace( + "https://github.com/ChimeHQ/SwiftTreeSitter", + "https://github.com/tree-sitter/swift-tree-sitter", + ) + .replace("version: \"0.8.0\")", "version: \"0.9.0\")") + .replace("(url:", "(name: \"SwiftTreeSitter\", url:"); write_file(path, contents)?; Ok(()) }, @@ -694,17 +745,54 @@ pub fn generate_grammar_files( // Generate Zig bindings if tree_sitter_config.bindings.zig { - missing_path(repo_path.join("build.zig"), |path| { - generate_file(path, BUILD_ZIG_TEMPLATE, language_name, &generate_opts) - })?; + missing_path_else( + repo_path.join("build.zig"), + allow_update, + |path| generate_file(path, BUILD_ZIG_TEMPLATE, language_name, &generate_opts), + |path| { + let contents = fs::read_to_string(path)?; + if !contents.contains("b.pkg_hash.len") { + eprintln!("Replacing build.zig"); + generate_file(path, BUILD_ZIG_TEMPLATE, language_name, &generate_opts) + } else { + Ok(()) + } + }, + )?; - missing_path(repo_path.join("build.zig.zon"), |path| { - generate_file(path, BUILD_ZIG_ZON_TEMPLATE, language_name, &generate_opts) - })?; + missing_path_else( + repo_path.join("build.zig.zon"), + allow_update, + |path| generate_file(path, BUILD_ZIG_ZON_TEMPLATE, language_name, &generate_opts), + |path| { + let contents = fs::read_to_string(path)?; + if !contents.contains(".name = .tree_sitter_") { + eprintln!("Replacing build.zig.zon"); + generate_file(path, BUILD_ZIG_ZON_TEMPLATE, language_name, &generate_opts) + } else { + Ok(()) + } + }, + )?; missing_path(bindings_dir.join("zig"), create_dir)?.apply(|path| { - missing_path(path.join("root.zig"), |path| { - generate_file(path, ROOT_ZIG_TEMPLATE, language_name, &generate_opts) + missing_path_else( + path.join("root.zig"), + allow_update, + |path| generate_file(path, ROOT_ZIG_TEMPLATE, language_name, &generate_opts), + |path| { + let contents = fs::read_to_string(path)?; + if contents.contains("ts.Language") { + eprintln!("Replacing root.zig"); + generate_file(path, ROOT_ZIG_TEMPLATE, language_name, &generate_opts) + } else { + Ok(()) + } + }, + )?; + + missing_path(path.join("test.zig"), |path| { + generate_file(path, TEST_ZIG_TEMPLATE, language_name, &generate_opts) })?; Ok(()) diff --git a/cli/src/input.rs b/cli/src/input.rs index e22d19ef..5ab82087 100644 --- a/cli/src/input.rs +++ b/cli/src/input.rs @@ -89,8 +89,8 @@ pub fn get_input( let Some(path_str) = path.to_str() else { bail!("Invalid path: {}", path.display()); }; - let paths = - glob(path_str).with_context(|| format!("Invalid glob pattern {path:?}"))?; + let paths = glob(path_str) + .with_context(|| format!("Invalid glob pattern {}", path.display()))?; for path in paths { incorporate_path(path?, positive); } diff --git a/cli/src/main.rs b/cli/src/main.rs index 7ddc8dcb..ecb638f7 100644 --- a/cli/src/main.rs +++ 
b/cli/src/main.rs @@ -206,7 +206,8 @@ struct Parse { #[arg(long, short)] pub quiet: bool, #[allow(clippy::doc_markdown)] - /// Apply edits in the format: \"row, col delcount insert_text\" + /// Apply edits in the format: \"row,col|position delcount insert_text\"; can be supplied + /// multiple times #[arg( long, num_args = 1.., @@ -964,8 +965,11 @@ impl Parse { for path in &paths { let path = Path::new(&path); - let language = - loader.select_language(path, current_dir, self.scope.as_deref())?; + let language = loader + .select_language(path, current_dir, self.scope.as_deref()) + .with_context(|| { + anyhow!("Failed to load language for path \"{}\"", path.display()) + })?; parse::parse_file_at_path( &mut parser, diff --git a/cli/src/parse.rs b/cli/src/parse.rs index 91956c69..bc01a908 100644 --- a/cli/src/parse.rs +++ b/cli/src/parse.rs @@ -29,18 +29,28 @@ pub struct Stats { impl fmt::Display for Stats { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let duration_us = self.total_duration.as_micros(); + let success_rate = if self.total_parses > 0 { + format!( + "{:.2}%", + ((self.successful_parses as f64) / (self.total_parses as f64)) * 100.0, + ) + } else { + "N/A".to_string() + }; + let duration_str = match (self.total_parses, duration_us) { + (0, _) => "N/A".to_string(), + (_, 0) => "0 bytes/ms".to_string(), + (_, _) => format!( + "{} bytes/ms", + ((self.total_bytes as u128) * 1_000) / duration_us + ), + }; writeln!( f, - "Total parses: {}; successful parses: {}; failed parses: {}; success percentage: {:.2}%; average speed: {} bytes/ms", + "Total parses: {}; successful parses: {}; failed parses: {}; success percentage: {success_rate}; average speed: {duration_str}", self.total_parses, self.successful_parses, self.total_parses - self.successful_parses, - ((self.successful_parses as f64) / (self.total_parses as f64)) * 100.0, - if duration_us != 0 { - ((self.total_bytes as u128) * 1_000) / duration_us - } else { - 0 - } ) } } @@ -225,7 +235,7 @@ pub struct ParseStats { pub cumulative_stats: Stats, } -#[derive(Serialize, ValueEnum, Debug, Clone, Default, Eq, PartialEq)] +#[derive(Serialize, ValueEnum, Debug, Copy, Clone, Default, Eq, PartialEq)] pub enum ParseDebugType { #[default] Quiet, @@ -273,10 +283,11 @@ pub fn parse_file_at_path( } // Log to stderr if `--debug` was passed else if opts.debug != ParseDebugType::Quiet { - let mut curr_version: usize = 0usize; + let mut curr_version: usize = 0; let use_color = std::env::var("NO_COLOR").map_or(true, |v| v != "1"); - parser.set_logger(Some(Box::new(|log_type, message| { - if opts.debug == ParseDebugType::Normal { + let debug = opts.debug; + parser.set_logger(Some(Box::new(move |log_type, message| { + if debug == ParseDebugType::Normal { if log_type == LogType::Lex { write!(&mut io::stderr(), " ").unwrap(); } @@ -686,19 +697,23 @@ pub fn parse_file_at_path( if let Some(node) = first_error { let start = node.start_position(); let end = node.end_position(); + let mut node_text = String::new(); + for c in node.kind().chars() { + if let Some(escaped) = escape_invisible(c) { + node_text += escaped; + } else { + node_text.push(c); + } + } write!(&mut stdout, "\t(")?; if node.is_missing() { if node.is_named() { - write!(&mut stdout, "MISSING {}", node.kind())?; + write!(&mut stdout, "MISSING {node_text}")?; } else { - write!( - &mut stdout, - "MISSING \"{}\"", - node.kind().replace('\n', "\\n") - )?; + write!(&mut stdout, "MISSING \"{node_text}\"")?; } } else { - write!(&mut stdout, "{}", node.kind())?; + write!(&mut stdout,
"{node_text}")?; } write!( &mut stdout, diff --git a/cli/src/query.rs b/cli/src/query.rs index ea961880..81ccea50 100644 --- a/cli/src/query.rs +++ b/cli/src/query.rs @@ -34,7 +34,7 @@ pub fn query_file_at_path( let mut stdout = stdout.lock(); let query_source = fs::read_to_string(query_path) - .with_context(|| format!("Error reading query file {query_path:?}"))?; + .with_context(|| format!("Error reading query file {}", query_path.display()))?; let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?; let mut query_cursor = QueryCursor::new(); @@ -55,7 +55,7 @@ pub fn query_file_at_path( } let source_code = - fs::read(path).with_context(|| format!("Error reading source file {path:?}"))?; + fs::read(path).with_context(|| format!("Error reading source file {}", path.display()))?; let tree = parser.parse(&source_code, None).unwrap(); let start = Instant::now(); diff --git a/cli/src/templates/_cargo.toml b/cli/src/templates/_cargo.toml index 5ebe4899..74712694 100644 --- a/cli/src/templates/_cargo.toml +++ b/cli/src/templates/_cargo.toml @@ -18,7 +18,7 @@ include = [ "queries/*", "src/*", "tree-sitter.json", - "LICENSE", + "/LICENSE", ] [lib] diff --git a/cli/src/templates/build.zig b/cli/src/templates/build.zig index 65c3b837..c2428289 100644 --- a/cli/src/templates/build.zig +++ b/cli/src/templates/build.zig @@ -7,24 +7,24 @@ pub fn build(b: *std.Build) !void { const shared = b.option(bool, "build-shared", "Build a shared library") orelse true; const reuse_alloc = b.option(bool, "reuse-allocator", "Reuse the library allocator") orelse false; - const lib: *std.Build.Step.Compile = if (shared) b.addSharedLibrary(.{ - .name = "tree-sitter-PARSER_NAME", - .pic = true, - .target = target, - .optimize = optimize, - .link_libc = true, - }) else b.addStaticLibrary(.{ - .name = "tree-sitter-PARSER_NAME", - .target = target, - .optimize = optimize, - .link_libc = true, + const library_name = "tree-sitter-PARSER_NAME"; + + const lib: *std.Build.Step.Compile = b.addLibrary(.{ + .name = library_name, + .linkage = if (shared) .dynamic else .static, + .root_module = b.createModule(.{ + .target = target, + .optimize = optimize, + .link_libc = true, + .pic = if (shared) true else null, + }), }); lib.addCSourceFile(.{ .file = b.path("src/parser.c"), .flags = &.{"-std=c11"}, }); - if (hasScanner(b.build_root.handle)) { + if (fileExists(b, "src/scanner.c")) { lib.addCSourceFile(.{ .file = b.path("src/scanner.c"), .flags = &.{"-std=c11"}, @@ -42,38 +42,52 @@ pub fn build(b: *std.Build) !void { b.installArtifact(lib); b.installFile("src/node-types.json", "node-types.json"); - b.installDirectory(.{ .source_dir = b.path("queries"), .install_dir = .prefix, .install_subdir = "queries", .include_extensions = &.{"scm"} }); - const module = b.addModule("tree-sitter-PARSER_NAME", .{ + if (fileExists(b, "queries")) { + b.installDirectory(.{ + .source_dir = b.path("queries"), + .install_dir = .prefix, + .install_subdir = "queries", + .include_extensions = &.{"scm"}, + }); + } + + const module = b.addModule(library_name, .{ .root_source_file = b.path("bindings/zig/root.zig"), .target = target, .optimize = optimize, }); module.linkLibrary(lib); - const ts_dep = b.dependency("tree-sitter", .{}); - const ts_mod = ts_dep.module("tree-sitter"); - module.addImport("tree-sitter", ts_mod); - - // ╭─────────────────╮ - // │ Tests │ - // ╰─────────────────╯ - const tests = b.addTest(.{ - .root_source_file = b.path("bindings/zig/root.zig"), - .target = target, - .optimize = optimize, + .root_module 
= b.createModule(.{ + .root_source_file = b.path("bindings/zig/test.zig"), + .target = target, + .optimize = optimize, + }), }); - tests.linkLibrary(lib); - tests.root_module.addImport("tree-sitter", ts_mod); + tests.root_module.addImport(library_name, module); + + // HACK: fetch tree-sitter dependency only when testing this module + if (b.pkg_hash.len == 0) { + var args = try std.process.argsWithAllocator(b.allocator); + defer args.deinit(); + while (args.next()) |a| { + if (std.mem.eql(u8, a, "test")) { + const ts_dep = b.lazyDependency("tree_sitter", .{}) orelse continue; + tests.root_module.addImport("tree-sitter", ts_dep.module("tree-sitter")); + break; + } + } + } const run_tests = b.addRunArtifact(tests); - const test_step = b.step("test", "Run unit tests"); test_step.dependOn(&run_tests.step); } -inline fn hasScanner(dir: std.fs.Dir) bool { - dir.access("src/scanner.c", .{}) catch return false; +inline fn fileExists(b: *std.Build, filename: []const u8) bool { + const dir = b.build_root.handle; + dir.access(filename, .{}) catch return false; return true; } diff --git a/cli/src/templates/build.zig.zon b/cli/src/templates/build.zig.zon index 1f4b3907..ef084d23 100644 --- a/cli/src/templates/build.zig.zon +++ b/cli/src/templates/build.zig.zon @@ -1,10 +1,13 @@ .{ - .name = "tree-sitter-PARSER_NAME", + .name = .tree_sitter_PARSER_NAME, .version = "PARSER_VERSION", - .dependencies = .{ .@"tree-sitter" = .{ - .url = "https://github.com/tree-sitter/zig-tree-sitter/archive/refs/tags/v0.25.0.tar.gz", - .hash = "12201a8d5e840678bbbf5128e605519c4024af422295d68e2ba2090e675328e5811d", - } }, + .dependencies = .{ + .tree_sitter = .{ + .url = "git+https://github.com/tree-sitter/zig-tree-sitter#b4b72c903e69998fc88e27e154a5e3cc9166551b", + .hash = "tree_sitter-0.25.0-8heIf51vAQConvVIgvm-9mVIbqh7yabZYqPXfOpS3YoG", + .lazy = true, + }, + }, .paths = .{ "build.zig", "build.zig.zon", diff --git a/cli/src/templates/cmakelists.cmake b/cli/src/templates/cmakelists.cmake index ee6fe2b4..34dd8efc 100644 --- a/cli/src/templates/cmakelists.cmake +++ b/cli/src/templates/cmakelists.cmake @@ -15,6 +15,8 @@ if(NOT ${TREE_SITTER_ABI_VERSION} MATCHES "^[0-9]+$") message(FATAL_ERROR "TREE_SITTER_ABI_VERSION must be an integer") endif() +include(GNUInstallDirs) + find_program(TREE_SITTER_CLI tree-sitter DOC "Tree-sitter CLI") add_custom_command(OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/src/parser.c" @@ -47,13 +49,11 @@ set_target_properties(tree-sitter-KEBAB_PARSER_NAME configure_file(bindings/c/tree-sitter-KEBAB_PARSER_NAME.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-KEBAB_PARSER_NAME.pc" @ONLY) -include(GNUInstallDirs) - install(DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/bindings/c/tree_sitter" DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}" FILES_MATCHING PATTERN "*.h") install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter-KEBAB_PARSER_NAME.pc" - DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig") + DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig") install(TARGETS tree-sitter-KEBAB_PARSER_NAME LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}") diff --git a/cli/src/templates/gitattributes b/cli/src/templates/gitattributes index 79475a5e..7772c942 100644 --- a/cli/src/templates/gitattributes +++ b/cli/src/templates/gitattributes @@ -37,5 +37,6 @@ Package.swift linguist-generated Package.resolved linguist-generated # Zig bindings +bindings/zig/* linguist-generated build.zig linguist-generated build.zig.zon linguist-generated diff --git a/cli/src/templates/gitignore b/cli/src/templates/gitignore index bc9e191a..87a0c80c 100644 --- 
a/cli/src/templates/gitignore +++ b/cli/src/templates/gitignore @@ -1,13 +1,16 @@ # Rust artifacts target/ +Cargo.lock # Node artifacts build/ prebuilds/ node_modules/ +package-lock.json # Swift artifacts .build/ +Package.resolved # Go artifacts _obj/ diff --git a/cli/src/templates/lib.rs b/cli/src/templates/lib.rs index bf9faa4d..8478f488 100644 --- a/cli/src/templates/lib.rs +++ b/cli/src/templates/lib.rs @@ -1,7 +1,7 @@ -//! This crate provides CAMEL_PARSER_NAME language support for the [tree-sitter][] parsing library. +//! This crate provides TITLE_PARSER_NAME language support for the [tree-sitter] parsing library. //! -//! Typically, you will use the [LANGUAGE][] constant to add this language to a -//! tree-sitter [Parser][], and then use the parser to parse some code: +//! Typically, you will use the [`LANGUAGE`] constant to add this language to a +//! tree-sitter [`Parser`], and then use the parser to parse some code: //! //! ``` //! let code = r#" @@ -15,7 +15,7 @@ //! assert!(!tree.root_node().has_error()); //! ``` //! -//! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html +//! [`Parser`]: https://docs.rs/tree-sitter/RUST_BINDING_VERSION/tree_sitter/struct.Parser.html //! [tree-sitter]: https://tree-sitter.github.io/ use tree_sitter_language::LanguageFn; @@ -24,12 +24,10 @@ extern "C" { fn tree_sitter_PARSER_NAME() -> *const (); } -/// The tree-sitter [`LanguageFn`][LanguageFn] for this grammar. -/// -/// [LanguageFn]: https://docs.rs/tree-sitter-language/*/tree_sitter_language/struct.LanguageFn.html +/// The tree-sitter [`LanguageFn`] for this grammar. pub const LANGUAGE: LanguageFn = unsafe { LanguageFn::from_raw(tree_sitter_PARSER_NAME) }; -/// The content of the [`node-types.json`][] file for this grammar. +/// The content of the [`node-types.json`] file for this grammar. 
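The template doc comment above spells out the intended flow for a generated binding crate: convert the crate's `LANGUAGE` constant (a `LanguageFn`) into a `tree_sitter::Language` and hand it to a `Parser`. A minimal sketch of that flow, assuming a hypothetical generated crate named `tree_sitter_example`:

    fn main() {
        let mut parser = tree_sitter::Parser::new();
        parser
            // `LANGUAGE` is a `LanguageFn`; `.into()` converts it to a `Language`.
            .set_language(&tree_sitter_example::LANGUAGE.into())
            .expect("Error loading example grammar");
        let source = "/* source code in the example language */";
        let tree = parser.parse(source, None).unwrap();
        println!("{}", tree.root_node().to_sexp());
    }
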
/// /// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers/6-static-node-types pub const NODE_TYPES: &str = include_str!("../../src/node-types.json"); diff --git a/cli/src/templates/makefile b/cli/src/templates/makefile index 1aa71026..4e8411c5 100644 --- a/cli/src/templates/makefile +++ b/cli/src/templates/makefile @@ -77,7 +77,9 @@ install: all install -m755 lib$(LANGUAGE_NAME).$(SOEXT) '$(DESTDIR)$(LIBDIR)'/lib$(LANGUAGE_NAME).$(SOEXTVER) ln -sf lib$(LANGUAGE_NAME).$(SOEXTVER) '$(DESTDIR)$(LIBDIR)'/lib$(LANGUAGE_NAME).$(SOEXTVER_MAJOR) ln -sf lib$(LANGUAGE_NAME).$(SOEXTVER_MAJOR) '$(DESTDIR)$(LIBDIR)'/lib$(LANGUAGE_NAME).$(SOEXT) +ifneq ($(wildcard queries/*.scm),) install -m644 queries/*.scm '$(DESTDIR)$(DATADIR)'/tree-sitter/queries/KEBAB_PARSER_NAME +endif uninstall: $(RM) '$(DESTDIR)$(LIBDIR)'/lib$(LANGUAGE_NAME).a \ diff --git a/cli/src/templates/package.json b/cli/src/templates/package.json index ba86aef7..a2ee2e59 100644 --- a/cli/src/templates/package.json +++ b/cli/src/templates/package.json @@ -29,15 +29,16 @@ "*.wasm" ], "dependencies": { - "node-addon-api": "^8.2.1", - "node-gyp-build": "^4.8.2" + "node-addon-api": "^8.5.0", + "node-gyp-build": "^4.8.4" }, "devDependencies": { "prebuildify": "^6.0.1", + "tree-sitter": "^0.22.4", "tree-sitter-cli": "^CLI_VERSION" }, "peerDependencies": { - "tree-sitter": "^0.21.1" + "tree-sitter": "^0.22.4" }, "peerDependenciesMeta": { "tree-sitter": { diff --git a/cli/src/templates/package.swift b/cli/src/templates/package.swift index feb934f0..f5562891 100644 --- a/cli/src/templates/package.swift +++ b/cli/src/templates/package.swift @@ -14,7 +14,7 @@ let package = Package( .library(name: "PARSER_CLASS_NAME", targets: ["PARSER_CLASS_NAME"]), ], dependencies: [ - .package(url: "https://github.com/tree-sitter/swift-tree-sitter", from: "0.8.0"), + .package(name: "SwiftTreeSitter", url: "https://github.com/tree-sitter/swift-tree-sitter", from: "0.9.0"), ], targets: [ .target( diff --git a/cli/src/templates/pyproject.toml b/cli/src/templates/pyproject.toml index 0e0c4d24..0f47e0f4 100644 --- a/cli/src/templates/pyproject.toml +++ b/cli/src/templates/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=42", "wheel"] +requires = ["setuptools>=62.4.0", "wheel"] build-backend = "setuptools.build_meta" [project] diff --git a/cli/src/templates/root.zig b/cli/src/templates/root.zig index 26a6cbef..0e8f24bd 100644 --- a/cli/src/templates/root.zig +++ b/cli/src/templates/root.zig @@ -1,19 +1,5 @@ -const testing = @import("std").testing; +extern fn tree_sitter_PARSER_NAME() callconv(.c) *const anyopaque; -const ts = @import("tree-sitter"); -const Language = ts.Language; -const Parser = ts.Parser; - -pub extern fn tree_sitter_PARSER_NAME() callconv(.C) *const Language; - -pub export fn language() *const Language { +pub fn language() *const anyopaque { return tree_sitter_PARSER_NAME(); } - -test "can load grammar" { - const parser = Parser.create(); - defer parser.destroy(); - try testing.expectEqual(parser.setLanguage(language()), void{}); - try testing.expectEqual(parser.getLanguage(), tree_sitter_PARSER_NAME()); -} - diff --git a/cli/src/templates/setup.py b/cli/src/templates/setup.py index 534bd9f2..7f92eaee 100644 --- a/cli/src/templates/setup.py +++ b/cli/src/templates/setup.py @@ -1,31 +1,12 @@ from os import path -from platform import system from sysconfig import get_config_var from setuptools import Extension, find_packages, setup from setuptools.command.build import build +from 
setuptools.command.build_ext import build_ext from setuptools.command.egg_info import egg_info from wheel.bdist_wheel import bdist_wheel -sources = [ - "bindings/python/tree_sitter_LOWER_PARSER_NAME/binding.c", - "src/parser.c", -] -if path.exists("src/scanner.c"): - sources.append("src/scanner.c") - -macros: list[tuple[str, str | None]] = [ - ("PY_SSIZE_T_CLEAN", None), - ("TREE_SITTER_HIDE_SYMBOLS", None), -] -if limited_api := not get_config_var("Py_GIL_DISABLED"): - macros.append(("Py_LIMITED_API", "0x030A0000")) - -if system() != "Windows": - cflags = ["-std=c11", "-fvisibility=hidden"] -else: - cflags = ["/std:c11", "/utf-8"] - class Build(build): def run(self): @@ -35,6 +16,19 @@ class Build(build): super().run() +class BuildExt(build_ext): + def build_extension(self, ext: Extension): + if self.compiler.compiler_type != "msvc": + ext.extra_compile_args = ["-std=c11", "-fvisibility=hidden"] + else: + ext.extra_compile_args = ["/std:c11", "/utf-8"] + if path.exists("src/scanner.c"): + ext.sources.append("src/scanner.c") + if ext.py_limited_api: + ext.define_macros.append(("Py_LIMITED_API", "0x030A0000")) + super().build_extension(ext) + + class BdistWheel(bdist_wheel): def get_tag(self): python, abi, platform = super().get_tag() @@ -61,15 +55,21 @@ setup( ext_modules=[ Extension( name="_binding", - sources=sources, - extra_compile_args=cflags, - define_macros=macros, + sources=[ + "bindings/python/tree_sitter_LOWER_PARSER_NAME/binding.c", + "src/parser.c", + ], + define_macros=[ + ("PY_SSIZE_T_CLEAN", None), + ("TREE_SITTER_HIDE_SYMBOLS", None), + ], include_dirs=["src"], - py_limited_api=limited_api, + py_limited_api=not get_config_var("Py_GIL_DISABLED"), ) ], cmdclass={ "build": Build, + "build_ext": BuildExt, "bdist_wheel": BdistWheel, "egg_info": EggInfo, }, diff --git a/cli/src/templates/test.zig b/cli/src/templates/test.zig new file mode 100644 index 00000000..7baec557 --- /dev/null +++ b/cli/src/templates/test.zig @@ -0,0 +1,17 @@ +const testing = @import("std").testing; + +const ts = @import("tree-sitter"); +const root = @import("tree-sitter-PARSER_NAME"); +const Language = ts.Language; +const Parser = ts.Parser; + +test "can load grammar" { + const parser = Parser.create(); + defer parser.destroy(); + + const lang: *const ts.Language = @ptrCast(root.language()); + defer lang.destroy(); + + try testing.expectEqual(void{}, parser.setLanguage(lang)); + try testing.expectEqual(lang, parser.getLanguage()); +} diff --git a/cli/src/templates/test_binding.py b/cli/src/templates/test_binding.py index 31aef9ac..a832c368 100644 --- a/cli/src/templates/test_binding.py +++ b/cli/src/templates/test_binding.py @@ -1,12 +1,12 @@ from unittest import TestCase -import tree_sitter +from tree_sitter import Language, Parser import tree_sitter_LOWER_PARSER_NAME class TestLanguage(TestCase): def test_can_load_grammar(self): try: - tree_sitter.Language(tree_sitter_LOWER_PARSER_NAME.language()) + Parser(Language(tree_sitter_LOWER_PARSER_NAME.language())) except Exception: self.fail("Error loading TITLE_PARSER_NAME grammar") diff --git a/cli/src/test_highlight.rs b/cli/src/test_highlight.rs index 1bd8f358..d2f37e2f 100644 --- a/cli/src/test_highlight.rs +++ b/cli/src/test_highlight.rs @@ -172,7 +172,7 @@ pub fn iterate_assertions( let mut j = i; while let (false, Some(highlight)) = (passed, highlights.get(j)) { end_column = position.column + length - 1; - if highlight.0.column > end_column { + if highlight.0.row >= position.row && highlight.0.column > end_column { break 'highlight_loop; } diff --git 
a/cli/src/tests/async_context_test.rs b/cli/src/tests/async_context_test.rs index edcd5e4c..fbcc5c30 100644 --- a/cli/src/tests/async_context_test.rs +++ b/cli/src/tests/async_context_test.rs @@ -238,7 +238,7 @@ async fn yield_now() { SimpleYieldNow { yielded: false }.await; } -pub fn noop_waker() -> Waker { +pub const fn noop_waker() -> Waker { const VTABLE: RawWakerVTable = RawWakerVTable::new( // Cloning just returns a new no-op raw waker |_| RAW, diff --git a/cli/src/tests/corpus_test.rs b/cli/src/tests/corpus_test.rs index 750bf442..6d272f51 100644 --- a/cli/src/tests/corpus_test.rs +++ b/cli/src/tests/corpus_test.rs @@ -23,7 +23,7 @@ use crate::{ }; #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_bash(seed: usize) { +fn test_corpus_for_bash_language(seed: usize) { test_language_corpus( "bash", seed, @@ -39,73 +39,77 @@ fn test_corpus_for_bash(seed: usize) { } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_c(seed: usize) { +fn test_corpus_for_c_language(seed: usize) { test_language_corpus("c", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_cpp(seed: usize) { +fn test_corpus_for_cpp_language(seed: usize) { test_language_corpus("cpp", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_embedded_template(seed: usize) { +fn test_corpus_for_embedded_template_language(seed: usize) { test_language_corpus("embedded-template", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_go(seed: usize) { +fn test_corpus_for_go_language(seed: usize) { test_language_corpus("go", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_html(seed: usize) { +fn test_corpus_for_html_language(seed: usize) { test_language_corpus("html", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_java(seed: usize) { - test_language_corpus("java", seed, None, None); +fn test_corpus_for_java_language(seed: usize) { + test_language_corpus( + "java", + seed, + Some(&["java - corpus - expressions - switch with unnamed pattern variable"]), + None, + ); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_javascript(seed: usize) { +fn test_corpus_for_javascript_language(seed: usize) { test_language_corpus("javascript", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_json(seed: usize) { +fn test_corpus_for_json_language(seed: usize) { test_language_corpus("json", seed, None, None); } -#[ignore] #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_php(seed: usize) { - test_language_corpus("php", seed, None, None); +fn test_corpus_for_php_language(seed: usize) { + test_language_corpus("php", seed, None, Some("php")); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_python(seed: usize) { +fn test_corpus_for_python_language(seed: usize) { test_language_corpus("python", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_ruby(seed: usize) { +fn test_corpus_for_ruby_language(seed: usize) { test_language_corpus("ruby", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_rust(seed: usize) { +fn test_corpus_for_rust_language(seed: usize) { 
test_language_corpus("rust", seed, None, None); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_typescript(seed: usize) { +fn test_corpus_for_typescript_language(seed: usize) { test_language_corpus("typescript", seed, None, Some("typescript")); } #[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)] -fn test_corpus_for_tsx(seed: usize) { +fn test_corpus_for_tsx_language(seed: usize) { test_language_corpus("typescript", seed, None, Some("tsx")); } @@ -239,8 +243,9 @@ pub fn test_language_corpus( } // Perform a random series of edits and reparse. - let mut undo_stack = Vec::new(); - for _ in 0..=rand.unsigned(*EDIT_COUNT) { + let edit_count = rand.unsigned(*EDIT_COUNT); + let mut undo_stack = Vec::with_capacity(edit_count); + for _ in 0..=edit_count { let edit = get_random_edit(&mut rand, &input); undo_stack.push(invert_edit(&input, &edit)); perform_edit(&mut tree, &mut input, &edit).unwrap(); @@ -376,7 +381,7 @@ fn test_feature_corpus_files() { let actual_message = e.to_string().replace("\r\n", "\n"); if expected_message != actual_message { eprintln!( - "Unexpected error message.\n\nExpected:\n\n{expected_message}\nActual:\n\n{actual_message}\n", + "Unexpected error message.\n\nExpected:\n\n`{expected_message}`\nActual:\n\n`{actual_message}`\n", ); failure_count += 1; } diff --git a/cli/src/tests/helpers/allocations.rs b/cli/src/tests/helpers/allocations.rs index 103cb092..dec67b11 100644 --- a/cli/src/tests/helpers/allocations.rs +++ b/cli/src/tests/helpers/allocations.rs @@ -108,7 +108,7 @@ unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_ let result = realloc(ptr, size); if ptr.is_null() { record_alloc(result); - } else if ptr != result { + } else if !core::ptr::eq(ptr, result) { record_dealloc(ptr); record_alloc(result); } diff --git a/cli/src/tests/helpers/fixtures.rs b/cli/src/tests/helpers/fixtures.rs index 44da9b48..0b046bcc 100644 --- a/cli/src/tests/helpers/fixtures.rs +++ b/cli/src/tests/helpers/fixtures.rs @@ -6,11 +6,13 @@ use std::{ use anyhow::Context; use tree_sitter::Language; -use tree_sitter_generate::{ALLOC_HEADER, ARRAY_HEADER}; +use tree_sitter_generate::{load_grammar_file, ALLOC_HEADER, ARRAY_HEADER}; use tree_sitter_highlight::HighlightConfiguration; use tree_sitter_loader::{CompileConfig, Loader}; use tree_sitter_tags::TagsConfiguration; +use crate::tests::generate_parser; + include!("./dirs.rs"); static TEST_LOADER: LazyLock = LazyLock::new(|| { @@ -40,6 +42,13 @@ pub fn get_language(name: &str) -> Language { TEST_LOADER.load_language_at_path(config).unwrap() } +pub fn get_test_fixture_language(name: &str) -> Language { + let grammar_dir_path = fixtures_dir().join("test_grammars").join(name); + let grammar_json = load_grammar_file(&grammar_dir_path.join("grammar.js"), None).unwrap(); + let (parser_name, parser_code) = generate_parser(&grammar_json).unwrap(); + get_test_language(&parser_name, &parser_code, Some(&grammar_dir_path)) +} + pub fn get_language_queries_path(language_name: &str) -> PathBuf { GRAMMARS_DIR.join(language_name).join("queries") } diff --git a/cli/src/tests/highlight_test.rs b/cli/src/tests/highlight_test.rs index 8d7ff7b3..76c43f43 100644 --- a/cli/src/tests/highlight_test.rs +++ b/cli/src/tests/highlight_test.rs @@ -350,12 +350,11 @@ fn test_highlighting_empty_lines() { fn test_highlighting_carriage_returns() { let source = "a = \"a\rb\"\r\nb\r"; - // FIXME(amaanq): figure why this changed w/ JS's grammar changes assert_eq!( &to_html(source, &JS_HIGHLIGHT).unwrap(), 
&[ - "a = "ab"\n", - "b\n", + "a = "ab"\n", + "b\n", ], ); } @@ -598,7 +597,7 @@ fn test_highlighting_via_c_api() { let output_line_offsets = unsafe { slice::from_raw_parts(output_line_offsets, output_line_count as usize) }; - let mut lines = Vec::new(); + let mut lines = Vec::with_capacity(output_line_count as usize); for i in 0..(output_line_count as usize) { let line_start = output_line_offsets[i] as usize; let line_end = output_line_offsets diff --git a/cli/src/tests/language_test.rs b/cli/src/tests/language_test.rs index e59b0eb6..9f51f503 100644 --- a/cli/src/tests/language_test.rs +++ b/cli/src/tests/language_test.rs @@ -152,6 +152,7 @@ fn test_supertypes() { "_literal_pattern", "captured_pattern", "const_block", + "generic_pattern", "identifier", "macro_invocation", "mut_pattern", diff --git a/cli/src/tests/node_test.rs b/cli/src/tests/node_test.rs index 22e920d6..515d73aa 100644 --- a/cli/src/tests/node_test.rs +++ b/cli/src/tests/node_test.rs @@ -6,7 +6,10 @@ use super::{ helpers::fixtures::{fixtures_dir, get_language, get_test_language}, Rand, }; -use crate::{parse::perform_edit, tests::generate_parser}; +use crate::{ + parse::perform_edit, + tests::{generate_parser, helpers::fixtures::get_test_fixture_language}, +}; const JSON_EXAMPLE: &str = r#" @@ -308,19 +311,8 @@ fn test_parent_of_zero_width_node() { #[test] fn test_next_sibling_of_zero_width_node() { - let grammar_json = load_grammar_file( - &fixtures_dir() - .join("test_grammars") - .join("next_sibling_from_zwt") - .join("grammar.js"), - None, - ) - .unwrap(); - - let (parser_name, parser_code) = generate_parser(&grammar_json).unwrap(); - let mut parser = Parser::new(); - let language = get_test_language(&parser_name, &parser_code, None); + let language = get_test_fixture_language("next_sibling_from_zwt"); parser.set_language(&language).unwrap(); let tree = parser.parse("abdef", None).unwrap(); diff --git a/cli/src/tests/parser_test.rs b/cli/src/tests/parser_test.rs index a1f730d8..d8b9767d 100644 --- a/cli/src/tests/parser_test.rs +++ b/cli/src/tests/parser_test.rs @@ -6,7 +6,6 @@ use std::{ use tree_sitter::{ Decode, IncludedRangesError, InputEdit, LogType, ParseOptions, ParseState, Parser, Point, Range, }; -use tree_sitter_generate::load_grammar_file; use tree_sitter_proc_macro::retry; use super::helpers::{ @@ -17,7 +16,7 @@ use super::helpers::{ use crate::{ fuzz::edits::Edit, parse::perform_edit, - tests::{generate_parser, helpers::fixtures::fixtures_dir, invert_edit}, + tests::{generate_parser, helpers::fixtures::get_test_fixture_language, invert_edit}, }; #[test] @@ -482,15 +481,9 @@ fn test_parsing_empty_file_with_reused_tree() { #[test] fn test_parsing_after_editing_tree_that_depends_on_column_values() { - let dir = fixtures_dir() - .join("test_grammars") - .join("uses_current_column"); - let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap(); - let (grammar_name, parser_code) = generate_parser(&grammar_json).unwrap(); - let mut parser = Parser::new(); parser - .set_language(&get_test_language(&grammar_name, &parser_code, Some(&dir))) + .set_language(&get_test_fixture_language("uses_current_column")) .unwrap(); let mut code = b" @@ -559,16 +552,9 @@ h + i #[test] fn test_parsing_after_editing_tree_that_depends_on_column_position() { - let dir = fixtures_dir() - .join("test_grammars") - .join("depends_on_column"); - - let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap(); - let (grammar_name, parser_code) = generate_parser(grammar_json.as_str()).unwrap(); - let mut 
parser = Parser::new(); parser - .set_language(&get_test_language(&grammar_name, &parser_code, Some(&dir))) + .set_language(&get_test_fixture_language("depends_on_column")) .unwrap(); let mut code = b"\n x".to_vec(); @@ -1702,13 +1688,9 @@ if foo && bar || baz {} #[test] fn test_parsing_with_scanner_logging() { - let dir = fixtures_dir().join("test_grammars").join("external_tokens"); - let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap(); - let (grammar_name, parser_code) = generate_parser(&grammar_json).unwrap(); - let mut parser = Parser::new(); parser - .set_language(&get_test_language(&grammar_name, &parser_code, Some(&dir))) + .set_language(&get_test_fixture_language("external_tokens")) .unwrap(); let mut found = false; @@ -1726,13 +1708,9 @@ fn test_parsing_with_scanner_logging() { #[test] fn test_parsing_get_column_at_eof() { - let dir = fixtures_dir().join("test_grammars").join("get_col_eof"); - let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap(); - let (grammar_name, parser_code) = generate_parser(&grammar_json).unwrap(); - let mut parser = Parser::new(); parser - .set_language(&get_test_language(&grammar_name, &parser_code, Some(&dir))) + .set_language(&get_test_fixture_language("get_col_eof")) .unwrap(); parser.parse("a", None).unwrap(); diff --git a/cli/src/tests/query_test.rs b/cli/src/tests/query_test.rs index f741d88c..b8773acb 100644 --- a/cli/src/tests/query_test.rs +++ b/cli/src/tests/query_test.rs @@ -17,7 +17,10 @@ use super::helpers::{ }; use crate::tests::{ generate_parser, - helpers::query_helpers::{collect_captures, collect_matches}, + helpers::{ + fixtures::get_test_fixture_language, + query_helpers::{collect_captures, collect_matches}, + }, ITERATION_COUNT, }; @@ -330,6 +333,16 @@ fn test_query_errors_on_invalid_symbols() { message: "alternatives".to_string() } ); + assert_eq!( + Query::new(&language, "fakefield: (identifier)").unwrap_err(), + QueryError { + row: 0, + offset: 0, + column: 0, + kind: QueryErrorKind::Field, + message: "fakefield".to_string() + } + ); }); } @@ -2978,6 +2991,61 @@ fn test_query_matches_with_deeply_nested_patterns_with_fields() { }); } +#[test] +fn test_query_matches_with_alternations_and_predicates() { + allocations::record(|| { + let language = get_language("java"); + let query = Query::new( + &language, + " + (block + [ + (local_variable_declaration + (variable_declarator + (identifier) @def.a + (string_literal) @lit.a + ) + ) + (local_variable_declaration + (variable_declarator + (identifier) @def.b + (null_literal) @lit.b + ) + ) + ] + (expression_statement + (method_invocation [ + (argument_list + (identifier) @ref.a + (string_literal) + ) + (argument_list + (null_literal) + (identifier) @ref.b + ) + ]) + ) + (#eq? @def.a @ref.a ) + (#eq? @def.b @ref.b ) + ) + ", + ) + .unwrap(); + + assert_query_matches( + &language, + &query, + r#" + void test() { + int a = "foo"; + f(null, b); + } + "#, + &[], + ); + }); +} + #[test] fn test_query_matches_with_indefinite_step_containing_no_captures() { allocations::record(|| { @@ -5621,3 +5689,63 @@ const foo = [ assert_eq!(matches.len(), 1); assert_eq!(matches[0].1, captures); } + +#[test] +fn test_query_with_predicate_causing_oob_access() { + let language = get_language("rust"); + + let query = "(call_expression + function: (scoped_identifier + path: (scoped_identifier (identifier) @_regex (#any-of? @_regex \"Regex\" \"RegexBuilder\") .)) + (#set! 
injection.language \"regex\"))"; + Query::new(&language, query).unwrap(); +} + +#[test] +fn test_query_with_anonymous_error_node() { + let language = get_test_fixture_language("anonymous_error"); + let mut parser = Parser::new(); + parser.set_language(&language).unwrap(); + + let source = "ERROR"; + + let tree = parser.parse(source, None).unwrap(); + let query = Query::new( + &language, + r#" + "ERROR" @error + (document "ERROR" @error) + "#, + ) + .unwrap(); + let mut cursor = QueryCursor::new(); + let matches = cursor.matches(&query, tree.root_node(), source.as_bytes()); + let matches = collect_matches(matches, &query, source); + + assert_eq!( + matches, + vec![(1, vec![("error", "ERROR")]), (0, vec![("error", "ERROR")])] + ); +} + +#[test] +fn test_query_allows_error_nodes_with_children() { + allocations::record(|| { + let language = get_language("cpp"); + + let code = "SomeStruct foo{.bar{}};"; + + let mut parser = Parser::new(); + parser.set_language(&language).unwrap(); + + let tree = parser.parse(code, None).unwrap(); + let root = tree.root_node(); + + let query = Query::new(&language, "(initializer_list (ERROR) @error)").unwrap(); + let mut cursor = QueryCursor::new(); + + let matches = cursor.matches(&query, root, code.as_bytes()); + let matches = collect_matches(matches, &query, code); + assert_eq!(matches, &[(0, vec![("error", ".bar")])]); + }); +} diff --git a/cli/src/tests/tags_test.rs b/cli/src/tests/tags_test.rs index cb07fb75..232a01dc 100644 --- a/cli/src/tests/tags_test.rs +++ b/cli/src/tests/tags_test.rs @@ -401,8 +401,11 @@ fn test_tags_via_c_api() { let syntax_types = unsafe { let mut len = 0; - let ptr = - c::ts_tagger_syntax_kinds_for_scope_name(tagger, c_scope_name.as_ptr(), &mut len); + let ptr = c::ts_tagger_syntax_kinds_for_scope_name( + tagger, + c_scope_name.as_ptr(), + &raw mut len, + ); slice::from_raw_parts(ptr, len as usize) .iter() .map(|i| CStr::from_ptr(*i).to_str().unwrap()) diff --git a/cli/src/tests/text_provider_test.rs b/cli/src/tests/text_provider_test.rs index ffedc36b..d9ed454e 100644 --- a/cli/src/tests/text_provider_test.rs +++ b/cli/src/tests/text_provider_test.rs @@ -107,6 +107,19 @@ fn test_text_provider_for_arc_of_bytes_slice() { check_parsing(text.clone(), text.as_ref()); } +#[test] +fn test_text_provider_for_vec_utf16_text() { + let source_text = "你好".encode_utf16().collect::>(); + + let language = get_language("c"); + let mut parser = Parser::new(); + parser.set_language(&language).unwrap(); + let tree = parser.parse_utf16_le(&source_text, None).unwrap(); + + let tree_text = tree.root_node().utf16_text(&source_text); + assert_eq!(source_text, tree_text); +} + #[test] fn test_text_provider_callback_with_str_slice() { let text: &str = "// comment"; diff --git a/cli/src/tests/tree_test.rs b/cli/src/tests/tree_test.rs index 083955b1..a4941bb6 100644 --- a/cli/src/tests/tree_test.rs +++ b/cli/src/tests/tree_test.rs @@ -3,7 +3,11 @@ use std::str; use tree_sitter::{InputEdit, Parser, Point, Range, Tree}; use super::helpers::fixtures::get_language; -use crate::{fuzz::edits::Edit, parse::perform_edit, tests::invert_edit}; +use crate::{ + fuzz::edits::Edit, + parse::perform_edit, + tests::{helpers::fixtures::get_test_fixture_language, invert_edit}, +}; #[test] fn test_tree_edit() { @@ -377,6 +381,40 @@ fn test_tree_cursor() { assert_eq!(copy.node().kind(), "struct_item"); } +#[test] +fn test_tree_cursor_previous_sibling_with_aliases() { + let mut parser = Parser::new(); + parser + .set_language(&get_test_fixture_language("aliases_in_root")) + 
.unwrap(); + + let text = "# comment\n# \nfoo foo"; + let tree = parser.parse(text, None).unwrap(); + let mut cursor = tree.walk(); + assert_eq!(cursor.node().kind(), "document"); + + cursor.goto_first_child(); + assert_eq!(cursor.node().kind(), "comment"); + + assert!(cursor.goto_next_sibling()); + assert_eq!(cursor.node().kind(), "comment"); + + assert!(cursor.goto_next_sibling()); + assert_eq!(cursor.node().kind(), "bar"); + + assert!(cursor.goto_previous_sibling()); + assert_eq!(cursor.node().kind(), "comment"); + + assert!(cursor.goto_previous_sibling()); + assert_eq!(cursor.node().kind(), "comment"); + + assert!(cursor.goto_next_sibling()); + assert_eq!(cursor.node().kind(), "comment"); + + assert!(cursor.goto_next_sibling()); + assert_eq!(cursor.node().kind(), "bar"); +} + #[test] fn test_tree_cursor_previous_sibling() { let mut parser = Parser::new(); diff --git a/cli/src/wasm.rs b/cli/src/wasm.rs index eca6ac24..eef6d08b 100644 --- a/cli/src/wasm.rs +++ b/cli/src/wasm.rs @@ -23,10 +23,18 @@ pub fn load_language_wasm_file(language_dir: &Path) -> Result<(String, Vec)> pub fn get_grammar_name(language_dir: &Path) -> Result { let src_dir = language_dir.join("src"); let grammar_json_path = src_dir.join("grammar.json"); - let grammar_json = fs::read_to_string(&grammar_json_path) - .with_context(|| format!("Failed to read grammar file {grammar_json_path:?}"))?; - let grammar: GrammarJSON = serde_json::from_str(&grammar_json) - .with_context(|| format!("Failed to parse grammar file {grammar_json_path:?}"))?; + let grammar_json = fs::read_to_string(&grammar_json_path).with_context(|| { + format!( + "Failed to read grammar file {}", + grammar_json_path.display() + ) + })?; + let grammar: GrammarJSON = serde_json::from_str(&grammar_json).with_context(|| { + format!( + "Failed to parse grammar file {}", + grammar_json_path.display() + ) + })?; Ok(grammar.name) } diff --git a/docs/src/assets/schemas/config.schema.json b/docs/src/assets/schemas/config.schema.json index 3968af5a..f0fe92e6 100644 --- a/docs/src/assets/schemas/config.schema.json +++ b/docs/src/assets/schemas/config.schema.json @@ -22,8 +22,15 @@ "examples": [ "Rust", "HTML" - ], - "$comment": "This is used in the description and the class names." 
+ ] + }, + "title": { + "type": "string", + "description": "The title of the language.", + "examples": [ + "Rust", + "HTML" + ] }, "scope": { "type": "string", @@ -237,9 +244,7 @@ "properties": { "c": { "type": "boolean", - "default": true, - "const": true, - "$comment": "Always generated" + "default": true }, "go": { "type": "boolean", @@ -255,9 +260,7 @@ }, "node": { "type": "boolean", - "default": true, - "const": true, - "$comment": "Always generated (for now)" + "default": true }, "python": { "type": "boolean", @@ -265,9 +268,7 @@ }, "rust": { "type": "boolean", - "default": true, - "const": true, - "$comment": "Always generated" + "default": true }, "swift": { "type": "boolean", diff --git a/docs/src/assets/schemas/grammar.schema.json b/docs/src/assets/schemas/grammar.schema.json index 12ed4d65..e30c7ba0 100644 --- a/docs/src/assets/schemas/grammar.schema.json +++ b/docs/src/assets/schemas/grammar.schema.json @@ -246,6 +246,21 @@ "required": ["type", "content"] }, + "reserved-rule": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "RESERVED" + }, + "context_name": { "type": "string" }, + "content": { + "$ref": "#/definitions/rule" + } + }, + "required": ["type", "context_name", "content"] + }, + "token-rule": { "type": "object", "properties": { @@ -313,6 +328,7 @@ { "$ref": "#/definitions/choice-rule" }, { "$ref": "#/definitions/repeat1-rule" }, { "$ref": "#/definitions/repeat-rule" }, + { "$ref": "#/definitions/reserved-rule" }, { "$ref": "#/definitions/token-rule" }, { "$ref": "#/definitions/field-rule" }, { "$ref": "#/definitions/prec-rule" } diff --git a/docs/src/cli/parse.md b/docs/src/cli/parse.md index 7f30cdb0..97c6422a 100644 --- a/docs/src/cli/parse.md +++ b/docs/src/cli/parse.md @@ -66,7 +66,7 @@ Suppress main output. ### `--edits ...` -Apply edits after parsing the file. Edits are in the form of `row, col delcount insert_text` where row and col are 0-indexed. +Apply edits after parsing the file. Edits are in the form of `row,col|position delcount insert_text` where row and col, or position are 0-indexed. ### `--encoding ` diff --git a/highlight/src/lib.rs b/highlight/src/lib.rs index 3998502f..9d296050 100644 --- a/highlight/src/lib.rs +++ b/highlight/src/lib.rs @@ -143,6 +143,8 @@ pub struct HtmlRenderer { pub html: Vec, pub line_offsets: Vec, carriage_return_highlight: Option, + // The offset in `self.html` of the last carriage return. + last_carriage_return: Option, } #[derive(Debug)] @@ -1090,6 +1092,7 @@ impl HtmlRenderer { html: Vec::with_capacity(BUFFER_HTML_RESERVE_CAPACITY), line_offsets: Vec::with_capacity(BUFFER_LINES_RESERVE_CAPACITY), carriage_return_highlight: None, + last_carriage_return: None, }; result.line_offsets.push(0); result @@ -1131,6 +1134,9 @@ impl HtmlRenderer { Err(a) => return Err(a), } } + if let Some(offset) = self.last_carriage_return.take() { + self.add_carriage_return(offset, attribute_callback); + } if self.html.last() != Some(&b'\n') { self.html.push(b'\n'); } @@ -1155,14 +1161,21 @@ impl HtmlRenderer { }) } - fn add_carriage_return(&mut self, attribute_callback: &F) + fn add_carriage_return(&mut self, offset: usize, attribute_callback: &F) where F: Fn(Highlight, &mut Vec), { if let Some(highlight) = self.carriage_return_highlight { + // If a CR is the last character in a `HighlightEvent::Source` + // region, then we don't know until the next `Source` event or EOF + // whether it is part of CRLF or on its own. 
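The comment above describes the CRLF ambiguity the HTML renderer has to handle: when a `Source` chunk ends in a CR, it cannot yet know whether an LF follows in the next chunk. A minimal sketch of the save-the-offset-and-splice-later pattern used to resolve it, with a plain `String` standing in for `self.html` and a made-up `&#13;` placeholder for the rendered CR:

    fn main() {
        let mut html = String::from("a");
        let cr_offset = html.len(); // recorded when the '\r' was seen
        html.push('b');             // later output arrives before we know
        // Now we know the CR was not followed by '\n', so splice it back in.
        let rest = html.split_off(cr_offset);
        html.push_str("&#13;");
        html.push_str(&rest);
        assert_eq!(html, "a&#13;b");
    }
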
To avoid unbounded + // lookahead, save the offset of the CR and insert there now that we + // know. + let rest = self.html.split_off(offset); self.html.extend(b""); + self.html.extend(rest); } } @@ -1194,19 +1207,17 @@ impl HtmlRenderer { } } - let mut last_char_was_cr = false; for c in LossyUtf8::new(src).flat_map(|p| p.bytes()) { // Don't render carriage return characters, but allow lone carriage returns (not // followed by line feeds) to be styled via the attribute callback. if c == b'\r' { - last_char_was_cr = true; + self.last_carriage_return = Some(self.html.len()); continue; } - if last_char_was_cr { + if let Some(offset) = self.last_carriage_return.take() { if c != b'\n' { - self.add_carriage_return(attribute_callback); + self.add_carriage_return(offset, attribute_callback); } - last_char_was_cr = false; } // At line boundaries, close and re-open all of the open tags. diff --git a/lib/CMakeLists.txt b/lib/CMakeLists.txt index 3e2b94d4..42aa60d0 100644 --- a/lib/CMakeLists.txt +++ b/lib/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required(VERSION 3.13) project(tree-sitter - VERSION "0.25.1" + VERSION "0.25.9" DESCRIPTION "An incremental parsing system for programming tools" HOMEPAGE_URL "https://tree-sitter.github.io/tree-sitter/" LANGUAGES C) @@ -83,13 +83,13 @@ set_target_properties(tree-sitter target_compile_definitions(tree-sitter PRIVATE _POSIX_C_SOURCE=200112L _DEFAULT_SOURCE) -configure_file(tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY) - include(GNUInstallDirs) +configure_file(tree-sitter.pc.in "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" @ONLY) + install(FILES include/tree_sitter/api.h DESTINATION "${CMAKE_INSTALL_INCLUDEDIR}/tree_sitter") install(FILES "${CMAKE_CURRENT_BINARY_DIR}/tree-sitter.pc" - DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig") + DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig") install(TARGETS tree-sitter LIBRARY DESTINATION "${CMAKE_INSTALL_LIBDIR}") diff --git a/lib/binding_rust/build.rs b/lib/binding_rust/build.rs index 255740af..5e918034 100644 --- a/lib/binding_rust/build.rs +++ b/lib/binding_rust/build.rs @@ -112,7 +112,10 @@ fn generate_bindings(out_dir: &std::path::Path) { .expect("Failed to generate bindings"); let bindings_rs = out_dir.join("bindings.rs"); - bindings - .write_to_file(&bindings_rs) - .unwrap_or_else(|_| panic!("Failed to write bindings into path: {bindings_rs:?}")); + bindings.write_to_file(&bindings_rs).unwrap_or_else(|_| { + panic!( + "Failed to write bindings into path: {}", + bindings_rs.display() + ) + }); } diff --git a/lib/binding_rust/ffi.rs b/lib/binding_rust/ffi.rs index af0824d7..4c68a633 100644 --- a/lib/binding_rust/ffi.rs +++ b/lib/binding_rust/ffi.rs @@ -16,6 +16,7 @@ extern "C" { } #[cfg(windows)] +#[cfg(feature = "std")] extern "C" { pub(crate) fn _ts_dup(handle: *mut std::os::raw::c_void) -> std::os::raw::c_int; } diff --git a/lib/binding_rust/lib.rs b/lib/binding_rust/lib.rs index d6487877..3574391f 100644 --- a/lib/binding_rust/lib.rs +++ b/lib/binding_rust/lib.rs @@ -1222,7 +1222,7 @@ impl Parser { len: u32, code_point: *mut i32, ) -> u32 { - let (c, len) = D::decode(std::slice::from_raw_parts(data, len as usize)); + let (c, len) = D::decode(core::slice::from_raw_parts(data, len as usize)); if let Some(code_point) = code_point.as_mut() { *code_point = c; } @@ -1422,7 +1422,7 @@ impl Parser { if let Some(flag) = flag { ffi::ts_parser_set_cancellation_flag( self.0.as_ptr(), - std::ptr::from_ref::(flag).cast::(), + core::ptr::from_ref::(flag).cast::(), ); } else { 
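The comment added to `HtmlRenderer::add_carriage_return` above describes deferring the decision about a trailing `\r` until the next `Source` event or end of input: the byte offset of the CR is saved, and the markup is spliced in at that offset once the following character is known. A minimal sketch of that save-and-splice pattern, using hypothetical names (`Buf`, `mark`, `splice_at_mark`) that are not part of the highlight crate:

```rust
// Illustrative only: remember an offset now, splice bytes in at that offset later.
struct Buf {
    bytes: Vec<u8>,
    pending: Option<usize>, // offset recorded when a lone CR might need markup
}

impl Buf {
    fn mark(&mut self) {
        // Record where the CR occurred; nothing is written yet.
        self.pending = Some(self.bytes.len());
    }

    fn splice_at_mark(&mut self, markup: &[u8]) {
        if let Some(offset) = self.pending.take() {
            // Split the buffer at the saved offset and re-append the tail,
            // so the markup lands exactly where the CR was seen.
            let rest = self.bytes.split_off(offset);
            self.bytes.extend_from_slice(markup);
            self.bytes.extend_from_slice(&rest);
        }
    }
}

fn main() {
    let mut buf = Buf { bytes: b"line one".to_vec(), pending: None };
    buf.mark();                  // a CR was seen at the end of a chunk
    buf.bytes.extend_from_slice(b"more text arrives later");
    buf.splice_at_mark(b"<CR>"); // now we know the CR was not part of a CRLF pair
    assert_eq!(buf.bytes, b"line one<CR>more text arrives later".to_vec());
}
```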
ffi::ts_parser_set_cancellation_flag(self.0.as_ptr(), ptr::null()); @@ -1432,7 +1432,11 @@ impl Parser { impl Drop for Parser { fn drop(&mut self) { - self.stop_printing_dot_graphs(); + #[cfg(feature = "std")] + #[cfg(not(target_os = "wasi"))] + { + self.stop_printing_dot_graphs(); + } self.set_logger(None); unsafe { ffi::ts_parser_delete(self.0.as_ptr()) } } @@ -2058,7 +2062,7 @@ impl<'tree> Node<'tree> { #[must_use] pub fn utf16_text<'a>(&self, source: &'a [u16]) -> &'a [u16] { - &source[self.start_byte()..self.end_byte()] + &source[self.start_byte() / 2..self.end_byte() / 2] } /// Create a new [`TreeCursor`] starting from this node. @@ -2087,7 +2091,7 @@ impl<'tree> Node<'tree> { impl PartialEq for Node<'_> { fn eq(&self, other: &Self) -> bool { - self.0.id == other.0.id + core::ptr::eq(self.0.id, other.0.id) } } @@ -2440,7 +2444,7 @@ impl Query { // Error types that report names ffi::TSQueryErrorNodeType | ffi::TSQueryErrorField | ffi::TSQueryErrorCapture => { let suffix = source.split_at(offset).1; - let in_quotes = source.as_bytes()[offset - 1] == b'"'; + let in_quotes = offset > 0 && source.as_bytes()[offset - 1] == b'"'; let mut backslashes = 0; let end_offset = suffix .find(|c| { @@ -3349,9 +3353,11 @@ impl<'tree> QueryMatch<'_, 'tree> { .iter() .all(|predicate| match predicate { TextPredicateCapture::EqCapture(i, j, is_positive, match_all_nodes) => { - let mut nodes_1 = self.nodes_for_capture_index(*i); - let mut nodes_2 = self.nodes_for_capture_index(*j); - while let (Some(node1), Some(node2)) = (nodes_1.next(), nodes_2.next()) { + let mut nodes_1 = self.nodes_for_capture_index(*i).peekable(); + let mut nodes_2 = self.nodes_for_capture_index(*j).peekable(); + while nodes_1.peek().is_some() && nodes_2.peek().is_some() { + let node1 = nodes_1.next().unwrap(); + let node2 = nodes_2.next().unwrap(); let mut text1 = text_provider.text(node1); let mut text2 = text_provider.text(node2); let text1 = node_text1.get_text(&mut text1); diff --git a/lib/binding_web/lib/tree-sitter.c b/lib/binding_web/lib/tree-sitter.c index dcf00e79..6148e542 100644 --- a/lib/binding_web/lib/tree-sitter.c +++ b/lib/binding_web/lib/tree-sitter.c @@ -44,17 +44,22 @@ static inline void marshal_node(const void **buffer, TSNode node) { buffer[4] = (const void *)node.context[3]; } -static inline TSNode unmarshal_node(const TSTree *tree) { +static inline TSNode unmarshal_node_at(const TSTree *tree, uint32_t index) { TSNode node; - node.id = TRANSFER_BUFFER[0]; - node.context[0] = code_unit_to_byte((uint32_t)TRANSFER_BUFFER[1]); - node.context[1] = (uint32_t)TRANSFER_BUFFER[2]; - node.context[2] = code_unit_to_byte((uint32_t)TRANSFER_BUFFER[3]); - node.context[3] = (uint32_t)TRANSFER_BUFFER[4]; + const void **buffer = TRANSFER_BUFFER + index * SIZE_OF_NODE; + node.id = buffer[0]; + node.context[0] = code_unit_to_byte((uint32_t)buffer[1]); + node.context[1] = (uint32_t)buffer[2]; + node.context[2] = code_unit_to_byte((uint32_t)buffer[3]); + node.context[3] = (uint32_t)buffer[4]; node.tree = tree; return node; } +static inline TSNode unmarshal_node(const TSTree *tree) { + return unmarshal_node_at(tree, 0); +} + static inline void marshal_cursor(const TSTreeCursor *cursor) { TRANSFER_BUFFER[0] = cursor->id; TRANSFER_BUFFER[1] = (const void *)cursor->context[0]; @@ -616,7 +621,7 @@ void ts_node_parent_wasm(const TSTree *tree) { void ts_node_child_with_descendant_wasm(const TSTree *tree) { TSNode node = unmarshal_node(tree); - TSNode descendant = unmarshal_node(tree); + TSNode descendant = unmarshal_node_at(tree, 1); 
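The `utf16_text` fix above converts tree-sitter's byte offsets into `u16` indices by halving them, since each UTF-16 code unit occupies two bytes. A small, hypothetical illustration of that conversion (the helper below is not part of the crate's API):

```rust
// Tree-sitter reports node ranges in bytes even when the source was parsed as
// UTF-16, so indexing a `&[u16]` slice requires dividing those offsets by 2.
// `byte_range_to_utf16` is a hypothetical helper used only for illustration.
fn byte_range_to_utf16(source: &[u16], start_byte: usize, end_byte: usize) -> &[u16] {
    &source[start_byte / 2..end_byte / 2]
}

fn main() {
    // "héllo" encoded as UTF-16 code units.
    let source: Vec<u16> = "héllo".encode_utf16().collect();
    // Suppose a node spans bytes 2..8, i.e. code units 1..4 ("éll").
    let text = byte_range_to_utf16(&source, 2, 8);
    assert_eq!(String::from_utf16_lossy(text), "éll");
}
```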
marshal_node(TRANSFER_BUFFER, ts_node_child_with_descendant(node, descendant)); } diff --git a/lib/binding_web/lib/tree-sitter.d.ts b/lib/binding_web/lib/tree-sitter.d.ts index c6fdf7d5..ac9e1a33 100644 --- a/lib/binding_web/lib/tree-sitter.d.ts +++ b/lib/binding_web/lib/tree-sitter.d.ts @@ -50,7 +50,7 @@ declare namespace RuntimeExports { function setValue(ptr: number, value: number, type?: string): void; let currentParseCallback: ((index: number, position: {row: number, column: number}) => string | undefined) | null; let currentLogCallback: ((message: string, isLex: boolean) => void) | null; - let currentProgressCallback: ((state: {currentOffset: number}) => void) | null; + let currentProgressCallback: ((state: {currentOffset: number, hasError: boolean}) => void) | null; let currentQueryProgressCallback: ((state: {currentOffset: number}) => void) | null; let HEAPF32: Float32Array; let HEAPF64: Float64Array; diff --git a/lib/binding_web/package-lock.json b/lib/binding_web/package-lock.json index 688e971f..b2fcf758 100644 --- a/lib/binding_web/package-lock.json +++ b/lib/binding_web/package-lock.json @@ -1,18 +1,17 @@ { "name": "web-tree-sitter", - "version": "0.25.0", + "version": "0.25.9", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "web-tree-sitter", - "version": "0.25.0", + "version": "0.25.9", "license": "MIT", "devDependencies": { - "@eslint/js": "^9.19.0", - "@types/emscripten": "^1.40.0", - "@types/node": "^22.12.0", - "@vitest/coverage-v8": "^3.0.4", + "@eslint/js": "^9.20.0", + "@types/node": "^22.13.1", + "@vitest/coverage-v8": "^3.0.5", "dts-buddy": "^0.5.4", "esbuild": "^0.24.2", "eslint": "^9.19.0", @@ -21,6 +20,14 @@ "typescript": "^5.7.3", "typescript-eslint": "^8.22.0", "vitest": "^3.0.4" + }, + "peerDependencies": { + "@types/emscripten": "^1.40.0" + }, + "peerDependenciesMeta": { + "@types/emscripten": { + "optional": true + } } }, "node_modules/@ampproject/remapping": { @@ -454,6 +461,23 @@ "node": ">=18" } }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", + "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/@esbuild/sunos-x64": { "version": "0.24.2", "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.24.2.tgz", @@ -565,13 +589,13 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.19.1", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.1.tgz", - "integrity": "sha512-fo6Mtm5mWyKjA/Chy1BYTdn5mGJoDNjC7C64ug20ADsRDGrA85bN3uK3MaKbeRkRuuIEAR5N33Jr1pbm411/PA==", + "version": "0.21.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.0.tgz", + "integrity": "sha512-ENIdc4iLu0d93HeYirvKmrzshzofPw6VkZRKQGe9Nv46ZnWUzcF1xV01dcvEg/1wXUR61OmmlSfyeyO7EvjLxQ==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/object-schema": "^2.1.5", + "@eslint/object-schema": "^2.1.6", "debug": "^4.3.1", "minimatch": "^3.1.2" }, @@ -579,10 +603,20 @@ "node": "^18.18.0 || ^20.9.0 || >=21.1.0" } }, + "node_modules/@eslint/config-helpers": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.3.1.tgz", + "integrity": 
"sha512-xR93k9WhrDYpXHORXpxVL5oHj3Era7wo6k/Wd8/IsQNnZUTzkGS29lyn3nAT05v6ltUuTFVCCYDEGfy2Or/sPA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, "node_modules/@eslint/core": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.10.0.tgz", - "integrity": "sha512-gFHJ+xBOo4G3WRlR1e/3G8A6/KZAH6zcE/hkLRCZTi/B9avAG365QhFA8uOGzTMqgTghpn7/fSnscW++dpMSAw==", + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.15.2.tgz", + "integrity": "sha512-78Md3/Rrxh83gCxoUc0EiciuOHsIITzLy53m3d9UyiW8y9Dj2D29FeETqyKA+BRK76tnTp6RXWb3pCay8Oyomg==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -593,9 +627,9 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.2.0.tgz", - "integrity": "sha512-grOjVNN8P3hjJn/eIETF1wwd12DdnwFDoyceUJLYYdkpbwq3nLi+4fqrTAONx7XDALqlL220wC/RHSC/QTI/0w==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz", + "integrity": "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ==", "dev": true, "license": "MIT", "dependencies": { @@ -617,19 +651,22 @@ } }, "node_modules/@eslint/js": { - "version": "9.19.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.19.0.tgz", - "integrity": "sha512-rbq9/g38qjfqFLOVPvwjIvFFdNziEC5S65jmjPw5r6A//QH+W91akh9irMwjDN8zKUTak6W9EsAv4m/7Wnw0UQ==", + "version": "9.34.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.34.0.tgz", + "integrity": "sha512-EoyvqQnBNsV1CWaEJ559rxXL4c8V92gxirbawSmVUOWXlsRxxQXl6LmCpdUblgxgSkDIqKnhzba2SjRTI/A5Rw==", "dev": true, "license": "MIT", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" } }, "node_modules/@eslint/object-schema": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.5.tgz", - "integrity": "sha512-o0bhxnL89h5Bae5T318nFoFzGy+YE5i/gGkoPAgkmTVdRKTiv3p8JHevPiPaMwoloKfEiiaHlawCqaZMqRm+XQ==", + "version": "2.1.6", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.6.tgz", + "integrity": "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -637,13 +674,13 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.5.tgz", - "integrity": "sha512-lB05FkqEdUg2AA0xEbUz0SnkXT1LcCTa438W4IWTUh4hdOnVbQyOJ81OrDXsJk/LSiJHubgGEFoR5EHq1NsH1A==", + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.3.5.tgz", + "integrity": "sha512-Z5kJ+wU3oA7MMIqVR9tyZRtjYPr4OC004Q4Rw7pgOKUOKkJfZ3O24nz3WYfGRpMDNmcOi3TwQOmgm7B7Tpii0w==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.10.0", + "@eslint/core": "^0.15.2", "levn": "^0.4.1" }, "engines": { @@ -703,9 +740,9 @@ } }, "node_modules/@humanwhocodes/retry": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.1.tgz", - "integrity": "sha512-c7hNEllBlenFTHBky65mhq8WD2kbN9Q6gk0bTk8lSBvc554jpXSkST1iePudpt7+A/AQvuHs9EMqjHDXMY1lrA==", + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": 
"sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -798,9 +835,9 @@ "license": "MIT" }, "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.25", - "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", - "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "version": "0.3.30", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.30.tgz", + "integrity": "sha512-GQ7Nw5G2lTu/BtHTKfXhKHok2WGetd4XYcVKGx00SjAk8GMwgJM3zr6zORiPGuOE+/vkc90KtTosSSvaCjKb2Q==", "dev": true, "license": "MIT", "dependencies": { @@ -858,9 +895,9 @@ } }, "node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.32.1.tgz", - "integrity": "sha512-/pqA4DmqyCm8u5YIDzIdlLcEmuvxb0v8fZdFhVMszSpDTgbQKdw3/mB3eMUHIbubtJ6F9j+LtmyCnHTEqIHyzA==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.49.0.tgz", + "integrity": "sha512-rlKIeL854Ed0e09QGYFlmDNbka6I3EQFw7iZuugQjMb11KMpJCLPFL4ZPbMfaEhLADEL1yx0oujGkBQ7+qW3eA==", "cpu": [ "arm" ], @@ -872,9 +909,9 @@ ] }, "node_modules/@rollup/rollup-android-arm64": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.32.1.tgz", - "integrity": "sha512-If3PDskT77q7zgqVqYuj7WG3WC08G1kwXGVFi9Jr8nY6eHucREHkfpX79c0ACAjLj3QIWKPJR7w4i+f5EdLH5Q==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.49.0.tgz", + "integrity": "sha512-cqPpZdKUSQYRtLLr6R4X3sD4jCBO1zUmeo3qrWBCqYIeH8Q3KRL4F3V7XJ2Rm8/RJOQBZuqzQGWPjjvFUcYa/w==", "cpu": [ "arm64" ], @@ -886,9 +923,9 @@ ] }, "node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.32.1.tgz", - "integrity": "sha512-zCpKHioQ9KgZToFp5Wvz6zaWbMzYQ2LJHQ+QixDKq52KKrF65ueu6Af4hLlLWHjX1Wf/0G5kSJM9PySW9IrvHA==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.49.0.tgz", + "integrity": "sha512-99kMMSMQT7got6iYX3yyIiJfFndpojBmkHfTc1rIje8VbjhmqBXE+nb7ZZP3A5skLyujvT0eIUCUsxAe6NjWbw==", "cpu": [ "arm64" ], @@ -900,9 +937,9 @@ ] }, "node_modules/@rollup/rollup-darwin-x64": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.32.1.tgz", - "integrity": "sha512-sFvF+t2+TyUo/ZQqUcifrJIgznx58oFZbdHS9TvHq3xhPVL9nOp+yZ6LKrO9GWTP+6DbFtoyLDbjTpR62Mbr3Q==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.49.0.tgz", + "integrity": "sha512-y8cXoD3wdWUDpjOLMKLx6l+NFz3NlkWKcBCBfttUn+VGSfgsQ5o/yDUGtzE9HvsodkP0+16N0P4Ty1VuhtRUGg==", "cpu": [ "x64" ], @@ -914,9 +951,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.32.1.tgz", - "integrity": "sha512-NbOa+7InvMWRcY9RG+B6kKIMD/FsnQPH0MWUvDlQB1iXnF/UcKSudCXZtv4lW+C276g3w5AxPbfry5rSYvyeYA==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.49.0.tgz", + "integrity": 
"sha512-3mY5Pr7qv4GS4ZvWoSP8zha8YoiqrU+e0ViPvB549jvliBbdNLrg2ywPGkgLC3cmvN8ya3za+Q2xVyT6z+vZqA==", "cpu": [ "arm64" ], @@ -928,9 +965,9 @@ ] }, "node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.32.1.tgz", - "integrity": "sha512-JRBRmwvHPXR881j2xjry8HZ86wIPK2CcDw0EXchE1UgU0ubWp9nvlT7cZYKc6bkypBt745b4bglf3+xJ7hXWWw==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.49.0.tgz", + "integrity": "sha512-C9KzzOAQU5gU4kG8DTk+tjdKjpWhVWd5uVkinCwwFub2m7cDYLOdtXoMrExfeBmeRy9kBQMkiyJ+HULyF1yj9w==", "cpu": [ "x64" ], @@ -942,9 +979,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.32.1.tgz", - "integrity": "sha512-PKvszb+9o/vVdUzCCjL0sKHukEQV39tD3fepXxYrHE3sTKrRdCydI7uldRLbjLmDA3TFDmh418XH19NOsDRH8g==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.49.0.tgz", + "integrity": "sha512-OVSQgEZDVLnTbMq5NBs6xkmz3AADByCWI4RdKSFNlDsYXdFtlxS59J+w+LippJe8KcmeSSM3ba+GlsM9+WwC1w==", "cpu": [ "arm" ], @@ -956,9 +993,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.32.1.tgz", - "integrity": "sha512-9WHEMV6Y89eL606ReYowXuGF1Yb2vwfKWKdD1A5h+OYnPZSJvxbEjxTRKPgi7tkP2DSnW0YLab1ooy+i/FQp/Q==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.49.0.tgz", + "integrity": "sha512-ZnfSFA7fDUHNa4P3VwAcfaBLakCbYaxCk0jUnS3dTou9P95kwoOLAMlT3WmEJDBCSrOEFFV0Y1HXiwfLYJuLlA==", "cpu": [ "arm" ], @@ -970,9 +1007,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.32.1.tgz", - "integrity": "sha512-tZWc9iEt5fGJ1CL2LRPw8OttkCBDs+D8D3oEM8mH8S1ICZCtFJhD7DZ3XMGM8kpqHvhGUTvNUYVDnmkj4BDXnw==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.49.0.tgz", + "integrity": "sha512-Z81u+gfrobVK2iV7GqZCBfEB1y6+I61AH466lNK+xy1jfqFLiQ9Qv716WUM5fxFrYxwC7ziVdZRU9qvGHkYIJg==", "cpu": [ "arm64" ], @@ -984,9 +1021,9 @@ ] }, "node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.32.1.tgz", - "integrity": "sha512-FTYc2YoTWUsBz5GTTgGkRYYJ5NGJIi/rCY4oK/I8aKowx1ToXeoVVbIE4LGAjsauvlhjfl0MYacxClLld1VrOw==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.49.0.tgz", + "integrity": "sha512-zoAwS0KCXSnTp9NH/h9aamBAIve0DXeYpll85shf9NJ0URjSTzzS+Z9evmolN+ICfD3v8skKUPyk2PO0uGdFqg==", "cpu": [ "arm64" ], @@ -998,9 +1035,9 @@ ] }, "node_modules/@rollup/rollup-linux-loongarch64-gnu": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.32.1.tgz", - "integrity": "sha512-F51qLdOtpS6P1zJVRzYM0v6MrBNypyPEN1GfMiz0gPu9jN8ScGaEFIZQwteSsGKg799oR5EaP7+B2jHgL+d+Kw==", + "version": "4.49.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-loongarch64-gnu/-/rollup-linux-loongarch64-gnu-4.49.0.tgz", + "integrity": "sha512-2QyUyQQ1ZtwZGiq0nvODL+vLJBtciItC3/5cYN8ncDQcv5avrt2MbKt1XU/vFAJlLta5KujqyHdYtdag4YEjYQ==", "cpu": [ "loong64" ], @@ -1011,10 +1048,10 @@ "linux" ] }, - "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.32.1.tgz", - "integrity": "sha512-wO0WkfSppfX4YFm5KhdCCpnpGbtgQNj/tgvYzrVYFKDpven8w2N6Gg5nB6w+wAMO3AIfSTWeTjfVe+uZ23zAlg==", + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.49.0.tgz", + "integrity": "sha512-k9aEmOWt+mrMuD3skjVJSSxHckJp+SiFzFG+v8JLXbc/xi9hv2icSkR3U7uQzqy+/QbbYY7iNB9eDTwrELo14g==", "cpu": [ "ppc64" ], @@ -1026,9 +1063,23 @@ ] }, "node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.32.1.tgz", - "integrity": "sha512-iWswS9cIXfJO1MFYtI/4jjlrGb/V58oMu4dYJIKnR5UIwbkzR0PJ09O0PDZT0oJ3LYWXBSWahNf/Mjo6i1E5/g==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.49.0.tgz", + "integrity": "sha512-rDKRFFIWJ/zJn6uk2IdYLc09Z7zkE5IFIOWqpuU0o6ZpHcdniAyWkwSUWE/Z25N/wNDmFHHMzin84qW7Wzkjsw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.49.0.tgz", + "integrity": "sha512-FkkhIY/hYFVnOzz1WeV3S9Bd1h0hda/gRqvZCMpHWDHdiIHn6pqsY3b5eSbvGccWHMQ1uUzgZTKS4oGpykf8Tw==", "cpu": [ "riscv64" ], @@ -1040,9 +1091,9 @@ ] }, "node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.32.1.tgz", - "integrity": "sha512-RKt8NI9tebzmEthMnfVgG3i/XeECkMPS+ibVZjZ6mNekpbbUmkNWuIN2yHsb/mBPyZke4nlI4YqIdFPgKuoyQQ==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.49.0.tgz", + "integrity": "sha512-gRf5c+A7QiOG3UwLyOOtyJMD31JJhMjBvpfhAitPAoqZFcOeK3Kc1Veg1z/trmt+2P6F/biT02fU19GGTS529A==", "cpu": [ "s390x" ], @@ -1054,9 +1105,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.32.1.tgz", - "integrity": "sha512-WQFLZ9c42ECqEjwg/GHHsouij3pzLXkFdz0UxHa/0OM12LzvX7DzedlY0SIEly2v18YZLRhCRoHZDxbBSWoGYg==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.49.0.tgz", + "integrity": "sha512-BR7+blScdLW1h/2hB/2oXM+dhTmpW3rQt1DeSiCP9mc2NMMkqVgjIN3DDsNpKmezffGC9R8XKVOLmBkRUcK/sA==", "cpu": [ "x64" ], @@ -1068,9 +1119,9 @@ ] }, "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.32.1.tgz", - "integrity": "sha512-BLoiyHDOWoS3uccNSADMza6V6vCNiphi94tQlVIL5de+r6r/CCQuNnerf+1g2mnk2b6edp5dk0nhdZ7aEjOBsA==", + "version": "4.49.0", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.49.0.tgz", + "integrity": "sha512-hDMOAe+6nX3V5ei1I7Au3wcr9h3ktKzDvF2ne5ovX8RZiAHEtX1A5SNNk4zt1Qt77CmnbqT+upb/umzoPMWiPg==", "cpu": [ "x64" ], @@ -1082,9 +1133,9 @@ ] }, "node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.32.1.tgz", - "integrity": "sha512-w2l3UnlgYTNNU+Z6wOR8YdaioqfEnwPjIsJ66KxKAf0p+AuL2FHeTX6qvM+p/Ue3XPBVNyVSfCrfZiQh7vZHLQ==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.49.0.tgz", + "integrity": "sha512-wkNRzfiIGaElC9kXUT+HLx17z7D0jl+9tGYRKwd8r7cUqTL7GYAvgUY++U2hK6Ar7z5Z6IRRoWC8kQxpmM7TDA==", "cpu": [ "arm64" ], @@ -1096,9 +1147,9 @@ ] }, "node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.32.1.tgz", - "integrity": "sha512-Am9H+TGLomPGkBnaPWie4F3x+yQ2rr4Bk2jpwy+iV+Gel9jLAu/KqT8k3X4jxFPW6Zf8OMnehyutsd+eHoq1WQ==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.49.0.tgz", + "integrity": "sha512-gq5aW/SyNpjp71AAzroH37DtINDcX1Qw2iv9Chyz49ZgdOP3NV8QCyKZUrGsYX9Yyggj5soFiRCgsL3HwD8TdA==", "cpu": [ "ia32" ], @@ -1110,9 +1161,9 @@ ] }, "node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.32.1.tgz", - "integrity": "sha512-ar80GhdZb4DgmW3myIS9nRFYcpJRSME8iqWgzH2i44u+IdrzmiXVxeFnExQ5v4JYUSpg94bWjevMG8JHf1Da5Q==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.49.0.tgz", + "integrity": "sha512-gEtqFbzmZLFk2xKh7g0Rlo8xzho8KrEFEkzvHbfUGkrgXOpZ4XagQ6n+wIZFNh1nTb8UD16J4nFSFKXYgnbdBg==", "cpu": [ "x64" ], @@ -1123,17 +1174,35 @@ "win32" ] }, + "node_modules/@types/chai": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.2.tgz", + "integrity": "sha512-8kB30R7Hwqf40JPiKhVzodJs2Qc1ZJ5zuT3uzw5Hq/dhNCl3G3l83jfpdI1e20BP348+fV7VIL/+FxaXkqBmWg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/deep-eql": "*" + } + }, + "node_modules/@types/deep-eql": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", + "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/emscripten": { "version": "1.40.0", "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.40.0.tgz", "integrity": "sha512-MD2JJ25S4tnjnhjWyalMS6K6p0h+zQV6+Ylm+aGbiS8tSn/aHLSGNzBgduj6FB4zH0ax2GRMGYi/8G1uOxhXWA==", - "dev": true, - "license": "MIT" + "license": "MIT", + "optional": true, + "peer": true }, "node_modules/@types/estree": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", - "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", "dev": true, "license": "MIT" }, @@ -1145,13 +1214,13 @@ "license": "MIT" }, 
"node_modules/@types/node": { - "version": "22.12.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.12.0.tgz", - "integrity": "sha512-Fll2FZ1riMjNmlmJOdAyY5pUbkftXslB5DgEzlIuNaiWhXd00FhWxVC/r4yV/4wBb9JfImTu+jiSvXTkJ7F/gA==", + "version": "22.18.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.0.tgz", + "integrity": "sha512-m5ObIqwsUp6BZzyiy4RdZpzWGub9bqLJMvZDD0QMXhxjqMHMENlj+SqF5QxoUwaQNFe+8kz8XM8ZQhqkQPTgMQ==", "dev": true, "license": "MIT", "dependencies": { - "undici-types": "~6.20.0" + "undici-types": "~6.21.0" } }, "node_modules/@typescript-eslint/eslint-plugin": { @@ -1293,9 +1362,9 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1361,22 +1430,23 @@ } }, "node_modules/@vitest/coverage-v8": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.0.4.tgz", - "integrity": "sha512-f0twgRCHgbs24Dp8cLWagzcObXMcuKtAwgxjJV/nnysPAJJk1JiKu/W0gIehZLmkljhJXU/E0/dmuQzsA/4jhA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-3.2.4.tgz", + "integrity": "sha512-EyF9SXU6kS5Ku/U82E259WSnvg6c8KTjppUncuNdm5QHpe17mwREHnjDzozC8x9MZ0xfBUFSaLkRv4TMA75ALQ==", "dev": true, "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.3.0", "@bcoe/v8-coverage": "^1.0.2", - "debug": "^4.4.0", + "ast-v8-to-istanbul": "^0.3.3", + "debug": "^4.4.1", "istanbul-lib-coverage": "^3.2.2", "istanbul-lib-report": "^3.0.1", "istanbul-lib-source-maps": "^5.0.6", "istanbul-reports": "^3.1.7", "magic-string": "^0.30.17", "magicast": "^0.3.5", - "std-env": "^3.8.0", + "std-env": "^3.9.0", "test-exclude": "^7.0.1", "tinyrainbow": "^2.0.0" }, @@ -1384,8 +1454,8 @@ "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@vitest/browser": "3.0.4", - "vitest": "3.0.4" + "@vitest/browser": "3.2.4", + "vitest": "3.2.4" }, "peerDependenciesMeta": { "@vitest/browser": { @@ -1394,15 +1464,16 @@ } }, "node_modules/@vitest/expect": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.4.tgz", - "integrity": "sha512-Nm5kJmYw6P2BxhJPkO3eKKhGYKRsnqJqf+r0yOGRKpEP+bSCBDsjXgiu1/5QFrnPMEgzfC38ZEjvCFgaNBC0Eg==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.2.4.tgz", + "integrity": "sha512-Io0yyORnB6sikFlt8QW5K7slY4OjqNX9jmJQ02QDda8lyM6B5oNgVWoSoKPac8/kgnCUzuHQKrSLtu/uOqqrig==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.4", - "@vitest/utils": "3.0.4", - "chai": "^5.1.2", + "@types/chai": "^5.2.2", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", "tinyrainbow": "^2.0.0" }, "funding": { @@ -1410,13 +1481,13 @@ } }, "node_modules/@vitest/mocker": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.4.tgz", - "integrity": "sha512-gEef35vKafJlfQbnyOXZ0Gcr9IBUsMTyTLXsEQwuyYAerpHqvXhzdBnDFuHLpFqth3F7b6BaFr4qV/Cs1ULx5A==", + "version": "3.2.4", + "resolved": 
"https://registry.npmjs.org/@vitest/mocker/-/mocker-3.2.4.tgz", + "integrity": "sha512-46ryTE9RZO/rfDd7pEqFl7etuyzekzEhUbTW3BvmeO/BcCMEgq59BKhek3dXDWgAj4oMK6OZi+vRr1wPW6qjEQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.4", + "@vitest/spy": "3.2.4", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, @@ -1425,7 +1496,7 @@ }, "peerDependencies": { "msw": "^2.4.9", - "vite": "^5.0.0 || ^6.0.0" + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "peerDependenciesMeta": { "msw": { @@ -1437,9 +1508,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.4.tgz", - "integrity": "sha512-ts0fba+dEhK2aC9PFuZ9LTpULHpY/nd6jhAQ5IMU7Gaj7crPCTdCFfgvXxruRBLFS+MLraicCuFXxISEq8C93g==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.2.4.tgz", + "integrity": "sha512-IVNZik8IVRJRTr9fxlitMKeJeXFFFN0JaB9PHPGQ8NKQbGpfjlTx9zO4RefN8gp7eqjNy8nyK3NZmBzOPeIxtA==", "dev": true, "license": "MIT", "dependencies": { @@ -1450,56 +1521,57 @@ } }, "node_modules/@vitest/runner": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.4.tgz", - "integrity": "sha512-dKHzTQ7n9sExAcWH/0sh1elVgwc7OJ2lMOBrAm73J7AH6Pf9T12Zh3lNE1TETZaqrWFXtLlx3NVrLRb5hCK+iw==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.2.4.tgz", + "integrity": "sha512-oukfKT9Mk41LreEW09vt45f8wx7DordoWUZMYdY/cyAk7w5TWkTRCNZYF7sX7n2wB7jyGAl74OxgwhPgKaqDMQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.4", - "pathe": "^2.0.2" + "@vitest/utils": "3.2.4", + "pathe": "^2.0.3", + "strip-literal": "^3.0.0" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/snapshot": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.4.tgz", - "integrity": "sha512-+p5knMLwIk7lTQkM3NonZ9zBewzVp9EVkVpvNta0/PlFWpiqLaRcF4+33L1it3uRUCh0BGLOaXPPGEjNKfWb4w==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.2.4.tgz", + "integrity": "sha512-dEYtS7qQP2CjU27QBC5oUOxLE/v5eLkGqPE0ZKEIDGMs4vKWe7IjgLOeauHsR0D5YuuycGRO5oSRXnwnmA78fQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.4", + "@vitest/pretty-format": "3.2.4", "magic-string": "^0.30.17", - "pathe": "^2.0.2" + "pathe": "^2.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/spy": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.4.tgz", - "integrity": "sha512-sXIMF0oauYyUy2hN49VFTYodzEAu744MmGcPR3ZBsPM20G+1/cSW/n1U+3Yu/zHxX2bIDe1oJASOkml+osTU6Q==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.2.4.tgz", + "integrity": "sha512-vAfasCOe6AIK70iP5UD11Ac4siNUNJ9i/9PZ3NKx07sG6sUxeag1LWdNrMWeKKYBLlzuK+Gn65Yd5nyL6ds+nw==", "dev": true, "license": "MIT", "dependencies": { - "tinyspy": "^3.0.2" + "tinyspy": "^4.0.3" }, "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.4.tgz", - "integrity": "sha512-8BqC1ksYsHtbWH+DfpOAKrFw3jl3Uf9J7yeFh85Pz52IWuh1hBBtyfEbRNNZNjl8H8A5yMLH9/t+k7HIKzQcZQ==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.2.4.tgz", + "integrity": 
"sha512-fB2V0JFrQSMsCo9HiSq3Ezpdv4iYaXRG1Sx8edX3MwxfyNn83mKiGzOcH+Fkxt4MHxr3y42fQi1oeAInqgX2QA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.4", - "loupe": "^3.1.2", + "@vitest/pretty-format": "3.2.4", + "loupe": "^3.1.4", "tinyrainbow": "^2.0.0" }, "funding": { @@ -1507,9 +1579,9 @@ } }, "node_modules/acorn": { - "version": "8.14.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz", - "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==", + "version": "8.15.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz", + "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", "bin": { @@ -1592,6 +1664,18 @@ "node": ">=12" } }, + "node_modules/ast-v8-to-istanbul": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/ast-v8-to-istanbul/-/ast-v8-to-istanbul-0.3.5.tgz", + "integrity": "sha512-9SdXjNheSiE8bALAQCQQuT6fgQaoxJh7IRYrRGZ8/9nv8WhJeC1aXAwN8TbaOssGOukUvyvnkgD9+Yuykvl1aA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.30", + "estree-walker": "^3.0.3", + "js-tokens": "^9.0.1" + } + }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", @@ -1600,9 +1684,9 @@ "license": "MIT" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "dev": true, "license": "MIT", "dependencies": { @@ -1644,9 +1728,9 @@ } }, "node_modules/chai": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/chai/-/chai-5.1.2.tgz", - "integrity": "sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==", + "version": "5.3.3", + "resolved": "https://registry.npmjs.org/chai/-/chai-5.3.3.tgz", + "integrity": "sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==", "dev": true, "license": "MIT", "dependencies": { @@ -1657,7 +1741,7 @@ "pathval": "^2.0.0" }, "engines": { - "node": ">=12" + "node": ">=18" } }, "node_modules/chalk": { @@ -1730,9 +1814,9 @@ } }, "node_modules/debug": { - "version": "4.4.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", - "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz", + "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==", "dev": true, "license": "MIT", "dependencies": { @@ -1816,9 +1900,9 @@ "license": "MIT" }, "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": 
"sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", "dev": true, "license": "MIT" }, @@ -1877,22 +1961,23 @@ } }, "node_modules/eslint": { - "version": "9.19.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.19.0.tgz", - "integrity": "sha512-ug92j0LepKlbbEv6hD911THhoRHmbdXt2gX+VDABAW/Ir7D3nqKdv5Pf5vtlyY6HQMTEP2skXY43ueqTCWssEA==", + "version": "9.34.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.34.0.tgz", + "integrity": "sha512-RNCHRX5EwdrESy3Jc9o8ie8Bog+PeYvvSR8sDGoZxNFTvZ4dlxUB3WzQ3bQMztFrSRODGrLLj8g6OFuGY/aiQg==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.0", - "@eslint/core": "^0.10.0", - "@eslint/eslintrc": "^3.2.0", - "@eslint/js": "9.19.0", - "@eslint/plugin-kit": "^0.2.5", + "@eslint/config-array": "^0.21.0", + "@eslint/config-helpers": "^0.3.1", + "@eslint/core": "^0.15.2", + "@eslint/eslintrc": "^3.3.1", + "@eslint/js": "9.34.0", + "@eslint/plugin-kit": "^0.3.5", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", - "@humanwhocodes/retry": "^0.4.1", + "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", @@ -1900,9 +1985,9 @@ "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^8.2.0", - "eslint-visitor-keys": "^4.2.0", - "espree": "^10.3.0", + "eslint-scope": "^8.4.0", + "eslint-visitor-keys": "^4.2.1", + "espree": "^10.4.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -1937,9 +2022,9 @@ } }, "node_modules/eslint-scope": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz", - "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==", + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz", + "integrity": "sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==", "dev": true, "license": "BSD-2-Clause", "dependencies": { @@ -1954,9 +2039,9 @@ } }, "node_modules/eslint-visitor-keys": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz", - "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz", + "integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1967,15 +2052,15 @@ } }, "node_modules/espree": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz", - "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==", + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz", + "integrity": "sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==", "dev": true, "license": "BSD-2-Clause", "dependencies": { - "acorn": "^8.14.0", + "acorn": "^8.15.0", "acorn-jsx": "^5.3.2", - "eslint-visitor-keys": "^4.2.0" + "eslint-visitor-keys": "^4.2.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -2041,9 +2126,9 @@ } }, 
"node_modules/expect-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.1.0.tgz", - "integrity": "sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.2.tgz", + "integrity": "sha512-JhFGDVJ7tmDJItKhYgJCGLOWjuK9vPxiXoUFLwLDc99NlmklilbiQJwoctZtt13+xMw91MCk/REan6MWHqDjyA==", "dev": true, "license": "Apache-2.0", "engines": { @@ -2255,9 +2340,9 @@ } }, "node_modules/glob/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2342,9 +2427,9 @@ } }, "node_modules/import-fresh": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", - "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz", + "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==", "dev": true, "license": "MIT", "dependencies": { @@ -2488,6 +2573,13 @@ "@pkgjs/parseargs": "^0.11.0" } }, + "node_modules/js-tokens": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-9.0.1.tgz", + "integrity": "sha512-mxa9E9ITFOt0ban3j6L5MpjwegGz6lBQmM1IJkWeBZGcMxto50+eWdjC/52xDbS2vy0k7vIMK0Fe2wfL9OQSpQ==", + "dev": true, + "license": "MIT" + }, "node_modules/js-yaml": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", @@ -2587,9 +2679,9 @@ "license": "MIT" }, "node_modules/loupe": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.1.3.tgz", - "integrity": "sha512-kkIp7XSkP78ZxJEsSxW3712C6teJVoeHHwgo9zJ380de7IYyJ2ISlxojcH2pC5OFLewESmnRi/+XCDIEEVyoug==", + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/loupe/-/loupe-3.2.1.tgz", + "integrity": "sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==", "dev": true, "license": "MIT" }, @@ -2703,9 +2795,9 @@ "license": "MIT" }, "node_modules/nanoid": { - "version": "3.3.8", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.8.tgz", - "integrity": "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==", + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", "dev": true, "funding": [ { @@ -2836,16 +2928,16 @@ } }, "node_modules/pathe": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.2.tgz", - "integrity": "sha512-15Ztpk+nov8DR524R4BF7uEuzESgzUEAV4Ah7CUMNGXdE5ELuvxElxGXndBl32vMSsWa1jpNf22Z+Er3sKwq+w==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", + "integrity": 
"sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", "dev": true, "license": "MIT" }, "node_modules/pathval": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.0.tgz", - "integrity": "sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-2.0.1.tgz", + "integrity": "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==", "dev": true, "license": "MIT", "engines": { @@ -2873,9 +2965,9 @@ } }, "node_modules/postcss": { - "version": "8.5.1", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.1.tgz", - "integrity": "sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ==", + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", "dev": true, "funding": [ { @@ -2893,7 +2985,7 @@ ], "license": "MIT", "dependencies": { - "nanoid": "^3.3.8", + "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" }, @@ -2974,13 +3066,13 @@ } }, "node_modules/rollup": { - "version": "4.32.1", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.32.1.tgz", - "integrity": "sha512-z+aeEsOeEa3mEbS1Tjl6sAZ8NE3+AalQz1RJGj81M+fizusbdDMoEJwdJNHfaB40Scr4qNu+welOfes7maKonA==", + "version": "4.49.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.49.0.tgz", + "integrity": "sha512-3IVq0cGJ6H7fKXXEdVt+RcYvRCt8beYY9K1760wGQwSAHZcS9eot1zDG5axUbcp/kWRi5zKIIDX8MoKv/TzvZA==", "dev": true, "license": "MIT", "dependencies": { - "@types/estree": "1.0.6" + "@types/estree": "1.0.8" }, "bin": { "rollup": "dist/bin/rollup" @@ -2990,25 +3082,26 @@ "npm": ">=8.0.0" }, "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.32.1", - "@rollup/rollup-android-arm64": "4.32.1", - "@rollup/rollup-darwin-arm64": "4.32.1", - "@rollup/rollup-darwin-x64": "4.32.1", - "@rollup/rollup-freebsd-arm64": "4.32.1", - "@rollup/rollup-freebsd-x64": "4.32.1", - "@rollup/rollup-linux-arm-gnueabihf": "4.32.1", - "@rollup/rollup-linux-arm-musleabihf": "4.32.1", - "@rollup/rollup-linux-arm64-gnu": "4.32.1", - "@rollup/rollup-linux-arm64-musl": "4.32.1", - "@rollup/rollup-linux-loongarch64-gnu": "4.32.1", - "@rollup/rollup-linux-powerpc64le-gnu": "4.32.1", - "@rollup/rollup-linux-riscv64-gnu": "4.32.1", - "@rollup/rollup-linux-s390x-gnu": "4.32.1", - "@rollup/rollup-linux-x64-gnu": "4.32.1", - "@rollup/rollup-linux-x64-musl": "4.32.1", - "@rollup/rollup-win32-arm64-msvc": "4.32.1", - "@rollup/rollup-win32-ia32-msvc": "4.32.1", - "@rollup/rollup-win32-x64-msvc": "4.32.1", + "@rollup/rollup-android-arm-eabi": "4.49.0", + "@rollup/rollup-android-arm64": "4.49.0", + "@rollup/rollup-darwin-arm64": "4.49.0", + "@rollup/rollup-darwin-x64": "4.49.0", + "@rollup/rollup-freebsd-arm64": "4.49.0", + "@rollup/rollup-freebsd-x64": "4.49.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.49.0", + "@rollup/rollup-linux-arm-musleabihf": "4.49.0", + "@rollup/rollup-linux-arm64-gnu": "4.49.0", + "@rollup/rollup-linux-arm64-musl": "4.49.0", + "@rollup/rollup-linux-loongarch64-gnu": "4.49.0", + "@rollup/rollup-linux-ppc64-gnu": "4.49.0", + "@rollup/rollup-linux-riscv64-gnu": "4.49.0", + "@rollup/rollup-linux-riscv64-musl": "4.49.0", + "@rollup/rollup-linux-s390x-gnu": "4.49.0", + 
"@rollup/rollup-linux-x64-gnu": "4.49.0", + "@rollup/rollup-linux-x64-musl": "4.49.0", + "@rollup/rollup-win32-arm64-msvc": "4.49.0", + "@rollup/rollup-win32-ia32-msvc": "4.49.0", + "@rollup/rollup-win32-x64-msvc": "4.49.0", "fsevents": "~2.3.2" } }, @@ -3133,9 +3226,9 @@ "license": "MIT" }, "node_modules/std-env": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.0.tgz", - "integrity": "sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==", + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", "dev": true, "license": "MIT" }, @@ -3256,6 +3349,19 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/strip-literal": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.0.0.tgz", + "integrity": "sha512-TcccoMhJOM3OebGhSBEmp3UZ2SfDMZUEBdRA/9ynfLi8yYajyWX3JiXArcJt4Umh4vISpspkQIY8ZZoCqjbviA==", + "dev": true, + "license": "MIT", + "dependencies": { + "js-tokens": "^9.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, "node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -3285,9 +3391,9 @@ } }, "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -3335,10 +3441,58 @@ "dev": true, "license": "MIT" }, + "node_modules/tinyglobby": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz", + "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tinypool": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", - "integrity": 
"sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.1.1.tgz", + "integrity": "sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==", "dev": true, "license": "MIT", "engines": { @@ -3356,9 +3510,9 @@ } }, "node_modules/tinyspy": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-3.0.2.tgz", - "integrity": "sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/tinyspy/-/tinyspy-4.0.3.tgz", + "integrity": "sha512-t2T/WLB2WRgZ9EpE4jgPJ9w+i66UZfDc8wHh0xrwiRNN+UwH98GIJkTeZqX9rg0i0ptwzqW+uYeIF0T4F8LR7A==", "dev": true, "license": "MIT", "engines": { @@ -3392,13 +3546,13 @@ } }, "node_modules/tsx": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.19.2.tgz", - "integrity": "sha512-pOUl6Vo2LUq/bSa8S5q7b91cgNSjctn9ugq/+Mvow99qW6x/UZYwzxy/3NmqoT66eHYfCVvFvACC58UBPFf28g==", + "version": "4.20.5", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.20.5.tgz", + "integrity": "sha512-+wKjMNU9w/EaQayHXb7WA7ZaHY6hN8WgfvHNQ3t1PnU91/7O8TcTnIhCDYTZwnt8JsO9IBqZ30Ln1r7pPF52Aw==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "~0.23.0", + "esbuild": "~0.25.0", "get-tsconfig": "^4.7.5" }, "bin": { @@ -3412,9 +3566,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/aix-ppc64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.23.1.tgz", - "integrity": "sha512-6VhYk1diRqrhBAqpJEdjASR/+WVRtfjpqKuNw11cLiaWpAT/Uu+nokB+UJnevzy/P9C/ty6AOe0dwueMrGh/iQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", + "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", "cpu": [ "ppc64" ], @@ -3429,9 +3583,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/android-arm": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.23.1.tgz", - "integrity": "sha512-uz6/tEy2IFm9RYOyvKl88zdzZfwEfKZmnX9Cj1BHjeSGNuGLuMD1kR8y5bteYmwqKm1tj8m4cb/aKEorr6fHWQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", + "integrity": "sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", "cpu": [ "arm" ], @@ -3446,9 +3600,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/android-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.23.1.tgz", - "integrity": "sha512-xw50ipykXcLstLeWH7WRdQuysJqejuAGPd30vd1i5zSyKK3WE+ijzHmLKxdiCMtH1pHz78rOg0BKSYOSB/2Khw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", + "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", "cpu": [ "arm64" ], @@ -3463,9 +3617,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/android-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.23.1.tgz", - "integrity": "sha512-nlN9B69St9BwUoB+jkyU090bru8L0NA3yFvAd7k8dNsVH8bi9a8cUAUSEcEEgTp2z3dbEDGJGfP6VUnkQnlReg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", + 
"integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", "cpu": [ "x64" ], @@ -3480,9 +3634,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/darwin-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.23.1.tgz", - "integrity": "sha512-YsS2e3Wtgnw7Wq53XXBLcV6JhRsEq8hkfg91ESVadIrzr9wO6jJDMZnCQbHm1Guc5t/CdDiFSSfWP58FNuvT3Q==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", + "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", "cpu": [ "arm64" ], @@ -3497,9 +3651,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/darwin-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.23.1.tgz", - "integrity": "sha512-aClqdgTDVPSEGgoCS8QDG37Gu8yc9lTHNAQlsztQ6ENetKEO//b8y31MMu2ZaPbn4kVsIABzVLXYLhCGekGDqw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", + "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", "cpu": [ "x64" ], @@ -3514,9 +3668,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/freebsd-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.23.1.tgz", - "integrity": "sha512-h1k6yS8/pN/NHlMl5+v4XPfikhJulk4G+tKGFIOwURBSFzE8bixw1ebjluLOjfwtLqY0kewfjLSrO6tN2MgIhA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", + "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", "cpu": [ "arm64" ], @@ -3531,9 +3685,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/freebsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.23.1.tgz", - "integrity": "sha512-lK1eJeyk1ZX8UklqFd/3A60UuZ/6UVfGT2LuGo3Wp4/z7eRTRYY+0xOu2kpClP+vMTi9wKOfXi2vjUpO1Ro76g==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", + "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", "cpu": [ "x64" ], @@ -3548,9 +3702,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-arm": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.23.1.tgz", - "integrity": "sha512-CXXkzgn+dXAPs3WBwE+Kvnrf4WECwBdfjfeYHpMeVxWE0EceB6vhWGShs6wi0IYEqMSIzdOF1XjQ/Mkm5d7ZdQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", + "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", "cpu": [ "arm" ], @@ -3565,9 +3719,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.23.1.tgz", - "integrity": "sha512-/93bf2yxencYDnItMYV/v116zff6UyTjo4EtEQjUBeGiVpMmffDNUyD9UN2zV+V3LRV3/on4xdZ26NKzn6754g==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", + "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", "cpu": [ "arm64" ], @@ -3582,9 +3736,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-ia32": { - 
"version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.23.1.tgz", - "integrity": "sha512-VTN4EuOHwXEkXzX5nTvVY4s7E/Krz7COC8xkftbbKRYAl96vPiUssGkeMELQMOnLOJ8k3BY1+ZY52tttZnHcXQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", + "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", "cpu": [ "ia32" ], @@ -3599,9 +3753,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-loong64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.23.1.tgz", - "integrity": "sha512-Vx09LzEoBa5zDnieH8LSMRToj7ir/Jeq0Gu6qJ/1GcBq9GkfoEAoXvLiW1U9J1qE/Y/Oyaq33w5p2ZWrNNHNEw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", + "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", "cpu": [ "loong64" ], @@ -3616,9 +3770,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-mips64el": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.23.1.tgz", - "integrity": "sha512-nrFzzMQ7W4WRLNUOU5dlWAqa6yVeI0P78WKGUo7lg2HShq/yx+UYkeNSE0SSfSure0SqgnsxPvmAUu/vu0E+3Q==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", + "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", "cpu": [ "mips64el" ], @@ -3633,9 +3787,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-ppc64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.23.1.tgz", - "integrity": "sha512-dKN8fgVqd0vUIjxuJI6P/9SSSe/mB9rvA98CSH2sJnlZ/OCZWO1DJvxj8jvKTfYUdGfcq2dDxoKaC6bHuTlgcw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", + "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", "cpu": [ "ppc64" ], @@ -3650,9 +3804,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-riscv64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.23.1.tgz", - "integrity": "sha512-5AV4Pzp80fhHL83JM6LoA6pTQVWgB1HovMBsLQ9OZWLDqVY8MVobBXNSmAJi//Csh6tcY7e7Lny2Hg1tElMjIA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", + "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", "cpu": [ "riscv64" ], @@ -3667,9 +3821,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-s390x": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.23.1.tgz", - "integrity": "sha512-9ygs73tuFCe6f6m/Tb+9LtYxWR4c9yg7zjt2cYkjDbDpV/xVn+68cQxMXCjUpYwEkze2RcU/rMnfIXNRFmSoDw==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", + "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", "cpu": [ "s390x" ], @@ -3684,9 +3838,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/linux-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.23.1.tgz", - "integrity": 
"sha512-EV6+ovTsEXCPAp58g2dD68LxoP/wK5pRvgy0J/HxPGB009omFPv3Yet0HiaqvrIrgPTBuC6wCH1LTOY91EO5hQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", + "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", "cpu": [ "x64" ], @@ -3700,10 +3854,27 @@ "node": ">=18" } }, + "node_modules/tsx/node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", + "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, "node_modules/tsx/node_modules/@esbuild/netbsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.23.1.tgz", - "integrity": "sha512-aevEkCNu7KlPRpYLjwmdcuNz6bDFiE7Z8XC4CPqExjTvrHugh28QzUXVOZtiYghciKUacNktqxdpymplil1beA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", + "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", "cpu": [ "x64" ], @@ -3718,9 +3889,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/openbsd-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.23.1.tgz", - "integrity": "sha512-3x37szhLexNA4bXhLrCC/LImN/YtWis6WXr1VESlfVtVeoFJBRINPJ3f0a/6LV8zpikqoUg4hyXw0sFBt5Cr+Q==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", + "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", "cpu": [ "arm64" ], @@ -3735,9 +3906,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/openbsd-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.23.1.tgz", - "integrity": "sha512-aY2gMmKmPhxfU+0EdnN+XNtGbjfQgwZj43k8G3fyrDM/UdZww6xrWxmDkuz2eCZchqVeABjV5BpildOrUbBTqA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", + "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", "cpu": [ "x64" ], @@ -3752,9 +3923,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/sunos-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.23.1.tgz", - "integrity": "sha512-RBRT2gqEl0IKQABT4XTj78tpk9v7ehp+mazn2HbUeZl1YMdaGAQqhapjGTCe7uw7y0frDi4gS0uHzhvpFuI1sA==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", + "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", "cpu": [ "x64" ], @@ -3769,9 +3940,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/win32-arm64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.23.1.tgz", - "integrity": "sha512-4O+gPR5rEBe2FpKOVyiJ7wNDPA8nGzDuJ6gN4okSA1gEOYZ67N8JPk58tkWtdtPeLz7lBnY6I5L3jdsr3S+A6A==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", + "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", "cpu": 
[ "arm64" ], @@ -3786,9 +3957,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/win32-ia32": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.23.1.tgz", - "integrity": "sha512-BcaL0Vn6QwCwre3Y717nVHZbAa4UBEigzFm6VdsVdT/MbZ38xoj1X9HPkZhbmaBGUD1W8vxAfffbDe8bA6AKnQ==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", + "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", "cpu": [ "ia32" ], @@ -3803,9 +3974,9 @@ } }, "node_modules/tsx/node_modules/@esbuild/win32-x64": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.23.1.tgz", - "integrity": "sha512-BHpFFeslkWrXWyUPnbKm+xYYVYruCinGcftSBaa8zoF9hZO4BcSCFUvHVTtzpIY6YzUnYtuEhZ+C9iEXjxnasg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", + "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", "cpu": [ "x64" ], @@ -3820,9 +3991,9 @@ } }, "node_modules/tsx/node_modules/esbuild": { - "version": "0.23.1", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.23.1.tgz", - "integrity": "sha512-VVNz/9Sa0bs5SELtn3f7qhJCDPCF5oMEl5cO9/SSinpE9hbPVvxbd572HH5AKiP7WD8INO53GgfDDhRjkylHEg==", + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", + "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", "dev": true, "hasInstallScript": true, "license": "MIT", @@ -3833,30 +4004,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.23.1", - "@esbuild/android-arm": "0.23.1", - "@esbuild/android-arm64": "0.23.1", - "@esbuild/android-x64": "0.23.1", - "@esbuild/darwin-arm64": "0.23.1", - "@esbuild/darwin-x64": "0.23.1", - "@esbuild/freebsd-arm64": "0.23.1", - "@esbuild/freebsd-x64": "0.23.1", - "@esbuild/linux-arm": "0.23.1", - "@esbuild/linux-arm64": "0.23.1", - "@esbuild/linux-ia32": "0.23.1", - "@esbuild/linux-loong64": "0.23.1", - "@esbuild/linux-mips64el": "0.23.1", - "@esbuild/linux-ppc64": "0.23.1", - "@esbuild/linux-riscv64": "0.23.1", - "@esbuild/linux-s390x": "0.23.1", - "@esbuild/linux-x64": "0.23.1", - "@esbuild/netbsd-x64": "0.23.1", - "@esbuild/openbsd-arm64": "0.23.1", - "@esbuild/openbsd-x64": "0.23.1", - "@esbuild/sunos-x64": "0.23.1", - "@esbuild/win32-arm64": "0.23.1", - "@esbuild/win32-ia32": "0.23.1", - "@esbuild/win32-x64": "0.23.1" + "@esbuild/aix-ppc64": "0.25.9", + "@esbuild/android-arm": "0.25.9", + "@esbuild/android-arm64": "0.25.9", + "@esbuild/android-x64": "0.25.9", + "@esbuild/darwin-arm64": "0.25.9", + "@esbuild/darwin-x64": "0.25.9", + "@esbuild/freebsd-arm64": "0.25.9", + "@esbuild/freebsd-x64": "0.25.9", + "@esbuild/linux-arm": "0.25.9", + "@esbuild/linux-arm64": "0.25.9", + "@esbuild/linux-ia32": "0.25.9", + "@esbuild/linux-loong64": "0.25.9", + "@esbuild/linux-mips64el": "0.25.9", + "@esbuild/linux-ppc64": "0.25.9", + "@esbuild/linux-riscv64": "0.25.9", + "@esbuild/linux-s390x": "0.25.9", + "@esbuild/linux-x64": "0.25.9", + "@esbuild/netbsd-arm64": "0.25.9", + "@esbuild/netbsd-x64": "0.25.9", + "@esbuild/openbsd-arm64": "0.25.9", + "@esbuild/openbsd-x64": "0.25.9", + "@esbuild/openharmony-arm64": "0.25.9", + "@esbuild/sunos-x64": "0.25.9", + "@esbuild/win32-arm64": "0.25.9", + "@esbuild/win32-ia32": "0.25.9", + "@esbuild/win32-x64": "0.25.9" } }, 
"node_modules/type-check": { @@ -3910,9 +4083,9 @@ } }, "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "dev": true, "license": "MIT" }, @@ -3927,21 +4100,24 @@ } }, "node_modules/vite": { - "version": "6.0.11", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.0.11.tgz", - "integrity": "sha512-4VL9mQPKoHy4+FE0NnRE/kbY51TOfaknxAjt3fJbGJxhIpBZiqVzlZDEesWWsuREXHwNdAoOFZ9MkPEVXczHwg==", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.1.3.tgz", + "integrity": "sha512-OOUi5zjkDxYrKhTV3V7iKsoS37VUM7v40+HuwEmcrsf11Cdx9y3DIr2Px6liIcZFwt3XSRpQvFpL3WVy7ApkGw==", "dev": true, "license": "MIT", "dependencies": { - "esbuild": "^0.24.2", - "postcss": "^8.4.49", - "rollup": "^4.23.0" + "esbuild": "^0.25.0", + "fdir": "^6.5.0", + "picomatch": "^4.0.3", + "postcss": "^8.5.6", + "rollup": "^4.43.0", + "tinyglobby": "^0.2.14" }, "bin": { "vite": "bin/vite.js" }, "engines": { - "node": "^18.0.0 || ^20.0.0 || >=22.0.0" + "node": "^20.19.0 || >=22.12.0" }, "funding": { "url": "https://github.com/vitejs/vite?sponsor=1" @@ -3950,14 +4126,14 @@ "fsevents": "~2.3.3" }, "peerDependencies": { - "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", + "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", - "less": "*", + "less": "^4.0.0", "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", + "sass": "^1.70.0", + "sass-embedded": "^1.70.0", + "stylus": ">=0.54.8", + "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" @@ -3999,17 +4175,17 @@ } }, "node_modules/vite-node": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.4.tgz", - "integrity": "sha512-7JZKEzcYV2Nx3u6rlvN8qdo3QV7Fxyt6hx+CCKz9fbWxdX5IvUOmTWEAxMrWxaiSf7CKGLJQ5rFu8prb/jBjOA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.2.4.tgz", + "integrity": "sha512-EbKSKh+bh1E1IFxeO0pg1n4dvoOTt0UDiXMd/qn++r98+jPO1xtJilvXldeuQ8giIB5IkpjCgMleHMNEsGH6pg==", "dev": true, "license": "MIT", "dependencies": { "cac": "^6.7.14", - "debug": "^4.4.0", - "es-module-lexer": "^1.6.0", - "pathe": "^2.0.2", - "vite": "^5.0.0 || ^6.0.0" + "debug": "^4.4.1", + "es-module-lexer": "^1.7.0", + "pathe": "^2.0.3", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0" }, "bin": { "vite-node": "vite-node.mjs" @@ -4021,32 +4197,533 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/vite/node_modules/@esbuild/aix-ppc64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", + "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", + "integrity": "sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", + "cpu": [ + "arm" + 
], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", + "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/android-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", + "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", + "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/darwin-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", + "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", + "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/freebsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", + "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", + "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", + "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + 
], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ia32": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", + "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-loong64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", + "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-mips64el": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", + "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-ppc64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", + "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-riscv64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", + "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-s390x": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", + "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/linux-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", + "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/netbsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", + "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/vite/node_modules/@esbuild/netbsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", + "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", + "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/openbsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", + "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/sunos-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", + "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", + "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-ia32": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", + "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/@esbuild/win32-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", + "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/vite/node_modules/esbuild": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", + "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.9", + "@esbuild/android-arm": "0.25.9", + "@esbuild/android-arm64": "0.25.9", + 
"@esbuild/android-x64": "0.25.9", + "@esbuild/darwin-arm64": "0.25.9", + "@esbuild/darwin-x64": "0.25.9", + "@esbuild/freebsd-arm64": "0.25.9", + "@esbuild/freebsd-x64": "0.25.9", + "@esbuild/linux-arm": "0.25.9", + "@esbuild/linux-arm64": "0.25.9", + "@esbuild/linux-ia32": "0.25.9", + "@esbuild/linux-loong64": "0.25.9", + "@esbuild/linux-mips64el": "0.25.9", + "@esbuild/linux-ppc64": "0.25.9", + "@esbuild/linux-riscv64": "0.25.9", + "@esbuild/linux-s390x": "0.25.9", + "@esbuild/linux-x64": "0.25.9", + "@esbuild/netbsd-arm64": "0.25.9", + "@esbuild/netbsd-x64": "0.25.9", + "@esbuild/openbsd-arm64": "0.25.9", + "@esbuild/openbsd-x64": "0.25.9", + "@esbuild/openharmony-arm64": "0.25.9", + "@esbuild/sunos-x64": "0.25.9", + "@esbuild/win32-arm64": "0.25.9", + "@esbuild/win32-ia32": "0.25.9", + "@esbuild/win32-x64": "0.25.9" + } + }, + "node_modules/vite/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/vitest": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.4.tgz", - "integrity": "sha512-6XG8oTKy2gnJIFTHP6LD7ExFeNLxiTkK3CfMvT7IfR8IN+BYICCf0lXUQmX7i7JoxUP8QmeP4mTnWXgflu4yjw==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.2.4.tgz", + "integrity": "sha512-LUCP5ev3GURDysTWiP47wRRUpLKMOfPh+yKTx3kVIEiu5KOMeqzpnYNsKyOoVrULivR8tLcks4+lga33Whn90A==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "3.0.4", - "@vitest/mocker": "3.0.4", - "@vitest/pretty-format": "^3.0.4", - "@vitest/runner": "3.0.4", - "@vitest/snapshot": "3.0.4", - "@vitest/spy": "3.0.4", - "@vitest/utils": "3.0.4", - "chai": "^5.1.2", - "debug": "^4.4.0", - "expect-type": "^1.1.0", + "@types/chai": "^5.2.2", + "@vitest/expect": "3.2.4", + "@vitest/mocker": "3.2.4", + "@vitest/pretty-format": "^3.2.4", + "@vitest/runner": "3.2.4", + "@vitest/snapshot": "3.2.4", + "@vitest/spy": "3.2.4", + "@vitest/utils": "3.2.4", + "chai": "^5.2.0", + "debug": "^4.4.1", + "expect-type": "^1.2.1", "magic-string": "^0.30.17", - "pathe": "^2.0.2", - "std-env": "^3.8.0", + "pathe": "^2.0.3", + "picomatch": "^4.0.2", + "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", - "tinypool": "^1.0.2", + "tinyglobby": "^0.2.14", + "tinypool": "^1.1.1", "tinyrainbow": "^2.0.0", - "vite": "^5.0.0 || ^6.0.0", - "vite-node": "3.0.4", + "vite": "^5.0.0 || ^6.0.0 || ^7.0.0-0", + "vite-node": "3.2.4", "why-is-node-running": "^2.3.0" }, "bin": { @@ -4062,8 +4739,8 @@ "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.0.4", - "@vitest/ui": "3.0.4", + "@vitest/browser": "3.2.4", + "@vitest/ui": "3.2.4", "happy-dom": "*", "jsdom": "*" }, @@ -4091,6 +4768,19 @@ } } }, + 
"node_modules/vitest/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", diff --git a/lib/binding_web/package.json b/lib/binding_web/package.json index d394b2f9..78c4abba 100644 --- a/lib/binding_web/package.json +++ b/lib/binding_web/package.json @@ -1,9 +1,12 @@ { "name": "web-tree-sitter", - "version": "0.25.1", + "version": "0.25.9", "description": "Tree-sitter bindings for the web", - "repository": "https://github.com/tree-sitter/tree-sitter", - "homepage": "https://github.com/tree-sitter/tree-sitter/tree/master/lib/binding_web", + "repository": { + "type": "git", + "url": "git+https://github.com/tree-sitter/tree-sitter.git", + "directory": "lib/binding_web" + }, "license": "MIT", "author": { "name": "Max Brunsfeld", @@ -19,12 +22,16 @@ "exports": { ".": { "import": "./tree-sitter.js", - "require": "./tree-sitter.cjs" + "require": "./tree-sitter.cjs", + "types": "./web-tree-sitter.d.ts" }, + "./tree-sitter.wasm": "./tree-sitter.wasm", "./debug": { "import": "./debug/tree-sitter.js", - "require": "./debug/tree-sitter.cjs" - } + "require": "./debug/tree-sitter.cjs", + "types": "./web-tree-sitter.d.ts" + }, + "./debug/tree-sitter.wasm": "./debug/tree-sitter.wasm" }, "types": "web-tree-sitter.d.ts", "keywords": [ @@ -54,10 +61,9 @@ "lib/*.h" ], "devDependencies": { - "@eslint/js": "^9.19.0", - "@types/emscripten": "^1.40.0", - "@types/node": "^22.12.0", - "@vitest/coverage-v8": "^3.0.4", + "@eslint/js": "^9.20.0", + "@types/node": "^22.13.1", + "@vitest/coverage-v8": "^3.0.5", "dts-buddy": "^0.5.4", "esbuild": "^0.24.2", "eslint": "^9.19.0", @@ -67,8 +73,16 @@ "typescript-eslint": "^8.22.0", "vitest": "^3.0.4" }, + "peerDependencies": { + "@types/emscripten": "^1.40.0" + }, + "peerDependenciesMeta": { + "@types/emscripten": { + "optional": true + } + }, "scripts": { - "build:ts": "node script/build.js", + "build:ts": "tsc -b . 
&& node script/build.js", "build:wasm": "cd ../../ && cargo xtask build-wasm", "build:wasm:debug": "cd ../../ && cargo xtask build-wasm --debug", "build": "npm run build:wasm && npm run build:ts", diff --git a/lib/binding_web/src/index.ts b/lib/binding_web/src/index.ts index 9ac4a835..92791145 100644 --- a/lib/binding_web/src/index.ts +++ b/lib/binding_web/src/index.ts @@ -1,4 +1,4 @@ -export { +export type { Point, Range, Edit, @@ -7,8 +7,8 @@ export { LogCallback, } from './constants'; export { - ParseOptions, - ParseState, + type ParseOptions, + type ParseState, LANGUAGE_VERSION, MIN_COMPATIBLE_VERSION, Parser, @@ -18,14 +18,14 @@ export { Tree } from './tree'; export { Node } from './node'; export { TreeCursor } from './tree_cursor'; export { - QueryOptions, - QueryState, - QueryProperties, - QueryPredicate, - QueryCapture, - QueryMatch, + type QueryOptions, + type QueryState, + type QueryProperties, + type QueryPredicate, + type QueryCapture, + type QueryMatch, CaptureQuantifier, - PredicateStep, + type PredicateStep, Query, -} from './query'; +} from './query'; export { LookaheadIterator } from './lookahead_iterator'; diff --git a/lib/binding_web/src/language.ts b/lib/binding_web/src/language.ts index 0b63d27e..cdcf159e 100644 --- a/lib/binding_web/src/language.ts +++ b/lib/binding_web/src/language.ts @@ -6,7 +6,7 @@ import { Query } from './query'; const LANGUAGE_FUNCTION_REGEX = /^tree_sitter_\w+$/; -export class LanguageMetadata { +export interface LanguageMetadata { readonly major_version: number; readonly minor_version: number; readonly patch_version: number; @@ -261,8 +261,7 @@ export class Language { } else { // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition if (globalThis.process?.versions.node) { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-require-imports - const fs: typeof import('fs/promises') = require('fs/promises'); + const fs: typeof import('fs/promises') = await import('fs/promises'); bytes = fs.readFile(input); } else { bytes = fetch(input) diff --git a/lib/binding_web/src/marshal.ts b/lib/binding_web/src/marshal.ts index b2e468ec..c742afc6 100644 --- a/lib/binding_web/src/marshal.ts +++ b/lib/binding_web/src/marshal.ts @@ -34,8 +34,8 @@ export function unmarshalCaptures( * * Marshals a {@link Node} to the transfer buffer. */ -export function marshalNode(node: Node) { - let address = TRANSFER_BUFFER; +export function marshalNode(node: Node, index = 0) { + let address = TRANSFER_BUFFER + index * SIZE_OF_NODE; C.setValue(address, node.id, 'i32'); address += SIZE_OF_INT; C.setValue(address, node.startIndex, 'i32'); @@ -168,10 +168,9 @@ export function marshalEdit(edit: Edit, address = TRANSFER_BUFFER) { * * Unmarshals a {@link LanguageMetadata} from the transfer buffer. 
*/ -export function unmarshalLanguageMetadata(address: number): LanguageMetadata { - const result = {} as LanguageMetadata; - result.major_version = C.getValue(address, 'i32'); address += SIZE_OF_INT; - result.minor_version = C.getValue(address, 'i32'); address += SIZE_OF_INT; - result.field_count = C.getValue(address, 'i32'); - return result; +export function unmarshalLanguageMetadata(address: number): LanguageMetadata { + const major_version = C.getValue(address, 'i32'); + const minor_version = C.getValue(address += SIZE_OF_INT, 'i32'); + const patch_version = C.getValue(address += SIZE_OF_INT, 'i32'); + return { major_version, minor_version, patch_version }; } diff --git a/lib/binding_web/src/node.ts b/lib/binding_web/src/node.ts index fd8530e1..df21b75d 100644 --- a/lib/binding_web/src/node.ts +++ b/lib/binding_web/src/node.ts @@ -9,7 +9,8 @@ import { TRANSFER_BUFFER } from './parser'; /** A single node within a syntax {@link Tree}. */ export class Node { /** @internal */ - private [0] = 0; // Internal handle for WASM + // @ts-expect-error: never read + private [0] = 0; // Internal handle for Wasm /** @internal */ private _children?: (Node | null)[]; @@ -416,6 +417,11 @@ export class Node { // Convert the type strings to numeric type symbols const symbols: number[] = []; const typesBySymbol = this.tree.language.types; + for (const node_type of types) { + if (node_type == "ERROR") { + symbols.push(65535); // Internally, ts_builtin_sym_error is -1, which is UINT16_MAX + } + } for (let i = 0, n = typesBySymbol.length; i < n; i++) { if (types.includes(typesBySymbol[i])) { symbols.push(i); @@ -517,7 +523,7 @@ export class Node { */ childWithDescendant(descendant: Node): Node | null { marshalNode(this); - marshalNode(descendant); + marshalNode(descendant, 1); C._ts_node_child_with_descendant_wasm(this.tree[0]); return unmarshalNode(this.tree); } @@ -630,7 +636,7 @@ export class Node { } /** Get the S-expression representation of this node. */ - toString() { + toString(): string { marshalNode(this); const address = C._ts_node_to_string_wasm(this.tree[0]); const result = C.AsciiToString(address); diff --git a/lib/binding_web/src/tree_cursor.ts b/lib/binding_web/src/tree_cursor.ts index 61c93006..7562bb7f 100644 --- a/lib/binding_web/src/tree_cursor.ts +++ b/lib/binding_web/src/tree_cursor.ts @@ -7,16 +7,20 @@ import { getText, Tree } from './tree'; /** A stateful object for walking a syntax {@link Tree} efficiently. 
*/ export class TreeCursor { /** @internal */ - private [0] = 0; // Internal handle for WASM + // @ts-expect-error: never read + private [0] = 0; // Internal handle for Wasm /** @internal */ - private [1] = 0; // Internal handle for WASM + // @ts-expect-error: never read + private [1] = 0; // Internal handle for Wasm /** @internal */ - private [2] = 0; // Internal handle for WASM + // @ts-expect-error: never read + private [2] = 0; // Internal handle for Wasm /** @internal */ - private [3] = 0; // Internal handle for WASM + // @ts-expect-error: never read + private [3] = 0; // Internal handle for Wasm /** @internal */ private tree: Tree; diff --git a/lib/binding_web/test/language.test.ts b/lib/binding_web/test/language.test.ts index 1748302b..bda09a29 100644 --- a/lib/binding_web/test/language.test.ts +++ b/lib/binding_web/test/language.test.ts @@ -89,6 +89,7 @@ describe('Language', () => { '_literal_pattern', 'captured_pattern', 'const_block', + 'generic_pattern', 'identifier', 'macro_invocation', 'mut_pattern', diff --git a/lib/binding_web/test/node.test.ts b/lib/binding_web/test/node.test.ts index 51303808..58d143db 100644 --- a/lib/binding_web/test/node.test.ts +++ b/lib/binding_web/test/node.test.ts @@ -63,7 +63,7 @@ describe('Node', () => { tree = parser.parse('x10 + 1000')!; expect(tree.rootNode.children).toHaveLength(1); const sumNode = tree.rootNode.firstChild!.firstChild!; - expect(sumNode.children.map(child => child!.type)).toEqual(['identifier', '+', 'number' ]); + expect(sumNode.children.map(child => child!.type)).toEqual(['identifier', '+', 'number']); }); }); @@ -189,6 +189,21 @@ describe('Node', () => { }); }); + describe('.childWithDescendant()', () => { + it('correctly retrieves immediate children', () => { + const sourceCode = 'let x = 1; console.log(x);'; + tree = parser.parse(sourceCode)!; + const root = tree.rootNode; + const child = root.children[0]!.children[0]!; + const a = root.childWithDescendant(child); + expect(a!.startIndex).toBe(0); + const b = a!.childWithDescendant(child); + expect(b).toEqual(child); + const c = b!.childWithDescendant(child); + expect(c).toBeNull(); + }); + }); + describe('.nextSibling and .previousSibling', () => { it('returns the node\'s next and previous sibling', () => { tree = parser.parse('x10 + 1000')!; @@ -449,6 +464,24 @@ describe('Node', () => { }); }); + describe('.descendantsOfType("ERROR")', () => { + it('finds all of the descendants of an ERROR node', () => { + tree = parser.parse( + `if ({a: 'b'} {c: 'd'}) { + // ^ ERROR + x = function(a) { b; } function(c) { d; } + }` + )!; + const errorNode = tree.rootNode; + const descendants = errorNode.descendantsOfType('ERROR'); + expect( + descendants.map((node) => node!.startIndex) + ).toEqual( + [4] + ); + }); + }); + describe('.descendantsOfType', () => { it('finds all descendants of a given type in the given range', () => { tree = parser.parse('a + 1 * b * 2 + c + 3')!; diff --git a/lib/binding_web/test/parser.test.ts b/lib/binding_web/test/parser.test.ts index 88240d8c..aa1dcd1c 100644 --- a/lib/binding_web/test/parser.test.ts +++ b/lib/binding_web/test/parser.test.ts @@ -256,7 +256,7 @@ describe('Parser', () => { expect(() => parser.parse({})).toThrow('Argument must be a string or a function'); }); - it('handles long input strings', { timeout: 5000 }, () => { + it('handles long input strings', { timeout: 10000 }, () => { const repeatCount = 10000; const inputString = `[${Array(repeatCount).fill('0').join(',')}]`; diff --git a/lib/binding_web/test/query.test.ts 
b/lib/binding_web/test/query.test.ts index f10c5f28..546c2a19 100644 --- a/lib/binding_web/test/query.test.ts +++ b/lib/binding_web/test/query.test.ts @@ -64,7 +64,7 @@ describe('Query', () => { }); describe('.matches', () => { - it('returns all of the matches for the given query', () => { + it('returns all of the matches for the given query', { timeout: 10000 }, () => { tree = parser.parse('function one() { two(); function three() {} }')!; query = new Query(JavaScript, ` (function_declaration name: (identifier) @fn-def) @@ -462,7 +462,7 @@ describe('Query', () => { }); describe('Set a timeout', () => { - it('returns less than the expected matches', () => { + it('returns less than the expected matches', { timeout: 10000 }, () => { tree = parser.parse('function foo() while (true) { } }\n'.repeat(1000))!; query = new Query(JavaScript, '(function_declaration name: (identifier) @function)'); const matches = query.matches(tree.rootNode, { timeoutMicros: 1000 }); @@ -538,7 +538,7 @@ describe('Query', () => { }); }); - describe('Executes with a timeout', () => { + describe('Executes with a timeout', { timeout: 10000 }, () => { it('Returns less than the expected matches', () => { tree = parser.parse('function foo() while (true) { } }\n'.repeat(1000))!; query = new Query(JavaScript, '(function_declaration) @function'); diff --git a/lib/binding_web/tsconfig.json b/lib/binding_web/tsconfig.json index b7b45b69..b7ed356f 100644 --- a/lib/binding_web/tsconfig.json +++ b/lib/binding_web/tsconfig.json @@ -25,11 +25,14 @@ "esModuleInterop": true, "forceConsistentCasingInFileNames": true, "skipLibCheck": true, + "composite": true, + "isolatedModules": true, }, "include": [ - "src/**/*", - "script/**/*", - "test/**/*", + "src/*.ts", + "script/*", + "test/*", + "lib/*.ts" ], "exclude": [ "node_modules", diff --git a/lib/include/tree_sitter/api.h b/lib/include/tree_sitter/api.h index 9b0dfac0..2bbfe66f 100644 --- a/lib/include/tree_sitter/api.h +++ b/lib/include/tree_sitter/api.h @@ -42,7 +42,6 @@ typedef uint16_t TSStateId; typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; -typedef struct TSLanguageMetadata TSLanguageMetadata; typedef struct TSParser TSParser; typedef struct TSTree TSTree; typedef struct TSQuery TSQuery; diff --git a/lib/language/Cargo.toml b/lib/language/Cargo.toml index e8eeab19..dedcca59 100644 --- a/lib/language/Cargo.toml +++ b/lib/language/Cargo.toml @@ -1,10 +1,10 @@ [package] name = "tree-sitter-language" description = "The tree-sitter Language type, used by the library and by language implementations" -version = "0.1.4" +version = "0.1.5" authors.workspace = true edition.workspace = true -rust-version.workspace = true +rust-version = "1.76" readme = "README.md" homepage.workspace = true repository.workspace = true diff --git a/lib/src/get_changed_ranges.c b/lib/src/get_changed_ranges.c index 8ca5bab3..11084c33 100644 --- a/lib/src/get_changed_ranges.c +++ b/lib/src/get_changed_ranges.c @@ -34,7 +34,7 @@ bool ts_range_array_intersects( uint32_t end_byte ) { for (unsigned i = start_index; i < self->size; i++) { - TSRange *range = &self->contents[i]; + TSRange *range = array_get(self, i); if (range->end_byte > start_byte) { if (range->start_byte >= end_byte) break; return true; @@ -108,6 +108,7 @@ typedef struct { const TSLanguage *language; unsigned visible_depth; bool in_padding; + Subtree prev_external_token; } Iterator; static Iterator iterator_new( @@ -127,6 +128,7 @@ static Iterator iterator_new( .language = language, .visible_depth = 1, 
.in_padding = false, + .prev_external_token = NULL_SUBTREE, }; } @@ -157,7 +159,7 @@ static bool iterator_tree_is_visible(const Iterator *self) { TreeCursorEntry entry = *array_back(&self->cursor.stack); if (ts_subtree_visible(*entry.subtree)) return true; if (self->cursor.stack.size > 1) { - Subtree parent = *self->cursor.stack.contents[self->cursor.stack.size - 2].subtree; + Subtree parent = *array_get(&self->cursor.stack, self->cursor.stack.size - 2)->subtree; return ts_language_alias_at( self->language, parent.ptr->production_id, @@ -181,10 +183,10 @@ static void iterator_get_visible_state( } for (; i + 1 > 0; i--) { - TreeCursorEntry entry = self->cursor.stack.contents[i]; + TreeCursorEntry entry = *array_get(&self->cursor.stack, i); if (i > 0) { - const Subtree *parent = self->cursor.stack.contents[i - 1].subtree; + const Subtree *parent = array_get(&self->cursor.stack, i - 1)->subtree; *alias_symbol = ts_language_alias_at( self->language, parent->ptr->production_id, @@ -244,6 +246,10 @@ static bool iterator_descend(Iterator *self, uint32_t goal_position) { position = child_right; if (!ts_subtree_extra(*child)) structural_child_index++; + Subtree last_external_token = ts_subtree_last_external_token(*child); + if (last_external_token.ptr) { + self->prev_external_token = last_external_token; + } } } while (did_descend); @@ -268,6 +274,10 @@ static void iterator_advance(Iterator *self) { const Subtree *parent = array_back(&self->cursor.stack)->subtree; uint32_t child_index = entry.child_index + 1; + Subtree last_external_token = ts_subtree_last_external_token(*entry.subtree); + if (last_external_token.ptr) { + self->prev_external_token = last_external_token; + } if (ts_subtree_child_count(*parent) > child_index) { Length position = length_add(entry.position, ts_subtree_total_size(*entry.subtree)); uint32_t structural_child_index = entry.structural_child_index; @@ -313,29 +323,41 @@ static IteratorComparison iterator_compare( TSSymbol new_alias_symbol = 0; iterator_get_visible_state(old_iter, &old_tree, &old_alias_symbol, &old_start); iterator_get_visible_state(new_iter, &new_tree, &new_alias_symbol, &new_start); + TSSymbol old_symbol = ts_subtree_symbol(old_tree); + TSSymbol new_symbol = ts_subtree_symbol(new_tree); if (!old_tree.ptr && !new_tree.ptr) return IteratorMatches; if (!old_tree.ptr || !new_tree.ptr) return IteratorDiffers; + if (old_alias_symbol != new_alias_symbol || old_symbol != new_symbol) return IteratorDiffers; + + uint32_t old_size = ts_subtree_size(old_tree).bytes; + uint32_t new_size = ts_subtree_size(new_tree).bytes; + TSStateId old_state = ts_subtree_parse_state(old_tree); + TSStateId new_state = ts_subtree_parse_state(new_tree); + bool old_has_external_tokens = ts_subtree_has_external_tokens(old_tree); + bool new_has_external_tokens = ts_subtree_has_external_tokens(new_tree); + uint32_t old_error_cost = ts_subtree_error_cost(old_tree); + uint32_t new_error_cost = ts_subtree_error_cost(new_tree); if ( - old_alias_symbol == new_alias_symbol && - ts_subtree_symbol(old_tree) == ts_subtree_symbol(new_tree) + old_start != new_start || + old_symbol == ts_builtin_sym_error || + old_size != new_size || + old_state == TS_TREE_STATE_NONE || + new_state == TS_TREE_STATE_NONE || + ((old_state == ERROR_STATE) != (new_state == ERROR_STATE)) || + old_error_cost != new_error_cost || + old_has_external_tokens != new_has_external_tokens || + ts_subtree_has_changes(old_tree) || + ( + old_has_external_tokens && + !ts_subtree_external_scanner_state_eq(old_iter->prev_external_token, 
new_iter->prev_external_token) + ) ) { - if (old_start == new_start && - !ts_subtree_has_changes(old_tree) && - ts_subtree_symbol(old_tree) != ts_builtin_sym_error && - ts_subtree_size(old_tree).bytes == ts_subtree_size(new_tree).bytes && - ts_subtree_parse_state(old_tree) != TS_TREE_STATE_NONE && - ts_subtree_parse_state(new_tree) != TS_TREE_STATE_NONE && - (ts_subtree_parse_state(old_tree) == ERROR_STATE) == - (ts_subtree_parse_state(new_tree) == ERROR_STATE)) { - return IteratorMatches; - } else { - return IteratorMayDiffer; - } + return IteratorMayDiffer; } - return IteratorDiffers; + return IteratorMatches; } #ifdef DEBUG_GET_CHANGED_RANGES @@ -348,8 +370,8 @@ static inline void iterator_print_state(Iterator *self) { "(%-25s %s\t depth:%u [%u, %u] - [%u, %u])", name, self->in_padding ? "(p)" : " ", self->visible_depth, - start.row + 1, start.column, - end.row + 1, end.column + start.row, start.column, + end.row, end.column ); } #endif @@ -380,7 +402,7 @@ unsigned ts_subtree_get_changed_ranges( do { #ifdef DEBUG_GET_CHANGED_RANGES - printf("At [%-2u, %-2u] Compare ", position.extent.row + 1, position.extent.column); + printf("At [%-2u, %-2u] Compare ", position.extent.row, position.extent.column); iterator_print_state(&old_iter); printf("\tvs\t"); iterator_print_state(&new_iter); @@ -475,9 +497,9 @@ unsigned ts_subtree_get_changed_ranges( // Keep track of the current position in the included range differences // array in order to avoid scanning the entire array on each iteration. while (included_range_difference_index < included_range_differences->size) { - const TSRange *range = &included_range_differences->contents[ + const TSRange *range = array_get(included_range_differences, included_range_difference_index - ]; + ); if (range->end_byte <= position.bytes) { included_range_difference_index++; } else { diff --git a/lib/src/language.c b/lib/src/language.c index b341a670..2dce6998 100644 --- a/lib/src/language.c +++ b/lib/src/language.c @@ -186,7 +186,7 @@ TSSymbol ts_language_symbol_for_name( uint32_t length, bool is_named ) { - if (!strncmp(string, "ERROR", length)) return ts_builtin_sym_error; + if (is_named && !strncmp(string, "ERROR", length)) return ts_builtin_sym_error; uint16_t count = (uint16_t)ts_language_symbol_count(self); for (TSSymbol i = 0; i < count; i++) { TSSymbolMetadata metadata = ts_language_symbol_metadata(self, i); diff --git a/lib/src/parser.c b/lib/src/parser.c index 7aac259e..d0a2d2ca 100644 --- a/lib/src/parser.c +++ b/lib/src/parser.c @@ -193,7 +193,7 @@ static bool ts_parser__breakdown_top_of_stack( did_break_down = true; pending = false; for (uint32_t i = 0; i < pop.size; i++) { - StackSlice slice = pop.contents[i]; + StackSlice slice = *array_get(&pop, i); TSStateId state = ts_stack_state(self->stack, slice.version); Subtree parent = *array_front(&slice.subtrees); @@ -212,7 +212,7 @@ static bool ts_parser__breakdown_top_of_stack( } for (uint32_t j = 1; j < slice.subtrees.size; j++) { - Subtree tree = slice.subtrees.contents[j]; + Subtree tree = *array_get(&slice.subtrees, j); ts_stack_push(self->stack, slice.version, tree, false, state); } @@ -556,27 +556,29 @@ static Subtree ts_parser__lex( external_scanner_state_len ); - // When recovering from an error, ignore any zero-length external tokens - // unless they have changed the external scanner's state. 
This helps to - // avoid infinite loops which could otherwise occur, because the lexer is - // looking for any possible token, instead of looking for the specific set of - // tokens that are valid in some parse state. + // Avoid infinite loops caused by the external scanner returning empty tokens. + // Empty tokens are needed in some circumstances, e.g. indent/dedent tokens + // in Python. Ignore the following classes of empty tokens: // - // Note that it's possible that the token end position may be *before* the - // original position of the lexer because of the way that tokens are positioned - // at included range boundaries: when a token is terminated at the start of - // an included range, it is marked as ending at the *end* of the preceding - // included range. + // * Tokens produced during error recovery. When recovering from an error, + // all tokens are allowed, so it's easy to accidentally return unwanted + // empty tokens. + // * Tokens that are marked as 'extra' in the grammar. These don't change + // the parse state, so they would definitely cause an infinite loop. if ( self->lexer.token_end_position.bytes <= current_position.bytes && - (error_mode || !ts_stack_has_advanced_since_error(self->stack, version)) && !external_scanner_state_changed ) { - LOG( - "ignore_empty_external_token symbol:%s", - SYM_NAME(self->language->external_scanner.symbol_map[self->lexer.data.result_symbol]) - ) - found_token = false; + TSSymbol symbol = self->language->external_scanner.symbol_map[self->lexer.data.result_symbol]; + TSStateId next_parse_state = ts_language_next_state(self->language, parse_state, symbol); + bool token_is_extra = (next_parse_state == parse_state); + if (error_mode || !ts_stack_has_advanced_since_error(self->stack, version) || token_is_extra) { + LOG( + "ignore_empty_external_token symbol:%s", + SYM_NAME(self->language->external_scanner.symbol_map[self->lexer.data.result_symbol]) + ); + found_token = false; + } } } @@ -947,20 +949,22 @@ static StackVersion ts_parser__reduce( // children. StackSliceArray pop = ts_stack_pop_count(self->stack, version, count); uint32_t removed_version_count = 0; + uint32_t halted_version_count = ts_stack_halted_version_count(self->stack); for (uint32_t i = 0; i < pop.size; i++) { - StackSlice slice = pop.contents[i]; + StackSlice slice = *array_get(&pop, i); StackVersion slice_version = slice.version - removed_version_count; // This is where new versions are added to the parse stack. The versions // will all be sorted and truncated at the end of the outer parsing loop. // Allow the maximum version count to be temporarily exceeded, but only // by a limited threshold. - if (slice_version > MAX_VERSION_COUNT + MAX_VERSION_COUNT_OVERFLOW) { + if (slice_version > MAX_VERSION_COUNT + MAX_VERSION_COUNT_OVERFLOW + halted_version_count) { ts_stack_remove_version(self->stack, slice_version); ts_subtree_array_delete(&self->tree_pool, &slice.subtrees); removed_version_count++; while (i + 1 < pop.size) { - StackSlice next_slice = pop.contents[i + 1]; + LOG("aborting reduce with too many versions") + StackSlice next_slice = *array_get(&pop, i + 1); if (next_slice.version != slice.version) break; ts_subtree_array_delete(&self->tree_pool, &next_slice.subtrees); i++; @@ -983,7 +987,7 @@ static StackVersion ts_parser__reduce( // choose one of the arrays of trees to be the parent node's children, and // delete the rest of the tree arrays. 
while (i + 1 < pop.size) { - StackSlice next_slice = pop.contents[i + 1]; + StackSlice next_slice = *array_get(&pop, i + 1); if (next_slice.version != slice.version) break; i++; @@ -1025,7 +1029,7 @@ static StackVersion ts_parser__reduce( // were previously on top of the stack. ts_stack_push(self->stack, slice_version, ts_subtree_from_mut(parent), false, next_state); for (uint32_t j = 0; j < self->trailing_extras.size; j++) { - ts_stack_push(self->stack, slice_version, self->trailing_extras.contents[j], false, next_state); + ts_stack_push(self->stack, slice_version, *array_get(&self->trailing_extras, j), false, next_state); } for (StackVersion j = 0; j < slice_version; j++) { @@ -1053,11 +1057,11 @@ static void ts_parser__accept( StackSliceArray pop = ts_stack_pop_all(self->stack, version); for (uint32_t i = 0; i < pop.size; i++) { - SubtreeArray trees = pop.contents[i].subtrees; + SubtreeArray trees = array_get(&pop, i)->subtrees; Subtree root = NULL_SUBTREE; for (uint32_t j = trees.size - 1; j + 1 > 0; j--) { - Subtree tree = trees.contents[j]; + Subtree tree = *array_get(&trees, j); if (!ts_subtree_extra(tree)) { ts_assert(!tree.data.is_inline); uint32_t child_count = ts_subtree_child_count(tree); @@ -1092,7 +1096,7 @@ static void ts_parser__accept( } } - ts_stack_remove_version(self->stack, pop.contents[0].version); + ts_stack_remove_version(self->stack, array_get(&pop, 0)->version); ts_stack_halt(self->stack, version); } @@ -1158,7 +1162,7 @@ static bool ts_parser__do_all_potential_reductions( StackVersion reduction_version = STACK_VERSION_NONE; for (uint32_t j = 0; j < self->reduce_actions.size; j++) { - ReduceAction action = self->reduce_actions.contents[j]; + ReduceAction action = *array_get(&self->reduce_actions, j); reduction_version = ts_parser__reduce( self, version, action.symbol, action.count, @@ -1196,7 +1200,7 @@ static bool ts_parser__recover_to_state( StackVersion previous_version = STACK_VERSION_NONE; for (unsigned i = 0; i < pop.size; i++) { - StackSlice slice = pop.contents[i]; + StackSlice slice = *array_get(&pop, i); if (slice.version == previous_version) { ts_subtree_array_delete(&self->tree_pool, &slice.subtrees); @@ -1214,12 +1218,12 @@ static bool ts_parser__recover_to_state( SubtreeArray error_trees = ts_stack_pop_error(self->stack, slice.version); if (error_trees.size > 0) { ts_assert(error_trees.size == 1); - Subtree error_tree = error_trees.contents[0]; + Subtree error_tree = *array_get(&error_trees, 0); uint32_t error_child_count = ts_subtree_child_count(error_tree); if (error_child_count > 0) { array_splice(&slice.subtrees, 0, 0, error_child_count, ts_subtree_children(error_tree)); for (unsigned j = 0; j < error_child_count; j++) { - ts_subtree_retain(slice.subtrees.contents[j]); + ts_subtree_retain(*array_get(&slice.subtrees, j)); } } ts_subtree_array_delete(&self->tree_pool, &error_trees); @@ -1235,7 +1239,7 @@ static bool ts_parser__recover_to_state( } for (unsigned j = 0; j < self->trailing_extras.size; j++) { - Subtree tree = self->trailing_extras.contents[j]; + Subtree tree = *array_get(&self->trailing_extras, j); ts_stack_push(self->stack, slice.version, tree, false, goal_state); } @@ -1271,7 +1275,7 @@ static void ts_parser__recover( // if the current lookahead token would be valid in that state. 
if (summary && !ts_subtree_is_error(lookahead)) { for (unsigned i = 0; i < summary->size; i++) { - StackSummaryEntry entry = summary->contents[i]; + StackSummaryEntry entry = *array_get(summary, i); if (entry.state == ERROR_STATE) continue; if (entry.position.bytes == position.bytes) continue; @@ -1316,10 +1320,23 @@ static void ts_parser__recover( // and subsequently halted. Remove those versions. for (unsigned i = previous_version_count; i < ts_stack_version_count(self->stack); i++) { if (!ts_stack_is_active(self->stack, i)) { + LOG("removed paused version:%u", i); ts_stack_remove_version(self->stack, i--); + LOG_STACK(); } } + // If the parser is still in the error state at the end of the file, just wrap everything + // in an ERROR node and terminate. + if (ts_subtree_is_eof(lookahead)) { + LOG("recover_eof"); + SubtreeArray children = array_new(); + Subtree parent = ts_subtree_new_error_node(&children, false, self->language); + ts_stack_push(self->stack, version, parent, false, 1); + ts_parser__accept(self, version, lookahead); + return; + } + // If strategy 1 succeeded, a new stack version will have been created which is able to handle // the current lookahead token. Now, in addition, try strategy 2 described above: skip the // current lookahead token by wrapping it in an ERROR node. @@ -1340,17 +1357,6 @@ static void ts_parser__recover( return; } - // If the parser is still in the error state at the end of the file, just wrap everything - // in an ERROR node and terminate. - if (ts_subtree_is_eof(lookahead)) { - LOG("recover_eof"); - SubtreeArray children = array_new(); - Subtree parent = ts_subtree_new_error_node(&children, false, self->language); - ts_stack_push(self->stack, version, parent, false, 1); - ts_parser__accept(self, version, lookahead); - return; - } - // Do not recover if the result would clearly be worse than some existing stack version. unsigned new_cost = current_error_cost + ERROR_COST_PER_SKIPPED_TREE + @@ -1396,18 +1402,18 @@ static void ts_parser__recover( // arbitrarily and discard the rest. if (pop.size > 1) { for (unsigned i = 1; i < pop.size; i++) { - ts_subtree_array_delete(&self->tree_pool, &pop.contents[i].subtrees); + ts_subtree_array_delete(&self->tree_pool, &array_get(&pop, i)->subtrees); } - while (ts_stack_version_count(self->stack) > pop.contents[0].version + 1) { - ts_stack_remove_version(self->stack, pop.contents[0].version + 1); + while (ts_stack_version_count(self->stack) > array_get(&pop, 0)->version + 1) { + ts_stack_remove_version(self->stack, array_get(&pop, 0)->version + 1); } } - ts_stack_renumber_version(self->stack, pop.contents[0].version, version); - array_push(&pop.contents[0].subtrees, ts_subtree_from_mut(error_repeat)); + ts_stack_renumber_version(self->stack, array_get(&pop, 0)->version, version); + array_push(&array_get(&pop, 0)->subtrees, ts_subtree_from_mut(error_repeat)); error_repeat = ts_subtree_new_node( ts_builtin_sym_error_repeat, - &pop.contents[0].subtrees, + &array_get(&pop, 0)->subtrees, 0, self->language ); @@ -1534,7 +1540,7 @@ static bool ts_parser__check_progress(TSParser *self, Subtree *lookahead, const if (self->operation_count >= OP_COUNT_PER_PARSER_TIMEOUT_CHECK) { self->operation_count = 0; } - if (self->parse_options.progress_callback && position != NULL) { + if (position != NULL) { self->parse_state.current_byte_offset = *position; self->parse_state.has_error = self->has_error; } @@ -1616,6 +1622,7 @@ static bool ts_parser__advance( // an ambiguous state. 
REDUCE actions always create a new stack // version, whereas SHIFT actions update the existing stack version // and terminate this loop. + bool did_reduce = false; StackVersion last_reduction_version = STACK_VERSION_NONE; for (uint32_t i = 0; i < table_entry.action_count; i++) { TSParseAction action = table_entry.actions[i]; @@ -1651,6 +1658,7 @@ static bool ts_parser__advance( action.reduce.dynamic_precedence, action.reduce.production_id, is_fragile, end_of_non_terminal_extra ); + did_reduce = true; if (reduction_version != STACK_VERSION_NONE) { last_reduction_version = reduction_version; } @@ -1702,9 +1710,12 @@ static bool ts_parser__advance( continue; } - // A non-terminal extra rule was reduced and merged into an existing - // stack version. This version can be discarded. - if (!lookahead.ptr) { + // A reduction was performed, but was merged into an existing stack version. + // This version can be discarded. + if (did_reduce) { + if (lookahead.ptr) { + ts_subtree_release(&self->tree_pool, lookahead); + } ts_stack_halt(self->stack, version); return true; } @@ -1753,7 +1764,7 @@ static bool ts_parser__advance( // versions that exist. If some other version advances successfully, then // this version can simply be removed. But if all versions end up paused, // then error recovery is needed. - LOG("detect_error"); + LOG("detect_error lookahead:%s", TREE_NAME(lookahead)); ts_stack_pause(self->stack, version, lookahead); return true; } @@ -1842,6 +1853,7 @@ static unsigned ts_parser__condense_stack(TSParser *self) { has_unpaused_version = true; } else { ts_stack_remove_version(self->stack, i); + made_changes = true; i--; n--; } @@ -1877,9 +1889,9 @@ static bool ts_parser__balance_subtree(TSParser *self) { return false; } - MutableSubtree tree = self->tree_pool.tree_stack.contents[ + MutableSubtree tree = *array_get(&self->tree_pool.tree_stack, self->tree_pool.tree_stack.size - 1 - ]; + ); if (tree.ptr->repeat_depth > 0) { Subtree child1 = ts_subtree_children(tree)[0]; @@ -2128,7 +2140,7 @@ TSTree *ts_parser_parse( LOG("parse_after_edit"); LOG_TREE(self->old_tree); for (unsigned i = 0; i < self->included_range_differences.size; i++) { - TSRange *range = &self->included_range_differences.contents[i]; + TSRange *range = array_get(&self->included_range_differences, i); LOG("different_included_range %u - %u", range->start_byte, range->end_byte); } } else { @@ -2185,7 +2197,7 @@ TSTree *ts_parser_parse( } while (self->included_range_difference_index < self->included_range_differences.size) { - TSRange *range = &self->included_range_differences.contents[self->included_range_difference_index]; + TSRange *range = array_get(&self->included_range_differences, self->included_range_difference_index); if (range->end_byte <= position) { self->included_range_difference_index++; } else { @@ -2226,6 +2238,8 @@ TSTree *ts_parser_parse_with_options( self->parse_options = parse_options; self->parse_state.payload = parse_options.payload; TSTree *result = ts_parser_parse(self, old_tree, input); + // Reset parser options before further parse calls. 
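// [Why the options reset above matters] The new comment above (its assignment appears just
// below) makes the options passed to ts_parser_parse_with_options apply to that one call, so a
// later plain ts_parser_parse / ts_parser_parse_string on the same parser no longer reuses a
// stale progress callback. A usage sketch, not taken from this patch: tree_sitter_c() is a
// hypothetical grammar entry point, and the TSInput / TSParseOptions / TSParseState field names
// are assumed to match the public tree_sitter/api.h.

#include <string.h>
#include <tree_sitter/api.h>

const TSLanguage *tree_sitter_c(void);  // hypothetical grammar

static const char *read_all(void *payload, uint32_t byte_index, TSPoint position, uint32_t *bytes_read) {
  (void)position;
  const char *text = payload;
  uint32_t length = (uint32_t)strlen(text);
  if (byte_index >= length) { *bytes_read = 0; return ""; }
  *bytes_read = length - byte_index;
  return text + byte_index;
}

// Assumed contract: returning true asks the parser to stop early.
static bool stop_past_one_kib(TSParseState *state) {
  return state->current_byte_offset > 1024;
}

void parse_twice(TSParser *parser, const char *source) {
  TSInput input = {.payload = (void *)source, .read = read_all, .encoding = TSInputEncodingUTF8};
  TSParseOptions options = {.progress_callback = stop_past_one_kib};
  TSTree *first = ts_parser_parse_with_options(parser, NULL, input, options);
  // With the reset in place, this second call runs with no progress callback at all instead of
  // silently inheriting stop_past_one_kib from the previous call.
  TSTree *second = ts_parser_parse_string(parser, NULL, source, (uint32_t)strlen(source));
  if (first) ts_tree_delete(first);
  if (second) ts_tree_delete(second);
}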
+ self->parse_options = (TSParseOptions) {0}; return result; } diff --git a/lib/src/parser.h b/lib/src/parser.h index cdbe64cc..858107de 100644 --- a/lib/src/parser.h +++ b/lib/src/parser.h @@ -18,7 +18,6 @@ typedef uint16_t TSStateId; typedef uint16_t TSSymbol; typedef uint16_t TSFieldId; typedef struct TSLanguage TSLanguage; -typedef struct TSLanguageMetadata TSLanguageMetadata; typedef struct TSLanguageMetadata { uint8_t major_version; uint8_t minor_version; diff --git a/lib/src/portable/endian.h b/lib/src/portable/endian.h index 6aa4438d..a6560826 100644 --- a/lib/src/portable/endian.h +++ b/lib/src/portable/endian.h @@ -18,16 +18,24 @@ #if defined(HAVE_ENDIAN_H) || \ defined(__linux__) || \ defined(__GNU__) || \ + defined(__HAIKU__) || \ + defined(__illumos__) || \ + defined(__NetBSD__) || \ defined(__OpenBSD__) || \ defined(__CYGWIN__) || \ defined(__MSYS__) || \ - defined(__EMSCRIPTEN__) + defined(__EMSCRIPTEN__) || \ + defined(__wasi__) || \ + defined(__wasm__) + +#if defined(__NetBSD__) +#define _NETBSD_SOURCE 1 +#endif # include <endian.h> #elif defined(HAVE_SYS_ENDIAN_H) || \ defined(__FreeBSD__) || \ - defined(__NetBSD__) || \ defined(__DragonFly__) # include <sys/endian.h> diff --git a/lib/src/query.c b/lib/src/query.c index 05eddef5..6c514887 100644 --- a/lib/src/query.c +++ b/lib/src/query.c @@ -1,3 +1,12 @@ +/* + * On NetBSD, defining standard requirements like this removes symbols + * from the namespace; however, we need non-standard symbols for + * endian.h. + */ +#if defined(__NetBSD__) && defined(_POSIX_C_SOURCE) +#undef _POSIX_C_SOURCE +#endif + #include "tree_sitter/api.h" #include "./alloc.h" #include "./array.h" @@ -428,26 +437,26 @@ static CaptureListPool capture_list_pool_new(void) { static void capture_list_pool_reset(CaptureListPool *self) { for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) { // This invalid size means that the list is not in use. - self->list.contents[i].size = UINT32_MAX; + array_get(&self->list, i)->size = UINT32_MAX; } self->free_capture_list_count = self->list.size; } static void capture_list_pool_delete(CaptureListPool *self) { for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) { - array_delete(&self->list.contents[i]); + array_delete(array_get(&self->list, i)); } array_delete(&self->list); } static const CaptureList *capture_list_pool_get(const CaptureListPool *self, uint16_t id) { if (id >= self->list.size) return &self->empty_list; - return &self->list.contents[id]; + return array_get(&self->list, id); } static CaptureList *capture_list_pool_get_mut(CaptureListPool *self, uint16_t id) { ts_assert(id < self->list.size); - return &self->list.contents[id]; + return array_get(&self->list, id); } static bool capture_list_pool_is_empty(const CaptureListPool *self) { @@ -460,8 +469,8 @@ static uint16_t capture_list_pool_acquire(CaptureListPool *self) { // First see if any already allocated capture list is currently unused.
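// [How the capture list pool tracks free lists] In the hunks above, and in the acquire path that
// continues below, a list whose `size` field is UINT32_MAX is "not in use": release stamps that
// sentinel, and acquire scans for it, clears the list, and hands the slot back out. A simplified,
// self-contained model of that scheme (illustration only; the real pool also keeps `empty_list`,
// a maximum list count, and the query-specific element types):

#include <stdint.h>

#define POOL_SIZE 8
#define FREE_SENTINEL UINT32_MAX

typedef struct { uint32_t size; /* elements elided */ } FakeList;
typedef struct { FakeList lists[POOL_SIZE]; } FakePool;

static void fake_pool_release(FakePool *pool, uint16_t id) {
  pool->lists[id].size = FREE_SENTINEL;  // mark the slot free without freeing its storage
}

static int fake_pool_acquire(FakePool *pool) {
  for (uint16_t i = 0; i < POOL_SIZE; i++) {
    if (pool->lists[i].size == FREE_SENTINEL) {
      pool->lists[i].size = 0;  // reuse the slot's existing allocation
      return i;
    }
  }
  return -1;  // pool exhausted; the real code can grow the pool up to a limit
}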
if (self->free_capture_list_count > 0) { for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) { - if (self->list.contents[i].size == UINT32_MAX) { - array_clear(&self->list.contents[i]); + if (array_get(&self->list, i)->size == UINT32_MAX) { + array_clear(array_get(&self->list, i)); self->free_capture_list_count--; return i; } @@ -482,7 +491,7 @@ static uint16_t capture_list_pool_acquire(CaptureListPool *self) { static void capture_list_pool_release(CaptureListPool *self, uint16_t id) { if (id >= self->list.size) return; - self->list.contents[id].size = UINT32_MAX; + array_get(&self->list, id)->size = UINT32_MAX; self->free_capture_list_count++; } @@ -765,10 +774,10 @@ static int symbol_table_id_for_name( uint32_t length ) { for (unsigned i = 0; i < self->slices.size; i++) { - Slice slice = self->slices.contents[i]; + Slice slice = *array_get(&self->slices, i); if ( slice.length == length && - !strncmp(&self->characters.contents[slice.offset], name, length) + !strncmp(array_get(&self->characters, slice.offset), name, length) ) return i; } return -1; @@ -779,9 +788,9 @@ static const char *symbol_table_name_for_id( uint16_t id, uint32_t *length ) { - Slice slice = self->slices.contents[id]; + Slice slice = *(array_get(&self->slices,id)); *length = slice.length; - return &self->characters.contents[slice.offset]; + return array_get(&self->characters, slice.offset); } static uint16_t symbol_table_insert_name( @@ -796,8 +805,8 @@ static uint16_t symbol_table_insert_name( .length = length, }; array_grow_by(&self->characters, length + 1); - memcpy(&self->characters.contents[slice.offset], name, length); - self->characters.contents[self->characters.size - 1] = 0; + memcpy(array_get(&self->characters, slice.offset), name, length); + *array_get(&self->characters, self->characters.size - 1) = 0; array_push(&self->slices, slice); return self->slices.size - 1; } @@ -919,35 +928,26 @@ static unsigned analysis_state__recursion_depth(const AnalysisState *self) { return result; } -static inline int analysis_state__compare_position( - AnalysisState *const *self, - AnalysisState *const *other -) { - for (unsigned i = 0; i < (*self)->depth; i++) { - if (i >= (*other)->depth) return -1; - if ((*self)->stack[i].child_index < (*other)->stack[i].child_index) return -1; - if ((*self)->stack[i].child_index > (*other)->stack[i].child_index) return 1; - } - if ((*self)->depth < (*other)->depth) return 1; - if ((*self)->step_index < (*other)->step_index) return -1; - if ((*self)->step_index > (*other)->step_index) return 1; - return 0; -} - static inline int analysis_state__compare( AnalysisState *const *self, AnalysisState *const *other ) { - int result = analysis_state__compare_position(self, other); - if (result != 0) return result; + if ((*self)->depth < (*other)->depth) return 1; for (unsigned i = 0; i < (*self)->depth; i++) { - if ((*self)->stack[i].parent_symbol < (*other)->stack[i].parent_symbol) return -1; - if ((*self)->stack[i].parent_symbol > (*other)->stack[i].parent_symbol) return 1; - if ((*self)->stack[i].parse_state < (*other)->stack[i].parse_state) return -1; - if ((*self)->stack[i].parse_state > (*other)->stack[i].parse_state) return 1; - if ((*self)->stack[i].field_id < (*other)->stack[i].field_id) return -1; - if ((*self)->stack[i].field_id > (*other)->stack[i].field_id) return 1; + if (i >= (*other)->depth) return -1; + AnalysisStateEntry s1 = (*self)->stack[i]; + AnalysisStateEntry s2 = (*other)->stack[i]; + if (s1.child_index < s2.child_index) return -1; + if (s1.child_index > 
s2.child_index) return 1; + if (s1.parent_symbol < s2.parent_symbol) return -1; + if (s1.parent_symbol > s2.parent_symbol) return 1; + if (s1.parse_state < s2.parse_state) return -1; + if (s1.parse_state > s2.parse_state) return 1; + if (s1.field_id < s2.field_id) return -1; + if (s1.field_id > s2.field_id) return 1; } + if ((*self)->step_index < (*other)->step_index) return -1; + if ((*self)->step_index > (*other)->step_index) return 1; return 0; } @@ -1109,23 +1109,23 @@ static inline bool ts_query__pattern_map_search( while (size > 1) { uint32_t half_size = size / 2; uint32_t mid_index = base_index + half_size; - TSSymbol mid_symbol = self->steps.contents[ - self->pattern_map.contents[mid_index].step_index - ].symbol; + TSSymbol mid_symbol = array_get(&self->steps, + array_get(&self->pattern_map, mid_index)->step_index + )->symbol; if (needle > mid_symbol) base_index = mid_index; size -= half_size; } - TSSymbol symbol = self->steps.contents[ - self->pattern_map.contents[base_index].step_index - ].symbol; + TSSymbol symbol = array_get(&self->steps, + array_get(&self->pattern_map, base_index)->step_index + )->symbol; if (needle > symbol) { base_index++; if (base_index < self->pattern_map.size) { - symbol = self->steps.contents[ - self->pattern_map.contents[base_index].step_index - ].symbol; + symbol = array_get(&self->steps, + array_get(&self->pattern_map, base_index)->step_index + )->symbol; } } @@ -1148,9 +1148,9 @@ static inline void ts_query__pattern_map_insert( // initiated first, which allows the ordering of the states array // to be maintained more efficiently. while (index < self->pattern_map.size) { - PatternEntry *entry = &self->pattern_map.contents[index]; + PatternEntry *entry = array_get(&self->pattern_map, index); if ( - self->steps.contents[entry->step_index].symbol == symbol && + array_get(&self->steps, entry->step_index)->symbol == symbol && entry->pattern_index < new_entry.pattern_index ) { index++; @@ -1183,11 +1183,11 @@ static void ts_query__perform_analysis( #ifdef DEBUG_ANALYZE_QUERY printf("Iteration: %u. Final step indices:", iteration); for (unsigned j = 0; j < analysis->final_step_indices.size; j++) { - printf(" %4u", analysis->final_step_indices.contents[j]); + printf(" %4u", *array_get(&analysis->final_step_indices, j)); } printf("\n"); for (unsigned j = 0; j < analysis->states.size; j++) { - AnalysisState *state = analysis->states.contents[j]; + AnalysisState *state = *array_get(&analysis->states, j); printf(" %3u: step: %u, stack: [", j, state->step_index); for (unsigned k = 0; k < state->depth; k++) { printf( @@ -1230,7 +1230,7 @@ static void ts_query__perform_analysis( analysis_state_set__clear(&analysis->next_states, &analysis->state_pool); for (unsigned j = 0; j < analysis->states.size; j++) { - AnalysisState * const state = analysis->states.contents[j]; + AnalysisState * const state = *array_get(&analysis->states, j); // For efficiency, it's important to avoid processing the same analysis state more // than once. To achieve this, keep the states in order of ascending position within @@ -1238,7 +1238,7 @@ static void ts_query__perform_analysis( // the states that have made the least progress. Avoid advancing states that have already // made more progress. 
if (analysis->next_states.size > 0) { - int comparison = analysis_state__compare_position( + int comparison = analysis_state__compare( &state, array_back(&analysis->next_states) ); @@ -1253,7 +1253,7 @@ static void ts_query__perform_analysis( analysis_state_set__push( &analysis->next_states, &analysis->state_pool, - analysis->states.contents[j] + *array_get(&analysis->states, j) ); j++; } @@ -1265,12 +1265,12 @@ static void ts_query__perform_analysis( const TSSymbol parent_symbol = analysis_state__top(state)->parent_symbol; const TSFieldId parent_field_id = analysis_state__top(state)->field_id; const unsigned child_index = analysis_state__top(state)->child_index; - const QueryStep * const step = &self->steps.contents[state->step_index]; + const QueryStep * const step = array_get(&self->steps, state->step_index); unsigned subgraph_index, exists; array_search_sorted_by(subgraphs, .symbol, parent_symbol, &subgraph_index, &exists); if (!exists) continue; - const AnalysisSubgraph *subgraph = &subgraphs->contents[subgraph_index]; + const AnalysisSubgraph *subgraph = array_get(subgraphs, subgraph_index); // Follow every possible path in the parse table, but only visit states that // are part of the subgraph for the current symbol. @@ -1306,7 +1306,8 @@ static void ts_query__perform_analysis( &node_index, &exists ); while (node_index < subgraph->nodes.size) { - AnalysisSubgraphNode *node = &subgraph->nodes.contents[node_index++]; + AnalysisSubgraphNode *node = array_get(&subgraph->nodes, node_index); + node_index++; if (node->state != successor.state || node->child_index != successor.child_index) break; // Use the subgraph to determine what alias and field will eventually be applied @@ -1339,7 +1340,12 @@ static void ts_query__perform_analysis( // Determine if this hypothetical child node would match the current step // of the query pattern. bool does_match = false; - if (visible_symbol) { + + // ERROR nodes can appear anywhere, so if the step is + // looking for an ERROR node, consider it potentially matchable. + if (step->symbol == ts_builtin_sym_error) { + does_match = true; + } else if (visible_symbol) { does_match = true; if (step->symbol == WILDCARD_SYMBOL) { if ( @@ -1407,7 +1413,7 @@ static void ts_query__perform_analysis( if (does_match) { for (;;) { next_state.step_index++; - next_step = &self->steps.contents[next_state.step_index]; + next_step = array_get(&self->steps, next_state.step_index); if ( next_step->depth == PATTERN_DONE_MARKER || next_step->depth <= step->depth @@ -1431,7 +1437,7 @@ static void ts_query__perform_analysis( // record that matching can terminate at this step of the pattern. Otherwise, // add this state to the list of states to process on the next iteration. 
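// [Why ERROR steps are now treated as matchable] The `does_match` change above lets query
// analysis treat a step that targets an ERROR node as satisfiable anywhere, since ERROR nodes can
// appear at any position in a tree and are not reachable through the grammar's parse table.
// Patterns that reference ERROR nodes, including nested forms such as (some_node (ERROR) @e), are
// what this analysis pass walks; the sketch below only shows the query-API mechanics around such
// a pattern. tree_sitter_c() is a hypothetical grammar entry point; the rest is the public API.

#include <stdio.h>
#include <string.h>
#include <tree_sitter/api.h>

const TSLanguage *tree_sitter_c(void);  // hypothetical

void report_errors(TSNode root) {
  const char *source = "(ERROR) @syntax-error";
  uint32_t error_offset = 0;
  TSQueryError error_type = TSQueryErrorNone;
  TSQuery *query = ts_query_new(tree_sitter_c(), source, (uint32_t)strlen(source),
                                &error_offset, &error_type);
  if (!query) return;

  TSQueryCursor *cursor = ts_query_cursor_new();
  ts_query_cursor_exec(cursor, query, root);
  TSQueryMatch match;
  while (ts_query_cursor_next_match(cursor, &match)) {
    for (uint16_t i = 0; i < match.capture_count; i++) {
      TSNode node = match.captures[i].node;
      printf("error at byte %u\n", ts_node_start_byte(node));
    }
  }
  ts_query_cursor_delete(cursor);
  ts_query_delete(query);
}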
if (!next_step->is_dead_end) { - bool did_finish_pattern = self->steps.contents[next_state.step_index].depth != step->depth; + bool did_finish_pattern = array_get(&self->steps, next_state.step_index)->depth != step->depth; if (did_finish_pattern) { array_insert_sorted_by(&analysis->finished_parent_symbols, , state->root_symbol); } else if (next_state.depth == 0) { @@ -1451,7 +1457,7 @@ static void ts_query__perform_analysis( next_step->alternative_index > next_state.step_index ) { next_state.step_index = next_step->alternative_index; - next_step = &self->steps.contents[next_state.step_index]; + next_step = array_get(&self->steps, next_state.step_index); } else { break; } @@ -1469,9 +1475,9 @@ static void ts_query__perform_analysis( static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { Array(uint16_t) non_rooted_pattern_start_steps = array_new(); for (unsigned i = 0; i < self->pattern_map.size; i++) { - PatternEntry *pattern = &self->pattern_map.contents[i]; + PatternEntry *pattern = array_get(&self->pattern_map, i); if (!pattern->is_rooted) { - QueryStep *step = &self->steps.contents[pattern->step_index]; + QueryStep *step = array_get(&self->steps, pattern->step_index); if (step->symbol != WILDCARD_SYMBOL) { array_push(&non_rooted_pattern_start_steps, i); } @@ -1483,7 +1489,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // captures, and record the indices of all of the steps that have child steps. Array(uint32_t) parent_step_indices = array_new(); for (unsigned i = 0; i < self->steps.size; i++) { - QueryStep *step = &self->steps.contents[i]; + QueryStep *step = array_get(&self->steps, i); if (step->depth == PATTERN_DONE_MARKER) { step->parent_pattern_guaranteed = true; step->root_pattern_guaranteed = true; @@ -1494,7 +1500,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { bool is_wildcard = step->symbol == WILDCARD_SYMBOL; step->contains_captures = step->capture_ids[0] != NONE; for (unsigned j = i + 1; j < self->steps.size; j++) { - QueryStep *next_step = &self->steps.contents[j]; + QueryStep *next_step = array_get(&self->steps, j); if ( next_step->depth == PATTERN_DONE_MARKER || next_step->depth <= step->depth @@ -1524,8 +1530,8 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // parent. 
AnalysisSubgraphArray subgraphs = array_new(); for (unsigned i = 0; i < parent_step_indices.size; i++) { - uint32_t parent_step_index = parent_step_indices.contents[i]; - TSSymbol parent_symbol = self->steps.contents[parent_step_index].symbol; + uint32_t parent_step_index = *array_get(&parent_step_indices, i); + TSSymbol parent_symbol = array_get(&self->steps, parent_step_index)->symbol; AnalysisSubgraph subgraph = { .symbol = parent_symbol }; array_insert_sorted_by(&subgraphs, .symbol, subgraph); } @@ -1567,7 +1573,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { &exists ); if (exists) { - AnalysisSubgraph *subgraph = &subgraphs.contents[subgraph_index]; + AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index); if (subgraph->nodes.size == 0 || array_back(&subgraph->nodes)->state != state) { array_push(&subgraph->nodes, ((AnalysisSubgraphNode) { .state = state, @@ -1604,7 +1610,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { &exists ); if (exists) { - AnalysisSubgraph *subgraph = &subgraphs.contents[subgraph_index]; + AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index); if ( subgraph->start_states.size == 0 || *array_back(&subgraph->start_states) != state @@ -1621,7 +1627,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // from the end states using the predecessor map. Array(AnalysisSubgraphNode) next_nodes = array_new(); for (unsigned i = 0; i < subgraphs.size; i++) { - AnalysisSubgraph *subgraph = &subgraphs.contents[i]; + AnalysisSubgraph *subgraph = array_get(&subgraphs, i); if (subgraph->nodes.size == 0) { array_delete(&subgraph->start_states); array_erase(&subgraphs, i); @@ -1662,16 +1668,16 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { #ifdef DEBUG_ANALYZE_QUERY printf("\nSubgraphs:\n"); for (unsigned i = 0; i < subgraphs.size; i++) { - AnalysisSubgraph *subgraph = &subgraphs.contents[i]; + AnalysisSubgraph *subgraph = array_get(&subgraphs, i); printf(" %u, %s:\n", subgraph->symbol, ts_language_symbol_name(self->language, subgraph->symbol)); for (unsigned j = 0; j < subgraph->start_states.size; j++) { printf( " {state: %u}\n", - subgraph->start_states.contents[j] + *array_get(&subgraph->start_states, j) ); } for (unsigned j = 0; j < subgraph->nodes.size; j++) { - AnalysisSubgraphNode *node = &subgraph->nodes.contents[j]; + AnalysisSubgraphNode *node = array_get(&subgraph->nodes, j); printf( " {state: %u, child_index: %u, production_id: %u, done: %d}\n", node->state, node->child_index, node->production_id, node->done @@ -1686,9 +1692,9 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { bool all_patterns_are_valid = true; QueryAnalysis analysis = query_analysis__new(); for (unsigned i = 0; i < parent_step_indices.size; i++) { - uint16_t parent_step_index = parent_step_indices.contents[i]; - uint16_t parent_depth = self->steps.contents[parent_step_index].depth; - TSSymbol parent_symbol = self->steps.contents[parent_step_index].symbol; + uint16_t parent_step_index = *array_get(&parent_step_indices, i); + uint16_t parent_depth = array_get(&self->steps, parent_step_index)->depth; + TSSymbol parent_symbol = array_get(&self->steps, parent_step_index)->symbol; if (parent_symbol == ts_builtin_sym_error) continue; // Find the subgraph that corresponds to this pattern's root symbol. 
If the pattern's @@ -1700,18 +1706,18 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { uint32_t j, child_exists; array_search_sorted_by(&self->step_offsets, .step_index, first_child_step_index, &j, &child_exists); ts_assert(child_exists); - *error_offset = self->step_offsets.contents[j].byte_offset; + *error_offset = array_get(&self->step_offsets, j)->byte_offset; all_patterns_are_valid = false; break; } // Initialize an analysis state at every parse state in the table where // this parent symbol can occur. - AnalysisSubgraph *subgraph = &subgraphs.contents[subgraph_index]; + AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index); analysis_state_set__clear(&analysis.states, &analysis.state_pool); analysis_state_set__clear(&analysis.deeper_states, &analysis.state_pool); for (unsigned j = 0; j < subgraph->start_states.size; j++) { - TSStateId parse_state = subgraph->start_states.contents[j]; + TSStateId parse_state = *array_get(&subgraph->start_states, j); analysis_state_set__push(&analysis.states, &analysis.state_pool, &((AnalysisState) { .step_index = parent_step_index + 1, .stack = { @@ -1731,7 +1737,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { #ifdef DEBUG_ANALYZE_QUERY printf( "\nWalk states for %s:\n", - ts_language_symbol_name(self->language, analysis.states.contents[0]->stack[0].parent_symbol) + ts_language_symbol_name(self->language, (*array_get(&analysis.states, 0))->stack[0].parent_symbol) ); #endif @@ -1742,7 +1748,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // be considered fallible. if (analysis.did_abort) { for (unsigned j = parent_step_index + 1; j < self->steps.size; j++) { - QueryStep *step = &self->steps.contents[j]; + QueryStep *step = array_get(&self->steps, j); if ( step->depth <= parent_depth || step->depth == PATTERN_DONE_MARKER @@ -1763,7 +1769,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { uint32_t j, impossible_exists; array_search_sorted_by(&self->step_offsets, .step_index, impossible_step_index, &j, &impossible_exists); if (j >= self->step_offsets.size) j = self->step_offsets.size - 1; - *error_offset = self->step_offsets.contents[j].byte_offset; + *error_offset = array_get(&self->step_offsets, j)->byte_offset; all_patterns_are_valid = false; break; } @@ -1771,8 +1777,8 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // Mark as fallible any step where a match terminated. // Later, this property will be propagated to all of the step's predecessors. for (unsigned j = 0; j < analysis.final_step_indices.size; j++) { - uint32_t final_step_index = analysis.final_step_indices.contents[j]; - QueryStep *step = &self->steps.contents[final_step_index]; + uint32_t final_step_index = *array_get(&analysis.final_step_indices, j); + QueryStep *step = array_get(&self->steps, final_step_index); if ( step->depth != PATTERN_DONE_MARKER && step->depth > parent_depth && @@ -1787,7 +1793,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // Mark as indefinite any step with captures that are used in predicates. Array(uint16_t) predicate_capture_ids = array_new(); for (unsigned i = 0; i < self->patterns.size; i++) { - QueryPattern *pattern = &self->patterns.contents[i]; + QueryPattern *pattern = array_get(&self->patterns, i); // Gather all of the captures that are used in predicates for this pattern. 
array_clear(&predicate_capture_ids); @@ -1796,7 +1802,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { end = start + pattern->predicate_steps.length, j = start; j < end; j++ ) { - TSQueryPredicateStep *step = &self->predicate_steps.contents[j]; + TSQueryPredicateStep *step = array_get(&self->predicate_steps, j); if (step->type == TSQueryPredicateStepTypeCapture) { uint16_t value_id = step->value_id; array_insert_sorted_by(&predicate_capture_ids, , value_id); @@ -1809,7 +1815,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { end = start + pattern->steps.length, j = start; j < end; j++ ) { - QueryStep *step = &self->steps.contents[j]; + QueryStep *step = array_get(&self->steps, j); for (unsigned k = 0; k < MAX_STEP_CAPTURE_COUNT; k++) { uint16_t capture_id = step->capture_ids[k]; if (capture_id == NONE) break; @@ -1829,7 +1835,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { while (!done) { done = true; for (unsigned i = self->steps.size - 1; i > 0; i--) { - QueryStep *step = &self->steps.contents[i]; + QueryStep *step = array_get(&self->steps, i); if (step->depth == PATTERN_DONE_MARKER) continue; // Determine if this step is definite or has definite alternatives. @@ -1842,12 +1848,12 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { if (step->alternative_index == NONE || step->alternative_index < i) { break; } - step = &self->steps.contents[step->alternative_index]; + step = array_get(&self->steps, step->alternative_index); } // If not, mark its predecessor as indefinite. if (!parent_pattern_guaranteed) { - QueryStep *prev_step = &self->steps.contents[i - 1]; + QueryStep *prev_step = array_get(&self->steps, i - 1); if ( !prev_step->is_dead_end && prev_step->depth != PATTERN_DONE_MARKER && @@ -1863,7 +1869,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { #ifdef DEBUG_ANALYZE_QUERY printf("Steps:\n"); for (unsigned i = 0; i < self->steps.size; i++) { - QueryStep *step = &self->steps.contents[i]; + QueryStep *step = array_get(&self->steps, i); if (step->depth == PATTERN_DONE_MARKER) { printf(" %u: DONE\n", i); } else { @@ -1887,18 +1893,18 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // prevent certain optimizations with range restrictions. 
analysis.did_abort = false; for (uint32_t i = 0; i < non_rooted_pattern_start_steps.size; i++) { - uint16_t pattern_entry_index = non_rooted_pattern_start_steps.contents[i]; - PatternEntry *pattern_entry = &self->pattern_map.contents[pattern_entry_index]; + uint16_t pattern_entry_index = *array_get(&non_rooted_pattern_start_steps, i); + PatternEntry *pattern_entry = array_get(&self->pattern_map, pattern_entry_index); analysis_state_set__clear(&analysis.states, &analysis.state_pool); analysis_state_set__clear(&analysis.deeper_states, &analysis.state_pool); for (unsigned j = 0; j < subgraphs.size; j++) { - AnalysisSubgraph *subgraph = &subgraphs.contents[j]; + AnalysisSubgraph *subgraph = array_get(&subgraphs, j); TSSymbolMetadata metadata = ts_language_symbol_metadata(self->language, subgraph->symbol); if (metadata.visible || metadata.named) continue; for (uint32_t k = 0; k < subgraph->start_states.size; k++) { - TSStateId parse_state = subgraph->start_states.contents[k]; + TSStateId parse_state = *array_get(&subgraph->start_states, k); analysis_state_set__push(&analysis.states, &analysis.state_pool, &((AnalysisState) { .step_index = pattern_entry->step_index, .stack = { @@ -1927,11 +1933,11 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { ); if (analysis.finished_parent_symbols.size > 0) { - self->patterns.contents[pattern_entry->pattern_index].is_non_local = true; + array_get(&self->patterns, pattern_entry->pattern_index)->is_non_local = true; } for (unsigned k = 0; k < analysis.finished_parent_symbols.size; k++) { - TSSymbol symbol = analysis.finished_parent_symbols.contents[k]; + TSSymbol symbol = *array_get(&analysis.finished_parent_symbols, k); array_insert_sorted_by(&self->repeat_symbols_with_rootless_patterns, , symbol); } } @@ -1941,7 +1947,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { printf("\nRepetition symbols with rootless patterns:\n"); printf("aborted analysis: %d\n", analysis.did_abort); for (unsigned i = 0; i < self->repeat_symbols_with_rootless_patterns.size; i++) { - TSSymbol symbol = self->repeat_symbols_with_rootless_patterns.contents[i]; + TSSymbol symbol = *array_get(&self->repeat_symbols_with_rootless_patterns, i); printf(" %u, %s\n", symbol, ts_language_symbol_name(self->language, symbol)); } printf("\n"); @@ -1950,8 +1956,8 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) { // Cleanup for (unsigned i = 0; i < subgraphs.size; i++) { - array_delete(&subgraphs.contents[i].start_states); - array_delete(&subgraphs.contents[i].nodes); + array_delete(&array_get(&subgraphs, i)->start_states); + array_delete(&array_get(&subgraphs, i)->nodes); } array_delete(&subgraphs); query_analysis__delete(&analysis); @@ -1970,7 +1976,7 @@ static void ts_query__add_negated_fields( TSFieldId *field_ids, uint16_t field_count ) { - QueryStep *step = &self->steps.contents[step_index]; + QueryStep *step = array_get(&self->steps, step_index); // The negated field array stores a list of field lists, separated by zeros. // Try to find the start index of an existing list that matches this new list. @@ -1978,7 +1984,7 @@ static void ts_query__add_negated_fields( unsigned match_count = 0; unsigned start_i = 0; for (unsigned i = 0; i < self->negated_fields.size; i++) { - TSFieldId existing_field_id = self->negated_fields.contents[i]; + TSFieldId existing_field_id = *array_get(&self->negated_fields, i); // At each zero value, terminate the match attempt. 
If we've exactly // matched the new field list, then reuse this index. Otherwise, @@ -2248,10 +2254,10 @@ static TSQueryError ts_query__parse_pattern( // For all of the branches except for the last one, add the subsequent branch as an // alternative, and link the end of the branch to the current end of the steps. for (unsigned i = 0; i < branch_step_indices.size - 1; i++) { - uint32_t step_index = branch_step_indices.contents[i]; - uint32_t next_step_index = branch_step_indices.contents[i + 1]; - QueryStep *start_step = &self->steps.contents[step_index]; - QueryStep *end_step = &self->steps.contents[next_step_index - 1]; + uint32_t step_index = *array_get(&branch_step_indices, i); + uint32_t next_step_index = *array_get(&branch_step_indices, i + 1); + QueryStep *start_step = array_get(&self->steps, step_index); + QueryStep *end_step = array_get(&self->steps, next_step_index - 1); start_step->alternative_index = next_step_index; end_step->alternative_index = self->steps.size; end_step->is_dead_end = true; @@ -2514,6 +2520,9 @@ static TSQueryError ts_query__parse_pattern( child_is_immediate, &child_capture_quantifiers ); + // In the event we only parsed a predicate, meaning no new steps were added, + // then subtract one so we're not indexing past the end of the array + if (step_index == self->steps.size) step_index--; if (e == PARENT_DONE) { if (stream->next == ')') { if (child_is_immediate) { @@ -2522,19 +2531,19 @@ static TSQueryError ts_query__parse_pattern( return TSQueryErrorSyntax; } // Mark this step *and* its alternatives as the last child of the parent. - QueryStep *last_child_step = &self->steps.contents[last_child_step_index]; + QueryStep *last_child_step = array_get(&self->steps, last_child_step_index); last_child_step->is_last_child = true; if ( last_child_step->alternative_index != NONE && last_child_step->alternative_index < self->steps.size ) { - QueryStep *alternative_step = &self->steps.contents[last_child_step->alternative_index]; + QueryStep *alternative_step = array_get(&self->steps, last_child_step->alternative_index); alternative_step->is_last_child = true; while ( alternative_step->alternative_index != NONE && alternative_step->alternative_index < self->steps.size ) { - alternative_step = &self->steps.contents[alternative_step->alternative_index]; + alternative_step = array_get(&self->steps, alternative_step->alternative_index); alternative_step->is_last_child = true; } } @@ -2640,7 +2649,7 @@ static TSQueryError ts_query__parse_pattern( } uint32_t step_index = starting_step_index; - QueryStep *step = &self->steps.contents[step_index]; + QueryStep *step = array_get(&self->steps, step_index); for (;;) { step->field = field_id; if ( @@ -2649,7 +2658,7 @@ static TSQueryError ts_query__parse_pattern( step->alternative_index < self->steps.size ) { step_index = step->alternative_index; - step = &self->steps.contents[step_index]; + step = array_get(&self->steps, step_index); } else { break; } @@ -2698,9 +2707,9 @@ static TSQueryError ts_query__parse_pattern( // Stop when `step->alternative_index` is `NONE` or it points to // `repeat_step` or beyond. Note that having just been pushed, // `repeat_step` occupies slot `self->steps.size - 1`. 
- QueryStep *step = &self->steps.contents[starting_step_index]; + QueryStep *step = array_get(&self->steps, starting_step_index); while (step->alternative_index != NONE && step->alternative_index < self->steps.size - 1) { - step = &self->steps.contents[step->alternative_index]; + step = array_get(&self->steps, step->alternative_index); } step->alternative_index = self->steps.size; } @@ -2712,9 +2721,9 @@ static TSQueryError ts_query__parse_pattern( stream_advance(stream); stream_skip_whitespace(stream); - QueryStep *step = &self->steps.contents[starting_step_index]; + QueryStep *step = array_get(&self->steps, starting_step_index); while (step->alternative_index != NONE && step->alternative_index < self->steps.size) { - step = &self->steps.contents[step->alternative_index]; + step = array_get(&self->steps, step->alternative_index); } step->alternative_index = self->steps.size; } @@ -2740,7 +2749,7 @@ static TSQueryError ts_query__parse_pattern( uint32_t step_index = starting_step_index; for (;;) { - QueryStep *step = &self->steps.contents[step_index]; + QueryStep *step = array_get(&self->steps, step_index); query_step__add_capture(step, capture_id); if ( step->alternative_index != NONE && @@ -2838,14 +2847,14 @@ TSQuery *ts_query_new( // Maintain a map that can look up patterns for a given root symbol. uint16_t wildcard_root_alternative_index = NONE; for (;;) { - QueryStep *step = &self->steps.contents[start_step_index]; + QueryStep *step = array_get(&self->steps, start_step_index); // If a pattern has a wildcard at its root, but it has a non-wildcard child, // then optimize the matching process by skipping matching the wildcard. // Later, during the matching process, the query cursor will check that // there is a parent node, and capture it if necessary. 
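// [What the wildcard-root optimization described above is about] For a pattern whose root is a
// wildcard but whose first child is concrete, the pattern map indexes the pattern by the child's
// symbol (the check on the pattern's second step follows just below), and the cursor captures the
// parent lazily when the pattern matches. A sketch of the kind of query this affects; the node
// and capture names are arbitrary and tree_sitter_c() is hypothetical:

#include <string.h>
#include <tree_sitter/api.h>

const TSLanguage *tree_sitter_c(void);  // hypothetical

TSQuery *compile_wildcard_root_query(void) {
  // The root `_` matches any named node; matching is driven by the `identifier` child, and the
  // `@parent` capture is filled in from the matched node's parent when needed.
  const char *source = "(_ (identifier) @name) @parent";
  uint32_t error_offset = 0;
  TSQueryError error_type = TSQueryErrorNone;
  return ts_query_new(tree_sitter_c(), source, (uint32_t)strlen(source),
                      &error_offset, &error_type);
}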
if (step->symbol == WILDCARD_SYMBOL && step->depth == 0 && !step->field) { - QueryStep *second_step = &self->steps.contents[start_step_index + 1]; + QueryStep *second_step = array_get(&self->steps, start_step_index + 1); if (second_step->symbol != WILDCARD_SYMBOL && second_step->depth == 1 && !second_step->is_immediate) { wildcard_root_alternative_index = step->alternative_index; start_step_index += 1; @@ -2860,7 +2869,7 @@ TSQuery *ts_query_new( uint32_t start_depth = step->depth; bool is_rooted = start_depth == 0; for (uint32_t step_index = start_step_index + 1; step_index < self->steps.size; step_index++) { - QueryStep *child_step = &self->steps.contents[step_index]; + QueryStep *child_step = array_get(&self->steps, step_index); if (child_step->is_dead_end) break; if (child_step->depth == start_depth) { is_rooted = false; @@ -2964,26 +2973,24 @@ const TSQueryPredicateStep *ts_query_predicates_for_pattern( uint32_t pattern_index, uint32_t *step_count ) { - Slice slice = self->patterns.contents[pattern_index].predicate_steps; + Slice slice = array_get(&self->patterns, pattern_index)->predicate_steps; *step_count = slice.length; - if (self->predicate_steps.contents == NULL) { - return NULL; - } - return &self->predicate_steps.contents[slice.offset]; + if (slice.length == 0) return NULL; + return array_get(&self->predicate_steps, slice.offset); } uint32_t ts_query_start_byte_for_pattern( const TSQuery *self, uint32_t pattern_index ) { - return self->patterns.contents[pattern_index].start_byte; + return array_get(&self->patterns, pattern_index)->start_byte; } uint32_t ts_query_end_byte_for_pattern( const TSQuery *self, uint32_t pattern_index ) { - return self->patterns.contents[pattern_index].end_byte; + return array_get(&self->patterns, pattern_index)->end_byte; } bool ts_query_is_pattern_rooted( @@ -2991,7 +2998,7 @@ bool ts_query_is_pattern_rooted( uint32_t pattern_index ) { for (unsigned i = 0; i < self->pattern_map.size; i++) { - PatternEntry *entry = &self->pattern_map.contents[i]; + PatternEntry *entry = array_get(&self->pattern_map, i); if (entry->pattern_index == pattern_index) { if (!entry->is_rooted) return false; } @@ -3004,7 +3011,7 @@ bool ts_query_is_pattern_non_local( uint32_t pattern_index ) { if (pattern_index < self->patterns.size) { - return self->patterns.contents[pattern_index].is_non_local; + return array_get(&self->patterns, pattern_index)->is_non_local; } else { return false; } @@ -3016,12 +3023,12 @@ bool ts_query_is_pattern_guaranteed_at_step( ) { uint32_t step_index = UINT32_MAX; for (unsigned i = 0; i < self->step_offsets.size; i++) { - StepOffset *step_offset = &self->step_offsets.contents[i]; + StepOffset *step_offset = array_get(&self->step_offsets, i); if (step_offset->byte_offset > byte_offset) break; step_index = step_offset->step_index; } if (step_index < self->steps.size) { - return self->steps.contents[step_index].root_pattern_guaranteed; + return array_get(&self->steps, step_index)->root_pattern_guaranteed; } else { return false; } @@ -3032,8 +3039,8 @@ bool ts_query__step_is_fallible( uint16_t step_index ) { ts_assert((uint32_t)step_index + 1 < self->steps.size); - QueryStep *step = &self->steps.contents[step_index]; - QueryStep *next_step = &self->steps.contents[step_index + 1]; + QueryStep *step = array_get(&self->steps, step_index); + QueryStep *next_step = array_get(&self->steps, step_index + 1); return ( next_step->depth != PATTERN_DONE_MARKER && next_step->depth > step->depth && @@ -3051,7 +3058,7 @@ void ts_query_disable_capture( int id = 
symbol_table_id_for_name(&self->captures, name, length); if (id != -1) { for (unsigned i = 0; i < self->steps.size; i++) { - QueryStep *step = &self->steps.contents[i]; + QueryStep *step = array_get(&self->steps, i); query_step__remove_capture(step, id); } } @@ -3064,7 +3071,7 @@ void ts_query_disable_pattern( // Remove the given pattern from the pattern map. Its steps will still // be in the `steps` array, but they will never be read. for (unsigned i = 0; i < self->pattern_map.size; i++) { - PatternEntry *pattern = &self->pattern_map.contents[i]; + PatternEntry *pattern = array_get(&self->pattern_map, i); if (pattern->pattern_index == pattern_index) { array_erase(&self->pattern_map, i); i--; @@ -3141,7 +3148,7 @@ void ts_query_cursor_exec( if (query) { LOG("query steps:\n"); for (unsigned i = 0; i < query->steps.size; i++) { - QueryStep *step = &query->steps.contents[i]; + QueryStep *step = array_get(&query->steps, i); LOG(" %u: {", i); if (step->depth == PATTERN_DONE_MARKER) { LOG("DONE"); @@ -3246,7 +3253,7 @@ static bool ts_query_cursor__first_in_progress_capture( *byte_offset = UINT32_MAX; *pattern_index = UINT32_MAX; for (unsigned i = 0; i < self->states.size; i++) { - QueryState *state = &self->states.contents[i]; + QueryState *state = array_get(&self->states, i); if (state->dead) continue; const CaptureList *captures = capture_list_pool_get( @@ -3257,7 +3264,7 @@ static bool ts_query_cursor__first_in_progress_capture( continue; } - TSNode node = captures->contents[state->consumed_capture_count].node; + TSNode node = array_get(captures, state->consumed_capture_count)->node; if ( ts_node_end_byte(node) <= self->start_byte || point_lte(ts_node_end_point(node), self->start_point) @@ -3273,7 +3280,7 @@ static bool ts_query_cursor__first_in_progress_capture( node_start_byte < *byte_offset || (node_start_byte == *byte_offset && state->pattern_index < *pattern_index) ) { - QueryStep *step = &self->query->steps.contents[state->step_index]; + QueryStep *step = array_get(&self->query->steps, state->step_index); if (is_definite) { // We're being a bit conservative here by asserting that the following step // is not immediate, because this capture might end up being discarded if the @@ -3329,8 +3336,8 @@ void ts_query_cursor__compare_captures( for (;;) { if (i < left_captures->size) { if (j < right_captures->size) { - TSQueryCapture *left = &left_captures->contents[i]; - TSQueryCapture *right = &right_captures->contents[j]; + TSQueryCapture *left = array_get(left_captures, i); + TSQueryCapture *right = array_get(right_captures, j); if (left->node.id == right->node.id && left->index == right->index) { i++; j++; @@ -3369,7 +3376,7 @@ static void ts_query_cursor__add_state( TSQueryCursor *self, const PatternEntry *pattern ) { - QueryStep *step = &self->query->steps.contents[pattern->step_index]; + QueryStep *step = array_get(&self->query->steps, pattern->step_index); uint32_t start_depth = self->depth - step->depth; // Keep the states array in ascending order of start_depth and pattern_index, @@ -3393,7 +3400,7 @@ static void ts_query_cursor__add_state( // need to execute in order to keep the states ordered by pattern_index. 
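// [Reading predicates after the ts_query_predicates_for_pattern change above] The function now
// returns NULL exactly when the pattern has no predicate steps (it previously keyed this off the
// backing array's allocation), so callers should treat a NULL result or zero step_count as "no
// predicates" rather than an error. A usage sketch; interpreting the predicates themselves is up
// to the embedder, as usual with this API:

#include <stdio.h>
#include <tree_sitter/api.h>

void list_predicates(const TSQuery *query) {
  uint32_t pattern_count = ts_query_pattern_count(query);
  for (uint32_t pattern = 0; pattern < pattern_count; pattern++) {
    uint32_t step_count = 0;
    const TSQueryPredicateStep *steps =
      ts_query_predicates_for_pattern(query, pattern, &step_count);
    if (!steps || step_count == 0) continue;  // nothing to interpret for this pattern
    for (uint32_t i = 0; i < step_count; i++) {
      if (steps[i].type == TSQueryPredicateStepTypeCapture) {
        uint32_t length = 0;
        const char *name = ts_query_capture_name_for_id(query, steps[i].value_id, &length);
        printf("  capture argument: @%.*s\n", (int)length, name);
      } else if (steps[i].type == TSQueryPredicateStepTypeString) {
        uint32_t length = 0;
        const char *value = ts_query_string_value_for_id(query, steps[i].value_id, &length);
        printf("  string argument: %.*s\n", (int)length, value);
      } else {
        printf("  end of predicate\n");  // TSQueryPredicateStepTypeDone
      }
    }
  }
}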
uint32_t index = self->states.size; while (index > 0) { - QueryState *prev_state = &self->states.contents[index - 1]; + QueryState *prev_state = array_get(&self->states, index - 1); if (prev_state->start_depth < start_depth) break; if (prev_state->start_depth == start_depth) { // Avoid inserting an unnecessary duplicate state, which would be @@ -3457,7 +3464,7 @@ static CaptureList *ts_query_cursor__prepare_to_capture( " abandon state. index:%u, pattern:%u, offset:%u.\n", state_index, pattern_index, byte_offset ); - QueryState *other_state = &self->states.contents[state_index]; + QueryState *other_state = array_get(&self->states, state_index); state->capture_list_id = other_state->capture_list_id; other_state->capture_list_id = NONE; other_state->dead = true; @@ -3527,8 +3534,8 @@ static QueryState *ts_query_cursor__copy_state( } array_insert(&self->states, state_index + 1, copy); - *state_ref = &self->states.contents[state_index]; - return &self->states.contents[state_index + 1]; + *state_ref = array_get(&self->states, state_index); + return array_get(&self->states, state_index + 1); } static inline bool ts_query_cursor__should_descend( @@ -3543,8 +3550,8 @@ static inline bool ts_query_cursor__should_descend( // If there are in-progress matches whose remaining steps occur // deeper in the tree, then descend. for (unsigned i = 0; i < self->states.size; i++) { - QueryState *state = &self->states.contents[i];; - QueryStep *next_step = &self->query->steps.contents[state->step_index]; + QueryState *state = array_get(&self->states, i); + QueryStep *next_step = array_get(&self->query->steps, state->step_index); if ( next_step->depth != PATTERN_DONE_MARKER && state->start_depth + next_step->depth > self->depth @@ -3638,8 +3645,8 @@ static inline bool ts_query_cursor__advance( // After leaving a node, remove any states that cannot make further progress. uint32_t deleted_count = 0; for (unsigned i = 0, n = self->states.size; i < n; i++) { - QueryState *state = &self->states.contents[i]; - QueryStep *step = &self->query->steps.contents[state->step_index]; + QueryState *state = array_get(&self->states, i); + QueryStep *step = array_get(&self->query->steps, state->step_index); // If a state completed its pattern inside of this node, but was deferred from finishing // in order to search for longer matches, mark it as finished. @@ -3672,7 +3679,7 @@ static inline bool ts_query_cursor__advance( } else if (deleted_count > 0) { - self->states.contents[i - deleted_count] = *state; + *array_get(&self->states, i - deleted_count) = *state; } } self->states.size -= deleted_count; @@ -3775,11 +3782,11 @@ static inline bool ts_query_cursor__advance( // Add new states for any patterns whose root node is a wildcard. if (!node_is_error) { for (unsigned i = 0; i < self->query->wildcard_root_pattern_count; i++) { - PatternEntry *pattern = &self->query->pattern_map.contents[i]; + PatternEntry *pattern = array_get(&self->query->pattern_map, i); // If this node matches the first step of the pattern, then add a new // state at the start of this pattern. - QueryStep *step = &self->query->steps.contents[pattern->step_index]; + QueryStep *step = array_get(&self->query->steps, pattern->step_index); uint32_t start_depth = self->depth - step->depth; if ( (pattern->is_rooted ? @@ -3797,9 +3804,9 @@ static inline bool ts_query_cursor__advance( // Add new states for any patterns whose root node matches this node. 
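// [Why ts_query_cursor__copy_state re-reads both pointers above] `array_insert` can grow and
// therefore reallocate `self->states`, so any `QueryState *` taken before the insert may dangle
// afterwards; both the caller's `*state_ref` and the returned copy are re-fetched by index once
// the insert is done. A minimal, generic illustration of the hazard (plain malloc/realloc, not
// the library's array macros; error handling omitted):

#include <stdlib.h>

typedef struct { int *items; size_t size; size_t capacity; } IntVec;

static void intvec_push(IntVec *vec, int value) {
  if (vec->size == vec->capacity) {
    vec->capacity = vec->capacity ? vec->capacity * 2 : 4;
    vec->items = realloc(vec->items, vec->capacity * sizeof(int));  // may move the buffer
  }
  vec->items[vec->size++] = value;
}

static int *use_safely(IntVec *vec, size_t index) {
  int *before = &vec->items[index];  // becomes invalid if the next push reallocates
  intvec_push(vec, 42);
  (void)before;                      // do NOT dereference `before` after the push
  return &vec->items[index];         // re-fetch by index instead, as the query code does
}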
unsigned i; if (ts_query__pattern_map_search(self->query, symbol, &i)) { - PatternEntry *pattern = &self->query->pattern_map.contents[i]; + PatternEntry *pattern = array_get(&self->query->pattern_map, i); - QueryStep *step = &self->query->steps.contents[pattern->step_index]; + QueryStep *step = array_get(&self->query->steps, pattern->step_index); uint32_t start_depth = self->depth - step->depth; do { // If this node matches the first step of the pattern, then add a new @@ -3817,15 +3824,15 @@ static inline bool ts_query_cursor__advance( // Advance to the next pattern whose root node matches this node. i++; if (i == self->query->pattern_map.size) break; - pattern = &self->query->pattern_map.contents[i]; - step = &self->query->steps.contents[pattern->step_index]; + pattern = array_get(&self->query->pattern_map, i); + step = array_get(&self->query->steps, pattern->step_index); } while (step->symbol == symbol); } // Update all of the in-progress states with current node. for (unsigned j = 0, copy_count = 0; j < self->states.size; j += 1 + copy_count) { - QueryState *state = &self->states.contents[j]; - QueryStep *step = &self->query->steps.contents[state->step_index]; + QueryState *state = array_get(&self->states, j); + QueryStep *step = array_get(&self->query->steps, state->step_index); state->has_in_progress_alternatives = false; copy_count = 0; @@ -3874,7 +3881,7 @@ static inline bool ts_query_cursor__advance( } if (step->negated_field_list_id) { - TSFieldId *negated_field_ids = &self->query->negated_fields.contents[step->negated_field_list_id]; + TSFieldId *negated_field_ids = array_get(&self->query->negated_fields, step->negated_field_list_id); for (;;) { TSFieldId negated_field_id = *negated_field_ids; if (negated_field_id) { @@ -3975,7 +3982,7 @@ static inline bool ts_query_cursor__advance( state->step_index ); - QueryStep *next_step = &self->query->steps.contents[state->step_index]; + QueryStep *next_step = array_get(&self->query->steps, state->step_index); // For a given step, if the current symbol is the wildcard symbol, `_`, and it is **not** // named, meaning it should capture anonymous nodes, **and** the next step is immediate, @@ -3998,8 +4005,8 @@ static inline bool ts_query_cursor__advance( // so this is an interactive process. unsigned end_index = j + 1; for (unsigned k = j; k < end_index; k++) { - QueryState *child_state = &self->states.contents[k]; - QueryStep *child_step = &self->query->steps.contents[child_state->step_index]; + QueryState *child_state = array_get(&self->states, k); + QueryStep *child_step = array_get(&self->query->steps, child_state->step_index); if (child_step->alternative_index != NONE) { // A "dead-end" step exists only to add a non-sequential jump into the step sequence, // via its alternative index. When a state reaches a dead-end step, it jumps straight @@ -4040,7 +4047,7 @@ static inline bool ts_query_cursor__advance( } for (unsigned j = 0; j < self->states.size; j++) { - QueryState *state = &self->states.contents[j]; + QueryState *state = array_get(&self->states, j); if (state->dead) { array_erase(&self->states, j); j--; @@ -4052,7 +4059,7 @@ static inline bool ts_query_cursor__advance( // one state has a strict subset of another state's captures. bool did_remove = false; for (unsigned k = j + 1; k < self->states.size; k++) { - QueryState *other_state = &self->states.contents[k]; + QueryState *other_state = array_get(&self->states, k); // Query states are kept in ascending order of start_depth and pattern_index. 
// Since the longest-match criteria is only used for deduping matches of the same @@ -4112,7 +4119,7 @@ static inline bool ts_query_cursor__advance( state->step_index, capture_list_pool_get(&self->capture_list_pool, state->capture_list_id)->size ); - QueryStep *next_step = &self->query->steps.contents[state->step_index]; + QueryStep *next_step = array_get(&self->query->steps, state->step_index); if (next_step->depth == PATTERN_DONE_MARKER) { if (state->has_in_progress_alternatives) { LOG(" defer finishing pattern %u\n", state->pattern_index); @@ -4157,7 +4164,7 @@ bool ts_query_cursor_next_match( } } - QueryState *state = &self->finished_states.contents[0]; + QueryState *state = array_get(&self->finished_states, 0); if (state->id == UINT32_MAX) state->id = self->next_state_id++; match->id = state->id; match->pattern_index = state->pattern_index; @@ -4177,7 +4184,7 @@ void ts_query_cursor_remove_match( uint32_t match_id ) { for (unsigned i = 0; i < self->finished_states.size; i++) { - const QueryState *state = &self->finished_states.contents[i]; + const QueryState *state = array_get(&self->finished_states, i); if (state->id == match_id) { capture_list_pool_release( &self->capture_list_pool, @@ -4191,7 +4198,7 @@ void ts_query_cursor_remove_match( // Remove unfinished query states as well to prevent future // captures for a match being removed. for (unsigned i = 0; i < self->states.size; i++) { - const QueryState *state = &self->states.contents[i]; + const QueryState *state = array_get(&self->states, i); if (state->id == match_id) { capture_list_pool_release( &self->capture_list_pool, @@ -4231,7 +4238,7 @@ bool ts_query_cursor_next_capture( uint32_t first_finished_capture_byte = first_unfinished_capture_byte; uint32_t first_finished_pattern_index = first_unfinished_pattern_index; for (unsigned i = 0; i < self->finished_states.size;) { - QueryState *state = &self->finished_states.contents[i]; + QueryState *state = array_get(&self->finished_states, i); const CaptureList *captures = capture_list_pool_get( &self->capture_list_pool, state->capture_list_id @@ -4247,7 +4254,7 @@ bool ts_query_cursor_next_capture( continue; } - TSNode node = captures->contents[state->consumed_capture_count].node; + TSNode node = array_get(captures, state->consumed_capture_count)->node; bool node_precedes_range = ( ts_node_end_byte(node) <= self->start_byte || @@ -4287,7 +4294,7 @@ bool ts_query_cursor_next_capture( if (first_finished_state) { state = first_finished_state; } else if (first_unfinished_state_is_definite) { - state = &self->states.contents[first_unfinished_state_index]; + state = array_get(&self->states, first_unfinished_state_index); } else { state = NULL; } @@ -4316,7 +4323,7 @@ bool ts_query_cursor_next_capture( ); capture_list_pool_release( &self->capture_list_pool, - self->states.contents[first_unfinished_state_index].capture_list_id + array_get(&self->states, first_unfinished_state_index)->capture_list_id ); array_erase(&self->states, first_unfinished_state_index); } diff --git a/lib/src/stack.c b/lib/src/stack.c index f0d57108..91420074 100644 --- a/lib/src/stack.c +++ b/lib/src/stack.c @@ -290,8 +290,8 @@ static StackVersion ts_stack__add_version( ) { StackHead head = { .node = node, - .node_count_at_last_error = self->heads.contents[original_version].node_count_at_last_error, - .last_external_token = self->heads.contents[original_version].last_external_token, + .node_count_at_last_error = array_get(&self->heads, original_version)->node_count_at_last_error, + .last_external_token = 
array_get(&self->heads, original_version)->last_external_token,
    .status = StackStatusActive,
    .lookahead_when_paused = NULL_SUBTREE,
  };
@@ -308,8 +308,8 @@ static void ts_stack__add_slice(
   SubtreeArray *subtrees
 ) {
   for (uint32_t i = self->slices.size - 1; i + 1 > 0; i--) {
-    StackVersion version = self->slices.contents[i].version;
-    if (self->heads.contents[version].node == node) {
+    StackVersion version = array_get(&self->slices, i)->version;
+    if (array_get(&self->heads, version)->node == node) {
       StackSlice slice = {*subtrees, version};
       array_insert(&self->slices, i + 1, slice);
       return;
@@ -349,7 +349,7 @@ static StackSliceArray stack__iter(
   while (self->iterators.size > 0) {
     for (uint32_t i = 0, size = self->iterators.size; i < size; i++) {
-      StackIterator *iterator = &self->iterators.contents[i];
+      StackIterator *iterator = array_get(&self->iterators, i);
       StackNode *node = iterator->node;
       StackAction action = callback(payload, iterator);
@@ -384,11 +384,11 @@ static StackSliceArray stack__iter(
         StackLink link;
         if (j == node->link_count) {
           link = node->links[0];
-          next_iterator = &self->iterators.contents[i];
+          next_iterator = array_get(&self->iterators, i);
         } else {
           if (self->iterators.size >= MAX_ITERATOR_COUNT) continue;
           link = node->links[j];
-          StackIterator current_iterator = self->iterators.contents[i];
+          StackIterator current_iterator = *array_get(&self->iterators, i);
           array_push(&self->iterators, current_iterator);
           next_iterator = array_back(&self->iterators);
           ts_subtree_array_copy(next_iterator->subtrees, &next_iterator->subtrees);
@@ -444,12 +444,12 @@ void ts_stack_delete(Stack *self) {
   array_delete(&self->iterators);
   stack_node_release(self->base_node, &self->node_pool, self->subtree_pool);
   for (uint32_t i = 0; i < self->heads.size; i++) {
-    stack_head_delete(&self->heads.contents[i], &self->node_pool, self->subtree_pool);
+    stack_head_delete(array_get(&self->heads, i), &self->node_pool, self->subtree_pool);
   }
   array_clear(&self->heads);
   if (self->node_pool.contents) {
     for (uint32_t i = 0; i < self->node_pool.size; i++)
-      ts_free(self->node_pool.contents[i]);
+      ts_free(*array_get(&self->node_pool, i));
     array_delete(&self->node_pool);
   }
   array_delete(&self->heads);
@@ -460,6 +460,17 @@ uint32_t ts_stack_version_count(const Stack *self) {
   return self->heads.size;
 }
 
+uint32_t ts_stack_halted_version_count(Stack *self) {
+  uint32_t count = 0;
+  for (uint32_t i = 0; i < self->heads.size; i++) {
+    StackHead *head = array_get(&self->heads, i);
+    if (head->status == StackStatusHalted) {
+      count++;
+    }
+  }
+  return count;
+}
+
 TSStateId ts_stack_state(const Stack *self, StackVersion version) {
   return array_get(&self->heads, version)->node->state;
 }
@@ -524,6 +535,7 @@ StackSliceArray ts_stack_pop_count(Stack *self, StackVersion version, uint32_t c
   return stack__iter(self, version, pop_count_callback, &count, (int)count);
 }
 
+
 forceinline StackAction pop_pending_callback(void *payload, const StackIterator *iterator) {
   (void)payload;
   if (iterator->subtree_count >= 1) {
@@ -540,8 +552,8 @@ forceinline StackAction pop_pending_callback(void *payload, const StackIterator
 StackSliceArray ts_stack_pop_pending(Stack *self, StackVersion version) {
   StackSliceArray pop = stack__iter(self, version, pop_pending_callback, NULL, 0);
   if (pop.size > 0) {
-    ts_stack_renumber_version(self, pop.contents[0].version, version);
-    pop.contents[0].version = version;
+    ts_stack_renumber_version(self, array_get(&pop, 0)->version, version);
+    array_get(&pop, 0)->version = version;
   }
   return pop;
 }
@@ -549,7 +561,7 @@ StackSliceArray ts_stack_pop_pending(Stack *self, StackVersion version) {
 forceinline StackAction pop_error_callback(void *payload, const StackIterator *iterator) {
   if (iterator->subtrees.size > 0) {
     bool *found_error = payload;
-    if (!*found_error && ts_subtree_is_error(iterator->subtrees.contents[0])) {
+    if (!*found_error && ts_subtree_is_error(*array_get(&iterator->subtrees, 0))) {
       *found_error = true;
       return StackActionPop | StackActionStop;
     } else {
@@ -568,8 +580,8 @@ SubtreeArray ts_stack_pop_error(Stack *self, StackVersion version) {
     StackSliceArray pop = stack__iter(self, version, pop_error_callback, &found_error, 1);
     if (pop.size > 0) {
       ts_assert(pop.size == 1);
-      ts_stack_renumber_version(self, pop.contents[0].version, version);
-      return pop.contents[0].subtrees;
+      ts_stack_renumber_version(self, array_get(&pop, 0)->version, version);
+      return array_get(&pop, 0)->subtrees;
     }
     break;
   }
@@ -597,7 +609,7 @@ forceinline StackAction summarize_stack_callback(void *payload, const StackItera
   unsigned depth = iterator->subtree_count;
   if (depth > session->max_depth) return StackActionStop;
   for (unsigned i = session->summary->size - 1; i + 1 > 0; i--) {
-    StackSummaryEntry entry = session->summary->contents[i];
+    StackSummaryEntry entry = *array_get(session->summary, i);
     if (entry.depth < depth) break;
     if (entry.depth == depth && entry.state == state) return StackActionNone;
   }
@@ -616,7 +628,7 @@ void ts_stack_record_summary(Stack *self, StackVersion version, unsigned max_dep
   };
   array_init(session.summary);
   stack__iter(self, version, summarize_stack_callback, &session, -1);
-  StackHead *head = &self->heads.contents[version];
+  StackHead *head = array_get(&self->heads, version);
   if (head->summary) {
     array_delete(head->summary);
     ts_free(head->summary);
@@ -665,8 +677,8 @@ void ts_stack_renumber_version(Stack *self, StackVersion v1, StackVersion v2) {
   if (v1 == v2) return;
   ts_assert(v2 < v1);
   ts_assert((uint32_t)v1 < self->heads.size);
-  StackHead *source_head = &self->heads.contents[v1];
-  StackHead *target_head = &self->heads.contents[v2];
+  StackHead *source_head = array_get(&self->heads, v1);
+  StackHead *target_head = array_get(&self->heads, v2);
   if (target_head->summary && !source_head->summary) {
     source_head->summary = target_head->summary;
     target_head->summary = NULL;
@@ -677,14 +689,15 @@ void ts_stack_renumber_version(Stack *self, StackVersion v1, StackVersion v2) {
 }
 
 void ts_stack_swap_versions(Stack *self, StackVersion v1, StackVersion v2) {
-  StackHead temporary_head = self->heads.contents[v1];
-  self->heads.contents[v1] = self->heads.contents[v2];
-  self->heads.contents[v2] = temporary_head;
+  StackHead temporary_head = *array_get(&self->heads, v1);
+  *array_get(&self->heads, v1) = *array_get(&self->heads, v2);
+  *array_get(&self->heads, v2) = temporary_head;
 }
 
 StackVersion ts_stack_copy_version(Stack *self, StackVersion version) {
   ts_assert(version < self->heads.size);
-  array_push(&self->heads, self->heads.contents[version]);
+  StackHead version_head = *array_get(&self->heads, version);
+  array_push(&self->heads, version_head);
   StackHead *head = array_back(&self->heads);
   stack_node_retain(head->node);
   if (head->last_external_token.ptr) ts_subtree_retain(head->last_external_token);
@@ -694,8 +707,8 @@ StackVersion ts_stack_copy_version(Stack *self, StackVersion version) {
 
 bool ts_stack_merge(Stack *self, StackVersion version1, StackVersion version2) {
   if (!ts_stack_can_merge(self, version1, version2)) return false;
-  StackHead *head1 = &self->heads.contents[version1];
-  StackHead *head2 = &self->heads.contents[version2];
+  StackHead *head1 = array_get(&self->heads, version1);
+  StackHead *head2 = array_get(&self->heads, version2);
   for (uint32_t i = 0; i < head2->node->link_count; i++) {
     stack_node_add_link(head1->node, head2->node->links[i], self->subtree_pool);
   }
@@ -707,8 +720,8 @@ bool ts_stack_merge(Stack *self, StackVersion version1, StackVersion version2) {
 }
 
 bool ts_stack_can_merge(Stack *self, StackVersion version1, StackVersion version2) {
-  StackHead *head1 = &self->heads.contents[version1];
-  StackHead *head2 = &self->heads.contents[version2];
+  StackHead *head1 = array_get(&self->heads, version1);
+  StackHead *head2 = array_get(&self->heads, version2);
   return
     head1->status == StackStatusActive &&
     head2->status == StackStatusActive &&
@@ -753,7 +766,7 @@ Subtree ts_stack_resume(Stack *self, StackVersion version) {
 void ts_stack_clear(Stack *self) {
   stack_node_retain(self->base_node);
   for (uint32_t i = 0; i < self->heads.size; i++) {
-    stack_head_delete(&self->heads.contents[i], &self->node_pool, self->subtree_pool);
+    stack_head_delete(array_get(&self->heads, i), &self->node_pool, self->subtree_pool);
   }
   array_clear(&self->heads);
   array_push(&self->heads, ((StackHead) {
@@ -776,7 +789,7 @@ bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f)
   array_clear(&self->iterators);
 
   for (uint32_t i = 0; i < self->heads.size; i++) {
-    StackHead *head = &self->heads.contents[i];
+    StackHead *head = array_get(&self->heads, i);
     if (head->status == StackStatusHalted) continue;
 
     fprintf(f, "node_head_%u [shape=none, label=\"\"]\n", i);
@@ -794,7 +807,7 @@ bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f)
 
     if (head->summary) {
       fprintf(f, "\nsummary:");
-      for (uint32_t j = 0; j < head->summary->size; j++) fprintf(f, " %u", head->summary->contents[j].state);
+      for (uint32_t j = 0; j < head->summary->size; j++) fprintf(f, " %u", array_get(head->summary, j)->state);
     }
 
     if (head->last_external_token.ptr) {
@@ -815,11 +828,11 @@ bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f)
 
     all_iterators_done = true;
     for (uint32_t i = 0; i < self->iterators.size; i++) {
-      StackIterator iterator = self->iterators.contents[i];
+      StackIterator iterator = *array_get(&self->iterators, i);
       StackNode *node = iterator.node;
 
       for (uint32_t j = 0; j < visited_nodes.size; j++) {
-        if (visited_nodes.contents[j] == node) {
+        if (*array_get(&visited_nodes, j) == node) {
          node = NULL;
          break;
        }
@@ -878,7 +891,7 @@ bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f)
 
        StackIterator *next_iterator;
        if (j == 0) {
-          next_iterator = &self->iterators.contents[i];
+          next_iterator = array_get(&self->iterators, i);
        } else {
          array_push(&self->iterators, iterator);
          next_iterator = array_back(&self->iterators);
diff --git a/lib/src/stack.h b/lib/src/stack.h
index ac32234f..2619f1e8 100644
--- a/lib/src/stack.h
+++ b/lib/src/stack.h
@@ -36,6 +36,9 @@ void ts_stack_delete(Stack *self);
 // Get the stack's current number of versions.
 uint32_t ts_stack_version_count(const Stack *self);
 
+// Get the stack's current number of halted versions.
+uint32_t ts_stack_halted_version_count(Stack *self);
+
 // Get the state at the top of the given version of the stack. If the stack is
 // empty, this returns the initial state, 0.
 TSStateId ts_stack_state(const Stack *self, StackVersion version);
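Note on the pattern above: nearly every stack.c hunk swaps direct `.contents[i]` indexing for `array_get(&array, i)`, and the new `ts_stack_halted_version_count` is written the same way. Reading `array_get` as a bounds-asserting accessor that returns a pointer to the element explains the two call-site shapes (`array_get(...)` where a pointer is wanted, `*array_get(...)` where a value is wanted). The exact macro lives in lib/src/array.h and is not part of this patch; the following is only a minimal, self-contained sketch of the idea, with hypothetical names:

#include <assert.h>
#include <stdint.h>

// Hypothetical stand-in for the generic Array(...) types in lib/src/array.h.
typedef struct { int *contents; uint32_t size; } IntArray;

// Bounds-asserting accessor: an out-of-range index fails loudly instead of
// silently reading past the end of `contents`.
static inline int *int_array_get(IntArray *self, uint32_t index) {
  assert(index < self->size);
  return &self->contents[index];
}

// Counting elements that satisfy a condition, in the style of the new
// ts_stack_halted_version_count above.
static uint32_t int_array_count_equal(IntArray *self, int value) {
  uint32_t count = 0;
  for (uint32_t i = 0; i < self->size; i++) {
    if (*int_array_get(self, i) == value) count++;
  }
  return count;
}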
diff --git a/lib/src/subtree.c b/lib/src/subtree.c
index 42794de7..97d55c86 100644
--- a/lib/src/subtree.c
+++ b/lib/src/subtree.c
@@ -73,14 +73,14 @@ void ts_subtree_array_copy(SubtreeArray self, SubtreeArray *dest) {
     dest->contents = ts_calloc(self.capacity, sizeof(Subtree));
     memcpy(dest->contents, self.contents, self.size * sizeof(Subtree));
     for (uint32_t i = 0; i < self.size; i++) {
-      ts_subtree_retain(dest->contents[i]);
+      ts_subtree_retain(*array_get(dest, i));
     }
   }
 }
 
 void ts_subtree_array_clear(SubtreePool *pool, SubtreeArray *self) {
   for (uint32_t i = 0; i < self->size; i++) {
-    ts_subtree_release(pool, self->contents[i]);
+    ts_subtree_release(pool, *array_get(self, i));
   }
   array_clear(self);
 }
@@ -96,7 +96,7 @@ void ts_subtree_array_remove_trailing_extras(
 ) {
   array_clear(destination);
   while (self->size > 0) {
-    Subtree last = self->contents[self->size - 1];
+    Subtree last = *array_get(self, self->size - 1);
     if (ts_subtree_extra(last)) {
       self->size--;
       array_push(destination, last);
@@ -110,9 +110,9 @@ void ts_subtree_array_remove_trailing_extras(
 void ts_subtree_array_reverse(SubtreeArray *self) {
   for (uint32_t i = 0, limit = self->size / 2; i < limit; i++) {
     size_t reverse_index = self->size - 1 - i;
-    Subtree swap = self->contents[i];
-    self->contents[i] = self->contents[reverse_index];
-    self->contents[reverse_index] = swap;
+    Subtree swap = *array_get(self, i);
+    *array_get(self, i) = *array_get(self, reverse_index);
+    *array_get(self, reverse_index) = swap;
   }
 }
 
@@ -127,7 +127,7 @@ SubtreePool ts_subtree_pool_new(uint32_t capacity) {
 void ts_subtree_pool_delete(SubtreePool *self) {
   if (self->free_trees.contents) {
     for (unsigned i = 0; i < self->free_trees.size; i++) {
-      ts_free(self->free_trees.contents[i].ptr);
+      ts_free(array_get(&self->free_trees, i)->ptr);
     }
     array_delete(&self->free_trees);
   }
@@ -407,7 +407,12 @@ void ts_subtree_summarize_children(
     self.ptr->dynamic_precedence += ts_subtree_dynamic_precedence(child);
     self.ptr->visible_descendant_count += ts_subtree_visible_descendant_count(child);
 
-    if (alias_sequence && alias_sequence[structural_index] != 0 && !ts_subtree_extra(child)) {
+    if (
+      !ts_subtree_extra(child) &&
+      ts_subtree_symbol(child) != 0 &&
+      alias_sequence &&
+      alias_sequence[structural_index] != 0
+    ) {
       self.ptr->visible_descendant_count++;
       self.ptr->visible_child_count++;
       if (ts_language_symbol_metadata(language, alias_sequence[structural_index]).named) {
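The final subtree.c hunk is the only behavioral change in that file: a child slot now contributes an aliased visible child only when the child itself is a real, non-extra node with a nonzero symbol. Restated as a standalone predicate (a simplified sketch with plain parameters, not the actual subtree accessors used above):

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef uint16_t TSSymbol;

// Sketch of the reworked condition in ts_subtree_summarize_children: the
// aliased slot counts as a visible child only if all four checks pass.
static bool counts_as_visible_alias(
  bool child_is_extra,
  TSSymbol child_symbol,
  const TSSymbol *alias_sequence,
  uint32_t structural_index
) {
  return !child_is_extra
    && child_symbol != 0                       // skip empty (zero-symbol) subtrees
    && alias_sequence != NULL
    && alias_sequence[structural_index] != 0;  // this slot is actually aliased
}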
diff --git a/lib/src/tree_cursor.c b/lib/src/tree_cursor.c
index 888e7781..70ef5e39 100644
--- a/lib/src/tree_cursor.c
+++ b/lib/src/tree_cursor.c
@@ -16,11 +16,11 @@ typedef struct {
 // CursorChildIterator
 
 static inline bool ts_tree_cursor_is_entry_visible(const TreeCursor *self, uint32_t index) {
-  TreeCursorEntry *entry = &self->stack.contents[index];
+  TreeCursorEntry *entry = array_get(&self->stack, index);
   if (index == 0 || ts_subtree_visible(*entry->subtree)) {
     return true;
   } else if (!ts_subtree_extra(*entry->subtree)) {
-    TreeCursorEntry *parent_entry = &self->stack.contents[index - 1];
+    TreeCursorEntry *parent_entry = array_get(&self->stack, index - 1);
     return ts_language_alias_at(
       self->tree->language,
       parent_entry->subtree->ptr->production_id,
@@ -129,14 +129,17 @@ static inline bool ts_tree_cursor_child_iterator_previous(
   };
   *visible = ts_subtree_visible(*child);
   bool extra = ts_subtree_extra(*child);
-  if (!extra && self->alias_sequence) {
-    *visible |= self->alias_sequence[self->structural_child_index];
-    self->structural_child_index--;
-  }
 
   self->position = length_backtrack(self->position, ts_subtree_padding(*child));
   self->child_index--;
 
+  if (!extra && self->alias_sequence) {
+    *visible |= self->alias_sequence[self->structural_child_index];
+    if (self->structural_child_index > 0) {
+      self->structural_child_index--;
+    }
+  }
+
   // unsigned can underflow so compare it to child_count
   if (self->child_index < self->parent.ptr->child_count) {
     Subtree previous_child = ts_subtree_children(self->parent)[self->child_index];
@@ -304,8 +307,9 @@ int64_t ts_tree_cursor_goto_first_child_for_point(TSTreeCursor *self, TSPoint go
 }
 
 TreeCursorStep ts_tree_cursor_goto_sibling_internal(
-    TSTreeCursor *_self,
-    bool (*advance)(CursorChildIterator *, TreeCursorEntry *, bool *)) {
+  TSTreeCursor *_self,
+  bool (*advance)(CursorChildIterator *, TreeCursorEntry *, bool *)
+) {
   TreeCursor *self = (TreeCursor *)_self;
   uint32_t initial_size = self->stack.size;
@@ -370,7 +374,7 @@ TreeCursorStep ts_tree_cursor_goto_previous_sibling_internal(TSTreeCursor *_self
     return step;
 
   // restore position from the parent node
-  const TreeCursorEntry *parent = &self->stack.contents[self->stack.size - 2];
+  const TreeCursorEntry *parent = array_get(&self->stack, self->stack.size - 2);
   Length position = parent->position;
   uint32_t child_index = array_back(&self->stack)->child_index;
   const Subtree *children = ts_subtree_children((*(parent->subtree)));
@@ -421,7 +425,7 @@ void ts_tree_cursor_goto_descendant(
   // Ascend to the lowest ancestor that contains the goal node.
   for (;;) {
     uint32_t i = self->stack.size - 1;
-    TreeCursorEntry *entry = &self->stack.contents[i];
+    TreeCursorEntry *entry = array_get(&self->stack, i);
     uint32_t next_descendant_index =
       entry->descendant_index +
       (ts_tree_cursor_is_entry_visible(self, i) ? 1 : 0) +
@@ -475,7 +479,7 @@ TSNode ts_tree_cursor_current_node(const TSTreeCursor *_self) {
   bool is_extra = ts_subtree_extra(*last_entry->subtree);
   TSSymbol alias_symbol = is_extra ? 0 : self->root_alias_symbol;
   if (self->stack.size > 1 && !is_extra) {
-    TreeCursorEntry *parent_entry = &self->stack.contents[self->stack.size - 2];
+    TreeCursorEntry *parent_entry = array_get(&self->stack, self->stack.size - 2);
     alias_symbol = ts_language_alias_at(
       self->tree->language,
       parent_entry->subtree->ptr->production_id,
@@ -512,8 +516,8 @@ void ts_tree_cursor_current_status(
   // Walk up the tree, visiting the current node and its invisible ancestors,
   // because fields can refer to nodes through invisible *wrapper* nodes,
   for (unsigned i = self->stack.size - 1; i > 0; i--) {
-    TreeCursorEntry *entry = &self->stack.contents[i];
-    TreeCursorEntry *parent_entry = &self->stack.contents[i - 1];
+    TreeCursorEntry *entry = array_get(&self->stack, i);
+    TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1);
 
     const TSSymbol *alias_sequence = ts_language_alias_sequence(
       self->tree->language,
@@ -626,11 +630,11 @@ uint32_t ts_tree_cursor_current_depth(const TSTreeCursor *_self) {
 TSNode ts_tree_cursor_parent_node(const TSTreeCursor *_self) {
   const TreeCursor *self = (const TreeCursor *)_self;
   for (int i = (int)self->stack.size - 2; i >= 0; i--) {
-    TreeCursorEntry *entry = &self->stack.contents[i];
+    TreeCursorEntry *entry = array_get(&self->stack, i);
     bool is_visible = true;
     TSSymbol alias_symbol = 0;
     if (i > 0) {
-      TreeCursorEntry *parent_entry = &self->stack.contents[i - 1];
+      TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1);
       alias_symbol = ts_language_alias_at(
         self->tree->language,
         parent_entry->subtree->ptr->production_id,
@@ -655,8 +659,8 @@ TSFieldId ts_tree_cursor_current_field_id(const TSTreeCursor *_self) {
 
   // Walk up the tree, visiting the current node and its invisible ancestors.
   for (unsigned i = self->stack.size - 1; i > 0; i--) {
-    TreeCursorEntry *entry = &self->stack.contents[i];
-    TreeCursorEntry *parent_entry = &self->stack.contents[i - 1];
+    TreeCursorEntry *entry = array_get(&self->stack, i);
+    TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1);
 
     // Stop walking up when another visible node is found.
     if (
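Both tree_cursor.c guards above -- the `structural_child_index > 0` check and the `// unsigned can underflow` comparison against `child_count` -- exist because these indexes are unsigned, so a test like `index >= 0` can never be false. A small self-contained illustration of why the comparison is written that way (the values here are made up):

#include <stdint.h>
#include <stdio.h>

int main(void) {
  uint32_t child_count = 3;
  uint32_t child_index = 0;

  child_index--;                 // unsigned wrap-around: now UINT32_MAX
  printf("%u\n", child_index);   // prints 4294967295

  // The guard used in the diff: a wrapped index is necessarily >= child_count,
  // so "index < child_count" doubles as the "walked past the front?" test.
  if (child_index < child_count) {
    printf("still a valid previous child\n");
  } else {
    printf("iteration finished\n");
  }
  return 0;
}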
diff --git a/lib/src/wasm_store.c b/lib/src/wasm_store.c
index d5fff257..f0c0970d 100644
--- a/lib/src/wasm_store.c
+++ b/lib/src/wasm_store.c
@@ -754,6 +754,7 @@ TSWasmStore *ts_wasm_store_new(TSWasmEngine *engine, TSWasmError *wasm_error) {
   wasmtime_val_t stack_pointer_value = WASM_I32_VAL(0);
   wasmtime_global_t stack_pointer_global;
   error = wasmtime_global_new(context, var_i32_type, &stack_pointer_value, &stack_pointer_global);
+  wasm_globaltype_delete(var_i32_type);
   ts_assert(!error);
 
   *self = (TSWasmStore) {
@@ -946,7 +947,7 @@ void ts_wasm_store_delete(TSWasmStore *self) {
   wasmtime_store_delete(self->store);
   wasm_engine_delete(self->engine);
   for (unsigned i = 0; i < self->language_instances.size; i++) {
-    LanguageWasmInstance *instance = &self->language_instances.contents[i];
+    LanguageWasmInstance *instance = array_get(&self->language_instances, i);
     language_id_delete(instance->language_id);
   }
   array_delete(&self->language_instances);
@@ -956,7 +957,7 @@ size_t ts_wasm_store_language_count(const TSWasmStore *self) {
   size_t result = 0;
   for (unsigned i = 0; i < self->language_instances.size; i++) {
-    const WasmLanguageId *id = self->language_instances.contents[i].language_id;
+    const WasmLanguageId *id = array_get(&self->language_instances, i)->language_id;
     if (!id->is_language_deleted) {
       result++;
     }
@@ -1220,6 +1221,10 @@ const TSLanguage *ts_wasm_store_load_language(
   const uint8_t *memory = wasmtime_memory_data(context, &self->memory);
   memcpy(&wasm_language, &memory[language_address], sizeof(LanguageInWasmMemory));
 
+  bool has_supertypes =
+    wasm_language.abi_version > LANGUAGE_VERSION_WITH_RESERVED_WORDS &&
+    wasm_language.supertype_count > 0;
+
   int32_t addresses[] = {
     wasm_language.parse_table,
     wasm_language.small_parse_table,
@@ -1239,9 +1244,9 @@ const TSLanguage *ts_wasm_store_load_language(
     wasm_language.primary_state_ids,
     wasm_language.name,
     wasm_language.reserved_words,
-    wasm_language.supertype_symbols,
-    wasm_language.supertype_map_entries,
-    wasm_language.supertype_map_slices,
+    has_supertypes ? wasm_language.supertype_symbols : 0,
+    has_supertypes ? wasm_language.supertype_map_entries : 0,
+    has_supertypes ? wasm_language.supertype_map_slices : 0,
     wasm_language.external_token_count > 0 ? wasm_language.external_scanner.states : 0,
     wasm_language.external_token_count > 0 ? wasm_language.external_scanner.symbol_map : 0,
     wasm_language.external_token_count > 0 ? wasm_language.external_scanner.create : 0,
@@ -1331,7 +1336,7 @@ const TSLanguage *ts_wasm_store_load_language(
     );
   }
 
-  if (language->supertype_count > 0) {
+  if (has_supertypes) {
     language->supertype_symbols = copy(
       &memory[wasm_language.supertype_symbols],
       wasm_language.supertype_count * sizeof(TSSymbol)
@@ -1446,7 +1451,7 @@ const TSLanguage *ts_wasm_store_load_language(
 
   // Clear out any instances of languages that have been deleted.
   for (unsigned i = 0; i < self->language_instances.size; i++) {
-    WasmLanguageId *id = self->language_instances.contents[i].language_id;
+    WasmLanguageId *id = array_get(&self->language_instances, i)->language_id;
     if (id->is_language_deleted) {
       language_id_delete(id);
       array_erase(&self->language_instances, i);
@@ -1487,7 +1492,7 @@ bool ts_wasm_store_add_language(
   // instances of languages that have been deleted.
   bool exists = false;
   for (unsigned i = 0; i < self->language_instances.size; i++) {
-    WasmLanguageId *id = self->language_instances.contents[i].language_id;
+    WasmLanguageId *id = array_get(&self->language_instances, i)->language_id;
     if (id->is_language_deleted) {
       language_id_delete(id);
       array_erase(&self->language_instances, i);
@@ -1558,7 +1563,7 @@ bool ts_wasm_store_start(TSWasmStore *self, TSLexer *lexer, const TSLanguage *la
   uint32_t instance_index;
   if (!ts_wasm_store_add_language(self, language, &instance_index)) return false;
   self->current_lexer = lexer;
-  self->current_instance = &self->language_instances.contents[instance_index];
+  self->current_instance = array_get(&self->language_instances, instance_index);
   self->has_error = false;
   ts_wasm_store_reset_heap(self);
   return true;
@@ -1609,13 +1614,22 @@ static void ts_wasm_store__call(
   }
 }
 
+// The data fields of TSLexer, without the function pointers.
+//
+// This portion of the struct needs to be copied in and out
+// of wasm memory before and after calling a scan function.
+typedef struct {
+  int32_t lookahead;
+  TSSymbol result_symbol;
+} TSLexerDataPrefix;
+
 static bool ts_wasm_store__call_lex_function(TSWasmStore *self, unsigned function_index, TSStateId state) {
   wasmtime_context_t *context = wasmtime_store_context(self->store);
   uint8_t *memory_data = wasmtime_memory_data(context, &self->memory);
   memcpy(
     &memory_data[self->lexer_address],
-    &self->current_lexer->lookahead,
-    sizeof(self->current_lexer->lookahead)
+    self->current_lexer,
+    sizeof(TSLexerDataPrefix)
   );
 
   wasmtime_val_raw_t args[2] = {
@@ -1627,9 +1641,9 @@ static bool ts_wasm_store__call_lex_function(TSWasmStore *self, unsigned functio
   bool result = args[0].i32;
 
   memcpy(
-    &self->current_lexer->lookahead,
+    self->current_lexer,
     &memory_data[self->lexer_address],
-    sizeof(self->current_lexer->lookahead) + sizeof(self->current_lexer->result_symbol)
+    sizeof(TSLexerDataPrefix)
   );
   return result;
 }
@@ -1674,8 +1688,8 @@ bool ts_wasm_store_call_scanner_scan(
 
   memcpy(
     &memory_data[self->lexer_address],
-    &self->current_lexer->lookahead,
-    sizeof(self->current_lexer->lookahead)
+    self->current_lexer,
+    sizeof(TSLexerDataPrefix)
   );
 
   uint32_t valid_tokens_address =
@@ -1690,9 +1704,9 @@ bool ts_wasm_store_call_scanner_scan(
   if (self->has_error) return false;
 
   memcpy(
-    &self->current_lexer->lookahead,
+    self->current_lexer,
     &memory_data[self->lexer_address],
-    sizeof(self->current_lexer->lookahead) + sizeof(self->current_lexer->result_symbol)
+    sizeof(TSLexerDataPrefix)
   );
   return args[0].i32;
 }
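The wasm_store.c hunks above replace the ad-hoc `sizeof(lookahead)`-based copy sizes with a named `TSLexerDataPrefix`, and copy from/to the lexer struct itself. The general shape -- mirroring only the plain-data head of a host struct into guest linear memory and back, while keeping host-only function pointers out of the round trip -- can be sketched as follows (simplified stand-in types and names, not the actual wasm_store.c code):

#include <stdbool.h>
#include <stdint.h>
#include <string.h>

typedef uint16_t TSSymbol;

// Host-side lexer: data fields first, host-only function pointers after.
// The field order is what makes the prefix copy below valid.
typedef struct {
  int32_t lookahead;
  TSSymbol result_symbol;
  void (*advance)(void *payload, bool skip);  // never copied into wasm memory
} HostLexer;

// Mirrors the diff's TSLexerDataPrefix: just the leading data fields.
typedef struct {
  int32_t lookahead;
  TSSymbol result_symbol;
} LexerDataPrefix;

// Copy the data prefix into guest memory before a call...
static void lexer_sync_in(uint8_t *wasm_memory, uint32_t lexer_address, const HostLexer *lexer) {
  memcpy(&wasm_memory[lexer_address], lexer, sizeof(LexerDataPrefix));
}

// ...and back out afterwards, leaving the function pointers untouched.
static void lexer_sync_out(uint8_t *wasm_memory, uint32_t lexer_address, HostLexer *lexer) {
  memcpy(lexer, &wasm_memory[lexer_address], sizeof(LexerDataPrefix));
}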
diff --git a/test/fixtures/test_grammars/aliases_in_root/corpus.txt b/test/fixtures/test_grammars/aliases_in_root/corpus.txt
new file mode 100644
index 00000000..ed78852b
--- /dev/null
+++ b/test/fixtures/test_grammars/aliases_in_root/corpus.txt
@@ -0,0 +1,13 @@
+======================================
+Aliases within the root node
+======================================
+
+# this is a comment
+foo foo
+
+---
+
+(document
+  (comment)
+  (bar)
+  (foo))
diff --git a/test/fixtures/test_grammars/aliases_in_root/grammar.js b/test/fixtures/test_grammars/aliases_in_root/grammar.js
new file mode 100644
index 00000000..02d61646
--- /dev/null
+++ b/test/fixtures/test_grammars/aliases_in_root/grammar.js
@@ -0,0 +1,19 @@
+module.exports = grammar({
+  name: 'aliases_in_root',
+
+  extras: $ => [
+    /\s/,
+    $.comment,
+  ],
+
+  rules: {
+    document: $ => seq(
+      alias($.foo, $.bar),
+      $.foo,
+    ),
+
+    foo: $ => "foo",
+
+    comment: $ => /#.*/
+  }
+});
diff --git a/test/fixtures/test_grammars/anonymous_error/corpus.txt b/test/fixtures/test_grammars/anonymous_error/corpus.txt
new file mode 100644
index 00000000..f1dd3d34
--- /dev/null
+++ b/test/fixtures/test_grammars/anonymous_error/corpus.txt
@@ -0,0 +1,9 @@
+======================
+A simple error literal
+======================
+
+ERROR
+
+---
+
+(document)
diff --git a/test/fixtures/test_grammars/anonymous_error/grammar.js b/test/fixtures/test_grammars/anonymous_error/grammar.js
new file mode 100644
index 00000000..c06d1bd2
--- /dev/null
+++ b/test/fixtures/test_grammars/anonymous_error/grammar.js
@@ -0,0 +1,6 @@
+module.exports = grammar({
+  name: 'anonymous_error',
+  rules: {
+    document: $ => repeat(choice('ok', 'ERROR')),
+  }
+});
diff --git a/test/fixtures/test_grammars/epsilon_external_extra_tokens/corpus.txt b/test/fixtures/test_grammars/epsilon_external_extra_tokens/corpus.txt
new file mode 100644
index 00000000..776db2ec
--- /dev/null
+++ b/test/fixtures/test_grammars/epsilon_external_extra_tokens/corpus.txt
@@ -0,0 +1,9 @@
+==========================
+A document
+==========================
+
+a b
+
+---
+
+(document)
diff --git a/test/fixtures/test_grammars/epsilon_external_extra_tokens/grammar.js b/test/fixtures/test_grammars/epsilon_external_extra_tokens/grammar.js
new file mode 100644
index 00000000..b808de62
--- /dev/null
+++ b/test/fixtures/test_grammars/epsilon_external_extra_tokens/grammar.js
@@ -0,0 +1,11 @@
+module.exports = grammar({
+  name: 'epsilon_external_extra_tokens',
+
+  extras: $ => [/\s/, $.comment],
+
+  externals: $ => [$.comment],
+
+  rules: {
+    document: $ => seq('a', 'b'),
+  }
+});
diff --git a/test/fixtures/test_grammars/epsilon_external_extra_tokens/scanner.c b/test/fixtures/test_grammars/epsilon_external_extra_tokens/scanner.c
new file mode 100644
index 00000000..c8949d1d
--- /dev/null
+++ b/test/fixtures/test_grammars/epsilon_external_extra_tokens/scanner.c
@@ -0,0 +1,33 @@
+#include "tree_sitter/parser.h"
+
+enum TokenType {
+  COMMENT
+};
+
+void *tree_sitter_epsilon_external_extra_tokens_external_scanner_create(void) {
+  return NULL;
+}
+
+bool tree_sitter_epsilon_external_extra_tokens_external_scanner_scan(
+  void *payload,
+  TSLexer *lexer,
+  const bool *valid_symbols
+) {
+  lexer->result_symbol = COMMENT;
+  return true;
+}
+
+unsigned tree_sitter_epsilon_external_extra_tokens_external_scanner_serialize(
+  void *payload,
+  char *buffer
+) {
+  return 0;
+}
+
+void tree_sitter_epsilon_external_extra_tokens_external_scanner_deserialize(
+  void *payload,
+  const char *buffer,
+  unsigned length
+) {}
+
+void tree_sitter_epsilon_external_extra_tokens_external_scanner_destroy(void *payload) {}
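The scanner.c fixture above is deliberately degenerate: it reports a `comment` token without ever advancing the lexer, producing a zero-width (epsilon) external extra token, which is the situation this test grammar exercises. For contrast, a typical external scanner consumes input before returning; here is a hedged sketch with a hypothetical grammar name, not part of the fixtures in this patch:

#include "tree_sitter/parser.h"

enum TokenType { COMMENT };

// A non-epsilon counterpart: only report a comment after actually consuming
// a '#' and the rest of the line, so the resulting token has nonzero width.
bool tree_sitter_example_external_scanner_scan(
  void *payload,
  TSLexer *lexer,
  const bool *valid_symbols
) {
  if (valid_symbols[COMMENT] && lexer->lookahead == '#') {
    while (lexer->lookahead != '\n' && !lexer->eof(lexer)) {
      lexer->advance(lexer, false);
    }
    lexer->result_symbol = COMMENT;
    return true;
  }
  return false;
}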
diff --git a/test/fixtures/test_grammars/extra_non_terminals/corpus.txt b/test/fixtures/test_grammars/extra_non_terminals/corpus.txt
index 52b7d864..b58fa68b 100644
--- a/test/fixtures/test_grammars/extra_non_terminals/corpus.txt
+++ b/test/fixtures/test_grammars/extra_non_terminals/corpus.txt
@@ -12,11 +12,12 @@ a b c d
 Extras
 ==============
 
-a (one) b (two) (three) c d
+a (one) b (two) (three) c d // e
 
 ---
 
 (module
-  (comment)
-  (comment)
-  (comment))
+  (comment (paren_comment))
+  (comment (paren_comment))
+  (comment (paren_comment))
+  (comment (line_comment)))
diff --git a/test/fixtures/test_grammars/extra_non_terminals/grammar.js b/test/fixtures/test_grammars/extra_non_terminals/grammar.js
index d13cd68a..e66bc9ac 100644
--- a/test/fixtures/test_grammars/extra_non_terminals/grammar.js
+++ b/test/fixtures/test_grammars/extra_non_terminals/grammar.js
@@ -9,7 +9,12 @@ module.exports = grammar({
   ],
 
   rules: {
-    module: $ => seq('a', 'b', 'c', 'd'),
-    comment: $ => seq('(', repeat(/[a-z]+/), ')'),
+    module: _ => seq('a', 'b', 'c', 'd'),
+
+    comment: $ => choice($.paren_comment, $.line_comment),
+
+    paren_comment: _ => token(seq('(', repeat(/[a-z]+/), ')')),
+
+    line_comment: _ => token(seq('//', /.*/)),
   }
 })
diff --git a/test/fixtures/test_grammars/indirect_recursion_in_transitions/expected_error.txt b/test/fixtures/test_grammars/indirect_recursion_in_transitions/expected_error.txt
new file mode 100644
index 00000000..4f244a6c
--- /dev/null
+++ b/test/fixtures/test_grammars/indirect_recursion_in_transitions/expected_error.txt
@@ -0,0 +1 @@
+Grammar contains an indirectly recursive rule: type_expression -> _expression -> identifier_expression -> type_expression
\ No newline at end of file
diff --git a/test/fixtures/test_grammars/indirect_recursion_in_transitions/grammar.js b/test/fixtures/test_grammars/indirect_recursion_in_transitions/grammar.js
new file mode 100644
index 00000000..65ff7b45
--- /dev/null
+++ b/test/fixtures/test_grammars/indirect_recursion_in_transitions/grammar.js
@@ -0,0 +1,16 @@
+module.exports = grammar({
+  name: 'indirect_recursive_in_single_symbol_transitions',
+  rules: {
+    source_file: $ => repeat($._statement),
+
+    _statement: $ => seq($.initialization_part, $.type_expression),
+
+    type_expression: $ => choice('int', $._expression),
+
+    initialization_part: $ => seq('=', $._expression),
+
+    _expression: $ => choice($.identifier_expression, $.type_expression),
+
+    identifier_expression: $ => choice(/[a-zA-Z_][a-zA-Z0-9_]*/, $.type_expression),
+  }
+});
diff --git a/xtask/src/bump.rs b/xtask/src/bump.rs
index cd7dc7eb..fbef5488 100644
--- a/xtask/src/bump.rs
+++ b/xtask/src/bump.rs
@@ -127,6 +127,11 @@ pub fn run(args: BumpVersion) -> Result<()> {
         }
         next_version
     };
+    if next_version <= current_version {
+        return Err(anyhow!(format!(
+            "Next version {next_version} must be greater than current version {current_version}"
+        )));
+    }
 
     println!("Bumping from {current_version} to {next_version}");
     update_crates(&current_version, &next_version)?;
diff --git a/xtask/src/clippy.rs b/xtask/src/clippy.rs
index c8d33348..664884f5 100644
--- a/xtask/src/clippy.rs
+++ b/xtask/src/clippy.rs
@@ -6,7 +6,7 @@ use crate::{bail_on_err, Clippy};
 
 pub fn run(args: &Clippy) -> Result<()> {
     let mut clippy_command = Command::new("cargo");
-    clippy_command.arg("+nightly").arg("clippy");
+    clippy_command.arg("clippy");
 
     if let Some(package) = args.package.as_ref() {
         clippy_command.args(["--package", package]);
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index b02136f5..06e11b30 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -144,7 +144,7 @@ struct Test {
     iterations: Option,
     /// Set the seed used to control random behavior.
     #[arg(long, short)]
-    seed: Option,
+    seed: Option,
     /// Print parsing log to stderr.
     #[arg(long, short)]
     debug: bool,
diff --git a/xtask/src/test.rs b/xtask/src/test.rs
index 6378f766..a095de07 100644
--- a/xtask/src/test.rs
+++ b/xtask/src/test.rs
@@ -65,13 +65,17 @@ pub fn run(args: &Test) -> Result<()> {
     }
 
     if args.g {
-        let cargo_cmd = Command::new("cargo")
+        let mut cargo_cmd = Command::new("cargo");
+        cargo_cmd
             .arg("test")
             .arg(test_flags)
             .arg("--no-run")
-            .arg("--message-format=json")
-            .stdout(Stdio::piped())
-            .spawn()?;
+            .arg("--message-format=json");
+
+        #[cfg(target_os = "windows")]
+        cargo_cmd.arg("--").arg("--test-threads=1");
+
+        let cargo_cmd = cargo_cmd.stdout(Stdio::piped()).spawn()?;
 
         let jq_cmd = Command::new("jq")
             .arg("-rs")
@@ -97,8 +101,15 @@ pub fn run(args: &Test) -> Result<()> {
         cargo_cmd.arg(test_flags);
     }
     cargo_cmd.args(&args.args);
+
+    #[cfg(target_os = "windows")]
+    cargo_cmd.arg("--").arg("--test-threads=1");
+
     if args.nocapture {
-        cargo_cmd.arg("--").arg("--nocapture");
+        #[cfg(not(target_os = "windows"))]
+        cargo_cmd.arg("--");
+
+        cargo_cmd.arg("--nocapture");
     }
     bail_on_err(
         &cargo_cmd.spawn()?.wait_with_output()?,