Merge pull request #2840 from tree-sitter/language-reference-count
Introduce APIs for managing the lifetimes of languages, allow WASM languages to be deleted
commit 1d8975319c
38 changed files with 724 additions and 483 deletions
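The thrust of the change, visible throughout the diff below, is that a Rust `Language` stops being a `Copy` raw-pointer wrapper and becomes a cloneable handle whose last drop releases the underlying language: `Parser::set_language` and `Query::new` now borrow `&Language`, and call sites that store a language clone the handle instead. A minimal usage sketch under those assumptions (illustration only, not code from this commit):

use tree_sitter::{Language, Parser, Query};

// `language` may come from a native grammar crate or from WasmStore::load_language.
fn parse_and_query(language: &Language, source: &str) {
    let mut parser = Parser::new();
    // set_language now borrows the language instead of taking it by value.
    parser.set_language(language).unwrap();
    let tree = parser.parse(source, None).unwrap();

    // Queries borrow the language the same way.
    let query = Query::new(language, "(_) @node").unwrap();
    println!("{} pattern(s); root: {}", query.pattern_count(), tree.root_node().to_sexp());

    // Handles are cheap to clone; dropping the last one releases the language,
    // which is what makes deletable WASM languages possible.
    let extra = language.clone();
    drop(extra);
}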
.github/workflows/build.yml (vendored, 2 changes)
@@ -224,7 +224,7 @@ jobs:
       - name: Run main tests
         if: ${{ inputs.run-tests && !matrix.cli-only }} # Can't natively run CLI on Github runner's host
-        run: $BUILD_CMD test --target=${{ matrix.target }}
+        run: $BUILD_CMD test --target=${{ matrix.target }} --features=${CLI_FEATURES}

       - name: Run wasm tests
         if: ${{ inputs.run-tests && !matrix.cli-only && !matrix.use-cross }} # See comment for the "Build wasm library" step
Cargo.lock (generated, 111 changes)
@@ -213,16 +213,16 @@ checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa"

[[package]]
name = "cranelift-bforest"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cranelift-entity",
]

[[package]]
name = "cranelift-codegen"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"bumpalo",
"cranelift-bforest",

@@ -241,29 +241,29 @@ dependencies = [

[[package]]
name = "cranelift-codegen-meta"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cranelift-codegen-shared",
]

[[package]]
name = "cranelift-codegen-shared"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"

[[package]]
name = "cranelift-control"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"arbitrary",
]

[[package]]
name = "cranelift-entity"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"serde",
"serde_derive",

@@ -271,8 +271,8 @@ dependencies = [

[[package]]
name = "cranelift-frontend"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cranelift-codegen",
"log",

@@ -282,13 +282,13 @@ dependencies = [

[[package]]
name = "cranelift-isle"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"

[[package]]
name = "cranelift-native"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cranelift-codegen",
"libc",

@@ -297,8 +297,8 @@ dependencies = [

[[package]]
name = "cranelift-wasm"
-version = "0.102.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "0.103.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cranelift-codegen",
"cranelift-entity",

@@ -1470,18 +1470,18 @@ checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"

[[package]]
name = "wasm-encoder"
-version = "0.35.0"
+version = "0.38.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ca90ba1b5b0a70d3d49473c5579951f3bddc78d47b59256d2f9d4922b150aca"
+checksum = "0ad2b51884de9c7f4fe2fd1043fccb8dcad4b1e29558146ee57a144d15779f3f"
dependencies = [
"leb128",
]

[[package]]
name = "wasmparser"
-version = "0.115.0"
+version = "0.118.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e06c0641a4add879ba71ccb3a1e4278fd546f76f1eafb21d8f7b07733b547cd5"
+checksum = "95ee9723b928e735d53000dec9eae7b07a60e490c85ab54abb66659fc61bfcd9"
dependencies = [
"indexmap",
"semver",

@@ -1489,8 +1489,8 @@ dependencies = [

[[package]]
name = "wasmtime"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"anyhow",
"bincode",

@@ -1502,7 +1502,6 @@ dependencies = [
"object",
"once_cell",
"paste",
"psm",
"serde",
"serde_derive",
"serde_json",

@@ -1517,16 +1516,16 @@ dependencies = [

[[package]]
name = "wasmtime-asm-macros"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cfg-if",
]

[[package]]
name = "wasmtime-c-api-impl"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"anyhow",
"log",

@@ -1539,7 +1538,7 @@ dependencies = [
[[package]]
name = "wasmtime-c-api-macros"
version = "0.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"proc-macro2",
"quote",

@@ -1547,8 +1546,8 @@ dependencies = [

[[package]]
name = "wasmtime-cranelift"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"anyhow",
"cfg-if",

@@ -1571,8 +1570,8 @@ dependencies = [

[[package]]
name = "wasmtime-cranelift-shared"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"anyhow",
"cranelift-codegen",

@@ -1586,8 +1585,8 @@ dependencies = [

[[package]]
name = "wasmtime-environ"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"anyhow",
"cranelift-entity",

@@ -1605,8 +1604,8 @@ dependencies = [

[[package]]
name = "wasmtime-jit"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"anyhow",
"bincode",

@@ -1624,19 +1623,10 @@ dependencies = [
"windows-sys 0.48.0",
]

-[[package]]
-name = "wasmtime-jit-debug"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
-dependencies = [
-"once_cell",
-"wasmtime-versioned-export-macros",
-]
-
[[package]]
name = "wasmtime-jit-icache-coherence"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cfg-if",
"libc",

@@ -1645,8 +1635,8 @@ dependencies = [

[[package]]
name = "wasmtime-runtime"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"anyhow",
"cc",

@@ -1658,13 +1648,12 @@ dependencies = [
"memfd",
"memoffset",
"paste",
-"rand",
+"psm",
"rustix",
"sptr",
"wasm-encoder",
"wasmtime-asm-macros",
"wasmtime-environ",
-"wasmtime-jit-debug",
"wasmtime-versioned-export-macros",
"wasmtime-wmemcheck",
"windows-sys 0.48.0",

@@ -1672,8 +1661,8 @@ dependencies = [

[[package]]
name = "wasmtime-types"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"cranelift-entity",
"serde",

@@ -1684,8 +1673,8 @@ dependencies = [

[[package]]
name = "wasmtime-versioned-export-macros"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"
dependencies = [
"proc-macro2",
"quote",

@@ -1694,8 +1683,8 @@ dependencies = [

[[package]]
name = "wasmtime-wmemcheck"
-version = "15.0.0"
-source = "git+https://github.com/bytecodealliance/wasmtime?rev=fa6fcd946b8f6d60c2d191a1b14b9399e261a76d#fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+version = "16.0.0"
+source = "git+https://github.com/bytecodealliance/wasmtime?rev=v16.0.0#6613acd1e4817957a4a7745125ef063b43c273a7"

[[package]]
name = "web-sys"
@@ -92,7 +92,7 @@ fn main() {
eprintln!("\nLanguage: {}", language_name);
let language = get_language(language_path);
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

eprintln!(" Constructing Queries");
for path in query_paths {

@@ -103,7 +103,7 @@ fn main() {
}

parse(&path, max_path_length, |source| {
-Query::new(language, str::from_utf8(source).unwrap())
+Query::new(&language, str::from_utf8(source).unwrap())
.with_context(|| format!("Query file path: {path:?}"))
.expect("Failed to parse query");
});

@@ -324,7 +324,7 @@ impl Loader {
let src_path = path.join("src");
self.load_language_at_path(&src_path, &src_path)
})
-.map(|l| *l)
+.cloned()
}

pub fn load_language_at_path(&self, src_path: &Path, header_path: &Path) -> Result<Language> {

@@ -414,7 +414,7 @@ fn run() -> Result<()> {
let language = languages
.first()
.ok_or_else(|| anyhow!("No language found"))?;
-parser.set_language(*language)?;
+parser.set_language(&language)?;

let test_dir = current_dir.join("test");

@@ -435,7 +435,7 @@ fn run() -> Result<()> {
}

// Check that all of the queries are valid.
-test::check_queries_at_path(*language, &current_dir.join("queries"))?;
+test::check_queries_at_path(language.clone(), &current_dir.join("queries"))?;

// Run the syntax highlighting tests.
let test_highlight_dir = test_dir.join("highlight");

@@ -527,11 +527,11 @@ fn run() -> Result<()> {
let language =
loader.select_language(path, &current_dir, matches.value_of("scope"))?;
parser
-.set_language(language)
+.set_language(&language)
.context("incompatible language")?;

let opts = ParseFileOptions {
-language,
+language: language.clone(),
path,
edits: &edits,
max_path_length,

@@ -636,10 +636,10 @@ fn run() -> Result<()> {

let cancellation_flag = util::cancel_on_signal();

-let mut lang = None;
+let mut language = None;
if let Some(scope) = matches.value_of("scope") {
-lang = loader.language_configuration_for_scope(scope)?;
-if lang.is_none() {
+language = loader.language_configuration_for_scope(scope)?;
+if language.is_none() {
return Err(anyhow!("Unknown scope '{}'", scope));
}
}

@@ -655,7 +655,7 @@ fn run() -> Result<()> {

for path in paths {
let path = Path::new(&path);
-let (language, language_config) = match lang {
+let (language, language_config) = match language.clone() {
Some(v) => v,
None => match loader.language_configuration_for_file_name(path)? {
Some(v) => v,
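One pattern recurs in the CLI changes above: code that only needs a grammar temporarily now borrows it, while values that are stored (such as the parse options) keep their own cloned handle. A tiny illustration of that split, using a hypothetical options struct rather than the CLI's real one:

use tree_sitter::{Language, Parser};

// Hypothetical stand-in for a struct like ParseFileOptions that owns its language.
struct Options {
    language: Language,
}

fn configure(parser: &mut Parser, language: &Language) -> Options {
    // Temporary use: borrow the handle.
    parser.set_language(language).unwrap();
    // Long-lived storage: clone the handle into the owned struct.
    Options {
        language: language.clone(),
    }
}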
@@ -54,7 +54,7 @@ pub struct ParseFileOptions<'a> {

pub fn parse_file_at_path(parser: &mut Parser, opts: ParseFileOptions) -> Result<bool> {
let mut _log_session = None;
-parser.set_language(opts.language)?;
+parser.set_language(&opts.language)?;
let mut source_code = fs::read(opts.path)
.with_context(|| format!("Error reading source file {:?}", opts.path))?;

@@ -25,7 +25,7 @@ pub fn query_files_at_paths(

let query_source = fs::read_to_string(query_path)
.with_context(|| format!("Error reading query file {:?}", query_path))?;
-let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;
+let query = Query::new(&language, &query_source).with_context(|| "Query compilation failed")?;

let mut query_cursor = QueryCursor::new();
if let Some(range) = byte_range {

@@ -36,7 +36,7 @@ pub fn query_files_at_paths(
}

let mut parser = Parser::new();
-parser.set_language(language)?;
+parser.set_language(&language)?;

for path in paths {
let mut results = Vec::new();

@@ -115,7 +115,7 @@ pub fn query_files_at_paths(
)?;
}
if should_test {
-query_testing::assert_expected_captures(results, path, &mut parser, language)?
+query_testing::assert_expected_captures(results, path, &mut parser, language.clone())?
}
if print_time {
writeln!(&mut stdout, "{:?}", start.elapsed())?;

@@ -45,7 +45,7 @@ pub fn parse_position_comments(

// Parse the code.
parser.set_included_ranges(&[]).unwrap();
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
let tree = parser.parse(source, None).unwrap();

// Walk the tree, finding comment nodes that contain assertions.

@@ -29,7 +29,7 @@ pub fn generate_tags(

for path in paths {
let path = Path::new(&path);
-let (language, language_config) = match lang {
+let (language, language_config) = match lang.clone() {
Some(v) => v,
None => match loader.language_configuration_for_file_name(path)? {
Some(v) => v,

@@ -141,7 +141,7 @@ pub fn check_queries_at_path(language: Language, path: &Path) -> Result<()> {
let filepath = entry.file_name().to_str().unwrap_or("");
let content = fs::read_to_string(entry.path())
.with_context(|| format!("Error reading query file {:?}", filepath))?;
-Query::new(language, &content)
+Query::new(&language, &content)
.with_context(|| format!("Error in query file {:?}", filepath))?;
}
}

@@ -197,8 +197,11 @@ pub fn test_highlight(
// Highlight the file, and parse out all of the highlighting assertions.
let highlight_names = loader.highlight_names();
let highlights = get_highlight_positions(loader, highlighter, highlight_config, source)?;
-let assertions =
-parse_position_comments(highlighter.parser(), highlight_config.language, source)?;
+let assertions = parse_position_comments(
+highlighter.parser(),
+highlight_config.language.clone(),
+source,
+)?;

iterate_assertions(&assertions, &highlights, &highlight_names)
}

@@ -88,7 +88,8 @@ pub fn test_tag(
source: &[u8],
) -> Result<usize> {
let tags = get_tag_positions(tags_context, tags_config, source)?;
-let assertions = parse_position_comments(tags_context.parser(), tags_config.language, source)?;
+let assertions =
+parse_position_comments(tags_context.parser(), tags_config.language.clone(), source)?;

// Iterate through all of the assertions, checking against the actual tags.
let mut i = 0;
@@ -10,7 +10,7 @@ fn test_node_in_fut() {
let (ret, pended) = tokio_like_spawn(async {
let mut parser = Parser::new();
let language = get_language("bash");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("#", None).unwrap();

@@ -64,7 +64,7 @@ fn test_node_and_cursor_ref_in_fut() {
let (_, pended) = tokio_like_spawn(async {
let mut parser = Parser::new();
let language = get_language("bash");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("#", None).unwrap();

@@ -103,7 +103,7 @@ fn test_node_and_cursor_ref_in_fut_with_fut_fabrics() {
let (_, pended) = tokio_like_spawn(async {
let mut parser = Parser::new();
let language = get_language("bash");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("#", None).unwrap();

@@ -141,7 +141,7 @@ fn test_node_and_cursor_ref_in_fut_with_inner_spawns() {
let (ret, pended) = tokio_like_spawn(async {
let mut parser = Parser::new();
let language = get_language("bash");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("#", None).unwrap();

@@ -160,7 +160,7 @@ fn test_language_corpus(
let passed = allocations::record(|| {
let mut log_session = None;
let mut parser = get_parser(&mut log_session, "log.html");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
set_included_ranges(&mut parser, &test.input, test.template_delimiters);

let tree = parser.parse(&test.input, None).unwrap();

@@ -186,7 +186,7 @@ fn test_language_corpus(
}

let mut parser = Parser::new();
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
let tree = parser.parse(&test.input, None).unwrap();
drop(parser);

@@ -196,7 +196,7 @@ fn test_language_corpus(
let mut rand = Rand::new(seed);
let mut log_session = None;
let mut parser = get_parser(&mut log_session, "log.html");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
let mut tree = tree.clone();
let mut input = test.input.clone();

@@ -378,7 +378,7 @@ fn test_feature_corpus_files() {
let passed = allocations::record(|| {
let mut log_session = None;
let mut parser = get_parser(&mut log_session, "log.html");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
let tree = parser.parse(&test.input, None).unwrap();
let mut actual_output = tree.root_node().to_sexp();
if !test.has_fields {
@@ -14,7 +14,7 @@ use tree_sitter::Query;
#[test]
fn issue_2162_out_of_bound() {
let language = get_language("java");
-assert!(Query::new(language, "(package_declaration _ (_) @name _)").is_ok());
+assert!(Query::new(&language, "(package_declaration _ (_) @name _)").is_ok());
}

#[test]

@@ -32,9 +32,9 @@ fn issue_2107_first_child_group_anchor_had_no_effect() {
)
)
"#};
-let query = Query::new(language, query).unwrap();
+let query = Query::new(&language, query).unwrap();
assert_query_matches(
-language,
+&language,
&query,
source_code,
&[(0, vec![("constant", "int a")])],

@@ -308,13 +308,13 @@ fn compare_depth_first(a: Node, b: Node) -> Ordering {
}

pub fn assert_query_matches(
-language: Language,
+language: &Language,
query: &Query,
source: &str,
expected: &[(usize, Vec<(&str, &str)>)],
) {
let mut parser = Parser::new();
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
let tree = parser.parse(source, None).unwrap();
let mut cursor = QueryCursor::new();
let matches = cursor.matches(&query, tree.root_node(), source.as_bytes());

@@ -5,7 +5,7 @@ use tree_sitter::Parser;
fn test_lookahead_iterator() {
let mut parser = Parser::new();
let language = get_language("rust");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("struct Stuff {}", None).unwrap();

@@ -28,13 +28,13 @@ fn test_lookahead_iterator() {

let expected_symbols = ["identifier", "block_comment", "line_comment"];
let mut lookahead = language.lookahead_iterator(next_state).unwrap();
-assert_eq!(lookahead.language(), language);
+assert_eq!(*lookahead.language(), language);
assert!(lookahead.iter_names().eq(expected_symbols));

lookahead.reset_state(next_state);
assert!(lookahead.iter_names().eq(expected_symbols));

-lookahead.reset(language, next_state);
+lookahead.reset(language.clone(), next_state);
assert!(lookahead
.map(|s| language.node_kind_for_id(s).unwrap())
.eq(expected_symbols));

@@ -44,7 +44,7 @@ fn test_lookahead_iterator() {
fn test_lookahead_iterator_modifiable_only_by_mut() {
let mut parser = Parser::new();
let language = get_language("rust");
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("struct Stuff {}", None).unwrap();
@@ -202,7 +202,7 @@ fn test_node_children() {
#[test]
fn test_node_children_by_field_name() {
let mut parser = Parser::new();
-parser.set_language(get_language("python")).unwrap();
+parser.set_language(&get_language("python")).unwrap();
let source = "
if one:
a()

@@ -230,7 +230,7 @@ fn test_node_children_by_field_name() {
#[test]
fn test_node_parent_of_child_by_field_name() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let tree = parser.parse("foo(a().b[0].c.d.e())", None).unwrap();
let call_node = tree
.root_node()

@@ -251,7 +251,7 @@ fn test_node_parent_of_child_by_field_name() {
#[test]
fn test_node_field_name_for_child() {
let mut parser = Parser::new();
-parser.set_language(get_language("c")).unwrap();
+parser.set_language(&get_language("c")).unwrap();
let tree = parser.parse("int w = x + y;", None).unwrap();
let translation_unit_node = tree.root_node();
let declaration_node = translation_unit_node.named_child(0).unwrap();

@@ -278,7 +278,7 @@ fn test_node_field_name_for_child() {
#[test]
fn test_node_child_by_field_name_with_extra_hidden_children() {
let mut parser = Parser::new();
-parser.set_language(get_language("python")).unwrap();
+parser.set_language(&get_language("python")).unwrap();

// In the Python grammar, some fields are applied to `suite` nodes,
// which consist of an invisible `indent` token followed by a block.

@@ -373,7 +373,7 @@ fn test_node_named_child_with_aliases_and_extras() {

let mut parser = Parser::new();
parser
-.set_language(get_test_language(&parser_name, &parser_code, None))
+.set_language(&get_test_language(&parser_name, &parser_code, None))
.unwrap();

let tree = parser.parse("b ... b ... c", None).unwrap();

@@ -411,7 +411,7 @@ fn test_node_descendant_count() {
fn test_descendant_count_single_node_tree() {
let mut parser = Parser::new();
parser
-.set_language(get_language("embedded-template"))
+.set_language(&get_language("embedded-template"))
.unwrap();
let tree = parser.parse("hello", None).unwrap();

@@ -576,7 +576,7 @@ fn test_node_edit() {
#[test]
fn test_root_node_with_offset() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let tree = parser.parse(" if (a) b", None).unwrap();

let node = tree.root_node_with_offset(6, Point::new(2, 2));

@@ -604,7 +604,7 @@ fn test_root_node_with_offset() {
#[test]
fn test_node_is_extra() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let tree = parser.parse("foo(/* hi */);", None).unwrap();

let root_node = tree.root_node();

@@ -619,7 +619,7 @@ fn test_node_is_extra() {
#[test]
fn test_node_sexp() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let tree = parser.parse("if (a) b", None).unwrap();
let root_node = tree.root_node();
let if_node = root_node.descendant_for_byte_range(0, 0).unwrap();

@@ -708,7 +708,7 @@ fn test_node_field_names() {

let mut parser = Parser::new();
let language = get_test_language(&parser_name, &parser_code, None);
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser
.parse("child-0 child-1 child-2 child-3 child-4", None)

@@ -778,7 +778,7 @@ fn test_node_field_calls_in_language_without_fields() {

let mut parser = Parser::new();
let language = get_test_language(&parser_name, &parser_code, None);
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("b c d", None).unwrap();

@@ -807,7 +807,7 @@ fn test_node_is_named_but_aliased_as_anonymous() {

let mut parser = Parser::new();
let language = get_test_language(&parser_name, &parser_code, None);
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let tree = parser.parse("B C B", None).unwrap();

@@ -826,7 +826,7 @@ fn test_node_is_named_but_aliased_as_anonymous() {
#[test]
fn test_node_numeric_symbols_respect_simple_aliases() {
let mut parser = Parser::new();
-parser.set_language(get_language("python")).unwrap();
+parser.set_language(&get_language("python")).unwrap();

// Example 1:
// Python argument lists can contain "splat" arguments, which are not allowed within

@@ -857,7 +857,7 @@ fn test_node_numeric_symbols_respect_simple_aliases() {
// Ruby handles the unary (negative) and binary (minus) `-` operators using two different
// tokens. One or more of these is an external token that's aliased as `-`. Their numeric
// kind ids should match.
-parser.set_language(get_language("ruby")).unwrap();
+parser.set_language(&get_language("ruby")).unwrap();
let tree = parser.parse("-a - b", None).unwrap();
let root = tree.root_node();
assert_eq!(

@@ -903,6 +903,6 @@ fn get_all_nodes(tree: &Tree) -> Vec<Node> {

fn parse_json_example() -> Tree {
let mut parser = Parser::new();
-parser.set_language(get_language("json")).unwrap();
+parser.set_language(&get_language("json")).unwrap();
parser.parse(JSON_EXAMPLE, None).unwrap()
}

@@ -95,7 +95,7 @@ fn test_grammar_that_should_hang_and_not_segfault() {
get_test_language(&parser_name, &parser_code, Some(test_grammar_dir.as_path()));

let mut parser = Parser::new();
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();

let code_that_should_hang = "\nHello";
@@ -20,7 +20,7 @@ use tree_sitter_proc_macro::retry;
#[test]
fn test_parsing_simple_string() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let tree = parser
.parse(

@@ -51,7 +51,7 @@ fn test_parsing_simple_string() {
#[test]
fn test_parsing_with_logging() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let mut messages = Vec::new();
parser.set_logger(Some(Box::new(|log_type, message| {

@@ -92,7 +92,7 @@ fn test_parsing_with_debug_graph_enabled() {
let has_zero_indexed_row = |s: &str| s.contains("position: 0,");

let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();

let mut debug_graph_file = tempfile::tempfile().unwrap();
parser.print_dot_graphs(&debug_graph_file);

@@ -114,7 +114,7 @@ fn test_parsing_with_debug_graph_enabled() {
#[test]
fn test_parsing_with_custom_utf8_input() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let lines = &["pub fn foo() {", " 1", "}"];

@@ -157,7 +157,7 @@ fn test_parsing_with_custom_utf8_input() {
#[test]
fn test_parsing_with_custom_utf16_input() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let lines: Vec<Vec<u16>> = ["pub fn foo() {", " 1", "}"]
.iter()

@@ -196,7 +196,7 @@ fn test_parsing_with_custom_utf16_input() {
#[test]
fn test_parsing_with_callback_returning_owned_strings() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let text = b"pub fn foo() { 1 }";

@@ -217,7 +217,7 @@ fn test_parsing_with_callback_returning_owned_strings() {
#[test]
fn test_parsing_text_with_byte_order_mark() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

// Parse UTF16 text with a BOM
let tree = parser

@@ -276,7 +276,7 @@ fn test_parsing_text_with_byte_order_mark() {
#[test]
fn test_parsing_invalid_chars_at_eof() {
let mut parser = Parser::new();
-parser.set_language(get_language("json")).unwrap();
+parser.set_language(&get_language("json")).unwrap();
let tree = parser.parse(b"\xdf", None).unwrap();
assert_eq!(
tree.root_node().to_sexp(),

@@ -287,7 +287,7 @@ fn test_parsing_invalid_chars_at_eof() {
#[test]
fn test_parsing_unexpected_null_characters_within_source() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let tree = parser.parse(b"var \0 something;", None).unwrap();
assert_eq!(
tree.root_node().to_sexp(),

@@ -298,7 +298,7 @@ fn test_parsing_unexpected_null_characters_within_source() {
#[test]
fn test_parsing_ends_when_input_callback_returns_empty() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let mut i = 0;
let source = b"abcdefghijklmnoqrs";
let tree = parser

@@ -322,7 +322,7 @@ fn test_parsing_ends_when_input_callback_returns_empty() {
#[test]
fn test_parsing_after_editing_beginning_of_code() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();

let mut code = b"123 + 456 * (10 + x);".to_vec();
let mut tree = parser.parse(&code, None).unwrap();

@@ -370,7 +370,7 @@ fn test_parsing_after_editing_beginning_of_code() {
#[test]
fn test_parsing_after_editing_end_of_code() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();

let mut code = b"x * (100 + abc);".to_vec();
let mut tree = parser.parse(&code, None).unwrap();

@@ -418,7 +418,7 @@ fn test_parsing_after_editing_end_of_code() {
#[test]
fn test_parsing_empty_file_with_reused_tree() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let tree = parser.parse("", None);
parser.parse("", tree.as_ref());

@@ -437,7 +437,7 @@ fn test_parsing_after_editing_tree_that_depends_on_column_values() {

let mut parser = Parser::new();
parser
-.set_language(get_test_language(&grammar_name, &parser_code, Some(&dir)))
+.set_language(&get_test_language(&grammar_name, &parser_code, Some(&dir)))
.unwrap();

let mut code = b"

@@ -507,7 +507,7 @@ h + i
#[test]
fn test_parsing_after_detecting_error_in_the_middle_of_a_string_token() {
let mut parser = Parser::new();
-parser.set_language(get_language("python")).unwrap();
+parser.set_language(&get_language("python")).unwrap();

let mut source = b"a = b, 'c, d'".to_vec();
let tree = parser.parse(&source, None).unwrap();

@@ -551,7 +551,7 @@ fn test_parsing_on_multiple_threads() {
let this_file_source = include_str!("parser_test.rs");

let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();
let tree = parser.parse(this_file_source, None).unwrap();

let mut parse_threads = Vec::new();

@@ -579,7 +579,7 @@ fn test_parsing_on_multiple_threads() {

// Reparse using the old tree as a starting point.
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();
parser.parse(&prepended_source, Some(&tree_clone)).unwrap()
}));
}

@@ -600,7 +600,7 @@ fn test_parsing_cancelled_by_another_thread() {
let cancellation_flag = std::sync::Arc::new(AtomicUsize::new(0));

let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
unsafe { parser.set_cancellation_flag(Some(&cancellation_flag)) };

// Long input - parsing succeeds

@@ -649,7 +649,7 @@ fn test_parsing_cancelled_by_another_thread() {
#[retry(10)]
fn test_parsing_with_a_timeout() {
let mut parser = Parser::new();
-parser.set_language(get_language("json")).unwrap();
+parser.set_language(&get_language("json")).unwrap();

// Parse an infinitely-long array, but pause after 1ms of processing.
parser.set_timeout_micros(1000);

@@ -711,7 +711,7 @@ fn test_parsing_with_a_timeout() {
#[retry(10)]
fn test_parsing_with_a_timeout_and_a_reset() {
let mut parser = Parser::new();
-parser.set_language(get_language("json")).unwrap();
+parser.set_language(&get_language("json")).unwrap();

parser.set_timeout_micros(5);
let tree = parser.parse(

@@ -768,7 +768,7 @@ fn test_parsing_with_a_timeout_and_a_reset() {
fn test_parsing_with_a_timeout_and_implicit_reset() {
allocations::record(|| {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();

parser.set_timeout_micros(5);
let tree = parser.parse(

@@ -779,7 +779,7 @@ fn test_parsing_with_a_timeout_and_implicit_reset() {

// Changing the parser's language implicitly resets, discarding
// the previous partial parse.
-parser.set_language(get_language("json")).unwrap();
+parser.set_language(&get_language("json")).unwrap();
parser.set_timeout_micros(0);
let tree = parser.parse(
"[null, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]",

@@ -802,7 +802,7 @@ fn test_parsing_with_a_timeout_and_implicit_reset() {
fn test_parsing_with_timeout_and_no_completion() {
allocations::record(|| {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();

parser.set_timeout_micros(5);
let tree = parser.parse(

@@ -822,7 +822,7 @@ fn test_parsing_with_one_included_range() {
let source_code = "<span>hi</span><script>console.log('sup');</script>";

let mut parser = Parser::new();
-parser.set_language(get_language("html")).unwrap();
+parser.set_language(&get_language("html")).unwrap();
let html_tree = parser.parse(source_code, None).unwrap();
let script_content_node = html_tree.root_node().child(1).unwrap().child(1).unwrap();
assert_eq!(script_content_node.kind(), "raw_text");

@@ -830,7 +830,7 @@ fn test_parsing_with_one_included_range() {
parser
.set_included_ranges(&[script_content_node.range()])
.unwrap();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let js_tree = parser.parse(source_code, None).unwrap();

assert_eq!(

@@ -853,7 +853,7 @@ fn test_parsing_with_multiple_included_ranges() {
let source_code = "html `<div>Hello, ${name.toUpperCase()}, it's <b>${now()}</b>.</div>`";

let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let js_tree = parser.parse(source_code, None).unwrap();
let template_string_node = js_tree
.root_node()

@@ -869,7 +869,7 @@ fn test_parsing_with_multiple_included_ranges() {
let interpolation_node2 = template_string_node.child(2).unwrap();
let close_quote_node = template_string_node.child(3).unwrap();

-parser.set_language(get_language("html")).unwrap();
+parser.set_language(&get_language("html")).unwrap();
let html_ranges = &[
Range {
start_byte: open_quote_node.end_byte(),

@@ -948,7 +948,7 @@ fn test_parsing_with_included_range_containing_mismatched_positions() {
let source_code = "<div>test</div>{_ignore_this_part_}";

let mut parser = Parser::new();
-parser.set_language(get_language("html")).unwrap();
+parser.set_language(&get_language("html")).unwrap();

let end_byte = source_code.find("{_ignore_this_part_").unwrap();

@@ -1029,7 +1029,7 @@ fn test_parsing_utf16_code_with_errors_at_the_end_of_an_included_range() {
let end_byte = 2 * source_code.find("</script>").unwrap();

let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
parser
.set_included_ranges(&[Range {
start_byte,

@@ -1051,7 +1051,7 @@ fn test_parsing_with_external_scanner_that_uses_included_range_boundaries() {
let range2_end_byte = range2_start_byte + " d() ".len();

let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
parser
.set_included_ranges(&[
Range {

@@ -1095,7 +1095,7 @@ fn test_parsing_with_a_newly_excluded_range() {

// Parse HTML including the template directive, which will cause an error
let mut parser = Parser::new();
-parser.set_language(get_language("html")).unwrap();
+parser.set_language(&get_language("html")).unwrap();
let mut first_tree = parser
.parse_with(&mut chunked_input(&source_code, 3), None)
.unwrap();

@@ -1182,7 +1182,7 @@ fn test_parsing_with_a_newly_included_range() {

// Parse only the first code directive as JavaScript
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
parser
.set_included_ranges(&[simple_range(range1_start, range1_end)])
.unwrap();

@@ -1274,7 +1274,7 @@ fn test_parsing_with_included_ranges_and_missing_tokens() {

let mut parser = Parser::new();
parser
-.set_language(get_test_language(&parser_name, &parser_code, None))
+.set_language(&get_test_language(&parser_name, &parser_code, None))
.unwrap();

// There's a missing `a` token at the beginning of the code. It must be inserted

@@ -1331,7 +1331,7 @@ fn test_grammars_that_can_hang_on_eof() {

let mut parser = Parser::new();
parser
-.set_language(get_test_language(&parser_name, &parser_code, None))
+.set_language(&get_test_language(&parser_name, &parser_code, None))
.unwrap();
parser.parse("\"", None).unwrap();

@@ -1356,7 +1356,7 @@ fn test_grammars_that_can_hang_on_eof() {
.unwrap();

parser
-.set_language(get_test_language(&parser_name, &parser_code, None))
+.set_language(&get_test_language(&parser_name, &parser_code, None))
.unwrap();
parser.parse("\"", None).unwrap();

@@ -1381,7 +1381,7 @@ fn test_grammars_that_can_hang_on_eof() {
.unwrap();

parser
-.set_language(get_test_language(&parser_name, &parser_code, None))
+.set_language(&get_test_language(&parser_name, &parser_code, None))
.unwrap();
parser.parse("\"", None).unwrap();
}
@@ -8,7 +8,7 @@ fn test_pathological_example_1() {

allocations::record(|| {
let mut parser = Parser::new();
-parser.set_language(get_language(language)).unwrap();
+parser.set_language(&get_language(language)).unwrap();
parser.parse(source, None).unwrap();
});
}

File diff suppressed because it is too large

@@ -6,7 +6,7 @@ use tree_sitter::{Language, Node, Parser, Point, Query, QueryCursor, TextProvide
fn parse_text(text: impl AsRef<[u8]>) -> (Tree, Language) {
let language = get_language("c");
let mut parser = Parser::new();
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
(parser.parse(text, None).unwrap(), language)
}

@@ -17,7 +17,7 @@ where
{
let language = get_language("c");
let mut parser = Parser::new();
-parser.set_language(language).unwrap();
+parser.set_language(&language).unwrap();
let tree = parser.parse_with(callback, None).unwrap();
// eprintln!("{}", tree.clone().root_node().to_sexp());
assert_eq!("comment", tree.clone().root_node().child(0).unwrap().kind());

@@ -25,7 +25,7 @@ where
}

fn tree_query<I: AsRef<[u8]>>(tree: &Tree, text: impl TextProvider<I>, language: Language) {
-let query = Query::new(language, "((comment) @c (#eq? @c \"// comment\"))").unwrap();
+let query = Query::new(&language, "((comment) @c (#eq? @c \"// comment\"))").unwrap();
let mut cursor = QueryCursor::new();
let mut captures = cursor.captures(&query, tree.root_node(), text);
let (match_, idx) = captures.next().unwrap();
@@ -7,7 +7,7 @@ use tree_sitter::{InputEdit, Parser, Point, Range, Tree};
#[test]
fn test_tree_edit() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let tree = parser.parse(" abc !== def", None).unwrap();

assert_eq!(

@@ -235,7 +235,7 @@ fn test_tree_edit() {
#[test]
fn test_tree_edit_with_included_ranges() {
let mut parser = Parser::new();
-parser.set_language(get_language("html")).unwrap();
+parser.set_language(&get_language("html")).unwrap();

let source = "<div><% if a %><span>a</span><% else %><span>b</span><% end %></div>";

@@ -300,7 +300,7 @@ fn test_tree_edit_with_included_ranges() {
#[test]
fn test_tree_cursor() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let tree = parser
.parse(

@@ -379,7 +379,7 @@ fn test_tree_cursor() {
#[test]
fn test_tree_cursor_previous_sibling() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();

let text = "
// Hi there

@@ -418,7 +418,7 @@ fn test_tree_cursor_previous_sibling() {
#[test]
fn test_tree_cursor_fields() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();

let tree = parser
.parse("function /*1*/ bar /*2*/ () {}", None)

@@ -455,7 +455,7 @@ fn test_tree_cursor_fields() {
#[test]
fn test_tree_cursor_child_for_point() {
let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let source = &"
[
one,

@@ -562,7 +562,7 @@ fn test_tree_cursor_child_for_point() {
#[test]
fn test_tree_node_equality() {
let mut parser = Parser::new();
-parser.set_language(get_language("rust")).unwrap();
+parser.set_language(&get_language("rust")).unwrap();
let tree = parser.parse("struct A {}", None).unwrap();
let node1 = tree.root_node();
let node2 = tree.root_node();

@@ -576,7 +576,7 @@ fn test_get_changed_ranges() {
let source_code = b"{a: null};\n".to_vec();

let mut parser = Parser::new();
-parser.set_language(get_language("javascript")).unwrap();
+parser.set_language(&get_language("javascript")).unwrap();
let tree = parser.parse(&source_code, None).unwrap();

assert_eq!(
@ -1,7 +1,9 @@
|
|||
use crate::tests::helpers::fixtures::WASM_DIR;
|
||||
use crate::tests::helpers::{allocations, fixtures::WASM_DIR};
|
||||
use lazy_static::lazy_static;
|
||||
use std::fs;
|
||||
use tree_sitter::{wasmtime::Engine, Parser, WasmError, WasmErrorKind, WasmStore};
|
||||
use tree_sitter::{
|
||||
wasmtime::Engine, Parser, Query, QueryCursor, WasmError, WasmErrorKind, WasmStore,
|
||||
};
|
||||
|
||||
lazy_static! {
|
||||
static ref ENGINE: Engine = Engine::default();
|
||||
|
|
@ -9,84 +11,133 @@ lazy_static! {
|
|||
|
||||
#[test]
|
||||
fn test_load_wasm_language() {
|
||||
let mut store = WasmStore::new(ENGINE.clone()).unwrap();
|
||||
let mut parser = Parser::new();
|
||||
allocations::record(|| {
|
||||
let mut store = WasmStore::new(ENGINE.clone()).unwrap();
|
||||
let mut parser = Parser::new();
|
||||
|
||||
         let wasm_cpp = fs::read(&WASM_DIR.join(format!("tree-sitter-cpp.wasm"))).unwrap();
         let wasm_rs = fs::read(&WASM_DIR.join(format!("tree-sitter-rust.wasm"))).unwrap();
         let wasm_rb = fs::read(&WASM_DIR.join(format!("tree-sitter-ruby.wasm"))).unwrap();
-        let wasm_typescript = fs::read(&WASM_DIR.join(format!("tree-sitter-typescript.wasm"))).unwrap();
+        let wasm_typescript =
+            fs::read(&WASM_DIR.join(format!("tree-sitter-typescript.wasm"))).unwrap();

         let language_rust = store.load_language("rust", &wasm_rs).unwrap();
         let language_cpp = store.load_language("cpp", &wasm_cpp).unwrap();
         let language_ruby = store.load_language("ruby", &wasm_rb).unwrap();
         let language_typescript = store.load_language("typescript", &wasm_typescript).unwrap();
         parser.set_wasm_store(store).unwrap();

         let mut parser2 = Parser::new();
         parser2
             .set_wasm_store(WasmStore::new(ENGINE.clone()).unwrap())
             .unwrap();
+        let mut query_cursor = QueryCursor::new();

         for mut parser in [parser, parser2] {
             for _ in 0..2 {
-                parser.set_language(language_cpp).unwrap();
+                let query_rust = Query::new(&language_rust, "(const_item) @foo").unwrap();
+                let query_typescript =
+                    Query::new(&language_typescript, "(class_declaration) @foo").unwrap();
+
+                parser.set_language(&language_cpp).unwrap();
                 let tree = parser.parse("A<B> c = d();", None).unwrap();
                 assert_eq!(
                     tree.root_node().to_sexp(),
                     "(translation_unit (declaration type: (template_type name: (type_identifier) arguments: (template_argument_list (type_descriptor type: (type_identifier)))) declarator: (init_declarator declarator: (identifier) value: (call_expression function: (identifier) arguments: (argument_list)))))"
                 );

-                parser.set_language(language_rust).unwrap();
-                let tree = parser.parse("const A: B = c();", None).unwrap();
+                parser.set_language(&language_rust).unwrap();
+                let source = "const A: B = c();";
+                let tree = parser.parse(source, None).unwrap();
                 assert_eq!(
                     tree.root_node().to_sexp(),
                     "(source_file (const_item name: (identifier) type: (type_identifier) value: (call_expression function: (identifier) arguments: (arguments))))"
                 );
+                assert_eq!(
+                    query_cursor
+                        .matches(&query_rust, tree.root_node(), source.as_bytes())
+                        .count(),
+                    1
+                );

-                parser.set_language(language_ruby).unwrap();
+                parser.set_language(&language_ruby).unwrap();
                 let tree = parser.parse("class A; end", None).unwrap();
                 assert_eq!(
                     tree.root_node().to_sexp(),
                     "(program (class name: (constant)))"
                 );

-                parser.set_language(language_typescript).unwrap();
+                parser.set_language(&language_typescript).unwrap();
                 let tree = parser.parse("class A {}", None).unwrap();
                 assert_eq!(
                     tree.root_node().to_sexp(),
                     "(program (class_declaration name: (type_identifier) body: (class_body)))"
                 );
+                assert_eq!(
+                    query_cursor
+                        .matches(&query_typescript, tree.root_node(), source.as_bytes())
+                        .count(),
+                    1
+                );
             }
         }
     });
 }
#[test]
fn test_load_and_reload_wasm_language() {
    allocations::record(|| {
        let mut store = WasmStore::new(ENGINE.clone()).unwrap();

        let wasm_rust = fs::read(&WASM_DIR.join(format!("tree-sitter-rust.wasm"))).unwrap();
        let wasm_typescript =
            fs::read(&WASM_DIR.join(format!("tree-sitter-typescript.wasm"))).unwrap();

        let language_rust = store.load_language("rust", &wasm_rust).unwrap();
        let language_typescript = store.load_language("typescript", &wasm_typescript).unwrap();
        assert_eq!(store.language_count(), 2);

        // When a language is dropped, stores can release their instances of that language.
        drop(language_rust);
        assert_eq!(store.language_count(), 1);

        let language_rust = store.load_language("rust", &wasm_rust).unwrap();
        assert_eq!(store.language_count(), 2);

        drop(language_rust);
        drop(language_typescript);
        assert_eq!(store.language_count(), 0);
    });
}
 #[test]
 fn test_load_wasm_errors() {
-    let mut store = WasmStore::new(ENGINE.clone()).unwrap();
-    let wasm = fs::read(&WASM_DIR.join(format!("tree-sitter-rust.wasm"))).unwrap();
+    allocations::record(|| {
+        let mut store = WasmStore::new(ENGINE.clone()).unwrap();
+        let wasm = fs::read(&WASM_DIR.join(format!("tree-sitter-rust.wasm"))).unwrap();

         let bad_wasm = &wasm[1..];
         assert_eq!(
             store.load_language("rust", &bad_wasm).unwrap_err(),
             WasmError {
                 kind: WasmErrorKind::Parse,
                 message: "failed to parse dylink section of wasm module".into(),
             }
         );

         assert_eq!(
             store.load_language("not_rust", &wasm).unwrap_err(),
             WasmError {
                 kind: WasmErrorKind::Instantiate,
                 message: "module did not contain language function: tree_sitter_not_rust".into(),
             }
         );

         let mut bad_wasm = wasm.clone();
         bad_wasm[300..500].iter_mut().for_each(|b| *b = 0);
         assert_eq!(
             store.load_language("rust", &bad_wasm).unwrap_err().kind,
             WasmErrorKind::Compile,
         );
+    });
 }
@@ -267,7 +267,7 @@ impl HighlightConfiguration {
         // Construct a single query by concatenating the three query strings, but record the
         // range of pattern indices that belong to each individual string.
-        let mut query = Query::new(language, &query_source)?;
+        let mut query = Query::new(&language, &query_source)?;
         let mut locals_pattern_index = 0;
         let mut highlights_pattern_index = 0;
         for i in 0..(query.pattern_count()) {
@@ -284,7 +284,7 @@ impl HighlightConfiguration {
         // Construct a separate query just for dealing with the 'combined injections'.
         // Disable the combined injection patterns in the main query.
-        let mut combined_injections_query = Query::new(language, injection_query)?;
+        let mut combined_injections_query = Query::new(&language, injection_query)?;
         let mut has_combined_queries = false;
         for pattern_index in 0..locals_pattern_index {
             let settings = query.property_settings(pattern_index);
@@ -435,7 +435,7 @@ impl<'a> HighlightIterLayer<'a> {
             if highlighter.parser.set_included_ranges(&ranges).is_ok() {
                 highlighter
                     .parser
-                    .set_language(config.language)
+                    .set_language(&config.language)
                     .map_err(|_| Error::InvalidLanguage)?;

                 unsafe { highlighter.parser.set_cancellation_flag(cancellation_flag) };
@@ -30,14 +30,14 @@ regex = "1.9.1"

 [dependencies.wasmtime]
 git = "https://github.com/bytecodealliance/wasmtime"
-rev = "fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+rev = "v16.0.0"
 optional = true
 default-features = false
 features = ["cranelift"]

 [dependencies.wasmtime-c-api]
 git = "https://github.com/bytecodealliance/wasmtime"
-rev = "fa6fcd946b8f6d60c2d191a1b14b9399e261a76d"
+rev = "v16.0.0"
 optional = true
 package = "wasmtime-c-api-impl"
 default-features = false
@@ -664,6 +664,14 @@ extern "C" {
     #[doc = " Set the maximum start depth for a query cursor.\n\n This prevents cursors from exploring children nodes at a certain depth.\n Note if a pattern includes many children, then they will still be checked.\n\n The zero max start depth value can be used as a special behavior and\n it helps to destructure a subtree by staying on a node and using captures\n for interested parts. Note that the zero max start depth only limit a search\n depth for a pattern's root node but other nodes that are parts of the pattern\n may be searched at any depth what defined by the pattern structure.\n\n Set to `UINT32_MAX` to remove the maximum start depth."]
     pub fn ts_query_cursor_set_max_start_depth(self_: *mut TSQueryCursor, max_start_depth: u32);
 }
+extern "C" {
+    #[doc = " Get another reference to the given language."]
+    pub fn ts_language_copy(self_: *const TSLanguage) -> *const TSLanguage;
+}
+extern "C" {
+    #[doc = " Free any dynamically-allocated resources for this language, if\n this is the last reference."]
+    pub fn ts_language_delete(self_: *const TSLanguage);
+}
 extern "C" {
     #[doc = " Get the number of distinct node types in the language."]
     pub fn ts_language_symbol_count(self_: *const TSLanguage) -> u32;
@@ -811,6 +819,10 @@ extern "C" {
         error: *mut TSWasmError,
     ) -> *const TSLanguage;
 }
+extern "C" {
+    #[doc = " Get the number of languages instantiated in the given wasm store."]
+    pub fn ts_wasm_store_language_count(arg1: *const TSWasmStore) -> usize;
+}
 extern "C" {
     #[doc = " Check if the language came from a Wasm module. If so, then in order to use\n this langauge with a Parser, that parser must have a Wasm store assigned."]
     pub fn ts_language_is_wasm(arg1: *const TSLanguage) -> bool;
@@ -11,9 +11,9 @@ use std::{
     ffi::CStr,
     fmt, hash, iter,
     marker::PhantomData,
-    mem::MaybeUninit,
+    mem::{self, MaybeUninit},
     num::NonZeroU16,
-    ops,
+    ops::{self, Deref},
     os::raw::{c_char, c_void},
     ptr::{self, NonNull},
     slice, str,
@@ -47,10 +47,12 @@ pub const PARSER_HEADER: &'static str = include_str!("../include/tree_sitter/par

 /// An opaque object that defines how to parse a particular language. The code for each
 /// `Language` is generated by the Tree-sitter CLI.
 #[doc(alias = "TSLanguage")]
-#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+#[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
 pub struct Language(*const ffi::TSLanguage);

+pub struct LanguageRef<'a>(*const ffi::TSLanguage, PhantomData<&'a ()>);
+
 /// A tree that represents the syntactic structure of a source code file.
 #[doc(alias = "TSTree")]
 pub struct Tree(NonNull<ffi::TSTree>);
@@ -385,6 +387,26 @@ impl Language {
     }
 }

+impl Clone for Language {
+    fn clone(&self) -> Self {
+        unsafe { Self(ffi::ts_language_copy(self.0)) }
+    }
+}
+
+impl Drop for Language {
+    fn drop(&mut self) {
+        unsafe { ffi::ts_language_delete(self.0) }
+    }
+}
+
+impl<'a> Deref for LanguageRef<'a> {
+    type Target = Language;
+
+    fn deref(&self) -> &Self::Target {
+        unsafe { mem::transmute(&self.0) }
+    }
+}
+
 impl Parser {
     /// Create a new parser.
     pub fn new() -> Parser {
@@ -403,7 +425,7 @@ impl Parser {
     /// and compare it to this library's [`LANGUAGE_VERSION`](LANGUAGE_VERSION) and
     /// [`MIN_COMPATIBLE_LANGUAGE_VERSION`](MIN_COMPATIBLE_LANGUAGE_VERSION) constants.
     #[doc(alias = "ts_parser_set_language")]
-    pub fn set_language(&mut self, language: Language) -> Result<(), LanguageError> {
+    pub fn set_language(&mut self, language: &Language) -> Result<(), LanguageError> {
         let version = language.version();
         if version < MIN_COMPATIBLE_LANGUAGE_VERSION || version > LANGUAGE_VERSION {
             Err(LanguageError { version })
@@ -766,8 +788,11 @@ impl Tree {

     /// Get the language that was used to parse the syntax tree.
     #[doc(alias = "ts_tree_language")]
-    pub fn language(&self) -> Language {
-        Language(unsafe { ffi::ts_tree_language(self.0.as_ptr()) })
+    pub fn language(&self) -> LanguageRef {
+        LanguageRef(
+            unsafe { ffi::ts_tree_language(self.0.as_ptr()) },
+            PhantomData,
+        )
     }

     /// Edit the syntax tree to keep it in sync with source code that has been
@@ -894,8 +919,8 @@ impl<'tree> Node<'tree> {

     /// Get the [`Language`] that was used to parse this node's syntax tree.
     #[doc(alias = "ts_node_language")]
-    pub fn language(&self) -> Language {
-        Language(unsafe { ffi::ts_node_language(self.0) })
+    pub fn language(&self) -> LanguageRef {
+        LanguageRef(unsafe { ffi::ts_node_language(self.0) }, PhantomData)
     }

     /// Check if this node is *named*.
@@ -1473,8 +1498,11 @@ impl Drop for TreeCursor<'_> {
 impl LookaheadIterator {
     /// Get the current language of the lookahead iterator.
     #[doc(alias = "ts_lookahead_iterator_language")]
-    pub fn language(&self) -> Language {
-        Language(unsafe { ffi::ts_lookahead_iterator_language(self.0.as_ptr()) })
+    pub fn language(&self) -> LanguageRef<'_> {
+        LanguageRef(
+            unsafe { ffi::ts_lookahead_iterator_language(self.0.as_ptr()) },
+            PhantomData,
+        )
     }

     /// Get the current symbol of the lookahead iterator.
@@ -1553,7 +1581,7 @@ impl Query {
     /// The query is associated with a particular language, and can only be run
     /// on syntax nodes parsed with that language. References to Queries can be
     /// shared between multiple threads.
-    pub fn new(language: Language, source: &str) -> Result<Self, QueryError> {
+    pub fn new(language: &Language, source: &str) -> Result<Self, QueryError> {
         let mut error_offset = 0u32;
         let mut error_type: ffi::TSQueryError = 0;
         let bytes = source.as_bytes();
@@ -1,7 +1,7 @@
-use crate::{ffi, Language, LanguageError, Parser};
+use crate::{ffi, Language, LanguageError, Parser, FREE_FN};
 use std::{
     error,
-    ffi::CString,
+    ffi::{CStr, CString},
     fmt,
     mem::{self, MaybeUninit},
     os::raw::c_char,
@@ -73,11 +73,16 @@ impl WasmStore {
             }
         }
     }
+
+    pub fn language_count(&self) -> usize {
+        unsafe { ffi::ts_wasm_store_language_count(self.0) as usize }
+    }
 }

 impl WasmError {
     unsafe fn new(error: ffi::TSWasmError) -> Self {
-        let message = CString::from_raw(error.message);
+        let message = CStr::from_ptr(error.message).to_str().unwrap().to_string();
+        (FREE_FN)(error.message as *mut _);
         Self {
             kind: match error.kind {
                 ffi::TSWasmErrorKindParse => WasmErrorKind::Parse,
@@ -85,7 +90,7 @@ impl WasmError {
                 ffi::TSWasmErrorKindInstantiate => WasmErrorKind::Instantiate,
                 _ => WasmErrorKind::Other,
             },
-            message: message.into_string().unwrap(),
+            message,
         }
     }
 }
@@ -1013,6 +1013,17 @@ void ts_query_cursor_set_max_start_depth(TSQueryCursor *self, uint32_t max_start_depth);
 /* Section - Language */
 /**********************/

+/**
+ * Get another reference to the given language.
+ */
+const TSLanguage *ts_language_copy(const TSLanguage *self);
+
+/**
+ * Free any dynamically-allocated resources for this language, if
+ * this is the last reference.
+ */
+void ts_language_delete(const TSLanguage *self);
+
 /**
  * Get the number of distinct node types in the language.
  */
@@ -1190,6 +1201,11 @@ const TSLanguage *ts_wasm_store_load_language(
   TSWasmError *error
 );

+/**
+ * Get the number of languages instantiated in the given wasm store.
+ */
+size_t ts_wasm_store_language_count(const TSWasmStore *);
+
 /**
  * Check if the language came from a Wasm module. If so, then in order to use
  * this langauge with a Parser, that parser must have a Wasm store assigned.
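These declarations give C embedders an explicit retain/release pair for languages. A hedged sketch of the intended balance (the `tree_sitter_rust` constructor is a placeholder standing in for any generated parser, and is not part of this diff):

    #include <tree_sitter/api.h>

    // Placeholder: any language constructor, e.g. from a generated parser.
    const TSLanguage *tree_sitter_rust(void);

    void copy_delete_contract(void) {
      const TSLanguage *language = tree_sitter_rust();

      // Every ts_language_copy must eventually be balanced by a ts_language_delete.
      const TSLanguage *extra_ref = ts_language_copy(language);
      ts_language_delete(extra_ref);

      // Releasing the caller's own reference; for a wasm-backed language, the last
      // ts_language_delete frees the module's dynamically-allocated resources.
      ts_language_delete(language);
    }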
@@ -1,6 +1,21 @@
 #include "./language.h"
+#include "./wasm.h"
 #include "tree_sitter/api.h"
 #include <string.h>

+const TSLanguage *ts_language_copy(const TSLanguage *self) {
+  if (self && ts_language_is_wasm(self)) {
+    ts_wasm_language_retain(self);
+  }
+  return self;
+}
+
+void ts_language_delete(const TSLanguage *self) {
+  if (self && ts_language_is_wasm(self)) {
+    ts_wasm_language_release(self);
+  }
+}
+
 uint32_t ts_language_symbol_count(const TSLanguage *self) {
   return self->symbol_count + self->alias_count;
 }
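Because both functions check `ts_language_is_wasm` before doing anything, statically compiled languages are untouched: existing embedders that never call copy/delete keep working, and calling the new functions on such a language is harmless. A hedged sketch of that no-op behaviour (placeholder constructor; the `assert` is only illustrative):

    #include <assert.h>
    #include <tree_sitter/api.h>

    const TSLanguage *tree_sitter_rust(void);  // placeholder: a statically compiled language

    void static_language_noop(void) {
      const TSLanguage *lang = tree_sitter_rust();
      assert(ts_language_copy(lang) == lang);  // copy hands back the same pointer
      ts_language_delete(lang);                // and delete has nothing to free
    }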
@@ -1869,6 +1869,7 @@ const TSLanguage *ts_parser_language(const TSParser *self) {

 bool ts_parser_set_language(TSParser *self, const TSLanguage *language) {
   ts_parser__external_scanner_destroy(self);
+  ts_language_delete(self->language);
   self->language = NULL;

   if (language) {
@@ -1885,7 +1886,7 @@ bool ts_parser_set_language(TSParser *self, const TSLanguage *language) {
     }
   }

-  self->language = language;
+  self->language = ts_language_copy(language);
   ts_parser__external_scanner_create(self);
   ts_parser_reset(self);
   return true;
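Taken together, these two hunks make the parser own a reference to whatever language it is configured with. A hedged sketch of the resulting call pattern (`tree_sitter_rust` is a placeholder constructor; releasing the parser's own reference when the parser is deleted is assumed to be handled elsewhere in the PR and is not shown in this excerpt):

    #include <tree_sitter/api.h>

    const TSLanguage *tree_sitter_rust(void);  // placeholder constructor

    void parser_owns_language(void) {
      const TSLanguage *language = tree_sitter_rust();
      TSParser *parser = ts_parser_new();

      ts_parser_set_language(parser, language);  // the parser stores ts_language_copy(language)
      ts_language_delete(language);              // the caller can release its reference right away;
                                                 // the parser's copy keeps a wasm-backed language alive

      // ... parse ...

      ts_parser_delete(parser);  // assumed to release the parser's reference (not shown in this excerpt)
    }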
@@ -2698,7 +2698,7 @@ TSQuery *ts_query_new(
     .negated_fields = array_new(),
     .repeat_symbols_with_rootless_patterns = array_new(),
     .wildcard_root_pattern_count = 0,
-    .language = language,
+    .language = ts_language_copy(language),
   };

   array_push(&self->negated_fields, 0);
@@ -2812,6 +2812,7 @@ void ts_query_delete(TSQuery *self) {
     array_delete(&self->string_buffer);
     array_delete(&self->negated_fields);
     array_delete(&self->repeat_symbols_with_rootless_patterns);
+    ts_language_delete(self->language);
     symbol_table_delete(&self->captures);
     symbol_table_delete(&self->predicate_values);
     for (uint32_t index = 0; index < self->capture_quantifiers.size; index++) {
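As with the parser, a query now holds its own reference to its language, so the caller can release theirs as soon as the query is constructed. A hedged sketch (placeholder constructor, error handling omitted):

    #include <string.h>
    #include <tree_sitter/api.h>

    const TSLanguage *tree_sitter_rust(void);  // placeholder constructor

    TSQuery *build_query(void) {
      const TSLanguage *language = tree_sitter_rust();
      const char *pattern = "(const_item) @item";
      uint32_t error_offset;
      TSQueryError error_type;

      TSQuery *query = ts_query_new(language, pattern, strlen(pattern), &error_offset, &error_type);
      ts_language_delete(language);  // safe: the query holds ts_language_copy(language)
      return query;                  // ts_query_delete later releases the query's reference
    }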
@@ -12,7 +12,7 @@ TSTree *ts_tree_new(
 ) {
   TSTree *result = ts_malloc(sizeof(TSTree));
   result->root = root;
-  result->language = language;
+  result->language = ts_language_copy(language);
   result->included_ranges = ts_calloc(included_range_count, sizeof(TSRange));
   memcpy(result->included_ranges, included_ranges, included_range_count * sizeof(TSRange));
   result->included_range_count = included_range_count;
@@ -30,6 +30,7 @@ void ts_tree_delete(TSTree *self) {
   SubtreePool pool = ts_subtree_pool_new(0);
   ts_subtree_release(&pool, self->root);
   ts_subtree_pool_delete(&pool);
+  ts_language_delete(self->language);
   ts_free(self->included_ranges);
   ts_free(self);
 }
lib/src/wasm.c
@@ -67,13 +67,22 @@ typedef struct {
   uint32_t table_align;
 } WasmDylinkInfo;

+// WasmLanguageId - A pointer used to identify a language. This language id is
+// reference-counted, so that its ownership can be shared between the language
+// itself and the instances of the language that are held in wasm stores.
+typedef struct {
+  volatile uint32_t ref_count;
+  volatile uint32_t is_language_deleted;
+} WasmLanguageId;
+
 // LanguageWasmModule - Additional data associated with a wasm-backed
 // `TSLanguage`. This data is read-only and does not reference a particular
 // wasm store, so it can be shared by all users of a `TSLanguage`. A pointer to
 // this is stored on the language itself.
 typedef struct {
+  volatile uint32_t ref_count;
+  WasmLanguageId *language_id;
   wasmtime_module_t *module;
-  uint32_t language_id;
   const char *name;
   char *symbol_name_buffer;
   char *field_name_buffer;
@@ -84,7 +93,7 @@ typedef struct {
 // a `TSLanguage` in a particular wasm store. The wasm store holds one of
 // these structs for each language that it has instantiated.
 typedef struct {
-  uint32_t language_id;
+  WasmLanguageId *language_id;
   wasmtime_instance_t instance;
   int32_t external_states_address;
   int32_t lex_main_fn_index;
@@ -471,6 +480,24 @@ static wasmtime_extern_t get_builtin_func_extern(
     snprintf(*output, message_length + 1, __VA_ARGS__); \
   } while (0)

+WasmLanguageId *language_id_new() {
+  WasmLanguageId *self = ts_malloc(sizeof(WasmLanguageId));
+  self->is_language_deleted = false;
+  self->ref_count = 1;
+  return self;
+}
+
+WasmLanguageId *language_id_clone(WasmLanguageId *self) {
+  atomic_inc(&self->ref_count);
+  return self;
+}
+
+void language_id_delete(WasmLanguageId *self) {
+  if (atomic_dec(&self->ref_count) == 0) {
+    ts_free(self);
+  }
+}
+
 static bool ts_wasm_store__provide_builtin_import(
   TSWasmStore *self,
   const wasm_name_t *import_name,
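The id is reference-counted separately from the language itself so that a wasm store can outlive the language and still find out, lazily, that the language is gone. A hedged walkthrough using the helpers above (the ordering is illustrative; in the real code the flag is flipped atomically inside ts_wasm_language_release, further down in this file):

    // Illustrative only - mirrors what the load and release paths in this file do.
    void language_id_lifecycle_sketch(void) {
      WasmLanguageId *id = language_id_new();             // ref_count = 1, held by the LanguageWasmModule
      WasmLanguageId *store_ref = language_id_clone(id);  // ref_count = 2, held by a LanguageWasmInstance

      // The last reference to the language itself goes away:
      id->is_language_deleted = true;
      language_id_delete(id);                             // ref_count = 1; the store's reference keeps the id alive

      // The store later notices the flag, erases its instance, and drops the final reference:
      language_id_delete(store_ref);                      // ref_count = 0, the id is freed
    }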
@@ -794,10 +821,25 @@ void ts_wasm_store_delete(TSWasmStore *self) {
   wasm_globaltype_delete(self->var_i32_type);
   wasmtime_store_delete(self->store);
   wasm_engine_delete(self->engine);
+  for (unsigned i = 0; i < self->language_instances.size; i++) {
+    LanguageWasmInstance *instance = &self->language_instances.contents[i];
+    language_id_delete(instance->language_id);
+  }
   array_delete(&self->language_instances);
   ts_free(self);
 }

+size_t ts_wasm_store_language_count(const TSWasmStore *self) {
+  size_t result = 0;
+  for (unsigned i = 0; i < self->language_instances.size; i++) {
+    const WasmLanguageId *id = self->language_instances.contents[i].language_id;
+    if (!id->is_language_deleted) {
+      result++;
+    }
+  }
+  return result;
+}
+
 static bool ts_wasm_store__instantiate(
   TSWasmStore *self,
   wasmtime_module_t *module,
@@ -1074,7 +1116,7 @@ const TSLanguage *ts_wasm_store_load_language(
   };
   uint32_t address_count = array_len(addresses);

-  TSLanguage *language = ts_malloc(sizeof(TSLanguage));
+  TSLanguage *language = ts_calloc(1, sizeof(TSLanguage));
   StringData symbol_name_buffer = array_new();
   StringData field_name_buffer = array_new();

@@ -1196,12 +1238,13 @@ const TSLanguage *ts_wasm_store_load_language(

   LanguageWasmModule *language_module = ts_malloc(sizeof(LanguageWasmModule));
   *language_module = (LanguageWasmModule) {
-    .language_id = atomic_inc(&NEXT_LANGUAGE_ID),
+    .language_id = language_id_new(),
     .module = module,
     .name = name,
     .symbol_name_buffer = symbol_name_buffer.contents,
     .field_name_buffer = field_name_buffer.contents,
     .dylink_info = dylink_info,
+    .ref_count = 1,
   };

   // The lex functions are not used for wasm languages. Use those two fields
@@ -1210,10 +1253,19 @@ const TSLanguage *ts_wasm_store_load_language(
   language->lex_fn = ts_wasm_store__sentinel_lex_fn;
   language->keyword_lex_fn = (void *)language_module;

-  // Store some information about this store's specific instance of this
-  // language module, keyed by the language's id.
+  // Clear out any instances of languages that have been deleted.
+  for (unsigned i = 0; i < self->language_instances.size; i++) {
+    WasmLanguageId *id = self->language_instances.contents[i].language_id;
+    if (id->is_language_deleted) {
+      language_id_delete(id);
+      array_erase(&self->language_instances, i);
+      i--;
+    }
+  }
+
+  // Store this store's instance of this language module.
   array_push(&self->language_instances, ((LanguageWasmInstance) {
-    .language_id = language_module->language_id,
+    .language_id = language_id_clone(language_module->language_id),
     .instance = instance,
     .external_states_address = wasm_language.external_scanner.states,
     .lex_main_fn_index = wasm_language.lex_fn,
|
|||
wasmtime_context_t *context = wasmtime_store_context(self->store);
|
||||
const LanguageWasmModule *language_module = (void *)language->keyword_lex_fn;
|
||||
|
||||
// Search for the information about this store's instance of the language module.
|
||||
// Search for this store's instance of the language module. Also clear out any
|
||||
// instances of languages that have been deleted.
|
||||
bool exists = false;
|
||||
array_search_sorted_by(
|
||||
&self->language_instances,
|
||||
.language_id,
|
||||
language_module->language_id,
|
||||
index,
|
||||
&exists
|
||||
);
|
||||
for (unsigned i = 0; i < self->language_instances.size; i++) {
|
||||
WasmLanguageId *id = self->language_instances.contents[i].language_id;
|
||||
if (id->is_language_deleted) {
|
||||
language_id_delete(id);
|
||||
array_erase(&self->language_instances, i);
|
||||
i--;
|
||||
} else if (id == language_module->language_id) {
|
||||
exists = true;
|
||||
*index = i;
|
||||
}
|
||||
}
|
||||
|
||||
// If the language module has not been instantiated in this store, then add
|
||||
// it to this store.
|
||||
if (!exists) {
|
||||
*index = self->language_instances.size;
|
||||
char *message;
|
||||
wasmtime_instance_t instance;
|
||||
int32_t language_address;
|
||||
|
|
@ -1272,8 +1330,8 @@ bool ts_wasm_store_add_language(
|
|||
LanguageInWasmMemory wasm_language;
|
||||
const uint8_t *memory = wasmtime_memory_data(context, &self->memory);
|
||||
memcpy(&wasm_language, &memory[language_address], sizeof(LanguageInWasmMemory));
|
||||
array_insert(&self->language_instances, *index, ((LanguageWasmInstance) {
|
||||
.language_id = language_module->language_id,
|
||||
array_push(&self->language_instances, ((LanguageWasmInstance) {
|
||||
.language_id = language_id_clone(language_module->language_id),
|
||||
.instance = instance,
|
||||
.external_states_address = wasm_language.external_scanner.states,
|
||||
.lex_main_fn_index = wasm_language.lex_fn,
|
||||
|
|
@ -1468,6 +1526,50 @@ bool ts_language_is_wasm(const TSLanguage *self) {
|
|||
return self->lex_fn == ts_wasm_store__sentinel_lex_fn;
|
||||
}
|
||||
|
||||
static inline LanguageWasmModule *ts_language__wasm_module(const TSLanguage *self) {
|
||||
return (LanguageWasmModule *)self->keyword_lex_fn;
|
||||
}
|
||||
|
||||
void ts_wasm_language_retain(const TSLanguage *self) {
|
||||
LanguageWasmModule *module = ts_language__wasm_module(self);
|
||||
assert(module->ref_count > 0);
|
||||
atomic_inc(&module->ref_count);
|
||||
}
|
||||
|
||||
void ts_wasm_language_release(const TSLanguage *self) {
|
||||
LanguageWasmModule *module = ts_language__wasm_module(self);
|
||||
assert(module->ref_count > 0);
|
||||
if (atomic_dec(&module->ref_count) == 0) {
|
||||
// Update the language id to reflect that the language is deleted. This allows any wasm stores
|
||||
// that hold wasm instances for this language to delete those instances.
|
||||
atomic_inc(&module->language_id->is_language_deleted);
|
||||
language_id_delete(module->language_id);
|
||||
|
||||
ts_free((void *)module->field_name_buffer);
|
||||
ts_free((void *)module->symbol_name_buffer);
|
||||
ts_free((void *)module->name);
|
||||
wasmtime_module_delete(module->module);
|
||||
ts_free(module);
|
||||
|
||||
ts_free((void *)self->alias_map);
|
||||
ts_free((void *)self->alias_sequences);
|
||||
ts_free((void *)self->external_scanner.symbol_map);
|
||||
ts_free((void *)self->field_map_entries);
|
||||
ts_free((void *)self->field_map_slices);
|
||||
ts_free((void *)self->field_names);
|
||||
ts_free((void *)self->lex_modes);
|
||||
ts_free((void *)self->parse_actions);
|
||||
ts_free((void *)self->parse_table);
|
||||
ts_free((void *)self->primary_state_ids);
|
||||
ts_free((void *)self->public_symbol_map);
|
||||
ts_free((void *)self->small_parse_table);
|
||||
ts_free((void *)self->small_parse_table_map);
|
||||
ts_free((void *)self->symbol_metadata);
|
||||
ts_free((void *)self->symbol_names);
|
||||
ts_free((void *)self);
|
||||
}
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
// If the WASM feature is not enabled, define dummy versions of all of the
|
||||
|
|
@ -1556,4 +1658,12 @@ bool ts_language_is_wasm(const TSLanguage *self) {
|
|||
return false;
|
||||
}
|
||||
|
||||
void ts_wasm_language_retain(const TSLanguage *self) {
|
||||
(void)self;
|
||||
}
|
||||
|
||||
void ts_wasm_language_release(const TSLanguage *self) {
|
||||
(void)self;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
|
|||
|
|
@@ -20,6 +20,9 @@ bool ts_wasm_store_call_scanner_scan(TSWasmStore *, uint32_t, uint32_t);
 uint32_t ts_wasm_store_call_scanner_serialize(TSWasmStore *, uint32_t, char *);
 void ts_wasm_store_call_scanner_deserialize(TSWasmStore *, uint32_t, const char *, unsigned);

+void ts_wasm_language_retain(const TSLanguage *);
+void ts_wasm_language_release(const TSLanguage *);
+
 #ifdef __cplusplus
 }
 #endif
@@ -117,7 +117,7 @@ struct LineInfo {

 impl TagsConfiguration {
     pub fn new(language: Language, tags_query: &str, locals_query: &str) -> Result<Self, Error> {
-        let query = Query::new(language, &format!("{}{}", locals_query, tags_query))?;
+        let query = Query::new(&language, &format!("{}{}", locals_query, tags_query))?;

         let tags_query_offset = locals_query.len();
         let mut tags_pattern_index = 0;
@@ -265,7 +265,7 @@ impl TagsContext {
         cancellation_flag: Option<&'a AtomicUsize>,
     ) -> Result<(impl Iterator<Item = Result<Tag, Error>> + 'a, bool), Error> {
         self.parser
-            .set_language(config.language)
+            .set_language(&config.language)
             .map_err(|_| Error::InvalidLanguage)?;
         self.parser.reset();
         unsafe { self.parser.set_cancellation_flag(cancellation_flag) };