feat!: move generation of grammar files to an init command
The generate subcommand should stick to solely generating a parser and headers.
This commit is contained in:
parent
50eaf0b6cd
commit
b2359e4020
38 changed files with 1000 additions and 724 deletions
|
|
@ -1,685 +1 @@
|
|||
use std::{
|
||||
fs,
|
||||
fs::File,
|
||||
io::BufReader,
|
||||
path::{Path, PathBuf},
|
||||
str,
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
|
||||
use indoc::indoc;
|
||||
use serde::Deserialize;
|
||||
use serde_json::{json, Map, Value};
|
||||
|
||||
use super::write_file;
|
||||
|
||||
const CLI_VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
const CLI_VERSION_PLACEHOLDER: &str = "CLI_VERSION";
|
||||
|
||||
const PARSER_NAME_PLACEHOLDER: &str = "PARSER_NAME";
|
||||
const CAMEL_PARSER_NAME_PLACEHOLDER: &str = "CAMEL_PARSER_NAME";
|
||||
const UPPER_PARSER_NAME_PLACEHOLDER: &str = "UPPER_PARSER_NAME";
|
||||
const LOWER_PARSER_NAME_PLACEHOLDER: &str = "LOWER_PARSER_NAME";
|
||||
|
||||
const GRAMMAR_JS_TEMPLATE: &str = include_str!("./templates/grammar.js");
|
||||
const PACKAGE_JSON_TEMPLATE: &str = include_str!("./templates/package.json");
|
||||
const GITIGNORE_TEMPLATE: &str = include_str!("./templates/gitignore");
|
||||
const GITATTRIBUTES_TEMPLATE: &str = include_str!("./templates/gitattributes");
|
||||
const EDITORCONFIG_TEMPLATE: &str = include_str!("./templates/.editorconfig");
|
||||
|
||||
const RUST_BINDING_VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
const RUST_BINDING_VERSION_PLACEHOLDER: &str = "RUST_BINDING_VERSION";
|
||||
|
||||
const LIB_RS_TEMPLATE: &str = include_str!("./templates/lib.rs");
|
||||
const BUILD_RS_TEMPLATE: &str = include_str!("./templates/build.rs");
|
||||
const CARGO_TOML_TEMPLATE: &str = include_str!("./templates/_cargo.toml");
|
||||
|
||||
const INDEX_JS_TEMPLATE: &str = include_str!("./templates/index.js");
|
||||
const INDEX_D_TS_TEMPLATE: &str = include_str!("./templates/index.d.ts");
|
||||
const JS_BINDING_CC_TEMPLATE: &str = include_str!("./templates/js-binding.cc");
|
||||
const BINDING_GYP_TEMPLATE: &str = include_str!("./templates/binding.gyp");
|
||||
const BINDING_TEST_JS_TEMPLATE: &str = include_str!("./templates/binding_test.js");
|
||||
|
||||
const MAKEFILE_TEMPLATE: &str = include_str!("./templates/makefile");
|
||||
const PARSER_NAME_H_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.h");
|
||||
const PARSER_NAME_PC_IN_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.pc.in");
|
||||
|
||||
const GO_MOD_TEMPLATE: &str = include_str!("./templates/go.mod");
|
||||
const BINDING_GO_TEMPLATE: &str = include_str!("./templates/binding.go");
|
||||
const BINDING_TEST_GO_TEMPLATE: &str = include_str!("./templates/binding_test.go");
|
||||
|
||||
const SETUP_PY_TEMPLATE: &str = include_str!("./templates/setup.py");
|
||||
const INIT_PY_TEMPLATE: &str = include_str!("./templates/__init__.py");
|
||||
const INIT_PYI_TEMPLATE: &str = include_str!("./templates/__init__.pyi");
|
||||
const PYPROJECT_TOML_TEMPLATE: &str = include_str!("./templates/pyproject.toml");
|
||||
const PY_BINDING_C_TEMPLATE: &str = include_str!("./templates/py-binding.c");
|
||||
const TEST_BINDING_PY_TEMPLATE: &str = include_str!("./templates/test_binding.py");
|
||||
|
||||
const PACKAGE_SWIFT_TEMPLATE: &str = include_str!("./templates/package.swift");
|
||||
const TESTS_SWIFT_TEMPLATE: &str = include_str!("./templates/tests.swift");
|
||||
|
||||
/// One entry of the `tree-sitter` array in package.json. Only its presence
/// (and the array's length) is inspected here, so no fields are deserialized.
#[derive(Deserialize, Debug)]
struct LanguageConfiguration {}
|
||||
|
||||
/// Minimal view of a grammar repo's package.json: only the `tree-sitter`
/// section is deserialized, since that is all the CLI needs to locate and
/// count language configurations.
#[derive(Deserialize, Debug)]
pub struct PackageJSON {
    // The JSON key uses a dash, which is not a legal Rust identifier.
    #[serde(rename = "tree-sitter")]
    tree_sitter: Option<Vec<LanguageConfiguration>>,
}
|
||||
|
||||
/// Returns `true` when `repo_path`'s final component is one of the directory
/// names the CLI refuses to treat as a grammar repo root (build output,
/// generated sources, vendored deps, etc.).
pub fn path_in_ignore(repo_path: &Path) -> bool {
    const IGNORED_DIRS: [&str; 10] = [
        "bindings",
        "build",
        "examples",
        "node_modules",
        "queries",
        "script",
        "src",
        "target",
        "test",
        "types",
    ];
    // `Path::ends_with` compares whole components, so "foo/src" matches "src"
    // but "foo/mysrc" does not.
    IGNORED_DIRS.iter().any(|dir| repo_path.ends_with(dir))
}
|
||||
|
||||
/// Re-build `map` with `(key, value)` inserted immediately after the entry
/// named `after`, preserving the order of all existing entries.
///
/// If `after` is not present the new entry is appended at the end. The
/// previous implementation computed `unwrap_or(entries.len() - 1) + 1`, which
/// underflows and panics when the map is empty; `map_or` below yields the
/// same index for every non-empty case and simply inserts at position 0 for
/// an empty map.
fn insert_after(
    map: Map<String, Value>,
    after: &str,
    key: &str,
    value: Value,
) -> Map<String, Value> {
    let mut entries = map.into_iter().collect::<Vec<_>>();
    let insert_index = entries
        .iter()
        .position(|(k, _)| k == after)
        .map_or(entries.len(), |i| i + 1);
    entries.insert(insert_index, (key.to_string(), value));
    // Relies on serde_json's Map preserving insertion order.
    entries.into_iter().collect()
}
|
||||
|
||||
/// Create — or, for legacy repos, migrate in place — the scaffolding files of
/// a grammar repo rooted at `repo_path`: package.json, grammar.js, and, when
/// `generate_bindings` is true, the Rust/Node/C/Go/Python/Swift binding files.
///
/// Existing files are never overwritten wholesale: each one is either left
/// alone, created when missing, or patched when it still uses a legacy
/// binding API (the `missing_path` / `missing_path_else` helpers encode that
/// policy).
///
/// # Errors
/// Fails on any I/O error, on unparseable package.json, or when a legacy file
/// cannot be auto-migrated.
pub fn generate_grammar_files(
    repo_path: &Path,
    language_name: &str,
    generate_bindings: bool,
) -> Result<()> {
    let dashed_language_name = language_name.to_kebab_case();

    // TODO: remove legacy code updates in v0.24.0

    // Create or update package.json
    let package_json_path_state = missing_path_else(
        repo_path.join("package.json"),
        |path| generate_file(path, PACKAGE_JSON_TEMPLATE, dashed_language_name.as_str()),
        |path| {
            let package_json_str =
                fs::read_to_string(path).with_context(|| "Failed to read package.json")?;
            let mut package_json = serde_json::from_str::<Map<String, Value>>(&package_json_str)
                .with_context(|| "Failed to parse package.json")?;
            if generate_bindings {
                // Tracks whether any migration below touched the map, so we
                // only rewrite the file when something actually changed.
                let mut updated = false;

                let dependencies = package_json
                    .entry("dependencies".to_string())
                    .or_insert_with(|| Value::Object(Map::new()))
                    .as_object_mut()
                    .unwrap();
                // Legacy repos depended on `nan`; the new binding API uses
                // node-addon-api + node-gyp-build instead.
                if dependencies.remove("nan").is_some() {
                    eprintln!("Replacing nan dependency with node-addon-api in package.json");
                    dependencies.insert("node-addon-api".to_string(), "^8.0.0".into());
                    updated = true;
                }
                if !dependencies.contains_key("node-gyp-build") {
                    eprintln!("Adding node-gyp-build dependency to package.json");
                    dependencies.insert("node-gyp-build".to_string(), "^4.8.1".into());
                    updated = true;
                }

                let dev_dependencies = package_json
                    .entry("devDependencies".to_string())
                    .or_insert_with(|| Value::Object(Map::new()))
                    .as_object_mut()
                    .unwrap();
                if !dev_dependencies.contains_key("prebuildify") {
                    eprintln!("Adding prebuildify devDependency to package.json");
                    dev_dependencies.insert("prebuildify".to_string(), "^6.0.1".into());
                    updated = true;
                }

                // The `test` script doubles as the marker for an up-to-date
                // scripts section; if it differs, the whole section is reset.
                let node_test = "node --test bindings/node/*_test.js";
                let scripts = package_json
                    .entry("scripts".to_string())
                    .or_insert_with(|| Value::Object(Map::new()))
                    .as_object_mut()
                    .unwrap();
                if !scripts.get("test").is_some_and(|v| v == node_test) {
                    eprintln!("Updating package.json scripts");
                    *scripts = Map::from_iter([
                        ("install".to_string(), "node-gyp-build".into()),
                        ("prestart".to_string(), "tree-sitter build --wasm".into()),
                        ("start".to_string(), "tree-sitter playground".into()),
                        ("test".to_string(), node_test.into()),
                    ]);
                    updated = true;
                }

                // insert `peerDependencies` after `dependencies`
                if !package_json.contains_key("peerDependencies") {
                    eprintln!("Adding peerDependencies to package.json");
                    package_json = insert_after(
                        package_json,
                        "dependencies",
                        "peerDependencies",
                        json!({"tree-sitter": "^0.21.1"}),
                    );

                    package_json = insert_after(
                        package_json,
                        "peerDependencies",
                        "peerDependenciesMeta",
                        json!({"tree_sitter": {"optional": true}}),
                    );
                    updated = true;
                }

                // insert `types` right after `main`
                if !package_json.contains_key("types") {
                    eprintln!("Adding types to package.json");
                    package_json =
                        insert_after(package_json, "main", "types", "bindings/node".into());
                    updated = true;
                }

                // insert `files` right after `keywords`
                if !package_json.contains_key("files") {
                    eprintln!("Adding files to package.json");
                    package_json = insert_after(
                        package_json,
                        "keywords",
                        "files",
                        json!([
                            "grammar.js",
                            "binding.gyp",
                            "prebuilds/**",
                            "bindings/node/*",
                            "queries/*",
                            "src/**",
                            "*.wasm"
                        ]),
                    );
                    updated = true;
                }

                // insert `tree-sitter` at the end
                if !package_json.contains_key("tree-sitter") {
                    eprintln!("Adding a `tree-sitter` section to package.json");
                    package_json.insert(
                        "tree-sitter".to_string(),
                        json!([{
                            "scope": format!("source.{language_name}"),
                            "injection-regex": format!("^{language_name}$"),
                        }]),
                    );
                    updated = true;
                }

                if updated {
                    let mut package_json_str = serde_json::to_string_pretty(&package_json)?;
                    package_json_str.push('\n');
                    write_file(path, package_json_str)?;
                }
            }

            Ok(())
        },
    )?;

    // A missing/invalid tree-sitter section is only fatal when we were asked
    // to generate bindings; otherwise there is nothing more to do.
    let package_json = match lookup_package_json_for_path(package_json_path_state.as_path()) {
        Ok((_, p)) => p,
        Err(e) if generate_bindings => return Err(e),
        _ => return Ok(()),
    };

    // Do not create a grammar.js file in a repo with multiple language configs
    if !package_json.has_multiple_language_configs() {
        missing_path(repo_path.join("grammar.js"), |path| {
            generate_file(path, GRAMMAR_JS_TEMPLATE, language_name)
        })?;
    }

    if !generate_bindings {
        // our job is done
        return Ok(());
    }

    // Write .gitignore file
    missing_path(repo_path.join(".gitignore"), |path| {
        generate_file(path, GITIGNORE_TEMPLATE, language_name)
    })?;

    // Write .gitattributes file
    missing_path(repo_path.join(".gitattributes"), |path| {
        generate_file(path, GITATTRIBUTES_TEMPLATE, language_name)
    })?;

    // Write .editorconfig file
    missing_path(repo_path.join(".editorconfig"), |path| {
        generate_file(path, EDITORCONFIG_TEMPLATE, language_name)
    })?;

    let bindings_dir = repo_path.join("bindings");

    // Generate Rust bindings
    missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
        missing_path_else(
            path.join("lib.rs"),
            |path| generate_file(path, LIB_RS_TEMPLATE, language_name),
            |path| {
                // `tree_sitter_language` is the marker for an up-to-date lib.rs.
                let lib_rs =
                    fs::read_to_string(path).with_context(|| "Failed to read lib.rs")?;
                if !lib_rs.contains("tree_sitter_language") {
                    generate_file(path, LIB_RS_TEMPLATE, language_name)?;
                    eprintln!("Updated lib.rs with `tree_sitter_language` dependency");
                }
                Ok(())
            },
        )?;

        missing_path_else(
            path.join("build.rs"),
            |path| generate_file(path, BUILD_RS_TEMPLATE, language_name),
            |path| {
                let build_rs =
                    fs::read_to_string(path).with_context(|| "Failed to read build.rs")?;
                if !build_rs.contains("-utf-8") {
                    // Splice the MSVC /utf-8 flag in just before the line that
                    // locates parser.c; fails loudly if that anchor moved.
                    let index = build_rs
                        .find(" let parser_path = src_dir.join(\"parser.c\")")
                        .ok_or_else(|| anyhow!(indoc!{
                            "Failed to auto-update build.rs with the `/utf-8` flag for windows.
                            To fix this, remove `bindings/rust/build.rs` and re-run `tree-sitter generate`"}))?;

                    let build_rs = format!(
                        "{}{}{}\n{}",
                        &build_rs[..index],
                        " #[cfg(target_env = \"msvc\")]\n",
                        " c_config.flag(\"-utf-8\");\n",
                        &build_rs[index..]
                    );

                    write_file(path, build_rs)?;
                    eprintln!("Updated build.rs with the /utf-8 flag for Windows compilation");
                }
                Ok(())
            },
        )?;

        missing_path_else(
            repo_path.join("Cargo.toml"),
            |path| generate_file(path, CARGO_TOML_TEMPLATE, dashed_language_name.as_str()),
            |path| {
                let cargo_toml =
                    fs::read_to_string(path).with_context(|| "Failed to read Cargo.toml")?;
                if !cargo_toml.contains("tree-sitter-language") {
                    // Replace the `tree-sitter = "<ver>"` dependency with
                    // `tree-sitter-language`, demoting tree-sitter itself to a
                    // dev-dependency.
                    let start_index = cargo_toml
                        .find("tree-sitter = \"")
                        .ok_or_else(|| anyhow!("Failed to find the `tree-sitter` dependency in Cargo.toml"))?;

                    let version_start_index = start_index + "tree-sitter = \"".len();
                    let version_end_index = cargo_toml[version_start_index..]
                        .find('\"')
                        .map(|i| i + version_start_index)
                        .ok_or_else(|| anyhow!("Failed to find the end of the `tree-sitter` version in Cargo.toml"))?;

                    let cargo_toml = format!(
                        "{}{}{}\n{}\n{}",
                        &cargo_toml[..start_index],
                        "tree-sitter-language = \"0.1.0\"",
                        &cargo_toml[version_end_index + 1..],
                        "[dev-dependencies]",
                        "tree-sitter = \"0.23\"",
                    );

                    write_file(path, cargo_toml)?;
                    eprintln!("Updated Cargo.toml with the `tree-sitter-language` dependency");
                }
                Ok(())
            },
        )?;

        Ok(())
    })?;

    // Generate Node bindings
    missing_path(bindings_dir.join("node"), create_dir)?.apply(|path| {
        missing_path_else(
            path.join("index.js"),
            |path| generate_file(path, INDEX_JS_TEMPLATE, language_name),
            |path| {
                // A hard-coded build path marks the pre-prebuildify layout.
                let index_js =
                    fs::read_to_string(path).with_context(|| "Failed to read index.js")?;
                if index_js.contains("../../build/Release") {
                    eprintln!("Replacing index.js with new binding API");
                    generate_file(path, INDEX_JS_TEMPLATE, language_name)?;
                }
                Ok(())
            },
        )?;

        missing_path(path.join("index.d.ts"), |path| {
            generate_file(path, INDEX_D_TS_TEMPLATE, language_name)
        })?;

        missing_path(path.join("binding_test.js"), |path| {
            generate_file(path, BINDING_TEST_JS_TEMPLATE, language_name)
        })?;

        missing_path_else(
            path.join("binding.cc"),
            |path| generate_file(path, JS_BINDING_CC_TEMPLATE, language_name),
            |path| {
                // The nan-era `NAN_METHOD` macro marks a legacy binding.cc.
                let binding_cc =
                    fs::read_to_string(path).with_context(|| "Failed to read binding.cc")?;
                if binding_cc.contains("NAN_METHOD(New) {}") {
                    eprintln!("Replacing binding.cc with new binding API");
                    generate_file(path, JS_BINDING_CC_TEMPLATE, language_name)?;
                }
                Ok(())
            },
        )?;

        // Create binding.gyp, or update it with new binding API.
        missing_path_else(
            repo_path.join("binding.gyp"),
            |path| generate_file(path, BINDING_GYP_TEMPLATE, language_name),
            |path| {
                let binding_gyp =
                    fs::read_to_string(path).with_context(|| "Failed to read binding.gyp")?;
                if binding_gyp.contains("require('nan')") {
                    eprintln!("Replacing binding.gyp with new binding API");
                    generate_file(path, BINDING_GYP_TEMPLATE, language_name)?;
                }
                Ok(())
            },
        )?;

        Ok(())
    })?;

    // Generate C bindings
    missing_path(bindings_dir.join("c"), create_dir)?.apply(|path| {
        missing_path(
            path.join(format!("tree-sitter-{language_name}.h")),
            |path| generate_file(path, PARSER_NAME_H_TEMPLATE, language_name),
        )?;

        missing_path(
            path.join(format!("tree-sitter-{language_name}.pc.in")),
            |path| generate_file(path, PARSER_NAME_PC_IN_TEMPLATE, language_name),
        )?;

        missing_path(repo_path.join("Makefile"), |path| {
            generate_file(path, MAKEFILE_TEMPLATE, language_name)
        })?;

        Ok(())
    })?;

    // Generate Go bindings
    missing_path(bindings_dir.join("go"), create_dir)?.apply(|path| {
        missing_path(path.join("binding.go"), |path| {
            generate_file(path, BINDING_GO_TEMPLATE, language_name)
        })?;

        missing_path_else(
            path.join("binding_test.go"),
            |path| generate_file(path, BINDING_TEST_GO_TEMPLATE, language_name),
            |path| {
                // "smacker" (the old go-tree-sitter module author) marks a
                // legacy binding_test.go.
                let binding_test_go =
                    fs::read_to_string(path).with_context(|| "Failed to read binding_test.go")?;
                if binding_test_go.contains("smacker") {
                    eprintln!("Replacing binding_test.go with new binding API");
                    generate_file(path, BINDING_TEST_GO_TEMPLATE, language_name)?;
                }
                Ok(())
            },
        )?;

        // Delete the old go.mod file that lives inside bindings/go, it now lives in the root dir
        let go_mod_path = path.join("go.mod");
        if go_mod_path.exists() {
            fs::remove_file(go_mod_path).with_context(|| "Failed to remove old go.mod file")?;
        }

        missing_path(repo_path.join("go.mod"), |path| {
            generate_file(path, GO_MOD_TEMPLATE, language_name)
        })?;

        Ok(())
    })?;

    // Generate Python bindings
    missing_path(bindings_dir.join("python"), create_dir)?.apply(|path| {
        let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
        missing_path(&lang_path, create_dir)?;

        missing_path_else(
            lang_path.join("binding.c"),
            |path| generate_file(path, PY_BINDING_C_TEMPLATE, language_name),
            |path| {
                // `PyCapsule_New` marks the current binding API.
                let binding_c = fs::read_to_string(path)
                    .with_context(|| "Failed to read bindings/python/binding.c")?;
                if !binding_c.contains("PyCapsule_New") {
                    eprintln!("Replacing bindings/python/binding.c with new binding API");
                    generate_file(path, PY_BINDING_C_TEMPLATE, language_name)?;
                }
                Ok(())
            },
        )?;

        missing_path(lang_path.join("__init__.py"), |path| {
            generate_file(path, INIT_PY_TEMPLATE, language_name)
        })?;

        missing_path(lang_path.join("__init__.pyi"), |path| {
            generate_file(path, INIT_PYI_TEMPLATE, language_name)
        })?;

        missing_path(lang_path.join("py.typed"), |path| {
            generate_file(path, "", language_name) // py.typed is empty
        })?;

        missing_path(path.join("tests"), create_dir)?.apply(|path| {
            missing_path(path.join("test_binding.py"), |path| {
                generate_file(path, TEST_BINDING_PY_TEMPLATE, language_name)
            })?;
            Ok(())
        })?;

        missing_path(repo_path.join("setup.py"), |path| {
            generate_file(path, SETUP_PY_TEMPLATE, language_name)
        })?;

        missing_path(repo_path.join("pyproject.toml"), |path| {
            generate_file(path, PYPROJECT_TOML_TEMPLATE, dashed_language_name.as_str())
        })?;

        Ok(())
    })?;

    // Generate Swift bindings
    missing_path(bindings_dir.join("swift"), create_dir)?.apply(|path| {
        let lang_path = path.join(format!("TreeSitter{}", language_name.to_upper_camel_case()));
        missing_path(&lang_path, create_dir)?;

        missing_path(lang_path.join(format!("{language_name}.h")), |path| {
            generate_file(path, PARSER_NAME_H_TEMPLATE, language_name)
        })?;

        missing_path(
            path.join(format!(
                "TreeSitter{}Tests",
                language_name.to_upper_camel_case()
            )),
            create_dir,
        )?
        .apply(|path| {
            missing_path(
                path.join(format!(
                    "TreeSitter{}Tests.swift",
                    language_name.to_upper_camel_case()
                )),
                |path| generate_file(path, TESTS_SWIFT_TEMPLATE, language_name),
            )?;

            Ok(())
        })?;

        missing_path(repo_path.join("Package.swift"), |path| {
            generate_file(path, PACKAGE_SWIFT_TEMPLATE, language_name)
        })?;

        Ok(())
    })?;

    Ok(())
}
|
||||
|
||||
/// Walk upward from `path` looking for a package.json that contains a
/// `tree-sitter` section; returns the path of that file and its parsed form.
///
/// `path` is expected to point at a package.json candidate; on each failed
/// attempt the search moves to the parent directory's package.json.
///
/// # Errors
/// Fails if the filesystem root is reached without finding a suitable file,
/// or if a candidate exists but cannot be opened or parsed.
pub fn lookup_package_json_for_path(path: &Path) -> Result<(PathBuf, PackageJSON)> {
    let mut pathbuf = path.to_owned();
    loop {
        // Only attempt to open/parse the candidate when it exists on disk;
        // `transpose` surfaces open/parse failures as hard errors.
        let package_json = pathbuf
            .exists()
            .then(|| -> Result<PackageJSON> {
                let file =
                    File::open(pathbuf.as_path()).with_context(|| "Failed to open package.json")?;
                serde_json::from_reader(BufReader::new(file)).context(
                    "Failed to parse package.json, is the `tree-sitter` section malformed?",
                )
            })
            .transpose()?;
        if let Some(package_json) = package_json {
            if package_json.tree_sitter.is_some() {
                return Ok((pathbuf, package_json));
            }
        }
        pathbuf.pop(); // package.json
        // Step into the parent directory; if there is none left we have hit
        // the filesystem root and the search has failed.
        if !pathbuf.pop() {
            return Err(anyhow!(concat!(
                "Failed to locate a package.json file that has a \"tree-sitter\" section,",
                " please ensure you have one, and if you don't then consult the docs",
            )));
        }
        pathbuf.push("package.json");
    }
}
|
||||
|
||||
fn generate_file(path: &Path, template: &str, language_name: &str) -> Result<()> {
|
||||
write_file(
|
||||
path,
|
||||
template
|
||||
.replace(
|
||||
CAMEL_PARSER_NAME_PLACEHOLDER,
|
||||
&language_name.to_upper_camel_case(),
|
||||
)
|
||||
.replace(
|
||||
UPPER_PARSER_NAME_PLACEHOLDER,
|
||||
&language_name.to_shouty_snake_case(),
|
||||
)
|
||||
.replace(
|
||||
LOWER_PARSER_NAME_PLACEHOLDER,
|
||||
&language_name.to_snake_case(),
|
||||
)
|
||||
.replace(PARSER_NAME_PLACEHOLDER, language_name)
|
||||
.replace(CLI_VERSION_PLACEHOLDER, CLI_VERSION)
|
||||
.replace(RUST_BINDING_VERSION_PLACEHOLDER, RUST_BINDING_VERSION),
|
||||
)
|
||||
}
|
||||
|
||||
fn create_dir(path: &Path) -> Result<()> {
|
||||
fs::create_dir_all(path)
|
||||
.with_context(|| format!("Failed to create {:?}", path.to_string_lossy()))
|
||||
}
|
||||
|
||||
/// Outcome of probing a filesystem path: whether it already existed or was
/// missing (and therefore just created/populated by the caller's action).
/// Carries the probed path either way so follow-up work can chain off it.
#[derive(PartialEq, Eq, Debug)]
enum PathState<P>
where
    P: AsRef<Path>,
{
    Exists(P),
    Missing(P),
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl<P> PathState<P>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
fn exists(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
|
||||
if let Self::Exists(path) = self {
|
||||
action(path.as_ref())?;
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn missing(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
|
||||
if let Self::Missing(path) = self {
|
||||
action(path.as_ref())?;
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn apply(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
|
||||
action(self.as_path())?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn apply_state(&self, mut action: impl FnMut(&Self) -> Result<()>) -> Result<&Self> {
|
||||
action(self)?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn as_path(&self) -> &Path {
|
||||
match self {
|
||||
Self::Exists(path) | Self::Missing(path) => path.as_ref(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn missing_path<P, F>(path: P, mut action: F) -> Result<PathState<P>>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
F: FnMut(&Path) -> Result<()>,
|
||||
{
|
||||
let path_ref = path.as_ref();
|
||||
if !path_ref.exists() {
|
||||
action(path_ref)?;
|
||||
Ok(PathState::Missing(path))
|
||||
} else {
|
||||
Ok(PathState::Exists(path))
|
||||
}
|
||||
}
|
||||
|
||||
fn missing_path_else<P, T, F>(path: P, mut action: T, mut else_action: F) -> Result<PathState<P>>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
T: FnMut(&Path) -> Result<()>,
|
||||
F: FnMut(&Path) -> Result<()>,
|
||||
{
|
||||
let path_ref = path.as_ref();
|
||||
if !path_ref.exists() {
|
||||
action(path_ref)?;
|
||||
Ok(PathState::Missing(path))
|
||||
} else {
|
||||
else_action(path_ref)?;
|
||||
Ok(PathState::Exists(path))
|
||||
}
|
||||
}
|
||||
|
||||
impl PackageJSON {
|
||||
fn has_multiple_language_configs(&self) -> bool {
|
||||
self.tree_sitter.as_ref().is_some_and(|c| c.len() > 1)
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ use std::{
|
|||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use build_tables::build_tables;
|
||||
use grammar_files::path_in_ignore;
|
||||
use grammars::InputGrammar;
|
||||
use lazy_static::lazy_static;
|
||||
use parse_grammar::parse_grammar;
|
||||
|
|
@ -28,8 +27,6 @@ mod render;
|
|||
mod rules;
|
||||
mod tables;
|
||||
|
||||
pub use grammar_files::lookup_package_json_for_path;
|
||||
|
||||
lazy_static! {
|
||||
static ref JSON_COMMENT_REGEX: Regex = RegexBuilder::new("^\\s*//.*")
|
||||
.multi_line(true)
|
||||
|
|
@ -42,13 +39,13 @@ struct GeneratedParser {
|
|||
node_types_json: String,
|
||||
}
|
||||
|
||||
pub const ALLOC_HEADER: &str = include_str!("./templates/alloc.h");
|
||||
pub const ALLOC_HEADER: &str = include_str!("../templates/alloc.h");
|
||||
pub const ARRAY_HEADER: &str = include_str!("../templates/array.h");
|
||||
|
||||
pub fn generate_parser_in_directory(
|
||||
repo_path: &Path,
|
||||
grammar_path: Option<&str>,
|
||||
abi_version: usize,
|
||||
generate_bindings: bool,
|
||||
report_symbol_name: Option<&str>,
|
||||
js_runtime: Option<&str>,
|
||||
) -> Result<()> {
|
||||
|
|
@ -72,20 +69,6 @@ pub fn generate_parser_in_directory(
|
|||
.map(PathBuf::from)
|
||||
.unwrap_or(repo_path.join("grammar.js"));
|
||||
|
||||
if repo_path.is_dir() && !grammar_path.exists() && !path_in_ignore(&repo_path) {
|
||||
if let Some(dir_name) = repo_path
|
||||
.file_name()
|
||||
.map(|x| x.to_string_lossy().to_ascii_lowercase())
|
||||
{
|
||||
if let Some(language_name) = dir_name
|
||||
.strip_prefix("tree-sitter-")
|
||||
.or_else(|| Some(dir_name.as_ref()))
|
||||
{
|
||||
grammar_files::generate_grammar_files(&repo_path, language_name, false)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Read the grammar file.
|
||||
let grammar_json = load_grammar_file(&grammar_path, js_runtime)?;
|
||||
|
||||
|
|
@ -113,13 +96,9 @@ pub fn generate_parser_in_directory(
|
|||
write_file(&src_path.join("parser.c"), c_code)?;
|
||||
write_file(&src_path.join("node-types.json"), node_types_json)?;
|
||||
write_file(&header_path.join("alloc.h"), ALLOC_HEADER)?;
|
||||
write_file(&header_path.join("array.h"), tree_sitter::ARRAY_HEADER)?;
|
||||
write_file(&header_path.join("array.h"), ARRAY_HEADER)?;
|
||||
write_file(&header_path.join("parser.h"), tree_sitter::PARSER_HEADER)?;
|
||||
|
||||
if !path_in_ignore(&repo_path) && grammar_path == repo_path.join("grammar.js") {
|
||||
grammar_files::generate_grammar_files(&repo_path, &input_grammar.name, generate_bindings)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
|
@ -267,7 +246,7 @@ fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> Result
|
|||
}
|
||||
}
|
||||
|
||||
fn write_file(path: &Path, body: impl AsRef<[u8]>) -> Result<()> {
|
||||
pub fn write_file(path: &Path, body: impl AsRef<[u8]>) -> Result<()> {
|
||||
fs::write(path, body)
|
||||
.with_context(|| format!("Failed to write {:?}", path.file_name().unwrap()))
|
||||
}
|
||||
|
|
|
|||
669
cli/src/init.rs
Normal file
669
cli/src/init.rs
Normal file
|
|
@ -0,0 +1,669 @@
|
|||
use std::{
|
||||
fs,
|
||||
fs::File,
|
||||
io::BufReader,
|
||||
path::{Path, PathBuf},
|
||||
str,
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use heck::{ToKebabCase, ToShoutySnakeCase, ToSnakeCase, ToUpperCamelCase};
|
||||
use indoc::indoc;
|
||||
use serde::Deserialize;
|
||||
use serde_json::{json, Map, Value};
|
||||
|
||||
use crate::generate::write_file;
|
||||
|
||||
const CLI_VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
const CLI_VERSION_PLACEHOLDER: &str = "CLI_VERSION";
|
||||
|
||||
const PARSER_NAME_PLACEHOLDER: &str = "PARSER_NAME";
|
||||
const CAMEL_PARSER_NAME_PLACEHOLDER: &str = "CAMEL_PARSER_NAME";
|
||||
const UPPER_PARSER_NAME_PLACEHOLDER: &str = "UPPER_PARSER_NAME";
|
||||
const LOWER_PARSER_NAME_PLACEHOLDER: &str = "LOWER_PARSER_NAME";
|
||||
|
||||
const GRAMMAR_JS_TEMPLATE: &str = include_str!("./templates/grammar.js");
|
||||
const PACKAGE_JSON_TEMPLATE: &str = include_str!("./templates/package.json");
|
||||
const GITIGNORE_TEMPLATE: &str = include_str!("./templates/gitignore");
|
||||
const GITATTRIBUTES_TEMPLATE: &str = include_str!("./templates/gitattributes");
|
||||
const EDITORCONFIG_TEMPLATE: &str = include_str!("./templates/.editorconfig");
|
||||
|
||||
const RUST_BINDING_VERSION: &str = env!("CARGO_PKG_VERSION");
|
||||
const RUST_BINDING_VERSION_PLACEHOLDER: &str = "RUST_BINDING_VERSION";
|
||||
|
||||
const LIB_RS_TEMPLATE: &str = include_str!("./templates/lib.rs");
|
||||
const BUILD_RS_TEMPLATE: &str = include_str!("./templates/build.rs");
|
||||
const CARGO_TOML_TEMPLATE: &str = include_str!("./templates/_cargo.toml");
|
||||
|
||||
const INDEX_JS_TEMPLATE: &str = include_str!("./templates/index.js");
|
||||
const INDEX_D_TS_TEMPLATE: &str = include_str!("./templates/index.d.ts");
|
||||
const JS_BINDING_CC_TEMPLATE: &str = include_str!("./templates/js-binding.cc");
|
||||
const BINDING_GYP_TEMPLATE: &str = include_str!("./templates/binding.gyp");
|
||||
const BINDING_TEST_JS_TEMPLATE: &str = include_str!("./templates/binding_test.js");
|
||||
|
||||
const MAKEFILE_TEMPLATE: &str = include_str!("./templates/makefile");
|
||||
const PARSER_NAME_H_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.h");
|
||||
const PARSER_NAME_PC_IN_TEMPLATE: &str = include_str!("./templates/PARSER_NAME.pc.in");
|
||||
|
||||
const GO_MOD_TEMPLATE: &str = include_str!("./templates/go.mod");
|
||||
const BINDING_GO_TEMPLATE: &str = include_str!("./templates/binding.go");
|
||||
const BINDING_TEST_GO_TEMPLATE: &str = include_str!("./templates/binding_test.go");
|
||||
|
||||
const SETUP_PY_TEMPLATE: &str = include_str!("./templates/setup.py");
|
||||
const INIT_PY_TEMPLATE: &str = include_str!("./templates/__init__.py");
|
||||
const INIT_PYI_TEMPLATE: &str = include_str!("./templates/__init__.pyi");
|
||||
const PYPROJECT_TOML_TEMPLATE: &str = include_str!("./templates/pyproject.toml");
|
||||
const PY_BINDING_C_TEMPLATE: &str = include_str!("./templates/py-binding.c");
|
||||
const TEST_BINDING_PY_TEMPLATE: &str = include_str!("./templates/test_binding.py");
|
||||
|
||||
const PACKAGE_SWIFT_TEMPLATE: &str = include_str!("./templates/package.swift");
|
||||
const TESTS_SWIFT_TEMPLATE: &str = include_str!("./templates/tests.swift");
|
||||
|
||||
/// One entry of the `tree-sitter` array in package.json. Only its presence
/// (and the array's length) is inspected here, so no fields are deserialized.
#[derive(Deserialize, Debug)]
struct LanguageConfiguration {}
|
||||
|
||||
/// Minimal view of a grammar repo's package.json: only the `tree-sitter`
/// section is deserialized, since that is all the CLI needs to locate and
/// count language configurations.
#[derive(Deserialize, Debug)]
pub struct PackageJSON {
    // The JSON key uses a dash, which is not a legal Rust identifier.
    #[serde(rename = "tree-sitter")]
    tree_sitter: Option<Vec<LanguageConfiguration>>,
}
|
||||
|
||||
/// Returns `true` when `repo_path`'s final component is one of the directory
/// names the CLI refuses to treat as a grammar repo root (build output,
/// generated sources, vendored deps, etc.).
pub fn path_in_ignore(repo_path: &Path) -> bool {
    const IGNORED_DIRS: [&str; 10] = [
        "bindings",
        "build",
        "examples",
        "node_modules",
        "queries",
        "script",
        "src",
        "target",
        "test",
        "types",
    ];
    // `Path::ends_with` compares whole components, so "foo/src" matches "src"
    // but "foo/mysrc" does not.
    IGNORED_DIRS.iter().any(|dir| repo_path.ends_with(dir))
}
|
||||
|
||||
/// Re-build `map` with `(key, value)` inserted immediately after the entry
/// named `after`, preserving the order of all existing entries.
///
/// If `after` is not present the new entry is appended at the end. The
/// previous implementation computed `unwrap_or(entries.len() - 1) + 1`, which
/// underflows and panics when the map is empty; `map_or` below yields the
/// same index for every non-empty case and simply inserts at position 0 for
/// an empty map.
fn insert_after(
    map: Map<String, Value>,
    after: &str,
    key: &str,
    value: Value,
) -> Map<String, Value> {
    let mut entries = map.into_iter().collect::<Vec<_>>();
    let insert_index = entries
        .iter()
        .position(|(k, _)| k == after)
        .map_or(entries.len(), |i| i + 1);
    entries.insert(insert_index, (key.to_string(), value));
    // Relies on serde_json's Map preserving insertion order.
    entries.into_iter().collect()
}
|
||||
|
||||
pub fn generate_grammar_files(repo_path: &Path, language_name: &str) -> Result<()> {
|
||||
let dashed_language_name = language_name.to_kebab_case();
|
||||
|
||||
// TODO: remove legacy code updates in v0.24.0
|
||||
|
||||
// Create or update package.json
|
||||
let package_json_path_state = missing_path_else(
|
||||
repo_path.join("package.json"),
|
||||
|path| generate_file(path, PACKAGE_JSON_TEMPLATE, dashed_language_name.as_str()),
|
||||
|path| {
|
||||
let package_json_str =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read package.json")?;
|
||||
let mut package_json = serde_json::from_str::<Map<String, Value>>(&package_json_str)
|
||||
.with_context(|| "Failed to parse package.json")?;
|
||||
let mut updated = false;
|
||||
|
||||
let dependencies = package_json
|
||||
.entry("dependencies".to_string())
|
||||
.or_insert_with(|| Value::Object(Map::new()))
|
||||
.as_object_mut()
|
||||
.unwrap();
|
||||
if dependencies.remove("nan").is_some() {
|
||||
eprintln!("Replacing nan dependency with node-addon-api in package.json");
|
||||
dependencies.insert("node-addon-api".to_string(), "^8.0.0".into());
|
||||
updated = true;
|
||||
}
|
||||
if !dependencies.contains_key("node-gyp-build") {
|
||||
eprintln!("Adding node-gyp-build dependency to package.json");
|
||||
dependencies.insert("node-gyp-build".to_string(), "^4.8.1".into());
|
||||
updated = true;
|
||||
}
|
||||
|
||||
let dev_dependencies = package_json
|
||||
.entry("devDependencies".to_string())
|
||||
.or_insert_with(|| Value::Object(Map::new()))
|
||||
.as_object_mut()
|
||||
.unwrap();
|
||||
if !dev_dependencies.contains_key("prebuildify") {
|
||||
eprintln!("Adding prebuildify devDependency to package.json");
|
||||
dev_dependencies.insert("prebuildify".to_string(), "^6.0.1".into());
|
||||
updated = true;
|
||||
}
|
||||
|
||||
let node_test = "node --test bindings/node/*_test.js";
|
||||
let scripts = package_json
|
||||
.entry("scripts".to_string())
|
||||
.or_insert_with(|| Value::Object(Map::new()))
|
||||
.as_object_mut()
|
||||
.unwrap();
|
||||
if !scripts.get("test").is_some_and(|v| v == node_test) {
|
||||
eprintln!("Updating package.json scripts");
|
||||
*scripts = Map::from_iter([
|
||||
("install".to_string(), "node-gyp-build".into()),
|
||||
("prestart".to_string(), "tree-sitter build --wasm".into()),
|
||||
("start".to_string(), "tree-sitter playground".into()),
|
||||
("test".to_string(), node_test.into()),
|
||||
]);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// insert `peerDependencies` after `dependencies`
|
||||
if !package_json.contains_key("peerDependencies") {
|
||||
eprintln!("Adding peerDependencies to package.json");
|
||||
package_json = insert_after(
|
||||
package_json,
|
||||
"dependencies",
|
||||
"peerDependencies",
|
||||
json!({"tree-sitter": "^0.21.1"}),
|
||||
);
|
||||
|
||||
package_json = insert_after(
|
||||
package_json,
|
||||
"peerDependencies",
|
||||
"peerDependenciesMeta",
|
||||
json!({"tree_sitter": {"optional": true}}),
|
||||
);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// insert `types` right after `main`
|
||||
if !package_json.contains_key("types") {
|
||||
eprintln!("Adding types to package.json");
|
||||
package_json = insert_after(package_json, "main", "types", "bindings/node".into());
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// insert `files` right after `keywords`
|
||||
if !package_json.contains_key("files") {
|
||||
eprintln!("Adding files to package.json");
|
||||
package_json = insert_after(
|
||||
package_json,
|
||||
"keywords",
|
||||
"files",
|
||||
json!([
|
||||
"grammar.js",
|
||||
"binding.gyp",
|
||||
"prebuilds/**",
|
||||
"bindings/node/*",
|
||||
"queries/*",
|
||||
"src/**",
|
||||
"*.wasm"
|
||||
]),
|
||||
);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// insert `tree-sitter` at the end
|
||||
if !package_json.contains_key("tree-sitter") {
|
||||
eprintln!("Adding a `tree-sitter` section to package.json");
|
||||
package_json.insert(
|
||||
"tree-sitter".to_string(),
|
||||
json!([{
|
||||
"scope": format!("source.{language_name}"),
|
||||
"injection-regex": format!("^{language_name}$"),
|
||||
}]),
|
||||
);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if updated {
|
||||
let mut package_json_str = serde_json::to_string_pretty(&package_json)?;
|
||||
package_json_str.push('\n');
|
||||
write_file(path, package_json_str)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
let package_json = lookup_package_json_for_path(package_json_path_state.as_path())?.1;
|
||||
|
||||
// Do not create a grammar.js file in a repo with multiple language configs
|
||||
if !package_json.has_multiple_language_configs() {
|
||||
missing_path(repo_path.join("grammar.js"), |path| {
|
||||
generate_file(path, GRAMMAR_JS_TEMPLATE, language_name)
|
||||
})?;
|
||||
}
|
||||
|
||||
// Write .gitignore file
|
||||
missing_path(repo_path.join(".gitignore"), |path| {
|
||||
generate_file(path, GITIGNORE_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
// Write .gitattributes file
|
||||
missing_path(repo_path.join(".gitattributes"), |path| {
|
||||
generate_file(path, GITATTRIBUTES_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
// Write .editorconfig file
|
||||
missing_path(repo_path.join(".editorconfig"), |path| {
|
||||
generate_file(path, EDITORCONFIG_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
let bindings_dir = repo_path.join("bindings");
|
||||
|
||||
// Generate Rust bindings
|
||||
missing_path(bindings_dir.join("rust"), create_dir)?.apply(|path| {
|
||||
missing_path_else(
|
||||
path.join("lib.rs"),
|
||||
|path| generate_file(path, LIB_RS_TEMPLATE, language_name),
|
||||
|path| {
|
||||
let lib_rs =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read lib.rs")?;
|
||||
if !lib_rs.contains("tree_sitter_language") {
|
||||
generate_file(path, LIB_RS_TEMPLATE, language_name)?;
|
||||
eprintln!("Updated lib.rs with `tree_sitter_language` dependency");
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
missing_path_else(
|
||||
path.join("build.rs"),
|
||||
|path| generate_file(path, BUILD_RS_TEMPLATE, language_name),
|
||||
|path| {
|
||||
let build_rs =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read build.rs")?;
|
||||
if !build_rs.contains("-utf-8") {
|
||||
let index = build_rs
|
||||
.find(" let parser_path = src_dir.join(\"parser.c\")")
|
||||
.ok_or_else(|| anyhow!(indoc!{
|
||||
"Failed to auto-update build.rs with the `/utf-8` flag for windows.
|
||||
To fix this, remove `bindings/rust/build.rs` and re-run `tree-sitter generate`"}))?;
|
||||
|
||||
let build_rs = format!(
|
||||
"{}{}{}\n{}",
|
||||
&build_rs[..index],
|
||||
" #[cfg(target_env = \"msvc\")]\n",
|
||||
" c_config.flag(\"-utf-8\");\n",
|
||||
&build_rs[index..]
|
||||
);
|
||||
|
||||
write_file(path, build_rs)?;
|
||||
eprintln!("Updated build.rs with the /utf-8 flag for Windows compilation");
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
missing_path_else(
|
||||
repo_path.join("Cargo.toml"),
|
||||
|path| generate_file(path, CARGO_TOML_TEMPLATE, dashed_language_name.as_str()),
|
||||
|path| {
|
||||
let cargo_toml =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read Cargo.toml")?;
|
||||
if !cargo_toml.contains("tree-sitter-language") {
|
||||
let start_index = cargo_toml
|
||||
.find("tree-sitter = \"")
|
||||
.ok_or_else(|| anyhow!("Failed to find the `tree-sitter` dependency in Cargo.toml"))?;
|
||||
|
||||
let version_start_index = start_index + "tree-sitter = \"".len();
|
||||
let version_end_index = cargo_toml[version_start_index..]
|
||||
.find('\"')
|
||||
.map(|i| i + version_start_index)
|
||||
.ok_or_else(|| anyhow!("Failed to find the end of the `tree-sitter` version in Cargo.toml"))?;
|
||||
|
||||
let cargo_toml = format!(
|
||||
"{}{}{}\n{}\n{}",
|
||||
&cargo_toml[..start_index],
|
||||
"tree-sitter-language = \"0.1.0\"",
|
||||
&cargo_toml[version_end_index + 1..],
|
||||
"[dev-dependencies]",
|
||||
"tree-sitter = \"0.23\"",
|
||||
);
|
||||
|
||||
write_file(path, cargo_toml)?;
|
||||
eprintln!("Updated Cargo.toml with the `tree-sitter-language` dependency");
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Generate Node bindings
|
||||
missing_path(bindings_dir.join("node"), create_dir)?.apply(|path| {
|
||||
missing_path_else(
|
||||
path.join("index.js"),
|
||||
|path| generate_file(path, INDEX_JS_TEMPLATE, language_name),
|
||||
|path| {
|
||||
let index_js =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read index.js")?;
|
||||
if index_js.contains("../../build/Release") {
|
||||
eprintln!("Replacing index.js with new binding API");
|
||||
generate_file(path, INDEX_JS_TEMPLATE, language_name)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
missing_path(path.join("index.d.ts"), |path| {
|
||||
generate_file(path, INDEX_D_TS_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
missing_path(path.join("binding_test.js"), |path| {
|
||||
generate_file(path, BINDING_TEST_JS_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
missing_path_else(
|
||||
path.join("binding.cc"),
|
||||
|path| generate_file(path, JS_BINDING_CC_TEMPLATE, language_name),
|
||||
|path| {
|
||||
let binding_cc =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read binding.cc")?;
|
||||
if binding_cc.contains("NAN_METHOD(New) {}") {
|
||||
eprintln!("Replacing binding.cc with new binding API");
|
||||
generate_file(path, JS_BINDING_CC_TEMPLATE, language_name)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
// Create binding.gyp, or update it with new binding API.
|
||||
missing_path_else(
|
||||
repo_path.join("binding.gyp"),
|
||||
|path| generate_file(path, BINDING_GYP_TEMPLATE, language_name),
|
||||
|path| {
|
||||
let binding_gyp =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read binding.gyp")?;
|
||||
if binding_gyp.contains("require('nan')") {
|
||||
eprintln!("Replacing binding.gyp with new binding API");
|
||||
generate_file(path, BINDING_GYP_TEMPLATE, language_name)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Generate C bindings
|
||||
missing_path(bindings_dir.join("c"), create_dir)?.apply(|path| {
|
||||
missing_path(
|
||||
path.join(format!("tree-sitter-{language_name}.h")),
|
||||
|path| generate_file(path, PARSER_NAME_H_TEMPLATE, language_name),
|
||||
)?;
|
||||
|
||||
missing_path(
|
||||
path.join(format!("tree-sitter-{language_name}.pc.in")),
|
||||
|path| generate_file(path, PARSER_NAME_PC_IN_TEMPLATE, language_name),
|
||||
)?;
|
||||
|
||||
missing_path(repo_path.join("Makefile"), |path| {
|
||||
generate_file(path, MAKEFILE_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Generate Go bindings
|
||||
missing_path(bindings_dir.join("go"), create_dir)?.apply(|path| {
|
||||
missing_path(path.join("binding.go"), |path| {
|
||||
generate_file(path, BINDING_GO_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
missing_path_else(
|
||||
path.join("binding_test.go"),
|
||||
|path| generate_file(path, BINDING_TEST_GO_TEMPLATE, language_name),
|
||||
|path| {
|
||||
let binding_test_go =
|
||||
fs::read_to_string(path).with_context(|| "Failed to read binding_test.go")?;
|
||||
if binding_test_go.contains("smacker") {
|
||||
eprintln!("Replacing binding_test.go with new binding API");
|
||||
generate_file(path, BINDING_TEST_GO_TEMPLATE, language_name)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
// Delete the old go.mod file that lives inside bindings/go, it now lives in the root dir
|
||||
let go_mod_path = path.join("go.mod");
|
||||
if go_mod_path.exists() {
|
||||
fs::remove_file(go_mod_path).with_context(|| "Failed to remove old go.mod file")?;
|
||||
}
|
||||
|
||||
missing_path(repo_path.join("go.mod"), |path| {
|
||||
generate_file(path, GO_MOD_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Generate Python bindings
|
||||
missing_path(bindings_dir.join("python"), create_dir)?.apply(|path| {
|
||||
let lang_path = path.join(format!("tree_sitter_{}", language_name.to_snake_case()));
|
||||
missing_path(&lang_path, create_dir)?;
|
||||
|
||||
missing_path_else(
|
||||
lang_path.join("binding.c"),
|
||||
|path| generate_file(path, PY_BINDING_C_TEMPLATE, language_name),
|
||||
|path| {
|
||||
let binding_c = fs::read_to_string(path)
|
||||
.with_context(|| "Failed to read bindings/python/binding.c")?;
|
||||
if !binding_c.contains("PyCapsule_New") {
|
||||
eprintln!("Replacing bindings/python/binding.c with new binding API");
|
||||
generate_file(path, PY_BINDING_C_TEMPLATE, language_name)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
|
||||
missing_path(lang_path.join("__init__.py"), |path| {
|
||||
generate_file(path, INIT_PY_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
missing_path(lang_path.join("__init__.pyi"), |path| {
|
||||
generate_file(path, INIT_PYI_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
missing_path(lang_path.join("py.typed"), |path| {
|
||||
generate_file(path, "", language_name) // py.typed is empty
|
||||
})?;
|
||||
|
||||
missing_path(path.join("tests"), create_dir)?.apply(|path| {
|
||||
missing_path(path.join("test_binding.py"), |path| {
|
||||
generate_file(path, TEST_BINDING_PY_TEMPLATE, language_name)
|
||||
})?;
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
missing_path(repo_path.join("setup.py"), |path| {
|
||||
generate_file(path, SETUP_PY_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
missing_path(repo_path.join("pyproject.toml"), |path| {
|
||||
generate_file(path, PYPROJECT_TOML_TEMPLATE, dashed_language_name.as_str())
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Generate Swift bindings
|
||||
missing_path(bindings_dir.join("swift"), create_dir)?.apply(|path| {
|
||||
let lang_path = path.join(format!("TreeSitter{}", language_name.to_upper_camel_case()));
|
||||
missing_path(&lang_path, create_dir)?;
|
||||
|
||||
missing_path(lang_path.join(format!("{language_name}.h")), |path| {
|
||||
generate_file(path, PARSER_NAME_H_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
missing_path(
|
||||
path.join(format!(
|
||||
"TreeSitter{}Tests",
|
||||
language_name.to_upper_camel_case()
|
||||
)),
|
||||
create_dir,
|
||||
)?
|
||||
.apply(|path| {
|
||||
missing_path(
|
||||
path.join(format!(
|
||||
"TreeSitter{}Tests.swift",
|
||||
language_name.to_upper_camel_case()
|
||||
)),
|
||||
|path| generate_file(path, TESTS_SWIFT_TEMPLATE, language_name),
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
missing_path(repo_path.join("Package.swift"), |path| {
|
||||
generate_file(path, PACKAGE_SWIFT_TEMPLATE, language_name)
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn lookup_package_json_for_path(path: &Path) -> Result<(PathBuf, PackageJSON)> {
|
||||
let mut pathbuf = path.to_owned();
|
||||
loop {
|
||||
let package_json = pathbuf
|
||||
.exists()
|
||||
.then(|| -> Result<PackageJSON> {
|
||||
let file =
|
||||
File::open(pathbuf.as_path()).with_context(|| "Failed to open package.json")?;
|
||||
serde_json::from_reader(BufReader::new(file)).context(
|
||||
"Failed to parse package.json, is the `tree-sitter` section malformed?",
|
||||
)
|
||||
})
|
||||
.transpose()?;
|
||||
if let Some(package_json) = package_json {
|
||||
if package_json.tree_sitter.is_some() {
|
||||
return Ok((pathbuf, package_json));
|
||||
}
|
||||
}
|
||||
pathbuf.pop(); // package.json
|
||||
if !pathbuf.pop() {
|
||||
return Err(anyhow!(concat!(
|
||||
"Failed to locate a package.json file that has a \"tree-sitter\" section,",
|
||||
" please ensure you have one, and if you don't then consult the docs",
|
||||
)));
|
||||
}
|
||||
pathbuf.push("package.json");
|
||||
}
|
||||
}
|
||||
|
||||
fn generate_file(path: &Path, template: &str, language_name: &str) -> Result<()> {
|
||||
write_file(
|
||||
path,
|
||||
template
|
||||
.replace(
|
||||
CAMEL_PARSER_NAME_PLACEHOLDER,
|
||||
&language_name.to_upper_camel_case(),
|
||||
)
|
||||
.replace(
|
||||
UPPER_PARSER_NAME_PLACEHOLDER,
|
||||
&language_name.to_shouty_snake_case(),
|
||||
)
|
||||
.replace(
|
||||
LOWER_PARSER_NAME_PLACEHOLDER,
|
||||
&language_name.to_snake_case(),
|
||||
)
|
||||
.replace(PARSER_NAME_PLACEHOLDER, language_name)
|
||||
.replace(CLI_VERSION_PLACEHOLDER, CLI_VERSION)
|
||||
.replace(RUST_BINDING_VERSION_PLACEHOLDER, RUST_BINDING_VERSION),
|
||||
)
|
||||
}
|
||||
|
||||
fn create_dir(path: &Path) -> Result<()> {
|
||||
fs::create_dir_all(path)
|
||||
.with_context(|| format!("Failed to create {:?}", path.to_string_lossy()))
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug)]
|
||||
enum PathState<P>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
Exists(P),
|
||||
Missing(P),
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
impl<P> PathState<P>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
{
|
||||
fn exists(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
|
||||
if let Self::Exists(path) = self {
|
||||
action(path.as_ref())?;
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn missing(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
|
||||
if let Self::Missing(path) = self {
|
||||
action(path.as_ref())?;
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn apply(&self, mut action: impl FnMut(&Path) -> Result<()>) -> Result<&Self> {
|
||||
action(self.as_path())?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn apply_state(&self, mut action: impl FnMut(&Self) -> Result<()>) -> Result<&Self> {
|
||||
action(self)?;
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
fn as_path(&self) -> &Path {
|
||||
match self {
|
||||
Self::Exists(path) | Self::Missing(path) => path.as_ref(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn missing_path<P, F>(path: P, mut action: F) -> Result<PathState<P>>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
F: FnMut(&Path) -> Result<()>,
|
||||
{
|
||||
let path_ref = path.as_ref();
|
||||
if !path_ref.exists() {
|
||||
action(path_ref)?;
|
||||
Ok(PathState::Missing(path))
|
||||
} else {
|
||||
Ok(PathState::Exists(path))
|
||||
}
|
||||
}
|
||||
|
||||
fn missing_path_else<P, T, F>(path: P, mut action: T, mut else_action: F) -> Result<PathState<P>>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
T: FnMut(&Path) -> Result<()>,
|
||||
F: FnMut(&Path) -> Result<()>,
|
||||
{
|
||||
let path_ref = path.as_ref();
|
||||
if !path_ref.exists() {
|
||||
action(path_ref)?;
|
||||
Ok(PathState::Missing(path))
|
||||
} else {
|
||||
else_action(path_ref)?;
|
||||
Ok(PathState::Exists(path))
|
||||
}
|
||||
}
|
||||
|
||||
impl PackageJSON {
|
||||
fn has_multiple_language_configs(&self) -> bool {
|
||||
self.tree_sitter.as_ref().is_some_and(|c| c.len() > 1)
|
||||
}
|
||||
}
|
||||
|
|
@ -3,6 +3,7 @@
|
|||
pub mod fuzz;
|
||||
pub mod generate;
|
||||
pub mod highlight;
|
||||
pub mod init;
|
||||
pub mod logger;
|
||||
pub mod parse;
|
||||
pub mod playground;
|
||||
|
|
|
|||
|
|
@ -16,8 +16,9 @@ use tree_sitter_cli::{
|
|||
fuzz_language_corpus, FuzzOptions, EDIT_COUNT, ITERATION_COUNT, LOG_ENABLED,
|
||||
LOG_GRAPH_ENABLED, START_SEED,
|
||||
},
|
||||
generate::{self, lookup_package_json_for_path},
|
||||
highlight, logger,
|
||||
generate, highlight,
|
||||
init::{generate_grammar_files, lookup_package_json_for_path},
|
||||
logger,
|
||||
parse::{self, ParseFileOptions, ParseOutput},
|
||||
playground, query, tags,
|
||||
test::{self, TestOptions},
|
||||
|
|
@ -36,6 +37,7 @@ const DEFAULT_GENERATE_ABI_VERSION: usize = 14;
|
|||
#[command(about="Generates and tests parsers", author=crate_authors!("\n"), styles=get_styles())]
|
||||
enum Commands {
|
||||
InitConfig(InitConfig),
|
||||
Init(Init),
|
||||
Generate(Generate),
|
||||
Build(Build),
|
||||
Parse(Parse),
|
||||
|
|
@ -53,6 +55,10 @@ enum Commands {
|
|||
#[command(about = "Generate a default config file")]
|
||||
struct InitConfig;
|
||||
|
||||
#[derive(Args)]
|
||||
#[command(about = "Initialize a grammar repository", alias = "i")]
|
||||
struct Init;
|
||||
|
||||
#[derive(Args)]
|
||||
#[command(about = "Generate a parser", alias = "gen", alias = "g")]
|
||||
struct Generate {
|
||||
|
|
@ -72,8 +78,6 @@ struct Generate {
|
|||
)
|
||||
)]
|
||||
pub abi_version: Option<String>,
|
||||
#[arg(long, help = "Don't generate language bindings")]
|
||||
pub no_bindings: bool,
|
||||
#[arg(
|
||||
long,
|
||||
short = 'b',
|
||||
|
|
@ -424,6 +428,24 @@ impl InitConfig {
|
|||
}
|
||||
}
|
||||
|
||||
impl Init {
|
||||
fn run(self, current_dir: PathBuf) -> Result<()> {
|
||||
if let Some(dir_name) = current_dir
|
||||
.file_name()
|
||||
.map(|x| x.to_string_lossy().to_ascii_lowercase())
|
||||
{
|
||||
if let Some(language_name) = dir_name
|
||||
.strip_prefix("tree-sitter-")
|
||||
.or_else(|| Some(dir_name.as_ref()))
|
||||
{
|
||||
generate_grammar_files(¤t_dir, language_name)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Generate {
|
||||
fn run(self, mut loader: loader::Loader, current_dir: PathBuf) -> Result<()> {
|
||||
if self.log {
|
||||
|
|
@ -443,7 +465,6 @@ impl Generate {
|
|||
¤t_dir,
|
||||
self.grammar_path.as_deref(),
|
||||
abi_version,
|
||||
!self.no_bindings,
|
||||
self.report_states_for_rule.as_deref(),
|
||||
self.js_runtime.as_deref(),
|
||||
)?;
|
||||
|
|
@ -1052,6 +1073,7 @@ fn run() -> Result<()> {
|
|||
|
||||
match command {
|
||||
Commands::InitConfig(init_config) => init_config.run()?,
|
||||
Commands::Init(init) => init.run(current_dir)?,
|
||||
Commands::Generate(generate_options) => generate_options.run(loader, current_dir)?,
|
||||
Commands::Build(build_options) => build_options.run(loader, current_dir)?,
|
||||
Commands::Parse(parse_options) => parse_options.run(loader, current_dir)?,
|
||||
|
|
|
|||
|
|
@ -12,10 +12,10 @@ extern "C" {
|
|||
// Allow clients to override allocation functions
|
||||
#ifdef TREE_SITTER_REUSE_ALLOCATOR
|
||||
|
||||
extern void *(*ts_current_malloc)(size_t);
|
||||
extern void *(*ts_current_calloc)(size_t, size_t);
|
||||
extern void *(*ts_current_realloc)(void *, size_t);
|
||||
extern void (*ts_current_free)(void *);
|
||||
extern void *(*ts_current_malloc)(size_t size);
|
||||
extern void *(*ts_current_calloc)(size_t count, size_t size);
|
||||
extern void *(*ts_current_realloc)(void *ptr, size_t size);
|
||||
extern void (*ts_current_free)(void *ptr);
|
||||
|
||||
#ifndef ts_malloc
|
||||
#define ts_malloc ts_current_malloc
|
||||
290
cli/src/templates/array.h
Normal file
290
cli/src/templates/array.h
Normal file
|
|
@ -0,0 +1,290 @@
|
|||
#ifndef TREE_SITTER_ARRAY_H_
|
||||
#define TREE_SITTER_ARRAY_H_
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include "./alloc.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
|
||||
#ifdef _MSC_VER
|
||||
#pragma warning(disable : 4101)
|
||||
#elif defined(__GNUC__) || defined(__clang__)
|
||||
#pragma GCC diagnostic push
|
||||
#pragma GCC diagnostic ignored "-Wunused-variable"
|
||||
#endif
|
||||
|
||||
#define Array(T) \
|
||||
struct { \
|
||||
T *contents; \
|
||||
uint32_t size; \
|
||||
uint32_t capacity; \
|
||||
}
|
||||
|
||||
/// Initialize an array.
|
||||
#define array_init(self) \
|
||||
((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL)
|
||||
|
||||
/// Create an empty array.
|
||||
#define array_new() \
|
||||
{ NULL, 0, 0 }
|
||||
|
||||
/// Get a pointer to the element at a given `index` in the array.
|
||||
#define array_get(self, _index) \
|
||||
(assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index])
|
||||
|
||||
/// Get a pointer to the first element in the array.
|
||||
#define array_front(self) array_get(self, 0)
|
||||
|
||||
/// Get a pointer to the last element in the array.
|
||||
#define array_back(self) array_get(self, (self)->size - 1)
|
||||
|
||||
/// Clear the array, setting its size to zero. Note that this does not free any
|
||||
/// memory allocated for the array's contents.
|
||||
#define array_clear(self) ((self)->size = 0)
|
||||
|
||||
/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is
|
||||
/// less than the array's current capacity, this function has no effect.
|
||||
#define array_reserve(self, new_capacity) \
|
||||
_array__reserve((Array *)(self), array_elem_size(self), new_capacity)
|
||||
|
||||
/// Free any memory allocated for this array. Note that this does not free any
|
||||
/// memory allocated for the array's contents.
|
||||
#define array_delete(self) _array__delete((Array *)(self))
|
||||
|
||||
/// Push a new `element` onto the end of the array.
|
||||
#define array_push(self, element) \
|
||||
(_array__grow((Array *)(self), 1, array_elem_size(self)), \
|
||||
(self)->contents[(self)->size++] = (element))
|
||||
|
||||
/// Increase the array's size by `count` elements.
|
||||
/// New elements are zero-initialized.
|
||||
#define array_grow_by(self, count) \
|
||||
do { \
|
||||
if ((count) == 0) break; \
|
||||
_array__grow((Array *)(self), count, array_elem_size(self)); \
|
||||
memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \
|
||||
(self)->size += (count); \
|
||||
} while (0)
|
||||
|
||||
/// Append all elements from one array to the end of another.
|
||||
#define array_push_all(self, other) \
|
||||
array_extend((self), (other)->size, (other)->contents)
|
||||
|
||||
/// Append `count` elements to the end of the array, reading their values from the
|
||||
/// `contents` pointer.
|
||||
#define array_extend(self, count, contents) \
|
||||
_array__splice( \
|
||||
(Array *)(self), array_elem_size(self), (self)->size, \
|
||||
0, count, contents \
|
||||
)
|
||||
|
||||
/// Remove `old_count` elements from the array starting at the given `index`. At
|
||||
/// the same index, insert `new_count` new elements, reading their values from the
|
||||
/// `new_contents` pointer.
|
||||
#define array_splice(self, _index, old_count, new_count, new_contents) \
|
||||
_array__splice( \
|
||||
(Array *)(self), array_elem_size(self), _index, \
|
||||
old_count, new_count, new_contents \
|
||||
)
|
||||
|
||||
/// Insert one `element` into the array at the given `index`.
|
||||
#define array_insert(self, _index, element) \
|
||||
_array__splice((Array *)(self), array_elem_size(self), _index, 0, 1, &(element))
|
||||
|
||||
/// Remove one element from the array at the given `index`.
|
||||
#define array_erase(self, _index) \
|
||||
_array__erase((Array *)(self), array_elem_size(self), _index)
|
||||
|
||||
/// Pop the last element off the array, returning the element by value.
|
||||
#define array_pop(self) ((self)->contents[--(self)->size])
|
||||
|
||||
/// Assign the contents of one array to another, reallocating if necessary.
|
||||
#define array_assign(self, other) \
|
||||
_array__assign((Array *)(self), (const Array *)(other), array_elem_size(self))
|
||||
|
||||
/// Swap one array with another
|
||||
#define array_swap(self, other) \
|
||||
_array__swap((Array *)(self), (Array *)(other))
|
||||
|
||||
/// Get the size of the array contents
|
||||
#define array_elem_size(self) (sizeof *(self)->contents)
|
||||
|
||||
/// Search a sorted array for a given `needle` value, using the given `compare`
|
||||
/// callback to determine the order.
|
||||
///
|
||||
/// If an existing element is found to be equal to `needle`, then the `index`
|
||||
/// out-parameter is set to the existing value's index, and the `exists`
|
||||
/// out-parameter is set to true. Otherwise, `index` is set to an index where
|
||||
/// `needle` should be inserted in order to preserve the sorting, and `exists`
|
||||
/// is set to false.
|
||||
#define array_search_sorted_with(self, compare, needle, _index, _exists) \
|
||||
_array__search_sorted(self, 0, compare, , needle, _index, _exists)
|
||||
|
||||
/// Search a sorted array for a given `needle` value, using integer comparisons
|
||||
/// of a given struct field (specified with a leading dot) to determine the order.
|
||||
///
|
||||
/// See also `array_search_sorted_with`.
|
||||
#define array_search_sorted_by(self, field, needle, _index, _exists) \
|
||||
_array__search_sorted(self, 0, _compare_int, field, needle, _index, _exists)
|
||||
|
||||
/// Insert a given `value` into a sorted array, using the given `compare`
|
||||
/// callback to determine the order.
|
||||
#define array_insert_sorted_with(self, compare, value) \
|
||||
do { \
|
||||
unsigned _index, _exists; \
|
||||
array_search_sorted_with(self, compare, &(value), &_index, &_exists); \
|
||||
if (!_exists) array_insert(self, _index, value); \
|
||||
} while (0)
|
||||
|
||||
/// Insert a given `value` into a sorted array, using integer comparisons of
|
||||
/// a given struct field (specified with a leading dot) to determine the order.
|
||||
///
|
||||
/// See also `array_search_sorted_by`.
|
||||
#define array_insert_sorted_by(self, field, value) \
|
||||
do { \
|
||||
unsigned _index, _exists; \
|
||||
array_search_sorted_by(self, field, (value) field, &_index, &_exists); \
|
||||
if (!_exists) array_insert(self, _index, value); \
|
||||
} while (0)
|
||||
|
||||
// Private
|
||||
|
||||
typedef Array(void) Array;
|
||||
|
||||
/// This is not what you're looking for, see `array_delete`.
|
||||
static inline void _array__delete(Array *self) {
|
||||
if (self->contents) {
|
||||
ts_free(self->contents);
|
||||
self->contents = NULL;
|
||||
self->size = 0;
|
||||
self->capacity = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/// This is not what you're looking for, see `array_erase`.
|
||||
static inline void _array__erase(Array *self, size_t element_size,
|
||||
uint32_t index) {
|
||||
assert(index < self->size);
|
||||
char *contents = (char *)self->contents;
|
||||
memmove(contents + index * element_size, contents + (index + 1) * element_size,
|
||||
(self->size - index - 1) * element_size);
|
||||
self->size--;
|
||||
}
|
||||
|
||||
/// This is not what you're looking for, see `array_reserve`.
|
||||
static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) {
|
||||
if (new_capacity > self->capacity) {
|
||||
if (self->contents) {
|
||||
self->contents = ts_realloc(self->contents, new_capacity * element_size);
|
||||
} else {
|
||||
self->contents = ts_malloc(new_capacity * element_size);
|
||||
}
|
||||
self->capacity = new_capacity;
|
||||
}
|
||||
}
|
||||
|
||||
/// This is not what you're looking for, see `array_assign`.
|
||||
static inline void _array__assign(Array *self, const Array *other, size_t element_size) {
|
||||
_array__reserve(self, element_size, other->size);
|
||||
self->size = other->size;
|
||||
memcpy(self->contents, other->contents, self->size * element_size);
|
||||
}
|
||||
|
||||
/// This is not what you're looking for, see `array_swap`.
|
||||
static inline void _array__swap(Array *self, Array *other) {
|
||||
Array swap = *other;
|
||||
*other = *self;
|
||||
*self = swap;
|
||||
}
|
||||
|
||||
/// This is not what you're looking for, see `array_push` or `array_grow_by`.
|
||||
static inline void _array__grow(Array *self, uint32_t count, size_t element_size) {
|
||||
uint32_t new_size = self->size + count;
|
||||
if (new_size > self->capacity) {
|
||||
uint32_t new_capacity = self->capacity * 2;
|
||||
if (new_capacity < 8) new_capacity = 8;
|
||||
if (new_capacity < new_size) new_capacity = new_size;
|
||||
_array__reserve(self, element_size, new_capacity);
|
||||
}
|
||||
}
|
||||
|
||||
/// This is not what you're looking for, see `array_splice`.
|
||||
static inline void _array__splice(Array *self, size_t element_size,
|
||||
uint32_t index, uint32_t old_count,
|
||||
uint32_t new_count, const void *elements) {
|
||||
uint32_t new_size = self->size + new_count - old_count;
|
||||
uint32_t old_end = index + old_count;
|
||||
uint32_t new_end = index + new_count;
|
||||
assert(old_end <= self->size);
|
||||
|
||||
_array__reserve(self, element_size, new_size);
|
||||
|
||||
char *contents = (char *)self->contents;
|
||||
if (self->size > old_end) {
|
||||
memmove(
|
||||
contents + new_end * element_size,
|
||||
contents + old_end * element_size,
|
||||
(self->size - old_end) * element_size
|
||||
);
|
||||
}
|
||||
if (new_count > 0) {
|
||||
if (elements) {
|
||||
memcpy(
|
||||
(contents + index * element_size),
|
||||
elements,
|
||||
new_count * element_size
|
||||
);
|
||||
} else {
|
||||
memset(
|
||||
(contents + index * element_size),
|
||||
0,
|
||||
new_count * element_size
|
||||
);
|
||||
}
|
||||
}
|
||||
self->size += new_count - old_count;
|
||||
}
|
||||
|
||||
/// A binary search routine, based on Rust's `std::slice::binary_search_by`.
|
||||
/// This is not what you're looking for, see `array_search_sorted_with` or `array_search_sorted_by`.
|
||||
#define _array__search_sorted(self, start, compare, suffix, needle, _index, _exists) \
|
||||
do { \
|
||||
*(_index) = start; \
|
||||
*(_exists) = false; \
|
||||
uint32_t size = (self)->size - *(_index); \
|
||||
if (size == 0) break; \
|
||||
int comparison; \
|
||||
while (size > 1) { \
|
||||
uint32_t half_size = size / 2; \
|
||||
uint32_t mid_index = *(_index) + half_size; \
|
||||
comparison = compare(&((self)->contents[mid_index] suffix), (needle)); \
|
||||
if (comparison <= 0) *(_index) = mid_index; \
|
||||
size -= half_size; \
|
||||
} \
|
||||
comparison = compare(&((self)->contents[*(_index)] suffix), (needle)); \
|
||||
if (comparison == 0) *(_exists) = true; \
|
||||
else if (comparison < 0) *(_index) += 1; \
|
||||
} while (0)
|
||||
|
||||
/// Helper macro for the `_sorted_by` routines below. This takes the left (existing)
/// parameter by reference in order to work with the generic sorting function above.
// NOTE(review): the subtraction is done in `int`, which can overflow for
// operands far apart in range — presumably callers only pass small values;
// confirm before reusing elsewhere.
#define _compare_int(a, b) ((int)*(a) - (int)(b))
|
||||
|
||||
#ifdef _MSC_VER
|
||||
#pragma warning(default : 4101)
|
||||
#elif defined(__GNUC__) || defined(__clang__)
|
||||
#pragma GCC diagnostic pop
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
|
||||
#endif // TREE_SITTER_ARRAY_H_
|
||||
|
|
@ -86,7 +86,7 @@ $(LANGUAGE_NAME).pc: bindings/c/$(LANGUAGE_NAME).pc.in
|
|||
-e 's|@PREFIX@|$(PREFIX)|' $< > $@
|
||||
|
||||
$(PARSER): $(SRC_DIR)/grammar.json
|
||||
$(TS) generate --no-bindings $^
|
||||
$(TS) generate $^
|
||||
|
||||
install: all
|
||||
install -d '$(DESTDIR)$(INCLUDEDIR)'/tree_sitter '$(DESTDIR)$(PCLIBDIR)' '$(DESTDIR)$(LIBDIR)'
|
||||
|
|
@ -10,7 +10,7 @@ use tree_sitter_highlight::HighlightConfiguration;
|
|||
use tree_sitter_loader::{CompileConfig, Loader};
|
||||
use tree_sitter_tags::TagsConfiguration;
|
||||
|
||||
use crate::generate::ALLOC_HEADER;
|
||||
use crate::generate::{ALLOC_HEADER, ARRAY_HEADER};
|
||||
|
||||
include!("./dirs.rs");
|
||||
|
||||
|
|
@ -112,7 +112,7 @@ pub fn get_test_language(name: &str, parser_code: &str, path: Option<&Path>) ->
|
|||
|
||||
[
|
||||
("alloc.h", ALLOC_HEADER),
|
||||
("array.h", tree_sitter::ARRAY_HEADER),
|
||||
("array.h", ARRAY_HEADER),
|
||||
("parser.h", tree_sitter::PARSER_HEADER),
|
||||
]
|
||||
.iter()
|
||||
|
|
|
|||
|
|
@ -50,7 +50,6 @@ pub const LANGUAGE_VERSION: usize = ffi::TREE_SITTER_LANGUAGE_VERSION as usize;
|
|||
pub const MIN_COMPATIBLE_LANGUAGE_VERSION: usize =
|
||||
ffi::TREE_SITTER_MIN_COMPATIBLE_LANGUAGE_VERSION as usize;
|
||||
|
||||
pub const ARRAY_HEADER: &str = include_str!("../src/array.h");
|
||||
pub const PARSER_HEADER: &str = include_str!("../src/parser.h");
|
||||
|
||||
/// An opaque object that defines how to parse a particular language. The code
|
||||
|
|
|
|||
|
|
@ -23,5 +23,5 @@ while read -r grammar_file; do
|
|||
fi
|
||||
|
||||
printf 'Regenerating %s parser\n' "$grammar_name"
|
||||
(cd "$grammar_dir" && "$TREE_SITTER" generate src/grammar.json --no-bindings --abi=latest)
|
||||
(cd "$grammar_dir" && "$TREE_SITTER" generate src/grammar.json --abi=latest)
|
||||
done < <(find "$GRAMMARS_DIR" -name grammar.js -not -path '*/node_modules/*')
|
||||
|
|
|
|||
|
|
@ -6,7 +6,7 @@ set tree_sitter="%cd%\target\release\tree-sitter"
|
|||
for /f "tokens=*" %%f in ('dir test\fixtures\grammars\grammar.js /b/s') do (
|
||||
pushd "%%f\.."
|
||||
echo Regenerating parser !cd!
|
||||
%tree_sitter% generate src\grammar.json --no-bindings --abi=latest
|
||||
%tree_sitter% generate src\grammar.json --abi=latest
|
||||
popd
|
||||
)
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue