feat: add the semantic version to TSLanguage, and expose an API for retrieving it

Amaan Qureshi 2025-01-21 01:59:24 -05:00
parent f0222107b8
commit 8bb1448a6f
24 changed files with 371 additions and 77 deletions
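Before the hunks, a minimal sketch (not part of the diff) of the call-site changes they exercise: `generate_parser_for_grammar` now takes the grammar's semantic version as a second argument (the test suite passes a `(0, 0, 0)` placeholder), and ABI checks move from `Language::version()` to `Language::abi_version()`. The helper names `regenerate` and `requires_abi_15` are made up for illustration, and the getter for the new semantic version itself does not appear in these hunks, so it is omitted.

use tree_sitter::Language;
use tree_sitter_generate::{generate_parser_for_grammar, load_grammar_file};

// Illustration only: mirrors the call sites changed in this commit.
fn regenerate(grammar_path: &std::path::Path) -> (String, String) {
    let grammar_json = load_grammar_file(grammar_path, None).unwrap();
    // New signature: the second argument carries the grammar's semantic
    // version; the tests in this diff pass a (0, 0, 0) placeholder.
    generate_parser_for_grammar(&grammar_json, Some((0, 0, 0))).unwrap()
}

// `Language::version()` is renamed to `Language::abi_version()`; callers that
// gate on the ABI number now look like this.
fn requires_abi_15(language: &Language) -> bool {
    language.abi_version() >= 15
}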


@@ -359,7 +359,8 @@ fn test_feature_corpus_files() {
}
let error_message_path = test_path.join("expected_error.txt");
let grammar_json = tree_sitter_generate::load_grammar_file(&grammar_path, None).unwrap();
let generate_result = tree_sitter_generate::generate_parser_for_grammar(&grammar_json);
let generate_result =
tree_sitter_generate::generate_parser_for_grammar(&grammar_json, Some((0, 0, 0)));
if error_message_path.exists() {
if EXAMPLE_INCLUDE.is_some() || EXAMPLE_EXCLUDE.is_some() {


@@ -101,7 +101,7 @@ fn test_supertypes() {
let language = get_language("rust");
let supertypes = language.supertypes();
if language.version() < 15 {
if language.abi_version() < 15 {
return;
}


@@ -18,9 +18,17 @@ mod tree_test;
#[cfg(feature = "wasm")]
mod wasm_language_test;
use tree_sitter_generate::GenerateResult;
pub use crate::fuzz::{
allocations,
edits::{get_random_edit, invert_edit},
random::Rand,
ITERATION_COUNT,
};
/// This is a simple wrapper around [`tree_sitter_generate::generate_parser_for_grammar`], because
/// our tests do not need to pass in a version number, only the grammar JSON.
fn generate_parser(grammar_json: &str) -> GenerateResult<(String, String)> {
tree_sitter_generate::generate_parser_for_grammar(grammar_json, Some((0, 0, 0)))
}
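A hedged usage sketch of the wrapper above, matching how call sites in the remaining hunks now look (the function name `example` is only for illustration):

fn example(grammar_json: &str) {
    // Tests supply only the grammar JSON; the placeholder version lives
    // inside `generate_parser` itself.
    let (parser_name, parser_code) = generate_parser(grammar_json).unwrap();
    let _ = (parser_name, parser_code);
}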


@@ -1,12 +1,12 @@
use tree_sitter::{Node, Parser, Point, Tree};
use tree_sitter_generate::{generate_parser_for_grammar, load_grammar_file};
use tree_sitter_generate::load_grammar_file;
use super::{
get_random_edit,
helpers::fixtures::{fixtures_dir, get_language, get_test_language},
Rand,
};
use crate::parse::perform_edit;
use crate::{parse::perform_edit, tests::generate_parser};
const JSON_EXAMPLE: &str = r#"
@@ -317,7 +317,7 @@ fn test_next_sibling_of_zero_width_node() {
)
.unwrap();
let (parser_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
let (parser_name, parser_code) = generate_parser(&grammar_json).unwrap();
let mut parser = Parser::new();
let language = get_test_language(&parser_name, &parser_code, None);
@@ -563,8 +563,7 @@ fn test_node_named_child() {
#[test]
fn test_node_named_child_with_aliases_and_extras() {
let (parser_name, parser_code) =
generate_parser_for_grammar(GRAMMAR_WITH_ALIASES_AND_EXTRAS).unwrap();
let (parser_name, parser_code) = generate_parser(GRAMMAR_WITH_ALIASES_AND_EXTRAS).unwrap();
let mut parser = Parser::new();
parser
@@ -871,7 +870,7 @@ fn test_node_sexp() {
#[test]
fn test_node_field_names() {
let (parser_name, parser_code) = generate_parser_for_grammar(
let (parser_name, parser_code) = generate_parser(
r#"
{
"name": "test_grammar_with_fields",
@@ -981,7 +980,7 @@ fn test_node_field_names() {
#[test]
fn test_node_field_calls_in_language_without_fields() {
let (parser_name, parser_code) = generate_parser_for_grammar(
let (parser_name, parser_code) = generate_parser(
r#"
{
"name": "test_grammar_with_no_fields",
@@ -1039,7 +1038,7 @@ fn test_node_is_named_but_aliased_as_anonymous() {
)
.unwrap();
let (parser_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
let (parser_name, parser_code) = generate_parser(&grammar_json).unwrap();
let mut parser = Parser::new();
let language = get_test_language(&parser_name, &parser_code, None);


@@ -7,8 +7,9 @@ use std::{
};
use tree_sitter::Parser;
use tree_sitter_generate::{generate_parser_for_grammar, load_grammar_file};
use tree_sitter_generate::load_grammar_file;
use super::generate_parser;
use crate::tests::helpers::fixtures::{fixtures_dir, get_test_language};
// The `sanitizing` cfg is required so these tests don't run under a specific sanitizer
@@ -90,7 +91,7 @@ fn hang_test() {
.join("get_col_should_hang_not_crash");
let grammar_json = load_grammar_file(&test_grammar_dir.join("grammar.js"), None).unwrap();
let (parser_name, parser_code) = generate_parser_for_grammar(grammar_json.as_str()).unwrap();
let (parser_name, parser_code) = generate_parser(grammar_json.as_str()).unwrap();
let language = get_test_language(&parser_name, &parser_code, Some(test_grammar_dir.as_path()));


@@ -6,7 +6,7 @@ use std::{
use tree_sitter::{
Decode, IncludedRangesError, InputEdit, LogType, ParseOptions, ParseState, Parser, Point, Range,
};
use tree_sitter_generate::{generate_parser_for_grammar, load_grammar_file};
use tree_sitter_generate::load_grammar_file;
use tree_sitter_proc_macro::retry;
use super::helpers::{
@@ -17,7 +17,7 @@ use super::helpers::{
use crate::{
fuzz::edits::Edit,
parse::perform_edit,
tests::{helpers::fixtures::fixtures_dir, invert_edit},
tests::{generate_parser, helpers::fixtures::fixtures_dir, invert_edit},
};
#[test]
@@ -486,7 +486,7 @@ fn test_parsing_after_editing_tree_that_depends_on_column_values() {
.join("test_grammars")
.join("uses_current_column");
let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap();
let (grammar_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
let (grammar_name, parser_code) = generate_parser(&grammar_json).unwrap();
let mut parser = Parser::new();
parser
@@ -564,7 +564,7 @@ fn test_parsing_after_editing_tree_that_depends_on_column_position() {
.join("depends_on_column");
let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap();
let (grammar_name, parser_code) = generate_parser_for_grammar(grammar_json.as_str()).unwrap();
let (grammar_name, parser_code) = generate_parser(grammar_json.as_str()).unwrap();
let mut parser = Parser::new();
parser
@@ -1475,7 +1475,7 @@ fn test_parsing_with_a_newly_included_range() {
#[test]
fn test_parsing_with_included_ranges_and_missing_tokens() {
let (parser_name, parser_code) = generate_parser_for_grammar(
let (parser_name, parser_code) = generate_parser(
r#"{
"name": "test_leading_missing_token",
"rules": {
@@ -1536,7 +1536,7 @@ fn test_parsing_with_included_ranges_and_missing_tokens() {
#[test]
fn test_grammars_that_can_hang_on_eof() {
let (parser_name, parser_code) = generate_parser_for_grammar(
let (parser_name, parser_code) = generate_parser(
r#"
{
"name": "test_single_null_char_regex",
@@ -1562,7 +1562,7 @@ fn test_grammars_that_can_hang_on_eof() {
.unwrap();
parser.parse("\"", None).unwrap();
let (parser_name, parser_code) = generate_parser_for_grammar(
let (parser_name, parser_code) = generate_parser(
r#"
{
"name": "test_null_char_with_next_char_regex",
@@ -1587,7 +1587,7 @@ fn test_grammars_that_can_hang_on_eof() {
.unwrap();
parser.parse("\"", None).unwrap();
let (parser_name, parser_code) = generate_parser_for_grammar(
let (parser_name, parser_code) = generate_parser(
r#"
{
"name": "test_null_char_with_range_regex",
@@ -1650,7 +1650,7 @@ if foo && bar || baz {}
fn test_parsing_with_scanner_logging() {
let dir = fixtures_dir().join("test_grammars").join("external_tokens");
let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap();
let (grammar_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
let (grammar_name, parser_code) = generate_parser(&grammar_json).unwrap();
let mut parser = Parser::new();
parser
@@ -1674,7 +1674,7 @@ fn test_parsing_with_scanner_logging() {
fn test_parsing_get_column_at_eof() {
let dir = fixtures_dir().join("test_grammars").join("get_col_eof");
let grammar_json = load_grammar_file(&dir.join("grammar.js"), None).unwrap();
let (grammar_name, parser_code) = generate_parser_for_grammar(&grammar_json).unwrap();
let (grammar_name, parser_code) = generate_parser(&grammar_json).unwrap();
let mut parser = Parser::new();
parser
@@ -1884,7 +1884,7 @@ fn test_decode_utf24le() {
#[test]
fn test_grammars_that_should_not_compile() {
assert!(generate_parser_for_grammar(
assert!(generate_parser(
r#"
{
"name": "issue_1111",
@@ -1896,7 +1896,7 @@ fn test_grammars_that_should_not_compile() {
)
.is_err());
assert!(generate_parser_for_grammar(
assert!(generate_parser(
r#"
{
"name": "issue_1271",
@@ -1911,11 +1911,11 @@ fn test_grammars_that_should_not_compile() {
}
},
}
"#,
"#
)
.is_err());
assert!(generate_parser_for_grammar(
assert!(generate_parser(
r#"
{
"name": "issue_1156_expl_1",
@@ -1929,11 +1929,11 @@ fn test_grammars_that_should_not_compile() {
}
},
}
"#
"#
)
.is_err());
assert!(generate_parser_for_grammar(
assert!(generate_parser(
r#"
{
"name": "issue_1156_expl_2",
@@ -1950,11 +1950,11 @@ fn test_grammars_that_should_not_compile() {
}
},
}
"#
"#
)
.is_err());
assert!(generate_parser_for_grammar(
assert!(generate_parser(
r#"
{
"name": "issue_1156_expl_3",
@@ -1968,11 +1968,11 @@ fn test_grammars_that_should_not_compile() {
}
},
}
"#
"#
)
.is_err());
assert!(generate_parser_for_grammar(
assert!(generate_parser(
r#"
{
"name": "issue_1156_expl_4",
@@ -1989,7 +1989,7 @@ fn test_grammars_that_should_not_compile() {
}
},
}
"#
"#
)
.is_err());
}


@@ -8,7 +8,6 @@ use tree_sitter::{
QueryCursorOptions, QueryError, QueryErrorKind, QueryPredicate, QueryPredicateArg,
QueryProperty, Range,
};
use tree_sitter_generate::generate_parser_for_grammar;
use unindent::Unindent;
use super::helpers::{
@@ -17,6 +16,7 @@ use super::helpers::{
query_helpers::{assert_query_matches, Match, Pattern},
};
use crate::tests::{
generate_parser,
helpers::query_helpers::{collect_captures, collect_matches},
ITERATION_COUNT,
};
@@ -532,7 +532,7 @@ fn test_query_errors_on_impossible_patterns() {
}
);
if js_lang.version() >= 15 {
if js_lang.abi_version() >= 15 {
assert_eq!(
Query::new(&js_lang, "(statement/identifier)").unwrap_err(),
QueryError {
@@ -5216,7 +5216,7 @@ fn test_grammar_with_aliased_literal_query() {
// expansion: $ => seq('}'),
// },
// });
let (parser_name, parser_code) = generate_parser_for_grammar(
let (parser_name, parser_code) = generate_parser(
r#"
{
"name": "test",