Merge branch 'master' into wasm-language

This commit is contained in:
Max Brunsfeld 2023-10-27 11:57:04 +01:00
commit f4e2f68f14
161 changed files with 10293 additions and 4253 deletions

View file

@ -0,0 +1,279 @@
use super::helpers::fixtures::get_language;
use std::future::Future;
use std::pin::{pin, Pin};
use std::ptr;
use std::task::{self, Context, Poll, RawWaker, RawWakerVTable, Waker};
use tree_sitter::Parser;
#[test]
fn test_node_in_fut() {
    // Verifies that a `Node` (and a reference to one) can be captured by
    // async blocks/closures and used across `.await` points, when the whole
    // future is driven by a `Send + 'static` spawn-like helper.
    let (ret, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("bash");
        parser.set_language(language).unwrap();
        // "#" parses to a single comment node in bash.
        let tree = parser.parse("#", None).unwrap();
        let root = tree.root_node();
        let root_ref = &root;
        // Future-producing closure that reads `root` (the by-value binding)
        // after yielding once.
        let fut_val_fn = || async {
            // eprintln!("fut_val_fn: {}", root.child(0).unwrap().kind());
            yield_now().await;
            root.child(0).unwrap().kind()
        };
        yield_now().await;
        // Future-producing closure that reads the node through `root_ref`.
        let fut_ref_fn = || async {
            // eprintln!("fut_ref_fn: {}", root_ref.child(0).unwrap().kind());
            yield_now().await;
            root_ref.child(0).unwrap().kind()
        };
        let f1 = fut_val_fn().await;
        let f2 = fut_ref_fn().await;
        assert_eq!(f1, f2);
        // Same checks with plain async blocks instead of closures.
        let fut_val = async {
            // eprintln!("fut_val: {}", root.child(0).unwrap().kind());
            yield_now().await;
            root.child(0).unwrap().kind()
        };
        let fut_ref = async {
            // eprintln!("fut_ref: {}", root_ref.child(0).unwrap().kind());
            yield_now().await;
            root_ref.child(0).unwrap().kind()
        };
        let f1 = fut_val.await;
        let f2 = fut_ref.await;
        assert_eq!(f1, f2);
        f1
    })
    .join();
    // eprintln!("pended: {pended:?}");
    assert_eq!(ret, "comment");
    // Five `yield_now().await`s ran, so the outer future was pended 5 times.
    assert_eq!(pended, 5);
}
#[test]
fn test_node_and_cursor_ref_in_fut() {
    // Checks that a `Node`, a `&Node`, and a `&mut TreeCursor` can all be
    // held across `.await` points inside nested async blocks.
    let (_, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("bash");
        parser.set_language(language).unwrap();
        // "#" parses to a single comment node in bash.
        let tree = parser.parse("#", None).unwrap();
        let root = tree.root_node();
        let root_ref = &root;
        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;
        cursor_ref.goto_first_child();
        // Uses `root` after yielding once.
        let fut_val = async {
            yield_now().await;
            root.to_sexp();
        };
        yield_now().await;
        // Uses both the node reference and the mutable cursor reference
        // after yielding once.
        let fut_ref = async {
            yield_now().await;
            root_ref.to_sexp();
            cursor_ref.goto_first_child();
        };
        fut_val.await;
        fut_ref.await;
        // The cursor is usable again once the borrowing future completed.
        cursor_ref.goto_first_child();
    })
    .join();
    // Three `yield_now().await`s → three Pending polls of the outer future.
    assert_eq!(pended, 3);
}
#[test]
fn test_node_and_cursor_ref_in_fut_with_fut_fabrics() {
    // Same as the test above, but the futures come from closures
    // ("fabrics"), so the by-value one can be invoked multiple times; the
    // `async move` one consumes its captures and is invoked once.
    let (_, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("bash");
        parser.set_language(language).unwrap();
        // "#" parses to a single comment node in bash.
        let tree = parser.parse("#", None).unwrap();
        let root = tree.root_node();
        let root_ref = &root;
        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;
        cursor_ref.goto_first_child();
        // Reusable fabric: each call yields a fresh future reading `root`.
        let fut_val = || async {
            yield_now().await;
            root.to_sexp();
        };
        yield_now().await;
        // `async move` takes ownership of `root_ref` and `cursor_ref`.
        let fut_ref = || async move {
            yield_now().await;
            root_ref.to_sexp();
            cursor_ref.goto_first_child();
        };
        fut_val().await;
        fut_val().await;
        fut_ref().await;
    })
    .join();
    // Four `yield_now().await`s in total (1 direct + 2×fut_val + 1×fut_ref).
    assert_eq!(pended, 4);
}
#[test]
fn test_node_and_cursor_ref_in_fut_with_inner_spawns() {
    // Futures that clone the `Tree` can be handed to nested
    // `tokio_like_spawn` calls (which need `'static` futures), while the
    // outer future keeps using its own cursor in between.
    let (ret, pended) = tokio_like_spawn(async {
        let mut parser = Parser::new();
        let language = get_language("bash");
        parser.set_language(language).unwrap();
        // "#" parses to a single comment node in bash.
        let tree = parser.parse("#", None).unwrap();
        let mut cursor = tree.walk();
        let cursor_ref = &mut cursor;
        cursor_ref.goto_first_child();
        // Fabric producing a self-contained ('static) future: it owns a
        // clone of the tree and derives its node/cursor from that clone.
        let fut_val = || {
            let tree = tree.clone();
            async move {
                let root = tree.root_node();
                let mut cursor = tree.walk();
                let cursor_ref = &mut cursor;
                yield_now().await;
                root.to_sexp();
                cursor_ref.goto_first_child();
            }
        };
        yield_now().await;
        // Same, but reads the node through a reference inside the future.
        let fut_ref = || {
            let tree = tree.clone();
            async move {
                let root = tree.root_node();
                let root_ref = &root;
                let mut cursor = tree.walk();
                let cursor_ref = &mut cursor;
                yield_now().await;
                root_ref.to_sexp();
                cursor_ref.goto_first_child();
            }
        };
        // Each inner spawn drives its future to completion and reports how
        // often it was pended (each yields once, so p1 == p2 == 1).
        let (_, p1) = tokio_like_spawn(fut_val()).await.unwrap();
        let (_, p2) = tokio_like_spawn(fut_ref()).await.unwrap();
        cursor_ref.goto_first_child();
        fut_val().await;
        fut_val().await;
        fut_ref().await;
        cursor_ref.goto_first_child();
        p1 + p2
    })
    .join();
    // Outer future's own yields: 1 direct + 2×fut_val + 1×fut_ref = 4.
    // (The inner spawns' yields are counted in p1/p2, not here.)
    assert_eq!(pended, 4);
    assert_eq!(ret, 2);
}
/// Drives `future` to completion synchronously on the current thread using a
/// no-op waker, mimicking an executor's `spawn` API shape. Returns a handle
/// holding the future's output together with the number of times the future
/// returned `Pending` before completing.
fn tokio_like_spawn<T>(future: T) -> JoinHandle<(T::Output, usize)>
where
    T: Future + Send + 'static,
    T::Output: Send + 'static,
{
    // No runtime, just noop waker
    let waker = noop_waker();
    let mut cx = task::Context::from_waker(&waker);
    let mut future = pin!(future);
    let mut pend_count = 0;
    let output = loop {
        // A noop waker never reschedules anything, so we simply re-poll in a
        // busy loop until the future resolves.
        if let Poll::Ready(value) = future.as_mut().poll(&mut cx) {
            // eprintln!("ready, pended: {pend_count}");
            break value;
        }
        pend_count += 1;
    };
    JoinHandle::new((output, pend_count))
}
/// Cooperatively yields once: the first poll returns `Pending` after
/// scheduling a wake-up, the second poll completes.
async fn yield_now() {
    // One-shot yield future: `yielded` flips to true on the first poll.
    struct SimpleYieldNow {
        yielded: bool,
    }
    impl Future for SimpleYieldNow {
        type Output = ();
        fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
            if self.yielded {
                return Poll::Ready(());
            }
            self.yielded = true;
            // Request an immediate re-poll so an executor would come back to
            // us right away. `wake_by_ref` avoids the needless waker clone of
            // `cx.waker().clone().wake()`, and waking only on the `Pending`
            // path avoids a spurious wake-up on the completing poll.
            cx.waker().wake_by_ref();
            Poll::Pending
        }
    }
    SimpleYieldNow { yielded: false }.await
}
/// Builds a `Waker` whose every operation is a no-op. No allocation is
/// involved: the data pointer is null and is never dereferenced.
pub fn noop_waker() -> Waker {
    // Produces the raw no-op waker; `clone` in the vtable just hands back
    // another one of these.
    fn raw() -> RawWaker {
        const VTABLE: RawWakerVTable = RawWakerVTable::new(
            // clone: return a fresh no-op raw waker
            |_| raw(),
            // wake: nothing to notify
            |_| {},
            // wake_by_ref: nothing to notify
            |_| {},
            // drop: nothing was allocated, nothing to free
            |_| {},
        );
        RawWaker::new(ptr::null(), &VTABLE)
    }
    // SAFETY: all vtable entries are no-ops and the data pointer is never
    // dereferenced, so the RawWaker contract is trivially upheld.
    unsafe { Waker::from_raw(raw()) }
}
/// Minimal stand-in for an executor's join handle. The wrapped result is
/// computed eagerly before the handle exists, so the handle merely stores it
/// until it is claimed — either synchronously via `join` or by awaiting.
struct JoinHandle<T> {
    // `None` once the result has been taken.
    data: Option<T>,
}

impl<T> JoinHandle<T> {
    /// Wraps an already-computed result.
    fn new(data: T) -> Self {
        JoinHandle { data: Some(data) }
    }

    /// Takes the stored result. Panics if the result was already claimed.
    fn join(&mut self) -> T {
        self.data.take().unwrap()
    }
}

impl<T: Unpin> Future for JoinHandle<T> {
    type Output = std::result::Result<T, ()>;

    /// Always completes immediately, since the result was produced before
    /// the handle was created. Panics if polled after the result was taken.
    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
        Poll::Ready(Ok(self.get_mut().data.take().unwrap()))
    }
}

View file

@ -1,7 +1,8 @@
use super::helpers::{
allocations,
edits::{get_random_edit, invert_edit},
fixtures::{fixtures_dir, get_language, get_test_language},
fixtures::{fixtures_dir, get_language, get_test_language, SCRATCH_BASE_DIR},
new_seed,
random::Rand,
scope_sequence::ScopeSequence,
EDIT_COUNT, EXAMPLE_FILTER, ITERATION_COUNT, LANGUAGE_FILTER, LOG_ENABLED, LOG_GRAPH_ENABLED,
@ -13,70 +14,81 @@ use crate::{
test::{parse_tests, print_diff, print_diff_key, strip_sexp_fields, TestEntry},
util,
};
use std::fs;
use std::{collections::HashMap, env, fs};
use tree_sitter::{LogType, Node, Parser, Point, Range, Tree};
use tree_sitter_proc_macro::test_with_seed;
#[test]
fn test_bash_corpus() {
test_language_corpus("bash");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_bash(seed: usize) {
test_language_corpus(
"bash",
seed,
Some(&[
// Fragile tests where edit customization changes
// lead to significant parse tree structure changes.
"bash - corpus - commands - Nested Heredocs",
"bash - corpus - commands - Quoted Heredocs",
"bash - corpus - commands - Heredocs with weird characters",
]),
);
}
#[test]
fn test_c_corpus() {
test_language_corpus("c");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_c(seed: usize) {
test_language_corpus("c", seed, None);
}
#[test]
fn test_cpp_corpus() {
test_language_corpus("cpp");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_cpp(seed: usize) {
test_language_corpus("cpp", seed, None);
}
#[test]
fn test_embedded_template_corpus() {
test_language_corpus("embedded-template");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_embedded_template(seed: usize) {
test_language_corpus("embedded-template", seed, None);
}
#[test]
fn test_go_corpus() {
test_language_corpus("go");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_go(seed: usize) {
test_language_corpus("go", seed, None);
}
#[test]
fn test_html_corpus() {
test_language_corpus("html");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_html(seed: usize) {
test_language_corpus("html", seed, None);
}
#[test]
fn test_javascript_corpus() {
test_language_corpus("javascript");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_javascript(seed: usize) {
test_language_corpus("javascript", seed, None);
}
#[test]
fn test_json_corpus() {
test_language_corpus("json");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_json(seed: usize) {
test_language_corpus("json", seed, None);
}
#[test]
fn test_php_corpus() {
test_language_corpus("php");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_php(seed: usize) {
test_language_corpus("php", seed, None);
}
#[test]
fn test_python_corpus() {
test_language_corpus("python");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_python(seed: usize) {
test_language_corpus("python", seed, None);
}
#[test]
fn test_ruby_corpus() {
test_language_corpus("ruby");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_ruby(seed: usize) {
test_language_corpus("ruby", seed, None);
}
#[test]
fn test_rust_corpus() {
test_language_corpus("rust");
#[test_with_seed(retry=10, seed=*START_SEED, seed_fn=new_seed)]
fn test_corpus_for_rust(seed: usize) {
test_language_corpus("rust", seed, None);
}
fn test_language_corpus(language_name: &str) {
fn test_language_corpus(language_name: &str, start_seed: usize, skipped: Option<&[&str]>) {
let grammars_dir = fixtures_dir().join("grammars");
let error_corpus_dir = fixtures_dir().join("error_corpus");
let template_corpus_dir = fixtures_dir().join("template_corpus");
@ -98,10 +110,30 @@ fn test_language_corpus(language_name: &str) {
t
}));
let mut skipped = skipped.map(|x| HashMap::<&str, usize>::from_iter(x.iter().map(|x| (*x, 0))));
let language = get_language(language_name);
let mut failure_count = 0;
for test in tests {
println!(" {} example - {}", language_name, test.name);
let log_seed = env::var("TREE_SITTER_LOG_SEED").is_ok();
let dump_edits = env::var("TREE_SITTER_DUMP_EDITS").is_ok();
if log_seed {
println!(" start seed: {}", start_seed);
}
println!();
for (test_index, test) in tests.iter().enumerate() {
let test_name = format!("{language_name} - {}", test.name);
if let Some(skipped) = skipped.as_mut() {
if let Some(counter) = skipped.get_mut(test_name.as_str()) {
println!(" {test_index}. {test_name} - SKIPPED");
*counter += 1;
continue;
}
}
println!(" {test_index}. {test_name}");
let passed = allocations::record(|| {
let mut log_session = None;
@ -116,10 +148,7 @@ fn test_language_corpus(language_name: &str) {
}
if actual_output != test.output {
println!(
"Incorrect initial parse for {} - {}",
language_name, test.name,
);
println!("Incorrect initial parse for {test_name}");
print_diff_key();
print_diff(&actual_output, &test.output);
println!("");
@ -140,7 +169,7 @@ fn test_language_corpus(language_name: &str) {
drop(parser);
for trial in 0..*ITERATION_COUNT {
let seed = *START_SEED + trial;
let seed = start_seed + trial;
let passed = allocations::record(|| {
let mut rand = Rand::new(seed);
let mut log_session = None;
@ -158,10 +187,21 @@ fn test_language_corpus(language_name: &str) {
for _ in 0..1 + rand.unsigned(*EDIT_COUNT) {
let edit = get_random_edit(&mut rand, &input);
undo_stack.push(invert_edit(&input, &edit));
perform_edit(&mut tree, &mut input, &edit);
perform_edit(&mut tree, &mut input, &edit).unwrap();
}
// println!(" seed: {}", seed);
if log_seed {
println!(" {test_index}.{trial:<2} seed: {}", seed);
}
if dump_edits {
fs::write(
SCRATCH_BASE_DIR
.join(format!("edit.{seed}.{test_index}.{trial} {test_name}")),
&input,
)
.unwrap();
}
if *LOG_GRAPH_ENABLED {
eprintln!("{}\n", String::from_utf8_lossy(&input));
@ -173,16 +213,13 @@ fn test_language_corpus(language_name: &str) {
// Check that the new tree is consistent.
check_consistent_sizes(&tree2, &input);
if let Err(message) = check_changed_ranges(&tree, &tree2, &input) {
println!(
"\nUnexpected scope change in seed {}\n{}\n\n",
seed, message
);
println!("\nUnexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n",);
return false;
}
// Undo all of the edits and re-parse again.
while let Some(edit) = undo_stack.pop() {
perform_edit(&mut tree2, &mut input, &edit);
perform_edit(&mut tree2, &mut input, &edit).unwrap();
}
if *LOG_GRAPH_ENABLED {
eprintln!("{}\n", String::from_utf8_lossy(&input));
@ -198,10 +235,7 @@ fn test_language_corpus(language_name: &str) {
}
if actual_output != test.output {
println!(
"Incorrect parse for {} - {} - seed {}",
language_name, test.name, seed
);
println!("Incorrect parse for {test_name} - seed {seed}");
print_diff_key();
print_diff(&actual_output, &test.output);
println!("");
@ -211,7 +245,7 @@ fn test_language_corpus(language_name: &str) {
// Check that the edited tree is consistent.
check_consistent_sizes(&tree3, &input);
if let Err(message) = check_changed_ranges(&tree2, &tree3, &input) {
eprintln!("Unexpected scope change in seed {}\n{}\n\n", seed, message);
println!("Unexpected scope change in seed {seed} with start seed {start_seed}\n{message}\n\n");
return false;
}
@ -228,6 +262,18 @@ fn test_language_corpus(language_name: &str) {
if failure_count > 0 {
panic!("{} {} corpus tests failed", failure_count, language_name);
}
if let Some(skipped) = skipped.as_mut() {
skipped.retain(|_, v| *v == 0);
if skipped.len() > 0 {
println!("Non matchable skip definitions:");
for k in skipped.keys() {
println!(" {k}");
}
panic!("Non matchable skip definitions needs to be removed");
}
}
}
#[test]
@ -255,7 +301,7 @@ fn test_feature_corpus_files() {
grammar_path = test_path.join("grammar.json");
}
let error_message_path = test_path.join("expected_error.txt");
let grammar_json = generate::load_grammar_file(&grammar_path).unwrap();
let grammar_json = generate::load_grammar_file(&grammar_path, None).unwrap();
let generate_result = generate::generate_parser_for_grammar(&grammar_json);
if error_message_path.exists() {
@ -424,7 +470,12 @@ fn set_included_ranges(parser: &mut Parser, input: &[u8], delimiters: Option<(&s
let mut ranges = Vec::new();
let mut ix = 0;
while ix < input.len() {
let Some(mut start_ix) = input[ix..].windows(2).position(|win| win == start.as_bytes()) else { break };
let Some(mut start_ix) = input[ix..]
.windows(2)
.position(|win| win == start.as_bytes())
else {
break;
};
start_ix += ix + start.len();
let end_ix = input[start_ix..]
.windows(2)
@ -492,6 +543,7 @@ fn flatten_tests(test: TestEntry) -> Vec<FlattenedTest> {
input,
output,
has_fields,
..
} => {
if !prefix.is_empty() {
name.insert_str(0, " - ");

View file

@ -0,0 +1,42 @@
// Tests in this mod need be executed with enabled UBSAN library:
// ```
// UBSAN_OPTIONS="halt_on_error=1" \
// CFLAGS="-fsanitize=undefined" \
// RUSTFLAGS="-lubsan" \
// cargo test --target $(rustc -vV | sed -nr 's/^host: //p') -- --test-threads 1
// ```
use super::helpers::query_helpers::assert_query_matches;
use crate::tests::helpers::fixtures::get_language;
use indoc::indoc;
use tree_sitter::Query;
#[test]
fn issue_2162_out_of_bound() {
    // Regression check for issue #2162: building this query used to read out
    // of bounds. Constructing it must simply succeed.
    let query = Query::new(get_language("java"), "(package_declaration _ (_) @name _)");
    assert!(query.is_ok());
}
#[test]
fn issue_2107_first_child_group_anchor_had_no_effect() {
    // Regression check for issue #2107: the `.` anchor in front of a grouped
    // pattern used to be ignored, so every `int` parameter matched. With the
    // anchor honored, only the first parameter can match.
    let language = get_language("c");
    let source_code = indoc! {r#"
void fun(int a, char b, int c) { };
"#};
    let query_source = indoc! {r#"
(parameter_list
.
(
(parameter_declaration) @constant
(#match? @constant "^int")
)
)
"#};
    let query = Query::new(language, query_source).unwrap();
    assert_query_matches(
        language,
        &query,
        source_code,
        &[(0, vec![("constant", "int a")])],
    );
}

View file

@ -2,7 +2,7 @@ use std::{
collections::HashMap,
os::raw::c_void,
sync::{
atomic::{AtomicBool, AtomicU64, Ordering::SeqCst},
atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
Mutex,
},
};
@ -25,8 +25,8 @@ unsafe impl Sync for Allocation {}
#[derive(Default)]
struct AllocationRecorder {
enabled: AtomicBool,
allocation_count: AtomicU64,
outstanding_allocations: Mutex<HashMap<Allocation, u64>>,
allocation_count: AtomicUsize,
outstanding_allocations: Mutex<HashMap<Allocation, usize>>,
}
thread_local! {
@ -83,6 +83,9 @@ fn record_alloc(ptr: *mut c_void) {
}
fn record_dealloc(ptr: *mut c_void) {
if ptr.is_null() {
panic!("Zero pointer deallocation!");
}
RECORDER.with(|recorder| {
if recorder.enabled.load(SeqCst) {
recorder
@ -107,9 +110,13 @@ unsafe extern "C" fn ts_record_calloc(count: usize, size: usize) -> *mut c_void
}
unsafe extern "C" fn ts_record_realloc(ptr: *mut c_void, size: usize) -> *mut c_void {
record_dealloc(ptr);
let result = realloc(ptr, size);
record_alloc(result);
if ptr.is_null() {
record_alloc(result);
} else if ptr != result {
record_dealloc(ptr);
record_alloc(result);
}
result
}

View file

@ -1,11 +1,46 @@
lazy_static! {
static ref ROOT_DIR: PathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR")).parent().unwrap().to_owned();
static ref FIXTURES_DIR: PathBuf = ROOT_DIR.join("test").join("fixtures");
static ref HEADER_DIR: PathBuf = ROOT_DIR.join("lib").join("include");
static ref GRAMMARS_DIR: PathBuf = ROOT_DIR.join("test").join("fixtures").join("grammars");
static ref SCRATCH_DIR: PathBuf = {
pub static ref ROOT_DIR: PathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR")).parent().unwrap().to_owned();
pub static ref FIXTURES_DIR: PathBuf = ROOT_DIR.join("test").join("fixtures");
pub static ref HEADER_DIR: PathBuf = ROOT_DIR.join("lib").join("include");
pub static ref GRAMMARS_DIR: PathBuf = ROOT_DIR.join("test").join("fixtures").join("grammars");
pub static ref SCRATCH_BASE_DIR: PathBuf = {
let result = ROOT_DIR.join("target").join("scratch");
fs::create_dir_all(&result).unwrap();
result
};
pub static ref SCRATCH_DIR: PathBuf = {
// https://doc.rust-lang.org/reference/conditional-compilation.html
let vendor = if cfg!(target_vendor = "apple") {
"apple"
} else if cfg!(target_vendor = "fortanix") {
"fortanix"
} else if cfg!(target_vendor = "pc") {
"pc"
} else {
"unknown"
};
let env = if cfg!(target_env = "gnu") {
"gnu"
} else if cfg!(target_env = "msvc") {
"msvc"
} else if cfg!(target_env = "musl") {
"musl"
} else if cfg!(target_env = "sgx") {
"sgx"
} else {
"unknown"
};
let endian = if cfg!(target_endian = "little") {
"little"
} else if cfg!(target_endian = "big") {
"big"
} else {
"unknown"
};
let machine = format!("{}-{}-{}-{}-{}", std::env::consts::ARCH, std::env::consts::OS, vendor, env, endian);
let result = SCRATCH_BASE_DIR.join(machine);
fs::create_dir_all(&result).unwrap();
result
};
}

View file

@ -1,6 +1,6 @@
use lazy_static::lazy_static;
use std::fs;
use std::path::{Path, PathBuf};
use std::{env, fs};
use tree_sitter::Language;
use tree_sitter_highlight::HighlightConfiguration;
use tree_sitter_loader::Loader;
@ -9,7 +9,13 @@ use tree_sitter_tags::TagsConfiguration;
include!("./dirs.rs");
lazy_static! {
static ref TEST_LOADER: Loader = Loader::with_parser_lib_path(SCRATCH_DIR.join("lib"));
static ref TEST_LOADER: Loader = {
let mut loader = Loader::with_parser_lib_path(SCRATCH_DIR.clone());
if env::var("TREE_SITTER_GRAMMAR_DEBUG").is_ok() {
loader.use_debug_build(true);
}
loader
};
}
pub fn test_loader<'a>() -> &'a Loader {
@ -46,9 +52,11 @@ pub fn get_highlight_config(
let locals_query = fs::read_to_string(queries_path.join("locals.scm")).unwrap_or(String::new());
let mut result = HighlightConfiguration::new(
language,
language_name,
&highlights_query,
&injections_query,
&locals_query,
false,
)
.unwrap();
result.configure(&highlight_names);
@ -63,11 +71,7 @@ pub fn get_tags_config(language_name: &str) -> TagsConfiguration {
TagsConfiguration::new(language, &tags_query, &locals_query).unwrap()
}
pub fn get_test_language(
name: &str,
parser_code: &str,
scanner_src_path: Option<&Path>,
) -> Language {
pub fn get_test_language(name: &str, parser_code: &str, path: Option<&Path>) -> Language {
let src_dir = SCRATCH_DIR.join("src").join(name);
fs::create_dir_all(&src_dir).unwrap();
@ -76,11 +80,16 @@ pub fn get_test_language(
fs::write(&parser_path, parser_code).unwrap();
}
if let Some(scanner_src_path) = scanner_src_path {
let scanner_code = fs::read_to_string(&scanner_src_path).unwrap();
let scanner_path = src_dir.join("scanner.c");
if !fs::read_to_string(&scanner_path).map_or(false, |content| content == scanner_code) {
fs::write(&scanner_path, scanner_code).unwrap();
if let Some(path) = path {
let scanner_path = path.join("scanner.c");
if scanner_path.exists() {
let scanner_code = fs::read_to_string(&scanner_path).unwrap();
let scanner_copy_path = src_dir.join("scanner.c");
if !fs::read_to_string(&scanner_copy_path)
.map_or(false, |content| content == scanner_code)
{
fs::write(&scanner_copy_path, scanner_code).unwrap();
}
}
}

View file

@ -6,7 +6,8 @@ pub(super) mod random;
pub(super) mod scope_sequence;
use lazy_static::lazy_static;
use std::{env, time, usize};
use rand::Rng;
use std::env;
lazy_static! {
pub static ref LOG_ENABLED: bool = env::var("TREE_SITTER_LOG").is_ok();
@ -16,11 +17,7 @@ lazy_static! {
}
lazy_static! {
pub static ref START_SEED: usize =
int_env_var("TREE_SITTER_SEED").unwrap_or_else(|| time::SystemTime::now()
.duration_since(time::UNIX_EPOCH)
.unwrap()
.as_secs() as usize,);
pub static ref START_SEED: usize = new_seed();
pub static ref EDIT_COUNT: usize = int_env_var("TREE_SITTER_EDITS").unwrap_or(3);
pub static ref ITERATION_COUNT: usize = int_env_var("TREE_SITTER_ITERATIONS").unwrap_or(10);
}
@ -28,3 +25,10 @@ lazy_static! {
fn int_env_var(name: &'static str) -> Option<usize> {
env::var(name).ok().and_then(|e| e.parse().ok())
}
/// Returns the seed for randomized corpus tests: an explicit value from the
/// `TREE_SITTER_SEED` environment variable when set, otherwise a freshly
/// generated random one.
pub(crate) fn new_seed() -> usize {
    match int_env_var("TREE_SITTER_SEED") {
        Some(seed) => seed,
        None => rand::thread_rng().gen::<usize>(),
    }
}

View file

@ -1,6 +1,8 @@
use rand::prelude::Rng;
use std::{cmp::Ordering, fmt::Write, ops::Range};
use tree_sitter::{Node, Point, Tree, TreeCursor};
use tree_sitter::{
Language, Node, Parser, Point, Query, QueryCapture, QueryCursor, QueryMatch, Tree, TreeCursor,
};
#[derive(Debug)]
pub struct Pattern {
@ -304,3 +306,56 @@ fn compare_depth_first(a: Node, b: Node) -> Ordering {
let b = b.byte_range();
a.start.cmp(&b.start).then_with(|| b.end.cmp(&a.end))
}
/// Parses `source` with `language`, runs `query` over the whole tree, and
/// asserts that the collected `(pattern_index, [(capture_name, text)])` rows
/// equal `expected`.
///
/// Panics if the language cannot be set, the parse fails, the matches differ
/// from `expected`, or the query cursor exceeded its match limit.
pub fn assert_query_matches(
    language: Language,
    query: &Query,
    source: &str,
    expected: &[(usize, Vec<(&str, &str)>)],
) {
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    let tree = parser.parse(source, None).unwrap();
    let mut cursor = QueryCursor::new();
    // `query` is already a reference; no extra borrow needed.
    let matches = cursor.matches(query, tree.root_node(), source.as_bytes());
    pretty_assertions::assert_eq!(collect_matches(matches, query, source), expected);
    // If the match limit was exceeded, matches may have been dropped and the
    // comparison above would be unreliable.
    assert!(!cursor.did_exceed_match_limit());
}
/// Flattens query matches into `(pattern_index, [(capture_name, text)])`
/// rows, resolving each capture's text against `source`.
pub fn collect_matches<'a>(
    matches: impl Iterator<Item = QueryMatch<'a, 'a>>,
    query: &'a Query,
    source: &'a str,
) -> Vec<(usize, Vec<(&'a str, &'a str)>)> {
    let mut rows = Vec::new();
    for m in matches {
        let captures = format_captures(m.captures.iter().cloned(), query, source);
        rows.push((m.pattern_index, captures));
    }
    rows
}
/// Resolves a stream of `(match, capture_index)` pairs into
/// `(capture_name, text)` pairs against `source`.
pub fn collect_captures<'a>(
    captures: impl Iterator<Item = (QueryMatch<'a, 'a>, usize)>,
    query: &'a Query,
    source: &'a str,
) -> Vec<(&'a str, &'a str)> {
    let selected = captures.map(|(m, i)| m.captures[i]);
    format_captures(selected, query, source)
}
/// Maps each capture to its name (looked up in `query`) and the UTF-8 text
/// of the captured node within `source`.
fn format_captures<'a>(
    captures: impl Iterator<Item = QueryCapture<'a>>,
    query: &'a Query,
    source: &'a str,
) -> Vec<(&'a str, &'a str)> {
    let mut formatted = Vec::new();
    for capture in captures {
        let name = query.capture_names()[capture.index as usize];
        let text = capture.node.utf8_text(source.as_bytes()).unwrap();
        formatted.push((name, text));
    }
    formatted
}

View file

@ -24,6 +24,7 @@ lazy_static! {
get_highlight_config("rust", Some("injections.scm"), &HIGHLIGHT_NAMES);
static ref HIGHLIGHT_NAMES: Vec<String> = [
"attribute",
"boolean",
"carriage-return",
"comment",
"constant",
@ -61,7 +62,7 @@ lazy_static! {
fn test_highlighting_javascript() {
let source = "const a = function(b) { return b + c; }";
assert_eq!(
&to_token_vector(&source, &JS_HIGHLIGHT).unwrap(),
&to_token_vector(source, &JS_HIGHLIGHT).unwrap(),
&[vec![
("const", vec!["keyword"]),
(" ", vec![]),
@ -71,14 +72,14 @@ fn test_highlighting_javascript() {
(" ", vec![]),
("function", vec!["keyword"]),
("(", vec!["punctuation.bracket"]),
("b", vec!["variable.parameter"]),
("b", vec!["variable"]),
(")", vec!["punctuation.bracket"]),
(" ", vec![]),
("{", vec!["punctuation.bracket"]),
(" ", vec![]),
("return", vec!["keyword"]),
(" ", vec![]),
("b", vec!["variable.parameter"]),
("b", vec!["variable"]),
(" ", vec![]),
("+", vec!["operator"]),
(" ", vec![]),
@ -92,7 +93,7 @@ fn test_highlighting_javascript() {
#[test]
fn test_highlighting_injected_html_in_javascript() {
let source = vec!["const s = html `<div>${a < b}</div>`;"].join("\n");
let source = ["const s = html `<div>${a < b}</div>`;"].join("\n");
assert_eq!(
&to_token_vector(&source, &JS_HIGHLIGHT).unwrap(),
@ -156,7 +157,7 @@ fn test_highlighting_injected_javascript_in_html_mini() {
#[test]
fn test_highlighting_injected_javascript_in_html() {
let source = vec![
let source = [
"<body>",
" <script>",
" const x = new Thing();",
@ -211,7 +212,7 @@ fn test_highlighting_injected_javascript_in_html() {
#[test]
fn test_highlighting_multiline_nodes_to_html() {
let source = vec![
let source = [
"const SOMETHING = `",
" one ${",
" two()",
@ -235,7 +236,7 @@ fn test_highlighting_multiline_nodes_to_html() {
#[test]
fn test_highlighting_with_local_variable_tracking() {
let source = vec![
let source = [
"module.exports = function a(b) {",
" const module = c;",
" console.log(module, b);",
@ -257,7 +258,7 @@ fn test_highlighting_with_local_variable_tracking() {
(" ", vec![]),
("a", vec!["function"]),
("(", vec!["punctuation.bracket"]),
("b", vec!["variable.parameter"]),
("b", vec!["variable"]),
(")", vec!["punctuation.bracket"]),
(" ", vec![]),
("{", vec!["punctuation.bracket"])
@ -284,7 +285,7 @@ fn test_highlighting_with_local_variable_tracking() {
(",", vec!["punctuation.delimiter"]),
(" ", vec![]),
// A parameter, because `b` was defined as a parameter above.
("b", vec!["variable.parameter"]),
("b", vec!["variable"]),
(")", vec!["punctuation.bracket"]),
(";", vec!["punctuation.delimiter"]),
],
@ -295,7 +296,7 @@ fn test_highlighting_with_local_variable_tracking() {
#[test]
fn test_highlighting_empty_lines() {
let source = vec![
let source = [
"class A {",
"",
" b(c) {",
@ -313,7 +314,7 @@ fn test_highlighting_empty_lines() {
&[
"<span class=keyword>class</span> <span class=constructor>A</span> <span class=punctuation.bracket>{</span>\n".to_string(),
"\n".to_string(),
" <span class=function>b</span><span class=punctuation.bracket>(</span><span class=variable.parameter>c</span><span class=punctuation.bracket>)</span> <span class=punctuation.bracket>{</span>\n".to_string(),
" <span class=function>b</span><span class=punctuation.bracket>(</span><span class=variable>c</span><span class=punctuation.bracket>)</span> <span class=punctuation.bracket>{</span>\n".to_string(),
"\n".to_string(),
" <span class=function>d</span><span class=punctuation.bracket>(</span><span class=variable>e</span><span class=punctuation.bracket>)</span>\n".to_string(),
"\n".to_string(),
@ -329,7 +330,7 @@ fn test_highlighting_carriage_returns() {
let source = "a = \"a\rb\"\r\nb\r";
assert_eq!(
&to_html(&source, &JS_HIGHLIGHT).unwrap(),
&to_html(source, &JS_HIGHLIGHT).unwrap(),
&[
"<span class=variable>a</span> <span class=operator>=</span> <span class=string>&quot;a<span class=carriage-return></span>b&quot;</span>\n",
"<span class=variable>b</span>\n",
@ -339,7 +340,7 @@ fn test_highlighting_carriage_returns() {
#[test]
fn test_highlighting_ejs_with_html_and_javascript() {
let source = vec!["<div><% foo() %></div><script> bar() </script>"].join("\n");
let source = ["<div><% foo() %></div><script> bar() </script>"].join("\n");
assert_eq!(
&to_token_vector(&source, &EJS_HIGHLIGHT).unwrap(),
@ -376,7 +377,7 @@ fn test_highlighting_ejs_with_html_and_javascript() {
fn test_highlighting_javascript_with_jsdoc() {
// Regression test: the middle comment has no highlights. This should not prevent
// later injections from highlighting properly.
let source = vec!["a /* @see a */ b; /* nothing */ c; /* @see b */"].join("\n");
let source = ["a /* @see a */ b; /* nothing */ c; /* @see b */"].join("\n");
assert_eq!(
&to_token_vector(&source, &JS_HIGHLIGHT).unwrap(),
@ -404,7 +405,7 @@ fn test_highlighting_javascript_with_jsdoc() {
#[test]
fn test_highlighting_with_content_children_included() {
let source = vec!["assert!(", " a.b.c() < D::e::<F>()", ");"].join("\n");
let source = ["assert!(", " a.b.c() < D::e::<F>()", ");"].join("\n");
assert_eq!(
&to_token_vector(&source, &RUST_HIGHLIGHT).unwrap(),
@ -482,7 +483,7 @@ fn test_highlighting_cancellation() {
#[test]
fn test_highlighting_via_c_api() {
let highlights = vec![
let highlights = [
"class=tag\0",
"class=function\0",
"class=string\0",
@ -496,68 +497,82 @@ fn test_highlighting_via_c_api() {
.iter()
.map(|h| h.as_bytes().as_ptr() as *const c_char)
.collect::<Vec<_>>();
let highlighter = c::ts_highlighter_new(
&highlight_names[0] as *const *const c_char,
&highlight_attrs[0] as *const *const c_char,
highlights.len() as u32,
);
let highlighter = unsafe {
c::ts_highlighter_new(
&highlight_names[0] as *const *const c_char,
&highlight_attrs[0] as *const *const c_char,
highlights.len() as u32,
)
};
let source_code = c_string("<script>\nconst a = b('c');\nc.d();\n</script>");
let js_scope = c_string("source.js");
let js_injection_regex = c_string("^javascript");
let language = get_language("javascript");
let lang_name = c_string("javascript");
let queries = get_language_queries_path("javascript");
let highlights_query = fs::read_to_string(queries.join("highlights.scm")).unwrap();
let injections_query = fs::read_to_string(queries.join("injections.scm")).unwrap();
let locals_query = fs::read_to_string(queries.join("locals.scm")).unwrap();
c::ts_highlighter_add_language(
highlighter,
js_scope.as_ptr(),
js_injection_regex.as_ptr(),
language,
highlights_query.as_ptr() as *const c_char,
injections_query.as_ptr() as *const c_char,
locals_query.as_ptr() as *const c_char,
highlights_query.len() as u32,
injections_query.len() as u32,
locals_query.len() as u32,
);
unsafe {
c::ts_highlighter_add_language(
highlighter,
lang_name.as_ptr(),
js_scope.as_ptr(),
js_injection_regex.as_ptr(),
language,
highlights_query.as_ptr() as *const c_char,
injections_query.as_ptr() as *const c_char,
locals_query.as_ptr() as *const c_char,
highlights_query.len() as u32,
injections_query.len() as u32,
locals_query.len() as u32,
false,
);
}
let html_scope = c_string("text.html.basic");
let html_injection_regex = c_string("^html");
let language = get_language("html");
let lang_name = c_string("html");
let queries = get_language_queries_path("html");
let highlights_query = fs::read_to_string(queries.join("highlights.scm")).unwrap();
let injections_query = fs::read_to_string(queries.join("injections.scm")).unwrap();
c::ts_highlighter_add_language(
highlighter,
html_scope.as_ptr(),
html_injection_regex.as_ptr(),
language,
highlights_query.as_ptr() as *const c_char,
injections_query.as_ptr() as *const c_char,
ptr::null(),
highlights_query.len() as u32,
injections_query.len() as u32,
0,
);
unsafe {
c::ts_highlighter_add_language(
highlighter,
lang_name.as_ptr(),
html_scope.as_ptr(),
html_injection_regex.as_ptr(),
language,
highlights_query.as_ptr() as *const c_char,
injections_query.as_ptr() as *const c_char,
ptr::null(),
highlights_query.len() as u32,
injections_query.len() as u32,
0,
false,
);
}
let buffer = c::ts_highlight_buffer_new();
c::ts_highlighter_highlight(
highlighter,
html_scope.as_ptr(),
source_code.as_ptr(),
source_code.as_bytes().len() as u32,
buffer,
ptr::null_mut(),
);
unsafe {
c::ts_highlighter_highlight(
highlighter,
html_scope.as_ptr(),
source_code.as_ptr(),
source_code.as_bytes().len() as u32,
buffer,
ptr::null_mut(),
);
}
let output_bytes = c::ts_highlight_buffer_content(buffer);
let output_line_offsets = c::ts_highlight_buffer_line_offsets(buffer);
let output_len = c::ts_highlight_buffer_len(buffer);
let output_line_count = c::ts_highlight_buffer_line_count(buffer);
let output_bytes = unsafe { c::ts_highlight_buffer_content(buffer) };
let output_line_offsets = unsafe { c::ts_highlight_buffer_line_offsets(buffer) };
let output_len = unsafe { c::ts_highlight_buffer_len(buffer) };
let output_line_count = unsafe { c::ts_highlight_buffer_line_count(buffer) };
let output_bytes = unsafe { slice::from_raw_parts(output_bytes, output_len as usize) };
let output_line_offsets =
@ -583,8 +598,69 @@ fn test_highlighting_via_c_api() {
]
);
c::ts_highlighter_delete(highlighter);
c::ts_highlight_buffer_delete(buffer);
unsafe {
c::ts_highlighter_delete(highlighter);
c::ts_highlight_buffer_delete(buffer);
}
}
#[test]
fn test_highlighting_with_all_captures_applied() {
    // NOTE(review): the snippet's `->` has no return type, so the source is
    // intentionally slightly malformed; highlighting must still succeed.
    let source = "fn main(a: u32, b: u32) -> { let c = a + b; }";
    let language = get_language("rust");
    // The broad `@variable` capture is listed before the more specific ones.
    // The trailing `true` passed to `HighlightConfiguration::new` presumably
    // makes all matching captures apply (per the test name) — TODO confirm
    // against the constructor's documentation.
    let highlights_query = indoc::indoc! {"
        [
            \"fn\"
            \"let\"
        ] @keyword
        (identifier) @variable
        (function_item name: (identifier) @function)
        (parameter pattern: (identifier) @variable.parameter)
        (primitive_type) @type.builtin
        \"=\" @operator
        [ \"->\" \":\" \";\" ] @punctuation.delimiter
        [ \"{\" \"}\" \"(\" \")\" ] @punctuation.bracket
    "};
    let mut rust_highlight_reverse =
        HighlightConfiguration::new(language, "rust", highlights_query, "", "", true).unwrap();
    rust_highlight_reverse.configure(&HIGHLIGHT_NAMES);

    // Each token is paired with the highlight names applied to it; note that
    // specific captures (`variable.parameter`, `function`) win over the plain
    // `@variable` capture that also matches those identifiers.
    assert_eq!(
        &to_token_vector(source, &rust_highlight_reverse).unwrap(),
        &[[
            ("fn", vec!["keyword"]),
            (" ", vec![]),
            ("main", vec!["function"]),
            ("(", vec!["punctuation.bracket"]),
            ("a", vec!["variable.parameter"]),
            (":", vec!["punctuation.delimiter"]),
            (" ", vec![]),
            ("u32", vec!["type.builtin"]),
            (", ", vec![]),
            ("b", vec!["variable.parameter"]),
            (":", vec!["punctuation.delimiter"]),
            (" ", vec![]),
            ("u32", vec!["type.builtin"]),
            (")", vec!["punctuation.bracket"]),
            (" ", vec![]),
            ("->", vec!["punctuation.delimiter"]),
            (" ", vec![]),
            ("{", vec!["punctuation.bracket"]),
            (" ", vec![]),
            ("let", vec!["keyword"]),
            (" ", vec![]),
            ("c", vec!["variable"]),
            (" ", vec![]),
            ("=", vec!["operator"]),
            (" ", vec![]),
            ("a", vec!["variable"]),
            (" + ", vec![]),
            ("b", vec!["variable"]),
            (";", vec!["punctuation.delimiter"]),
            (" ", vec![]),
            ("}", vec!["punctuation.bracket"])
        ]],
    );
}
#[test]
@ -667,20 +743,20 @@ fn to_token_vector<'a>(
}
HighlightEvent::Source { start, end } => {
let s = str::from_utf8(&src[start..end]).unwrap();
for (i, l) in s.split("\n").enumerate() {
for (i, l) in s.split('\n').enumerate() {
let l = l.trim_end_matches('\r');
if i > 0 {
lines.push(line);
line = Vec::new();
}
if l.len() > 0 {
if !l.is_empty() {
line.push((l, highlights.clone()));
}
}
}
}
}
if line.len() > 0 {
if !line.is_empty() {
lines.push(line);
}
Ok(lines)

View file

@ -0,0 +1,95 @@
use super::helpers::fixtures::get_language;
use tree_sitter::Parser;
#[test]
/// Checks that the lookahead-iterator API, `next_parse_state`, and
/// `Language::next_state` all agree on the symbols that may follow `struct`.
fn test_lookahead_iterator() {
    let language = get_language("rust");
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    let tree = parser.parse("struct Stuff {}", None).unwrap();

    let mut walker = tree.walk();
    assert!(walker.goto_first_child()); // struct
    assert!(walker.goto_first_child()); // struct keyword

    let keyword = walker.node();
    let next_state = keyword.next_parse_state();
    assert_ne!(next_state, 0);
    let derived_state = language.next_state(keyword.parse_state(), keyword.grammar_id());
    assert_eq!(next_state, derived_state);
    assert!((next_state as usize) < language.parse_state_count());

    assert!(walker.goto_next_sibling()); // type_identifier
    let ident = walker.node();
    assert_eq!(next_state, ident.parse_state());
    assert_eq!(ident.grammar_name(), "identifier");
    assert_ne!(ident.grammar_id(), ident.kind_id());

    let expected = ["identifier", "block_comment", "line_comment"];
    let mut la = language.lookahead_iterator(next_state).unwrap();
    assert_eq!(la.language(), language);
    // Name iteration, resetting by state, and resetting by language+state
    // must all yield the same symbol set.
    assert!(la.iter_names().eq(expected));
    la.reset_state(next_state);
    assert!(la.iter_names().eq(expected));
    la.reset(language, next_state);
    assert!(la
        .map(|sym| language.node_kind_for_id(sym).unwrap())
        .eq(expected));
}
#[test]
/// Advancing a lookahead iterator (and its names iterator) compiles when both
/// are declared `mut`; the shared-reference cases are covered by the
/// `compile_fail` doc-tests in this file.
fn test_lookahead_iterator_modifiable_only_by_mut() {
    let language = get_language("rust");
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    let tree = parser.parse("struct Stuff {}", None).unwrap();

    let mut walker = tree.walk();
    assert!(walker.goto_first_child()); // struct
    assert!(walker.goto_first_child()); // struct keyword

    let state = walker.node().next_parse_state();
    assert_ne!(state, 0);

    let mut iterator = language.lookahead_iterator(state).unwrap();
    let _ = iterator.next();
    let mut name_iter = iterator.iter_names();
    let _ = name_iter.next();
}
/// It is not allowed to advance a lookahead iterator through a shared reference:
/// error[E0596]: cannot borrow `lookahead` as mutable, as it is not declared as mutable
/// ```compile_fail
/// use tree_sitter::{Parser, Language};
/// let mut parser = Parser::new();
/// let language = unsafe { Language::from_raw(std::ptr::null()) };
/// let tree = parser.parse("", None).unwrap();
/// let mut cursor = tree.walk();
/// let next_state = cursor.node().next_parse_state();
/// let lookahead = language.lookahead_iterator(next_state).unwrap();
/// let _ = lookahead.next();
/// ```
/// It is not allowed to advance a lookahead names iterator through a shared reference:
/// error[E0596]: cannot borrow `names` as mutable, as it is not declared as mutable
/// ```compile_fail
/// use tree_sitter::{Parser, Language};
/// let mut parser = Parser::new();
/// let language = unsafe { Language::from_raw(std::ptr::null()) };
/// let tree = parser.parse("", None).unwrap();
/// let mut cursor = tree.walk();
/// let next_state = cursor.node().next_parse_state();
/// if let Some(mut lookahead) = language.lookahead_iterator(next_state) {
///     let _ = lookahead.next();
///     let names = lookahead.iter_names();
///     let _ = names.next();
/// }
/// ```
/// Dummy item that only exists to carry the `compile_fail` doc-tests above.
fn _dummy() {}

View file

@ -1,11 +1,16 @@
mod async_context_test;
mod corpus_test;
mod github_issue_test;
mod helpers;
mod highlight_test;
mod language_test;
mod node_test;
mod parser_hang_test;
mod parser_test;
mod pathological_test;
mod query_test;
mod tags_test;
mod test_highlight_test;
mod test_tags_test;
mod text_provider_test;
mod tree_test;

View file

@ -252,12 +252,14 @@ fn test_node_parent_of_child_by_field_name() {
fn test_node_field_name_for_child() {
let mut parser = Parser::new();
parser.set_language(get_language("c")).unwrap();
let tree = parser.parse("x + y;", None).unwrap();
let tree = parser.parse("int w = x + y;", None).unwrap();
let translation_unit_node = tree.root_node();
let binary_expression_node = translation_unit_node
.named_child(0)
let declaration_node = translation_unit_node.named_child(0).unwrap();
let binary_expression_node = declaration_node
.child_by_field_name("declarator")
.unwrap()
.named_child(0)
.child_by_field_name("value")
.unwrap();
assert_eq!(binary_expression_node.field_name_for_child(0), Some("left"));
@ -385,10 +387,52 @@ fn test_node_named_child_with_aliases_and_extras() {
assert_eq!(root.named_child(4).unwrap().kind(), "C");
}
#[test]
/// `descendant_count` must match an exhaustive traversal, and
/// `goto_descendant(i)` must land on the i-th node produced by
/// `get_all_nodes`, whether indices are visited forward or backward.
fn test_node_descendant_count() {
    let tree = parse_json_example();
    let root = tree.root_node();
    let all_nodes = get_all_nodes(&tree);
    assert_eq!(root.descendant_count(), all_nodes.len());

    let mut walker = root.walk();
    // Forward pass over every descendant index.
    for (i, node) in all_nodes.iter().enumerate() {
        walker.goto_descendant(i);
        assert_eq!(walker.node(), *node, "index {i}");
    }
    // Backward pass: jumping back to smaller indices must work too.
    for (i, node) in all_nodes.iter().enumerate().rev() {
        walker.goto_descendant(i);
        assert_eq!(walker.node(), *node, "rev index {i}");
    }
}
#[test]
/// A trivial embedded-template document yields exactly two nodes (root plus
/// one child); `goto_descendant` must reach both at the expected depths.
fn test_descendant_count_single_node_tree() {
    let mut parser = Parser::new();
    parser
        .set_language(get_language("embedded-template"))
        .unwrap();
    let tree = parser.parse("hello", None).unwrap();

    let all_nodes = get_all_nodes(&tree);
    assert_eq!(all_nodes.len(), 2);
    assert_eq!(tree.root_node().descendant_count(), 2);

    let mut walker = tree.root_node().walk();
    // Descendant 0 is the root itself.
    walker.goto_descendant(0);
    assert_eq!(walker.depth(), 0);
    assert_eq!(walker.node(), all_nodes[0]);
    // Descendant 1 is the single child.
    walker.goto_descendant(1);
    assert_eq!(walker.depth(), 1);
    assert_eq!(walker.node(), all_nodes[1]);
}
#[test]
fn test_node_descendant_for_range() {
let tree = parse_json_example();
let array_node = tree.root_node().child(0).unwrap();
let array_node = tree.root_node();
// Leaf node exactly matches the given bounds - byte query
let colon_index = JSON_EXAMPLE.find(":").unwrap();
@ -508,7 +552,7 @@ fn test_node_edit() {
let edit = get_random_edit(&mut rand, &mut code);
let mut tree2 = tree.clone();
let edit = perform_edit(&mut tree2, &mut code, &edit);
let edit = perform_edit(&mut tree2, &mut code, &edit).unwrap();
for node in nodes_before.iter_mut() {
node.edit(&edit);
}
@ -841,15 +885,17 @@ fn get_all_nodes(tree: &Tree) -> Vec<Node> {
let mut visited_children = false;
let mut cursor = tree.walk();
loop {
result.push(cursor.node());
if !visited_children && cursor.goto_first_child() {
continue;
} else if cursor.goto_next_sibling() {
visited_children = false;
} else if cursor.goto_parent() {
visited_children = true;
if !visited_children {
result.push(cursor.node());
if !cursor.goto_first_child() {
visited_children = true;
}
} else {
break;
if cursor.goto_next_sibling() {
visited_children = false;
} else if !cursor.goto_parent() {
break;
}
}
}
return result;

View file

@ -0,0 +1,104 @@
// For some reason, `Command::spawn` doesn't work in the CI environment for many exotic arches.
#![cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]
use crate::{
generate::{generate_parser_for_grammar, load_grammar_file},
tests::helpers::fixtures::{fixtures_dir, get_test_language},
};
use std::{
env::VarError,
process::{Command, Stdio},
};
use tree_sitter::Parser;
// The `sanitizing` cfg is required to avoid running these tests under a specific
// sanitizer, because sanitizers don't work well with subprocesses _(it's an assumption)_.
//
// Below are two alternative examples of how to disable tests for some arches,
// in case excluding the whole module from compilation doesn't work well.
//
// XXX: Also may be it makes sense to keep such tests as ignored by default
// to omit surprises and enable them on CI by passing an extra option explicitly:
//
// > cargo test -- --include-ignored
//
// #[cfg(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing)))]
// #[cfg_attr(not(all(any(target_arch = "x86_64", target_arch = "x86"), not(sanitizing))), ignore)]
//
#[test]
/// Spawns this test binary as a child process, pointed (via `CARGO_HANG_TEST`)
/// at a grammar that is expected to hang the parser rather than segfault.
/// The parent sleeps, verifies the child has NOT exited, then kills it.
/// Fixes applied: corrected the typo'd expect message ("Failed get get"),
/// `.nth(0)` replaced with the idiomatic `.next()`, and a dead `if true`
/// wrapper removed.
fn test_grammar_that_should_hang_and_not_segfault() {
    // How long the parent waits before checking on the child.
    let parent_sleep_millis = 1000;
    let test_name = "test_grammar_that_should_hang_and_not_segfault";
    let test_var = "CARGO_HANG_TEST";

    eprintln!(" {test_name}");

    let tests_exec_path = std::env::args()
        .next()
        .expect("Failed to get tests executable path");

    match std::env::var(test_var) {
        // Child branch: the env var names this test, so run the hanging parse.
        Ok(v) if v == test_name => {
            eprintln!(" child process id {}", std::process::id());
            hang_test();
        }
        // Parent branch: spawn ourselves as the child and supervise it.
        Err(VarError::NotPresent) => {
            eprintln!(" parent process id {}", std::process::id());
            let mut command = Command::new(tests_exec_path);
            command.arg(test_name).env(test_var, test_name);
            // Propagate --nocapture so the child's output is visible too;
            // otherwise silence the child entirely.
            if std::env::args().any(|x| x == "--nocapture") {
                command.arg("--nocapture");
            } else {
                command.stdout(Stdio::null()).stderr(Stdio::null());
            }
            match command.spawn() {
                Ok(mut child) => {
                    std::thread::sleep(std::time::Duration::from_millis(parent_sleep_millis));
                    // The child must still be running (i.e. hanging) — any
                    // exit, success or failure, is a test failure.
                    match child.try_wait() {
                        Ok(Some(status)) if status.success() => {
                            panic!("Child wasn't hang and exited successfully")
                        }
                        Ok(Some(status)) => panic!(
                            "Child wasn't hang and exited with status code: {:?}",
                            status.code()
                        ),
                        _ => (),
                    }
                    if let Err(e) = child.kill() {
                        eprintln!(
                            "Failed to kill hang test sub process id: {}, error: {e}",
                            child.id()
                        );
                    }
                }
                Err(e) => panic!("{e}"),
            }
        }
        Err(e) => panic!("Env var error: {e}"),
        // `Ok(v)` with an unexpected value would land here.
        _ => unreachable!(),
    }

    // Builds the `get_col_should_hang_not_crash` fixture grammar and parses
    // input that is expected to make the parser hang (the point of the test).
    fn hang_test() {
        let test_grammar_dir = fixtures_dir()
            .join("test_grammars")
            .join("get_col_should_hang_not_crash");
        let grammar_json = load_grammar_file(&test_grammar_dir.join("grammar.js"), None).unwrap();
        let (parser_name, parser_code) =
            generate_parser_for_grammar(grammar_json.as_str()).unwrap();
        let language =
            get_test_language(&parser_name, &parser_code, Some(test_grammar_dir.as_path()));
        let mut parser = Parser::new();
        parser.set_language(language).unwrap();
        let code_that_should_hang = "\nHello";
        parser.parse(code_that_should_hang, None).unwrap();
    }
}

View file

@ -15,6 +15,7 @@ use std::{
thread, time,
};
use tree_sitter::{IncludedRangesError, InputEdit, LogType, Parser, Point, Range};
use tree_sitter_proc_macro::retry;
#[test]
fn test_parsing_simple_string() {
@ -149,7 +150,7 @@ fn test_parsing_with_custom_utf8_input() {
)
);
assert_eq!(root.kind(), "source_file");
assert_eq!(root.has_error(), false);
assert!(!root.has_error());
assert_eq!(root.child(0).unwrap().kind(), "function_item");
}
@ -188,7 +189,7 @@ fn test_parsing_with_custom_utf16_input() {
"(source_file (function_item (visibility_modifier) name: (identifier) parameters: (parameters) body: (block (integer_literal))))"
);
assert_eq!(root.kind(), "source_file");
assert_eq!(root.has_error(), false);
assert!(!root.has_error());
assert_eq!(root.child(0).unwrap().kind(), "function_item");
}
@ -277,7 +278,10 @@ fn test_parsing_invalid_chars_at_eof() {
let mut parser = Parser::new();
parser.set_language(get_language("json")).unwrap();
let tree = parser.parse(b"\xdf", None).unwrap();
assert_eq!(tree.root_node().to_sexp(), "(ERROR (UNEXPECTED INVALID))");
assert_eq!(
tree.root_node().to_sexp(),
"(document (ERROR (UNEXPECTED INVALID)))"
);
}
#[test]
@ -340,7 +344,8 @@ fn test_parsing_after_editing_beginning_of_code() {
deleted_length: 0,
inserted_text: b" || 5".to_vec(),
},
);
)
.unwrap();
let mut recorder = ReadRecorder::new(&code);
let tree = parser
@ -387,7 +392,8 @@ fn test_parsing_after_editing_end_of_code() {
deleted_length: 0,
inserted_text: b".d".to_vec(),
},
);
)
.unwrap();
let mut recorder = ReadRecorder::new(&code);
let tree = parser
@ -466,7 +472,8 @@ h + i
deleted_length: 0,
inserted_text: b"1234".to_vec(),
},
);
)
.unwrap();
assert_eq!(
code,
@ -511,7 +518,7 @@ fn test_parsing_after_detecting_error_in_the_middle_of_a_string_token() {
let tree = parser.parse(&source, None).unwrap();
assert_eq!(
tree.root_node().to_sexp(),
"(module (expression_statement (assignment left: (identifier) right: (expression_list (identifier) (string)))))"
"(module (expression_statement (assignment left: (identifier) right: (expression_list (identifier) (string (string_start) (string_content) (string_end))))))"
);
// Delete a suffix of the source code, starting in the middle of the string
@ -530,12 +537,12 @@ fn test_parsing_after_detecting_error_in_the_middle_of_a_string_token() {
let undo = invert_edit(&source, &edit);
let mut tree2 = tree.clone();
perform_edit(&mut tree2, &mut source, &edit);
perform_edit(&mut tree2, &mut source, &edit).unwrap();
tree2 = parser.parse(&source, Some(&tree2)).unwrap();
assert!(tree2.root_node().has_error());
let mut tree3 = tree2.clone();
perform_edit(&mut tree3, &mut source, &undo);
perform_edit(&mut tree3, &mut source, &undo).unwrap();
tree3 = parser.parse(&source, Some(&tree3)).unwrap();
assert_eq!(tree3.root_node().to_sexp(), tree.root_node().to_sexp(),);
}
@ -644,6 +651,7 @@ fn test_parsing_cancelled_by_another_thread() {
// Timeouts
#[test]
#[retry(10)]
fn test_parsing_with_a_timeout() {
let mut parser = Parser::new();
parser.set_language(get_language("json")).unwrap();
@ -662,8 +670,12 @@ fn test_parsing_with_a_timeout() {
None,
);
assert!(tree.is_none());
#[cfg(not(target_arch = "sparc64"))]
assert!(start_time.elapsed().as_micros() < 2000);
#[cfg(target_arch = "sparc64")]
assert!(start_time.elapsed().as_micros() < 8000);
// Continue parsing, but pause after 1 ms of processing.
parser.set_timeout_micros(5000);
let start_time = time::Instant::now();
@ -701,6 +713,7 @@ fn test_parsing_with_a_timeout() {
}
#[test]
#[retry(10)]
fn test_parsing_with_a_timeout_and_a_reset() {
let mut parser = Parser::new();
parser.set_language(get_language("json")).unwrap();
@ -756,6 +769,7 @@ fn test_parsing_with_a_timeout_and_a_reset() {
}
#[test]
#[retry(10)]
fn test_parsing_with_a_timeout_and_implicit_reset() {
allocations::record(|| {
let mut parser = Parser::new();
@ -789,6 +803,7 @@ fn test_parsing_with_a_timeout_and_implicit_reset() {
}
#[test]
#[retry(10)]
fn test_parsing_with_timeout_and_no_completion() {
allocations::record(|| {
let mut parser = Parser::new();
@ -828,7 +843,7 @@ fn test_parsing_with_one_included_range() {
concat!(
"(program (expression_statement (call_expression ",
"function: (member_expression object: (identifier) property: (property_identifier)) ",
"arguments: (arguments (string)))))",
"arguments: (arguments (string (string_fragment))))))",
)
);
assert_eq!(
@ -1177,7 +1192,7 @@ fn test_parsing_with_a_newly_included_range() {
.set_included_ranges(&[simple_range(range1_start, range1_end)])
.unwrap();
let tree = parser
.parse_with(&mut chunked_input(&source_code, 3), None)
.parse_with(&mut chunked_input(source_code, 3), None)
.unwrap();
assert_eq!(
tree.root_node().to_sexp(),
@ -1196,7 +1211,7 @@ fn test_parsing_with_a_newly_included_range() {
])
.unwrap();
let tree2 = parser
.parse_with(&mut chunked_input(&source_code, 3), Some(&tree))
.parse_with(&mut chunked_input(source_code, 3), Some(&tree))
.unwrap();
assert_eq!(
tree2.root_node().to_sexp(),
@ -1220,7 +1235,7 @@ fn test_parsing_with_a_newly_included_range() {
simple_range(range3_start, range3_end),
])
.unwrap();
let tree3 = parser.parse(&source_code, Some(&tree)).unwrap();
let tree3 = parser.parse(source_code, Some(&tree)).unwrap();
assert_eq!(
tree3.root_node().to_sexp(),
concat!(
@ -1297,6 +1312,85 @@ fn test_parsing_with_included_ranges_and_missing_tokens() {
assert_eq!(root.child(3).unwrap().start_byte(), 4);
}
#[test]
/// Regression test for grammars whose patterns include the NUL character:
/// parsing an unterminated `"` must terminate rather than hang at end of
/// file (per the test name; presumably EOF was previously confused with a
/// NUL lookahead — TODO confirm against the lexer fix this accompanies).
///
/// The three grammars are identical except for the pattern's character
/// class, so they are generated from one template instead of three copies.
fn test_grammars_that_can_hang_on_eof() {
    for (grammar_name, pattern) in [
        // a class containing only NUL
        ("test_single_null_char_regex", r"[\\x00]*"),
        // NUL together with the next character
        ("test_null_char_with_next_char_regex", r"[\\x00-\\x01]*"),
        // a range that starts at NUL
        ("test_null_char_with_range_regex", r"[\\x00-\\x7F]*"),
    ] {
        // `pattern` is already JSON-escaped (doubled backslashes), so it can
        // be spliced straight into the grammar JSON.
        let grammar_json = format!(
            r#"
            {{
                "name": "{grammar_name}",
                "rules": {{
                    "source_file": {{
                        "type": "SEQ",
                        "members": [
                            {{ "type": "STRING", "value": "\"" }},
                            {{ "type": "PATTERN", "value": "{pattern}" }},
                            {{ "type": "STRING", "value": "\"" }}
                        ]
                    }}
                }},
                "extras": [ {{ "type": "PATTERN", "value": "\\s" }} ]
            }}
            "#
        );
        let (parser_name, parser_code) =
            generate_parser_for_grammar(grammar_json.as_str()).unwrap();
        let mut parser = Parser::new();
        parser
            .set_language(get_test_language(&parser_name, &parser_code, None))
            .unwrap();
        // Must return (even with an error tree), not loop forever.
        parser.parse("\"", None).unwrap();
    }
}
fn simple_range(start: usize, end: usize) -> Range {
Range {
start_byte: start,

View file

@ -0,0 +1,15 @@
# Internal proc-macro helper crate for the tree-sitter test suite;
# provides the `#[retry]` and `#[test_with_seed]` attributes.
[package]
name = "tree-sitter-tests-proc-macro"
version = "0.0.0"
edition = "2021"
# Never publish this test-only crate to crates.io.
publish = false
rust-version.workspace = true

[lib]
proc-macro = true

[dependencies]
proc-macro2 = "1.0.63"
quote = "1"
rand = "0.8.5"
syn = { version = "1", features = ["full"] }

View file

@ -0,0 +1,137 @@
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::quote;
use syn::{
parse::{Parse, ParseStream},
parse_macro_input, Error, Expr, Ident, ItemFn, LitInt, Token,
};
/// `#[retry(N)]` — re-runs a panicking test up to `N` extra times.
///
/// The original function is moved inside a same-named wrapper; the wrapper
/// calls it under `catch_unwind` and re-raises the last panic only after
/// all `N + 1` attempts have failed.
#[proc_macro_attribute]
pub fn retry(args: TokenStream, input: TokenStream) -> TokenStream {
    let count = parse_macro_input!(args as LitInt);
    let input = parse_macro_input!(input as ItemFn);

    let attrs = input.attrs.clone();
    let name = input.sig.ident.clone();

    TokenStream::from(quote! {
        // BUGFIX: attributes must be emitted with no separator. The previous
        // `#(#attrs),*` inserted a comma between repetitions, which is
        // invalid syntax whenever the function carries more than one
        // attribute (e.g. `#[test] #[ignore]`).
        #(#attrs)*
        fn #name() {
            #input

            for i in 0..=#count {
                let result = std::panic::catch_unwind(|| {
                    #name();
                });

                if result.is_ok() {
                    return;
                }

                // Last attempt failed too — propagate the panic.
                if i == #count {
                    std::panic::resume_unwind(result.unwrap_err());
                }
            }
        }
    })
}
/// `#[test_with_seed(retry = N, seed = EXPR, seed_fn = FN)]` — seeded,
/// retryable test attribute.
///
/// The annotated function must take the seed as its single argument. On
/// panic it is re-run up to `retry` extra times (default 0); when `seed_fn`
/// is supplied, a fresh seed is produced before each retry.
#[proc_macro_attribute]
pub fn test_with_seed(args: TokenStream, input: TokenStream) -> TokenStream {
    // Parsed `retry = …, seed = …, seed_fn = …` attribute arguments.
    struct Args {
        retry: LitInt,
        seed: Expr,
        seed_fn: Option<Ident>,
    }

    impl Parse for Args {
        fn parse(input: ParseStream) -> syn::Result<Self> {
            let mut retry = None;
            let mut seed = None;
            let mut seed_fn = None;
            while !input.is_empty() {
                let name = input.parse::<Ident>()?;
                match name.to_string().as_str() {
                    "retry" => {
                        input.parse::<Token![=]>()?;
                        retry.replace(input.parse()?);
                    }
                    "seed" => {
                        input.parse::<Token![=]>()?;
                        seed.replace(input.parse()?);
                    }
                    "seed_fn" => {
                        input.parse::<Token![=]>()?;
                        seed_fn.replace(input.parse()?);
                    }
                    x => {
                        return Err(Error::new(
                            name.span(),
                            format!("Unsupported parameter `{x}`"),
                        ))
                    }
                }
                if !input.is_empty() {
                    input.parse::<Token![,]>()?;
                }
            }

            // `retry` is optional and defaults to 0 (no retries).
            if retry.is_none() {
                retry.replace(LitInt::new("0", Span::mixed_site()));
            }

            Ok(Args {
                retry: retry.expect("`retry` parameter is required"),
                seed: seed.expect("`seed` parameter is required"),
                seed_fn,
            })
        }
    }

    let Args {
        retry,
        seed,
        seed_fn,
    } = parse_macro_input!(args as Args);
    // Zero-or-one-element iterator: lets `#( … )*` below emit the reseeding
    // statement only when a `seed_fn` was given.
    let seed_fn = seed_fn.iter();

    let func = parse_macro_input!(input as ItemFn);
    let attrs = func.attrs.clone();
    let name = func.sig.ident.clone();

    TokenStream::from(quote! {
        #[test]
        // BUGFIX: emit attributes without separators; the previous
        // `#(#attrs),*` produced a comma between attributes, which is
        // invalid syntax for functions with more than one attribute.
        #(#attrs)*
        fn #name() {
            #func

            let mut seed = #seed;
            for i in 0..=#retry {
                let result = std::panic::catch_unwind(|| {
                    #name(seed);
                });

                if result.is_ok() {
                    return;
                }

                if i == #retry {
                    std::panic::resume_unwind(result.unwrap_err());
                }

                #(
                    seed = #seed_fn();
                )*

                if i < #retry {
                    println!("\nRetry {}/{} with a new seed {}", i + 1, #retry, seed);
                }
            }
        }
    })
}

File diff suppressed because it is too large Load diff

View file

@ -9,7 +9,7 @@ use std::{
use tree_sitter::Point;
use tree_sitter_tags::{c_lib as c, Error, TagsConfiguration, TagsContext};
const PYTHON_TAG_QUERY: &'static str = r#"
const PYTHON_TAG_QUERY: &str = r#"
(
(function_definition
name: (identifier) @name
@ -39,7 +39,7 @@ const PYTHON_TAG_QUERY: &'static str = r#"
attribute: (identifier) @name)) @reference.call
"#;
const JS_TAG_QUERY: &'static str = r#"
const JS_TAG_QUERY: &str = r#"
(
(comment)* @doc .
(class_declaration
@ -68,7 +68,7 @@ const JS_TAG_QUERY: &'static str = r#"
function: (identifier) @name) @reference.call
"#;
const RUBY_TAG_QUERY: &'static str = r#"
const RUBY_TAG_QUERY: &str = r#"
(method
name: (_) @name) @definition.method
@ -359,25 +359,29 @@ fn test_tags_via_c_api() {
);
let c_scope_name = CString::new(scope_name).unwrap();
let result = c::ts_tagger_add_language(
tagger,
c_scope_name.as_ptr(),
language,
JS_TAG_QUERY.as_ptr(),
ptr::null(),
JS_TAG_QUERY.len() as u32,
0,
);
let result = unsafe {
c::ts_tagger_add_language(
tagger,
c_scope_name.as_ptr(),
language,
JS_TAG_QUERY.as_ptr(),
ptr::null(),
JS_TAG_QUERY.len() as u32,
0,
)
};
assert_eq!(result, c::TSTagsError::Ok);
let result = c::ts_tagger_tag(
tagger,
c_scope_name.as_ptr(),
source_code.as_ptr(),
source_code.len() as u32,
buffer,
ptr::null(),
);
let result = unsafe {
c::ts_tagger_tag(
tagger,
c_scope_name.as_ptr(),
source_code.as_ptr(),
source_code.len() as u32,
buffer,
ptr::null(),
)
};
assert_eq!(result, c::TSTagsError::Ok);
let tags = unsafe {
slice::from_raw_parts(
@ -419,8 +423,10 @@ fn test_tags_via_c_api() {
]
);
c::ts_tags_buffer_delete(buffer);
c::ts_tagger_delete(tagger);
unsafe {
c::ts_tags_buffer_delete(buffer);
c::ts_tagger_delete(tagger);
}
});
}

View file

@ -12,7 +12,7 @@ fn test_highlight_test_with_basic_test() {
Some("injections.scm"),
&[
"function".to_string(),
"variable.parameter".to_string(),
"variable".to_string(),
"keyword".to_string(),
],
);
@ -22,7 +22,8 @@ fn test_highlight_test_with_basic_test() {
" // ^ function",
" // ^ keyword",
" return d + e;",
" // ^ variable.parameter",
" // ^ variable",
" // ^ !variable",
"};",
]
.join("\n");
@ -32,18 +33,10 @@ fn test_highlight_test_with_basic_test() {
assert_eq!(
assertions,
&[
Assertion {
position: Point::new(1, 5),
expected_capture_name: "function".to_string()
},
Assertion {
position: Point::new(1, 11),
expected_capture_name: "keyword".to_string()
},
Assertion {
position: Point::new(4, 9),
expected_capture_name: "variable.parameter".to_string()
},
Assertion::new(1, 5, false, String::from("function")),
Assertion::new(1, 11, false, String::from("keyword")),
Assertion::new(4, 9, false, String::from("variable")),
Assertion::new(4, 11, true, String::from("variable")),
]
);
@ -60,6 +53,7 @@ fn test_highlight_test_with_basic_test() {
(Point::new(1, 19), Point::new(1, 20), Highlight(1)), // "d"
(Point::new(4, 2), Point::new(4, 8), Highlight(2)), // "return"
(Point::new(4, 9), Point::new(4, 10), Highlight(1)), // "d"
(Point::new(4, 13), Point::new(4, 14), Highlight(1)), // "e"
]
);
}

View file

@ -16,6 +16,7 @@ fn test_tags_test_with_basic_test() {
" # ^ reference.call",
" return d(e)",
" # ^ reference.call",
" # ^ !variable.parameter",
"",
]
.join("\n");
@ -26,18 +27,10 @@ fn test_tags_test_with_basic_test() {
assert_eq!(
assertions,
&[
Assertion {
position: Point::new(1, 4),
expected_capture_name: "definition.function".to_string(),
},
Assertion {
position: Point::new(3, 9),
expected_capture_name: "reference.call".to_string(),
},
Assertion {
position: Point::new(5, 11),
expected_capture_name: "reference.call".to_string(),
},
Assertion::new(1, 4, false, String::from("definition.function")),
Assertion::new(3, 9, false, String::from("reference.call")),
Assertion::new(5, 11, false, String::from("reference.call")),
Assertion::new(5, 13, true, String::from("variable.parameter")),
]
);

View file

@ -0,0 +1,173 @@
use std::{iter, sync::Arc};
use crate::tests::helpers::fixtures::get_language;
use tree_sitter::{Language, Node, Parser, Point, Query, QueryCursor, TextProvider, Tree};
/// Parses `text` as C source, returning the tree together with the language.
fn parse_text(text: impl AsRef<[u8]>) -> (Tree, Language) {
    let lang = get_language("c");
    let mut c_parser = Parser::new();
    c_parser.set_language(lang).unwrap();
    let tree = c_parser.parse(text, None).unwrap();
    (tree, lang)
}
/// Parses C source supplied chunk-by-chunk through `callback` (the
/// `Parser::parse_with` protocol: called with a byte offset and a `Point`,
/// returns the next chunk, empty when exhausted), then sanity-checks that
/// the first child parsed as a comment.
///
/// Cleanup: removed two redundant `tree.clone()` calls (the root node can be
/// inspected through a borrow) and a commented-out debug line.
fn parse_text_with<T, F>(callback: &mut F) -> (Tree, Language)
where
    T: AsRef<[u8]>,
    F: FnMut(usize, Point) -> T,
{
    let language = get_language("c");
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    let tree = parser.parse_with(callback, None).unwrap();
    assert_eq!("comment", tree.root_node().child(0).unwrap().kind());
    (tree, language)
}
/// Runs the shared `#eq?` comment query against `tree`, pulling node text
/// through the given provider, and checks the first capture is the comment.
fn tree_query<I: AsRef<[u8]>>(tree: &Tree, text: impl TextProvider<I>, language: Language) {
    let query = Query::new(language, "((comment) @c (#eq? @c \"// comment\"))").unwrap();
    let mut query_cursor = QueryCursor::new();
    let mut capture_iter = query_cursor.captures(&query, tree.root_node(), text);

    let (first_match, capture_idx) = capture_iter.next().unwrap();
    let capture = first_match.captures[capture_idx];
    assert_eq!(capture.index as usize, capture_idx);
    assert_eq!("comment", capture.node.kind());
}
/// Parses `parser_text` and verifies the comment query succeeds when its
/// text is served through `text_provider`.
fn check_parsing<I: AsRef<[u8]>>(
    parser_text: impl AsRef<[u8]>,
    text_provider: impl TextProvider<I>,
) {
    let (parsed_tree, lang) = parse_text(parser_text);
    tree_query(&parsed_tree, text_provider, lang);
}
/// Like `check_parsing`, but drives the parser through a chunked read
/// callback instead of a contiguous buffer.
fn check_parsing_callback<T, F, I: AsRef<[u8]>>(
    parser_callback: &mut F,
    text_provider: impl TextProvider<I>,
) where
    T: AsRef<[u8]>,
    F: FnMut(usize, Point) -> T,
{
    let (parsed_tree, lang) = parse_text_with(parser_callback);
    tree_query(&parsed_tree, text_provider, lang);
}
#[test]
fn test_text_provider_for_str_slice() {
    // A plain &str works both as parse input and as a byte-slice provider.
    let source: &str = "// comment";
    check_parsing(source, source.as_bytes());
    check_parsing(source.as_bytes(), source.as_bytes());
}
#[test]
fn test_text_provider_for_string() {
    // An owned String may be consumed, borrowed as bytes, or coerced via AsRef.
    let owned: String = String::from("// comment");
    check_parsing(owned.clone(), owned.as_bytes());
    check_parsing(owned.as_bytes(), owned.as_bytes());
    check_parsing(<_ as AsRef<[u8]>>::as_ref(&owned), owned.as_bytes());
}
#[test]
fn test_text_provider_for_box_of_str_slice() {
    // A boxed str should be usable through every borrowing route.
    let boxed: Box<str> = String::from("// comment").into_boxed_str();
    check_parsing(boxed.as_bytes(), boxed.as_bytes());
    check_parsing(<_ as AsRef<str>>::as_ref(&boxed), boxed.as_bytes());
    check_parsing(boxed.as_ref(), boxed.as_ref().as_bytes());
    check_parsing(boxed.as_ref(), boxed.as_bytes());
}
#[test]
fn test_text_provider_for_box_of_bytes_slice() {
    // Boxed byte slices: `as_ref()` and deref (`&*`) are both accepted.
    let byte_box: Box<[u8]> = String::from("// comment")
        .into_boxed_str()
        .into_boxed_bytes();
    check_parsing(byte_box.as_ref(), byte_box.as_ref());
    check_parsing(byte_box.as_ref(), &*byte_box);
    check_parsing(&*byte_box, &*byte_box);
}
#[test]
fn test_text_provider_for_vec_of_bytes() {
    // A Vec<u8> dereferences to a byte slice for both roles.
    let buffer: Vec<u8> = b"// comment".to_vec();
    check_parsing(&*buffer, &*buffer);
}
#[test]
fn test_text_provider_for_arc_of_bytes_slice() {
    // Arc<[u8]> works by deref, by as_ref, and by (cheap) clone.
    let bytes: Vec<u8> = String::from("// comment").into_bytes();
    let shared: Arc<[u8]> = Arc::from(bytes);
    check_parsing(&*shared, &*shared);
    check_parsing(shared.as_ref(), shared.as_ref());
    check_parsing(shared.clone(), shared.as_ref());
}
#[test]
fn test_text_provider_callback_with_str_slice() {
    let text: &str = "// comment";
    // Provider closures can yield &str items directly.
    check_parsing(text, |_node: Node<'_>| iter::once(text));
    check_parsing_callback(
        // Serve the whole text on the first call, then empty chunks.
        &mut |offset, _point| {
            if offset < text.len() {
                text.as_bytes()
            } else {
                &[]
            }
        },
        |_node: Node<'_>| iter::once(text),
    );
}
#[test]
fn test_text_provider_callback_with_owned_string_slice() {
    let text: &str = "// comment";
    check_parsing_callback(
        // Serve the whole text on the first call, then empty chunks.
        &mut |offset, _point| {
            if offset < text.len() {
                text.as_bytes()
            } else {
                &[]
            }
        },
        // The provider may hand out a freshly owned String per node.
        |_node: Node<'_>| iter::once(text.to_owned()),
    );
}
#[test]
fn test_text_provider_callback_with_owned_bytes_vec_slice() {
    let text: &str = "// comment";
    check_parsing_callback(
        // Serve the whole text on the first call, then empty chunks.
        &mut |offset, _point| {
            if offset < text.len() {
                text.as_bytes()
            } else {
                &[]
            }
        },
        // The provider may hand out an owned Vec<u8> per node.
        |_node: Node<'_>| iter::once(text.to_owned().into_bytes()),
    );
}
#[test]
fn test_text_provider_callback_with_owned_arc_of_bytes_slice() {
    let text: &str = "// comment";
    check_parsing_callback(
        // Serve the whole text on the first call, then empty chunks.
        &mut |offset, _point| {
            if offset < text.len() {
                text.as_bytes()
            } else {
                &[]
            }
        },
        // The provider may hand out an Arc<[u8]> per node.
        |_node: Node<'_>| {
            let shared: Arc<[u8]> = text.to_owned().into_bytes().into();
            iter::once(shared)
        },
    );
}

View file

@ -306,7 +306,7 @@ fn test_tree_cursor() {
.parse(
"
struct Stuff {
a: A;
a: A,
b: Option<B>,
}
",
@ -331,6 +331,88 @@ fn test_tree_cursor() {
assert!(cursor.goto_next_sibling());
assert_eq!(cursor.node().kind(), "field_declaration_list");
assert_eq!(cursor.node().is_named(), true);
assert!(cursor.goto_last_child());
assert_eq!(cursor.node().kind(), "}");
assert_eq!(cursor.node().is_named(), false);
assert_eq!(cursor.node().start_position(), Point { row: 4, column: 16 });
assert!(cursor.goto_previous_sibling());
assert_eq!(cursor.node().kind(), ",");
assert_eq!(cursor.node().is_named(), false);
assert_eq!(cursor.node().start_position(), Point { row: 3, column: 32 });
assert!(cursor.goto_previous_sibling());
assert_eq!(cursor.node().kind(), "field_declaration");
assert_eq!(cursor.node().is_named(), true);
assert_eq!(cursor.node().start_position(), Point { row: 3, column: 20 });
assert!(cursor.goto_previous_sibling());
assert_eq!(cursor.node().kind(), ",");
assert_eq!(cursor.node().is_named(), false);
assert_eq!(cursor.node().start_position(), Point { row: 2, column: 24 });
assert!(cursor.goto_previous_sibling());
assert_eq!(cursor.node().kind(), "field_declaration");
assert_eq!(cursor.node().is_named(), true);
assert_eq!(cursor.node().start_position(), Point { row: 2, column: 20 });
assert!(cursor.goto_previous_sibling());
assert_eq!(cursor.node().kind(), "{");
assert_eq!(cursor.node().is_named(), false);
assert_eq!(cursor.node().start_position(), Point { row: 1, column: 29 });
let mut copy = tree.walk();
copy.reset_to(cursor);
assert_eq!(copy.node().kind(), "{");
assert_eq!(copy.node().is_named(), false);
assert!(copy.goto_parent());
assert_eq!(copy.node().kind(), "field_declaration_list");
assert_eq!(copy.node().is_named(), true);
assert!(copy.goto_parent());
assert_eq!(copy.node().kind(), "struct_item");
}
#[test]
/// Walks three sibling line comments from last to first with
/// `goto_previous_sibling`, then confirms the move fails at the first one.
fn test_tree_cursor_previous_sibling() {
    let mut parser = Parser::new();
    parser.set_language(get_language("rust")).unwrap();
    let text = "
    // Hi there
    // This is fun!
    // Another one!
    ";
    let tree = parser.parse(text, None).unwrap();

    let mut cursor = tree.walk();
    assert_eq!(cursor.node().kind(), "source_file");
    assert!(cursor.goto_last_child());

    // Comments in reverse document order, as the cursor will visit them.
    let expected_comments = ["// Another one!", "// This is fun!", "// Hi there"];
    for (i, expected) in expected_comments.iter().enumerate() {
        assert_eq!(cursor.node().kind(), "line_comment");
        assert_eq!(cursor.node().utf8_text(text.as_bytes()).unwrap(), *expected);
        // Moving backward succeeds between comments and fails past the first.
        assert_eq!(
            cursor.goto_previous_sibling(),
            i + 1 < expected_comments.len()
        );
    }
}
#[test]
@ -620,7 +702,7 @@ fn get_changed_ranges(
source_code: &mut Vec<u8>,
edit: Edit,
) -> Vec<Range> {
perform_edit(tree, source_code, &edit);
perform_edit(tree, source_code, &edit).unwrap();
let new_tree = parser.parse(&source_code, Some(tree)).unwrap();
let result = tree.changed_ranges(&new_tree).collect();
*tree = new_tree;