Add random mutation tests
Parent: e305012b31
Commit: 233d616ebf
12 changed files with 443 additions and 127 deletions
Cargo.lock (generated): 1 change
@@ -583,6 +583,7 @@ dependencies = [
 "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "libloading 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
 "rand 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "regex-syntax 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "rsass 0.9.6 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -36,4 +36,5 @@ version = "0.4.6"
features = ["std"]

[dev-dependencies]
rand = "0.6.4"
spin = "0.5"
@@ -36,6 +36,12 @@ pub enum TestEntry {
    },
}

impl Default for TestEntry {
    fn default() -> Self {
        TestEntry::Group { name: String::new(), children: Vec::new() }
    }
}

pub fn run_tests_at_path(
    language: Language,
    path: &Path,
@@ -160,7 +166,7 @@ fn run_tests(
pub fn parse_tests(path: &Path) -> io::Result<TestEntry> {
    let name = path
        .file_name()
        .file_stem()
        .and_then(|s| s.to_str())
        .unwrap_or("")
        .to_string();
@@ -40,13 +40,15 @@ pub fn stop_recording() {
    recorder.enabled = false;

    if !recorder.outstanding_allocations.is_empty() {
        let mut allocation_indices = recorder
            .outstanding_allocations
            .iter()
            .map(|e| e.1)
            .collect::<Vec<_>>();
        allocation_indices.sort_unstable();
        panic!(
            "Leaked allocation indices: {:?}",
            recorder
                .outstanding_allocations
                .iter()
                .map(|e| e.1)
                .collect::<Vec<_>>()
            allocation_indices
        );
    }
}
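Note: the corpus tests below bracket every parse with start_recording/stop_recording and drop every Tree and Parser before the closing call, so a leak surfaces as a panic listing the sorted allocation indices. A rough sketch of that bracket (assuming tree_sitter::{Language, Parser} and the allocations module above are in scope; the body merely stands in for a real trial):

fn leak_checked_parse(language: Language, input: &[u8]) -> String {
    allocations::start_recording();
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    let tree = parser.parse_utf8(&mut |i, _| &input[i..], None).unwrap();
    let sexp = tree.root_node().to_sexp();
    // Everything allocated inside the bracket must be dropped before the check,
    // otherwise stop_recording() panics with the leaked indices.
    drop(tree);
    drop(parser);
    allocations::stop_recording();
    sexp
}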
@@ -1,11 +1,14 @@
use super::allocations;
use super::fixtures::{fixtures_dir, get_language, get_test_language};
use super::random::Rand;
use crate::generate;
use crate::test::{parse_tests, print_diff, print_diff_key, TestEntry};
use crate::util;
use std::fs;
use tree_sitter::{Language, LogType, Parser};
use std::{env, fs, time, usize};
use tree_sitter::{InputEdit, LogType, Parser, Point, Tree};

const EDIT_COUNT: usize = 3;
const TRIAL_COUNT: usize = 10;
const LANGUAGES: &'static [&'static str] = &[
    "bash",
    "c",
@@ -18,19 +21,30 @@ const LANGUAGES: &'static [&'static str] = &[
];

lazy_static! {
    static ref LANGUAGE_FILTER: Option<String> =
        std::env::var("TREE_SITTER_TEST_LANGUAGE_FILTER").ok();
    static ref EXAMPLE_FILTER: Option<String> =
        std::env::var("TREE_SITTER_TEST_EXAMPLE_FILTER").ok();
    static ref LOG_ENABLED: bool = std::env::var("TREE_SITTER_ENABLE_LOG").is_ok();
    static ref LOG_GRAPH_ENABLED: bool = std::env::var("TREE_SITTER_ENABLE_LOG_GRAPHS").is_ok();
    static ref LOG_ENABLED: bool = env::var("TREE_SITTER_TEST_ENABLE_LOG").is_ok();
    static ref LOG_GRAPH_ENABLED: bool = env::var("TREE_SITTER_TEST_ENABLE_LOG_GRAPHS").is_ok();
    static ref LANGUAGE_FILTER: Option<String> = env::var("TREE_SITTER_TEST_LANGUAGE_FILTER").ok();
    static ref EXAMPLE_FILTER: Option<String> = env::var("TREE_SITTER_TEST_EXAMPLE_FILTER").ok();
    static ref TRIAL_FILTER: Option<usize> = env::var("TREE_SITTER_TEST_TRIAL_FILTER")
        .map(|s| usize::from_str_radix(&s, 10).unwrap())
        .ok();
    pub static ref SEED: usize = env::var("TREE_SITTER_TEST_SEED")
        .map(|s| usize::from_str_radix(&s, 10).unwrap())
        .unwrap_or(
            time::SystemTime::now()
                .duration_since(time::UNIX_EPOCH)
                .unwrap()
                .as_secs() as usize,
        );
}

#[test]
fn test_real_language_corpus_files() {
    eprintln!("\n\nRandom seed: {}\n", *SEED);
    let grammars_dir = fixtures_dir().join("grammars");
    let error_corpus_dir = fixtures_dir().join("error_corpus");

    let mut did_fail = false;
    let mut failure_count = 0;
    for language_name in LANGUAGES.iter().cloned() {
        if let Some(filter) = LANGUAGE_FILTER.as_ref() {
            if language_name != filter.as_str() {
@@ -38,51 +52,133 @@ fn test_real_language_corpus_files() {
            }
        }

        eprintln!("language: {:?}", language_name);

        let language = get_language(language_name);
        let corpus_dir = grammars_dir.join(language_name).join("corpus");
        let test = parse_tests(&corpus_dir).unwrap();
        did_fail |= run_mutation_tests(language, test);
    }
        let error_corpus_file = error_corpus_dir.join(&format!("{}_errors.txt", language_name));
        let main_tests = parse_tests(&corpus_dir).unwrap();
        let error_tests = parse_tests(&error_corpus_file).unwrap_or(TestEntry::default());
        let mut tests = flatten_tests(main_tests);
        tests.extend(flatten_tests(error_tests));

    if did_fail {
        panic!("Corpus tests failed");
        if !tests.is_empty() {
            eprintln!("language: {:?}", language_name);
        }

        for (example_name, input, expected_output) in tests {
            eprintln!(" example: {:?}", example_name);

            if TRIAL_FILTER.map_or(true, |t| t == 0) {
                allocations::start_recording();
                let mut log_session = None;
                let mut parser = get_parser(&mut log_session, "log.html");
                parser.set_language(language).unwrap();
                let tree = parser.parse_utf8(&mut |i, _| &input[i..], None).unwrap();
                let actual_output = tree.root_node().to_sexp();
                drop(tree);
                drop(parser);
                if actual_output != expected_output {
                    print_diff_key();
                    print_diff(&actual_output, &expected_output);
                    println!("");
                    failure_count += 1;
                    continue;
                }
                allocations::stop_recording();
            }

            let mut parser = Parser::new();
            parser.set_language(language).unwrap();
            let tree = parser
                .parse_utf8(&mut |i, _| input.get(i..).unwrap_or(&[]), None)
                .unwrap();
            drop(parser);

            for trial in 1..=TRIAL_COUNT {
                if TRIAL_FILTER.map_or(true, |filter| filter == trial) {
                    let mut rand = Rand::new(*SEED + trial);

                    allocations::start_recording();
                    let mut log_session = None;
                    let mut parser = get_parser(&mut log_session, "log.html");
                    parser.set_language(language).unwrap();
                    let mut tree = tree.clone();
                    let mut input = input.clone();

                    if *LOG_GRAPH_ENABLED {
                        eprintln!("{}\n", String::from_utf8_lossy(&input));
                    }

                    // Perform a random series of edits and reparse.
                    let mut undo_stack = Vec::new();
                    for _ in 0..EDIT_COUNT {
                        let edit = get_random_edit(&mut rand, &input);
                        undo_stack.push(invert_edit(&input, &edit));
                        perform_edit(&mut tree, &mut input, &edit);
                    }
                    if *LOG_GRAPH_ENABLED {
                        eprintln!("{}\n", String::from_utf8_lossy(&input));
                    }

                    let mut tree2 = parser
                        .parse_utf8(&mut |i, _| input.get(i..).unwrap_or(&[]), Some(&tree))
                        .unwrap();

                    // Check that the new tree is consistent.
                    check_consistent_sizes(&tree2, &input);
                    check_changed_ranges(&tree, &tree2, &input);

                    // Undo all of the edits and re-parse again.
                    while let Some(edit) = undo_stack.pop() {
                        perform_edit(&mut tree2, &mut input, &edit);
                    }
                    if *LOG_GRAPH_ENABLED {
                        eprintln!("{}\n", String::from_utf8_lossy(&input));
                    }

                    let tree3 = parser
                        .parse_utf8(&mut |i, _| input.get(i..).unwrap_or(&[]), Some(&tree2))
                        .unwrap();

                    // Check that the edited tree is consistent.
                    check_consistent_sizes(&tree3, &input);
                    check_changed_ranges(&tree2, &tree3, &input);

                    // Verify that the final tree matches the expectation from the corpus.
                    let actual_output = tree3.root_node().to_sexp();
                    if actual_output != expected_output {
                        println!("Incorrect parse for {} - {} - trial {}", language_name, example_name, trial);
                        print_diff_key();
                        print_diff(&actual_output, &expected_output);
                        println!("");
                        failure_count += 1;
                        // break;
                    }

                    drop(tree);
                    drop(tree2);
                    drop(tree3);
                    drop(parser);
                    allocations::stop_recording();
                }
            }
        }
    }
    if failure_count > 0 {
        panic!("{} corpus tests failed", failure_count);
    }
}

#[test]
fn test_error_corpus_files() {
    let corpus_dir = fixtures_dir().join("error_corpus");

    let mut did_fail = false;
    for entry in fs::read_dir(&corpus_dir).unwrap() {
        let entry = entry.unwrap();
        let language_name = entry.file_name();
        let language_name = language_name.to_str().unwrap().replace("_errors.txt", "");
        if let Some(filter) = LANGUAGE_FILTER.as_ref() {
            if language_name != filter.as_str() {
                continue;
            }
        }

        eprintln!("language: {:?}", language_name);

        let test = parse_tests(&entry.path()).unwrap();
        let language = get_language(&language_name);
        did_fail |= run_mutation_tests(language, test);
    }

    if did_fail {
        panic!("Corpus tests failed");
    }
struct Edit {
    position: usize,
    deleted_length: usize,
    inserted_text: Vec<u8>,
}

#[test]
fn test_feature_corpus_files() {
    let test_grammars_dir = fixtures_dir().join("test_grammars");

    let mut did_fail = false;
    let mut failure_count = 0;
    for entry in fs::read_dir(&test_grammars_dir).unwrap() {
        let entry = entry.unwrap();
        if !entry.metadata().unwrap().is_dir() {
@@ -97,8 +193,6 @@ fn test_feature_corpus_files() {
            }
        }

        eprintln!("test language: {:?}", language_name);

        let test_path = entry.path();
        let grammar_path = test_path.join("grammar.json");
        let error_message_path = test_path.join("expected_error.txt");
@@ -106,79 +200,156 @@
        let generate_result = generate::generate_parser_for_grammar(&grammar_json);

        if error_message_path.exists() {
            if EXAMPLE_FILTER.is_some() {
                continue;
            }

            eprintln!("test language: {:?}", language_name);

            let expected_message = fs::read_to_string(&error_message_path).unwrap();
            if let Err(e) = generate_result {
                if e.0 != expected_message {
                    panic!(
                    eprintln!(
                        "Unexpected error message.\n\nExpected:\n\n{}\nActual:\n\n{}\n",
                        expected_message, e.0
                    );
                    failure_count += 1;
                }
            } else {
                panic!(
                eprintln!(
                    "Expected error message but got none for test grammar '{}'",
                    language_name
                );
                failure_count += 1;
            }
        } else {
            let corpus_path = test_path.join("corpus.txt");
            let c_code = generate_result.unwrap().1;
            let language = get_test_language(language_name, c_code, &test_path);
            let test = parse_tests(&corpus_path).unwrap();
            did_fail |= run_mutation_tests(language, test);
        }
    }
            let tests = flatten_tests(test);

    if did_fail {
        panic!("Corpus tests failed");
    }
}
            if !tests.is_empty() {
                eprintln!("test language: {:?}", language_name);
            }

fn run_mutation_tests(language: Language, test: TestEntry) -> bool {
    match test {
        TestEntry::Example {
            name,
            input,
            output,
        } => {
            if let Some(filter) = EXAMPLE_FILTER.as_ref() {
                if !name.contains(filter.as_str()) {
                    return false;
            for (name, input, expected_output) in tests {
                eprintln!(" example: {:?}", name);

                allocations::start_recording();
                let mut log_session = None;
                let mut parser = get_parser(&mut log_session, "log.html");
                parser.set_language(language).unwrap();
                let tree = parser.parse_utf8(&mut |i, _| &input[i..], None).unwrap();
                let actual_output = tree.root_node().to_sexp();
                drop(tree);
                drop(parser);
                if actual_output != expected_output {
                    print_diff_key();
                    print_diff(&actual_output, &expected_output);
                    println!("");
                    failure_count += 1;
                    continue;
                }
            }

            eprintln!(" example: {:?}", name);

            allocations::start_recording();
            let mut log_session = None;
            let mut parser = get_parser(&mut log_session, "log.html");
            parser.set_language(language).unwrap();
            let tree = parser
                .parse_utf8(&mut |byte_offset, _| &input[byte_offset..], None)
                .unwrap();
            let actual = tree.root_node().to_sexp();
            drop(tree);
            drop(parser);
            if actual != output {
                print_diff_key();
                print_diff(&actual, &output);
                println!("");
                true
            } else {
                allocations::stop_recording();
                false
            }
        }
        TestEntry::Group { children, .. } => {
            let mut result = false;
            for child in children {
                result |= run_mutation_tests(language, child);
            }
            result
        }
    if failure_count > 0 {
        panic!("{} corpus tests failed", failure_count);
    }
}

fn get_random_edit(rand: &mut Rand, input: &Vec<u8>) -> Edit {
    let choice = rand.unsigned(10);
    if choice < 2 {
        // Insert text at end
        let inserted_text = rand.words(3);
        Edit {
            position: input.len(),
            deleted_length: 0,
            inserted_text,
        }
    } else if choice < 5 {
        // Delete text from the end
        let mut deleted_length = rand.unsigned(10);
        if deleted_length > input.len() {
            deleted_length = input.len();
        }
        Edit {
            position: input.len() - deleted_length,
            deleted_length,
            inserted_text: vec![],
        }
    } else if choice < 8 {
        // Insert at a random position
        let position = rand.unsigned(input.len());
        let word_count = 1 + rand.unsigned(3);
        let inserted_text = rand.words(word_count);
        Edit {
            position,
            deleted_length: 0,
            inserted_text,
        }
    } else {
        // Replace at random position
        let position = rand.unsigned(input.len());
        let deleted_length = rand.unsigned(input.len() - position);
        let word_count = 1 + rand.unsigned(3);
        let inserted_text = rand.words(word_count);
        Edit {
            position,
            deleted_length,
            inserted_text,
        }
    }
}

fn invert_edit(input: &Vec<u8>, edit: &Edit) -> Edit {
    let position = edit.position;
    let removed_content = &input[position..(position + edit.deleted_length)];
    Edit {
        position,
        deleted_length: edit.inserted_text.len(),
        inserted_text: removed_content.to_vec(),
    }
}

fn perform_edit(tree: &mut Tree, input: &mut Vec<u8>, edit: &Edit) {
    let start_byte = edit.position;
    let old_end_byte = edit.position + edit.deleted_length;
    let new_end_byte = edit.position + edit.inserted_text.len();
    let start_position = position_for_offset(input, start_byte);
    let old_end_position = position_for_offset(input, old_end_byte);
    input.splice(start_byte..old_end_byte, edit.inserted_text.iter().cloned());
    let new_end_position = position_for_offset(input, new_end_byte);
    tree.edit(&InputEdit {
        start_byte,
        old_end_byte,
        new_end_byte,
        start_position,
        old_end_position,
        new_end_position,
    });
}

fn position_for_offset(input: &Vec<u8>, offset: usize) -> Point {
    let mut result = Point { row: 0, column: 0 };
    for c in &input[0..offset] {
        if *c as char == '\n' {
            result.row += 1;
            result.column = 0;
        } else {
            result.column += 1;
        }
    }
    result
}

fn check_consistent_sizes(tree: &Tree, input: &Vec<u8>) {}

fn check_changed_ranges(old_tree: &Tree, new_tree: &Tree, input: &Vec<u8>) {}

fn get_parser(session: &mut Option<util::LogSession>, log_filename: &str) -> Parser {
    let mut parser = Parser::new();
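Note: the undo stack in the mutation loop relies on invert_edit returning an edit that, applied after the original edit, exactly restores the previous bytes. A minimal round-trip sketch (assuming the Edit struct and invert_edit above are in scope; the local splice_edit helper is a hypothetical stand-in for the byte-level half of perform_edit, which also updates a Tree):

// Hypothetical stand-in for the byte-splicing part of perform_edit().
fn splice_edit(input: &mut Vec<u8>, edit: &Edit) {
    input.splice(
        edit.position..edit.position + edit.deleted_length,
        edit.inserted_text.iter().cloned(),
    );
}

fn main() {
    let original = b"fn main() {}".to_vec();
    let mut input = original.clone();
    let edit = Edit {
        position: 3,
        deleted_length: 4,
        inserted_text: b"start".to_vec(),
    };
    // The inverse must be computed against the bytes *before* the edit is applied.
    let undo = invert_edit(&input, &edit);
    splice_edit(&mut input, &edit);
    assert_eq!(input, b"fn start() {}".to_vec());
    splice_edit(&mut input, &undo);
    assert_eq!(input, original); // the inverse edit restores the original bytes
}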
@@ -196,3 +367,38 @@ fn get_parser(session: &mut Option<util::LogSession>, log_filename: &str) -> Parser {

    parser
}

fn flatten_tests(test: TestEntry) -> Vec<(String, Vec<u8>, String)> {
    fn helper(test: TestEntry, prefix: &str, result: &mut Vec<(String, Vec<u8>, String)>) {
        match test {
            TestEntry::Example {
                mut name,
                input,
                output,
            } => {
                if !prefix.is_empty() {
                    name.insert_str(0, " - ");
                    name.insert_str(0, prefix);
                }
                if let Some(filter) = EXAMPLE_FILTER.as_ref() {
                    if !name.contains(filter.as_str()) {
                        return;
                    }
                }
                result.push((name, input, output));
            }
            TestEntry::Group { mut name, children } => {
                if !prefix.is_empty() {
                    name.insert_str(0, " - ");
                    name.insert_str(0, prefix);
                }
                for child in children {
                    helper(child, &name, result);
                }
            }
        }
    }
    let mut result = Vec::new();
    helper(test, "", &mut result);
    result
}
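Note: a small sketch of what flatten_tests produces for a nested corpus entry (assuming TestEntry and flatten_tests above, with no TREE_SITTER_TEST_EXAMPLE_FILTER set; the example and its s-expression output are invented):

fn demo_flatten() {
    let corpus = TestEntry::Group {
        name: "expressions".to_string(),
        children: vec![TestEntry::Example {
            name: "binary operators".to_string(),
            input: b"a + b".to_vec(),
            output: "(program (binary_expression))".to_string(),
        }],
    };
    let flattened = flatten_tests(corpus);
    // Group names become prefixes joined by " - ", and EXAMPLE_FILTER is
    // matched against this full flattened name.
    assert_eq!(flattened[0].0, "expressions - binary operators");
}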
@@ -1,4 +1,5 @@
mod allocations;
mod corpuses;
mod fixtures;
mod random;
mod parser_api;
@@ -2,10 +2,6 @@ use super::fixtures::get_language
use std::thread;
use tree_sitter::{InputEdit, Language, LogType, Parser, Point, PropertySheet};

fn rust() -> Language {
    get_language("rust")
}

#[test]
fn test_basic_parsing() {
    let mut parser = Parser::new();
@@ -505,3 +501,7 @@ fn test_parallel_parsing() {

    assert_eq!(child_count_differences, &[1, 2, 3, 4]);
}

fn rust() -> Language {
    get_language("rust")
}
cli/src/tests/random.rs (new file): 41 lines
@@ -0,0 +1,41 @@
use rand::distributions::Alphanumeric;
use rand::prelude::{Rng, SeedableRng, SmallRng};

const OPERATORS: &[char] = &[
    '+', '-', '<', '>', '(', ')', '*', '/', '&', '|', '!', ',', '.',
];

pub struct Rand(SmallRng);

impl Rand {
    pub fn new(seed: usize) -> Self {
        Rand(SmallRng::seed_from_u64(seed as u64))
    }

    pub fn unsigned(&mut self, max: usize) -> usize {
        self.0.gen_range(0, max + 1)
    }

    pub fn words(&mut self, max_count: usize) -> Vec<u8> {
        let mut result = Vec::new();
        let word_count = self.unsigned(max_count);
        for i in 0..word_count {
            if i > 0 {
                if self.unsigned(5) == 0 {
                    result.push('\n' as u8);
                } else {
                    result.push(' ' as u8);
                }
            }
            if self.unsigned(3) == 0 {
                let index = self.unsigned(OPERATORS.len() - 1);
                result.push(OPERATORS[index] as u8);
            } else {
                for _ in 0..self.unsigned(8) {
                    result.push(self.0.sample(Alphanumeric) as u8);
                }
            }
        }
        result
    }
}
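Note: a short sketch of how this generator behaves (assuming the Rand type above is in scope). Seeding is what makes a failing trial reproducible via TREE_SITTER_TEST_SEED, and unsigned(max) is inclusive of max:

fn demo_rand() {
    let mut a = Rand::new(42);
    let mut b = Rand::new(42);
    // Identical seeds produce identical edit text, so a failure can be replayed.
    assert_eq!(a.words(3), b.words(3));
    // unsigned(max) draws uniformly from 0..=max.
    let n = a.unsigned(10);
    assert!(n <= 10);
}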
@@ -18,6 +18,7 @@ use std::io::{self, Read, Seek};
use std::marker::PhantomData;
use std::os::raw::{c_char, c_void};
use std::ptr;
use std::slice;
use std::str;
use std::u16;
@@ -427,6 +428,18 @@ impl Tree {
    ) -> TreePropertyCursor<'a, P> {
        TreePropertyCursor::new(self, property_sheet, source)
    }

    pub fn changed_ranges(&self, other: &Tree) -> Vec<Range> {
        unsafe {
            let mut count = 0;
            let ptr =
                ffi::ts_tree_get_changed_ranges(self.0, other.0, &mut count as *mut _ as *mut u32);
            let ranges = slice::from_raw_parts(ptr, count);
            let result = ranges.into_iter().map(|r| r.clone().into()).collect();
            free(ptr as *mut c_void);
            result
        }
    }
}

unsafe impl Send for Tree {}
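Note: a hedged usage sketch for the new changed_ranges binding (assuming tree_sitter::{InputEdit, Language, Parser, Range} are in scope and that the language and the before/after sources come from the caller): edit the old tree, reparse incrementally, then ask which byte ranges differ between the two trees:

fn ranges_after_edit(
    language: Language,
    old_source: &[u8],
    new_source: &[u8],
    edit: &InputEdit,
) -> Vec<Range> {
    let mut parser = Parser::new();
    parser.set_language(language).unwrap();
    // Fresh parse of the old source.
    let mut old_tree = parser
        .parse_utf8(&mut |i, _| old_source.get(i..).unwrap_or(&[]), None)
        .unwrap();
    // Tell the old tree about the edit, then reparse incrementally.
    old_tree.edit(edit);
    let new_tree = parser
        .parse_utf8(&mut |i, _| new_source.get(i..).unwrap_or(&[]), Some(&old_tree))
        .unwrap();
    old_tree.changed_ranges(&new_tree)
}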
@@ -558,10 +571,6 @@ impl<'tree> Node<'tree> {
    }

    pub fn to_sexp(&self) -> String {
        extern "C" {
            fn free(pointer: *mut c_void);
        }

        let c_string = unsafe { ffi::ts_node_string(self.0) };
        let result = unsafe { CStr::from_ptr(c_string) }
            .to_str()
@@ -788,6 +797,17 @@ impl Into<ffi::TSRange> for Range {
    }
}

impl From<ffi::TSRange> for Range {
    fn from(range: ffi::TSRange) -> Self {
        Self {
            start_byte: range.start_byte as usize,
            end_byte: range.end_byte as usize,
            start_point: range.start_point.into(),
            end_point: range.end_point.into(),
        }
    }
}

impl<P> PropertySheet<P> {
    pub fn new(language: Language, json: &str) -> Result<Self, PropertySheetError>
    where
@@ -860,3 +880,7 @@ impl<P> PropertySheet<P> {
        })
    }
}

extern "C" {
    fn free(pointer: *mut c_void);
}
@@ -28,10 +28,10 @@
    fputs("\n\n", self->dot_graph_file); \
  }

#define LOG_TREE() \
  if (self->dot_graph_file) { \
    ts_subtree_print_dot_graph(self->finished_tree, self->language, self->dot_graph_file); \
    fputs("\n", self->dot_graph_file); \
#define LOG_TREE(tree) \
  if (self->dot_graph_file) { \
    ts_subtree_print_dot_graph(tree, self->language, self->dot_graph_file); \
    fputs("\n", self->dot_graph_file); \
  }

#define SYM_NAME(symbol) ts_language_symbol_name(self->language, symbol)
@@ -417,6 +417,13 @@ static Subtree ts_parser__lex(TSParser *self, StackVersion version, TSStateId pa
      parse_state,
      self->language
    );

    LOG(
      "lexed_lookahead sym:%s, size:%u, character:'%c'",
      SYM_NAME(ts_subtree_symbol(result)),
      ts_subtree_total_size(result).bytes,
      first_error_character
    );
  } else {
    if (self->lexer.token_end_position.bytes < self->lexer.token_start_position.bytes) {
      self->lexer.token_start_position = self->lexer.token_end_position;
@@ -467,13 +474,14 @@ static Subtree ts_parser__lex(TSParser *self, StackVersion version, TSStateId pa
        length
      );
    }

    LOG(
      "lexed_lookahead sym:%s, size:%u",
      SYM_NAME(ts_subtree_symbol(result)),
      ts_subtree_total_size(result).bytes
    );
  }

  LOG(
    "lexed_lookahead sym:%s, size:%u",
    SYM_NAME(ts_subtree_symbol(result)),
    ts_subtree_total_size(result).bytes
  );
  return result;
}
@@ -1623,6 +1631,7 @@ TSTree *ts_parser_parse(TSParser *self, const TSTree *old_tree, TSInput input) {
    );
    reusable_node_reset(&self->reusable_node, old_tree->root);
    LOG("parse_after_edit");
    LOG_TREE(self->old_tree);
    for (unsigned i = 0; i < self->included_range_differences.size; i++) {
      TSRange *range = &self->included_range_differences.contents[i];
      LOG("different_included_range %u - %u", range->start_byte, range->end_byte);
@@ -1681,7 +1690,7 @@ TSTree *ts_parser_parse(TSParser *self, const TSTree *old_tree, TSInput input) {

  ts_subtree_balance(self->finished_tree, &self->tree_pool, self->language);
  LOG("done");
  LOG_TREE();
  LOG_TREE(self->finished_tree);

  TSTree *result = ts_tree_new(
    self->finished_tree,
@@ -879,7 +879,7 @@ void ts_subtree__print_dot_graph(const Subtree *self, uint32_t start_offset,
    "error-cost: %u\n"
    "has-changes: %u\n"
    "repeat-depth: %u\n"
    "lookahead-bytes: %u\"]\n",
    "lookahead-bytes: %u",
    start_offset, end_offset,
    ts_subtree_parse_state(*self),
    ts_subtree_error_cost(*self),
@@ -888,6 +888,12 @@
    ts_subtree_lookahead_bytes(*self)
  );

  if (ts_subtree_is_error(*self) && ts_subtree_child_count(*self) == 0) {
    fprintf(f, "\ncharacter: '%c'", self->ptr->lookahead_char);
  }

  fprintf(f, "\"]\n");

  uint32_t child_start_offset = start_offset;
  uint32_t structural_child_index = 0;
  const TSSymbol *alias_sequence = ts_language_alias_sequence(
script/test: 43 changes
@@ -10,19 +10,23 @@ USAGE

OPTIONS

  -h  print this message
  -h  Print this message

  -a  Compile C code with the Clang static analyzer

  -l  run only the corpus tests for the given language
  -l  Run only the corpus tests for the given language

  -e  run only the corpus tests whose name contain the given string
  -e  Run only the corpus tests whose name contain the given string

  -s  set the seed used to control random behavior
  -t  Run only the given trial number of randomized test

  -d  print parsing log to stderr
  -s  Set the seed used to control random behavior

  -D  pipe tests' stderr to \`dot(1)\` to render an SVG log
  -d  Print parsing log to stderr

  -D  Generate an SVG graph of parsing logs

  -g  Run the tests with a debugger

EOF
}
@@ -31,7 +35,9 @@ export TREE_SITTER_TEST=1
export RUST_TEST_THREADS=1
export RUST_BACKTRACE=full

while getopts "bdl:e:s:gGhpvD" option; do
mode=normal

while getopts "dDghl:e:s:t:" option; do
  case ${option} in
    h)
      usage
@@ -43,22 +49,35 @@ while getopts "bdl:e:s:gGhpvD" option; do
    e)
      export TREE_SITTER_TEST_EXAMPLE_FILTER=${OPTARG}
      ;;
    t)
      export TREE_SITTER_TEST_TRIAL_FILTER=${OPTARG}
      ;;
    s)
      export TREE_SITTER_SEED=${OPTARG}
      export TREE_SITTER_TEST_SEED=${OPTARG}
      ;;
    d)
      export TREE_SITTER_ENABLE_LOG=1
      export TREE_SITTER_TEST_ENABLE_LOG=1
      ;;
    D)
      export TREE_SITTER_ENABLE_LOG_GRAPHS=1
      export TREE_SITTER_TEST_ENABLE_LOG_GRAPHS=1
      ;;
    g)
      mode=debug
      ;;
  esac
done

if [[ -n $TREE_SITTER_TEST_LANGUAGE_FILTER || -n $TREE_SITTER_TEST_EXAMPLE_FILTER ]]; then
shift $(expr $OPTIND - 1 )

if [[ -n $TREE_SITTER_TEST_LANGUAGE_FILTER || -n $TREE_SITTER_TEST_EXAMPLE_FILTER || -n $TREE_SITTER_TEST_TRIAL_FILTER ]]; then
  top_level_filter=corpus
else
  top_level_filter=$1
fi

cargo test --jobs 1 $top_level_filter -- --nocapture
if [[ "${mode}" == "debug" ]]; then
  test_binary=$(cargo test --no-run --message-format=json 2> /dev/null | jq -rs '.[-1].filenames[0]')
  lldb "${test_binary}" -- $top_level_filter
else
  cargo test --jobs 1 $top_level_filter -- --nocapture
fi