Remove the concept of fragile reductions

They were a vestige of when Tree-sitter did sentential-form-based
incremental parsing (as opposed to simply state matching). That approach
was elegant, but as far as I could tell it was not compatible with GLR.
This commit is contained in:
Max Brunsfeld 2018-03-02 14:51:54 -08:00
parent 07fa3eb386
commit 52087de4f0
7 changed files with 20 additions and 66 deletions

View file

@@ -270,7 +270,15 @@ describe("Parser", [&]() {
"(parenthesized_expression "
"(binary_expression (number) (member_expression (identifier) (property_identifier)))))))");
AssertThat(input->strings_read(), Equals(vector<string>({ " abc.d);" })));
AssertThat(input->strings_read(), Equals(vector<string>({
// The '*' is not reused because the preceding `x` expression is reused, which
// puts the parser into a different state than when the `*` was initially tokenized.
// When the `*` was initially tokenized, `x` was just an identifier. In both of these
// states, external tokens are valid so we don't reuse tokens unless the lex states
// match. This could probably be improved somehow.
" * ",
" abc.d);"
})));
});
});
@@ -295,7 +303,10 @@ describe("Parser", [&]() {
"(number) "
"(binary_expression (number) (parenthesized_expression (binary_expression (number) (identifier))))))))");
AssertThat(input->strings_read(), Equals(vector<string>({"123 || 5 "})));
AssertThat(input->strings_read(), Equals(vector<string>({
"123 || 5 ",
";"
})));
});
});