In compiler, distinguish between anonymous tokens and hidden rules

This commit is contained in:
Max Brunsfeld 2015-09-05 17:05:37 -07:00
parent 4b270c8604
commit 5982b77c97
46 changed files with 41131 additions and 40884 deletions

View file

@ -1,8 +1,7 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/build_tables/build_parse_table.h"
#include "compiler/parse_table.h"
#include "compiler/lexical_grammar.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
#include "compiler/rules/built_in_symbols.h"
using namespace rules;
@ -12,15 +11,35 @@ START_TEST
describe("build_parse_table", []() {
SyntaxGrammar parse_grammar{{
{ "rule0", choice({ i_sym(1), i_sym(2) }) },
{ "rule1", i_token(0) },
{ "rule2", i_token(1) },
}, {}, { Symbol(2, SymbolOptionToken) }, {}};
{
"rule0",
choice({ i_sym(1), i_sym(2) }),
RuleEntryTypeNamed
},
{
"rule1",
i_token(0),
RuleEntryTypeNamed
},
{
"rule2",
i_token(1),
RuleEntryTypeNamed
},
}, { Symbol(2, true) }, {}};
LexicalGrammar lex_grammar{{
{ "token0", pattern("[a-c]") },
{ "token1", pattern("[b-d]") },
}, {}, {}};
{
"token0",
pattern("[a-c]"),
RuleEntryTypeNamed
},
{
"token1",
pattern("[b-d]"),
RuleEntryTypeNamed
},
}, {}};
it("first looks for the start rule and its item set closure", [&]() {
auto result = build_parse_table(parse_grammar, lex_grammar);
@ -32,11 +51,11 @@ describe("build_parse_table", []() {
// expanded from the item set closure of the start item
{ Symbol(1), {ParseAction::Shift(2, { 0 })} },
{ Symbol(2), {ParseAction::Shift(2, { 0 })} },
{ Symbol(0, SymbolOptionToken), {ParseAction::Shift(3, { 0 })} },
{ Symbol(1, SymbolOptionToken), {ParseAction::Shift(4, { 0 })} },
{ Symbol(0, true), {ParseAction::Shift(3, { 0 })} },
{ Symbol(1, true), {ParseAction::Shift(4, { 0 })} },
// for the ubiquitous_token 'token2'
{ Symbol(2, SymbolOptionToken), {ParseAction::ShiftExtra()} },
{ Symbol(2, true), {ParseAction::ShiftExtra()} },
})));
});
@ -52,7 +71,7 @@ describe("build_parse_table", []() {
{ END_OF_INPUT(), {ParseAction::Accept()} },
// for the ubiquitous_token 'token2'
{ Symbol(2, SymbolOptionToken), {ParseAction::ShiftExtra()} },
{ Symbol(2, true), {ParseAction::ShiftExtra()} },
})));
});
@ -63,7 +82,7 @@ describe("build_parse_table", []() {
{ END_OF_INPUT(), {ParseAction::Reduce(Symbol(0), 1, 0, AssociativityLeft, 0)} },
// for the ubiquitous_token 'token2'
{ Symbol(2, SymbolOptionToken), {ParseAction::ShiftExtra()} },
{ Symbol(2, true), {ParseAction::ShiftExtra()} },
})));
});
});

View file

@ -1,5 +1,5 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
#include "compiler/build_tables/first_symbols.h"
#include "compiler/rules/metadata.h"
@ -16,7 +16,7 @@ describe("first_symbols", []() {
auto rule = seq({ i_token(0), i_token(1) });
AssertThat(first_symbols(rule, null_grammar), Equals(set<Symbol>({
Symbol(0, SymbolOptionToken),
Symbol(0, true),
})));
});
@ -28,8 +28,8 @@ describe("first_symbols", []() {
i_token(1) });
AssertThat(first_symbols(rule, null_grammar), Equals(set<Symbol>({
Symbol(0, SymbolOptionToken),
Symbol(1, SymbolOptionToken)
Symbol(0, true),
Symbol(1, true)
})));
});
@ -41,16 +41,21 @@ describe("first_symbols", []() {
i_sym(0) });
SyntaxGrammar grammar{{
{ "rule0", seq({
i_token(2),
i_token(3),
i_token(4) }) }
}, {}, {}, {}};
{
"rule0",
seq({
i_token(2),
i_token(3),
i_token(4),
}),
RuleEntryTypeNamed
}
}, {}, {}};
AssertThat(first_symbols(rule, grammar), Equals(set<Symbol>({
Symbol(0),
Symbol(0, SymbolOptionToken),
Symbol(2, SymbolOptionToken),
Symbol(0, true),
Symbol(2, true),
})));
});
@ -60,15 +65,20 @@ describe("first_symbols", []() {
i_token(1) });
SyntaxGrammar grammar{{
{ "rule0", choice({
i_token(0),
blank() }) }
}, {}, {}, {}};
{
"rule0",
choice({
i_token(0),
blank(),
}),
RuleEntryTypeNamed
},
}, {}, {}};
AssertThat(first_symbols(rule, grammar), Equals(set<Symbol>({
Symbol(0),
Symbol(0, SymbolOptionToken),
Symbol(1, SymbolOptionToken),
Symbol(0, true),
Symbol(1, true),
})));
});
});
@ -76,17 +86,21 @@ describe("first_symbols", []() {
describe("when there are left-recursive rules", [&]() {
it("terminates", [&]() {
SyntaxGrammar grammar{{
{ "rule0", choice({
seq({ i_sym(0), i_token(10) }),
i_token(11),
}) },
}, {}, {}, {}};
{
"rule0",
choice({
seq({ i_sym(0), i_token(10) }),
i_token(11),
}),
RuleEntryTypeNamed
},
}, {}, {}};
auto rule = i_sym(0);
AssertThat(first_symbols(rule, grammar), Equals(set<Symbol>({
Symbol(0),
Symbol(11, SymbolOptionToken)
Symbol(11, true)
})));
});
});
@ -95,7 +109,7 @@ describe("first_symbols", []() {
auto rule = make_shared<Metadata>(i_token(3), map<rules::MetadataKey, int>());
AssertThat(first_symbols(rule, null_grammar), Equals(set<Symbol>({
Symbol(3, SymbolOptionToken),
Symbol(3, true),
})));
});
});

View file

@ -1,5 +1,5 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
#include "compiler/build_tables/item_set_closure.h"
#include "compiler/build_tables/item_set_transitions.h"
@ -10,29 +10,39 @@ START_TEST
describe("item_set_closure", []() {
SyntaxGrammar grammar{{
{ "E", seq({
i_sym(1),
i_token(11) }) },
{ "T", seq({
i_token(12),
i_token(13) }) },
}, {}, {}, {}};
{
"E",
seq({
i_sym(1),
i_token(11),
}),
RuleEntryTypeNamed,
},
{
"T",
seq({
i_token(12),
i_token(13),
}),
RuleEntryTypeNamed,
},
}, {}, {}};
it("adds items at the beginnings of referenced rules", [&]() {
ParseItemSet item_set = item_set_closure(
ParseItem(Symbol(0), grammar.rule(Symbol(0)), {}),
set<Symbol>({ Symbol(10, SymbolOptionToken) }),
ParseItem(Symbol(0), grammar.rules[0].rule, {}),
set<Symbol>({ Symbol(10, true) }),
grammar
);
AssertThat(item_set, Equals(ParseItemSet({
{
ParseItem(Symbol(1), grammar.rule(Symbol(1)), {}),
set<Symbol>({ Symbol(11, SymbolOptionToken) }),
ParseItem(Symbol(1), grammar.rules[1].rule, {}),
set<Symbol>({ Symbol(11, true) }),
},
{
ParseItem(Symbol(0), grammar.rule(Symbol(0)), {}),
set<Symbol>({ Symbol(10, SymbolOptionToken) }),
ParseItem(Symbol(0), grammar.rules[0].rule, {}),
set<Symbol>({ Symbol(10, true) }),
},
})));
});

View file

@ -1,6 +1,6 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/build_tables/item_set_transitions.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
#include "compiler/helpers/rule_helpers.h"
using namespace rules;
@ -43,29 +43,37 @@ describe("char_transitions(LexItemSet)", []() {
describe("sym_transitions(ParseItemSet, SyntaxGrammar)", [&]() {
SyntaxGrammar grammar{{
{ "A", blank() },
{ "B", i_token(21) },
}, {}, {}, {}};
{
"A",
blank(),
RuleEntryTypeNamed
},
{
"B",
i_token(21),
RuleEntryTypeNamed
},
}, {}, {}};
it("computes the closure of the new item sets", [&]() {
ParseItemSet set1({
{
ParseItem(Symbol(0), seq({ i_token(22), i_sym(1) }), { Symbol(101) }),
set<Symbol>({ Symbol(23, SymbolOptionToken) })
set<Symbol>({ Symbol(23, true) })
},
});
AssertThat(sym_transitions(set1, grammar), Equals(map<Symbol, ParseItemSet>({
{
Symbol(22, SymbolOptionToken),
Symbol(22, true),
ParseItemSet({
{
ParseItem(Symbol(0), i_sym(1), { Symbol(101), Symbol(22) }),
set<Symbol>({ Symbol(23, SymbolOptionToken) }),
set<Symbol>({ Symbol(23, true) }),
},
{
ParseItem(Symbol(1), i_token(21), {}),
set<Symbol>({ Symbol(23, SymbolOptionToken) })
set<Symbol>({ Symbol(23, true) })
},
})
},

View file

@ -2,7 +2,7 @@
#include "compiler/rules/built_in_symbols.h"
#include "compiler/parse_table.h"
#include "compiler/build_tables/lex_conflict_manager.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
using namespace rules;
using namespace build_tables;
@ -11,16 +11,24 @@ START_TEST
describe("LexConflictManager", []() {
LexicalGrammar lexical_grammar{{
{ "other_token", pattern("[a-b]") },
{ "lookahead_token", pattern("[c-d]") },
}, {}, {}};
{
"other_token",
pattern("[a-b]"),
RuleEntryTypeNamed
},
{
"lookahead_token",
pattern("[c-d]"),
RuleEntryTypeNamed
},
}, {}};
LexConflictManager conflict_manager(lexical_grammar);
bool update;
Symbol sym1(0, SymbolOptionToken);
Symbol sym2(1, SymbolOptionToken);
Symbol sym3(2, SymbolOptionToken);
Symbol sym1(0, true);
Symbol sym2(1, true);
Symbol sym3(2, true);
it("favors non-errors over lexical errors", [&]() {
update = conflict_manager.resolve(LexAction::Advance(2, {0}), LexAction::Error());

View file

@ -2,7 +2,7 @@
#include "compiler/rules/built_in_symbols.h"
#include "compiler/parse_table.h"
#include "compiler/build_tables/parse_conflict_manager.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
using namespace rules;
using namespace build_tables;
@ -11,17 +11,37 @@ START_TEST
describe("ParseConflictManager", []() {
SyntaxGrammar syntax_grammar{{
{ "in_progress_rule1", i_token(0) },
{ "in_progress_rule2", i_token(0) },
{ "reduced_rule", i_token(0) },
{ "other_rule1", i_token(0) },
{ "other_rule2", i_token(0) },
}, {}, { Symbol(2, SymbolOptionToken) }, {}};
{
"in_progress_rule1",
i_token(0),
RuleEntryTypeNamed,
},
{
"in_progress_rule2",
i_token(0),
RuleEntryTypeNamed,
},
{
"reduced_rule",
i_token(0),
RuleEntryTypeNamed,
},
{
"other_rule1",
i_token(0),
RuleEntryTypeNamed,
},
{
"other_rule2",
i_token(0),
RuleEntryTypeNamed,
},
}, { Symbol(2, true) }, {}};
pair<bool, ConflictType> result;
Symbol sym1(0);
Symbol sym2(1);
Symbol lookahead_sym(1, SymbolOptionToken);
Symbol lookahead_sym(1, true);
ParseConflictManager *conflict_manager;
before_each([&]() {

View file

@ -1,7 +1,7 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/build_tables/rule_can_be_blank.h"
#include "compiler/rules/metadata.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
using namespace rules;
using build_tables::rule_can_be_blank;
@ -57,13 +57,23 @@ describe("rule_can_be_blank", [&]() {
describe("checking recursively (by expanding non-terminals)", [&]() {
SyntaxGrammar grammar{{
{ "A", choice({
seq({ i_sym(0), i_token(11) }),
blank() }) },
{ "B", choice({
seq({ i_sym(1), i_token(12) }),
i_token(13) }) },
}, {}, {}, {}};
{
"A",
choice({
seq({ i_sym(0), i_token(11) }),
blank()
}),
RuleEntryTypeNamed,
},
{
"B",
choice({
seq({ i_sym(1), i_token(12) }),
i_token(13)
}),
RuleEntryTypeNamed,
},
}, {}, {}};
it("terminates for left-recursive rules that can be blank", [&]() {
rule = i_sym(0);

View file

@ -48,6 +48,20 @@ class rule_list : public vector<pair<string, rule_ptr>> {
vector<pair<string, rule_ptr>>(list) {}
};
template<typename T>
class eq_vector : public std::vector<T> {
 public:
  /// Element-wise equality against a plain std::vector, so that an
  /// eq_vector on the left-hand side of an assertion can be compared
  /// with a vector of expected values.
  bool operator==(const std::vector<T> &other) const {
    // std::vector's own operator== already performs exactly the same
    // check (size comparison followed by element-wise ==), so delegate
    // to it instead of hand-rolling the loop.
    return static_cast<const std::vector<T> &>(*this) == other;
  }

  eq_vector(const std::initializer_list<T> &list) : std::vector<T>(list) {}
};
class rule_vector : public vector<rule_ptr> {
public:
bool operator==(const vector<rule_ptr> &other) const {

View file

@ -6,43 +6,41 @@ namespace tree_sitter {
using std::make_shared;
using std::set;
using std::map;
using std::ostream;
using std::string;
using std::to_string;
namespace rules {
rule_ptr character(const set<uint32_t> &ranges) {
return character(ranges, true);
}
rule_ptr character(const set<uint32_t> &ranges) {
return character(ranges, true);
}
rule_ptr character(const set<uint32_t> &chars, bool sign) {
CharacterSet result;
if (sign) {
for (uint32_t c : chars)
result.include(c);
} else {
result.include_all();
for (uint32_t c : chars)
result.exclude(c);
}
return result.copy();
rule_ptr character(const set<uint32_t> &chars, bool sign) {
rules::CharacterSet result;
if (sign) {
for (uint32_t c : chars)
result.include(c);
} else {
result.include_all();
for (uint32_t c : chars)
result.exclude(c);
}
return result.copy();
}
rule_ptr i_sym(size_t index) {
return make_shared<rules::Symbol>(index);
}
rule_ptr i_sym(size_t index) {
return make_shared<rules::Symbol>(index);
}
rule_ptr i_aux_sym(size_t index) {
return make_shared<rules::Symbol>(index, SymbolOptionAuxiliary);
}
rule_ptr i_token(size_t index) {
return make_shared<rules::Symbol>(index, true);
}
rule_ptr i_token(size_t index) {
return make_shared<rules::Symbol>(index, SymbolOptionToken);
}
rule_ptr metadata(rule_ptr rule, map<rules::MetadataKey, int> values) {
return make_shared<rules::Metadata>(rule, values);
}
rule_ptr i_aux_token(size_t index) {
return make_shared<rules::Symbol>(index, SymbolOption(SymbolOptionAuxiliary|SymbolOptionToken));
}
rule_ptr metadata(rule_ptr rule, map<MetadataKey, int> values) {
return make_shared<Metadata>(rule, values);
}
// Deep equality for RuleEntry: two entries match when their names, the
// rules they point to (compared by value, not by pointer), and their
// entry types all agree. Comparison order is preserved from the original
// (name, then rule, then type) to keep short-circuit behavior identical.
bool operator==(const RuleEntry &left, const RuleEntry &right) {
  if (left.name != right.name)
    return false;
  if (!left.rule->operator==(*right.rule))
    return false;
  return left.type == right.type;
}
}

View file

@ -4,17 +4,16 @@
#include "tree_sitter/compiler.h"
#include "compiler/rules/character_set.h"
#include "compiler/rules/metadata.h"
#include "compiler/prepared_grammar.h"
namespace tree_sitter {
namespace rules {
rule_ptr metadata(rule_ptr, std::map<MetadataKey, int>);
rule_ptr character(const std::set<uint32_t> &);
rule_ptr character(const std::set<uint32_t> &, bool sign);
rule_ptr i_sym(size_t index);
rule_ptr i_aux_sym(size_t index);
rule_ptr i_token(size_t index);
rule_ptr i_aux_token(size_t index);
}
rule_ptr metadata(rule_ptr, std::map<rules::MetadataKey, int>);
rule_ptr character(const std::set<uint32_t> &);
rule_ptr character(const std::set<uint32_t> &, bool sign);
rule_ptr i_sym(size_t index);
rule_ptr i_token(size_t index);
bool operator==(const RuleEntry &left, const RuleEntry &right);
}
#endif

View file

@ -7,6 +7,7 @@
#include <map>
#include <unordered_set>
#include <vector>
#include "compiler/prepared_grammar.h"
using std::cout;
@ -83,4 +84,16 @@ inline std::ostream& operator<<(std::ostream &stream, const std::pair<T1, T2> &p
} // namespace std
namespace tree_sitter {
using std::ostream;
using std::string;
using std::to_string;
// Debug/test-failure formatting for RuleEntry: "{<name>, <rule>, <type>}".
// NOTE(review): streaming entry.rule (a shared rule pointer) relies on an
// operator<< for rule_ptr declared elsewhere in the project — confirm it
// prints the rule's contents rather than a raw pointer address.
inline ostream &operator<<(ostream &stream, const RuleEntry &entry) {
return stream << string("{") << entry.name << string(", ") << entry.rule << string(", ") << to_string(entry.type) << string("}");
}
}
#endif

View file

@ -1,5 +1,5 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
#include "compiler/prepare_grammar/expand_repeats.h"
#include "compiler/helpers/containers.h"
@ -11,131 +11,223 @@ using prepare_grammar::expand_repeats;
describe("expand_repeats", []() {
it("replaces repeat rules with pairs of recursive rules", [&]() {
SyntaxGrammar grammar{{
{ "rule0", repeat(i_token(0)) },
}, {}, {}, {}};
{
"rule0",
repeat(i_token(0)),
RuleEntryTypeNamed,
},
}, {}, {}};
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(rule_list({
{ "rule0", choice({ i_aux_sym(0), blank() }) },
})));
AssertThat(match.aux_rules, Equals(rule_list({
{ "rule0_repeat0", seq({
i_token(0),
choice({ i_aux_sym(0), blank() }) }) },
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
{
"rule0",
choice({ i_sym(1), blank() }),
RuleEntryTypeNamed,
},
{
"rule0_repeat1",
seq({
i_token(0),
choice({ i_sym(1), blank() })
}),
RuleEntryTypeHidden
},
})));
});
it("replaces repeats inside of sequences", [&]() {
SyntaxGrammar grammar{{
{ "rule0", seq({
i_token(10),
repeat(i_token(11)) }) },
}, {}, {}, {}};
{
"rule0",
seq({
i_token(10),
repeat(i_token(11)),
}),
RuleEntryTypeNamed,
},
}, {}, {}};
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(rule_list({
{ "rule0", seq({
i_token(10),
choice({ i_aux_sym(0), blank() }) }) },
})));
AssertThat(match.aux_rules, Equals(rule_list({
{ "rule0_repeat0", seq({
i_token(11),
choice({ i_aux_sym(0), blank() }) }) },
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
{
"rule0",
seq({
i_token(10),
choice({ i_sym(1), blank() })
}),
RuleEntryTypeNamed
},
{
"rule0_repeat1",
seq({
i_token(11),
choice({ i_sym(1), blank() })
}),
RuleEntryTypeHidden
},
})));
});
it("replaces repeats inside of choices", [&]() {
SyntaxGrammar grammar{{
{ "rule0", choice({ i_token(10), repeat(i_token(11)) }) },
}, {}, {}, {}};
{
"rule0",
choice({ i_token(10), repeat(i_token(11)) }),
RuleEntryTypeNamed
},
}, {}, {}};
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(rule_list({
{ "rule0", choice({ i_token(10), i_aux_sym(0), blank() }) },
})));
AssertThat(match.aux_rules, Equals(rule_list({
{ "rule0_repeat0", seq({
i_token(11),
choice({ i_aux_sym(0), blank() }) }) },
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
{
"rule0",
choice({ i_token(10), i_sym(1), blank() }),
RuleEntryTypeNamed
},
{
"rule0_repeat1",
seq({
i_token(11),
choice({ i_sym(1), blank() }),
}),
RuleEntryTypeHidden
},
})));
});
it("does not create redundant auxiliary rules", [&]() {
SyntaxGrammar grammar{{
{ "rule0", choice({
seq({ i_token(1), repeat(i_token(4)) }),
seq({ i_token(2), repeat(i_token(4)) }) }) },
{ "rule1", seq({ i_token(3), repeat(i_token(4)) }) },
}, {}, {}, {}};
{
"rule0",
choice({
seq({ i_token(1), repeat(i_token(4)) }),
seq({ i_token(2), repeat(i_token(4)) }),
}),
RuleEntryTypeNamed
},
{
"rule1",
seq({ i_token(3), repeat(i_token(4)) }),
RuleEntryTypeNamed
},
}, {}, {}};
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(rule_list({
{ "rule0", choice({
seq({ i_token(1), choice({ i_aux_sym(0), blank() }) }),
seq({ i_token(2), choice({ i_aux_sym(0), blank() }) }) }) },
{ "rule1", seq({ i_token(3), choice({ i_aux_sym(0), blank() }) }) },
})));
AssertThat(match.aux_rules, Equals(rule_list({
{ "rule0_repeat0", seq({
i_token(4),
choice({ i_aux_sym(0), blank() }) }) },
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
{
"rule0",
choice({
seq({ i_token(1), choice({ i_sym(2), blank() }) }),
seq({ i_token(2), choice({ i_sym(2), blank() }) }),
}),
RuleEntryTypeNamed
},
{
"rule1",
seq({ i_token(3), choice({ i_sym(2), blank() }) }),
RuleEntryTypeNamed
},
{
"rule0_repeat1",
seq({
i_token(4),
choice({ i_sym(2), blank() }),
}),
RuleEntryTypeHidden
},
})));
});
it("can replace multiple repeats in the same rule", [&]() {
SyntaxGrammar grammar{{
{ "rule0", seq({
repeat(i_token(10)),
repeat(i_token(11)) }) },
}, {}, {}, {}};
{
"rule0",
seq({
repeat(i_token(10)),
repeat(i_token(11)),
}),
RuleEntryTypeNamed
},
}, {}, {}};
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(rule_list({
{ "rule0", seq({
choice({ i_aux_sym(0), blank() }),
choice({ i_aux_sym(1), blank() }) }) },
})));
AssertThat(match.aux_rules, Equals(rule_list({
{ "rule0_repeat0", seq({
i_token(10),
choice({ i_aux_sym(0), blank() }) }) },
{ "rule0_repeat1", seq({
i_token(11),
choice({ i_aux_sym(1), blank() }) }) },
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
{
"rule0",
seq({
choice({ i_sym(1), blank() }),
choice({ i_sym(2), blank() }),
}),
RuleEntryTypeNamed
},
{
"rule0_repeat1",
seq({
i_token(10),
choice({ i_sym(1), blank() }),
}),
RuleEntryTypeHidden
},
{
"rule0_repeat2",
seq({
i_token(11),
choice({ i_sym(2), blank() }),
}),
RuleEntryTypeHidden
},
})));
});
it("can replace repeats in multiple rules", [&]() {
SyntaxGrammar grammar{{
{ "rule0", repeat(i_token(10)) },
{ "rule1", repeat(i_token(11)) },
}, {}, {}, {}};
{
"rule0",
repeat(i_token(10)),
RuleEntryTypeNamed,
},
{
"rule1",
repeat(i_token(11)),
RuleEntryTypeNamed,
},
}, {}, {}};
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(rule_list({
{ "rule0", choice({ i_aux_sym(0), blank() }) },
{ "rule1", choice({ i_aux_sym(1), blank() }) },
})));
AssertThat(match.aux_rules, Equals(rule_list({
{ "rule0_repeat0", seq({
i_token(10),
choice({ i_aux_sym(0), blank() }) }) },
{ "rule1_repeat0", seq({
i_token(11),
choice({ i_aux_sym(1), blank() }) }) },
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
{
"rule0",
choice({ i_sym(2), blank() }),
RuleEntryTypeNamed
},
{
"rule1",
choice({ i_sym(3), blank() }),
RuleEntryTypeNamed
},
{
"rule0_repeat1",
seq({
i_token(10),
choice({ i_sym(2), blank() }),
}),
RuleEntryTypeHidden
},
{
"rule1_repeat1",
seq({
i_token(11),
choice({ i_sym(3), blank() })
}),
RuleEntryTypeHidden
},
})));
});
});

View file

@ -1,5 +1,5 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/lexical_grammar.h"
#include "compiler/prepared_grammar.h"
#include "compiler/helpers/containers.h"
#include "compiler/prepare_grammar/expand_tokens.h"
@ -12,36 +12,64 @@ describe("expand_tokens", []() {
describe("string rules", [&]() {
it("replaces strings with sequences of character sets", [&]() {
LexicalGrammar grammar{{
{ "rule_A", seq({
i_sym(10),
str("xyz"),
i_sym(11) }) },
}, {}, {}};
{
"rule_A",
seq({
i_sym(10),
str("xyz"),
i_sym(11),
}),
RuleEntryTypeNamed
},
}, {}};
auto result = expand_tokens(grammar);
AssertThat(result.second, Equals((const GrammarError *)nullptr));
AssertThat(result.first.rules, Equals(rule_list({
{ "rule_A", seq({
i_sym(10),
token(prec(1, seq({ character({ 'x' }), character({ 'y' }), character({ 'z' }) }))),
i_sym(11) }) },
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
{
"rule_A",
seq({
i_sym(10),
metadata(seq({
character({ 'x' }),
character({ 'y' }),
character({ 'z' }),
}), {
{PRECEDENCE, 1},
{IS_TOKEN, 1},
}),
i_sym(11),
}),
RuleEntryTypeNamed
},
})));
});
it("handles strings containing non-ASCII UTF8 characters", [&]() {
LexicalGrammar grammar{{
// α β
{ "rule_A", str("\u03B1 \u03B2") },
}, {}, {}};
{
"rule_A",
str("\u03B1 \u03B2"), // α β
RuleEntryTypeNamed
},
}, {}};
auto result = expand_tokens(grammar);
AssertThat(result.first.rules, Equals(rule_list({
{ "rule_A", token(prec(1, seq({
character({ 945 }),
character({ ' ' }),
character({ 946 }) }))) }
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
{
"rule_A",
metadata(seq({
character({ 945 }),
character({ ' ' }),
character({ 946 }),
}), {
{PRECEDENCE, 1},
{IS_TOKEN, 1},
}),
RuleEntryTypeNamed
}
})));
});
});
@ -49,43 +77,65 @@ describe("expand_tokens", []() {
describe("regexp rules", [&]() {
it("replaces regexps with the equivalent rule tree", [&]() {
LexicalGrammar grammar{{
{ "rule_A", seq({
i_sym(10),
pattern("x*"),
i_sym(11) }) },
}, {}, {}};
{
"rule_A",
seq({
i_sym(10),
pattern("x*"),
i_sym(11),
}),
RuleEntryTypeNamed
},
}, {}};
auto result = expand_tokens(grammar);
AssertThat(result.second, Equals((const GrammarError *)nullptr));
AssertThat(result.first.rules, Equals(rule_list({
{ "rule_A", seq({
i_sym(10),
repeat(character({ 'x' })),
i_sym(11) }) },
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
{
"rule_A",
seq({
i_sym(10),
repeat(character({ 'x' })),
i_sym(11),
}),
RuleEntryTypeNamed
},
})));
});
it("handles regexps containing non-ASCII UTF8 characters", [&]() {
LexicalGrammar grammar{{
// [^α-δ]
{ "rule_A", pattern("[^\u03B1-\u03B4]*") },
}, {}, {}};
{
"rule_A",
pattern("[^\u03B1-\u03B4]*"), // [^α-δ]
RuleEntryTypeNamed
},
}, {}};
auto result = expand_tokens(grammar);
AssertThat(result.first.rules, Equals(rule_list({
{ "rule_A", repeat(character({ 945, 946, 947, 948 }, false)) }
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
{
"rule_A",
repeat(character({ 945, 946, 947, 948 }, false)),
RuleEntryTypeNamed
}
})));
});
it("returns an error when the grammar contains an invalid regex", [&]() {
LexicalGrammar grammar{{
{ "rule_A", seq({
pattern("("),
str("xyz"),
pattern("[") }) },
}, {}, {}};
{
"rule_A",
seq({
pattern("("),
str("xyz"),
pattern("["),
}),
RuleEntryTypeNamed
},
}, {}};
auto result = expand_tokens(grammar);

View file

@ -1,6 +1,5 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/lexical_grammar.h"
#include "compiler/syntax_grammar.h"
#include "compiler/prepared_grammar.h"
#include "compiler/prepare_grammar/interned_grammar.h"
#include "compiler/prepare_grammar/extract_tokens.h"
#include "compiler/helpers/containers.h"
@ -12,271 +11,301 @@ using prepare_grammar::extract_tokens;
using prepare_grammar::InternedGrammar;
describe("extract_tokens", []() {
it("moves string rules into the lexical grammar", [&]() {
it("moves strings, patterns, and sub-rules marked as tokens into the lexical grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", seq({ str("ab"), i_sym(0) }) }
{
"rule_A",
repeat(seq({
str("ab"),
pattern("cd*"),
choice({
i_sym(1),
i_sym(2),
token(repeat(choice({ str("ef"), str("gh") }))),
}),
})),
},
{
"rule_B",
pattern("ij+"),
},
{
"rule_C",
choice({ str("kl"), blank() }),
},
{
"rule_D",
repeat(i_sym(3))
}
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_A", seq({ i_aux_token(0), i_sym(0) }) }
SyntaxGrammar &syntax_grammar = get<0>(result);
LexicalGrammar &lexical_grammar = get<1>(result);
const GrammarError *error = get<2>(result);
AssertThat(error, Equals<const GrammarError *>(nullptr));
AssertThat(syntax_grammar.rules, Equals(eq_vector<RuleEntry>({
{
"rule_A",
repeat(seq({
// This string is now the first token in the lexical grammar.
i_token(0),
// This pattern is now the second rule in the lexical grammar.
i_token(1),
choice({
// Rule 1, which this symbol pointed to, has been moved to the
// lexical grammar.
i_token(3),
// This symbol's index has been decremented, because a previous rule
// was moved to the lexical grammar.
i_sym(1),
// This token rule is now the third rule in the lexical grammar.
i_token(2),
}),
})),
RuleEntryTypeNamed,
},
{
"rule_C",
choice({ i_token(4), blank() }),
RuleEntryTypeNamed,
},
{
"rule_D",
repeat(i_sym(2)),
RuleEntryTypeNamed,
}
})));
AssertThat(get<0>(result).aux_rules, IsEmpty())
AssertThat(get<1>(result).rules, IsEmpty())
AssertThat(get<1>(result).aux_rules, Equals(rule_list({
{ "'ab'", str("ab") },
AssertThat(lexical_grammar.rules, Equals(eq_vector<RuleEntry>({
// Strings become anonymous rules.
{
"ab",
str("ab"),
RuleEntryTypeAnonymous,
},
// Patterns become hidden rules.
{
"/cd*/",
pattern("cd*"),
RuleEntryTypeHidden,
},
// Rules marked as tokens become hidden rules.
{
"/(ef|gh)*/",
repeat(choice({ str("ef"), str("gh") })),
RuleEntryTypeHidden,
},
// This named rule was moved wholesale to the lexical grammar.
{
"rule_B",
pattern("ij+"),
RuleEntryTypeNamed,
},
// Strings become anonymous rules.
{
"kl",
str("kl"),
RuleEntryTypeAnonymous,
},
})));
});
it("moves pattern rules into the lexical grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", seq({ pattern("a+"), i_sym(0) }) }
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_A", seq({ i_aux_token(0), i_sym(0) }) }
})));
AssertThat(get<0>(result).aux_rules, IsEmpty())
AssertThat(get<1>(result).rules, IsEmpty())
AssertThat(get<1>(result).aux_rules, Equals(rule_list({
{ "/a+/", pattern("a+") },
})));
});
it("moves other rules marked as tokens into the lexical grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", seq({
token(seq({ pattern("."), choice({ str("a"), str("b") }) })),
i_sym(0) }) }
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_A", seq({ i_aux_token(0), i_sym(0) }) }
})));
AssertThat(get<0>(result).aux_rules, IsEmpty())
AssertThat(get<1>(result).rules, IsEmpty())
AssertThat(get<1>(result).aux_rules, Equals(rule_list({
{ "(seq /./ (choice 'a' 'b'))", token(seq({ pattern("."), choice({ str("a"), str("b") }) })) },
})));
});
it("does not move blank rules", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", choice({ i_sym(0), blank() }) },
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_A", choice({ i_sym(0), blank() }) },
})));
AssertThat(get<0>(result).aux_rules, IsEmpty())
AssertThat(get<1>(result).rules, IsEmpty())
AssertThat(get<1>(result).aux_rules, IsEmpty())
});
it("does not create duplicate tokens in the lexical grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", seq({ str("ab"), i_sym(0), str("ab") }) },
{
"rule_A",
seq({
str("ab"),
i_sym(0),
str("ab"),
})
},
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_A", seq({ i_aux_token(0), i_sym(0), i_aux_token(0) }) }
})));
AssertThat(get<0>(result).aux_rules, IsEmpty())
SyntaxGrammar &syntax_grammar = get<0>(result);
LexicalGrammar &lexical_grammar = get<1>(result);
AssertThat(get<1>(result).rules, IsEmpty())
AssertThat(get<1>(result).aux_rules, Equals(rule_list({
{ "'ab'", str("ab") },
AssertThat(syntax_grammar.rules, Equals(eq_vector<RuleEntry>({
{
"rule_A",
seq({ i_token(0), i_sym(0), i_token(0) }),
RuleEntryTypeNamed
}
})));
AssertThat(lexical_grammar.rules, Equals(eq_vector<RuleEntry>({
{
"ab",
str("ab"),
RuleEntryTypeAnonymous
},
})))
});
it("updates the grammar's expected conflict symbols", [&]() {
auto result = extract_tokens(InternedGrammar{
it("does not move entire rules into the lexical grammar if their content is used elsewhere in the grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{
{ "rule_A", str("ok") },
{ "rule_B", repeat(i_sym(0)) },
{ "rule_C", repeat(seq({ i_sym(0), i_sym(0) })) },
"rule_A",
seq({ i_sym(1), str("ab") })
},
{ str(" ") },
{ { Symbol(1), Symbol(2) } }
});
{
"rule_B",
str("cd")
},
{
"rule_C",
seq({ str("ef"), str("cd") })
},
}, {}, {}});
AssertThat(get<0>(result).rules.size(), Equals<size_t>(2));
AssertThat(get<1>(result).rules.size(), Equals<size_t>(1));
AssertThat(get<0>(result).expected_conflicts, Equals(set<set<Symbol>>({
SyntaxGrammar &syntax_grammar = get<0>(result);
LexicalGrammar &lexical_grammar = get<1>(result);
AssertThat(syntax_grammar.rules, Equals(eq_vector<RuleEntry>({
{
"rule_A",
seq({ i_sym(1), i_token(0) }),
RuleEntryTypeNamed
},
{
"rule_B",
i_token(1),
RuleEntryTypeNamed
},
{
"rule_C",
seq({ i_token(2), i_token(1) }),
RuleEntryTypeNamed
},
})));
AssertThat(lexical_grammar.rules, Equals(eq_vector<RuleEntry>({
{
"ab",
str("ab"),
RuleEntryTypeAnonymous
},
{
"cd",
str("cd"),
RuleEntryTypeAnonymous
},
{
"ef",
str("ef"),
RuleEntryTypeAnonymous
},
})));
});
it("renumbers the grammar's expected conflict symbols based on any moved rules", [&]() {
auto result = extract_tokens(InternedGrammar{{
{
"rule_A",
str("ok")
},
{
"rule_B",
repeat(i_sym(0))
},
{
"rule_C",
repeat(seq({ i_sym(0), i_sym(0) }))
},
}, { str(" ") }, { { Symbol(1), Symbol(2) } }});
SyntaxGrammar &syntax_grammar = get<0>(result);
AssertThat(syntax_grammar.rules.size(), Equals<size_t>(2));
AssertThat(syntax_grammar.expected_conflicts, Equals(set<set<Symbol>>({
{ Symbol(0), Symbol(1) },
})));
});
describe("when an entire grammar rule is a token", [&]() {
it("moves the rule the lexical grammar and updates referencing symbols", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", i_sym(1) },
{ "rule_B", pattern("a|b") },
{ "rule_C", token(seq({ str("a"), str("b") })) },
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_A", i_token(0) }
})));
AssertThat(get<0>(result).aux_rules, IsEmpty());
AssertThat(get<1>(result).rules, Equals(rule_list({
{ "rule_B", pattern("a|b") },
{ "rule_C", token(seq({ str("a"), str("b") })) },
})));
// TODO put back
// AssertThat(get<1>(result).aux_rules, IsEmpty());
});
it("updates symbols whose indices need to change due to deleted rules", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", str("ab") },
{ "rule_B", i_sym(0) },
{ "rule_C", i_sym(1) },
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_B", i_token(0) },
{ "rule_C", i_sym(0) },
})));
AssertThat(get<0>(result).aux_rules, IsEmpty());
AssertThat(get<1>(result).rules, Equals(rule_list({
{ "rule_A", str("ab") },
})));
// TODO put back
// AssertThat(get<1>(result).aux_rules, IsEmpty());
});
it("does not move the rule if its content is used elsewhere in the grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", seq({ i_sym(1), str("ab") }) },
{ "rule_B", str("cd") },
{ "rule_C", seq({ str("ef"), str("cd") }) },
}, {}, {}});
AssertThat(get<0>(result).rules, Equals(rule_list({
{ "rule_A", seq({ i_sym(1), i_aux_token(0) }) },
{ "rule_B", i_aux_token(1) },
{ "rule_C", seq({ i_aux_token(2), i_aux_token(1) }) },
})));
AssertThat(get<0>(result).aux_rules, IsEmpty());
AssertThat(get<1>(result).rules, IsEmpty())
AssertThat(get<1>(result).aux_rules, Equals(rule_list({
{ "'ab'", str("ab") },
{ "'cd'", str("cd") },
{ "'ef'", str("ef") },
})));
});
});
describe("handling ubiquitous tokens", [&]() {
describe("ubiquitous tokens that are not symbols", [&]() {
it("adds them to the lexical grammar's separators", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", str("x") },
}, {
pattern("\\s+"),
str("y"),
}, {}});
it("adds inline ubiquitous tokens to the lexical grammar's separators", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", str("x") },
}, {
pattern("\\s+"),
str("y"),
}, {}});
AssertThat(get<2>(result), Equals<const GrammarError *>(nullptr));
AssertThat(get<2>(result), Equals<const GrammarError *>(nullptr));
AssertThat(get<1>(result).separators, Equals(rule_vector({
pattern("\\s+"),
str("y"),
})));
AssertThat(get<1>(result).separators, Equals(rule_vector({
pattern("\\s+"),
str("y"),
})));
AssertThat(get<0>(result).ubiquitous_tokens, IsEmpty());
});
AssertThat(get<0>(result).ubiquitous_tokens, IsEmpty());
});
describe("ubiquitous tokens that point to moved rules", [&]() {
it("updates them according to the new symbol numbers", [&]() {
auto result = extract_tokens(InternedGrammar{ {
{ "rule_A", seq({ str("w"), i_sym(1) }) },
{ "rule_B", str("x") },
{ "rule_C", str("y") },
}, {
i_sym(2),
}, {}});
it("updates ubiquitous symbols according to the new symbol numbers", [&]() {
auto result = extract_tokens(InternedGrammar{ {
{ "rule_A", seq({ str("w"), str("x"), i_sym(1) }) },
{ "rule_B", str("y") },
{ "rule_C", str("z") },
}, {
i_sym(2),
}, {}});
AssertThat(get<2>(result), Equals<const GrammarError *>(nullptr));
AssertThat(get<2>(result), Equals<const GrammarError *>(nullptr));
AssertThat(get<0>(result).ubiquitous_tokens, Equals(set<Symbol>({
{ Symbol(1, SymbolOptionToken) },
})));
AssertThat(get<0>(result).ubiquitous_tokens, Equals(set<Symbol>({
{ Symbol(3, true) },
})));
AssertThat(get<1>(result).separators, IsEmpty());
});
AssertThat(get<1>(result).separators, IsEmpty());
});
describe("ubiquitous tokens that are visible", [&]() {
it("preserves them in the syntactic grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", str("ab") },
{ "rule_B", str("bc") },
}, { i_sym(1) }, {}});
it("returns an error if any ubiquitous tokens are non-token symbols", [&]() {
auto result = extract_tokens(InternedGrammar{{
{
"rule_A",
seq({ str("x"), i_sym(1) }),
},
{
"rule_B",
seq({ str("y"), str("z") })
},
}, { i_sym(1) }, {}});
AssertThat(get<2>(result), Equals<const GrammarError *>(nullptr));
AssertThat(get<0>(result).ubiquitous_tokens, Equals(set<Symbol>({
Symbol(1, SymbolOptionToken)
})));
AssertThat(get<1>(result).separators, IsEmpty());
});
AssertThat(get<2>(result), !Equals<const GrammarError *>(nullptr));
AssertThat(get<2>(result), EqualsPointer(
new GrammarError(GrammarErrorTypeInvalidUbiquitousToken,
"Not a token: rule_B")));
});
describe("ubiquitous tokens that are used in other grammar rules", [&]() {
it("preserves them in the syntactic grammar", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", seq({ i_sym(1), str("ab") }) },
{ "_rule_B", str("bc") },
}, { i_sym(1) }, {}});
it("returns an error if any ubiquitous tokens are non-token rules", [&]() {
auto result = extract_tokens(InternedGrammar{{
{
"rule_A",
str("x")
},
{
"rule_B",
str("y")
},
}, { choice({ i_sym(1), blank() }) }, {}});
AssertThat(get<2>(result), Equals<const GrammarError *>(nullptr));
AssertThat(get<0>(result).ubiquitous_tokens, Equals(set<Symbol>({
Symbol(0, SymbolOptionToken),
})));
AssertThat(get<1>(result).separators, IsEmpty());
});
});
describe("ubiquitous tokens that are non-token symbols", [&]() {
it("returns an error", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", seq({ str("x"), i_sym(1) }), },
{ "rule_B", seq({ str("y"), str("z") }) },
}, { i_sym(1) }, {}});
AssertThat(get<2>(result), !Equals<const GrammarError *>(nullptr));
AssertThat(get<2>(result), EqualsPointer(
new GrammarError(GrammarErrorTypeInvalidUbiquitousToken,
"Not a token: rule_B")));
});
});
describe("ubiquitous tokens that are not symbols", [&]() {
it("returns an error", [&]() {
auto result = extract_tokens(InternedGrammar{{
{ "rule_A", str("x") },
{ "rule_B", str("y") },
}, { choice({ i_sym(1), blank() }) }, {}});
AssertThat(get<2>(result), !Equals<const GrammarError *>(nullptr));
AssertThat(get<2>(result), EqualsPointer(
new GrammarError(GrammarErrorTypeInvalidUbiquitousToken,
"Not a token: (choice (sym 1) (blank))")));
});
AssertThat(get<2>(result), !Equals<const GrammarError *>(nullptr));
AssertThat(get<2>(result), EqualsPointer(
new GrammarError(GrammarErrorTypeInvalidUbiquitousToken,
"Not a token: (choice (sym 1) (blank))")));
});
});
});

View file

@ -12,16 +12,16 @@ enum {
sym_quotient,
sym_exponent,
sym_group,
aux_sym_PLUS,
aux_sym_DASH,
aux_sym_STAR,
aux_sym_SLASH,
aux_sym_CARET,
aux_sym_LPAREN,
aux_sym_RPAREN,
sym_number,
sym_variable,
sym_comment,
aux_sym_STR_PLUS,
aux_sym_STR_DASH,
aux_sym_STR_STAR,
aux_sym_STR_SLASH,
aux_sym_STR_CARET,
aux_sym_STR_LPAREN,
aux_sym_STR_RPAREN,
};
static const char *ts_symbol_names[] = {
@ -35,27 +35,27 @@ static const char *ts_symbol_names[] = {
[sym_group] = "group",
[ts_builtin_sym_error] = "ERROR",
[ts_builtin_sym_end] = "END",
[aux_sym_PLUS] = "+",
[aux_sym_DASH] = "-",
[aux_sym_STAR] = "*",
[aux_sym_SLASH] = "/",
[aux_sym_CARET] = "^",
[aux_sym_LPAREN] = "(",
[aux_sym_RPAREN] = ")",
[sym_number] = "number",
[sym_variable] = "variable",
[sym_comment] = "comment",
[aux_sym_STR_PLUS] = "STR_+",
[aux_sym_STR_DASH] = "STR_-",
[aux_sym_STR_STAR] = "STR_*",
[aux_sym_STR_SLASH] = "STR_/",
[aux_sym_STR_CARET] = "STR_^",
[aux_sym_STR_LPAREN] = "STR_(",
[aux_sym_STR_RPAREN] = "STR_)",
};
static const int ts_hidden_symbol_flags[SYMBOL_COUNT] = {
[sym__expression] = 1,
[aux_sym_STR_PLUS] = 1,
[aux_sym_STR_DASH] = 1,
[aux_sym_STR_STAR] = 1,
[aux_sym_STR_SLASH] = 1,
[aux_sym_STR_CARET] = 1,
[aux_sym_STR_LPAREN] = 1,
[aux_sym_STR_RPAREN] = 1,
[aux_sym_PLUS] = 1,
[aux_sym_DASH] = 1,
[aux_sym_STAR] = 1,
[aux_sym_SLASH] = 1,
[aux_sym_CARET] = 1,
[aux_sym_LPAREN] = 1,
[aux_sym_RPAREN] = 1,
};
static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
@ -85,7 +85,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(2);
ACCEPT_TOKEN(sym_comment);
case 3:
ACCEPT_TOKEN(aux_sym_STR_LPAREN);
ACCEPT_TOKEN(aux_sym_LPAREN);
case 4:
if ('0' <= lookahead && lookahead <= '9')
ADVANCE(4);
@ -139,15 +139,15 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(14);
LEX_ERROR();
case 10:
ACCEPT_TOKEN(aux_sym_STR_STAR);
ACCEPT_TOKEN(aux_sym_STAR);
case 11:
ACCEPT_TOKEN(aux_sym_STR_PLUS);
ACCEPT_TOKEN(aux_sym_PLUS);
case 12:
ACCEPT_TOKEN(aux_sym_STR_DASH);
ACCEPT_TOKEN(aux_sym_DASH);
case 13:
ACCEPT_TOKEN(aux_sym_STR_SLASH);
ACCEPT_TOKEN(aux_sym_SLASH);
case 14:
ACCEPT_TOKEN(aux_sym_STR_CARET);
ACCEPT_TOKEN(aux_sym_CARET);
case 15:
START_TOKEN();
if ((lookahead == '\t') ||
@ -171,7 +171,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(14);
LEX_ERROR();
case 16:
ACCEPT_TOKEN(aux_sym_STR_RPAREN);
ACCEPT_TOKEN(aux_sym_RPAREN);
case 17:
START_TOKEN();
if ((lookahead == '\t') ||
@ -302,10 +302,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(3)),
[sym_exponent] = ACTIONS(SHIFT(3)),
[sym_group] = ACTIONS(SHIFT(3)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(4)),
[sym_number] = ACTIONS(SHIFT(3)),
[sym_variable] = ACTIONS(SHIFT(3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(4)),
},
[1] = {
[ts_builtin_sym_end] = ACTIONS(ACCEPT_INPUT()),
@ -313,21 +313,21 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
},
[2] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_program, 1)),
[aux_sym_PLUS] = ACTIONS(SHIFT(23)),
[aux_sym_DASH] = ACTIONS(SHIFT(24)),
[aux_sym_STAR] = ACTIONS(SHIFT(25)),
[aux_sym_SLASH] = ACTIONS(SHIFT(26)),
[aux_sym_CARET] = ACTIONS(SHIFT(27)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(SHIFT(23)),
[aux_sym_STR_DASH] = ACTIONS(SHIFT(24)),
[aux_sym_STR_STAR] = ACTIONS(SHIFT(25)),
[aux_sym_STR_SLASH] = ACTIONS(SHIFT(26)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(27)),
},
[3] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_PLUS] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_DASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STAR] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_SLASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_CARET] = ACTIONS(REDUCE(sym__expression, 1)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_CARET] = ACTIONS(REDUCE(sym__expression, 1)),
},
[4] = {
[sym__expression] = ACTIONS(SHIFT(5)),
@ -338,32 +338,32 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_exponent] = ACTIONS(SHIFT(6)),
[sym_group] = ACTIONS(SHIFT(6)),
[ts_builtin_sym_error] = ACTIONS(SHIFT(7)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(8)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_variable] = ACTIONS(SHIFT(6)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(8)),
},
[5] = {
[aux_sym_PLUS] = ACTIONS(SHIFT(12)),
[aux_sym_DASH] = ACTIONS(SHIFT(13)),
[aux_sym_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_RPAREN] = ACTIONS(SHIFT(22)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(SHIFT(12)),
[aux_sym_STR_DASH] = ACTIONS(SHIFT(13)),
[aux_sym_STR_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_STR_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_STR_RPAREN] = ACTIONS(SHIFT(22)),
},
[6] = {
[aux_sym_PLUS] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_DASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STAR] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_SLASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_CARET] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_RPAREN] = ACTIONS(REDUCE(sym__expression, 1)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_CARET] = ACTIONS(REDUCE(sym__expression, 1)),
[aux_sym_STR_RPAREN] = ACTIONS(REDUCE(sym__expression, 1)),
},
[7] = {
[aux_sym_RPAREN] = ACTIONS(SHIFT(22)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_RPAREN] = ACTIONS(SHIFT(22)),
},
[8] = {
[sym__expression] = ACTIONS(SHIFT(9)),
@ -374,32 +374,32 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_exponent] = ACTIONS(SHIFT(6)),
[sym_group] = ACTIONS(SHIFT(6)),
[ts_builtin_sym_error] = ACTIONS(SHIFT(10)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(8)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_variable] = ACTIONS(SHIFT(6)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(8)),
},
[9] = {
[aux_sym_PLUS] = ACTIONS(SHIFT(12)),
[aux_sym_DASH] = ACTIONS(SHIFT(13)),
[aux_sym_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_RPAREN] = ACTIONS(SHIFT(11)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(SHIFT(12)),
[aux_sym_STR_DASH] = ACTIONS(SHIFT(13)),
[aux_sym_STR_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_STR_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_STR_RPAREN] = ACTIONS(SHIFT(11)),
},
[10] = {
[aux_sym_RPAREN] = ACTIONS(SHIFT(11)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_RPAREN] = ACTIONS(SHIFT(11)),
},
[11] = {
[aux_sym_PLUS] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_CARET] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_RPAREN] = ACTIONS(REDUCE(sym_group, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_CARET] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_RPAREN] = ACTIONS(REDUCE(sym_group, 3)),
},
[12] = {
[sym__expression] = ACTIONS(SHIFT(21)),
@ -409,10 +409,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(6)),
[sym_exponent] = ACTIONS(SHIFT(6)),
[sym_group] = ACTIONS(SHIFT(6)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(8)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_variable] = ACTIONS(SHIFT(6)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(8)),
},
[13] = {
[sym__expression] = ACTIONS(SHIFT(20)),
@ -422,10 +422,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(6)),
[sym_exponent] = ACTIONS(SHIFT(6)),
[sym_group] = ACTIONS(SHIFT(6)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(8)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_variable] = ACTIONS(SHIFT(6)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(8)),
},
[14] = {
[sym__expression] = ACTIONS(SHIFT(19)),
@ -435,10 +435,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(6)),
[sym_exponent] = ACTIONS(SHIFT(6)),
[sym_group] = ACTIONS(SHIFT(6)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(8)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_variable] = ACTIONS(SHIFT(6)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(8)),
},
[15] = {
[sym__expression] = ACTIONS(SHIFT(18)),
@ -448,10 +448,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(6)),
[sym_exponent] = ACTIONS(SHIFT(6)),
[sym_group] = ACTIONS(SHIFT(6)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(8)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_variable] = ACTIONS(SHIFT(6)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(8)),
},
[16] = {
[sym__expression] = ACTIONS(SHIFT(17)),
@ -461,64 +461,64 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(6)),
[sym_exponent] = ACTIONS(SHIFT(6)),
[sym_group] = ACTIONS(SHIFT(6)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(8)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_variable] = ACTIONS(SHIFT(6)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(8)),
},
[17] = {
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_CARET] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_CARET] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
},
[18] = {
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_STR_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
},
[19] = {
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_STR_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
},
[20] = {
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_STR_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_STR_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_STR_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
},
[21] = {
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_STR_STAR] = ACTIONS(SHIFT(14)),
[aux_sym_STR_SLASH] = ACTIONS(SHIFT(15)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(16)),
[aux_sym_STR_RPAREN] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
},
[22] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_PLUS] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_CARET] = ACTIONS(REDUCE(sym_group, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE(sym_group, 3)),
[aux_sym_STR_CARET] = ACTIONS(REDUCE(sym_group, 3)),
},
[23] = {
[sym__expression] = ACTIONS(SHIFT(32)),
@ -528,10 +528,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(3)),
[sym_exponent] = ACTIONS(SHIFT(3)),
[sym_group] = ACTIONS(SHIFT(3)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(4)),
[sym_number] = ACTIONS(SHIFT(3)),
[sym_variable] = ACTIONS(SHIFT(3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(4)),
},
[24] = {
[sym__expression] = ACTIONS(SHIFT(31)),
@ -541,10 +541,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(3)),
[sym_exponent] = ACTIONS(SHIFT(3)),
[sym_group] = ACTIONS(SHIFT(3)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(4)),
[sym_number] = ACTIONS(SHIFT(3)),
[sym_variable] = ACTIONS(SHIFT(3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(4)),
},
[25] = {
[sym__expression] = ACTIONS(SHIFT(30)),
@ -554,10 +554,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(3)),
[sym_exponent] = ACTIONS(SHIFT(3)),
[sym_group] = ACTIONS(SHIFT(3)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(4)),
[sym_number] = ACTIONS(SHIFT(3)),
[sym_variable] = ACTIONS(SHIFT(3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(4)),
},
[26] = {
[sym__expression] = ACTIONS(SHIFT(29)),
@ -567,10 +567,10 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(3)),
[sym_exponent] = ACTIONS(SHIFT(3)),
[sym_group] = ACTIONS(SHIFT(3)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(4)),
[sym_number] = ACTIONS(SHIFT(3)),
[sym_variable] = ACTIONS(SHIFT(3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(4)),
},
[27] = {
[sym__expression] = ACTIONS(SHIFT(28)),
@ -580,55 +580,55 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym_quotient] = ACTIONS(SHIFT(3)),
[sym_exponent] = ACTIONS(SHIFT(3)),
[sym_group] = ACTIONS(SHIFT(3)),
[aux_sym_LPAREN] = ACTIONS(SHIFT(4)),
[sym_number] = ACTIONS(SHIFT(3)),
[sym_variable] = ACTIONS(SHIFT(3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_LPAREN] = ACTIONS(SHIFT(4)),
},
[28] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_CARET] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
[aux_sym_STR_CARET] = ACTIONS(REDUCE_FRAGILE(sym_exponent, 3)),
},
[29] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_CARET] = ACTIONS(SHIFT(27)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_quotient, 3)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(27)),
},
[30] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STAR] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_CARET] = ACTIONS(SHIFT(27)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_STAR] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_SLASH] = ACTIONS(REDUCE_FRAGILE(sym_product, 3)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(27)),
},
[31] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_STAR] = ACTIONS(SHIFT(25)),
[aux_sym_SLASH] = ACTIONS(SHIFT(26)),
[aux_sym_CARET] = ACTIONS(SHIFT(27)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_difference, 3)),
[aux_sym_STR_STAR] = ACTIONS(SHIFT(25)),
[aux_sym_STR_SLASH] = ACTIONS(SHIFT(26)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(27)),
},
[32] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_DASH] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_STAR] = ACTIONS(SHIFT(25)),
[aux_sym_SLASH] = ACTIONS(SHIFT(26)),
[aux_sym_CARET] = ACTIONS(SHIFT(27)),
[sym_comment] = ACTIONS(SHIFT_EXTRA()),
[aux_sym_STR_PLUS] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_STR_DASH] = ACTIONS(REDUCE_FRAGILE(sym_sum, 3)),
[aux_sym_STR_STAR] = ACTIONS(SHIFT(25)),
[aux_sym_STR_SLASH] = ACTIONS(SHIFT(26)),
[aux_sym_STR_CARET] = ACTIONS(SHIFT(27)),
},
};

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@ -7,52 +7,52 @@ enum {
sym__value = ts_builtin_sym_start,
sym_object,
sym_array,
aux_sym_object_repeat1,
aux_sym_array_repeat1,
aux_sym_LBRACE,
aux_sym_COLON,
aux_sym_COMMA,
aux_sym_RBRACE,
aux_sym_LBRACK,
aux_sym_RBRACK,
sym_string,
sym_number,
sym_null,
sym_true,
sym_false,
aux_sym_object_repeat1,
aux_sym_array_repeat1,
aux_sym_STR_LBRACE,
aux_sym_STR_COLON,
aux_sym_STR_COMMA,
aux_sym_STR_RBRACE,
aux_sym_STR_LBRACK,
aux_sym_STR_RBRACK,
};
static const char *ts_symbol_names[] = {
[sym__value] = "_value",
[sym_object] = "object",
[sym_array] = "array",
[aux_sym_object_repeat1] = "object_repeat1",
[aux_sym_array_repeat1] = "array_repeat1",
[ts_builtin_sym_error] = "ERROR",
[ts_builtin_sym_end] = "END",
[aux_sym_LBRACE] = "{",
[aux_sym_COLON] = ":",
[aux_sym_COMMA] = ",",
[aux_sym_RBRACE] = "}",
[aux_sym_LBRACK] = "[",
[aux_sym_RBRACK] = "]",
[sym_string] = "string",
[sym_number] = "number",
[sym_null] = "null",
[sym_true] = "true",
[sym_false] = "false",
[aux_sym_object_repeat1] = "object_repeat1",
[aux_sym_array_repeat1] = "array_repeat1",
[aux_sym_STR_LBRACE] = "STR_{",
[aux_sym_STR_COLON] = "STR_:",
[aux_sym_STR_COMMA] = "STR_,",
[aux_sym_STR_RBRACE] = "STR_}",
[aux_sym_STR_LBRACK] = "STR_[",
[aux_sym_STR_RBRACK] = "STR_]",
};
static const int ts_hidden_symbol_flags[SYMBOL_COUNT] = {
[sym__value] = 1,
[aux_sym_object_repeat1] = 1,
[aux_sym_array_repeat1] = 1,
[aux_sym_STR_LBRACE] = 1,
[aux_sym_STR_COLON] = 1,
[aux_sym_STR_COMMA] = 1,
[aux_sym_STR_RBRACE] = 1,
[aux_sym_STR_LBRACK] = 1,
[aux_sym_STR_RBRACK] = 1,
[aux_sym_LBRACE] = 1,
[aux_sym_COLON] = 1,
[aux_sym_COMMA] = 1,
[aux_sym_RBRACE] = 1,
[aux_sym_LBRACK] = 1,
[aux_sym_RBRACK] = 1,
};
static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
@ -127,7 +127,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(8);
ACCEPT_TOKEN(sym_number);
case 9:
ACCEPT_TOKEN(aux_sym_STR_LBRACK);
ACCEPT_TOKEN(aux_sym_LBRACK);
case 10:
if (lookahead == 'a')
ADVANCE(11);
@ -175,7 +175,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
case 22:
ACCEPT_TOKEN(sym_true);
case 23:
ACCEPT_TOKEN(aux_sym_STR_LBRACE);
ACCEPT_TOKEN(aux_sym_LBRACE);
case 24:
START_TOKEN();
if (lookahead == 0)
@ -201,7 +201,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(27);
LEX_ERROR();
case 27:
ACCEPT_TOKEN(aux_sym_STR_RBRACE);
ACCEPT_TOKEN(aux_sym_RBRACE);
case 28:
START_TOKEN();
if ((lookahead == '\t') ||
@ -227,7 +227,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(23);
LEX_ERROR();
case 29:
ACCEPT_TOKEN(aux_sym_STR_RBRACK);
ACCEPT_TOKEN(aux_sym_RBRACK);
case 30:
START_TOKEN();
if ((lookahead == '\t') ||
@ -241,7 +241,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(29);
LEX_ERROR();
case 31:
ACCEPT_TOKEN(aux_sym_STR_COMMA);
ACCEPT_TOKEN(aux_sym_COMMA);
case 32:
START_TOKEN();
if ((lookahead == '\t') ||
@ -275,7 +275,7 @@ static TSTree *ts_lex(TSLexer *lexer, TSStateId lex_state) {
ADVANCE(35);
LEX_ERROR();
case 35:
ACCEPT_TOKEN(aux_sym_STR_COLON);
ACCEPT_TOKEN(aux_sym_COLON);
case 36:
START_TOKEN();
if ((lookahead == '\t') ||
@ -385,8 +385,8 @@ static TSStateId ts_lex_states[STATE_COUNT] = {
[16] = 32,
[17] = 30,
[18] = 33,
[19] = 34,
[20] = 30,
[19] = 30,
[20] = 34,
[21] = 1,
[22] = 33,
[23] = 33,
@ -398,8 +398,8 @@ static TSStateId ts_lex_states[STATE_COUNT] = {
[29] = 33,
[30] = 33,
[31] = 33,
[32] = 34,
[33] = 33,
[32] = 33,
[33] = 34,
[34] = 1,
[35] = 33,
[36] = 36,
@ -425,8 +425,8 @@ static TSStateId ts_lex_states[STATE_COUNT] = {
[56] = 24,
[57] = 24,
[58] = 33,
[59] = 34,
[60] = 24,
[59] = 24,
[60] = 34,
[61] = 1,
[62] = 33,
[63] = 36,
@ -445,13 +445,13 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[sym__value] = ACTIONS(SHIFT(1)),
[sym_object] = ACTIONS(SHIFT(2)),
[sym_array] = ACTIONS(SHIFT(2)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(3)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(4)),
[sym_string] = ACTIONS(SHIFT(2)),
[sym_number] = ACTIONS(SHIFT(2)),
[sym_null] = ACTIONS(SHIFT(2)),
[sym_true] = ACTIONS(SHIFT(2)),
[sym_false] = ACTIONS(SHIFT(2)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(3)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(4)),
},
[1] = {
[ts_builtin_sym_end] = ACTIONS(ACCEPT_INPUT()),
@ -461,208 +461,208 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
},
[3] = {
[ts_builtin_sym_error] = ACTIONS(SHIFT(58)),
[sym_string] = ACTIONS(SHIFT(59)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(60)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(59)),
[sym_string] = ACTIONS(SHIFT(60)),
},
[4] = {
[sym__value] = ACTIONS(SHIFT(5)),
[sym_object] = ACTIONS(SHIFT(6)),
[sym_array] = ACTIONS(SHIFT(6)),
[ts_builtin_sym_error] = ACTIONS(SHIFT(5)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(8)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(9)),
[sym_string] = ACTIONS(SHIFT(6)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_null] = ACTIONS(SHIFT(6)),
[sym_true] = ACTIONS(SHIFT(6)),
[sym_false] = ACTIONS(SHIFT(6)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(8)),
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(9)),
},
[5] = {
[aux_sym_array_repeat1] = ACTIONS(SHIFT(55)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(56)),
[aux_sym_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(56)),
},
[6] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym__value, 1)),
},
[7] = {
[ts_builtin_sym_error] = ACTIONS(SHIFT(18)),
[sym_string] = ACTIONS(SHIFT(19)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(20)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(19)),
[sym_string] = ACTIONS(SHIFT(20)),
},
[8] = {
[sym__value] = ACTIONS(SHIFT(10)),
[sym_object] = ACTIONS(SHIFT(6)),
[sym_array] = ACTIONS(SHIFT(6)),
[ts_builtin_sym_error] = ACTIONS(SHIFT(10)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(8)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(11)),
[sym_string] = ACTIONS(SHIFT(6)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_null] = ACTIONS(SHIFT(6)),
[sym_true] = ACTIONS(SHIFT(6)),
[sym_false] = ACTIONS(SHIFT(6)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(8)),
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(11)),
},
[9] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_array, 2)),
},
[10] = {
[aux_sym_array_repeat1] = ACTIONS(SHIFT(12)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(14)),
[aux_sym_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(14)),
},
[11] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_array, 2)),
},
[12] = {
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(17)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(17)),
},
[13] = {
[sym__value] = ACTIONS(SHIFT(15)),
[sym_object] = ACTIONS(SHIFT(6)),
[sym_array] = ACTIONS(SHIFT(6)),
[ts_builtin_sym_error] = ACTIONS(SHIFT(15)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(8)),
[sym_string] = ACTIONS(SHIFT(6)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_null] = ACTIONS(SHIFT(6)),
[sym_true] = ACTIONS(SHIFT(6)),
[sym_false] = ACTIONS(SHIFT(6)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(8)),
},
[14] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_array, 3)),
},
[15] = {
[aux_sym_array_repeat1] = ACTIONS(SHIFT(16)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(aux_sym_array_repeat1, 2)),
[aux_sym_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(aux_sym_array_repeat1, 2)),
},
[16] = {
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(aux_sym_array_repeat1, 3)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(aux_sym_array_repeat1, 3)),
},
[17] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_array, 4)),
},
[18] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(52)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(53)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(53)),
},
[19] = {
[aux_sym_STR_COLON] = ACTIONS(SHIFT(21)),
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_object, 2)),
},
[20] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_COLON] = ACTIONS(SHIFT(21)),
},
[21] = {
[sym__value] = ACTIONS(SHIFT(22)),
[sym_object] = ACTIONS(SHIFT(23)),
[sym_array] = ACTIONS(SHIFT(23)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(25)),
[sym_string] = ACTIONS(SHIFT(23)),
[sym_number] = ACTIONS(SHIFT(23)),
[sym_null] = ACTIONS(SHIFT(23)),
[sym_true] = ACTIONS(SHIFT(23)),
[sym_false] = ACTIONS(SHIFT(23)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(25)),
},
[22] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(49)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(50)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(50)),
},
[23] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym__value, 1)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym__value, 1)),
},
[24] = {
[ts_builtin_sym_error] = ACTIONS(SHIFT(31)),
[sym_string] = ACTIONS(SHIFT(32)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(33)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(32)),
[sym_string] = ACTIONS(SHIFT(33)),
},
[25] = {
[sym__value] = ACTIONS(SHIFT(26)),
[sym_object] = ACTIONS(SHIFT(6)),
[sym_array] = ACTIONS(SHIFT(6)),
[ts_builtin_sym_error] = ACTIONS(SHIFT(26)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(8)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(27)),
[sym_string] = ACTIONS(SHIFT(6)),
[sym_number] = ACTIONS(SHIFT(6)),
[sym_null] = ACTIONS(SHIFT(6)),
[sym_true] = ACTIONS(SHIFT(6)),
[sym_false] = ACTIONS(SHIFT(6)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(7)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(8)),
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(27)),
},
[26] = {
[aux_sym_array_repeat1] = ACTIONS(SHIFT(28)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(29)),
[aux_sym_COMMA] = ACTIONS(SHIFT(13)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(29)),
},
[27] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_array, 2)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_array, 2)),
},
[28] = {
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(30)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(30)),
},
[29] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_array, 3)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_array, 3)),
},
[30] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_array, 4)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_array, 4)),
},
[31] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(46)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(47)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(47)),
},
[32] = {
[aux_sym_STR_COLON] = ACTIONS(SHIFT(34)),
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_object, 2)),
},
[33] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_COLON] = ACTIONS(SHIFT(34)),
},
[34] = {
[sym__value] = ACTIONS(SHIFT(35)),
[sym_object] = ACTIONS(SHIFT(23)),
[sym_array] = ACTIONS(SHIFT(23)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(25)),
[sym_string] = ACTIONS(SHIFT(23)),
[sym_number] = ACTIONS(SHIFT(23)),
[sym_null] = ACTIONS(SHIFT(23)),
[sym_true] = ACTIONS(SHIFT(23)),
[sym_false] = ACTIONS(SHIFT(23)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(25)),
},
[35] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(36)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(38)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(38)),
},
[36] = {
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(45)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(45)),
},
[37] = {
[ts_builtin_sym_error] = ACTIONS(SHIFT(39)),
@ -670,86 +670,86 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
},
[38] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_object, 5)),
},
[39] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(44)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 2)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 2)),
},
[40] = {
[aux_sym_STR_COLON] = ACTIONS(SHIFT(41)),
[aux_sym_COLON] = ACTIONS(SHIFT(41)),
},
[41] = {
[sym__value] = ACTIONS(SHIFT(42)),
[sym_object] = ACTIONS(SHIFT(23)),
[sym_array] = ACTIONS(SHIFT(23)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(25)),
[sym_string] = ACTIONS(SHIFT(23)),
[sym_number] = ACTIONS(SHIFT(23)),
[sym_null] = ACTIONS(SHIFT(23)),
[sym_true] = ACTIONS(SHIFT(23)),
[sym_false] = ACTIONS(SHIFT(23)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(25)),
},
[42] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(43)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 4)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 4)),
},
[43] = {
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 5)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 5)),
},
[44] = {
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 3)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(aux_sym_object_repeat1, 3)),
},
[45] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_object, 6)),
},
[46] = {
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(48)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(48)),
},
[47] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_object, 3)),
},
[48] = {
[aux_sym_object_repeat1] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_STR_RBRACE] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_RBRACE] = ACTIONS(REDUCE(sym_object, 4)),
},
[49] = {
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(51)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(51)),
},
[50] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 5)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_object, 5)),
},
[51] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 6)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_object, 6)),
},
[52] = {
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(54)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(54)),
},
[53] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 3)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_object, 3)),
},
[54] = {
[aux_sym_array_repeat1] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_STR_COMMA] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_STR_RBRACK] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_COMMA] = ACTIONS(REDUCE(sym_object, 4)),
[aux_sym_RBRACK] = ACTIONS(REDUCE(sym_object, 4)),
},
[55] = {
[aux_sym_STR_RBRACK] = ACTIONS(SHIFT(57)),
[aux_sym_RBRACK] = ACTIONS(SHIFT(57)),
},
[56] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_array, 3)),
@ -759,34 +759,34 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
},
[58] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(66)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(67)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(67)),
},
[59] = {
[aux_sym_STR_COLON] = ACTIONS(SHIFT(61)),
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_object, 2)),
},
[60] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_object, 2)),
[aux_sym_COLON] = ACTIONS(SHIFT(61)),
},
[61] = {
[sym__value] = ACTIONS(SHIFT(62)),
[sym_object] = ACTIONS(SHIFT(23)),
[sym_array] = ACTIONS(SHIFT(23)),
[aux_sym_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_LBRACK] = ACTIONS(SHIFT(25)),
[sym_string] = ACTIONS(SHIFT(23)),
[sym_number] = ACTIONS(SHIFT(23)),
[sym_null] = ACTIONS(SHIFT(23)),
[sym_true] = ACTIONS(SHIFT(23)),
[sym_false] = ACTIONS(SHIFT(23)),
[aux_sym_STR_LBRACE] = ACTIONS(SHIFT(24)),
[aux_sym_STR_LBRACK] = ACTIONS(SHIFT(25)),
},
[62] = {
[aux_sym_object_repeat1] = ACTIONS(SHIFT(63)),
[aux_sym_STR_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(64)),
[aux_sym_COMMA] = ACTIONS(SHIFT(37)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(64)),
},
[63] = {
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(65)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(65)),
},
[64] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_object, 5)),
@ -795,7 +795,7 @@ static const TSParseAction *ts_parse_actions[STATE_COUNT][SYMBOL_COUNT] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_object, 6)),
},
[66] = {
[aux_sym_STR_RBRACE] = ACTIONS(SHIFT(68)),
[aux_sym_RBRACE] = ACTIONS(SHIFT(68)),
},
[67] = {
[ts_builtin_sym_end] = ACTIONS(REDUCE(sym_object, 3)),