Add token helper for building token rules

Now you can specify the structure of tokens using
all of the rule functions, not just `str` and `pattern`.
This commit is contained in:
Max Brunsfeld 2014-05-01 12:43:29 -07:00
parent d685edf015
commit 6d40dcf881
8 changed files with 59 additions and 12 deletions

View file

@@ -34,14 +34,14 @@ describe("syntactic item set transitions", [&]() {
{ "A", blank() },
{ "B", i_token(21) },
}, {});
it("computes the closure of the new item sets", [&]() {
ParseItemSet set1({
ParseItem(Symbol(0), seq({ i_token(22), i_sym(1) }), 3, Symbol(23, SymbolOptionToken)),
});
SymTransitions sym_transitions;
AssertThat(sym_transitions(set1, grammar), Equals(map<Symbol, ParseItemSet>({
{ Symbol(22, SymbolOptionToken), ParseItemSet({
ParseItem(Symbol(0), i_sym(1), 4, Symbol(23, SymbolOptionToken)),

View file

@@ -36,6 +36,22 @@ describe("extracting tokens from a grammar", []() {
})));
});
it("moves other rules marked as tokens into the lexical grammar", [&]() {
pair<PreparedGrammar, PreparedGrammar> result = extract_tokens(PreparedGrammar({
{ "rule0", seq({
token(choice({ str("a"), str("b") })),
i_sym(0) }) }
}, {}));
AssertThat(result.first, Equals(PreparedGrammar({
{ "rule0", seq({ i_aux_token(0), i_sym(0) }) }
}, {})));
AssertThat(result.second, Equals(PreparedGrammar({}, {
{ "token0", token(choice({ str("a"), str("b") })) },
})));
});
it("does not extract blanks into tokens", [&]() {
pair<PreparedGrammar, PreparedGrammar> result = extract_tokens(PreparedGrammar({
{ "rule1", choice({ i_sym(0), blank() }) },
@@ -66,6 +82,7 @@ describe("extracting tokens from a grammar", []() {
auto result = extract_tokens(PreparedGrammar({
{ "rule0", i_sym(1) },
{ "rule1", pattern("a|b") },
{ "rule2", token(seq({ str("a"), str("b") })) },
}, {}));
AssertThat(result.first, Equals(PreparedGrammar({
@@ -74,6 +91,7 @@ describe("extracting tokens from a grammar", []() {
AssertThat(result.second, Equals(PreparedGrammar({
{ "rule1", pattern("a|b") },
{ "rule2", token(seq({ str("a"), str("b") })) },
}, {})));
});