Trim whitespace

This commit is contained in:
Max Brunsfeld 2014-04-25 22:17:23 -07:00
parent 801f4bd0a8
commit 93df5579b4
15 changed files with 78 additions and 78 deletions

View file

@ -40,14 +40,14 @@ describe("computing FIRST sets", []() {
i_token(0),
i_token(1) }),
i_sym(0) });
Grammar grammar({
{ "rule0", seq({
i_token(2),
i_token(3),
i_token(4) }) }
});
AssertThat(first_set(rule, grammar), Equals(set<ISymbol>({
ISymbol(0, SymbolOptionToken),
ISymbol(2, SymbolOptionToken),
@ -58,7 +58,7 @@ describe("computing FIRST sets", []() {
auto rule = seq({
i_sym(0),
i_token(1) });
Grammar grammar({
{ "rule0", choice({
i_token(0),
@ -80,7 +80,7 @@ describe("computing FIRST sets", []() {
i_token(11),
}) },
});
auto rule = i_sym(0);
AssertThat(first_set(rule, grammar), Equals(set<ISymbol>({

View file

@ -17,19 +17,19 @@ namespace tree_sitter {
else
return CharacterSet(ranges).complement().copy();
}
rule_ptr i_sym(size_t index) {
    // Factory for a plain (non-terminal) interned symbol reference.
    auto symbol = make_shared<rules::ISymbol>(index);
    return symbol;
}
rule_ptr i_aux_sym(size_t index) {
    // Factory for an auxiliary (generated, hidden) interned symbol.
    auto symbol = make_shared<rules::ISymbol>(index, SymbolOptionAuxiliary);
    return symbol;
}
rule_ptr i_token(size_t index) {
    // Factory for an interned symbol flagged as a token (terminal).
    auto symbol = make_shared<rules::ISymbol>(index, SymbolOptionToken);
    return symbol;
}
rule_ptr i_aux_token(size_t index) {
    // Factory for an interned symbol that is both auxiliary and a token;
    // the two option flags are combined before construction.
    auto options = SymbolOption(SymbolOptionAuxiliary | SymbolOptionToken);
    return make_shared<rules::ISymbol>(index, options);
}

View file

@ -13,7 +13,7 @@ describe("expanding repeat rules in a grammar", []() {
PreparedGrammar grammar({
{ "rule0", repeat(i_token(0)) },
}, {});
AssertThat(expand_repeats(grammar), Equals(PreparedGrammar({
{ "rule0", i_aux_sym(0) },
}, {
@ -24,12 +24,12 @@ describe("expanding repeat rules in a grammar", []() {
blank() }) },
})));
});
it("replaces repeats inside of sequences", [&]() {
PreparedGrammar grammar({
{ "rule0", seq({ i_token(10), repeat(i_token(11)) }) },
}, {});
AssertThat(expand_repeats(grammar), Equals(PreparedGrammar({
{ "rule0", seq({ i_token(10), i_aux_sym(0) }) },
}, {
@ -38,12 +38,12 @@ describe("expanding repeat rules in a grammar", []() {
blank() }) },
})));
});
it("replaces repeats inside of choices", [&]() {
PreparedGrammar grammar({
{ "rule0", choice({ i_token(10), repeat(i_token(11)) }) },
}, {});
AssertThat(expand_repeats(grammar), Equals(PreparedGrammar({
{ "rule0", choice({ i_token(10), i_aux_sym(0) }) },
}, {
@ -52,12 +52,12 @@ describe("expanding repeat rules in a grammar", []() {
blank() }) },
})));
});
it("can replace multiple repeats in the same rule", [&]() {
PreparedGrammar grammar({
{ "rule0", seq({ repeat(i_token(10)), repeat(i_token(11)) }) },
}, {});
AssertThat(expand_repeats(grammar), Equals(PreparedGrammar({
{ "rule0", seq({ i_aux_sym(0), i_aux_sym(1) }) },
}, {
@ -73,13 +73,13 @@ describe("expanding repeat rules in a grammar", []() {
blank() }) },
})));
});
it("can replace repeats in multiple rules", [&]() {
PreparedGrammar grammar({
{ "rule0", repeat(i_token(10)) },
{ "rule1", repeat(i_token(11)) },
}, {});
AssertThat(expand_repeats(grammar), Equals(PreparedGrammar({
{ "rule0", i_aux_sym(0) },
{ "rule1", i_aux_sym(1) },

View file

@ -13,39 +13,39 @@ describe("extracting tokens from a grammar", []() {
pair<PreparedGrammar, PreparedGrammar> result = extract_tokens(PreparedGrammar({
{ "rule0", seq({ str("ab"), i_sym(0) }) }
}, {}));
AssertThat(result.first, Equals(PreparedGrammar({
{ "rule0", seq({ i_aux_token(0), i_sym(0) }) }
}, {})));
AssertThat(result.second, Equals(PreparedGrammar({}, {
{ "token0", str("ab") },
})));
});
it("moves patterns into the lexical grammar", [&]() {
pair<PreparedGrammar, PreparedGrammar> result = extract_tokens(PreparedGrammar({
{ "rule0", seq({ pattern("a+"), i_sym(0) }) }
}, {}));
AssertThat(result.first, Equals(PreparedGrammar({
{ "rule0", seq({ i_aux_token(0), i_sym(0) }) }
}, {})));
AssertThat(result.second, Equals(PreparedGrammar({}, {
{ "token0", pattern("a+") },
})));
});
it("does not extract blanks into tokens", [&]() {
pair<PreparedGrammar, PreparedGrammar> result = extract_tokens(Grammar({
{ "rule1", choice({ i_sym(0), blank() }) },
}));
AssertThat(result.first, Equals(PreparedGrammar({
{ "rule1", choice({ i_sym(0), blank() }) },
}, {})));
AssertThat(result.second, Equals(PreparedGrammar({}, {})));
});
@ -53,43 +53,43 @@ describe("extracting tokens from a grammar", []() {
pair<PreparedGrammar, PreparedGrammar> result = extract_tokens(PreparedGrammar({
{ "rule0", seq({ str("ab"), i_sym(0), str("ab") }) },
}, {}));
AssertThat(result.first, Equals(PreparedGrammar({
{ "rule0", seq({ i_aux_token(0), i_sym(0), i_aux_token(0) }) }
}, {})));
AssertThat(result.second, Equals(PreparedGrammar({}, {
{ "token0", str("ab") },
})));
});
it("moves entire rules into the lexical grammar when possible, updating referencing symbols", [&]() {
auto result = extract_tokens(PreparedGrammar({
{ "rule0", i_sym(1) },
{ "rule1", pattern("a|b") },
}, {}));
AssertThat(result.first, Equals(PreparedGrammar({
{ "rule0", i_token(0) }
}, {})));
AssertThat(result.second, Equals(PreparedGrammar({
{ "rule1", pattern("a|b") },
}, {})));
});
it("updates symbols whose indices need to change due to deleted rules", [&]() {
auto result = extract_tokens(PreparedGrammar({
{ "rule0", str("ab") },
{ "rule1", i_sym(0) },
{ "rule2", i_sym(1) },
}, {}));
AssertThat(result.first, Equals(PreparedGrammar({
{ "rule1", i_token(0) },
{ "rule2", i_sym(0) },
}, {})));
AssertThat(result.second, Equals(PreparedGrammar({
{ "rule0", str("ab") },
}, {})));

View file

@ -18,7 +18,7 @@ describe("interning symbols in a grammar", []() {
});
auto result = intern_symbols(grammar);
AssertThat((bool)result.second, IsFalse());
AssertThat(result.first, Equals(PreparedGrammar({
{ "x", choice({ i_sym(1), i_sym(2) }) },
@ -26,15 +26,15 @@ describe("interning symbols in a grammar", []() {
{ "z", str("stuff") },
}, {})));
});
describe("when there are symbols that reference undefined rules", [&]() {
it("returns an error", []() {
Grammar grammar({
{ "x", sym("y") },
});
auto result = intern_symbols(grammar);
AssertThat(result.second->message(), Equals("Undefined rule 'y'"));
});
});

View file

@ -17,7 +17,7 @@ describe("parsing regex pattern rules", []() {
character({ 'c' })
})));
});
it("parses wildcard '.' characters", [&]() {
Pattern rule(".");
AssertThat(
@ -121,7 +121,7 @@ describe("parsing regex pattern rules", []() {
character({ '(' }),
character({ 'b' })
})));
Pattern rule2("a\\.");
AssertThat(
rule2.to_rule_tree(),