Silence missing initializer warnings in compiler unit tests
This commit is contained in:
parent
6073d9c0e8
commit
a09409900f
4 changed files with 19 additions and 19 deletions
|
|
@ -46,7 +46,7 @@ describe("ParseItemSetBuilder", []() {
|
|||
{Symbol(15, Symbol::Terminal), 0, AssociativityNone},
|
||||
})
|
||||
}),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto production = [&](int variable_index, int production_index) -> const Production & {
|
||||
return grammar.variables[variable_index].productions[production_index];
|
||||
|
|
@ -97,7 +97,7 @@ describe("ParseItemSetBuilder", []() {
|
|||
}),
|
||||
Production({})
|
||||
}),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto production = [&](int variable_index, int production_index) -> const Production & {
|
||||
return grammar.variables[variable_index].productions[production_index];
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@ describe("expand_repeats", []() {
|
|||
it("replaces repeat rules with pairs of recursive rules", [&]() {
|
||||
InitialSyntaxGrammar grammar{{
|
||||
Variable("rule0", VariableTypeNamed, repeat1(i_token(0))),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = expand_repeats(grammar);
|
||||
|
||||
|
|
@ -32,7 +32,7 @@ describe("expand_repeats", []() {
|
|||
i_token(10),
|
||||
repeat1(i_token(11)),
|
||||
})),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = expand_repeats(grammar);
|
||||
|
||||
|
|
@ -54,7 +54,7 @@ describe("expand_repeats", []() {
|
|||
i_token(10),
|
||||
repeat1(i_token(11))
|
||||
})),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = expand_repeats(grammar);
|
||||
|
||||
|
|
@ -80,7 +80,7 @@ describe("expand_repeats", []() {
|
|||
i_token(3),
|
||||
repeat1(i_token(4))
|
||||
})),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = expand_repeats(grammar);
|
||||
|
||||
|
|
@ -106,7 +106,7 @@ describe("expand_repeats", []() {
|
|||
repeat1(i_token(10)),
|
||||
repeat1(i_token(11)),
|
||||
})),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = expand_repeats(grammar);
|
||||
|
||||
|
|
@ -130,7 +130,7 @@ describe("expand_repeats", []() {
|
|||
InitialSyntaxGrammar grammar{{
|
||||
Variable("rule0", VariableTypeNamed, repeat1(i_token(10))),
|
||||
Variable("rule1", VariableTypeNamed, repeat1(i_token(11))),
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = expand_repeats(grammar);
|
||||
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@ describe("extract_tokens", []() {
|
|||
Variable("rule_B", VariableTypeNamed, pattern("ij+")),
|
||||
Variable("rule_C", VariableTypeNamed, choice({ str("kl"), blank() })),
|
||||
Variable("rule_D", VariableTypeNamed, repeat1(i_sym(3)))
|
||||
}, {}, {}});
|
||||
}, {}, {}, {}});
|
||||
|
||||
InitialSyntaxGrammar &syntax_grammar = get<0>(result);
|
||||
LexicalGrammar &lexical_grammar = get<1>(result);
|
||||
|
|
@ -92,7 +92,7 @@ describe("extract_tokens", []() {
|
|||
i_sym(0),
|
||||
str("ab"),
|
||||
})),
|
||||
}, {}, {}});
|
||||
}, {}, {}, {}});
|
||||
|
||||
InitialSyntaxGrammar &syntax_grammar = get<0>(result);
|
||||
LexicalGrammar &lexical_grammar = get<1>(result);
|
||||
|
|
@ -111,7 +111,7 @@ describe("extract_tokens", []() {
|
|||
Variable("rule_A", VariableTypeNamed, seq({ i_sym(1), str("ab") })),
|
||||
Variable("rule_B", VariableTypeNamed, str("cd")),
|
||||
Variable("rule_C", VariableTypeNamed, seq({ str("ef"), str("cd") })),
|
||||
}, {}, {}});
|
||||
}, {}, {}, {}});
|
||||
|
||||
InitialSyntaxGrammar &syntax_grammar = get<0>(result);
|
||||
LexicalGrammar &lexical_grammar = get<1>(result);
|
||||
|
|
@ -151,7 +151,7 @@ describe("extract_tokens", []() {
|
|||
}, {
|
||||
str("y"),
|
||||
pattern("\\s+"),
|
||||
}, {}});
|
||||
}, {}, {}});
|
||||
|
||||
AssertThat(get<2>(result), Equals(CompileError::none()));
|
||||
|
||||
|
|
@ -168,7 +168,7 @@ describe("extract_tokens", []() {
|
|||
Variable("rule_B", VariableTypeNamed, str("y")),
|
||||
}, {
|
||||
str("y"),
|
||||
}, {}});
|
||||
}, {}, {}});
|
||||
|
||||
AssertThat(get<2>(result), Equals(CompileError::none()));
|
||||
AssertThat(get<1>(result).separators.size(), Equals<size_t>(0));
|
||||
|
|
@ -182,7 +182,7 @@ describe("extract_tokens", []() {
|
|||
Variable("rule_C", VariableTypeNamed, str("z")),
|
||||
}, {
|
||||
i_sym(2),
|
||||
}, {}});
|
||||
}, {}, {}});
|
||||
|
||||
AssertThat(get<2>(result), Equals(CompileError::none()));
|
||||
|
||||
|
|
@ -197,7 +197,7 @@ describe("extract_tokens", []() {
|
|||
auto result = extract_tokens(InternedGrammar{{
|
||||
Variable("rule_A", VariableTypeNamed, seq({ str("x"), i_sym(1) })),
|
||||
Variable("rule_B", VariableTypeNamed, seq({ str("y"), str("z") })),
|
||||
}, { i_sym(1) }, {}});
|
||||
}, { i_sym(1) }, {}, {}});
|
||||
|
||||
AssertThat(get<2>(result), !Equals(CompileError::none()));
|
||||
AssertThat(get<2>(result), Equals(
|
||||
|
|
@ -209,7 +209,7 @@ describe("extract_tokens", []() {
|
|||
auto result = extract_tokens(InternedGrammar{{
|
||||
Variable("rule_A", VariableTypeNamed, str("x")),
|
||||
Variable("rule_B", VariableTypeNamed, str("y")),
|
||||
}, { choice({ i_sym(1), blank() }) }, {}});
|
||||
}, { choice({ i_sym(1), blank() }) }, {}, {}});
|
||||
|
||||
AssertThat(get<2>(result), !Equals(CompileError::none()));
|
||||
AssertThat(get<2>(result), Equals(CompileError(
|
||||
|
|
|
|||
|
|
@ -17,7 +17,7 @@ describe("intern_symbols", []() {
|
|||
{ "x", choice({ sym("y"), sym("_z") }) },
|
||||
{ "y", sym("_z") },
|
||||
{ "_z", str("stuff") }
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = intern_symbols(grammar);
|
||||
|
||||
|
|
@ -33,7 +33,7 @@ describe("intern_symbols", []() {
|
|||
it("returns an error", []() {
|
||||
Grammar grammar{{
|
||||
{ "x", sym("y") },
|
||||
}, {}, {}};
|
||||
}, {}, {}, {}};
|
||||
|
||||
auto result = intern_symbols(grammar);
|
||||
|
||||
|
|
@ -48,7 +48,7 @@ describe("intern_symbols", []() {
|
|||
{ "z", str("stuff") }
|
||||
}, {
|
||||
sym("z")
|
||||
}, {}};
|
||||
}, {}, {}};
|
||||
|
||||
auto result = intern_symbols(grammar);
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue