Rename extra_tokens -> extra_symbols

Max Brunsfeld 2019-10-21 17:26:01 -07:00
parent fcaabea0cf
commit 0cceca7b4e
11 changed files with 33 additions and 33 deletions

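The diff below renames the extra_tokens field on the grammar structs to extra_symbols; as the is_non_terminal() checks in the diff show, the list can hold non-terminal symbols as well as tokens, so "symbols" is the more accurate name. A minimal Rust sketch of how the renamed field reads after this commit, using simplified stand-in types rather than the real tree-sitter definitions:

// Hedged sketch: Symbol and SyntaxGrammar below are simplified stand-ins,
// not the real tree-sitter definitions.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Symbol {
    Terminal(usize),
    NonTerminal(usize),
}

impl Symbol {
    fn is_non_terminal(&self) -> bool {
        matches!(self, Symbol::NonTerminal(_))
    }
}

struct SyntaxGrammar {
    // Previously named `extra_tokens`; renamed because extras can be
    // non-terminal symbols, not just tokens.
    extra_symbols: Vec<Symbol>,
}

fn main() {
    let grammar = SyntaxGrammar {
        extra_symbols: vec![Symbol::Terminal(0), Symbol::NonTerminal(1)],
    };
    // Mirrors the filtering done in the parse-table builder hunk below.
    let non_terminal_extras: Vec<&Symbol> = grammar
        .extra_symbols
        .iter()
        .filter(|s| s.is_non_terminal())
        .collect();
    println!("{:?}", non_terminal_extras);
}
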
View file

@@ -76,7 +76,7 @@ impl<'a> ParseTableBuilder<'a> {
let mut non_terminal_extra_item_sets_by_first_terminal = BTreeMap::new();
for extra_non_terminal in self
.syntax_grammar
- .extra_tokens
+ .extra_symbols
.iter()
.filter(|s| s.is_non_terminal())
{
@@ -336,7 +336,7 @@ impl<'a> ParseTableBuilder<'a> {
// are added to every state except for those at the ends of non-terminal
// extras.
if !is_end_of_non_terminal_extra {
- for extra_token in &self.syntax_grammar.extra_tokens {
+ for extra_token in &self.syntax_grammar.extra_symbols {
if extra_token.is_non_terminal() {
state
.nonterminal_entries
@@ -843,7 +843,7 @@ fn populate_following_tokens(
}
}
}
- for extra in &grammar.extra_tokens {
+ for extra in &grammar.extra_symbols {
if extra.is_terminal() {
for entry in result.iter_mut() {
entry.insert(*extra);

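The populate_following_tokens hunk above inserts every terminal extra into each following-token set, since an extra token may appear after any other token. A self-contained sketch of that pattern, where TokenSet is an assumed stand-in for the project's real token-set type:

// Hedged sketch of the pattern above; `TokenSet` is an assumed stand-in,
// not the actual tree-sitter implementation.
use std::collections::BTreeSet;

type TokenSet = BTreeSet<usize>;

fn add_terminal_extras(following_tokens: &mut [TokenSet], terminal_extras: &[usize]) {
    for &extra in terminal_extras {
        // An extra token can follow any token, so it joins every set.
        for entry in following_tokens.iter_mut() {
            entry.insert(extra);
        }
    }
}

fn main() {
    let mut sets = vec![TokenSet::new(), TokenSet::new()];
    add_terminal_extras(&mut sets, &[7]);
    assert!(sets.iter().all(|set| set.contains(&7)));
}
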
View file

@@ -23,7 +23,7 @@ pub(crate) struct Variable {
pub(crate) struct InputGrammar {
pub name: String,
pub variables: Vec<Variable>,
- pub extra_tokens: Vec<Rule>,
+ pub extra_symbols: Vec<Rule>,
pub expected_conflicts: Vec<Vec<String>>,
pub external_tokens: Vec<Rule>,
pub variables_to_inline: Vec<String>,
@@ -87,7 +87,7 @@ pub(crate) struct ExternalToken {
#[derive(Debug, Default)]
pub(crate) struct SyntaxGrammar {
pub variables: Vec<SyntaxVariable>,
- pub extra_tokens: Vec<Symbol>,
+ pub extra_symbols: Vec<Symbol>,
pub expected_conflicts: Vec<Vec<Symbol>>,
pub external_tokens: Vec<ExternalToken>,
pub supertype_symbols: Vec<Symbol>,

View file

@@ -689,7 +689,7 @@ mod tests {
fn test_node_types_simple() {
let node_types = get_node_types(InputGrammar {
name: String::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),
@@ -775,7 +775,7 @@ mod tests {
fn test_node_types_with_supertypes() {
let node_types = get_node_types(InputGrammar {
name: String::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),
@@ -862,7 +862,7 @@ mod tests {
fn test_node_types_for_children_without_fields() {
let node_types = get_node_types(InputGrammar {
name: String::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),
@@ -960,7 +960,7 @@ mod tests {
fn test_node_types_for_aliased_nodes() {
let node_types = get_node_types(InputGrammar {
name: String::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),
@@ -1036,7 +1036,7 @@ mod tests {
fn test_node_types_with_multiple_valued_fields() {
let node_types = get_node_types(InputGrammar {
name: String::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),

View file

@@ -87,7 +87,7 @@ pub(crate) fn parse_grammar(input: &str) -> Result<InputGrammar> {
})
}
- let extra_tokens = grammar_json
+ let extra_symbols = grammar_json
.extras
.unwrap_or(Vec::new())
.into_iter()
@@ -107,7 +107,7 @@ pub(crate) fn parse_grammar(input: &str) -> Result<InputGrammar> {
name: grammar_json.name,
word_token: grammar_json.word,
variables,
- extra_tokens,
+ extra_symbols,
expected_conflicts,
external_tokens,
supertype_symbols,

View file

@@ -283,7 +283,7 @@ mod tests {
fn build_grammar(variables: Vec<Variable>) -> ExtractedSyntaxGrammar {
ExtractedSyntaxGrammar {
variables,
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),

View file

@@ -146,7 +146,7 @@ mod tests {
}],
},
],
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),
supertype_symbols: Vec::new(),

View file

@@ -90,13 +90,13 @@ pub(super) fn extract_tokens(
.collect();
let mut separators = Vec::new();
- let mut extra_tokens = Vec::new();
- for rule in grammar.extra_tokens {
+ let mut extra_symbols = Vec::new();
+ for rule in grammar.extra_symbols {
if let Rule::Symbol(symbol) = rule {
- extra_tokens.push(symbol_replacer.replace_symbol(symbol));
+ extra_symbols.push(symbol_replacer.replace_symbol(symbol));
} else {
if let Some(index) = lexical_variables.iter().position(|v| v.rule == rule) {
- extra_tokens.push(Symbol::terminal(index));
+ extra_symbols.push(Symbol::terminal(index));
} else {
separators.push(rule);
}
@@ -150,7 +150,7 @@ pub(super) fn extract_tokens(
ExtractedSyntaxGrammar {
variables,
expected_conflicts,
- extra_tokens,
+ extra_symbols,
variables_to_inline,
supertype_symbols,
external_tokens,
@@ -407,15 +407,15 @@ mod test {
}
#[test]
- fn test_extracting_extra_tokens() {
+ fn test_extracting_extra_symbols() {
let mut grammar = build_grammar(vec![
Variable::named("rule_0", Rule::string("x")),
Variable::named("comment", Rule::pattern("//.*")),
]);
- grammar.extra_tokens = vec![Rule::string(" "), Rule::non_terminal(1)];
+ grammar.extra_symbols = vec![Rule::string(" "), Rule::non_terminal(1)];
let (syntax_grammar, lexical_grammar) = extract_tokens(grammar).unwrap();
- assert_eq!(syntax_grammar.extra_tokens, vec![Symbol::terminal(1),]);
+ assert_eq!(syntax_grammar.extra_symbols, vec![Symbol::terminal(1),]);
assert_eq!(lexical_grammar.separators, vec![Rule::string(" "),]);
}
@@ -492,7 +492,7 @@ mod test {
fn build_grammar(variables: Vec<Variable>) -> InternedGrammar {
InternedGrammar {
variables,
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),

View file

@@ -199,7 +199,7 @@ unless they are used only as the grammar's start rule.
}
}
Ok(SyntaxGrammar {
- extra_tokens: grammar.extra_tokens,
+ extra_symbols: grammar.extra_symbols,
expected_conflicts: grammar.expected_conflicts,
variables_to_inline: grammar.variables_to_inline,
external_tokens: grammar.external_tokens,

View file

@@ -30,9 +30,9 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
external_tokens.push(Variable { name, kind, rule });
}
- let mut extra_tokens = Vec::with_capacity(grammar.extra_tokens.len());
- for extra_token in grammar.extra_tokens.iter() {
- extra_tokens.push(interner.intern_rule(extra_token)?);
+ let mut extra_symbols = Vec::with_capacity(grammar.extra_symbols.len());
+ for extra_token in grammar.extra_symbols.iter() {
+ extra_symbols.push(interner.intern_rule(extra_token)?);
}
let mut supertype_symbols = Vec::with_capacity(grammar.supertype_symbols.len());
@@ -76,7 +76,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
Ok(InternedGrammar {
variables,
external_tokens,
- extra_tokens,
+ extra_symbols,
expected_conflicts,
variables_to_inline,
supertype_symbols,
@@ -236,7 +236,7 @@ mod tests {
InputGrammar {
variables,
name: "the_language".to_string(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
expected_conflicts: Vec::new(),
variables_to_inline: Vec::new(),

View file

@@ -21,7 +21,7 @@ use crate::generate::rules::{AliasMap, Rule, Symbol};
pub(crate) struct IntermediateGrammar<T, U> {
variables: Vec<Variable>,
- extra_tokens: Vec<T>,
+ extra_symbols: Vec<T>,
expected_conflicts: Vec<Vec<Symbol>>,
external_tokens: Vec<U>,
variables_to_inline: Vec<Symbol>,

View file

@@ -196,7 +196,7 @@ mod tests {
fn test_basic_inlining() {
let grammar = SyntaxGrammar {
expected_conflicts: Vec::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
supertype_symbols: Vec::new(),
word_token: None,
@@ -327,7 +327,7 @@ mod tests {
Symbol::non_terminal(3),
],
expected_conflicts: Vec::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
supertype_symbols: Vec::new(),
word_token: None,
@@ -429,7 +429,7 @@ mod tests {
},
],
expected_conflicts: Vec::new(),
- extra_tokens: Vec::new(),
+ extra_symbols: Vec::new(),
external_tokens: Vec::new(),
supertype_symbols: Vec::new(),
word_token: None,