chore: clippy

dundargoc 2024-02-06 23:18:27 +01:00 committed by Amaan Qureshi
parent a1870b6013
commit c8bd6705cf
No known key found for this signature in database
GPG key ID: E67890ADC4227273
36 changed files with 467 additions and 462 deletions

View file

@@ -436,14 +436,14 @@ mod tests {
         let token_map = TokenConflictMap::new(
             &grammar,
             vec![
-                [Symbol::terminal(var("identifier"))]
-                    .iter()
-                    .cloned()
+                std::iter::once(&Symbol::terminal(var("identifier")))
+                    .copied()
                     .collect(),
-                [Symbol::terminal(var("in"))].iter().cloned().collect(),
-                [Symbol::terminal(var("identifier"))]
-                    .iter()
-                    .cloned()
+                std::iter::once(&Symbol::terminal(var("in")))
+                    .copied()
                     .collect(),
+                std::iter::once(&Symbol::terminal(var("identifier")))
+                    .copied()
+                    .collect(),
             ],
         );
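
The hunk above replaces a temporary one-element array plus .iter().cloned() with std::iter::once(&value).copied(). Presumably clippy's cloned_instead_of_copied lint drives the .copied() part (the items are Copy), while iter::once simply avoids materializing the array. A minimal, self-contained sketch of the same pattern (the HashSet target type is an assumption; the real test collects into whatever set type TokenConflictMap expects):

    use std::collections::HashSet;

    fn main() {
        // Before: iterate over a temporary one-element array and clone each item.
        let old: HashSet<u32> = [42].iter().cloned().collect();

        // After: iterate over the single value directly and copy it out of the reference.
        let new: HashSet<u32> = std::iter::once(&42).copied().collect();

        assert_eq!(old, new);
    }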

View file

@@ -106,7 +106,7 @@ pub struct SyntaxGrammar {

 #[cfg(test)]
 impl ProductionStep {
-    pub fn new(symbol: Symbol) -> Self {
+    pub const fn new(symbol: Symbol) -> Self {
         Self {
             symbol,
             precedence: Precedence::None,
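
Making a constructor that only assembles a value into a const fn is the fix clippy's missing_const_for_fn lint suggests (a nursery lint, so whether that lint or a manual pass prompted this change is an assumption). A small sketch with a hypothetical Step type, showing that const-ness also lets the constructor run at compile time:

    #[derive(Debug, PartialEq)]
    struct Step {
        symbol: u32,
        precedence: i32,
    }

    impl Step {
        // The body only builds a value from its arguments, so it can be `const`.
        const fn new(symbol: u32) -> Self {
            Self { symbol, precedence: 0 }
        }
    }

    // Usable in a const context...
    const START: Step = Step::new(7);

    fn main() {
        // ...and unchanged at run time.
        assert_eq!(Step::new(7), START);
    }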

View file

@@ -971,7 +971,7 @@ mod tests {
                 "row {i}b: {:?} ~~ {:?}",
                 row.left,
                 row.right
-            )
+            );
         }
     }
@@ -1032,6 +1032,7 @@ mod tests {
     }

     #[test]
+    #[allow(clippy::single_range_in_vec_init)]
     fn test_character_set_get_ranges() {
         struct Row {
             chars: Vec<char>,
@@ -1061,7 +1062,7 @@ mod tests {
             chars,
             ruled_out_chars,
             expected_ranges,
-        } in table.iter()
+        } in &table
         {
             let ruled_out_chars = ruled_out_chars.iter().map(|c: &char| *c as u32).collect();
             let mut set = CharacterSet::empty();
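
Three small clippy fixes recur in this file: the assert gains a trailing semicolon (what semicolon_if_nothing_returned asks for, if that is the lint involved), the loop iterates over &table instead of calling table.iter() (explicit_iter_loop), and test_character_set_get_ranges allows single_range_in_vec_init because a one-element Vec of a Range is exactly what the test means (the lint flags vec![a..b] since it is often a mistaken attempt to build the numbers a through b). A compact sketch of the last two, with a stand-in Row type:

    struct Row {
        expected_ranges: Vec<std::ops::Range<u32>>,
    }

    #[allow(clippy::single_range_in_vec_init)] // one Range is exactly what we mean
    fn main() {
        let table = [
            Row { expected_ranges: vec![97..123] }, // covers 'a' through 'z' as a half-open range
            Row { expected_ranges: vec![] },
        ];

        // Iterate over `&table` rather than `table.iter()`, as explicit_iter_loop suggests;
        // match ergonomics let the pattern destructure the borrowed Row directly.
        for Row { expected_ranges } in &table {
            println!("{} range(s)", expected_ranges.len());
        }
    }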

View file

@@ -726,7 +726,7 @@ mod tests {

     #[test]
     fn test_node_types_simple() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables: vec![
                 Variable {
                     name: "v1".to_string(),
@@ -815,7 +815,7 @@ mod tests {

     #[test]
     fn test_node_types_simple_extras() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             extra_symbols: vec![Rule::named("v3")],
             variables: vec![
                 Variable {
@@ -916,7 +916,7 @@ mod tests {

     #[test]
     fn test_node_types_with_supertypes() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             supertype_symbols: vec!["_v2".to_string()],
             variables: vec![
                 Variable {
@@ -998,7 +998,7 @@ mod tests {

     #[test]
     fn test_node_types_for_children_without_fields() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables: vec![
                 Variable {
                     name: "v1".to_string(),
@@ -1090,7 +1090,7 @@ mod tests {

     #[test]
     fn test_node_types_with_inlined_rules() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables_to_inline: vec!["v2".to_string()],
             variables: vec![
                 Variable {
@@ -1140,7 +1140,7 @@ mod tests {

     #[test]
     fn test_node_types_for_aliased_nodes() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables: vec![
                 Variable {
                     name: "thing".to_string(),
@@ -1210,7 +1210,7 @@ mod tests {

     #[test]
     fn test_node_types_with_multiple_valued_fields() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables: vec![
                 Variable {
                     name: "a".to_string(),
@@ -1272,7 +1272,7 @@ mod tests {

     #[test]
     fn test_node_types_with_fields_on_hidden_tokens() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables: vec![Variable {
                 name: "script".to_string(),
                 kind: VariableType::Named,
@@ -1298,7 +1298,7 @@ mod tests {

     #[test]
     fn test_node_types_with_multiple_rules_same_alias_name() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables: vec![
                 Variable {
                     name: "script".to_string(),
@@ -1418,7 +1418,7 @@ mod tests {

     #[test]
     fn test_node_types_with_tokens_aliased_to_match_rules() {
-        let node_types = get_node_types(InputGrammar {
+        let node_types = get_node_types(&InputGrammar {
             variables: vec![
                 Variable {
                     name: "a".to_string(),
@@ -1768,9 +1768,9 @@ mod tests {
         );
     }

-    fn get_node_types(grammar: InputGrammar) -> Vec<NodeInfoJSON> {
+    fn get_node_types(grammar: &InputGrammar) -> Vec<NodeInfoJSON> {
         let (syntax_grammar, lexical_grammar, _, default_aliases) =
-            prepare_grammar(&grammar).unwrap();
+            prepare_grammar(grammar).unwrap();
         let variable_info =
             get_variable_info(&syntax_grammar, &lexical_grammar, &default_aliases).unwrap();
         generate_node_types_json(
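
Every hunk in this file is the same mechanical change: the get_node_types test helper now takes &InputGrammar, so call sites pass a borrow, which is the usual response to clippy's needless_pass_by_value lint when a function only reads its argument. A minimal sketch of the refactor (Grammar and node_names are illustrative stand-ins, not the real tree-sitter types):

    struct Grammar {
        variables: Vec<String>,
    }

    // Before: `fn node_names(grammar: Grammar)` forced callers to give up ownership.
    // After: a shared borrow is enough, because the helper only reads the grammar.
    fn node_names(grammar: &Grammar) -> Vec<String> {
        grammar.variables.iter().map(|v| format!("node_{v}")).collect()
    }

    fn main() {
        let grammar = Grammar {
            variables: vec!["v1".to_string(), "v2".to_string()],
        };
        // Call sites change from `node_names(grammar)` to `node_names(&grammar)`,
        // mirroring `get_node_types(&InputGrammar { ... })` above.
        assert_eq!(node_names(&grammar), vec!["node_v1", "node_v2"]);
    }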

View file

@@ -792,7 +792,7 @@ mod tests {
             },
             // nested groups
             Row {
-                rules: vec![Rule::seq(vec![Rule::pattern(r#"([^x\\]|\\(.|\n))+"#, "")])],
+                rules: vec![Rule::seq(vec![Rule::pattern(r"([^x\\]|\\(.|\n))+", "")])],
                 separators: vec![],
                 examples: vec![("abcx", Some((0, "abc"))), ("abc\\0x", Some((0, "abc\\0")))],
             },
@@ -800,24 +800,24 @@ mod tests {
             Row {
                 rules: vec![
                     // Escaped forward slash (used in JS because '/' is the regex delimiter)
-                    Rule::pattern(r#"\/"#, ""),
+                    Rule::pattern(r"\/", ""),
                     // Escaped quotes
                     Rule::pattern(r#"\"\'"#, ""),
                     // Quote preceded by a literal backslash
-                    Rule::pattern(r#"[\\']+"#, ""),
+                    Rule::pattern(r"[\\']+", ""),
                 ],
                 separators: vec![],
                 examples: vec![
                     ("/", Some((0, "/"))),
                     ("\"\'", Some((1, "\"\'"))),
-                    (r#"'\'a"#, Some((2, r#"'\'"#))),
+                    (r"'\'a", Some((2, r"'\'"))),
                 ],
             },
             // unicode property escapes
             Row {
                 rules: vec![
-                    Rule::pattern(r#"\p{L}+\P{L}+"#, ""),
-                    Rule::pattern(r#"\p{White_Space}+\P{White_Space}+[\p{White_Space}]*"#, ""),
+                    Rule::pattern(r"\p{L}+\P{L}+", ""),
+                    Rule::pattern(r"\p{White_Space}+\P{White_Space}+[\p{White_Space}]*", ""),
                 ],
                 separators: vec![],
                 examples: vec![
@@ -827,17 +827,17 @@ mod tests {
             },
             // unicode property escapes in bracketed sets
             Row {
-                rules: vec![Rule::pattern(r#"[\p{L}\p{Nd}]+"#, "")],
+                rules: vec![Rule::pattern(r"[\p{L}\p{Nd}]+", "")],
                 separators: vec![],
                 examples: vec![("abΨ12٣٣, ok", Some((0, "abΨ12٣٣")))],
             },
             // unicode character escapes
             Row {
                 rules: vec![
-                    Rule::pattern(r#"\u{00dc}"#, ""),
-                    Rule::pattern(r#"\U{000000dd}"#, ""),
-                    Rule::pattern(r#"\u00de"#, ""),
-                    Rule::pattern(r#"\U000000df"#, ""),
+                    Rule::pattern(r"\u{00dc}", ""),
+                    Rule::pattern(r"\U{000000dd}", ""),
+                    Rule::pattern(r"\u00de", ""),
+                    Rule::pattern(r"\U000000df", ""),
                 ],
                 separators: vec![],
                 examples: vec![
@@ -851,13 +851,13 @@ mod tests {
             Row {
                 rules: vec![
                     // Un-escaped curly braces
-                    Rule::pattern(r#"u{[0-9a-fA-F]+}"#, ""),
+                    Rule::pattern(r"u{[0-9a-fA-F]+}", ""),
                     // Already-escaped curly braces
-                    Rule::pattern(r#"\{[ab]{3}\}"#, ""),
+                    Rule::pattern(r"\{[ab]{3}\}", ""),
                     // Unicode codepoints
-                    Rule::pattern(r#"\u{1000A}"#, ""),
+                    Rule::pattern(r"\u{1000A}", ""),
                     // Unicode codepoints (lowercase)
-                    Rule::pattern(r#"\u{1000b}"#, ""),
+                    Rule::pattern(r"\u{1000b}", ""),
                 ],
                 separators: vec![],
                 examples: vec![
@@ -957,7 +957,7 @@ mod tests {
                 })
                 .unwrap();

-            for (haystack, needle) in examples.iter() {
+            for (haystack, needle) in examples {
                 assert_eq!(simulate_nfa(&grammar, haystack), *needle);
             }
         }
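
The only change to these pattern literals is dropping the `#` delimiters from raw strings that never contain a double quote, which is what clippy's needless_raw_string_hashes lint flags (the one literal with an embedded quote, r#"\"\'"#, keeps its hashes). A tiny illustration:

    fn main() {
        // No double quote inside, so `r"..."` is sufficient; the hashes add nothing.
        let slash = r"\/";
        assert_eq!(slash, "\\/");

        // A literal double quote inside still needs the `#` delimiters.
        let quoted = r#"\"\'"#;
        assert_eq!(quoted, "\\\"\\'");
    }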

View file

@@ -402,7 +402,7 @@ mod test {
         assert_eq!(
             lexical_grammar.variables,
             vec![Variable::anonymous("hello", Rule::string("hello")),]
-        )
+        );
     }

     #[test]

View file

@@ -266,7 +266,7 @@ mod tests {
             ..Default::default()
         };

-        let inline_map = process_inlines(&grammar, &Default::default()).unwrap();
+        let inline_map = process_inlines(&grammar, &LexicalGrammar::default()).unwrap();

         // Nothing to inline at step 0.
         assert!(inline_map
@@ -362,7 +362,7 @@ mod tests {
             ..Default::default()
         };

-        let inline_map = process_inlines(&grammar, &Default::default()).unwrap();
+        let inline_map = process_inlines(&grammar, &LexicalGrammar::default()).unwrap();

         let productions: Vec<&Production> = inline_map
             .inlined_productions(&grammar.variables[0].productions[0], 1)
@@ -370,7 +370,7 @@ mod tests {
             .collect();

         assert_eq!(
-            productions.iter().cloned().cloned().collect::<Vec<_>>(),
+            productions.iter().copied().cloned().collect::<Vec<_>>(),
             vec![
                 Production {
                     dynamic_precedence: 0,
@@ -461,7 +461,7 @@ mod tests {
             ..Default::default()
         };

-        let inline_map = process_inlines(&grammar, &Default::default()).unwrap();
+        let inline_map = process_inlines(&grammar, &LexicalGrammar::default()).unwrap();

         let productions: Vec<_> = inline_map
             .inlined_productions(&grammar.variables[0].productions[0], 0)
@@ -469,7 +469,7 @@ mod tests {
             .collect();

         assert_eq!(
-            productions.iter().cloned().cloned().collect::<Vec<_>>(),
+            productions.iter().copied().cloned().collect::<Vec<_>>(),
             vec![Production {
                 dynamic_precedence: 0,
                 steps: vec![
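
Two clippy-driven edits repeat through this file: the default lexical grammar is written as &LexicalGrammar::default() instead of &Default::default(), which is what the default_trait_access lint prefers once the concrete type is known, and the first .cloned() in .iter().cloned().cloned() becomes .copied(), since the layer being stripped there is only a &&Production and references are Copy. A small sketch of the .copied().cloned() chain with Strings standing in for Production:

    fn main() {
        let values = vec!["a".to_string(), "b".to_string()];

        // A vector of references, like the `Vec<&Production>` built in the tests above.
        let refs: Vec<&String> = values.iter().collect();

        // `refs.iter()` yields `&&String`: `.copied()` peels off the outer reference
        // (references are Copy), then `.cloned()` clones the String itself.
        let owned: Vec<String> = refs.iter().copied().cloned().collect();
        assert_eq!(owned, values);

        // The default_trait_access fix, in miniature: name the type when you know it.
        let empty: Vec<String> = Vec::default(); // rather than `Default::default()`
        assert!(empty.is_empty());
    }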

View file

@@ -151,6 +151,7 @@ impl Rule {
 }

 impl Alias {
+    #[must_use]
     pub const fn kind(&self) -> VariableType {
         if self.is_named {
             VariableType::Named
@@ -161,6 +162,7 @@ impl Alias {
 }

 impl Precedence {
+    #[must_use]
     pub const fn is_none(&self) -> bool {
         matches!(self, Self::None)
     }
@@ -168,48 +170,59 @@ impl Precedence {

 #[cfg(test)]
 impl Rule {
-    pub fn terminal(index: usize) -> Self {
+    #[must_use]
+    pub const fn terminal(index: usize) -> Self {
         Self::Symbol(Symbol::terminal(index))
     }

-    pub fn non_terminal(index: usize) -> Self {
+    #[must_use]
+    pub const fn non_terminal(index: usize) -> Self {
         Self::Symbol(Symbol::non_terminal(index))
     }

-    pub fn external(index: usize) -> Self {
+    #[must_use]
+    pub const fn external(index: usize) -> Self {
         Self::Symbol(Symbol::external(index))
     }

+    #[must_use]
     pub fn named(name: &'static str) -> Self {
         Self::NamedSymbol(name.to_string())
     }

+    #[must_use]
     pub fn string(value: &'static str) -> Self {
         Self::String(value.to_string())
     }

+    #[must_use]
     pub fn pattern(value: &'static str, flags: &'static str) -> Self {
         Self::Pattern(value.to_string(), flags.to_string())
     }
 }

 impl Symbol {
+    #[must_use]
     pub fn is_terminal(&self) -> bool {
         self.kind == SymbolType::Terminal
     }

+    #[must_use]
     pub fn is_non_terminal(&self) -> bool {
         self.kind == SymbolType::NonTerminal
     }

+    #[must_use]
     pub fn is_external(&self) -> bool {
         self.kind == SymbolType::External
     }

+    #[must_use]
     pub fn is_eof(&self) -> bool {
         self.kind == SymbolType::End
     }

+    #[must_use]
     pub const fn non_terminal(index: usize) -> Self {
         Self {
             kind: SymbolType::NonTerminal,
@@ -217,6 +230,7 @@ impl Symbol {
         }
     }

+    #[must_use]
     pub const fn terminal(index: usize) -> Self {
         Self {
             kind: SymbolType::Terminal,
@@ -224,6 +238,7 @@ impl Symbol {
         }
     }

+    #[must_use]
     pub const fn external(index: usize) -> Self {
         Self {
             kind: SymbolType::External,
@@ -231,6 +246,7 @@ impl Symbol {
         }
     }

+    #[must_use]
     pub const fn end() -> Self {
         Self {
             kind: SymbolType::End,
@@ -238,6 +254,7 @@ impl Symbol {
         }
     }

+    #[must_use]
     pub const fn end_of_nonterminal_extra() -> Self {
         Self {
             kind: SymbolType::EndOfNonTerminalExtra,
@@ -247,6 +264,7 @@ impl Symbol {
     }
 }

 impl From<Symbol> for Rule {
+    #[must_use]
     fn from(symbol: Symbol) -> Self {
         Self::Symbol(symbol)