cli: Use anyhow and thiserror for errors

This patch updates the CLI to use anyhow and thiserror for error
management.  The main feature that our custom `Error` type was providing
was a _list_ of messages, which would allow us to annotate "lower-level"
errors with more contextual information.  This is exactly what's
provided by anyhow's `Context` trait.

(This is setup work for a future PR that will pull the `config` and
`loader` modules out into separate crates; by using `anyhow` we wouldn't
have to deal with a circular dependency between the new crates.)
This commit is contained in:
Douglas Creager 2021-06-09 12:32:22 -04:00
parent 9d77561c43
commit d2d01e77e3
33 changed files with 237 additions and 419 deletions

View file

@@ -1,11 +1,8 @@
use super::ExtractedLexicalGrammar;
use crate::generate::grammars::{LexicalGrammar, LexicalVariable};
use crate::generate::nfa::{CharacterSet, Nfa, NfaState};
use crate::generate::rules::Rule;
use crate::{
error::{Error, Result},
generate::rules::Precedence,
};
use crate::generate::rules::{Precedence, Rule};
use anyhow::{anyhow, Context, Result};
use lazy_static::lazy_static;
use regex::Regex;
use regex_syntax::ast::{
@@ -111,9 +108,7 @@ pub(crate) fn expand_tokens(mut grammar: ExtractedLexicalGrammar) -> Result<Lexi
let last_state_id = builder.nfa.last_state_id();
builder
.expand_rule(&variable.rule, last_state_id)
.map_err(Error::wrap(|| {
format!("Error processing rule {}", variable.name)
}))?;
.with_context(|| format!("Error processing rule {}", variable.name))?;
if !is_immediate_token {
builder.is_sep = true;
@@ -205,14 +200,14 @@ impl NfaBuilder {
result
}
Rule::Blank => Ok(false),
_ => Err(Error::grammar(&format!("Unexpected rule {:?}", rule))),
_ => Err(anyhow!("Grammar error: Unexpected rule {:?}", rule)),
}
}
fn expand_regex(&mut self, ast: &Ast, mut next_state_id: u32) -> Result<bool> {
match ast {
Ast::Empty(_) => Ok(false),
Ast::Flags(_) => Err(Error::regex("Flags are not supported".to_string())),
Ast::Flags(_) => Err(anyhow!("Regex error: Flags are not supported")),
Ast::Literal(literal) => {
self.push_advance(CharacterSet::from_char(literal.c), next_state_id);
Ok(true)
@@ -221,7 +216,7 @@ impl NfaBuilder {
self.push_advance(CharacterSet::from_char('\n').negate(), next_state_id);
Ok(true)
}
Ast::Assertion(_) => Err(Error::regex("Assertions are not supported".to_string())),
Ast::Assertion(_) => Err(anyhow!("Regex error: Assertions are not supported")),
Ast::Class(class) => match class {
Class::Unicode(class) => {
let mut chars = self.expand_unicode_character_class(&class.kind)?;
@@ -248,8 +243,8 @@ impl NfaBuilder {
self.push_advance(chars, next_state_id);
Ok(true)
}
ClassSet::BinaryOp(_) => Err(Error::regex(
"Binary operators in character classes aren't supported".to_string(),
ClassSet::BinaryOp(_) => Err(anyhow!(
"Regex error: Binary operators in character classes aren't supported"
)),
},
},
@@ -383,10 +378,10 @@ impl NfaBuilder {
}
Ok(set)
}
_ => Err(Error::regex(format!(
"Unsupported character class syntax {:?}",
_ => Err(anyhow!(
"Regex error: Unsupported character class syntax {:?}",
item
))),
)),
}
}
@@ -406,10 +401,10 @@ impl NfaBuilder {
.get(class_name.as_str())
.or_else(|| UNICODE_PROPERTIES.get(class_name.as_str()))
.ok_or_else(|| {
Error::regex(format!(
"Unsupported unicode character class {}",
anyhow!(
"Regex error: Unsupported unicode character class {}",
class_name
))
)
})?;
for c in code_points {
if let Some(c) = std::char::from_u32(*c) {
@@ -421,8 +416,8 @@ impl NfaBuilder {
}
}
ClassUnicodeKind::NamedValue { .. } => {
return Err(Error::regex(
"Key-value unicode properties are not supported".to_string(),
return Err(anyhow!(
"Regex error: Key-value unicode properties are not supported"
))
}
}

View file

@@ -1,7 +1,7 @@
use super::{ExtractedLexicalGrammar, ExtractedSyntaxGrammar, InternedGrammar};
use crate::error::{Error, Result};
use crate::generate::grammars::{ExternalToken, Variable, VariableType};
use crate::generate::rules::{MetadataParams, Rule, Symbol, SymbolType};
use anyhow::{anyhow, Result};
use std::collections::HashMap;
use std::mem;
@@ -108,7 +108,7 @@ pub(super) fn extract_tokens(
let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
if let Rule::Symbol(symbol) = rule {
if symbol.is_non_terminal() {
return Error::err(format!(
return Err(anyhow!(
"Rule '{}' cannot be used as both an external token and a non-terminal rule",
&variables[symbol.index].name,
));
@@ -128,7 +128,7 @@ pub(super) fn extract_tokens(
})
}
} else {
return Error::err(format!(
return Err(anyhow!(
"Non-symbol rules cannot be used as external tokens"
));
}
@@ -138,7 +138,7 @@ pub(super) fn extract_tokens(
if let Some(token) = grammar.word_token {
let token = symbol_replacer.replace_symbol(token);
if token.is_non_terminal() {
return Error::err(format!(
return Err(anyhow!(
"Non-terminal symbol '{}' cannot be used as the word token",
&variables[token.index].name
));
@@ -482,7 +482,7 @@ mod test {
match extract_tokens(grammar) {
Err(e) => {
assert_eq!(e.message(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
assert_eq!(e.to_string(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
}
_ => {
panic!("Expected an error but got no error");

View file

@@ -1,9 +1,9 @@
use super::ExtractedSyntaxGrammar;
use crate::error::{Error, Result};
use crate::generate::grammars::{
Production, ProductionStep, SyntaxGrammar, SyntaxVariable, Variable,
};
use crate::generate::rules::{Alias, Associativity, Precedence, Rule, Symbol};
use anyhow::{anyhow, Result};
struct RuleFlattener {
production: Production,
@@ -193,7 +193,7 @@ pub(super) fn flatten_grammar(grammar: ExtractedSyntaxGrammar) -> Result<SyntaxG
for (i, variable) in variables.iter().enumerate() {
for production in &variable.productions {
if production.steps.is_empty() && symbol_is_used(&variables, Symbol::non_terminal(i)) {
return Error::err(format!(
return Err(anyhow!(
"The rule `{}` matches the empty string.
Tree-sitter does not support syntactic rules that match the empty string

View file

@@ -1,13 +1,13 @@
use super::InternedGrammar;
use crate::error::{Error, Result};
use crate::generate::grammars::{InputGrammar, Variable, VariableType};
use crate::generate::rules::{Rule, Symbol};
use anyhow::{anyhow, Result};
pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar> {
let interner = Interner { grammar };
if variable_type_for_name(&grammar.variables[0].name) == VariableType::Hidden {
return Error::err("A grammar's start rule must be visible.".to_string());
return Err(anyhow!("A grammar's start rule must be visible."));
}
let mut variables = Vec::with_capacity(grammar.variables.len());
@@ -40,7 +40,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
supertype_symbols.push(
interner
.intern_name(supertype_symbol_name)
.ok_or_else(|| Error::undefined_symbol(supertype_symbol_name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", supertype_symbol_name))?,
);
}
@@ -51,7 +51,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
interned_conflict.push(
interner
.intern_name(&name)
.ok_or_else(|| Error::undefined_symbol(name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", name))?,
);
}
expected_conflicts.push(interned_conflict);
@@ -69,7 +69,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
word_token = Some(
interner
.intern_name(&name)
.ok_or_else(|| Error::undefined_symbol(&name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", &name))?,
);
}
@@ -122,7 +122,7 @@ impl<'a> Interner<'a> {
if let Some(symbol) = self.intern_name(&name) {
Ok(Rule::Symbol(symbol))
} else {
Err(Error::undefined_symbol(name))
Err(anyhow!("Undefined symbol `{}`", name))
}
}
@@ -234,7 +234,7 @@ mod tests {
let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
match result {
Err(e) => assert_eq!(e.message(), "Undefined symbol `y`"),
Err(e) => assert_eq!(e.to_string(), "Undefined symbol `y`"),
_ => panic!("Expected an error but got none"),
}
}

View file

@@ -19,7 +19,7 @@ use super::grammars::{
SyntaxGrammar, Variable,
};
use super::rules::{AliasMap, Precedence, Rule, Symbol};
use super::{Error, Result};
use anyhow::{anyhow, Result};
use std::{
cmp::Ordering,
collections::{hash_map, HashMap, HashSet},
@@ -93,10 +93,11 @@ fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
}
hash_map::Entry::Occupied(e) => {
if e.get() != &ordering {
return Err(Error::new(format!(
return Err(anyhow!(
"Conflicting orderings for precedences {} and {}",
entry1, entry2
)));
entry1,
entry2
));
}
}
}
@@ -116,10 +117,11 @@ fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
Rule::Metadata { rule, params } => {
if let Precedence::Name(n) = &params.precedence {
if !names.contains(n) {
return Err(Error::new(format!(
return Err(anyhow!(
"Undeclared precedence '{}' in rule '{}'",
n, rule_name
)));
n,
rule_name
));
}
}
validate(rule_name, rule, names)?;
@@ -196,7 +198,7 @@ mod tests {
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().message(),
result.unwrap_err().to_string(),
"Undeclared precedence 'omg' in rule 'v2'",
);
}
@@ -244,7 +246,7 @@ mod tests {
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().message(),
result.unwrap_err().to_string(),
"Conflicting orderings for precedences 'a' and 'b'",
);
}