cli: Use anyhow and thiserror for errors

This patch updates the CLI to use anyhow and thiserror for error
management.  The main feature that our custom `Error` type was providing
was a _list_ of messages, which would allow us to annotate "lower-level"
errors with more contextual information.  This is exactly what's
provided by anyhow's `Context` trait.

(This is setup work for a future PR that will pull the `config` and
`loader` modules out into separate crates; by using `anyhow` we wouldn't
have to deal with a circular dependency between the new crates.)
This commit is contained in:
Douglas Creager 2021-06-09 12:32:22 -04:00
parent 9d77561c43
commit d2d01e77e3
33 changed files with 237 additions and 419 deletions

View file

@ -1,5 +1,5 @@
use super::write_file;
use crate::error::{Error, Result};
use anyhow::{Context, Result};
use std::path::Path;
use std::{fs, str};
@ -61,7 +61,7 @@ pub fn generate_binding_files(repo_path: &Path, language_name: &str) -> Result<(
eprintln!("Updating binding.gyp with new binding path");
let binding_gyp = fs::read_to_string(&binding_gyp_path)
.map_err(Error::wrap(|| "Failed to read binding.gyp"))?;
.with_context(|| "Failed to read binding.gyp")?;
let binding_gyp = binding_gyp.replace("src/binding.cc", "bindings/node/binding.cc");
write_file(&binding_gyp_path, binding_gyp)?;
} else {
@ -72,12 +72,12 @@ pub fn generate_binding_files(repo_path: &Path, language_name: &str) -> Result<(
let package_json_path = repo_path.join("package.json");
if package_json_path.exists() {
let package_json_str = fs::read_to_string(&package_json_path)
.map_err(Error::wrap(|| "Failed to read package.json"))?;
.with_context(|| "Failed to read package.json")?;
let mut package_json =
serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(
&package_json_str,
)
.map_err(Error::wrap(|| "Failed to parse package.json"))?;
.with_context(|| "Failed to parse package.json")?;
let package_json_main = package_json.get("main");
let package_json_needs_update = package_json_main.map_or(true, |v| {
let main_string = v.as_str();
@ -126,7 +126,6 @@ fn generate_file(path: &Path, template: &str, language_name: &str) -> Result<()>
}
fn create_dir(path: &Path) -> Result<()> {
fs::create_dir_all(&path).map_err(Error::wrap(|| {
format!("Failed to create {:?}", path.to_string_lossy())
}))
fs::create_dir_all(&path)
.with_context(|| format!("Failed to create {:?}", path.to_string_lossy()))
}

View file

@ -1,5 +1,6 @@
use super::item::{ParseItem, ParseItemSet, ParseItemSetCore};
use super::item_set_builder::ParseItemSetBuilder;
use crate::generate::grammars::PrecedenceEntry;
use crate::generate::grammars::{
InlinedProductionMap, LexicalGrammar, SyntaxGrammar, VariableType,
};
@ -9,10 +10,7 @@ use crate::generate::tables::{
FieldLocation, GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry,
ProductionInfo, ProductionInfoId,
};
use crate::{
error::{Error, Result},
generate::grammars::PrecedenceEntry,
};
use anyhow::{anyhow, Result};
use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
use std::fmt::Write;
use std::u32;
@ -387,7 +385,7 @@ impl<'a> ParseTableBuilder<'a> {
}
message += &self.syntax_grammar.variables[*variable_index as usize].name;
}
return Err(Error::new(message));
return Err(anyhow!(message));
}
}
// Add actions for the start tokens of each non-terminal extra rule.
@ -762,7 +760,7 @@ impl<'a> ParseTableBuilder<'a> {
}
write!(&mut msg, "\n").unwrap();
Err(Error::new(msg))
Err(anyhow!(msg))
}
fn compare_precedence(

View file

@ -11,12 +11,12 @@ use self::build_parse_table::{build_parse_table, ParseStateInfo};
use self::coincident_tokens::CoincidentTokenIndex;
use self::minimize_parse_table::minimize_parse_table;
use self::token_conflicts::TokenConflictMap;
use crate::error::Result;
use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
use crate::generate::nfa::NfaCursor;
use crate::generate::node_types::VariableInfo;
use crate::generate::rules::{AliasMap, Symbol, SymbolType, TokenSet};
use crate::generate::tables::{LexTable, ParseAction, ParseTable, ParseTableEntry};
use anyhow::Result;
use log::info;
use std::collections::{BTreeSet, HashMap};

View file

@ -17,7 +17,7 @@ use self::parse_grammar::parse_grammar;
use self::prepare_grammar::prepare_grammar;
use self::render::render_c_code;
use self::rules::AliasMap;
use crate::error::{Error, Result};
use anyhow::{anyhow, Context, Result};
use lazy_static::lazy_static;
use regex::{Regex, RegexBuilder};
use std::fs;
@ -161,10 +161,10 @@ fn load_grammar_file(grammar_path: &Path) -> Result<String> {
match grammar_path.extension().and_then(|e| e.to_str()) {
Some("js") => Ok(load_js_grammar_file(grammar_path)?),
Some("json") => Ok(fs::read_to_string(grammar_path)?),
_ => Err(Error::new(format!(
_ => Err(anyhow!(
"Unknown grammar file extension: {:?}",
grammar_path
))),
)),
}
}
@ -191,7 +191,7 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
match output.status.code() {
None => panic!("Node process was killed"),
Some(0) => {}
Some(code) => return Error::err(format!("Node process exited with status {}", code)),
Some(code) => return Err(anyhow!("Node process exited with status {}", code)),
}
let mut result = String::from_utf8(output.stdout).expect("Got invalid UTF8 from node");
@ -200,7 +200,6 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
}
fn write_file(path: &Path, body: impl AsRef<[u8]>) -> Result<()> {
fs::write(path, body).map_err(Error::wrap(|| {
format!("Failed to write {:?}", path.file_name().unwrap())
}))
fs::write(path, body)
.with_context(|| format!("Failed to write {:?}", path.file_name().unwrap()))
}

View file

@ -1,6 +1,6 @@
use super::grammars::{LexicalGrammar, SyntaxGrammar, VariableType};
use super::rules::{Alias, AliasMap, Symbol, SymbolType};
use crate::error::{Error, Result};
use anyhow::{anyhow, Result};
use serde_derive::Serialize;
use std::cmp::Ordering;
use std::collections::{BTreeMap, HashMap, HashSet};
@ -328,10 +328,13 @@ pub(crate) fn get_variable_info(
for supertype_symbol in &syntax_grammar.supertype_symbols {
if result[supertype_symbol.index].has_multi_step_production {
let variable = &syntax_grammar.variables[supertype_symbol.index];
return Err(Error::grammar(&format!(
"Supertype symbols must always have a single visible child, but `{}` can have multiple",
return Err(anyhow!(
concat!(
"Grammar error: Supertype symbols must always ",
"have a single visible child, but `{}` can have multiple"
),
variable.name
)));
));
}
}

View file

@ -1,6 +1,6 @@
use super::grammars::{InputGrammar, PrecedenceEntry, Variable, VariableType};
use super::rules::{Precedence, Rule};
use crate::error::{Error, Result};
use anyhow::{anyhow, Result};
use serde_derive::Deserialize;
use serde_json::{Map, Value};
@ -109,9 +109,8 @@ pub(crate) fn parse_grammar(input: &str) -> Result<InputGrammar> {
RuleJSON::STRING { value } => PrecedenceEntry::Name(value),
RuleJSON::SYMBOL { name } => PrecedenceEntry::Symbol(name),
_ => {
return Err(Error::new(
return Err(anyhow!(
"Invalid rule in precedences array. Only strings and symbols are allowed"
.to_string(),
))
}
})

View file

@ -1,11 +1,8 @@
use super::ExtractedLexicalGrammar;
use crate::generate::grammars::{LexicalGrammar, LexicalVariable};
use crate::generate::nfa::{CharacterSet, Nfa, NfaState};
use crate::generate::rules::Rule;
use crate::{
error::{Error, Result},
generate::rules::Precedence,
};
use crate::generate::rules::{Precedence, Rule};
use anyhow::{anyhow, Context, Result};
use lazy_static::lazy_static;
use regex::Regex;
use regex_syntax::ast::{
@ -111,9 +108,7 @@ pub(crate) fn expand_tokens(mut grammar: ExtractedLexicalGrammar) -> Result<Lexi
let last_state_id = builder.nfa.last_state_id();
builder
.expand_rule(&variable.rule, last_state_id)
.map_err(Error::wrap(|| {
format!("Error processing rule {}", variable.name)
}))?;
.with_context(|| format!("Error processing rule {}", variable.name))?;
if !is_immediate_token {
builder.is_sep = true;
@ -205,14 +200,14 @@ impl NfaBuilder {
result
}
Rule::Blank => Ok(false),
_ => Err(Error::grammar(&format!("Unexpected rule {:?}", rule))),
_ => Err(anyhow!("Grammar error: Unexpected rule {:?}", rule)),
}
}
fn expand_regex(&mut self, ast: &Ast, mut next_state_id: u32) -> Result<bool> {
match ast {
Ast::Empty(_) => Ok(false),
Ast::Flags(_) => Err(Error::regex("Flags are not supported".to_string())),
Ast::Flags(_) => Err(anyhow!("Regex error: Flags are not supported")),
Ast::Literal(literal) => {
self.push_advance(CharacterSet::from_char(literal.c), next_state_id);
Ok(true)
@ -221,7 +216,7 @@ impl NfaBuilder {
self.push_advance(CharacterSet::from_char('\n').negate(), next_state_id);
Ok(true)
}
Ast::Assertion(_) => Err(Error::regex("Assertions are not supported".to_string())),
Ast::Assertion(_) => Err(anyhow!("Regex error: Assertions are not supported")),
Ast::Class(class) => match class {
Class::Unicode(class) => {
let mut chars = self.expand_unicode_character_class(&class.kind)?;
@ -248,8 +243,8 @@ impl NfaBuilder {
self.push_advance(chars, next_state_id);
Ok(true)
}
ClassSet::BinaryOp(_) => Err(Error::regex(
"Binary operators in character classes aren't supported".to_string(),
ClassSet::BinaryOp(_) => Err(anyhow!(
"Regex error: Binary operators in character classes aren't supported"
)),
},
},
@ -383,10 +378,10 @@ impl NfaBuilder {
}
Ok(set)
}
_ => Err(Error::regex(format!(
"Unsupported character class syntax {:?}",
_ => Err(anyhow!(
"Regex error: Unsupported character class syntax {:?}",
item
))),
)),
}
}
@ -406,10 +401,10 @@ impl NfaBuilder {
.get(class_name.as_str())
.or_else(|| UNICODE_PROPERTIES.get(class_name.as_str()))
.ok_or_else(|| {
Error::regex(format!(
"Unsupported unicode character class {}",
anyhow!(
"Regex error: Unsupported unicode character class {}",
class_name
))
)
})?;
for c in code_points {
if let Some(c) = std::char::from_u32(*c) {
@ -421,8 +416,8 @@ impl NfaBuilder {
}
}
ClassUnicodeKind::NamedValue { .. } => {
return Err(Error::regex(
"Key-value unicode properties are not supported".to_string(),
return Err(anyhow!(
"Regex error: Key-value unicode properties are not supported"
))
}
}

View file

@ -1,7 +1,7 @@
use super::{ExtractedLexicalGrammar, ExtractedSyntaxGrammar, InternedGrammar};
use crate::error::{Error, Result};
use crate::generate::grammars::{ExternalToken, Variable, VariableType};
use crate::generate::rules::{MetadataParams, Rule, Symbol, SymbolType};
use anyhow::{anyhow, Result};
use std::collections::HashMap;
use std::mem;
@ -108,7 +108,7 @@ pub(super) fn extract_tokens(
let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
if let Rule::Symbol(symbol) = rule {
if symbol.is_non_terminal() {
return Error::err(format!(
return Err(anyhow!(
"Rule '{}' cannot be used as both an external token and a non-terminal rule",
&variables[symbol.index].name,
));
@ -128,7 +128,7 @@ pub(super) fn extract_tokens(
})
}
} else {
return Error::err(format!(
return Err(anyhow!(
"Non-symbol rules cannot be used as external tokens"
));
}
@ -138,7 +138,7 @@ pub(super) fn extract_tokens(
if let Some(token) = grammar.word_token {
let token = symbol_replacer.replace_symbol(token);
if token.is_non_terminal() {
return Error::err(format!(
return Err(anyhow!(
"Non-terminal symbol '{}' cannot be used as the word token",
&variables[token.index].name
));
@ -482,7 +482,7 @@ mod test {
match extract_tokens(grammar) {
Err(e) => {
assert_eq!(e.message(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
assert_eq!(e.to_string(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
}
_ => {
panic!("Expected an error but got no error");

View file

@ -1,9 +1,9 @@
use super::ExtractedSyntaxGrammar;
use crate::error::{Error, Result};
use crate::generate::grammars::{
Production, ProductionStep, SyntaxGrammar, SyntaxVariable, Variable,
};
use crate::generate::rules::{Alias, Associativity, Precedence, Rule, Symbol};
use anyhow::{anyhow, Result};
struct RuleFlattener {
production: Production,
@ -193,7 +193,7 @@ pub(super) fn flatten_grammar(grammar: ExtractedSyntaxGrammar) -> Result<SyntaxG
for (i, variable) in variables.iter().enumerate() {
for production in &variable.productions {
if production.steps.is_empty() && symbol_is_used(&variables, Symbol::non_terminal(i)) {
return Error::err(format!(
return Err(anyhow!(
"The rule `{}` matches the empty string.
Tree-sitter does not support syntactic rules that match the empty string

View file

@ -1,13 +1,13 @@
use super::InternedGrammar;
use crate::error::{Error, Result};
use crate::generate::grammars::{InputGrammar, Variable, VariableType};
use crate::generate::rules::{Rule, Symbol};
use anyhow::{anyhow, Result};
pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar> {
let interner = Interner { grammar };
if variable_type_for_name(&grammar.variables[0].name) == VariableType::Hidden {
return Error::err("A grammar's start rule must be visible.".to_string());
return Err(anyhow!("A grammar's start rule must be visible."));
}
let mut variables = Vec::with_capacity(grammar.variables.len());
@ -40,7 +40,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
supertype_symbols.push(
interner
.intern_name(supertype_symbol_name)
.ok_or_else(|| Error::undefined_symbol(supertype_symbol_name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", supertype_symbol_name))?,
);
}
@ -51,7 +51,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
interned_conflict.push(
interner
.intern_name(&name)
.ok_or_else(|| Error::undefined_symbol(name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", name))?,
);
}
expected_conflicts.push(interned_conflict);
@ -69,7 +69,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
word_token = Some(
interner
.intern_name(&name)
.ok_or_else(|| Error::undefined_symbol(&name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", &name))?,
);
}
@ -122,7 +122,7 @@ impl<'a> Interner<'a> {
if let Some(symbol) = self.intern_name(&name) {
Ok(Rule::Symbol(symbol))
} else {
Err(Error::undefined_symbol(name))
Err(anyhow!("Undefined symbol `{}`", name))
}
}
@ -234,7 +234,7 @@ mod tests {
let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
match result {
Err(e) => assert_eq!(e.message(), "Undefined symbol `y`"),
Err(e) => assert_eq!(e.to_string(), "Undefined symbol `y`"),
_ => panic!("Expected an error but got none"),
}
}

View file

@ -19,7 +19,7 @@ use super::grammars::{
SyntaxGrammar, Variable,
};
use super::rules::{AliasMap, Precedence, Rule, Symbol};
use super::{Error, Result};
use anyhow::{anyhow, Result};
use std::{
cmp::Ordering,
collections::{hash_map, HashMap, HashSet},
@ -93,10 +93,11 @@ fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
}
hash_map::Entry::Occupied(e) => {
if e.get() != &ordering {
return Err(Error::new(format!(
return Err(anyhow!(
"Conflicting orderings for precedences {} and {}",
entry1, entry2
)));
entry1,
entry2
));
}
}
}
@ -116,10 +117,11 @@ fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
Rule::Metadata { rule, params } => {
if let Precedence::Name(n) = &params.precedence {
if !names.contains(n) {
return Err(Error::new(format!(
return Err(anyhow!(
"Undeclared precedence '{}' in rule '{}'",
n, rule_name
)));
n,
rule_name
));
}
}
validate(rule_name, rule, names)?;
@ -196,7 +198,7 @@ mod tests {
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().message(),
result.unwrap_err().to_string(),
"Undeclared precedence 'omg' in rule 'v2'",
);
}
@ -244,7 +246,7 @@ mod tests {
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().message(),
result.unwrap_err().to_string(),
"Conflicting orderings for precedences 'a' and 'b'",
);
}