Overhaul CLI error handling to allow multiple levels of context
parent: b7bc2d30ae
commit: d274e81d0d

17 changed files with 214 additions and 129 deletions
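
The changes below replace direct construction of the old single-string `Error` with a richer API: `Error::new`, `Error::err`, `Error::wrap`, and an accessor `e.message()`. The commit's actual `Error` definition is not among the hunks shown here; the following is a minimal sketch of one shape consistent with these call sites, where each `wrap` pushes one more level of context. Everything in it is inferred, not copied from the commit:

    pub type Result<T> = std::result::Result<T, Error>;

    // Sketch: one String per context level, innermost message first.
    #[derive(Debug)]
    pub struct Error(pub Vec<String>);

    impl Error {
        pub fn new(message: String) -> Self {
            Error(vec![message])
        }

        // Convenience for `return Error::err(...)` at error sites.
        pub fn err<T>(message: String) -> Result<T> {
            Err(Self::new(message))
        }

        // For `map_err`: convert the underlying error, then push one more
        // level of context on top. The closure runs only on failure.
        pub fn wrap<E: Into<Error>, F: FnOnce() -> String>(f: F) -> impl FnOnce(E) -> Error {
            move |e| {
                let mut error: Error = e.into();
                error.0.push(f());
                error
            }
        }

        // The updated tests compare against the innermost message.
        pub fn message(&self) -> &str {
            self.0.first().map(String::as_str).unwrap_or("")
        }
    }

    // Lets a bare `?` lift io errors, as in `fs::read_to_string(grammar_path)?`.
    impl From<std::io::Error> for Error {
        fn from(error: std::io::Error) -> Self {
            Error::new(error.to_string())
        }
    }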

@@ -602,7 +602,7 @@ impl<'a> ParseTableBuilder<'a> {
         }
         write!(&mut msg, "\n").unwrap();
-        Err(Error(msg))
+        Err(Error::new(msg))
     }
 
     fn get_auxiliary_node_info(

@@ -64,20 +64,16 @@ pub fn generate_parser_in_directory(
         node_types_json,
     } = generate_parser_for_grammar_with_opts(&grammar_json, minimize, state_ids_to_log)?;
 
-    fs::write(&repo_src_path.join("parser.c"), c_code)
-        .map_err(|e| format!("Failed to write parser.c: {}", e))?;
-    fs::write(&repo_src_path.join("node-types.json"), node_types_json)
-        .map_err(|e| format!("Failed to write parser.c: {}", e))?;
-    fs::write(
+    write_file(&repo_src_path.join("parser.c"), c_code)?;
+    write_file(&repo_src_path.join("node-types.json"), node_types_json)?;
+    write_file(
         &repo_header_path.join("parser.h"),
         tree_sitter::PARSER_HEADER,
-    )
-    .map_err(|e| format!("Failed to write parser.h: {}", e))?;
-    fs::write(
+    )?;
+    write_file(
         &repo_path.join("index.js"),
         npm_files::index_js(&language_name),
-    )
-    .map_err(|e| format!("Failed to write index.js: {}", e))?;
+    )?;
     ensure_file(&repo_src_path.join("binding.cc"), || {
         npm_files::binding_cc(&language_name)
     })?;
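
(The `write_file` helper these calls use is introduced by a later hunk in this same commit; see below.)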

@@ -139,7 +135,7 @@ fn load_grammar_file(grammar_path: &Path) -> Result<String> {
     match grammar_path.extension().and_then(|e| e.to_str()) {
         Some("js") => Ok(load_js_grammar_file(grammar_path)?),
         Some("json") => Ok(fs::read_to_string(grammar_path)?),
-        _ => Err(Error(format!(
+        _ => Err(Error::new(format!(
             "Unknown grammar file extension: {:?}",
             grammar_path
         ))),

@@ -169,7 +165,7 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
     match output.status.code() {
         None => panic!("Node process was killed"),
         Some(0) => {}
-        Some(code) => return Err(Error(format!("Node process exited with status {}", code))),
+        Some(code) => return Error::err(format!("Node process exited with status {}", code)),
     }
 
     let mut result = String::from_utf8(output.stdout).expect("Got invalid UTF8 from node");

@@ -177,11 +173,16 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
     Ok(result)
 }
 
+fn write_file(path: &Path, body: impl AsRef<[u8]>) -> Result<()> {
+    fs::write(path, body).map_err(Error::wrap(|| {
+        format!("Failed to write {:?}", path.file_name().unwrap())
+    }))
+}
+
 fn ensure_file<T: AsRef<[u8]>>(path: &PathBuf, f: impl Fn() -> T) -> Result<()> {
     if path.exists() {
         Ok(())
     } else {
-        fs::write(path, f().as_ref())
-            .map_err(|e| Error(format!("Failed to write file {:?}: {}", path, e)))
+        write_file(path, f().as_ref())
     }
 }
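
`Error::wrap` takes a closure rather than a preformatted string, so the context message is built only when the write actually fails. Under the sketch above, a caller could render every accumulated level like this (illustrative only; the commit's real reporting code is not in these hunks):

    // Hypothetical: print the outermost context first, indenting each
    // inner level, given the innermost-first Vec<String> sketch above.
    fn report(error: &Error) {
        for (depth, message) in error.0.iter().rev().enumerate() {
            eprintln!("{}{}", "  ".repeat(depth), message);
        }
    }

A failed write of parser.c would then print the `Failed to write "parser.c"` context line first, with the underlying io error message indented beneath it.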

@@ -98,7 +98,9 @@ pub(crate) fn expand_tokens(mut grammar: ExtractedLexicalGrammar) -> Result<Lexi
         let last_state_id = builder.nfa.last_state_id();
         builder
             .expand_rule(&variable.rule, last_state_id)
-            .map_err(|Error(msg)| Error(format!("Rule {} {}", variable.name, msg)))?;
+            .map_err(Error::wrap(|| {
+                format!("Error processing rule {}", variable.name)
+            }))?;
 
         if !is_immediate_token {
             builder.is_sep = true;
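
This hunk is the commit title in miniature: the old code destructured the error and spliced its message into one flat string, while `Error::wrap` keeps the rule name as a separate level above the original message. A hypothetical check against the sketched type:

    let cause = Error::new("invalid regex".to_string());
    let wrapped = Error::wrap(|| "Error processing rule identifier".to_string())(cause);
    // Two levels now: the original message plus one layer of context.
    assert_eq!(wrapped.0.len(), 2);
    assert_eq!(wrapped.message(), "invalid regex");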

@@ -125,9 +127,7 @@ impl NfaBuilder {
         match rule {
             Rule::Pattern(s) => {
                 let s = preprocess_regex(s);
-                let ast = parse::Parser::new()
-                    .parse(&s)
-                    .map_err(|e| Error(e.to_string()))?;
+                let ast = parse::Parser::new().parse(&s)?;
                 self.expand_regex(&ast, next_state_id)
             }
             Rule::String(s) => {
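
The bare `?` on `parse` implies a `From` conversion for the regex parser's error type, presumably added elsewhere in this commit (it is not visible in these hunks). Assuming the parser is regex_syntax's AST parser, such an impl could look like:

    // Hypothetical conversion enabling `parse(&s)?` above.
    impl From<regex_syntax::ast::Error> for Error {
        fn from(error: regex_syntax::ast::Error) -> Self {
            Error::new(error.to_string())
        }
    }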

@@ -95,10 +95,10 @@ pub(super) fn extract_tokens(
         if let Rule::Symbol(symbol) = rule {
             let new_symbol = symbol_replacer.replace_symbol(symbol);
             if new_symbol.is_non_terminal() {
-                return Err(Error(format!(
+                return Error::err(format!(
                     "Non-token symbol '{}' cannot be used as an extra token",
                     &variables[new_symbol.index].name
-                )));
+                ));
             } else {
                 extra_tokens.push(new_symbol);
             }

@@ -116,10 +116,10 @@ pub(super) fn extract_tokens(
         let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
         if let Rule::Symbol(symbol) = rule {
             if symbol.is_non_terminal() {
-                return Err(Error(format!(
+                return Error::err(format!(
                     "Rule '{}' cannot be used as both an external token and a non-terminal rule",
                     &variables[symbol.index].name,
-                )));
+                ));
             }
 
             if symbol.is_external() {

@@ -136,9 +136,9 @@ pub(super) fn extract_tokens(
                 })
             }
         } else {
-            return Err(Error(format!(
+            return Error::err(format!(
                 "Non-symbol rules cannot be used as external tokens"
-            )));
+            ));
         }
     }
 

@@ -146,10 +146,10 @@ pub(super) fn extract_tokens(
     if let Some(token) = grammar.word_token {
         let token = symbol_replacer.replace_symbol(token);
         if token.is_non_terminal() {
-            return Err(Error(format!(
+            return Error::err(format!(
                 "Non-terminal symbol '{}' cannot be used as the word token",
                 &variables[token.index].name
-            )));
+            ));
         }
         word_token = Some(token);
     }

@@ -482,9 +482,9 @@ mod test {
         grammar.extra_tokens = vec![Rule::non_terminal(1)];
 
         match extract_tokens(grammar) {
-            Err(Error(s)) => {
+            Err(e) => {
                 assert_eq!(
-                    s,
+                    e.message(),
                     "Non-token symbol 'rule_1' cannot be used as an extra token"
                 );
             }

@@ -510,8 +510,8 @@ mod test {
         grammar.external_tokens = vec![Variable::named("rule_1", Rule::non_terminal(1))];
 
         match extract_tokens(grammar) {
-            Err(Error(s)) => {
-                assert_eq!(s, "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
+            Err(e) => {
+                assert_eq!(e.message(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
             }
             _ => {
                 panic!("Expected an error but got no error");

@@ -187,14 +187,14 @@ pub(super) fn flatten_grammar(grammar: ExtractedSyntaxGrammar) -> Result<SyntaxG
     for (i, variable) in variables.iter().enumerate() {
         for production in &variable.productions {
             if production.steps.is_empty() && symbol_is_used(&variables, Symbol::non_terminal(i)) {
-                return Err(Error(format!(
+                return Error::err(format!(
                     "The rule `{}` matches the empty string.
 
 Tree-sitter does not support syntactic rules that match the empty string
 unless they are used only as the grammar's start rule.
 ",
                     variable.name
-                )));
+                ));
             }
         }
     }

@@ -7,7 +7,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
     let interner = Interner { grammar };
 
     if variable_type_for_name(&grammar.variables[0].name) == VariableType::Hidden {
-        return Err(Error("A grammar's start rule must be visible.".to_string()));
+        return Error::err("A grammar's start rule must be visible.".to_string());
     }
 
     let mut variables = Vec::with_capacity(grammar.variables.len());

@@ -227,7 +227,7 @@ mod tests {
         let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
 
         match result {
-            Err(Error(message)) => assert_eq!(message, "Undefined symbol `y`"),
+            Err(e) => assert_eq!(e.message(), "Undefined symbol `y`"),
             _ => panic!("Expected an error but got none"),
         }
     }