Overhaul CLI error handling to allow multiple levels of context

This commit is contained in:
Max Brunsfeld 2019-05-30 16:52:30 -07:00
parent b7bc2d30ae
commit d274e81d0d
17 changed files with 214 additions and 129 deletions

View file

@ -4,6 +4,7 @@ use std::path::{Path, PathBuf};
use std::time::Instant;
use std::{env, fs, usize};
use tree_sitter::{Language, Parser};
use tree_sitter_cli::error::Error;
use tree_sitter_cli::loader::Loader;
include!("../src/tests/helpers/dirs.rs");
@ -153,7 +154,7 @@ fn parse(parser: &mut Parser, example_path: &Path, max_path_length: usize) -> us
);
let source_code = fs::read(example_path)
.map_err(|e| format!("Failed to read {:?} - {}", example_path, e))
.map_err(Error::wrap(|| format!("Failed to read {:?}", example_path)))
.unwrap();
let time = Instant::now();
let _tree = parser
@ -171,6 +172,8 @@ fn get_language(path: &Path) -> Language {
let src_dir = GRAMMARS_DIR.join(path).join("src");
TEST_LOADER
.load_language_at_path(&src_dir, &src_dir)
.map_err(|e| format!("Failed to load language at path {:?} - {:?}", src_dir, e))
.map_err(Error::wrap(|| {
format!("Failed to load language at path {:?}", src_dir)
}))
.unwrap()
}

View file

@ -1,46 +1,82 @@
use std::fmt::Write;
use std::io;
use tree_sitter_highlight::PropertySheetError;
#[derive(Debug)]
pub struct Error(pub String);
pub struct Error(pub Vec<String>);
pub type Result<T> = std::result::Result<T, Error>;
impl Error {
pub fn grammar(message: &str) -> Self {
Error(format!("Grammar error: {}", message))
Error(vec![format!("Grammar error: {}", message)])
}
pub fn regex(message: &str) -> Self {
Error(format!("Regex error: {}", message))
Error(vec![format!("Regex error: {}", message)])
}
pub fn undefined_symbol(name: &str) -> Self {
Error(format!("Undefined symbol `{}`", name))
Error(vec![format!("Undefined symbol `{}`", name)])
}
pub fn new(message: String) -> Self {
Error(vec![message])
}
pub fn err<T>(message: String) -> Result<T> {
Err(Error::new(message))
}
pub fn wrap<E: Into<Self>, M: ToString, F: FnOnce() -> M>(
message_fn: F,
) -> impl FnOnce(E) -> Self {
|e| {
let mut result = e.into();
result.0.push(message_fn().to_string());
result
}
}
pub fn message(&self) -> String {
let mut result = self.0.last().unwrap().clone();
if self.0.len() > 1 {
result.push_str("\nDetails:\n");
for msg in self.0[0..self.0.len() - 1].iter().rev() {
writeln!(&mut result, " {}", msg).unwrap();
}
}
result
}
}
impl From<serde_json::Error> for Error {
fn from(error: serde_json::Error) -> Self {
Error(error.to_string())
Error::new(error.to_string())
}
}
impl From<io::Error> for Error {
fn from(error: io::Error) -> Self {
Error(error.to_string())
Error::new(error.to_string())
}
}
impl From<rsass::Error> for Error {
fn from(error: rsass::Error) -> Self {
Error(error.to_string())
Error::new(error.to_string())
}
}
impl From<regex_syntax::ast::Error> for Error {
fn from(error: regex_syntax::ast::Error) -> Self {
Error::new(error.to_string())
}
}
impl From<String> for Error {
fn from(error: String) -> Self {
Error(error)
Error::new(error)
}
}

View file

@ -602,7 +602,7 @@ impl<'a> ParseTableBuilder<'a> {
}
write!(&mut msg, "\n").unwrap();
Err(Error(msg))
Err(Error::new(msg))
}
fn get_auxiliary_node_info(

View file

@ -64,20 +64,16 @@ pub fn generate_parser_in_directory(
node_types_json,
} = generate_parser_for_grammar_with_opts(&grammar_json, minimize, state_ids_to_log)?;
fs::write(&repo_src_path.join("parser.c"), c_code)
.map_err(|e| format!("Failed to write parser.c: {}", e))?;
fs::write(&repo_src_path.join("node-types.json"), node_types_json)
.map_err(|e| format!("Failed to write parser.c: {}", e))?;
fs::write(
write_file(&repo_src_path.join("parser.c"), c_code)?;
write_file(&repo_src_path.join("node-types.json"), node_types_json)?;
write_file(
&repo_header_path.join("parser.h"),
tree_sitter::PARSER_HEADER,
)
.map_err(|e| format!("Failed to write parser.h: {}", e))?;
fs::write(
)?;
write_file(
&repo_path.join("index.js"),
npm_files::index_js(&language_name),
)
.map_err(|e| format!("Failed to write index.js: {}", e))?;
)?;
ensure_file(&repo_src_path.join("binding.cc"), || {
npm_files::binding_cc(&language_name)
})?;
@ -139,7 +135,7 @@ fn load_grammar_file(grammar_path: &Path) -> Result<String> {
match grammar_path.extension().and_then(|e| e.to_str()) {
Some("js") => Ok(load_js_grammar_file(grammar_path)?),
Some("json") => Ok(fs::read_to_string(grammar_path)?),
_ => Err(Error(format!(
_ => Err(Error::new(format!(
"Unknown grammar file extension: {:?}",
grammar_path
))),
@ -169,7 +165,7 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
match output.status.code() {
None => panic!("Node process was killed"),
Some(0) => {}
Some(code) => return Err(Error(format!("Node process exited with status {}", code))),
Some(code) => return Error::err(format!("Node process exited with status {}", code)),
}
let mut result = String::from_utf8(output.stdout).expect("Got invalid UTF8 from node");
@ -177,11 +173,16 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
Ok(result)
}
fn write_file(path: &Path, body: impl AsRef<[u8]>) -> Result<()> {
fs::write(path, body).map_err(Error::wrap(|| {
format!("Failed to write {:?}", path.file_name().unwrap())
}))
}
fn ensure_file<T: AsRef<[u8]>>(path: &PathBuf, f: impl Fn() -> T) -> Result<()> {
if path.exists() {
Ok(())
} else {
fs::write(path, f().as_ref())
.map_err(|e| Error(format!("Failed to write file {:?}: {}", path, e)))
write_file(path, f().as_ref())
}
}

View file

@ -98,7 +98,9 @@ pub(crate) fn expand_tokens(mut grammar: ExtractedLexicalGrammar) -> Result<Lexi
let last_state_id = builder.nfa.last_state_id();
builder
.expand_rule(&variable.rule, last_state_id)
.map_err(|Error(msg)| Error(format!("Rule {} {}", variable.name, msg)))?;
.map_err(Error::wrap(|| {
format!("Error processing rule {}", variable.name)
}))?;
if !is_immediate_token {
builder.is_sep = true;
@ -125,9 +127,7 @@ impl NfaBuilder {
match rule {
Rule::Pattern(s) => {
let s = preprocess_regex(s);
let ast = parse::Parser::new()
.parse(&s)
.map_err(|e| Error(e.to_string()))?;
let ast = parse::Parser::new().parse(&s)?;
self.expand_regex(&ast, next_state_id)
}
Rule::String(s) => {

View file

@ -95,10 +95,10 @@ pub(super) fn extract_tokens(
if let Rule::Symbol(symbol) = rule {
let new_symbol = symbol_replacer.replace_symbol(symbol);
if new_symbol.is_non_terminal() {
return Err(Error(format!(
return Error::err(format!(
"Non-token symbol '{}' cannot be used as an extra token",
&variables[new_symbol.index].name
)));
));
} else {
extra_tokens.push(new_symbol);
}
@ -116,10 +116,10 @@ pub(super) fn extract_tokens(
let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
if let Rule::Symbol(symbol) = rule {
if symbol.is_non_terminal() {
return Err(Error(format!(
return Error::err(format!(
"Rule '{}' cannot be used as both an external token and a non-terminal rule",
&variables[symbol.index].name,
)));
));
}
if symbol.is_external() {
@ -136,9 +136,9 @@ pub(super) fn extract_tokens(
})
}
} else {
return Err(Error(format!(
return Error::err(format!(
"Non-symbol rules cannot be used as external tokens"
)));
));
}
}
@ -146,10 +146,10 @@ pub(super) fn extract_tokens(
if let Some(token) = grammar.word_token {
let token = symbol_replacer.replace_symbol(token);
if token.is_non_terminal() {
return Err(Error(format!(
return Error::err(format!(
"Non-terminal symbol '{}' cannot be used as the word token",
&variables[token.index].name
)));
));
}
word_token = Some(token);
}
@ -482,9 +482,9 @@ mod test {
grammar.extra_tokens = vec![Rule::non_terminal(1)];
match extract_tokens(grammar) {
Err(Error(s)) => {
Err(e) => {
assert_eq!(
s,
e.message(),
"Non-token symbol 'rule_1' cannot be used as an extra token"
);
}
@ -510,8 +510,8 @@ mod test {
grammar.external_tokens = vec![Variable::named("rule_1", Rule::non_terminal(1))];
match extract_tokens(grammar) {
Err(Error(s)) => {
assert_eq!(s, "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
Err(e) => {
assert_eq!(e.message(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
}
_ => {
panic!("Expected an error but got no error");

View file

@ -187,14 +187,14 @@ pub(super) fn flatten_grammar(grammar: ExtractedSyntaxGrammar) -> Result<SyntaxG
for (i, variable) in variables.iter().enumerate() {
for production in &variable.productions {
if production.steps.is_empty() && symbol_is_used(&variables, Symbol::non_terminal(i)) {
return Err(Error(format!(
return Error::err(format!(
"The rule `{}` matches the empty string.
Tree-sitter does not support syntactic rules that match the empty string
unless they are used only as the grammar's start rule.
",
variable.name
)));
));
}
}
}

View file

@ -7,7 +7,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
let interner = Interner { grammar };
if variable_type_for_name(&grammar.variables[0].name) == VariableType::Hidden {
return Err(Error("A grammar's start rule must be visible.".to_string()));
return Error::err("A grammar's start rule must be visible.".to_string());
}
let mut variables = Vec::with_capacity(grammar.variables.len());
@ -227,7 +227,7 @@ mod tests {
let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
match result {
Err(Error(message)) => assert_eq!(message, "Undefined symbol `y`"),
Err(e) => assert_eq!(e.message(), "Undefined symbol `y`"),
_ => panic!("Expected an error but got none"),
}
}

View file

@ -362,20 +362,22 @@ fn language_for_injection_string<'a>(
string: &str,
) -> Option<(Language, &'a PropertySheet<Properties>)> {
match loader.language_configuration_for_injection_string(string) {
Err(message) => {
Err(e) => {
eprintln!(
"Failed to load language for injection string '{}': {}",
string, message.0
string,
e.message()
);
None
}
Ok(None) => None,
Ok(Some((language, configuration))) => {
match configuration.highlight_property_sheet(language) {
Err(message) => {
Err(e) => {
eprintln!(
"Failed to load property sheet for injection string '{}': {}",
string, message.0
string,
e.message()
);
None
}

View file

@ -165,8 +165,10 @@ impl Loader {
struct GrammarJSON {
name: String,
}
let mut grammar_file = fs::File::open(grammar_path)?;
let grammar_json: GrammarJSON = serde_json::from_reader(BufReader::new(&mut grammar_file))?;
let mut grammar_file =
fs::File::open(grammar_path).map_err(Error::wrap(|| "Failed to read grammar.json"))?;
let grammar_json: GrammarJSON = serde_json::from_reader(BufReader::new(&mut grammar_file))
.map_err(Error::wrap(|| "Failed to parse grammar.json"))?;
let scanner_path = if scanner_path.exists() {
Some(scanner_path)
@ -197,7 +199,11 @@ impl Loader {
let mut library_path = self.parser_lib_path.join(name);
library_path.set_extension(DYLIB_EXTENSION);
if needs_recompile(&library_path, &parser_path, &scanner_path)? {
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path).map_err(
Error::wrap(|| "Failed to compare source and binary timestamps"),
)?;
if recompile {
let mut config = cc::Build::new();
config
.cpp(true)
@ -244,9 +250,11 @@ impl Loader {
command.arg("-xc").arg(parser_path);
}
let output = command.output()?;
let output = command
.output()
.map_err(Error::wrap(|| "Failed to execute C compiler"))?;
if !output.status.success() {
return Err(Error(format!(
return Err(Error::new(format!(
"Parser compilation failed.\nStdout: {}\nStderr: {}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
@ -254,16 +262,16 @@ impl Loader {
}
}
let library = Library::new(&library_path).map_err(|e| {
Error(format!(
"Error opening dynamic library {:?}: {}",
&library_path, e
))
})?;
let library = Library::new(&library_path).map_err(Error::wrap(|| {
format!("Error opening dynamic library {:?}", &library_path)
}))?;
let language_fn_name = format!("tree_sitter_{}", replace_dashes_with_underscores(name));
let language = unsafe {
let language_fn: Symbol<unsafe extern "C" fn() -> Language> =
library.get(language_fn_name.as_bytes())?;
let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
.get(language_fn_name.as_bytes())
.map_err(Error::wrap(|| {
format!("Failed to load symbol {}", language_fn_name)
}))?;
language_fn()
};
mem::forget(library);
@ -349,8 +357,19 @@ impl LanguageConfiguration {
self.highlight_property_sheet
.get_or_try_init(|| {
if let Some(path) = &self.highlight_property_sheet_path {
let sheet_json = fs::read_to_string(path)?;
let sheet = load_property_sheet(language, &sheet_json)?;
let sheet_json = fs::read_to_string(path).map_err(Error::wrap(|| {
format!(
"Failed to read property sheet {:?}",
path.file_name().unwrap()
)
}))?;
let sheet =
load_property_sheet(language, &sheet_json).map_err(Error::wrap(|| {
format!(
"Failed to parse property sheet {:?}",
path.file_name().unwrap()
)
}))?;
Ok(Some(sheet))
} else {
Ok(None)

View file

@ -1,4 +1,5 @@
use clap::{App, AppSettings, Arg, SubCommand};
use error::Error;
use std::env;
use std::fs;
use std::path::Path;
@ -10,7 +11,7 @@ use tree_sitter_cli::{
fn main() {
if let Err(e) = run() {
eprintln!("{}", e.0);
eprintln!("{}", e.message());
exit(1);
}
}
@ -174,15 +175,35 @@ fn run() -> error::Result<()> {
for path in paths {
let path = Path::new(path);
let language = if let Some(scope) = matches.value_of("scope") {
if let Some(config) = loader.language_configuration_for_scope(scope)? {
if let Some(config) =
loader
.language_configuration_for_scope(scope)
.map_err(Error::wrap(|| {
format!("Failed to load language for scope '{}'", scope)
}))?
{
config.0
} else {
return Err(error::Error(format!("Unknown scope '{}'", scope)));
return Error::err(format!("Unknown scope '{}'", scope));
}
} else if let Some((l, _)) = loader.language_configuration_for_file_name(path)? {
l
} else if let Some(l) = loader.language_at_path(&current_dir)? {
l
} else if let Some((lang, _)) = loader
.language_configuration_for_file_name(path)
.map_err(Error::wrap(|| {
format!(
"Failed to load language for file name {:?}",
path.file_name().unwrap()
)
}))?
{
lang
} else if let Some(lang) =
loader
.language_at_path(&current_dir)
.map_err(Error::wrap(|| {
"Failed to load language in current directory"
}))?
{
lang
} else {
eprintln!("No language found");
return Ok(());
@ -202,7 +223,7 @@ fn run() -> error::Result<()> {
}
if has_error {
return Err(error::Error(String::new()));
return Error::err(String::new());
}
} else if let Some(matches) = matches.subcommand_matches("highlight") {
let paths = matches.values_of("path").unwrap().into_iter();
@ -218,7 +239,7 @@ fn run() -> error::Result<()> {
if let Some(scope) = matches.value_of("scope") {
language_config = loader.language_configuration_for_scope(scope)?;
if language_config.is_none() {
return Err(error::Error(format!("Unknown scope '{}'", scope)));
return Error::err(format!("Unknown scope '{}'", scope));
}
} else {
language_config = None;
@ -245,9 +266,7 @@ fn run() -> error::Result<()> {
highlight::ansi(&loader, &config.theme, &source, language, sheet, time)?;
}
} else {
return Err(error::Error(format!(
"No syntax highlighting property sheet specified"
)));
return Error::err(format!("No syntax highlighting property sheet specified"));
}
}
} else if let Some(matches) = matches.subcommand_matches("build-wasm") {

View file

@ -29,8 +29,9 @@ pub fn parse_file_at_path(
let mut _log_session = None;
let mut parser = Parser::new();
parser.set_language(language)?;
let mut source_code = fs::read(path)
.map_err(|e| Error(format!("Error reading source file {:?}: {}", path, e)))?;
let mut source_code = fs::read(path).map_err(Error::wrap(|| {
format!("Error reading source file {:?}", path)
}))?;
// If the `--cancel` flag was passed, then cancel the parse
// when the user types a newline.

View file

@ -461,8 +461,10 @@ pub fn generate_property_sheets_in_directory(repo_path: &Path) -> Result<()> {
let property_sheet_json_path = src_dir_path
.join(css_path.file_name().unwrap())
.with_extension("json");
let property_sheet_json_file = File::create(&property_sheet_json_path)
.map_err(|e| format!("Failed to create {:?}: {}", property_sheet_json_path, e))?;
let property_sheet_json_file =
File::create(&property_sheet_json_path).map_err(Error::wrap(|| {
format!("Failed to create {:?}", property_sheet_json_path)
}))?;
let mut writer = BufWriter::new(property_sheet_json_file);
serde_json::to_writer_pretty(&mut writer, &sheet)?;
}
@ -561,7 +563,7 @@ fn parse_sass_items(
is_immediate: immediate,
});
} else {
return Err(Error(format!("Node type {} must be separated by whitespace or the `>` operator", value)));
return Error::err(format!("Node type {} must be separated by whitespace or the `>` operator", value));
}
}
operator_was_immediate = None;
@ -576,7 +578,7 @@ fn parse_sass_items(
None => return Err(interpolation_error()),
Some("text") => {
if operator_was_immediate.is_some() {
return Err(Error("The `text` attribute must be used in combination with a node type or field".to_string()));
return Error::err("The `text` attribute must be used in combination with a node type or field".to_string());
}
if let Some(last_step) = prefix.last_mut() {
last_step.text_pattern =
@ -595,21 +597,21 @@ fn parse_sass_items(
});
operator_was_immediate = None;
} else {
return Err(Error("The `token` attribute cannot be used in combination with a node type".to_string()));
return Error::err("The `token` attribute cannot be used in combination with a node type".to_string());
}
}
_ => {
return Err(Error(format!(
return Error::err(format!(
"Unsupported attribute {}",
part
)));
));
}
}
}
SelectorPart::PseudoElement { .. } => {
return Err(Error(
return Error::err(
"Pseudo elements are not supported".to_string(),
));
);
}
SelectorPart::Pseudo { name, arg } => match name.single_raw() {
None => return Err(interpolation_error()),
@ -621,19 +623,19 @@ fn parse_sass_items(
if let Ok(i) = usize::from_str_radix(&arg_str, 10) {
last_step.child_index = Some(i);
} else {
return Err(Error(format!(
return Error::err(format!(
"Invalid child index {}",
arg
)));
));
}
}
}
}
_ => {
return Err(Error(format!(
return Error::err(format!(
"Unsupported pseudo-class {}",
part
)));
));
}
},
SelectorPart::Descendant => {
@ -644,10 +646,10 @@ fn parse_sass_items(
if operator == '>' {
operator_was_immediate = Some(true);
} else {
return Err(Error(format!(
return Error::err(format!(
"Unsupported operator {}",
operator
)));
));
}
}
}
@ -657,7 +659,7 @@ fn parse_sass_items(
}
parse_sass_items(items, &full_selectors, result)?;
}
_ => return Err(Error(format!("Unsupported syntax type {:?}", item))),
_ => return Error::err(format!("Unsupported syntax type {:?}", item)),
}
}
@ -687,7 +689,7 @@ fn process_at_rules(
items.splice(i..(i + 1), imported_items);
continue;
} else {
return Err(Error("@import arguments must be strings".to_string()));
return Err(Error::new("@import arguments must be strings".to_string()));
}
}
rsass::Item::AtRule { name, args, .. } => match name.as_str() {
@ -698,10 +700,10 @@ fn process_at_rules(
items.remove(i);
continue;
} else {
return Err(Error("@schema arguments must be strings".to_string()));
return Error::err("@schema arguments must be strings".to_string());
}
}
_ => return Err(Error(format!("Unsupported at-rule '{}'", name))),
_ => return Error::err(format!("Unsupported at-rule '{}'", name)),
},
_ => {}
}
@ -730,7 +732,9 @@ fn parse_sass_value(value: &Value) -> Result<PropertyValue> {
result.insert("args".to_string(), PropertyValue::Array(args));
Ok(PropertyValue::Object(result))
} else {
Err(Error("String interpolation is not supported".to_string()))
Err(Error::new(
"String interpolation is not supported".to_string(),
))
}
}
Value::List(elements, ..) => {
@ -744,7 +748,7 @@ fn parse_sass_value(value: &Value) -> Result<PropertyValue> {
Value::Numeric(n, _) => Ok(PropertyValue::Number(n.to_integer())),
Value::True => Ok(PropertyValue::Boolean(true)),
Value::False => Ok(PropertyValue::Boolean(false)),
_ => Err(Error(format!(
_ => Err(Error::new(format!(
"Property values must be strings or function calls. Got {:?}",
value
))),
@ -781,13 +785,13 @@ fn resolve_path(base: &Path, p: &str) -> Result<PathBuf> {
}
}
}
Err(Error(format!("Could not resolve import path `{}`", p)))
Err(Error::new(format!("Could not resolve import path `{}`", p)))
}
fn check_node_kind(name: &String) -> Result<()> {
for c in name.chars() {
if !c.is_alphanumeric() && c != '_' {
return Err(Error(format!("Invalid identifier '{}'", name)));
return Err(Error::new(format!("Invalid identifier '{}'", name)));
}
}
Ok(())
@ -799,12 +803,12 @@ fn get_string_value(mut s: String) -> Result<String> {
s.remove(0);
Ok(s)
} else {
Err(Error(format!("Unsupported string literal {}", s)))
Err(Error::new(format!("Unsupported string literal {}", s)))
}
}
fn interpolation_error() -> Error {
Error("String interpolation is not supported".to_string())
Error::new("String interpolation is not supported".to_string())
}
#[cfg(test)]

View file

@ -92,7 +92,7 @@ pub fn run_tests_at_path(
println!("\n {}. {}:", i + 1, name);
print_diff(actual, expected);
}
Err(Error(String::new()))
Error::err(String::new())
} else {
Ok(())
}

View file

@ -224,10 +224,11 @@ fn test_feature_corpus_files() {
let expected_message = fs::read_to_string(&error_message_path).unwrap();
if let Err(e) = generate_result {
if e.0 != expected_message {
if e.message() != expected_message {
eprintln!(
"Unexpected error message.\n\nExpected:\n\n{}\nActual:\n\n{}\n",
expected_message, e.0
expected_message,
e.message()
);
failure_count += 1;
}

View file

@ -7,18 +7,12 @@ use std::process::Command;
pub fn get_grammar_name(src_dir: &Path) -> Result<String> {
let grammar_json_path = src_dir.join("grammar.json");
let grammar_json = fs::read_to_string(&grammar_json_path).map_err(|e| {
format!(
"Failed to read grammar file {:?} - {}",
grammar_json_path, e
)
})?;
let grammar: GrammarJSON = serde_json::from_str(&grammar_json).map_err(|e| {
format!(
"Failed to parse grammar file {:?} - {}",
grammar_json_path, e
)
})?;
let grammar_json = fs::read_to_string(&grammar_json_path).map_err(Error::wrap(|| {
format!("Failed to read grammar file {:?}", grammar_json_path)
}))?;
let grammar: GrammarJSON = serde_json::from_str(&grammar_json).map_err(Error::wrap(|| {
format!("Failed to parse grammar file {:?}", grammar_json_path)
}))?;
Ok(grammar.name)
}
@ -55,7 +49,7 @@ pub fn compile_language_to_wasm(language_dir: &Path, force_docker: bool) -> Resu
let user_id_output = Command::new("id")
.arg("-u")
.output()
.map_err(|e| format!("Failed to get current user id {}", e))?;
.map_err(Error::wrap(|| "Failed to get current user id"))?;
let user_id = String::from_utf8_lossy(&user_id_output.stdout);
let user_id = user_id.trim();
command.args(&["--user", user_id]);
@ -82,8 +76,9 @@ pub fn compile_language_to_wasm(language_dir: &Path, force_docker: bool) -> Resu
]);
// Find source files to pass to emscripten
let src_entries = fs::read_dir(&src_dir)
.map_err(|e| format!("Failed to read source directory {:?} - {}", src_dir, e))?;
let src_entries = fs::read_dir(&src_dir).map_err(Error::wrap(|| {
format!("Failed to read source directory {:?}", src_dir)
}))?;
for entry in src_entries {
let entry = entry?;
@ -107,7 +102,7 @@ pub fn compile_language_to_wasm(language_dir: &Path, force_docker: bool) -> Resu
let output = command
.output()
.map_err(|e| format!("Failed to run emcc command - {}", e))?;
.map_err(Error::wrap(|| "Failed to run emcc command"))?;
if !output.status.success() {
return Err(Error::from(format!(
"emcc command failed - {}",
@ -116,8 +111,9 @@ pub fn compile_language_to_wasm(language_dir: &Path, force_docker: bool) -> Resu
}
// Move the created `.wasm` file into the current working directory.
fs::rename(&language_dir.join(&output_filename), &output_filename)
.map_err(|e| format!("Couldn't find output file {:?} - {}", output_filename, e))?;
fs::rename(&language_dir.join(&output_filename), &output_filename).map_err(Error::wrap(
|| format!("Couldn't find output file {:?}", output_filename),
))?;
Ok(())
}

View file

@ -1,3 +1,4 @@
use super::error::Error;
use super::wasm;
use std::fs;
use std::net::TcpListener;
@ -24,20 +25,22 @@ pub fn serve(grammar_path: &Path) {
let url = format!("127.0.0.1:{}", port);
let server = Server::http(&url).expect("Failed to start web server");
let grammar_name = wasm::get_grammar_name(&grammar_path.join("src"))
.map_err(|e| format!("Failed to get wasm filename: {:?}", e))
.map_err(Error::wrap(|| "Failed to get wasm filename"))
.unwrap();
let wasm_filename = format!("tree-sitter-{}.wasm", grammar_name);
let language_wasm = fs::read(grammar_path.join(&wasm_filename))
.map_err(|_| {
.map_err(Error::wrap(|| {
format!(
"Failed to read '{}'. Run `tree-sitter build-wasm` first.",
"Failed to read {}. Run `tree-sitter build-wasm` first.",
wasm_filename
)
})
}))
.unwrap();
webbrowser::open(&format!("http://127.0.0.1:{}", port))
.map_err(|e| format!("Failed to open '{}' in a web browser. Error: {}", url, e))
.map_err(Error::wrap(|| {
format!("Failed to open '{}' in a web browser", url)
}))
.unwrap();
let html = HTML