cli: Use anyhow and thiserror for errors

This patch updates the CLI to use anyhow and thiserror for error
management.  The main feature that our custom `Error` type was providing
was a _list_ of messages, which would allow us to annotate "lower-level"
errors with more contextual information.  This is exactly what's
provided by anyhow's `Context` trait.

(This is setup work for a future PR that will pull the `config` and
`loader` modules out into separate crates; by using `anyhow` we wouldn't
have to deal with a circular dependency with the new crates.)
This commit is contained in:
Douglas Creager 2021-06-09 12:32:22 -04:00
parent 9d77561c43
commit d2d01e77e3
33 changed files with 237 additions and 419 deletions

29
Cargo.lock generated
View file

@ -29,6 +29,12 @@ dependencies = [
"winapi",
]
[[package]]
name = "anyhow"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b"
[[package]]
name = "arrayref"
version = "0.3.6"
@ -585,6 +591,26 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "thiserror"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa6f76457f59514c7eeb4e59d891395fab0b2fd1d40723ae737d64153392e9c6"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a36768c0fbf1bb15eca10defa29526bda730a2376c2ab4393ccfa16fb1a318d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "thread_local"
version = "1.1.3"
@ -647,6 +673,7 @@ name = "tree-sitter-cli"
version = "0.19.5"
dependencies = [
"ansi_term 0.12.1",
"anyhow",
"atty",
"cc",
"clap",
@ -680,6 +707,7 @@ name = "tree-sitter-highlight"
version = "0.19.2"
dependencies = [
"regex",
"thiserror",
"tree-sitter",
]
@ -689,6 +717,7 @@ version = "0.19.2"
dependencies = [
"memchr",
"regex",
"thiserror",
"tree-sitter",
]

View file

@ -20,6 +20,7 @@ harness = false
[dependencies]
ansi_term = "0.12"
anyhow = "1.0"
atty = "0.2"
cc = "^1.0.58"
clap = "2.32"

View file

@ -1,10 +1,10 @@
use anyhow::Context;
use lazy_static::lazy_static;
use std::collections::BTreeMap;
use std::path::{Path, PathBuf};
use std::time::Instant;
use std::{env, fs, str, usize};
use tree_sitter::{Language, Parser, Query};
use tree_sitter_cli::error::Error;
use tree_sitter_cli::loader::Loader;
include!("../src/tests/helpers/dirs.rs");
@ -192,7 +192,7 @@ fn parse(path: &Path, max_path_length: usize, mut action: impl FnMut(&[u8])) ->
);
let source_code = fs::read(path)
.map_err(Error::wrap(|| format!("Failed to read {:?}", path)))
.with_context(|| format!("Failed to read {:?}", path))
.unwrap();
let time = Instant::now();
for _ in 0..*REPETITION_COUNT {
@ -209,8 +209,6 @@ fn get_language(path: &Path) -> Language {
let src_dir = GRAMMARS_DIR.join(path).join("src");
TEST_LOADER
.load_language_at_path(&src_dir, &src_dir)
.map_err(Error::wrap(|| {
format!("Failed to load language at path {:?}", src_dir)
}))
.with_context(|| format!("Failed to load language at path {:?}", src_dir))
.unwrap()
}

View file

@ -1,156 +0,0 @@
use super::test_highlight;
use std::fmt::Write;
use std::io;
use std::io::ErrorKind;
use tree_sitter::{QueryError, QueryErrorKind};
use walkdir;
// CLI-wide error type: a stack of human-readable messages ordered from the
// innermost (first) to the outermost (last) annotation.  `None` marks an
// "ignored" error that callers may exit on without printing anything.
#[derive(Debug)]
pub struct Error(Option<Vec<String>>);
// Crate-local shorthand used in place of `std::result::Result<T, Error>`.
pub type Result<T> = std::result::Result<T, Error>;
impl Error {
    /// Builds an error whose single message is tagged as a grammar problem.
    pub fn grammar(message: &str) -> Self {
        Error(Some(vec![format!("Grammar error: {}", message)]))
    }

    /// Builds an error whose message is tagged as a regex problem.
    pub fn regex(mut message: String) -> Self {
        message.insert_str(0, "Regex error: ");
        Error(Some(vec![message]))
    }

    /// Error for a reference to a symbol that was never defined.
    pub fn undefined_symbol(name: &str) -> Self {
        Error(Some(vec![format!("Undefined symbol `{}`", name)]))
    }

    /// Wraps a single pre-formatted message into an error.
    pub fn new(message: String) -> Self {
        Error(Some(vec![message]))
    }

    /// Creates an error that carries no messages and should be ignored.
    pub fn new_ignored() -> Self {
        Self(None)
    }

    /// True if this error was created with [`Error::new_ignored`].
    pub fn is_ignored(&self) -> bool {
        self.0.is_none()
    }

    /// Shorthand for `Err(Error::new(message))`.
    pub fn err<T>(message: String) -> Result<T> {
        Err(Error::new(message))
    }

    /// Returns a closure suitable for `Result::map_err` that converts the
    /// underlying error into `Error` and pushes one more context message
    /// onto it.  `message_fn` is only invoked if an error actually occurs.
    ///
    /// Panics if the underlying error is an ignored error, since an ignored
    /// error has no message list to append to.
    pub fn wrap<E: Into<Self>, M: ToString, F: FnOnce() -> M>(
        message_fn: F,
    ) -> impl FnOnce(E) -> Self {
        |e| {
            let mut result = e.into();
            match result.0 {
                // Messages are stored innermost-first, so context is pushed on the end.
                Some(ref mut e) => e.push(message_fn().to_string()),
                None => panic!("It's not allowed to wrap an ignored error"),
            }
            result
        }
    }

    /// Renders the error for display: the outermost (most recently wrapped)
    /// message first, then a "Details:" section listing the inner messages
    /// from newest to oldest.
    pub fn message(&self) -> String {
        match self.0 {
            None => "Ignored error".to_string(),
            Some(ref e) => {
                let mut result = e.last().unwrap().clone();
                if e.len() > 1 {
                    result.push_str("\nDetails:\n");
                    for msg in e[0..e.len() - 1].iter().rev() {
                        // Writing into a `String` cannot fail.
                        writeln!(&mut result, " {}", msg).unwrap();
                    }
                }
                result
            }
        }
    }
}
impl<'a> From<(&str, QueryError)> for Error {
fn from((path, error): (&str, QueryError)) -> Self {
let mut msg = format!("Query error at {}:{}. ", path, error.row + 1);
match error.kind {
QueryErrorKind::Capture => write!(&mut msg, "Invalid capture name {}", error.message),
QueryErrorKind::Field => write!(&mut msg, "Invalid field name {}", error.message),
QueryErrorKind::NodeType => write!(&mut msg, "Invalid node type {}", error.message),
QueryErrorKind::Syntax => write!(&mut msg, "Invalid syntax:\n{}", error.message),
QueryErrorKind::Structure => write!(&mut msg, "Impossible pattern:\n{}", error.message),
QueryErrorKind::Predicate => write!(&mut msg, "Invalid predicate: {}", error.message),
}
.unwrap();
Self::new(msg)
}
}
impl<'a> From<tree_sitter_highlight::Error> for Error {
fn from(error: tree_sitter_highlight::Error) -> Self {
Error::new(format!("{:?}", error))
}
}
impl<'a> From<tree_sitter_tags::Error> for Error {
fn from(error: tree_sitter_tags::Error) -> Self {
Error::new(format!("{}", error))
}
}
impl From<serde_json::Error> for Error {
fn from(error: serde_json::Error) -> Self {
Error::new(error.to_string())
}
}
// A broken pipe (e.g. when output is piped into a process that exits early)
// is deliberately mapped to an *ignored* error so the CLI can terminate
// quietly instead of reporting a failure; every other I/O error becomes a
// regular single-message error.
impl From<io::Error> for Error {
    fn from(error: io::Error) -> Self {
        match error.kind() {
            ErrorKind::BrokenPipe => Error::new_ignored(),
            _ => Error::new(error.to_string()),
        }
    }
}
// The remaining conversions all collapse a third-party error type into a
// single-message `Error`, which is what lets `?` be used pervasively in the
// CLI's `Result`-returning functions.

impl From<glob::PatternError> for Error {
    fn from(error: glob::PatternError) -> Self {
        Error::new(error.to_string())
    }
}

impl From<glob::GlobError> for Error {
    fn from(error: glob::GlobError) -> Self {
        Error::new(error.to_string())
    }
}

impl From<libloading::Error> for Error {
    fn from(error: libloading::Error) -> Self {
        Error::new(error.to_string())
    }
}

impl From<regex_syntax::ast::Error> for Error {
    fn from(error: regex_syntax::ast::Error) -> Self {
        Error::new(error.to_string())
    }
}

impl From<test_highlight::Failure> for Error {
    fn from(error: test_highlight::Failure) -> Self {
        // `Failure` reports its text through `message()`.
        Error::new(error.message())
    }
}

impl From<String> for Error {
    fn from(error: String) -> Self {
        Error::new(error)
    }
}

impl From<walkdir::Error> for Error {
    fn from(error: walkdir::Error) -> Self {
        Error::new(error.to_string())
    }
}

View file

@ -1,5 +1,5 @@
use super::write_file;
use crate::error::{Error, Result};
use anyhow::{Context, Result};
use std::path::Path;
use std::{fs, str};
@ -61,7 +61,7 @@ pub fn generate_binding_files(repo_path: &Path, language_name: &str) -> Result<(
eprintln!("Updating binding.gyp with new binding path");
let binding_gyp = fs::read_to_string(&binding_gyp_path)
.map_err(Error::wrap(|| "Failed to read binding.gyp"))?;
.with_context(|| "Failed to read binding.gyp")?;
let binding_gyp = binding_gyp.replace("src/binding.cc", "bindings/node/binding.cc");
write_file(&binding_gyp_path, binding_gyp)?;
} else {
@ -72,12 +72,12 @@ pub fn generate_binding_files(repo_path: &Path, language_name: &str) -> Result<(
let package_json_path = repo_path.join("package.json");
if package_json_path.exists() {
let package_json_str = fs::read_to_string(&package_json_path)
.map_err(Error::wrap(|| "Failed to read package.json"))?;
.with_context(|| "Failed to read package.json")?;
let mut package_json =
serde_json::from_str::<serde_json::Map<String, serde_json::Value>>(
&package_json_str,
)
.map_err(Error::wrap(|| "Failed to parse package.json"))?;
.with_context(|| "Failed to parse package.json")?;
let package_json_main = package_json.get("main");
let package_json_needs_update = package_json_main.map_or(true, |v| {
let main_string = v.as_str();
@ -126,7 +126,6 @@ fn generate_file(path: &Path, template: &str, language_name: &str) -> Result<()>
}
fn create_dir(path: &Path) -> Result<()> {
fs::create_dir_all(&path).map_err(Error::wrap(|| {
format!("Failed to create {:?}", path.to_string_lossy())
}))
fs::create_dir_all(&path)
.with_context(|| format!("Failed to create {:?}", path.to_string_lossy()))
}

View file

@ -1,5 +1,6 @@
use super::item::{ParseItem, ParseItemSet, ParseItemSetCore};
use super::item_set_builder::ParseItemSetBuilder;
use crate::generate::grammars::PrecedenceEntry;
use crate::generate::grammars::{
InlinedProductionMap, LexicalGrammar, SyntaxGrammar, VariableType,
};
@ -9,10 +10,7 @@ use crate::generate::tables::{
FieldLocation, GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry,
ProductionInfo, ProductionInfoId,
};
use crate::{
error::{Error, Result},
generate::grammars::PrecedenceEntry,
};
use anyhow::{anyhow, Result};
use std::collections::{BTreeMap, HashMap, HashSet, VecDeque};
use std::fmt::Write;
use std::u32;
@ -387,7 +385,7 @@ impl<'a> ParseTableBuilder<'a> {
}
message += &self.syntax_grammar.variables[*variable_index as usize].name;
}
return Err(Error::new(message));
return Err(anyhow!(message));
}
}
// Add actions for the start tokens of each non-terminal extra rule.
@ -762,7 +760,7 @@ impl<'a> ParseTableBuilder<'a> {
}
write!(&mut msg, "\n").unwrap();
Err(Error::new(msg))
Err(anyhow!(msg))
}
fn compare_precedence(

View file

@ -11,12 +11,12 @@ use self::build_parse_table::{build_parse_table, ParseStateInfo};
use self::coincident_tokens::CoincidentTokenIndex;
use self::minimize_parse_table::minimize_parse_table;
use self::token_conflicts::TokenConflictMap;
use crate::error::Result;
use crate::generate::grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar};
use crate::generate::nfa::NfaCursor;
use crate::generate::node_types::VariableInfo;
use crate::generate::rules::{AliasMap, Symbol, SymbolType, TokenSet};
use crate::generate::tables::{LexTable, ParseAction, ParseTable, ParseTableEntry};
use anyhow::Result;
use log::info;
use std::collections::{BTreeSet, HashMap};

View file

@ -17,7 +17,7 @@ use self::parse_grammar::parse_grammar;
use self::prepare_grammar::prepare_grammar;
use self::render::render_c_code;
use self::rules::AliasMap;
use crate::error::{Error, Result};
use anyhow::{anyhow, Context, Result};
use lazy_static::lazy_static;
use regex::{Regex, RegexBuilder};
use std::fs;
@ -161,10 +161,10 @@ fn load_grammar_file(grammar_path: &Path) -> Result<String> {
match grammar_path.extension().and_then(|e| e.to_str()) {
Some("js") => Ok(load_js_grammar_file(grammar_path)?),
Some("json") => Ok(fs::read_to_string(grammar_path)?),
_ => Err(Error::new(format!(
_ => Err(anyhow!(
"Unknown grammar file extension: {:?}",
grammar_path
))),
)),
}
}
@ -191,7 +191,7 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
match output.status.code() {
None => panic!("Node process was killed"),
Some(0) => {}
Some(code) => return Error::err(format!("Node process exited with status {}", code)),
Some(code) => return Err(anyhow!("Node process exited with status {}", code)),
}
let mut result = String::from_utf8(output.stdout).expect("Got invalid UTF8 from node");
@ -200,7 +200,6 @@ fn load_js_grammar_file(grammar_path: &Path) -> Result<String> {
}
fn write_file(path: &Path, body: impl AsRef<[u8]>) -> Result<()> {
fs::write(path, body).map_err(Error::wrap(|| {
format!("Failed to write {:?}", path.file_name().unwrap())
}))
fs::write(path, body)
.with_context(|| format!("Failed to write {:?}", path.file_name().unwrap()))
}

View file

@ -1,6 +1,6 @@
use super::grammars::{LexicalGrammar, SyntaxGrammar, VariableType};
use super::rules::{Alias, AliasMap, Symbol, SymbolType};
use crate::error::{Error, Result};
use anyhow::{anyhow, Result};
use serde_derive::Serialize;
use std::cmp::Ordering;
use std::collections::{BTreeMap, HashMap, HashSet};
@ -328,10 +328,13 @@ pub(crate) fn get_variable_info(
for supertype_symbol in &syntax_grammar.supertype_symbols {
if result[supertype_symbol.index].has_multi_step_production {
let variable = &syntax_grammar.variables[supertype_symbol.index];
return Err(Error::grammar(&format!(
"Supertype symbols must always have a single visible child, but `{}` can have multiple",
return Err(anyhow!(
concat!(
"Grammar error: Supertype symbols must always ",
"have a single visible child, but `{}` can have multiple"
),
variable.name
)));
));
}
}

View file

@ -1,6 +1,6 @@
use super::grammars::{InputGrammar, PrecedenceEntry, Variable, VariableType};
use super::rules::{Precedence, Rule};
use crate::error::{Error, Result};
use anyhow::{anyhow, Result};
use serde_derive::Deserialize;
use serde_json::{Map, Value};
@ -109,9 +109,8 @@ pub(crate) fn parse_grammar(input: &str) -> Result<InputGrammar> {
RuleJSON::STRING { value } => PrecedenceEntry::Name(value),
RuleJSON::SYMBOL { name } => PrecedenceEntry::Symbol(name),
_ => {
return Err(Error::new(
return Err(anyhow!(
"Invalid rule in precedences array. Only strings and symbols are allowed"
.to_string(),
))
}
})

View file

@ -1,11 +1,8 @@
use super::ExtractedLexicalGrammar;
use crate::generate::grammars::{LexicalGrammar, LexicalVariable};
use crate::generate::nfa::{CharacterSet, Nfa, NfaState};
use crate::generate::rules::Rule;
use crate::{
error::{Error, Result},
generate::rules::Precedence,
};
use crate::generate::rules::{Precedence, Rule};
use anyhow::{anyhow, Context, Result};
use lazy_static::lazy_static;
use regex::Regex;
use regex_syntax::ast::{
@ -111,9 +108,7 @@ pub(crate) fn expand_tokens(mut grammar: ExtractedLexicalGrammar) -> Result<Lexi
let last_state_id = builder.nfa.last_state_id();
builder
.expand_rule(&variable.rule, last_state_id)
.map_err(Error::wrap(|| {
format!("Error processing rule {}", variable.name)
}))?;
.with_context(|| format!("Error processing rule {}", variable.name))?;
if !is_immediate_token {
builder.is_sep = true;
@ -205,14 +200,14 @@ impl NfaBuilder {
result
}
Rule::Blank => Ok(false),
_ => Err(Error::grammar(&format!("Unexpected rule {:?}", rule))),
_ => Err(anyhow!("Grammar error: Unexpected rule {:?}", rule)),
}
}
fn expand_regex(&mut self, ast: &Ast, mut next_state_id: u32) -> Result<bool> {
match ast {
Ast::Empty(_) => Ok(false),
Ast::Flags(_) => Err(Error::regex("Flags are not supported".to_string())),
Ast::Flags(_) => Err(anyhow!("Regex error: Flags are not supported")),
Ast::Literal(literal) => {
self.push_advance(CharacterSet::from_char(literal.c), next_state_id);
Ok(true)
@ -221,7 +216,7 @@ impl NfaBuilder {
self.push_advance(CharacterSet::from_char('\n').negate(), next_state_id);
Ok(true)
}
Ast::Assertion(_) => Err(Error::regex("Assertions are not supported".to_string())),
Ast::Assertion(_) => Err(anyhow!("Regex error: Assertions are not supported")),
Ast::Class(class) => match class {
Class::Unicode(class) => {
let mut chars = self.expand_unicode_character_class(&class.kind)?;
@ -248,8 +243,8 @@ impl NfaBuilder {
self.push_advance(chars, next_state_id);
Ok(true)
}
ClassSet::BinaryOp(_) => Err(Error::regex(
"Binary operators in character classes aren't supported".to_string(),
ClassSet::BinaryOp(_) => Err(anyhow!(
"Regex error: Binary operators in character classes aren't supported"
)),
},
},
@ -383,10 +378,10 @@ impl NfaBuilder {
}
Ok(set)
}
_ => Err(Error::regex(format!(
"Unsupported character class syntax {:?}",
_ => Err(anyhow!(
"Regex error: Unsupported character class syntax {:?}",
item
))),
)),
}
}
@ -406,10 +401,10 @@ impl NfaBuilder {
.get(class_name.as_str())
.or_else(|| UNICODE_PROPERTIES.get(class_name.as_str()))
.ok_or_else(|| {
Error::regex(format!(
"Unsupported unicode character class {}",
anyhow!(
"Regex error: Unsupported unicode character class {}",
class_name
))
)
})?;
for c in code_points {
if let Some(c) = std::char::from_u32(*c) {
@ -421,8 +416,8 @@ impl NfaBuilder {
}
}
ClassUnicodeKind::NamedValue { .. } => {
return Err(Error::regex(
"Key-value unicode properties are not supported".to_string(),
return Err(anyhow!(
"Regex error: Key-value unicode properties are not supported"
))
}
}

View file

@ -1,7 +1,7 @@
use super::{ExtractedLexicalGrammar, ExtractedSyntaxGrammar, InternedGrammar};
use crate::error::{Error, Result};
use crate::generate::grammars::{ExternalToken, Variable, VariableType};
use crate::generate::rules::{MetadataParams, Rule, Symbol, SymbolType};
use anyhow::{anyhow, Result};
use std::collections::HashMap;
use std::mem;
@ -108,7 +108,7 @@ pub(super) fn extract_tokens(
let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
if let Rule::Symbol(symbol) = rule {
if symbol.is_non_terminal() {
return Error::err(format!(
return Err(anyhow!(
"Rule '{}' cannot be used as both an external token and a non-terminal rule",
&variables[symbol.index].name,
));
@ -128,7 +128,7 @@ pub(super) fn extract_tokens(
})
}
} else {
return Error::err(format!(
return Err(anyhow!(
"Non-symbol rules cannot be used as external tokens"
));
}
@ -138,7 +138,7 @@ pub(super) fn extract_tokens(
if let Some(token) = grammar.word_token {
let token = symbol_replacer.replace_symbol(token);
if token.is_non_terminal() {
return Error::err(format!(
return Err(anyhow!(
"Non-terminal symbol '{}' cannot be used as the word token",
&variables[token.index].name
));
@ -482,7 +482,7 @@ mod test {
match extract_tokens(grammar) {
Err(e) => {
assert_eq!(e.message(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
assert_eq!(e.to_string(), "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule");
}
_ => {
panic!("Expected an error but got no error");

View file

@ -1,9 +1,9 @@
use super::ExtractedSyntaxGrammar;
use crate::error::{Error, Result};
use crate::generate::grammars::{
Production, ProductionStep, SyntaxGrammar, SyntaxVariable, Variable,
};
use crate::generate::rules::{Alias, Associativity, Precedence, Rule, Symbol};
use anyhow::{anyhow, Result};
struct RuleFlattener {
production: Production,
@ -193,7 +193,7 @@ pub(super) fn flatten_grammar(grammar: ExtractedSyntaxGrammar) -> Result<SyntaxG
for (i, variable) in variables.iter().enumerate() {
for production in &variable.productions {
if production.steps.is_empty() && symbol_is_used(&variables, Symbol::non_terminal(i)) {
return Error::err(format!(
return Err(anyhow!(
"The rule `{}` matches the empty string.
Tree-sitter does not support syntactic rules that match the empty string

View file

@ -1,13 +1,13 @@
use super::InternedGrammar;
use crate::error::{Error, Result};
use crate::generate::grammars::{InputGrammar, Variable, VariableType};
use crate::generate::rules::{Rule, Symbol};
use anyhow::{anyhow, Result};
pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar> {
let interner = Interner { grammar };
if variable_type_for_name(&grammar.variables[0].name) == VariableType::Hidden {
return Error::err("A grammar's start rule must be visible.".to_string());
return Err(anyhow!("A grammar's start rule must be visible."));
}
let mut variables = Vec::with_capacity(grammar.variables.len());
@ -40,7 +40,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
supertype_symbols.push(
interner
.intern_name(supertype_symbol_name)
.ok_or_else(|| Error::undefined_symbol(supertype_symbol_name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", supertype_symbol_name))?,
);
}
@ -51,7 +51,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
interned_conflict.push(
interner
.intern_name(&name)
.ok_or_else(|| Error::undefined_symbol(name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", name))?,
);
}
expected_conflicts.push(interned_conflict);
@ -69,7 +69,7 @@ pub(super) fn intern_symbols(grammar: &InputGrammar) -> Result<InternedGrammar>
word_token = Some(
interner
.intern_name(&name)
.ok_or_else(|| Error::undefined_symbol(&name))?,
.ok_or_else(|| anyhow!("Undefined symbol `{}`", &name))?,
);
}
@ -122,7 +122,7 @@ impl<'a> Interner<'a> {
if let Some(symbol) = self.intern_name(&name) {
Ok(Rule::Symbol(symbol))
} else {
Err(Error::undefined_symbol(name))
Err(anyhow!("Undefined symbol `{}`", name))
}
}
@ -234,7 +234,7 @@ mod tests {
let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
match result {
Err(e) => assert_eq!(e.message(), "Undefined symbol `y`"),
Err(e) => assert_eq!(e.to_string(), "Undefined symbol `y`"),
_ => panic!("Expected an error but got none"),
}
}

View file

@ -19,7 +19,7 @@ use super::grammars::{
SyntaxGrammar, Variable,
};
use super::rules::{AliasMap, Precedence, Rule, Symbol};
use super::{Error, Result};
use anyhow::{anyhow, Result};
use std::{
cmp::Ordering,
collections::{hash_map, HashMap, HashSet},
@ -93,10 +93,11 @@ fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
}
hash_map::Entry::Occupied(e) => {
if e.get() != &ordering {
return Err(Error::new(format!(
return Err(anyhow!(
"Conflicting orderings for precedences {} and {}",
entry1, entry2
)));
entry1,
entry2
));
}
}
}
@ -116,10 +117,11 @@ fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
Rule::Metadata { rule, params } => {
if let Precedence::Name(n) = &params.precedence {
if !names.contains(n) {
return Err(Error::new(format!(
return Err(anyhow!(
"Undeclared precedence '{}' in rule '{}'",
n, rule_name
)));
n,
rule_name
));
}
}
validate(rule_name, rule, names)?;
@ -196,7 +198,7 @@ mod tests {
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().message(),
result.unwrap_err().to_string(),
"Undeclared precedence 'omg' in rule 'v2'",
);
}
@ -244,7 +246,7 @@ mod tests {
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().message(),
result.unwrap_err().to_string(),
"Conflicting orderings for precedences 'a' and 'b'",
);
}

View file

@ -1,7 +1,7 @@
use super::util;
use crate::error::Result;
use crate::loader::Loader;
use ansi_term::Color;
use anyhow::Result;
use lazy_static::lazy_static;
use serde::ser::SerializeMap;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

View file

@ -1,5 +1,4 @@
pub mod config;
pub mod error;
pub mod generate;
pub mod highlight;
pub mod loader;

View file

@ -1,4 +1,4 @@
use super::error::{Error, Result};
use anyhow::{anyhow, Context, Error, Result};
use libloading::{Library, Symbol};
use once_cell::unsync::OnceCell;
use regex::{Regex, RegexBuilder};
@ -162,7 +162,7 @@ impl Loader {
// one to use by applying the configurations' content regexes.
else {
let file_contents = fs::read(path)
.map_err(Error::wrap(|| format!("Failed to read path {:?}", path)))?;
.with_context(|| format!("Failed to read path {:?}", path))?;
let file_contents = String::from_utf8_lossy(&file_contents);
let mut best_score = -2isize;
let mut best_configuration_id = None;
@ -250,9 +250,9 @@ impl Loader {
name: String,
}
let mut grammar_file =
fs::File::open(grammar_path).map_err(Error::wrap(|| "Failed to read grammar.json"))?;
fs::File::open(grammar_path).with_context(|| "Failed to read grammar.json")?;
let grammar_json: GrammarJSON = serde_json::from_reader(BufReader::new(&mut grammar_file))
.map_err(Error::wrap(|| "Failed to parse grammar.json"))?;
.with_context(|| "Failed to parse grammar.json")?;
let scanner_path = if scanner_path.exists() {
Some(scanner_path)
@ -283,9 +283,8 @@ impl Loader {
let mut library_path = self.parser_lib_path.join(name);
library_path.set_extension(DYLIB_EXTENSION);
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path).map_err(
Error::wrap(|| "Failed to compare source and binary timestamps"),
)?;
let recompile = needs_recompile(&library_path, &parser_path, &scanner_path)
.with_context(|| "Failed to compare source and binary timestamps")?;
if recompile {
let mut config = cc::Build::new();
@ -336,26 +335,23 @@ impl Loader {
let output = command
.output()
.map_err(Error::wrap(|| "Failed to execute C compiler"))?;
.with_context(|| "Failed to execute C compiler")?;
if !output.status.success() {
return Err(Error::new(format!(
return Err(anyhow!(
"Parser compilation failed.\nStdout: {}\nStderr: {}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
)));
));
}
}
let library = unsafe { Library::new(&library_path) }.map_err(Error::wrap(|| {
format!("Error opening dynamic library {:?}", &library_path)
}))?;
let library = unsafe { Library::new(&library_path) }
.with_context(|| format!("Error opening dynamic library {:?}", &library_path))?;
let language_fn_name = format!("tree_sitter_{}", replace_dashes_with_underscores(name));
let language = unsafe {
let language_fn: Symbol<unsafe extern "C" fn() -> Language> = library
.get(language_fn_name.as_bytes())
.map_err(Error::wrap(|| {
format!("Failed to load symbol {}", language_fn_name)
}))?;
.with_context(|| format!("Failed to load symbol {}", language_fn_name))?;
language_fn()
};
mem::forget(library);
@ -370,8 +366,7 @@ impl Loader {
Err(e) => {
eprintln!(
"Failed to load language for injection string '{}': {}",
string,
e.message()
string, e
);
None
}
@ -380,8 +375,7 @@ impl Loader {
Err(e) => {
eprintln!(
"Failed to load property sheet for injection string '{}': {}",
string,
e.message()
string, e
);
None
}
@ -646,7 +640,7 @@ impl<'a> LanguageConfiguration<'a> {
ranges: &'b Vec<(String, Range<usize>)>,
source: &str,
start_offset: usize,
) -> (&'b str, QueryError) {
) -> Error {
let offset_within_section = error.offset - start_offset;
let (path, range) = ranges
.iter()
@ -657,7 +651,7 @@ impl<'a> LanguageConfiguration<'a> {
.chars()
.filter(|c| *c == '\n')
.count();
(path.as_ref(), error)
Error::from(error).context(format!("Error in query file {:?}", path))
}
fn read_queries(
@ -671,18 +665,16 @@ impl<'a> LanguageConfiguration<'a> {
for path in paths {
let abs_path = self.root_path.join(path);
let prev_query_len = query.len();
query += &fs::read_to_string(&abs_path).map_err(Error::wrap(|| {
format!("Failed to read query file {:?}", path)
}))?;
query += &fs::read_to_string(&abs_path)
.with_context(|| format!("Failed to read query file {:?}", path))?;
path_ranges.push((path.clone(), prev_query_len..query.len()));
}
} else {
let queries_path = self.root_path.join("queries");
let path = queries_path.join(default_path);
if path.exists() {
query = fs::read_to_string(&path).map_err(Error::wrap(|| {
format!("Failed to read query file {:?}", path)
}))?;
query = fs::read_to_string(&path)
.with_context(|| format!("Failed to read query file {:?}", path))?;
path_ranges.push((default_path.to_string(), 0..query.len()));
}
}

View file

@ -1,32 +1,18 @@
use anyhow::{anyhow, Context, Result};
use clap::{App, AppSettings, Arg, SubCommand};
use error::Error;
use glob::glob;
use std::path::Path;
use std::process::exit;
use std::{env, fs, u64};
use tree_sitter::Language;
use tree_sitter_cli::{
config, error, generate, highlight, loader, logger, parse, query, tags, test, test_highlight,
util, wasm, web_ui,
config, generate, highlight, loader, logger, parse, query, tags, test, test_highlight, util,
wasm, web_ui,
};
const BUILD_VERSION: &'static str = env!("CARGO_PKG_VERSION");
const BUILD_SHA: Option<&'static str> = option_env!("BUILD_SHA");
fn main() {
if let Err(e) = run() {
if e.is_ignored() {
exit(0);
}
if !e.message().is_empty() {
eprintln!("");
eprintln!("{}", e.message());
}
exit(1);
}
}
fn run() -> error::Result<()> {
fn main() -> Result<()> {
let version = if let Some(build_sha) = BUILD_SHA {
format!("{} ({})", BUILD_VERSION, build_sha)
} else {
@ -212,7 +198,7 @@ fn run() -> error::Result<()> {
let languages = loader.languages_at_path(&current_dir)?;
let language = languages
.first()
.ok_or_else(|| "No language found".to_string())?;
.ok_or_else(|| anyhow!("No language found"))?;
let test_dir = current_dir.join("test");
// Run the corpus tests. Look for them at two paths: `test/corpus` and `corpus`.
@ -297,7 +283,7 @@ fn run() -> error::Result<()> {
}
if has_error {
return Error::err(String::new());
return Err(anyhow!(""));
}
} else if let Some(matches) = matches.subcommand_matches("query") {
let ordered_captures = matches.values_of("captures").is_some();
@ -352,7 +338,7 @@ fn run() -> error::Result<()> {
if let Some(scope) = matches.value_of("scope") {
lang = loader.language_configuration_for_scope(scope)?;
if lang.is_none() {
return Error::err(format!("Unknown scope '{}'", scope));
return Err(anyhow!("Unknown scope '{}'", scope));
}
}
@ -432,12 +418,10 @@ fn run() -> error::Result<()> {
fn collect_paths<'a>(
paths_file: Option<&str>,
paths: Option<impl Iterator<Item = &'a str>>,
) -> error::Result<Vec<String>> {
) -> Result<Vec<String>> {
if let Some(paths_file) = paths_file {
return Ok(fs::read_to_string(paths_file)
.map_err(Error::wrap(|| {
format!("Failed to read paths file {}", paths_file)
}))?
.with_context(|| format!("Failed to read paths file {}", paths_file))?
.trim()
.split_ascii_whitespace()
.map(String::from)
@ -467,8 +451,8 @@ fn collect_paths<'a>(
if Path::new(path).exists() {
incorporate_path(path, positive);
} else {
let paths = glob(path)
.map_err(Error::wrap(|| format!("Invalid glob pattern {:?}", path)))?;
let paths =
glob(path).with_context(|| format!("Invalid glob pattern {:?}", path))?;
for path in paths {
if let Some(path) = path?.to_str() {
incorporate_path(path, positive);
@ -478,15 +462,15 @@ fn collect_paths<'a>(
}
if result.is_empty() {
Error::err(
"No files were found at or matched by the provided pathname/glob".to_string(),
)?;
return Err(anyhow!(
"No files were found at or matched by the provided pathname/glob"
));
}
return Ok(result);
}
Err(Error::new("Must provide one or more paths".to_string()))
Err(anyhow!("Must provide one or more paths"))
}
fn select_language(
@ -494,41 +478,35 @@ fn select_language(
path: &Path,
current_dir: &Path,
scope: Option<&str>,
) -> Result<Language, Error> {
) -> Result<Language> {
if let Some(scope) = scope {
if let Some(config) =
loader
.language_configuration_for_scope(scope)
.map_err(Error::wrap(|| {
format!("Failed to load language for scope '{}'", scope)
}))?
if let Some(config) = loader
.language_configuration_for_scope(scope)
.with_context(|| format!("Failed to load language for scope '{}'", scope))?
{
Ok(config.0)
} else {
return Error::err(format!("Unknown scope '{}'", scope));
return Err(anyhow!("Unknown scope '{}'", scope));
}
} else if let Some((lang, _)) =
loader
.language_configuration_for_file_name(path)
.map_err(Error::wrap(|| {
format!(
"Failed to load language for file name {:?}",
path.file_name().unwrap()
)
}))?
} else if let Some((lang, _)) = loader
.language_configuration_for_file_name(path)
.with_context(|| {
format!(
"Failed to load language for file name {}",
&path.file_name().unwrap().to_string_lossy()
)
})?
{
Ok(lang)
} else if let Some(lang) = loader
.languages_at_path(&current_dir)
.map_err(Error::wrap(|| {
"Failed to load language in current directory"
}))?
.with_context(|| "Failed to load language in current directory")?
.first()
.cloned()
{
Ok(lang)
} else {
eprintln!("No language found");
Error::err("No language found".to_string())
Err(anyhow!("No language found"))
}
}

View file

@ -1,5 +1,5 @@
use super::error::{Error, Result};
use super::util;
use anyhow::{anyhow, Context, Result};
use std::io::{self, Write};
use std::path::Path;
use std::sync::atomic::AtomicUsize;
@ -45,10 +45,9 @@ pub fn parse_file_at_path(
) -> Result<bool> {
let mut _log_session = None;
let mut parser = Parser::new();
parser.set_language(language).map_err(|e| e.to_string())?;
let mut source_code = fs::read(path).map_err(Error::wrap(|| {
format!("Error reading source file {:?}", path)
}))?;
parser.set_language(language)?;
let mut source_code =
fs::read(path).with_context(|| format!("Error reading source file {:?}", path))?;
// If the `--cancel` flag was passed, then cancel the parse
// when the user types a newline.
@ -296,10 +295,10 @@ pub fn perform_edit(tree: &mut Tree, input: &mut Vec<u8>, edit: &Edit) -> InputE
fn parse_edit_flag(source_code: &Vec<u8>, flag: &str) -> Result<Edit> {
let error = || {
Error::from(format!(concat!(
anyhow!(concat!(
"Invalid edit string '{}'. ",
"Edit strings must match the pattern '<START_BYTE_OR_POSITION> <REMOVED_LENGTH> <NEW_TEXT>'"
), flag))
), flag)
};
// Three whitespace-separated parts:

View file

@ -1,5 +1,5 @@
use super::error::{Error, Result};
use crate::query_testing;
use anyhow::{Context, Result};
use std::{
fs,
io::{self, Write},
@ -19,11 +19,9 @@ pub fn query_files_at_paths(
let stdout = io::stdout();
let mut stdout = stdout.lock();
let query_source = fs::read_to_string(query_path).map_err(Error::wrap(|| {
format!("Error reading query file {:?}", query_path)
}))?;
let query = Query::new(language, &query_source)
.map_err(|e| Error::new(format!("Query compilation failed: {:?}", e)))?;
let query_source = fs::read_to_string(query_path)
.with_context(|| format!("Error reading query file {:?}", query_path))?;
let query = Query::new(language, &query_source).with_context(|| "Query compilation failed")?;
let mut query_cursor = QueryCursor::new();
if let Some(range) = range {
@ -31,16 +29,15 @@ pub fn query_files_at_paths(
}
let mut parser = Parser::new();
parser.set_language(language).map_err(|e| e.to_string())?;
parser.set_language(language)?;
for path in paths {
let mut results = Vec::new();
writeln!(&mut stdout, "{}", path)?;
let source_code = fs::read(&path).map_err(Error::wrap(|| {
format!("Error reading source file {:?}", path)
}))?;
let source_code =
fs::read(&path).with_context(|| format!("Error reading source file {:?}", path))?;
let tree = parser.parse(&source_code, None).unwrap();
if ordered_captures {

View file

@ -1,5 +1,4 @@
use crate::error;
use crate::error::Result;
use anyhow::{anyhow, Result};
use lazy_static::lazy_static;
use regex::Regex;
use std::fs;
@ -139,10 +138,12 @@ pub fn assert_expected_captures(
p.position.row == info.start.row && p.position >= info.start && p.position < info.end
}) {
if found.expected_capture_name != info.name && info.name != "name" {
Err(error::Error::new(format!(
Err(anyhow!(
"Assertion failed: at {}, found {}, expected {}",
info.start, found.expected_capture_name, info.name
)))?
info.start,
found.expected_capture_name,
info.name
))?
}
}
}

View file

@ -1,6 +1,6 @@
use super::loader::Loader;
use super::util;
use crate::error::{Error, Result};
use anyhow::{anyhow, Result};
use std::io::{self, Write};
use std::path::Path;
use std::time::Instant;
@ -18,7 +18,7 @@ pub fn generate_tags(
if let Some(scope) = scope {
lang = loader.language_configuration_for_scope(scope)?;
if lang.is_none() {
return Error::err(format!("Unknown scope '{}'", scope));
return Err(anyhow!("Unknown scope '{}'", scope));
}
}

View file

@ -1,6 +1,6 @@
use super::error::{Error, Result};
use super::util;
use ansi_term::Colour;
use anyhow::{anyhow, Context, Result};
use difference::{Changeset, Difference};
use lazy_static::lazy_static;
use regex::bytes::{Regex as ByteRegex, RegexBuilder as ByteRegexBuilder};
@ -65,7 +65,7 @@ pub fn run_tests_at_path(
let test_entry = parse_tests(path)?;
let mut _log_session = None;
let mut parser = Parser::new();
parser.set_language(language).map_err(|e| e.to_string())?;
parser.set_language(language)?;
if debug_graph {
_log_session = Some(util::log_graphs(&mut parser, "log.html")?);
@ -116,7 +116,7 @@ pub fn run_tests_at_path(
println!("\n {}. {}:", i + 1, name);
print_diff(actual, expected);
}
Error::err(String::new())
Err(anyhow!(""))
}
} else {
Ok(())
@ -135,10 +135,10 @@ pub fn check_queries_at_path(language: Language, path: &Path) -> Result<()> {
})
{
let filepath = entry.file_name().to_str().unwrap_or("");
let content = fs::read_to_string(entry.path()).map_err(Error::wrap(|| {
format!("Error reading query file {:?}", entry.file_name())
}))?;
Query::new(language, &content).map_err(|e| (filepath, e))?;
let content = fs::read_to_string(entry.path())
.with_context(|| format!("Error reading query file {:?}", filepath))?;
Query::new(language, &content)
.with_context(|| format!("Error in query file {:?}", filepath))?;
}
}
Ok(())

View file

@ -1,12 +1,13 @@
use super::error::Result;
use crate::loader::Loader;
use crate::query_testing::{parse_position_comments, Assertion};
use ansi_term::Colour;
use anyhow::{anyhow, Result};
use std::fs;
use std::path::Path;
use tree_sitter::Point;
use tree_sitter_highlight::{Highlight, HighlightConfiguration, HighlightEvent, Highlighter};
#[derive(Debug)]
pub struct Failure {
row: usize,
column: usize,
@ -14,25 +15,26 @@ pub struct Failure {
actual_highlights: Vec<String>,
}
impl Failure {
pub fn message(&self) -> String {
let mut result = format!(
impl std::error::Error for Failure {}
impl std::fmt::Display for Failure {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
write!(
f,
"Failure - row: {}, column: {}, expected highlight '{}', actual highlights: ",
self.row, self.column, self.expected_highlight
);
)?;
if self.actual_highlights.is_empty() {
result += "none.";
write!(f, "none.")?;
} else {
for (i, actual_highlight) in self.actual_highlights.iter().enumerate() {
if i > 0 {
result += ", ";
write!(f, ", ")?;
}
result += "'";
result += actual_highlight;
result += "'";
write!(f, "'{}'", actual_highlight)?;
}
}
result
Ok(())
}
}
@ -47,10 +49,10 @@ pub fn test_highlights(loader: &Loader, directory: &Path) -> Result<()> {
let test_file_name = highlight_test_file.file_name();
let (language, language_config) = loader
.language_configuration_for_file_name(&test_file_path)?
.ok_or_else(|| format!("No language found for path {:?}", test_file_path))?;
.ok_or_else(|| anyhow!("No language found for path {:?}", test_file_path))?;
let highlight_config = language_config
.highlight_config(language)?
.ok_or_else(|| format!("No highlighting config found for {:?}", test_file_path))?;
.ok_or_else(|| anyhow!("No highlighting config found for {:?}", test_file_path))?;
match test_highlight(
&loader,
&mut highlighter,
@ -69,14 +71,14 @@ pub fn test_highlights(loader: &Loader, directory: &Path) -> Result<()> {
" ✗ {}",
Colour::Red.paint(test_file_name.to_string_lossy().as_ref())
);
println!(" {}", e.message());
println!(" {}", e);
failed = true;
}
}
}
if failed {
Err(String::new().into())
Err(anyhow!(""))
} else {
Ok(())
}

View file

@ -231,11 +231,10 @@ fn test_feature_corpus_files() {
let expected_message = fs::read_to_string(&error_message_path).unwrap();
if let Err(e) = generate_result {
if e.message() != expected_message {
if e.to_string() != expected_message {
eprintln!(
"Unexpected error message.\n\nExpected:\n\n{}\nActual:\n\n{}\n",
expected_message,
e.message()
expected_message, e
);
failure_count += 1;
}
@ -250,8 +249,7 @@ fn test_feature_corpus_files() {
if let Err(e) = &generate_result {
eprintln!(
"Unexpected error for test grammar '{}':\n{}",
language_name,
e.message()
language_name, e
);
failure_count += 1;
continue;

View file

@ -1,4 +1,4 @@
use super::error::Result;
use anyhow::{anyhow, Context, Result};
use std::io;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;
@ -40,7 +40,6 @@ pub fn log_graphs(_parser: &mut Parser, _path: &str) -> Result<LogSession> {
#[cfg(unix)]
pub fn log_graphs(parser: &mut Parser, path: &str) -> Result<LogSession> {
use super::error::Error;
use std::io::Write;
let mut dot_file = std::fs::File::create(path)?;
@ -50,13 +49,11 @@ pub fn log_graphs(parser: &mut Parser, path: &str) -> Result<LogSession> {
.stdin(Stdio::piped())
.stdout(dot_file)
.spawn()
.map_err(Error::wrap(|| {
"Failed to run the `dot` command. Check that graphviz is installed."
}))?;
.with_context(|| "Failed to run the `dot` command. Check that graphviz is installed.")?;
let dot_stdin = dot_process
.stdin
.take()
.ok_or_else(|| Error::new("Failed to open stdin for `dot` process.".to_string()))?;
.ok_or_else(|| anyhow!("Failed to open stdin for `dot` process."))?;
parser.print_dot_graphs(&dot_stdin);
Ok(LogSession(
PathBuf::from(path),

View file

@ -1,5 +1,5 @@
use super::error::{Error, Result};
use super::generate::parse_grammar::GrammarJSON;
use anyhow::{anyhow, Context, Result};
use std::ffi::{OsStr, OsString};
use std::fs;
use std::path::Path;
@ -8,12 +8,10 @@ use which::which;
pub fn get_grammar_name(src_dir: &Path) -> Result<String> {
let grammar_json_path = src_dir.join("grammar.json");
let grammar_json = fs::read_to_string(&grammar_json_path).map_err(Error::wrap(|| {
format!("Failed to read grammar file {:?}", grammar_json_path)
}))?;
let grammar: GrammarJSON = serde_json::from_str(&grammar_json).map_err(Error::wrap(|| {
format!("Failed to parse grammar file {:?}", grammar_json_path)
}))?;
let grammar_json = fs::read_to_string(&grammar_json_path)
.with_context(|| format!("Failed to read grammar file {:?}", grammar_json_path))?;
let grammar: GrammarJSON = serde_json::from_str(&grammar_json)
.with_context(|| format!("Failed to parse grammar file {:?}", grammar_json_path))?;
Ok(grammar.name)
}
@ -56,7 +54,7 @@ pub fn compile_language_to_wasm(language_dir: &Path, force_docker: bool) -> Resu
let user_id_output = Command::new("id")
.arg("-u")
.output()
.map_err(Error::wrap(|| "Failed to get get current user id"))?;
.with_context(|| "Failed to get get current user id")?;
let user_id = String::from_utf8_lossy(&user_id_output.stdout);
let user_id = user_id.trim();
command.args(&["--user", user_id]);
@ -65,9 +63,9 @@ pub fn compile_language_to_wasm(language_dir: &Path, force_docker: bool) -> Resu
// Run `emcc` in a container using the `emscripten-slim` image
command.args(&["emscripten/emsdk", "emcc"]);
} else {
return Error::err(
"You must have either emcc or docker on your PATH to run this command".to_string(),
);
return Err(anyhow!(
"You must have either emcc or docker on your PATH to run this command"
));
}
command.args(&[
@ -107,18 +105,17 @@ pub fn compile_language_to_wasm(language_dir: &Path, force_docker: bool) -> Resu
let output = command
.output()
.map_err(Error::wrap(|| "Failed to run emcc command"))?;
.with_context(|| "Failed to run emcc command")?;
if !output.status.success() {
return Err(Error::from(format!(
return Err(anyhow!(
"emcc command failed - {}",
String::from_utf8_lossy(&output.stderr)
)));
));
}
// Move the created `.wasm` file into the current working directory.
fs::rename(&language_dir.join(&output_filename), &output_filename).map_err(Error::wrap(
|| format!("Couldn't find output file {:?}", output_filename),
))?;
fs::rename(&language_dir.join(&output_filename), &output_filename)
.with_context(|| format!("Couldn't find output file {:?}", output_filename))?;
Ok(())
}

View file

@ -1,5 +1,5 @@
use super::error::Error;
use super::wasm;
use anyhow::Context;
use std::env;
use std::fs;
use std::net::TcpListener;
@ -62,16 +62,16 @@ pub fn serve(grammar_path: &Path, open_in_browser: bool) {
let url = format!("127.0.0.1:{}", port);
let server = Server::http(&url).expect("Failed to start web server");
let grammar_name = wasm::get_grammar_name(&grammar_path.join("src"))
.map_err(Error::wrap(|| "Failed to get wasm filename"))
.with_context(|| "Failed to get wasm filename")
.unwrap();
let wasm_filename = format!("tree-sitter-{}.wasm", grammar_name);
let language_wasm = fs::read(grammar_path.join(&wasm_filename))
.map_err(Error::wrap(|| {
.with_context(|| {
format!(
"Failed to read {}. Run `tree-sitter build-wasm` first.",
wasm_filename
)
}))
})
.unwrap();
if open_in_browser {
if let Err(_) = webbrowser::open(&format!("http://127.0.0.1:{}", port)) {

View file

@ -18,6 +18,7 @@ crate-type = ["lib", "staticlib"]
[dependencies]
regex = "1"
thiserror = "1.0"
[dependencies.tree-sitter]
version = ">= 0.3.7"

View file

@ -4,6 +4,7 @@ pub use c_lib as c;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::{iter, mem, ops, str, usize};
use thiserror::Error;
use tree_sitter::{
Language, LossyUtf8, Node, Parser, Point, Query, QueryCaptures, QueryCursor, QueryError,
QueryMatch, Range, Tree,
@ -18,10 +19,13 @@ const BUFFER_LINES_RESERVE_CAPACITY: usize = 1000;
pub struct Highlight(pub usize);
/// Represents the reason why syntax highlighting failed.
#[derive(Debug, PartialEq, Eq)]
#[derive(Debug, Error, PartialEq, Eq)]
pub enum Error {
#[error("Cancelled")]
Cancelled,
#[error("Invalid language")]
InvalidLanguage,
#[error("Unknown error")]
Unknown,
}

View file

@ -19,6 +19,7 @@ crate-type = ["lib", "staticlib"]
[dependencies]
regex = "1"
memchr = "2.3"
thiserror = "1.0"
[dependencies.tree-sitter]
version = ">= 0.17.0"

View file

@ -7,7 +7,8 @@ use std::ffi::{CStr, CString};
use std::ops::Range;
use std::os::raw::c_char;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::{char, fmt, mem, str};
use std::{char, mem, str};
use thiserror::Error;
use tree_sitter::{
Language, LossyUtf8, Parser, Point, Query, QueryCursor, QueryError, QueryPredicateArg, Tree,
};
@ -56,12 +57,17 @@ pub struct Tag {
pub syntax_type_id: u32,
}
#[derive(Debug, PartialEq)]
#[derive(Debug, Error, PartialEq)]
pub enum Error {
Query(QueryError),
Regex(regex::Error),
#[error(transparent)]
Query(#[from] QueryError),
#[error(transparent)]
Regex(#[from] regex::Error),
#[error("Cancelled")]
Cancelled,
#[error("Invalid language")]
InvalidLanguage,
#[error("Invalid capture @{0}. Expected one of: @definition.*, @reference.*, @doc, @name, @local.(scope|definition|reference).")]
InvalidCapture(String),
}
@ -562,27 +568,6 @@ impl Tag {
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self {
Error::InvalidCapture(name) => write!(f, "Invalid capture @{}. Expected one of: @definition.*, @reference.*, @doc, @name, @local.(scope|definition|reference).", name),
_ => write!(f, "{:?}", self)
}
}
}
impl From<regex::Error> for Error {
fn from(error: regex::Error) -> Self {
Error::Regex(error)
}
}
impl From<QueryError> for Error {
fn from(error: QueryError) -> Self {
Error::Query(error)
}
}
fn line_range(
text: &[u8],
start_byte: usize,