fix: Naming; Lifetimes; deny(rust_2018_idioms)
- One has to think about lifetimes if a type has one:
- `<&'a Node<'tree>>::language` now returns `Language<'tree>` instead of
`Language<'a>`, as it should;
- Renamed `struct TreeCursor<'cursor>` into `struct TreeCursor<'tree>`,
to be consistent with the usages and reduce confusion;
- Remove explicit "outlives" requirements from `QueryMatches`, `QueryCaptures`,
and their impl blocks, because they're inferred
- TODO: should `'query` be renamed into `'cursor`?
This commit is contained in:
parent
630fa52717
commit
ffd777ba65
29 changed files with 169 additions and 158 deletions
|
|
@ -119,7 +119,7 @@ fn get_following_tokens(
|
|||
syntax_grammar: &SyntaxGrammar,
|
||||
lexical_grammar: &LexicalGrammar,
|
||||
inlines: &InlinedProductionMap,
|
||||
builder: &ParseItemSetBuilder,
|
||||
builder: &ParseItemSetBuilder<'_>,
|
||||
) -> Vec<TokenSet> {
|
||||
let mut result = vec![TokenSet::new(); lexical_grammar.variables.len()];
|
||||
let productions = syntax_grammar
|
||||
|
|
@ -160,8 +160,8 @@ fn populate_error_state(
|
|||
parse_table: &mut ParseTable,
|
||||
syntax_grammar: &SyntaxGrammar,
|
||||
lexical_grammar: &LexicalGrammar,
|
||||
coincident_token_index: &CoincidentTokenIndex,
|
||||
token_conflict_map: &TokenConflictMap,
|
||||
coincident_token_index: &CoincidentTokenIndex<'_>,
|
||||
token_conflict_map: &TokenConflictMap<'_>,
|
||||
keywords: &TokenSet,
|
||||
) {
|
||||
let state = &mut parse_table.states[0];
|
||||
|
|
@ -323,8 +323,8 @@ fn identify_keywords(
|
|||
lexical_grammar: &LexicalGrammar,
|
||||
parse_table: &ParseTable,
|
||||
word_token: Option<Symbol>,
|
||||
token_conflict_map: &TokenConflictMap,
|
||||
coincident_token_index: &CoincidentTokenIndex,
|
||||
token_conflict_map: &TokenConflictMap<'_>,
|
||||
coincident_token_index: &CoincidentTokenIndex<'_>,
|
||||
) -> TokenSet {
|
||||
if word_token.is_none() {
|
||||
return TokenSet::new();
|
||||
|
|
@ -429,7 +429,7 @@ fn identify_keywords(
|
|||
fn mark_fragile_tokens(
|
||||
parse_table: &mut ParseTable,
|
||||
lexical_grammar: &LexicalGrammar,
|
||||
token_conflict_map: &TokenConflictMap,
|
||||
token_conflict_map: &TokenConflictMap<'_>,
|
||||
) {
|
||||
let n = lexical_grammar.variables.len();
|
||||
let mut valid_tokens_mask = Vec::with_capacity(n);
|
||||
|
|
@ -543,7 +543,7 @@ fn report_state_info<'a>(
|
|||
}
|
||||
}
|
||||
|
||||
fn all_chars_are_alphabetical(cursor: &NfaCursor) -> bool {
|
||||
fn all_chars_are_alphabetical(cursor: &NfaCursor<'_>) -> bool {
|
||||
cursor.transition_chars().all(|(chars, is_sep)| {
|
||||
if is_sep {
|
||||
true
|
||||
|
|
|
|||
|
|
@ -27,8 +27,8 @@ pub fn build_lex_table(
|
|||
syntax_grammar: &SyntaxGrammar,
|
||||
lexical_grammar: &LexicalGrammar,
|
||||
keywords: &TokenSet,
|
||||
coincident_token_index: &CoincidentTokenIndex,
|
||||
token_conflict_map: &TokenConflictMap,
|
||||
coincident_token_index: &CoincidentTokenIndex<'_>,
|
||||
token_conflict_map: &TokenConflictMap<'_>,
|
||||
) -> LexTables {
|
||||
let keyword_lex_table = if syntax_grammar.word_token.is_some() {
|
||||
let mut builder = LexTableBuilder::new(lexical_grammar);
|
||||
|
|
@ -284,8 +284,8 @@ fn merge_token_set(
|
|||
tokens: &mut TokenSet,
|
||||
other: &TokenSet,
|
||||
lexical_grammar: &LexicalGrammar,
|
||||
token_conflict_map: &TokenConflictMap,
|
||||
coincident_token_index: &CoincidentTokenIndex,
|
||||
token_conflict_map: &TokenConflictMap<'_>,
|
||||
coincident_token_index: &CoincidentTokenIndex<'_>,
|
||||
) -> bool {
|
||||
for i in 0..lexical_grammar.variables.len() {
|
||||
let symbol = Symbol::terminal(i);
|
||||
|
|
|
|||
|
|
@ -114,7 +114,7 @@ pub struct AmbiguousExtraError {
|
|||
}
|
||||
|
||||
impl std::fmt::Display for ConflictError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
for symbol in &self.symbol_sequence {
|
||||
write!(f, " {symbol}")?;
|
||||
}
|
||||
|
|
@ -171,7 +171,7 @@ impl std::fmt::Display for ConflictError {
|
|||
}
|
||||
|
||||
impl std::fmt::Display for Interpretation {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
for symbol in &self.preceding_symbols {
|
||||
write!(f, " {symbol}")?;
|
||||
}
|
||||
|
|
@ -191,7 +191,7 @@ impl std::fmt::Display for Interpretation {
|
|||
}
|
||||
|
||||
impl std::fmt::Display for Resolution {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Self::Precedence { symbols } => {
|
||||
write!(f, "Specify a higher precedence in ")?;
|
||||
|
|
@ -227,7 +227,7 @@ impl std::fmt::Display for Resolution {
|
|||
}
|
||||
|
||||
impl std::fmt::Display for AmbiguousExtraError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
for (i, symbol) in self.parent_symbols.iter().enumerate() {
|
||||
if i > 0 {
|
||||
write!(f, ", ")?;
|
||||
|
|
@ -695,7 +695,7 @@ impl<'a> ParseTableBuilder<'a> {
|
|||
|
||||
fn handle_conflict(
|
||||
&mut self,
|
||||
item_set: &ParseItemSet,
|
||||
item_set: &ParseItemSet<'_>,
|
||||
state_id: ParseStateId,
|
||||
preceding_symbols: &SymbolSequence,
|
||||
preceding_auxiliary_symbols: &[AuxiliarySymbolInfo],
|
||||
|
|
@ -917,7 +917,7 @@ impl<'a> ParseTableBuilder<'a> {
|
|||
shift_items.sort_unstable();
|
||||
reduce_items.sort_unstable();
|
||||
|
||||
let get_rule_names = |items: &[&ParseItem]| -> Vec<String> {
|
||||
let get_rule_names = |items: &[&ParseItem<'_>]| -> Vec<String> {
|
||||
let mut last_rule_id = None;
|
||||
let mut result = Vec::with_capacity(items.len());
|
||||
for item in items {
|
||||
|
|
@ -1030,7 +1030,7 @@ impl<'a> ParseTableBuilder<'a> {
|
|||
|
||||
fn get_auxiliary_node_info(
|
||||
&self,
|
||||
item_set: &ParseItemSet,
|
||||
item_set: &ParseItemSet<'_>,
|
||||
symbol: Symbol,
|
||||
) -> AuxiliarySymbolInfo {
|
||||
let parent_symbols = item_set
|
||||
|
|
@ -1053,7 +1053,7 @@ impl<'a> ParseTableBuilder<'a> {
|
|||
}
|
||||
}
|
||||
|
||||
fn get_production_id(&mut self, item: &ParseItem) -> ProductionInfoId {
|
||||
fn get_production_id(&mut self, item: &ParseItem<'_>) -> ProductionInfoId {
|
||||
let mut production_info = ProductionInfo {
|
||||
alias_sequence: Vec::new(),
|
||||
field_map: BTreeMap::new(),
|
||||
|
|
|
|||
|
|
@ -56,7 +56,7 @@ impl<'a> CoincidentTokenIndex<'a> {
|
|||
}
|
||||
|
||||
impl fmt::Debug for CoincidentTokenIndex<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(f, "CoincidentTokenIndex {{")?;
|
||||
|
||||
writeln!(f, " entries: {{")?;
|
||||
|
|
|
|||
|
|
@ -185,7 +185,7 @@ impl<'a> ParseItemSet<'a> {
|
|||
}
|
||||
|
||||
impl fmt::Display for ParseItemDisplay<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
|
||||
if self.0.is_augmented() {
|
||||
write!(f, "START →")?;
|
||||
} else {
|
||||
|
|
@ -281,7 +281,7 @@ fn display_variable_name(source: &str) -> String {
|
|||
}
|
||||
|
||||
impl fmt::Display for TokenSetDisplay<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
|
||||
write!(f, "[")?;
|
||||
for (i, symbol) in self.0.iter().enumerate() {
|
||||
if i > 0 {
|
||||
|
|
@ -306,7 +306,7 @@ impl fmt::Display for TokenSetDisplay<'_> {
|
|||
}
|
||||
|
||||
impl fmt::Display for ParseItemSetDisplay<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
|
||||
for entry in &self.0.entries {
|
||||
write!(
|
||||
f,
|
||||
|
|
|
|||
|
|
@ -342,7 +342,7 @@ impl<'a> ParseItemSetBuilder<'a> {
|
|||
}
|
||||
|
||||
impl fmt::Debug for ParseItemSetBuilder<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(f, "ParseItemSetBuilder {{")?;
|
||||
|
||||
writeln!(f, " first_sets: {{")?;
|
||||
|
|
|
|||
|
|
@ -19,7 +19,7 @@ pub fn minimize_parse_table(
|
|||
syntax_grammar: &SyntaxGrammar,
|
||||
lexical_grammar: &LexicalGrammar,
|
||||
simple_aliases: &AliasMap,
|
||||
token_conflict_map: &TokenConflictMap,
|
||||
token_conflict_map: &TokenConflictMap<'_>,
|
||||
keywords: &TokenSet,
|
||||
optimizations: OptLevel,
|
||||
) {
|
||||
|
|
|
|||
|
|
@ -146,7 +146,7 @@ impl<'a> TokenConflictMap<'a> {
|
|||
}
|
||||
|
||||
impl fmt::Debug for TokenConflictMap<'_> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(f, "TokenConflictMap {{")?;
|
||||
|
||||
let syntax_grammar = SyntaxGrammar::default();
|
||||
|
|
@ -205,7 +205,7 @@ const fn matrix_index(variable_count: usize, i: usize, j: usize) -> usize {
|
|||
variable_count * i + j
|
||||
}
|
||||
|
||||
fn get_starting_chars(cursor: &mut NfaCursor, grammar: &LexicalGrammar) -> Vec<CharacterSet> {
|
||||
fn get_starting_chars(cursor: &mut NfaCursor<'_>, grammar: &LexicalGrammar) -> Vec<CharacterSet> {
|
||||
let mut result = Vec::with_capacity(grammar.variables.len());
|
||||
for variable in &grammar.variables {
|
||||
cursor.reset(vec![variable.start_state]);
|
||||
|
|
@ -237,7 +237,7 @@ fn get_following_chars(
|
|||
}
|
||||
|
||||
fn compute_conflict_status(
|
||||
cursor: &mut NfaCursor,
|
||||
cursor: &mut NfaCursor<'_>,
|
||||
grammar: &LexicalGrammar,
|
||||
following_chars: &[CharacterSet],
|
||||
i: usize,
|
||||
|
|
|
|||
|
|
@ -264,7 +264,7 @@ impl InlinedProductionMap {
|
|||
}
|
||||
|
||||
impl fmt::Display for PrecedenceEntry {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Name(n) => write!(f, "'{n}'"),
|
||||
Self::Symbol(s) => write!(f, "$.{s}"),
|
||||
|
|
|
|||
|
|
@ -409,7 +409,7 @@ impl PartialOrd for CharacterSet {
|
|||
}
|
||||
|
||||
impl fmt::Debug for CharacterSet {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "CharacterSet [")?;
|
||||
let mut set = self.clone();
|
||||
if self.contains(char::MAX) {
|
||||
|
|
@ -440,7 +440,7 @@ impl Nfa {
|
|||
}
|
||||
|
||||
impl fmt::Debug for Nfa {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
writeln!(f, "Nfa {{ states: {{")?;
|
||||
for (i, state) in self.states.iter().enumerate() {
|
||||
writeln!(f, " {i}: {state:?},")?;
|
||||
|
|
|
|||
|
|
@ -15,11 +15,11 @@ use super::{IoError, JSError, JSResult};
|
|||
const DSL: &[u8] = include_bytes!("dsl.js");
|
||||
|
||||
trait JSResultExt<T> {
|
||||
fn or_js_error(self, ctx: &Ctx) -> JSResult<T>;
|
||||
fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult<T>;
|
||||
}
|
||||
|
||||
impl<T> JSResultExt<T> for Result<T, rquickjs::Error> {
|
||||
fn or_js_error(self, ctx: &Ctx) -> JSResult<T> {
|
||||
fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult<T> {
|
||||
match self {
|
||||
Ok(v) => Ok(v),
|
||||
Err(rquickjs::Error::Exception) => Err(format_js_exception(ctx.catch())),
|
||||
|
|
@ -28,7 +28,7 @@ impl<T> JSResultExt<T> for Result<T, rquickjs::Error> {
|
|||
}
|
||||
}
|
||||
|
||||
fn format_js_exception(v: Value) -> JSError {
|
||||
fn format_js_exception(v: Value<'_>) -> JSError {
|
||||
let Some(exception) = v.into_exception() else {
|
||||
return JSError::QuickJS("Expected a JS exception".to_string());
|
||||
};
|
||||
|
|
@ -215,11 +215,11 @@ fn try_resolve_path(path: &Path) -> rquickjs::Result<PathBuf> {
|
|||
}
|
||||
|
||||
#[allow(clippy::needless_pass_by_value)]
|
||||
fn require_from_module<'a>(
|
||||
ctx: Ctx<'a>,
|
||||
fn require_from_module<'js>(
|
||||
ctx: Ctx<'js>,
|
||||
module_path: String,
|
||||
from_module: &str,
|
||||
) -> rquickjs::Result<Value<'a>> {
|
||||
) -> rquickjs::Result<Value<'js>> {
|
||||
let current_module = PathBuf::from(from_module);
|
||||
let current_dir = if current_module.is_file() {
|
||||
current_module.parent().unwrap_or(Path::new("."))
|
||||
|
|
@ -234,13 +234,13 @@ fn require_from_module<'a>(
|
|||
load_module_from_content(&ctx, &resolved_path, &contents)
|
||||
}
|
||||
|
||||
fn load_module_from_content<'a>(
|
||||
ctx: &Ctx<'a>,
|
||||
fn load_module_from_content<'js>(
|
||||
ctx: &Ctx<'js>,
|
||||
path: &Path,
|
||||
contents: &str,
|
||||
) -> rquickjs::Result<Value<'a>> {
|
||||
) -> rquickjs::Result<Value<'js>> {
|
||||
if path.extension().is_some_and(|ext| ext == "json") {
|
||||
return ctx.eval::<Value, _>(format!("JSON.parse({contents:?})"));
|
||||
return ctx.eval::<Value<'js>, _>(format!("JSON.parse({contents:?})"));
|
||||
}
|
||||
|
||||
let exports = Object::new(ctx.clone())?;
|
||||
|
|
@ -256,7 +256,7 @@ fn load_module_from_content<'a>(
|
|||
let module_path = filename.clone();
|
||||
let require = Function::new(
|
||||
ctx.clone(),
|
||||
move |ctx_inner: Ctx<'a>, target_path: String| -> rquickjs::Result<Value<'a>> {
|
||||
move |ctx_inner: Ctx<'js>, target_path: String| -> rquickjs::Result<Value<'js>> {
|
||||
require_from_module(ctx_inner, target_path, &module_path)
|
||||
},
|
||||
)?;
|
||||
|
|
@ -264,8 +264,8 @@ fn load_module_from_content<'a>(
|
|||
let wrapper =
|
||||
format!("(function(exports, require, module, __filename, __dirname) {{ {contents} }})");
|
||||
|
||||
let module_func = ctx.eval::<Function, _>(wrapper)?;
|
||||
module_func.call::<_, Value>((exports, require, module_obj.clone(), filename, dirname))?;
|
||||
let module_func = ctx.eval::<Function<'js>, _>(wrapper)?;
|
||||
module_func.call::<_, Value<'js>>((exports, require, module_obj.clone(), filename, dirname))?;
|
||||
|
||||
module_obj.get("exports")
|
||||
}
|
||||
|
|
@ -318,7 +318,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
|
|||
let grammar_path_string = grammar_path.to_string_lossy().to_string();
|
||||
let main_require = Function::new(
|
||||
ctx.clone(),
|
||||
move |ctx_inner, target_path: String| -> rquickjs::Result<Value> {
|
||||
move |ctx_inner, target_path: String| -> rquickjs::Result<Value<'_>> {
|
||||
require_from_module(ctx_inner, target_path, &grammar_path_string)
|
||||
},
|
||||
)?;
|
||||
|
|
@ -328,7 +328,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
|
|||
promise.finish::<()>().or_js_error(&ctx)?;
|
||||
|
||||
let grammar_json = ctx
|
||||
.eval::<rquickjs::String, _>("globalThis.output")
|
||||
.eval::<rquickjs::String<'_>, _>("globalThis.output")
|
||||
.map(|s| s.to_string())
|
||||
.or_js_error(&ctx)?
|
||||
.or_js_error(&ctx)?;
|
||||
|
|
@ -437,8 +437,8 @@ mod tests {
|
|||
const pkg = require('./package.json');
|
||||
module.exports = grammar({
|
||||
name: 'json_test',
|
||||
rules: {
|
||||
source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
|
||||
rules: {
|
||||
source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
|
||||
}
|
||||
});
|
||||
",
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue