fix: Naming; Lifetimes; deny(rust_2018_idioms)

- One has to think about lifetimes if a type has one:
  - `<&'a Node<'tree>>::language` now returns `Language<'tree>` instead of
    `Language<'a>`, as it should;
- Renamed `struct TreeCursor<'cursor>` into `struct TreeCursor<'tree>`,
  to be consistent with the usages and reduce confusion;
- Removed explicit "outlives" requirements from `QueryMatches`, `QueryCaptures`,
  and their impl blocks, because they're inferred;
- TODO: should `'query` be renamed into `'cursor`?
This commit is contained in:
DanikVitek 2026-01-10 23:01:55 +02:00
parent 630fa52717
commit ffd777ba65
29 changed files with 169 additions and 158 deletions

View file

@ -4,7 +4,7 @@ use super::{scope_sequence::ScopeSequence, LOG_ENABLED, LOG_GRAPH_ENABLED};
use crate::util;
pub fn check_consistent_sizes(tree: &Tree, input: &[u8]) {
fn check(node: Node, line_offsets: &[usize]) {
fn check(node: Node<'_>, line_offsets: &[usize]) {
let start_byte = node.start_byte();
let end_byte = node.end_byte();
let start_point = node.start_position();

View file

@ -1188,7 +1188,7 @@ fn generate_file(
path: &Path,
template: &str,
language_name: &str,
generate_opts: &GenerateOpts,
generate_opts: &GenerateOpts<'_>,
) -> Result<()> {
let filename = path.file_name().unwrap().to_str().unwrap();

View file

@ -11,11 +11,11 @@ pub fn paint(color: Option<impl Into<Color>>, text: &str) -> String {
struct Logger;
impl Log for Logger {
fn enabled(&self, _: &Metadata) -> bool {
fn enabled(&self, _: &Metadata<'_>) -> bool {
true
}
fn log(&self, record: &Record) {
fn log(&self, record: &Record<'_>) {
match record.level() {
Level::Error => eprintln!(
"{} {}",

View file

@ -284,7 +284,7 @@ pub fn parse_file_at_path(
path: &Path,
name: &str,
max_path_length: usize,
opts: &mut ParseFileOptions,
opts: &mut ParseFileOptions<'_>,
) -> Result<()> {
let mut _log_session = None;
parser.set_language(language)?;
@ -774,7 +774,7 @@ pub fn render_cst<'a, 'b: 'a>(
source_code: &[u8],
tree: &'b Tree,
cursor: &mut TreeCursor<'a>,
opts: &ParseFileOptions,
opts: &ParseFileOptions<'_>,
out: &mut impl Write,
) -> Result<()> {
let lossy_source_code = String::from_utf8_lossy(source_code);
@ -841,9 +841,9 @@ fn render_node_text(source: &str) -> String {
}
fn write_node_text(
opts: &ParseFileOptions,
opts: &ParseFileOptions<'_>,
out: &mut impl Write,
cursor: &TreeCursor,
cursor: &TreeCursor<'_>,
is_named: bool,
source: &str,
color: Option<impl Into<Color> + Copy>,
@ -906,7 +906,7 @@ fn write_node_text(
Ok(())
}
fn render_line_feed(source: &str, opts: &ParseFileOptions) -> String {
fn render_line_feed(source: &str, opts: &ParseFileOptions<'_>) -> String {
if cfg!(windows) {
source.replace("\r\n", &paint(opts.parse_theme.line_feed, "\r\n"))
} else {
@ -915,8 +915,8 @@ fn render_line_feed(source: &str, opts: &ParseFileOptions) -> String {
}
fn render_node_range(
opts: &ParseFileOptions,
cursor: &TreeCursor,
opts: &ParseFileOptions<'_>,
cursor: &TreeCursor<'_>,
is_named: bool,
is_multiline: bool,
total_width: usize,
@ -952,8 +952,8 @@ fn render_node_range(
}
fn cst_render_node(
opts: &ParseFileOptions,
cursor: &mut TreeCursor,
opts: &ParseFileOptions<'_>,
cursor: &TreeCursor<'_>,
source_code: &[u8],
out: &mut impl Write,
total_width: usize,

View file

@ -14,7 +14,7 @@ pub struct Utf8Point {
}
impl std::fmt::Display for Utf8Point {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "({}, {})", self.row, self.column)
}
}

View file

@ -603,7 +603,7 @@ impl std::fmt::Display for TestSummary {
pub fn run_tests_at_path(
parser: &mut Parser,
opts: &TestOptions,
opts: &TestOptions<'_>,
test_summary: &mut TestSummary,
) -> Result<()> {
let test_entry = parse_tests(&opts.path)?;
@ -814,7 +814,7 @@ impl TestCorrection {
fn run_tests(
parser: &mut Parser,
test_entry: TestEntry,
opts: &TestOptions,
opts: &TestOptions<'_>,
test_summary: &mut TestSummary,
corrected_entries: &mut Vec<TestCorrection>,
is_root: bool,
@ -1070,7 +1070,9 @@ fn run_tests(
let mut ran_test_in_group = false;
let matches_filter = |name: &str, file_name: &Option<String>, opts: &TestOptions| {
let matches_filter = |name: &str,
file_name: &Option<String>,
opts: &TestOptions<'_>| {
if let (Some(test_file_path), Some(filter_file_name)) = (file_name, &opts.file_name)
{
if !filter_file_name.eq(test_file_path) {

View file

@ -22,7 +22,7 @@ pub struct Failure {
impl std::error::Error for Failure {}
impl std::fmt::Display for Failure {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Failure - row: {}, column: {}, expected highlight '{}', actual highlights: ",

View file

@ -21,7 +21,7 @@ pub struct Failure {
impl std::error::Error for Failure {}
impl std::fmt::Display for Failure {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"Failure - row: {}, column: {}, expected tag: '{}', actual tag: ",

View file

@ -75,7 +75,7 @@ impl Pattern {
(pattern, pattern_start..pattern_end)
}
fn random_pattern_for_node(cursor: &mut TreeCursor, rng: &mut impl Rng) -> Self {
fn random_pattern_for_node(cursor: &mut TreeCursor<'_>, rng: &mut impl Rng) -> Self {
let node = cursor.node();
// Sometimes specify the node's type, sometimes use a wildcard.
@ -225,7 +225,7 @@ impl Pattern {
}
// Find every matching combination of child patterns and child nodes.
let mut finished_matches = Vec::<Match>::new();
let mut finished_matches = Vec::<Match<'_, 'tree>>::new();
if cursor.goto_first_child() {
let mut match_states = vec![(0, mat)];
loop {
@ -306,7 +306,7 @@ impl Ord for Match<'_, '_> {
}
}
fn compare_depth_first(a: Node, b: Node) -> Ordering {
fn compare_depth_first(a: Node<'_>, b: Node<'_>) -> Ordering {
let a = a.byte_range();
let b = b.byte_range();
a.start.cmp(&b.start).then_with(|| b.end.cmp(&a.end))

View file

@ -1219,7 +1219,7 @@ private:
);
}
fn get_all_nodes(tree: &Tree) -> Vec<Node> {
fn get_all_nodes(tree: &Tree) -> Vec<Node<'_>> {
let mut result = Vec::new();
let mut visited_children = false;
let mut cursor = tree.walk();

View file

@ -44,7 +44,7 @@ pub fn test_with_seed(args: TokenStream, input: TokenStream) -> TokenStream {
}
impl Parse for Args {
fn parse(input: ParseStream) -> syn::Result<Self> {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let mut retry = None;
let mut seed = None;
let mut seed_fn = None;

View file

@ -3971,7 +3971,7 @@ fn test_query_text_callback_returns_chunks() {
parser.set_language(&language).unwrap();
let tree = parser.parse(source, None).unwrap();
let mut cursor = QueryCursor::new();
let captures = cursor.captures(&query, tree.root_node(), |node: Node| {
let captures = cursor.captures(&query, tree.root_node(), |node: Node<'_>| {
chunks_in_range(node.byte_range())
});

View file

@ -119,7 +119,7 @@ fn get_following_tokens(
syntax_grammar: &SyntaxGrammar,
lexical_grammar: &LexicalGrammar,
inlines: &InlinedProductionMap,
builder: &ParseItemSetBuilder,
builder: &ParseItemSetBuilder<'_>,
) -> Vec<TokenSet> {
let mut result = vec![TokenSet::new(); lexical_grammar.variables.len()];
let productions = syntax_grammar
@ -160,8 +160,8 @@ fn populate_error_state(
parse_table: &mut ParseTable,
syntax_grammar: &SyntaxGrammar,
lexical_grammar: &LexicalGrammar,
coincident_token_index: &CoincidentTokenIndex,
token_conflict_map: &TokenConflictMap,
coincident_token_index: &CoincidentTokenIndex<'_>,
token_conflict_map: &TokenConflictMap<'_>,
keywords: &TokenSet,
) {
let state = &mut parse_table.states[0];
@ -323,8 +323,8 @@ fn identify_keywords(
lexical_grammar: &LexicalGrammar,
parse_table: &ParseTable,
word_token: Option<Symbol>,
token_conflict_map: &TokenConflictMap,
coincident_token_index: &CoincidentTokenIndex,
token_conflict_map: &TokenConflictMap<'_>,
coincident_token_index: &CoincidentTokenIndex<'_>,
) -> TokenSet {
if word_token.is_none() {
return TokenSet::new();
@ -429,7 +429,7 @@ fn identify_keywords(
fn mark_fragile_tokens(
parse_table: &mut ParseTable,
lexical_grammar: &LexicalGrammar,
token_conflict_map: &TokenConflictMap,
token_conflict_map: &TokenConflictMap<'_>,
) {
let n = lexical_grammar.variables.len();
let mut valid_tokens_mask = Vec::with_capacity(n);
@ -543,7 +543,7 @@ fn report_state_info<'a>(
}
}
fn all_chars_are_alphabetical(cursor: &NfaCursor) -> bool {
fn all_chars_are_alphabetical(cursor: &NfaCursor<'_>) -> bool {
cursor.transition_chars().all(|(chars, is_sep)| {
if is_sep {
true

View file

@ -27,8 +27,8 @@ pub fn build_lex_table(
syntax_grammar: &SyntaxGrammar,
lexical_grammar: &LexicalGrammar,
keywords: &TokenSet,
coincident_token_index: &CoincidentTokenIndex,
token_conflict_map: &TokenConflictMap,
coincident_token_index: &CoincidentTokenIndex<'_>,
token_conflict_map: &TokenConflictMap<'_>,
) -> LexTables {
let keyword_lex_table = if syntax_grammar.word_token.is_some() {
let mut builder = LexTableBuilder::new(lexical_grammar);
@ -284,8 +284,8 @@ fn merge_token_set(
tokens: &mut TokenSet,
other: &TokenSet,
lexical_grammar: &LexicalGrammar,
token_conflict_map: &TokenConflictMap,
coincident_token_index: &CoincidentTokenIndex,
token_conflict_map: &TokenConflictMap<'_>,
coincident_token_index: &CoincidentTokenIndex<'_>,
) -> bool {
for i in 0..lexical_grammar.variables.len() {
let symbol = Symbol::terminal(i);

View file

@ -114,7 +114,7 @@ pub struct AmbiguousExtraError {
}
impl std::fmt::Display for ConflictError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for symbol in &self.symbol_sequence {
write!(f, " {symbol}")?;
}
@ -171,7 +171,7 @@ impl std::fmt::Display for ConflictError {
}
impl std::fmt::Display for Interpretation {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for symbol in &self.preceding_symbols {
write!(f, " {symbol}")?;
}
@ -191,7 +191,7 @@ impl std::fmt::Display for Interpretation {
}
impl std::fmt::Display for Resolution {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Precedence { symbols } => {
write!(f, "Specify a higher precedence in ")?;
@ -227,7 +227,7 @@ impl std::fmt::Display for Resolution {
}
impl std::fmt::Display for AmbiguousExtraError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for (i, symbol) in self.parent_symbols.iter().enumerate() {
if i > 0 {
write!(f, ", ")?;
@ -695,7 +695,7 @@ impl<'a> ParseTableBuilder<'a> {
fn handle_conflict(
&mut self,
item_set: &ParseItemSet,
item_set: &ParseItemSet<'_>,
state_id: ParseStateId,
preceding_symbols: &SymbolSequence,
preceding_auxiliary_symbols: &[AuxiliarySymbolInfo],
@ -917,7 +917,7 @@ impl<'a> ParseTableBuilder<'a> {
shift_items.sort_unstable();
reduce_items.sort_unstable();
let get_rule_names = |items: &[&ParseItem]| -> Vec<String> {
let get_rule_names = |items: &[&ParseItem<'_>]| -> Vec<String> {
let mut last_rule_id = None;
let mut result = Vec::with_capacity(items.len());
for item in items {
@ -1030,7 +1030,7 @@ impl<'a> ParseTableBuilder<'a> {
fn get_auxiliary_node_info(
&self,
item_set: &ParseItemSet,
item_set: &ParseItemSet<'_>,
symbol: Symbol,
) -> AuxiliarySymbolInfo {
let parent_symbols = item_set
@ -1053,7 +1053,7 @@ impl<'a> ParseTableBuilder<'a> {
}
}
fn get_production_id(&mut self, item: &ParseItem) -> ProductionInfoId {
fn get_production_id(&mut self, item: &ParseItem<'_>) -> ProductionInfoId {
let mut production_info = ProductionInfo {
alias_sequence: Vec::new(),
field_map: BTreeMap::new(),

View file

@ -56,7 +56,7 @@ impl<'a> CoincidentTokenIndex<'a> {
}
impl fmt::Debug for CoincidentTokenIndex<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "CoincidentTokenIndex {{")?;
writeln!(f, " entries: {{")?;

View file

@ -185,7 +185,7 @@ impl<'a> ParseItemSet<'a> {
}
impl fmt::Display for ParseItemDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
if self.0.is_augmented() {
write!(f, "START →")?;
} else {
@ -281,7 +281,7 @@ fn display_variable_name(source: &str) -> String {
}
impl fmt::Display for TokenSetDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
write!(f, "[")?;
for (i, symbol) in self.0.iter().enumerate() {
if i > 0 {
@ -306,7 +306,7 @@ impl fmt::Display for TokenSetDisplay<'_> {
}
impl fmt::Display for ParseItemSetDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
for entry in &self.0.entries {
write!(
f,

View file

@ -342,7 +342,7 @@ impl<'a> ParseItemSetBuilder<'a> {
}
impl fmt::Debug for ParseItemSetBuilder<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "ParseItemSetBuilder {{")?;
writeln!(f, " first_sets: {{")?;

View file

@ -19,7 +19,7 @@ pub fn minimize_parse_table(
syntax_grammar: &SyntaxGrammar,
lexical_grammar: &LexicalGrammar,
simple_aliases: &AliasMap,
token_conflict_map: &TokenConflictMap,
token_conflict_map: &TokenConflictMap<'_>,
keywords: &TokenSet,
optimizations: OptLevel,
) {

View file

@ -146,7 +146,7 @@ impl<'a> TokenConflictMap<'a> {
}
impl fmt::Debug for TokenConflictMap<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "TokenConflictMap {{")?;
let syntax_grammar = SyntaxGrammar::default();
@ -205,7 +205,7 @@ const fn matrix_index(variable_count: usize, i: usize, j: usize) -> usize {
variable_count * i + j
}
fn get_starting_chars(cursor: &mut NfaCursor, grammar: &LexicalGrammar) -> Vec<CharacterSet> {
fn get_starting_chars(cursor: &mut NfaCursor<'_>, grammar: &LexicalGrammar) -> Vec<CharacterSet> {
let mut result = Vec::with_capacity(grammar.variables.len());
for variable in &grammar.variables {
cursor.reset(vec![variable.start_state]);
@ -237,7 +237,7 @@ fn get_following_chars(
}
fn compute_conflict_status(
cursor: &mut NfaCursor,
cursor: &mut NfaCursor<'_>,
grammar: &LexicalGrammar,
following_chars: &[CharacterSet],
i: usize,

View file

@ -264,7 +264,7 @@ impl InlinedProductionMap {
}
impl fmt::Display for PrecedenceEntry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Name(n) => write!(f, "'{n}'"),
Self::Symbol(s) => write!(f, "$.{s}"),

View file

@ -409,7 +409,7 @@ impl PartialOrd for CharacterSet {
}
impl fmt::Debug for CharacterSet {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "CharacterSet [")?;
let mut set = self.clone();
if self.contains(char::MAX) {
@ -440,7 +440,7 @@ impl Nfa {
}
impl fmt::Debug for Nfa {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "Nfa {{ states: {{")?;
for (i, state) in self.states.iter().enumerate() {
writeln!(f, " {i}: {state:?},")?;

View file

@ -15,11 +15,11 @@ use super::{IoError, JSError, JSResult};
const DSL: &[u8] = include_bytes!("dsl.js");
trait JSResultExt<T> {
fn or_js_error(self, ctx: &Ctx) -> JSResult<T>;
fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult<T>;
}
impl<T> JSResultExt<T> for Result<T, rquickjs::Error> {
fn or_js_error(self, ctx: &Ctx) -> JSResult<T> {
fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult<T> {
match self {
Ok(v) => Ok(v),
Err(rquickjs::Error::Exception) => Err(format_js_exception(ctx.catch())),
@ -28,7 +28,7 @@ impl<T> JSResultExt<T> for Result<T, rquickjs::Error> {
}
}
fn format_js_exception(v: Value) -> JSError {
fn format_js_exception(v: Value<'_>) -> JSError {
let Some(exception) = v.into_exception() else {
return JSError::QuickJS("Expected a JS exception".to_string());
};
@ -215,11 +215,11 @@ fn try_resolve_path(path: &Path) -> rquickjs::Result<PathBuf> {
}
#[allow(clippy::needless_pass_by_value)]
fn require_from_module<'a>(
ctx: Ctx<'a>,
fn require_from_module<'js>(
ctx: Ctx<'js>,
module_path: String,
from_module: &str,
) -> rquickjs::Result<Value<'a>> {
) -> rquickjs::Result<Value<'js>> {
let current_module = PathBuf::from(from_module);
let current_dir = if current_module.is_file() {
current_module.parent().unwrap_or(Path::new("."))
@ -234,13 +234,13 @@ fn require_from_module<'a>(
load_module_from_content(&ctx, &resolved_path, &contents)
}
fn load_module_from_content<'a>(
ctx: &Ctx<'a>,
fn load_module_from_content<'js>(
ctx: &Ctx<'js>,
path: &Path,
contents: &str,
) -> rquickjs::Result<Value<'a>> {
) -> rquickjs::Result<Value<'js>> {
if path.extension().is_some_and(|ext| ext == "json") {
return ctx.eval::<Value, _>(format!("JSON.parse({contents:?})"));
return ctx.eval::<Value<'js>, _>(format!("JSON.parse({contents:?})"));
}
let exports = Object::new(ctx.clone())?;
@ -256,7 +256,7 @@ fn load_module_from_content<'a>(
let module_path = filename.clone();
let require = Function::new(
ctx.clone(),
move |ctx_inner: Ctx<'a>, target_path: String| -> rquickjs::Result<Value<'a>> {
move |ctx_inner: Ctx<'js>, target_path: String| -> rquickjs::Result<Value<'js>> {
require_from_module(ctx_inner, target_path, &module_path)
},
)?;
@ -264,8 +264,8 @@ fn load_module_from_content<'a>(
let wrapper =
format!("(function(exports, require, module, __filename, __dirname) {{ {contents} }})");
let module_func = ctx.eval::<Function, _>(wrapper)?;
module_func.call::<_, Value>((exports, require, module_obj.clone(), filename, dirname))?;
let module_func = ctx.eval::<Function<'js>, _>(wrapper)?;
module_func.call::<_, Value<'js>>((exports, require, module_obj.clone(), filename, dirname))?;
module_obj.get("exports")
}
@ -318,7 +318,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
let grammar_path_string = grammar_path.to_string_lossy().to_string();
let main_require = Function::new(
ctx.clone(),
move |ctx_inner, target_path: String| -> rquickjs::Result<Value> {
move |ctx_inner, target_path: String| -> rquickjs::Result<Value<'_>> {
require_from_module(ctx_inner, target_path, &grammar_path_string)
},
)?;
@ -328,7 +328,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
promise.finish::<()>().or_js_error(&ctx)?;
let grammar_json = ctx
.eval::<rquickjs::String, _>("globalThis.output")
.eval::<rquickjs::String<'_>, _>("globalThis.output")
.map(|s| s.to_string())
.or_js_error(&ctx)?
.or_js_error(&ctx)?;
@ -437,8 +437,8 @@ mod tests {
const pkg = require('./package.json');
module.exports = grammar({
name: 'json_test',
rules: {
source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
rules: {
source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
}
});
",

View file

@ -189,7 +189,7 @@ struct HighlightIterLayer<'a> {
depth: usize,
}
pub struct _QueryCaptures<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> {
pub struct _QueryCaptures<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> {
ptr: *mut ffi::TSQueryCursor,
query: &'query Query,
text_provider: T,
@ -225,7 +225,7 @@ impl<'tree> _QueryMatch<'_, 'tree> {
}
}
impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
impl<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
for _QueryCaptures<'query, 'tree, T, I>
{
type Item = (QueryMatch<'query, 'tree>, usize);
@ -240,7 +240,10 @@ impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
m.as_mut_ptr(),
core::ptr::addr_of_mut!(capture_index),
) {
let result = std::mem::transmute::<_QueryMatch, QueryMatch>(_QueryMatch::new(
let result = std::mem::transmute::<
_QueryMatch<'query, 'tree>,
QueryMatch<'query, 'tree>,
>(_QueryMatch::new(
&m.assume_init(),
self.ptr,
));
@ -594,6 +597,7 @@ impl<'a> HighlightIterLayer<'a> {
}
}
// SAFETY:
// The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
// prevents them from being moved. But both of these values are really just
// pointers, so it's actually ok to move them.
@ -601,12 +605,18 @@ impl<'a> HighlightIterLayer<'a> {
let cursor_ref = unsafe {
mem::transmute::<&mut QueryCursor, &'static mut QueryCursor>(&mut cursor)
};
let captures = unsafe {
std::mem::transmute::<QueryCaptures<_, _>, _QueryCaptures<_, _>>(
cursor_ref.captures(&config.query, tree_ref.root_node(), source),
)
}
.peekable();
let captures =
unsafe {
std::mem::transmute::<
QueryCaptures<'_, '_, _, _>,
_QueryCaptures<'_, '_, _, _>,
>(cursor_ref.captures(
&config.query,
tree_ref.root_node(),
source,
))
}
.peekable();
result.push(HighlightIterLayer {
highlight_end_stack: Vec::new(),
@ -648,7 +658,7 @@ impl<'a> HighlightIterLayer<'a> {
// of their children.
fn intersect_ranges(
parent_ranges: &[Range],
nodes: &[Node],
nodes: &[Node<'_>],
includes_children: bool,
) -> Vec<Range> {
let mut cursor = nodes[0].walk();

View file

@ -765,7 +765,7 @@ impl Loader {
}
#[must_use]
pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration, &Path)> {
pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration<'static>, &Path)> {
self.language_configurations
.iter()
.map(|c| (c, self.languages_by_id[c.language_id].0.as_ref()))
@ -775,7 +775,7 @@ impl Loader {
pub fn language_configuration_for_scope(
&self,
scope: &str,
) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
for configuration in &self.language_configurations {
if configuration.scope.as_ref().is_some_and(|s| s == scope) {
let language = self.language_for_id(configuration.language_id)?;
@ -788,7 +788,7 @@ impl Loader {
pub fn language_configuration_for_first_line_regex(
&self,
path: &Path,
) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
self.language_configuration_ids_by_first_line_regex
.iter()
.try_fold(None, |_, (regex, ids)| {
@ -817,7 +817,7 @@ impl Loader {
pub fn language_configuration_for_file_name(
&self,
path: &Path,
) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
// Find all the language configurations that match this file name
// or a suffix of the file name.
let configuration_ids = path
@ -889,7 +889,7 @@ impl Loader {
pub fn language_configuration_for_injection_string(
&self,
string: &str,
) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
let mut best_match_length = 0;
let mut best_match_position = None;
for (i, configuration) in self.language_configurations.iter().enumerate() {
@ -915,7 +915,7 @@ impl Loader {
pub fn language_for_configuration(
&self,
configuration: &LanguageConfiguration,
configuration: &LanguageConfiguration<'_>,
) -> LoaderResult<Language> {
self.language_for_id(configuration.language_id)
}
@ -946,7 +946,7 @@ impl Loader {
self.load_language_at_path(config).map(|_| ())
}
pub fn load_language_at_path(&self, mut config: CompileConfig) -> LoaderResult<Language> {
pub fn load_language_at_path(&self, mut config: CompileConfig<'_>) -> LoaderResult<Language> {
let grammar_path = config.src_path.join("grammar.json");
config.name = Self::grammar_json_name(&grammar_path)?;
self.load_language_at_path_with_name(config)
@ -954,7 +954,7 @@ impl Loader {
pub fn load_language_at_path_with_name(
&self,
mut config: CompileConfig,
mut config: CompileConfig<'_>,
) -> LoaderResult<Language> {
let mut lib_name = config.name.clone();
let language_fn_name = format!("tree_sitter_{}", config.name.replace('-', "_"));
@ -1128,7 +1128,7 @@ impl Loader {
})?;
let language = unsafe {
let language_fn = library
.get::<Symbol<unsafe extern "C" fn() -> Language>>(function_name.as_bytes())
.get::<Symbol<'_, unsafe extern "C" fn() -> Language>>(function_name.as_bytes())
.map_err(|e| {
LoaderError::Symbol(SymbolError {
error: e,
@ -1144,7 +1144,7 @@ impl Loader {
fn compile_parser_to_dylib(
&self,
config: &CompileConfig,
config: &CompileConfig<'_>,
lock_file: &fs::File,
lock_path: &Path,
) -> LoaderResult<()> {
@ -1534,7 +1534,9 @@ impl Loader {
}
#[must_use]
pub fn get_language_configuration_in_current_path(&self) -> Option<&LanguageConfiguration> {
pub fn get_language_configuration_in_current_path(
&self,
) -> Option<&LanguageConfiguration<'static>> {
self.language_configuration_in_current_path
.map(|i| &self.language_configurations[i])
}
@ -1543,7 +1545,7 @@ impl Loader {
&mut self,
parser_path: &Path,
set_current_path_config: bool,
) -> LoaderResult<&[LanguageConfiguration]> {
) -> LoaderResult<&[LanguageConfiguration<'static>]> {
let initial_language_configuration_count = self.language_configurations.len();
match TreeSitterJSON::from_file(parser_path) {

View file

@ -313,6 +313,7 @@ impl TagsContext {
)
.ok_or(Error::Cancelled)?;
// SAFETY:
// The `matches` iterator borrows the `Tree`, which prevents it from being
// moved. But the tree is really just a pointer, so it's actually ok to
// move it.