diff --git a/Cargo.toml b/Cargo.toml
index 84dccaac..7e532f8d 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -79,7 +79,7 @@ unused_self = "allow"
 used_underscore_items = "allow"
 
 [workspace.lints.rust]
-mismatched_lifetime_syntaxes = "allow"
+rust_2018_idioms = "deny"
 
 [profile.optimize]
 inherits = "release"
diff --git a/crates/cli/src/fuzz/corpus_test.rs b/crates/cli/src/fuzz/corpus_test.rs
index e95ab283..69ad1e26 100644
--- a/crates/cli/src/fuzz/corpus_test.rs
+++ b/crates/cli/src/fuzz/corpus_test.rs
@@ -4,7 +4,7 @@ use super::{scope_sequence::ScopeSequence, LOG_ENABLED, LOG_GRAPH_ENABLED};
 use crate::util;
 
 pub fn check_consistent_sizes(tree: &Tree, input: &[u8]) {
-    fn check(node: Node, line_offsets: &[usize]) {
+    fn check(node: Node<'_>, line_offsets: &[usize]) {
         let start_byte = node.start_byte();
         let end_byte = node.end_byte();
         let start_point = node.start_position();
diff --git a/crates/cli/src/init.rs b/crates/cli/src/init.rs
index 70ca25af..3960fffd 100644
--- a/crates/cli/src/init.rs
+++ b/crates/cli/src/init.rs
@@ -1188,7 +1188,7 @@ fn generate_file(
     path: &Path,
     template: &str,
     language_name: &str,
-    generate_opts: &GenerateOpts,
+    generate_opts: &GenerateOpts<'_>,
 ) -> Result<()> {
     let filename = path.file_name().unwrap().to_str().unwrap();
 
diff --git a/crates/cli/src/logger.rs b/crates/cli/src/logger.rs
index 41b11906..99eaf8db 100644
--- a/crates/cli/src/logger.rs
+++ b/crates/cli/src/logger.rs
@@ -11,11 +11,11 @@ pub fn paint(color: Option>, text: &str) -> String {
 struct Logger;
 
 impl Log for Logger {
-    fn enabled(&self, _: &Metadata) -> bool {
+    fn enabled(&self, _: &Metadata<'_>) -> bool {
         true
     }
 
-    fn log(&self, record: &Record) {
+    fn log(&self, record: &Record<'_>) {
         match record.level() {
             Level::Error => eprintln!(
                 "{} {}",
diff --git a/crates/cli/src/parse.rs b/crates/cli/src/parse.rs
index 1a1d9723..c65e273f 100644
--- a/crates/cli/src/parse.rs
+++ b/crates/cli/src/parse.rs
@@ -284,7 +284,7 @@ pub fn parse_file_at_path(
     path: &Path,
     name: &str,
     max_path_length: usize,
-    opts: &mut ParseFileOptions,
+    opts: &mut ParseFileOptions<'_>,
 ) -> Result<()> {
     let mut _log_session = None;
     parser.set_language(language)?;
@@ -774,7 +774,7 @@ pub fn render_cst<'a, 'b: 'a>(
     source_code: &[u8],
     tree: &'b Tree,
     cursor: &mut TreeCursor<'a>,
-    opts: &ParseFileOptions,
+    opts: &ParseFileOptions<'_>,
     out: &mut impl Write,
 ) -> Result<()> {
     let lossy_source_code = String::from_utf8_lossy(source_code);
@@ -841,9 +841,9 @@ fn render_node_text(source: &str) -> String {
 }
 
 fn write_node_text(
-    opts: &ParseFileOptions,
+    opts: &ParseFileOptions<'_>,
     out: &mut impl Write,
-    cursor: &TreeCursor,
+    cursor: &TreeCursor<'_>,
     is_named: bool,
     source: &str,
     color: Option + Copy>,
@@ -906,7 +906,7 @@ fn write_node_text(
     Ok(())
 }
 
-fn render_line_feed(source: &str, opts: &ParseFileOptions) -> String {
+fn render_line_feed(source: &str, opts: &ParseFileOptions<'_>) -> String {
     if cfg!(windows) {
         source.replace("\r\n", &paint(opts.parse_theme.line_feed, "\r\n"))
     } else {
@@ -915,8 +915,8 @@ fn render_line_feed(source: &str, opts: &ParseFileOptions) -> String {
 }
 
 fn render_node_range(
-    opts: &ParseFileOptions,
-    cursor: &TreeCursor,
+    opts: &ParseFileOptions<'_>,
+    cursor: &TreeCursor<'_>,
     is_named: bool,
     is_multiline: bool,
     total_width: usize,
@@ -952,8 +952,8 @@ fn render_node_range(
 }
 
 fn cst_render_node(
-    opts: &ParseFileOptions,
-    cursor: &mut TreeCursor,
+    opts: &ParseFileOptions<'_>,
+    cursor: &TreeCursor<'_>,
     source_code: &[u8],
     out: &mut impl Write,
     total_width: usize,
diff --git a/crates/cli/src/query_testing.rs b/crates/cli/src/query_testing.rs
index e4923d65..776f1e14 100644
--- a/crates/cli/src/query_testing.rs
+++ b/crates/cli/src/query_testing.rs
@@ -14,7 +14,7 @@ pub struct Utf8Point {
 }
 
 impl std::fmt::Display for Utf8Point {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "({}, {})", self.row, self.column)
     }
 }
diff --git a/crates/cli/src/test.rs b/crates/cli/src/test.rs
index b568e4a3..a7d0f3fa 100644
--- a/crates/cli/src/test.rs
+++ b/crates/cli/src/test.rs
@@ -603,7 +603,7 @@ impl std::fmt::Display for TestSummary {
 
 pub fn run_tests_at_path(
     parser: &mut Parser,
-    opts: &TestOptions,
+    opts: &TestOptions<'_>,
     test_summary: &mut TestSummary,
 ) -> Result<()> {
     let test_entry = parse_tests(&opts.path)?;
@@ -814,7 +814,7 @@ impl TestCorrection {
 fn run_tests(
     parser: &mut Parser,
     test_entry: TestEntry,
-    opts: &TestOptions,
+    opts: &TestOptions<'_>,
     test_summary: &mut TestSummary,
     corrected_entries: &mut Vec,
     is_root: bool,
@@ -1070,7 +1070,9 @@ fn run_tests(
 
     let mut ran_test_in_group = false;
 
-    let matches_filter = |name: &str, file_name: &Option, opts: &TestOptions| {
+    let matches_filter = |name: &str,
+                          file_name: &Option,
+                          opts: &TestOptions<'_>| {
         if let (Some(test_file_path), Some(filter_file_name)) = (file_name, &opts.file_name) {
             if !filter_file_name.eq(test_file_path) {
diff --git a/crates/cli/src/test_highlight.rs b/crates/cli/src/test_highlight.rs
index d96f90c2..316c3252 100644
--- a/crates/cli/src/test_highlight.rs
+++ b/crates/cli/src/test_highlight.rs
@@ -22,7 +22,7 @@ pub struct Failure {
 impl std::error::Error for Failure {}
 
 impl std::fmt::Display for Failure {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(
             f,
             "Failure - row: {}, column: {}, expected highlight '{}', actual highlights: ",
diff --git a/crates/cli/src/test_tags.rs b/crates/cli/src/test_tags.rs
index 882718e5..e60accb1 100644
--- a/crates/cli/src/test_tags.rs
+++ b/crates/cli/src/test_tags.rs
@@ -21,7 +21,7 @@ pub struct Failure {
 impl std::error::Error for Failure {}
 
 impl std::fmt::Display for Failure {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(
             f,
             "Failure - row: {}, column: {}, expected tag: '{}', actual tag: ",
diff --git a/crates/cli/src/tests/helpers/query_helpers.rs b/crates/cli/src/tests/helpers/query_helpers.rs
index e648ac8e..b7fd8228 100644
--- a/crates/cli/src/tests/helpers/query_helpers.rs
+++ b/crates/cli/src/tests/helpers/query_helpers.rs
@@ -75,7 +75,7 @@ impl Pattern {
         (pattern, pattern_start..pattern_end)
     }
 
-    fn random_pattern_for_node(cursor: &mut TreeCursor, rng: &mut impl Rng) -> Self {
+    fn random_pattern_for_node(cursor: &mut TreeCursor<'_>, rng: &mut impl Rng) -> Self {
         let node = cursor.node();
 
         // Sometimes specify the node's type, sometimes use a wildcard.
@@ -225,7 +225,7 @@ impl Pattern {
         }
 
         // Find every matching combination of child patterns and child nodes.
-        let mut finished_matches = Vec::<Match>::new();
+        let mut finished_matches = Vec::<Match<'_, '_>>::new();
         if cursor.goto_first_child() {
             let mut match_states = vec![(0, mat)];
             loop {
@@ -306,7 +306,7 @@ impl Ord for Match<'_, '_> {
     }
 }
 
-fn compare_depth_first(a: Node, b: Node) -> Ordering {
+fn compare_depth_first(a: Node<'_>, b: Node<'_>) -> Ordering {
     let a = a.byte_range();
     let b = b.byte_range();
     a.start.cmp(&b.start).then_with(|| b.end.cmp(&a.end))
diff --git a/crates/cli/src/tests/node_test.rs b/crates/cli/src/tests/node_test.rs
index 614bfdb9..ef01d206 100644
--- a/crates/cli/src/tests/node_test.rs
+++ b/crates/cli/src/tests/node_test.rs
@@ -1219,7 +1219,7 @@ private:
     );
 }
 
-fn get_all_nodes(tree: &Tree) -> Vec<Node> {
+fn get_all_nodes(tree: &Tree) -> Vec<Node<'_>> {
    let mut result = Vec::new();
    let mut visited_children = false;
    let mut cursor = tree.walk();
diff --git a/crates/cli/src/tests/proc_macro/src/lib.rs b/crates/cli/src/tests/proc_macro/src/lib.rs
index a63006cd..56d50191 100644
--- a/crates/cli/src/tests/proc_macro/src/lib.rs
+++ b/crates/cli/src/tests/proc_macro/src/lib.rs
@@ -44,7 +44,7 @@ pub fn test_with_seed(args: TokenStream, input: TokenStream) -> TokenStream {
 }
 
 impl Parse for Args {
-    fn parse(input: ParseStream) -> syn::Result<Self> {
+    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
         let mut retry = None;
         let mut seed = None;
         let mut seed_fn = None;
diff --git a/crates/cli/src/tests/query_test.rs b/crates/cli/src/tests/query_test.rs
index 3f1467e5..1b3bae02 100644
--- a/crates/cli/src/tests/query_test.rs
+++ b/crates/cli/src/tests/query_test.rs
@@ -3971,7 +3971,7 @@ fn test_query_text_callback_returns_chunks() {
     parser.set_language(&language).unwrap();
     let tree = parser.parse(source, None).unwrap();
     let mut cursor = QueryCursor::new();
-    let captures = cursor.captures(&query, tree.root_node(), |node: Node| {
+    let captures = cursor.captures(&query, tree.root_node(), |node: Node<'_>| {
         chunks_in_range(node.byte_range())
     });
 
diff --git a/crates/generate/src/build_tables.rs b/crates/generate/src/build_tables.rs
index 8c6ef2a4..489412cf 100644
--- a/crates/generate/src/build_tables.rs
+++ b/crates/generate/src/build_tables.rs
@@ -119,7 +119,7 @@ fn get_following_tokens(
     syntax_grammar: &SyntaxGrammar,
     lexical_grammar: &LexicalGrammar,
     inlines: &InlinedProductionMap,
-    builder: &ParseItemSetBuilder,
+    builder: &ParseItemSetBuilder<'_>,
 ) -> Vec<TokenSet> {
     let mut result = vec![TokenSet::new(); lexical_grammar.variables.len()];
     let productions = syntax_grammar
@@ -160,8 +160,8 @@ fn populate_error_state(
     parse_table: &mut ParseTable,
     syntax_grammar: &SyntaxGrammar,
     lexical_grammar: &LexicalGrammar,
-    coincident_token_index: &CoincidentTokenIndex,
-    token_conflict_map: &TokenConflictMap,
+    coincident_token_index: &CoincidentTokenIndex<'_>,
+    token_conflict_map: &TokenConflictMap<'_>,
     keywords: &TokenSet,
 ) {
     let state = &mut parse_table.states[0];
@@ -323,8 +323,8 @@ fn identify_keywords(
     lexical_grammar: &LexicalGrammar,
     parse_table: &ParseTable,
     word_token: Option,
-    token_conflict_map: &TokenConflictMap,
-    coincident_token_index: &CoincidentTokenIndex,
+    token_conflict_map: &TokenConflictMap<'_>,
+    coincident_token_index: &CoincidentTokenIndex<'_>,
 ) -> TokenSet {
     if word_token.is_none() {
         return TokenSet::new();
@@ -429,7 +429,7 @@ fn identify_keywords(
 fn mark_fragile_tokens(
     parse_table: &mut ParseTable,
     lexical_grammar: &LexicalGrammar,
-    token_conflict_map: &TokenConflictMap,
+    token_conflict_map: &TokenConflictMap<'_>,
 ) {
     let n = lexical_grammar.variables.len();
     let mut valid_tokens_mask = Vec::with_capacity(n);
@@ -543,7 +543,7 @@ fn report_state_info<'a>(
     }
 }
 
-fn all_chars_are_alphabetical(cursor: &NfaCursor) -> bool {
+fn all_chars_are_alphabetical(cursor: &NfaCursor<'_>) -> bool {
     cursor.transition_chars().all(|(chars, is_sep)| {
         if is_sep {
             true
diff --git a/crates/generate/src/build_tables/build_lex_table.rs b/crates/generate/src/build_tables/build_lex_table.rs
index 9d0d4fb7..b4c43eed 100644
--- a/crates/generate/src/build_tables/build_lex_table.rs
+++ b/crates/generate/src/build_tables/build_lex_table.rs
@@ -27,8 +27,8 @@ pub fn build_lex_table(
     syntax_grammar: &SyntaxGrammar,
     lexical_grammar: &LexicalGrammar,
     keywords: &TokenSet,
-    coincident_token_index: &CoincidentTokenIndex,
-    token_conflict_map: &TokenConflictMap,
+    coincident_token_index: &CoincidentTokenIndex<'_>,
+    token_conflict_map: &TokenConflictMap<'_>,
 ) -> LexTables {
     let keyword_lex_table = if syntax_grammar.word_token.is_some() {
         let mut builder = LexTableBuilder::new(lexical_grammar);
@@ -284,8 +284,8 @@ fn merge_token_set(
     tokens: &mut TokenSet,
     other: &TokenSet,
     lexical_grammar: &LexicalGrammar,
-    token_conflict_map: &TokenConflictMap,
-    coincident_token_index: &CoincidentTokenIndex,
+    token_conflict_map: &TokenConflictMap<'_>,
+    coincident_token_index: &CoincidentTokenIndex<'_>,
 ) -> bool {
     for i in 0..lexical_grammar.variables.len() {
         let symbol = Symbol::terminal(i);
diff --git a/crates/generate/src/build_tables/build_parse_table.rs b/crates/generate/src/build_tables/build_parse_table.rs
index 66f29609..8b601a4a 100644
--- a/crates/generate/src/build_tables/build_parse_table.rs
+++ b/crates/generate/src/build_tables/build_parse_table.rs
@@ -114,7 +114,7 @@ pub struct AmbiguousExtraError {
 }
 
 impl std::fmt::Display for ConflictError {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         for symbol in &self.symbol_sequence {
             write!(f, " {symbol}")?;
         }
@@ -171,7 +171,7 @@ impl std::fmt::Display for ConflictError {
 }
 
 impl std::fmt::Display for Interpretation {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         for symbol in &self.preceding_symbols {
             write!(f, " {symbol}")?;
         }
@@ -191,7 +191,7 @@ impl std::fmt::Display for Interpretation {
 }
 
 impl std::fmt::Display for Resolution {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
             Self::Precedence { symbols } => {
                 write!(f, "Specify a higher precedence in ")?;
@@ -227,7 +227,7 @@ impl std::fmt::Display for Resolution {
 }
 
 impl std::fmt::Display for AmbiguousExtraError {
-    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         for (i, symbol) in self.parent_symbols.iter().enumerate() {
             if i > 0 {
                 write!(f, ", ")?;
@@ -695,7 +695,7 @@ impl<'a> ParseTableBuilder<'a> {
 
     fn handle_conflict(
         &mut self,
-        item_set: &ParseItemSet,
+        item_set: &ParseItemSet<'_>,
         state_id: ParseStateId,
         preceding_symbols: &SymbolSequence,
         preceding_auxiliary_symbols: &[AuxiliarySymbolInfo],
@@ -917,7 +917,7 @@ impl<'a> ParseTableBuilder<'a> {
         shift_items.sort_unstable();
         reduce_items.sort_unstable();
 
-        let get_rule_names = |items: &[&ParseItem]| -> Vec {
+        let get_rule_names = |items: &[&ParseItem<'_>]| -> Vec {
             let mut last_rule_id = None;
             let mut result = Vec::with_capacity(items.len());
             for item in items {
@@ -1030,7 +1030,7 @@ impl<'a> ParseTableBuilder<'a> {
 
     fn get_auxiliary_node_info(
         &self,
-        item_set: &ParseItemSet,
+        item_set: &ParseItemSet<'_>,
         symbol: Symbol,
     ) -> AuxiliarySymbolInfo {
         let parent_symbols = item_set
@@ -1053,7 +1053,7 @@ impl<'a> ParseTableBuilder<'a> {
         }
     }
 
-    fn get_production_id(&mut self, item: &ParseItem) -> ProductionInfoId {
+    fn get_production_id(&mut self, item: &ParseItem<'_>) -> ProductionInfoId {
         let mut production_info = ProductionInfo {
             alias_sequence: Vec::new(),
             field_map: BTreeMap::new(),
diff --git a/crates/generate/src/build_tables/coincident_tokens.rs b/crates/generate/src/build_tables/coincident_tokens.rs
index d1e40741..7ccd1ed4 100644
--- a/crates/generate/src/build_tables/coincident_tokens.rs
+++ b/crates/generate/src/build_tables/coincident_tokens.rs
@@ -56,7 +56,7 @@ impl<'a> CoincidentTokenIndex<'a> {
 }
 
 impl fmt::Debug for CoincidentTokenIndex<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         writeln!(f, "CoincidentTokenIndex {{")?;
 
         writeln!(f, "  entries: {{")?;
diff --git a/crates/generate/src/build_tables/item.rs b/crates/generate/src/build_tables/item.rs
index cd70ce74..2ee83da9 100644
--- a/crates/generate/src/build_tables/item.rs
+++ b/crates/generate/src/build_tables/item.rs
@@ -185,7 +185,7 @@ impl<'a> ParseItemSet<'a> {
 }
 
 impl fmt::Display for ParseItemDisplay<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         if self.0.is_augmented() {
             write!(f, "START →")?;
         } else {
@@ -281,7 +281,7 @@ fn display_variable_name(source: &str) -> String {
 }
 
 impl fmt::Display for TokenSetDisplay<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         write!(f, "[")?;
         for (i, symbol) in self.0.iter().enumerate() {
             if i > 0 {
@@ -306,7 +306,7 @@ impl fmt::Display for TokenSetDisplay<'_> {
 }
 
 impl fmt::Display for ParseItemSetDisplay<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
         for entry in &self.0.entries {
             write!(
                 f,
diff --git a/crates/generate/src/build_tables/item_set_builder.rs b/crates/generate/src/build_tables/item_set_builder.rs
index 44e05702..057e2408 100644
--- a/crates/generate/src/build_tables/item_set_builder.rs
+++ b/crates/generate/src/build_tables/item_set_builder.rs
@@ -342,7 +342,7 @@ impl<'a> ParseItemSetBuilder<'a> {
 }
 
 impl fmt::Debug for ParseItemSetBuilder<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         writeln!(f, "ParseItemSetBuilder {{")?;
 
         writeln!(f, "  first_sets: {{")?;
diff --git a/crates/generate/src/build_tables/minimize_parse_table.rs b/crates/generate/src/build_tables/minimize_parse_table.rs
index 6c26f1c4..4a1214ec 100644
--- a/crates/generate/src/build_tables/minimize_parse_table.rs
+++ b/crates/generate/src/build_tables/minimize_parse_table.rs
@@ -19,7 +19,7 @@ pub fn minimize_parse_table(
     syntax_grammar: &SyntaxGrammar,
     lexical_grammar: &LexicalGrammar,
     simple_aliases: &AliasMap,
-    token_conflict_map: &TokenConflictMap,
+    token_conflict_map: &TokenConflictMap<'_>,
     keywords: &TokenSet,
     optimizations: OptLevel,
 ) {
diff --git a/crates/generate/src/build_tables/token_conflicts.rs b/crates/generate/src/build_tables/token_conflicts.rs
index d72effd4..1d0493ba 100644
--- a/crates/generate/src/build_tables/token_conflicts.rs
+++ b/crates/generate/src/build_tables/token_conflicts.rs
@@ -146,7 +146,7 @@ impl<'a> TokenConflictMap<'a> {
 }
 
 impl fmt::Debug for TokenConflictMap<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         writeln!(f, "TokenConflictMap {{")?;
 
         let syntax_grammar = SyntaxGrammar::default();
@@ -205,7 +205,7 @@ const fn matrix_index(variable_count: usize, i: usize, j: usize) -> usize {
     variable_count * i + j
 }
 
-fn get_starting_chars(cursor: &mut NfaCursor, grammar: &LexicalGrammar) -> Vec {
+fn get_starting_chars(cursor: &mut NfaCursor<'_>, grammar: &LexicalGrammar) -> Vec {
     let mut result = Vec::with_capacity(grammar.variables.len());
     for variable in &grammar.variables {
         cursor.reset(vec![variable.start_state]);
@@ -237,7 +237,7 @@ fn get_following_chars(
 }
 
 fn compute_conflict_status(
-    cursor: &mut NfaCursor,
+    cursor: &mut NfaCursor<'_>,
     grammar: &LexicalGrammar,
     following_chars: &[CharacterSet],
     i: usize,
diff --git a/crates/generate/src/grammars.rs b/crates/generate/src/grammars.rs
index c6e0acdd..cd40e5c1 100644
--- a/crates/generate/src/grammars.rs
+++ b/crates/generate/src/grammars.rs
@@ -264,7 +264,7 @@ impl InlinedProductionMap {
 }
 
 impl fmt::Display for PrecedenceEntry {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Self::Name(n) => write!(f, "'{n}'"),
             Self::Symbol(s) => write!(f, "$.{s}"),
diff --git a/crates/generate/src/nfa.rs b/crates/generate/src/nfa.rs
index eecbc40b..0e0e6a47 100644
--- a/crates/generate/src/nfa.rs
+++ b/crates/generate/src/nfa.rs
@@ -409,7 +409,7 @@ impl PartialOrd for CharacterSet {
 }
 
 impl fmt::Debug for CharacterSet {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "CharacterSet [")?;
         let mut set = self.clone();
         if self.contains(char::MAX) {
@@ -440,7 +440,7 @@ impl Nfa {
 }
 
 impl fmt::Debug for Nfa {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         writeln!(f, "Nfa {{ states: {{")?;
         for (i, state) in self.states.iter().enumerate() {
             writeln!(f, "  {i}: {state:?},")?;
diff --git a/crates/generate/src/quickjs.rs b/crates/generate/src/quickjs.rs
index 99e77397..de092515 100644
--- a/crates/generate/src/quickjs.rs
+++ b/crates/generate/src/quickjs.rs
@@ -15,11 +15,11 @@ use super::{IoError, JSError, JSResult};
 const DSL: &[u8] = include_bytes!("dsl.js");
 
 trait JSResultExt {
-    fn or_js_error(self, ctx: &Ctx) -> JSResult;
+    fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult;
 }
 
 impl JSResultExt for Result {
-    fn or_js_error(self, ctx: &Ctx) -> JSResult {
+    fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult {
         match self {
             Ok(v) => Ok(v),
             Err(rquickjs::Error::Exception) => Err(format_js_exception(ctx.catch())),
@@ -28,7 +28,7 @@ impl JSResultExt for Result {
     }
 }
 
-fn format_js_exception(v: Value) -> JSError {
+fn format_js_exception(v: Value<'_>) -> JSError {
     let Some(exception) = v.into_exception() else {
         return JSError::QuickJS("Expected a JS exception".to_string());
     };
@@ -215,11 +215,11 @@ fn try_resolve_path(path: &Path) -> rquickjs::Result {
 }
 
 #[allow(clippy::needless_pass_by_value)]
-fn require_from_module<'a>(
-    ctx: Ctx<'a>,
+fn require_from_module<'js>(
+    ctx: Ctx<'js>,
     module_path: String,
     from_module: &str,
-) -> rquickjs::Result> {
+) -> rquickjs::Result> {
     let current_module = PathBuf::from(from_module);
     let current_dir = if current_module.is_file() {
         current_module.parent().unwrap_or(Path::new("."))
@@ -234,13 +234,13 @@ fn require_from_module<'a>(
     load_module_from_content(&ctx, &resolved_path, &contents)
 }
 
-fn load_module_from_content<'a>(
-    ctx: &Ctx<'a>,
+fn load_module_from_content<'js>(
+    ctx: &Ctx<'js>,
     path: &Path,
     contents: &str,
-) -> rquickjs::Result> {
+) -> rquickjs::Result> {
     if path.extension().is_some_and(|ext| ext == "json") {
-        return ctx.eval::(format!("JSON.parse({contents:?})"));
+        return ctx.eval::, _>(format!("JSON.parse({contents:?})"));
     }
 
     let exports = Object::new(ctx.clone())?;
@@ -256,7 +256,7 @@ fn load_module_from_content<'a>(
     let module_path = filename.clone();
     let require = Function::new(
         ctx.clone(),
-        move |ctx_inner: Ctx<'a>, target_path: String| -> rquickjs::Result> {
+        move |ctx_inner: Ctx<'js>, target_path: String| -> rquickjs::Result> {
             require_from_module(ctx_inner, target_path, &module_path)
         },
     )?;
@@ -264,8 +264,8 @@ fn load_module_from_content<'a>(
     let wrapper =
         format!("(function(exports, require, module, __filename, __dirname) {{ {contents} }})");
 
-    let module_func = ctx.eval::(wrapper)?;
-    module_func.call::<_, Value>((exports, require, module_obj.clone(), filename, dirname))?;
+    let module_func = ctx.eval::, _>(wrapper)?;
+    module_func.call::<_, Value<'js>>((exports, require, module_obj.clone(), filename, dirname))?;
 
     module_obj.get("exports")
 }
@@ -318,7 +318,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult {
     let grammar_path_string = grammar_path.to_string_lossy().to_string();
     let main_require = Function::new(
         ctx.clone(),
-        move |ctx_inner, target_path: String| -> rquickjs::Result {
+        move |ctx_inner, target_path: String| -> rquickjs::Result> {
             require_from_module(ctx_inner, target_path, &grammar_path_string)
         },
     )?;
@@ -328,7 +328,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult {
     promise.finish::<()>().or_js_error(&ctx)?;
 
     let grammar_json = ctx
-        .eval::("globalThis.output")
+        .eval::, _>("globalThis.output")
         .map(|s| s.to_string())
        .or_js_error(&ctx)?
        .or_js_error(&ctx)?;
@@ -437,8 +437,8 @@ mod tests {
             const pkg = require('./package.json');
             module.exports = grammar({
               name: 'json_test',
-                rules: {
-                    source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
+              rules: {
+                source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
               }
             });
            ",
diff --git a/crates/highlight/src/highlight.rs b/crates/highlight/src/highlight.rs
index 9a78d1ac..ecee2a65 100644
--- a/crates/highlight/src/highlight.rs
+++ b/crates/highlight/src/highlight.rs
@@ -189,7 +189,7 @@ struct HighlightIterLayer<'a> {
     depth: usize,
 }
 
-pub struct _QueryCaptures<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> {
+pub struct _QueryCaptures<'query, 'tree, T: TextProvider, I: AsRef<[u8]>> {
     ptr: *mut ffi::TSQueryCursor,
     query: &'query Query,
     text_provider: T,
@@ -225,7 +225,7 @@ impl<'tree> _QueryMatch<'_, 'tree> {
     }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> Iterator
+impl<'query, 'tree, T: TextProvider, I: AsRef<[u8]>> Iterator
     for _QueryCaptures<'query, 'tree, T, I>
 {
     type Item = (QueryMatch<'query, 'tree>, usize);
@@ -240,7 +240,10 @@ impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> Iterator
                 m.as_mut_ptr(),
                 core::ptr::addr_of_mut!(capture_index),
             ) {
-                let result = std::mem::transmute::<_QueryMatch, QueryMatch>(_QueryMatch::new(
+                let result = std::mem::transmute::<
+                    _QueryMatch<'query, 'tree>,
+                    QueryMatch<'query, 'tree>,
+                >(_QueryMatch::new(
                     &m.assume_init(),
                     self.ptr,
                 ));
@@ -594,6 +597,7 @@ impl<'a> HighlightIterLayer<'a> {
             }
         }
 
+        // SAFETY:
         // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
         // prevents them from being moved. But both of these values are really just
         // pointers, so it's actually ok to move them.
@@ -601,12 +605,18 @@ impl<'a> HighlightIterLayer<'a> {
            let cursor_ref = unsafe {
                mem::transmute::<&mut QueryCursor, &'static mut QueryCursor>(&mut cursor)
            };
-            let captures = unsafe {
-                std::mem::transmute::, _QueryCaptures<_, _>>(
-                    cursor_ref.captures(&config.query, tree_ref.root_node(), source),
-                )
-            }
-            .peekable();
+            let captures =
+                unsafe {
+                    std::mem::transmute::<
+                        QueryCaptures<'_, '_, _, _>,
+                        _QueryCaptures<'_, '_, _, _>,
+                    >(cursor_ref.captures(
+                        &config.query,
+                        tree_ref.root_node(),
+                        source,
+                    ))
+                }
+                .peekable();
 
             result.push(HighlightIterLayer {
                 highlight_end_stack: Vec::new(),
@@ -648,7 +658,7 @@ impl<'a> HighlightIterLayer<'a> {
     // of their children.
     fn intersect_ranges(
         parent_ranges: &[Range],
-        nodes: &[Node],
+        nodes: &[Node<'_>],
        includes_children: bool,
     ) -> Vec {
         let mut cursor = nodes[0].walk();
diff --git a/crates/loader/src/loader.rs b/crates/loader/src/loader.rs
index 11c8b673..9f773a4c 100644
--- a/crates/loader/src/loader.rs
+++ b/crates/loader/src/loader.rs
@@ -765,7 +765,7 @@ impl Loader {
     }
 
     #[must_use]
-    pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration, &Path)> {
+    pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration<'static>, &Path)> {
         self.language_configurations
             .iter()
             .map(|c| (c, self.languages_by_id[c.language_id].0.as_ref()))
@@ -775,7 +775,7 @@ impl Loader {
     pub fn language_configuration_for_scope(
         &self,
         scope: &str,
-    ) -> LoaderResult> {
+    ) -> LoaderResult)>> {
         for configuration in &self.language_configurations {
             if configuration.scope.as_ref().is_some_and(|s| s == scope) {
                 let language = self.language_for_id(configuration.language_id)?;
@@ -788,7 +788,7 @@ impl Loader {
     pub fn language_configuration_for_first_line_regex(
         &self,
         path: &Path,
-    ) -> LoaderResult> {
+    ) -> LoaderResult)>> {
         self.language_configuration_ids_by_first_line_regex
             .iter()
             .try_fold(None, |_, (regex, ids)| {
@@ -817,7 +817,7 @@ impl Loader {
     pub fn language_configuration_for_file_name(
         &self,
         path: &Path,
-    ) -> LoaderResult> {
+    ) -> LoaderResult)>> {
         // Find all the language configurations that match this file name
         // or a suffix of the file name.
         let configuration_ids = path
@@ -889,7 +889,7 @@ impl Loader {
     pub fn language_configuration_for_injection_string(
         &self,
         string: &str,
-    ) -> LoaderResult> {
+    ) -> LoaderResult)>> {
         let mut best_match_length = 0;
         let mut best_match_position = None;
         for (i, configuration) in self.language_configurations.iter().enumerate() {
@@ -915,7 +915,7 @@ impl Loader {
 
     pub fn language_for_configuration(
         &self,
-        configuration: &LanguageConfiguration,
+        configuration: &LanguageConfiguration<'_>,
     ) -> LoaderResult {
         self.language_for_id(configuration.language_id)
     }
@@ -946,7 +946,7 @@ impl Loader {
         self.load_language_at_path(config).map(|_| ())
     }
 
-    pub fn load_language_at_path(&self, mut config: CompileConfig) -> LoaderResult {
+    pub fn load_language_at_path(&self, mut config: CompileConfig<'_>) -> LoaderResult {
         let grammar_path = config.src_path.join("grammar.json");
         config.name = Self::grammar_json_name(&grammar_path)?;
         self.load_language_at_path_with_name(config)
@@ -954,7 +954,7 @@ impl Loader {
 
     pub fn load_language_at_path_with_name(
         &self,
-        mut config: CompileConfig,
+        mut config: CompileConfig<'_>,
     ) -> LoaderResult {
         let mut lib_name = config.name.clone();
         let language_fn_name = format!("tree_sitter_{}", config.name.replace('-', "_"));
@@ -1128,7 +1128,7 @@ impl Loader {
             })?;
         let language = unsafe {
             let language_fn = library
-                .get:: Language>>(function_name.as_bytes())
+                .get:: Language>>(function_name.as_bytes())
                 .map_err(|e| {
                     LoaderError::Symbol(SymbolError {
                         error: e,
@@ -1144,7 +1144,7 @@ impl Loader {
 
     fn compile_parser_to_dylib(
         &self,
-        config: &CompileConfig,
+        config: &CompileConfig<'_>,
         lock_file: &fs::File,
         lock_path: &Path,
     ) -> LoaderResult<()> {
@@ -1534,7 +1534,9 @@ impl Loader {
     }
 
     #[must_use]
-    pub fn get_language_configuration_in_current_path(&self) -> Option<&LanguageConfiguration> {
+    pub fn get_language_configuration_in_current_path(
+        &self,
+    ) -> Option<&LanguageConfiguration<'static>> {
         self.language_configuration_in_current_path
             .map(|i| &self.language_configurations[i])
     }
@@ -1543,7 +1545,7 @@ impl Loader {
         &mut self,
         parser_path: &Path,
         set_current_path_config: bool,
-    ) -> LoaderResult<&[LanguageConfiguration]> {
+    ) -> LoaderResult<&[LanguageConfiguration<'static>]> {
         let initial_language_configuration_count = self.language_configurations.len();
 
         match TreeSitterJSON::from_file(parser_path) {
diff --git a/crates/tags/src/tags.rs b/crates/tags/src/tags.rs
index 16270b0a..c6654876 100644
--- a/crates/tags/src/tags.rs
+++ b/crates/tags/src/tags.rs
@@ -313,6 +313,7 @@ impl TagsContext {
             )
             .ok_or(Error::Cancelled)?;
 
+        // SAFETY:
         // The `matches` iterator borrows the `Tree`, which prevents it from being
         // moved. But the tree is really just a pointer, so it's actually ok to
         // move it.
diff --git a/lib/binding_rust/lib.rs b/lib/binding_rust/lib.rs
index bf86cf74..a402e01b 100644
--- a/lib/binding_rust/lib.rs
+++ b/lib/binding_rust/lib.rs
@@ -231,7 +231,7 @@ impl<'a> ParseOptions<'a> {
     /// This is useful when you need to reuse parse options multiple times, e.g., calling
     /// [`Parser::parse_with_options`] multiple times with the same options.
     #[must_use]
-    pub fn reborrow(&mut self) -> ParseOptions {
+    pub fn reborrow(&mut self) -> ParseOptions<'_> {
         ParseOptions {
             progress_callback: match &mut self.progress_callback {
                 Some(cb) => Some(*cb),
@@ -266,7 +266,7 @@ impl<'a> QueryCursorOptions<'a> {
     /// This is useful when you need to reuse query cursor options multiple times, e.g., calling
     /// [`QueryCursor::matches`] multiple times with the same options.
     #[must_use]
-    pub fn reborrow(&mut self) -> QueryCursorOptions {
+    pub fn reborrow(&mut self) -> QueryCursorOptions<'_> {
         QueryCursorOptions {
             progress_callback: match &mut self.progress_callback {
                 Some(cb) => Some(*cb),
@@ -283,7 +283,7 @@ impl Drop for QueryCursorOptionsDrop {
         unsafe {
             if !(*self.0).payload.is_null() {
                 drop(Box::from_raw(
-                    (*self.0).payload.cast::(),
+                    (*self.0).payload.cast::>(),
                 ));
             }
             drop(Box::from_raw(self.0));
@@ -317,7 +317,7 @@ pub trait Decode {
 
 /// A stateful object for walking a syntax [`Tree`] efficiently.
 #[doc(alias = "TSTreeCursor")]
-pub struct TreeCursor<'cursor>(ffi::TSTreeCursor, PhantomData<&'cursor ()>);
+pub struct TreeCursor<'tree>(ffi::TSTreeCursor, PhantomData<&'tree ()>);
 
 /// A set of patterns that match nodes in a syntax tree.
 #[doc(alias = "TSQuery")]
@@ -392,7 +392,7 @@ pub struct QueryMatch<'cursor, 'tree> {
 }
 
 /// A sequence of [`QueryMatch`]es associated with a given [`QueryCursor`].
-pub struct QueryMatches<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> {
+pub struct QueryMatches<'query, 'tree, T: TextProvider, I: AsRef<[u8]>> {
     ptr: *mut ffi::TSQueryCursor,
     query: &'query Query,
     text_provider: T,
@@ -407,7 +407,7 @@ pub struct QueryMatches<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]
 ///
 /// During iteration, each element contains a [`QueryMatch`] and index. The index can
 /// be used to access the new capture inside of the [`QueryMatch::captures`]'s [`captures`].
-pub struct QueryCaptures<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> {
+pub struct QueryCaptures<'query, 'tree, T: TextProvider, I: AsRef<[u8]>> {
     ptr: *mut ffi::TSQueryCursor,
     query: &'query Query,
     text_provider: T,
@@ -423,7 +423,7 @@ where
     I: AsRef<[u8]>,
 {
     type I: Iterator;
-    fn text(&mut self, node: Node) -> Self::I;
+    fn text(&mut self, node: Node<'_>) -> Self::I;
 }
 
 /// A particular [`Node`] that has been captured with a particular name within a
@@ -755,17 +755,17 @@ impl Parser {
     /// Get the parser's current logger.
     #[doc(alias = "ts_parser_logger")]
     #[must_use]
-    pub fn logger(&self) -> Option<&Logger> {
+    pub fn logger(&self) -> Option<&Logger<'_>> {
         let logger = unsafe { ffi::ts_parser_logger(self.0.as_ptr()) };
-        unsafe { logger.payload.cast::().as_ref() }
+        unsafe { logger.payload.cast::>().as_ref() }
     }
 
     /// Set the logging callback that the parser should use during parsing.
     #[doc(alias = "ts_parser_set_logger")]
-    pub fn set_logger(&mut self, logger: Option) {
+    pub fn set_logger(&mut self, logger: Option>) {
         let prev_logger = unsafe { ffi::ts_parser_logger(self.0.as_ptr()) };
         if !prev_logger.payload.is_null() {
-            drop(unsafe { Box::from_raw(prev_logger.payload.cast::()) });
+            drop(unsafe { Box::from_raw(prev_logger.payload.cast::>()) });
         }
 
         let c_logger = if let Some(logger) = logger {
@@ -776,7 +776,7 @@ impl Parser {
                 c_log_type: ffi::TSLogType,
                 c_message: *const c_char,
             ) {
-                let callback = payload.cast::().as_mut().unwrap();
+                let callback = payload.cast::>().as_mut().unwrap();
                 if let Ok(message) = CStr::from_ptr(c_message).to_str() {
                     let log_type = if c_log_type == ffi::TSLogTypeParse {
                         LogType::Parse
@@ -878,7 +878,7 @@ impl Parser {
         &mut self,
         callback: &mut F,
         old_tree: Option<&Tree>,
-        options: Option,
+        options: Option>,
     ) -> Option {
         type Payload<'a, F, T> = (&'a mut F, Option);
 
@@ -886,7 +886,7 @@ impl Parser {
         unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
             let callback = (*state)
                 .payload
-                .cast::()
+                .cast::>()
                 .as_mut()
                 .unwrap();
             match callback(&ParseState::from_raw(state)) {
@@ -902,7 +902,7 @@
             position: ffi::TSPoint,
             bytes_read: *mut u32,
         ) -> *const c_char {
-            let (callback, text) = payload.cast::>().as_mut().unwrap();
+            let (callback, text) = payload.cast::>().as_mut().unwrap();
             *text = Some(callback(byte_offset as usize, position.into()));
             let slice = text.as_ref().unwrap().as_ref();
             *bytes_read = slice.len() as u32;
@@ -934,7 +934,7 @@ impl Parser {
         // 1. A reference to the rust `callback`.
         // 2. The text that was returned from the previous call to `callback`. This allows the
         //    callback to return owned values like vectors.
-        let mut payload: Payload = (callback, None);
+        let mut payload: Payload<'_, F, T> = (callback, None);
 
         let c_input = ffi::TSInput {
             payload: ptr::addr_of_mut!(payload).cast::(),
@@ -992,14 +992,14 @@ impl Parser {
         &mut self,
         callback: &mut F,
         old_tree: Option<&Tree>,
-        options: Option,
+        options: Option>,
     ) -> Option {
         type Payload<'a, F, T> = (&'a mut F, Option);
 
         unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
             let callback = (*state)
                 .payload
-                .cast::()
+                .cast::>()
                 .as_mut()
                 .unwrap();
             match callback(&ParseState::from_raw(state)) {
@@ -1015,7 +1015,7 @@
             position: ffi::TSPoint,
             bytes_read: *mut u32,
         ) -> *const c_char {
-            let (callback, text) = payload.cast::>().as_mut().unwrap();
+            let (callback, text) = payload.cast::>().as_mut().unwrap();
             *text = Some(callback(
                 (byte_offset / 2) as usize,
                 Point {
@@ -1053,7 +1053,7 @@ impl Parser {
         // 1. A reference to the rust `callback`.
         // 2. The text that was returned from the previous call to `callback`. This allows the
         //    callback to return owned values like vectors.
-        let mut payload: Payload = (callback, None);
+        let mut payload: Payload<'_, F, T> = (callback, None);
 
         let c_input = ffi::TSInput {
             payload: core::ptr::addr_of_mut!(payload).cast::(),
@@ -1111,7 +1111,7 @@ impl Parser {
         &mut self,
         callback: &mut F,
         old_tree: Option<&Tree>,
-        options: Option,
+        options: Option>,
     ) -> Option {
         type Payload<'a, F, T> = (&'a mut F, Option);
 
@@ -1119,7 +1119,7 @@ impl Parser {
         unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
             let callback = (*state)
                 .payload
-                .cast::()
+                .cast::>()
                 .as_mut()
                 .unwrap();
             match callback(&ParseState::from_raw(state)) {
@@ -1135,7 +1135,7 @@
             position: ffi::TSPoint,
             bytes_read: *mut u32,
         ) -> *const c_char {
-            let (callback, text) = payload.cast::>().as_mut().unwrap();
+            let (callback, text) = payload.cast::>().as_mut().unwrap();
             *text = Some(callback(
                 (byte_offset / 2) as usize,
                 Point {
@@ -1173,7 +1173,7 @@ impl Parser {
         // 1. A reference to the rust `callback`.
         // 2. The text that was returned from the previous call to `callback`. This allows the
         //    callback to return owned values like vectors.
-        let mut payload: Payload = (callback, None);
+        let mut payload: Payload<'_, F, T> = (callback, None);
 
         let c_input = ffi::TSInput {
             payload: core::ptr::addr_of_mut!(payload).cast::(),
@@ -1215,14 +1215,14 @@ impl Parser {
         &mut self,
         callback: &mut F,
         old_tree: Option<&Tree>,
-        options: Option,
+        options: Option>,
     ) -> Option {
         type Payload<'a, F, T> = (&'a mut F, Option);
 
         unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
             let callback = (*state)
                 .payload
-                .cast::()
+                .cast::>()
                 .as_mut()
                 .unwrap();
             match callback(&ParseState::from_raw(state)) {
@@ -1251,7 +1251,7 @@
             position: ffi::TSPoint,
             bytes_read: *mut u32,
         ) -> *const c_char {
-            let (callback, text) = payload.cast::>().as_mut().unwrap();
+            let (callback, text) = payload.cast::>().as_mut().unwrap();
             *text = Some(callback(byte_offset as usize, position.into()));
             let slice = text.as_ref().unwrap().as_ref();
             *bytes_read = slice.len() as u32;
@@ -1283,7 +1283,7 @@ impl Parser {
         // 1. A reference to the rust `callback`.
         // 2. The text that was returned from the previous call to `callback`. This allows the
         //    callback to return owned values like vectors.
-        let mut payload: Payload = (callback, None);
+        let mut payload: Payload<'_, F, T> = (callback, None);
 
         let c_input = ffi::TSInput {
             payload: core::ptr::addr_of_mut!(payload).cast::(),
@@ -1395,7 +1395,7 @@ impl Tree {
     /// Get the root node of the syntax tree.
     #[doc(alias = "ts_tree_root_node")]
     #[must_use]
-    pub fn root_node(&self) -> Node {
+    pub fn root_node(&self) -> Node<'_> {
         Node::new(unsafe { ffi::ts_tree_root_node(self.0.as_ptr()) }).unwrap()
     }
 
@@ -1403,7 +1403,7 @@ impl Tree {
     /// forward by the given offset.
     #[doc(alias = "ts_tree_root_node_with_offset")]
     #[must_use]
-    pub fn root_node_with_offset(&self, offset_bytes: usize, offset_extent: Point) -> Node {
+    pub fn root_node_with_offset(&self, offset_bytes: usize, offset_extent: Point) -> Node<'_> {
         Node::new(unsafe {
             ffi::ts_tree_root_node_with_offset(
                 self.0.as_ptr(),
@@ -1417,7 +1417,7 @@ impl Tree {
     /// Get the language that was used to parse the syntax tree.
     #[doc(alias = "ts_tree_language")]
     #[must_use]
-    pub fn language(&self) -> LanguageRef {
+    pub fn language(&self) -> LanguageRef<'_> {
         LanguageRef(
             unsafe { ffi::ts_tree_language(self.0.as_ptr()) },
             PhantomData,
@@ -1437,7 +1437,7 @@ impl Tree {
     /// Create a new [`TreeCursor`] starting from the root of the tree.
     #[must_use]
-    pub fn walk(&self) -> TreeCursor {
+    pub fn walk(&self) -> TreeCursor<'_> {
         self.root_node().walk()
     }
 
@@ -1507,7 +1507,7 @@ impl Tree {
 }
 
 impl fmt::Debug for Tree {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "{{Tree {:?}}}", self.root_node())
     }
 }
@@ -1581,7 +1581,7 @@ impl<'tree> Node<'tree> {
     /// Get the [`Language`] that was used to parse this node's syntax tree.
     #[doc(alias = "ts_node_language")]
     #[must_use]
-    pub fn language(&self) -> LanguageRef {
+    pub fn language(&self) -> LanguageRef<'tree> {
         LanguageRef(unsafe { ffi::ts_node_language(self.0) }, PhantomData)
     }
 
@@ -2058,7 +2058,7 @@ impl hash::Hash for Node<'_> {
 }
 
 impl fmt::Debug for Node<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
             "{{Node {} {} - {}}}",
@@ -2070,7 +2070,7 @@ impl fmt::Debug for Node<'_> {
 }
 
 impl fmt::Display for Node<'_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let sexp = self.to_sexp();
         if sexp.is_empty() {
             write!(f, "")
@@ -2082,11 +2082,11 @@ impl fmt::Display for Node<'_> {
     }
 }
 
-impl<'cursor> TreeCursor<'cursor> {
+impl<'tree> TreeCursor<'tree> {
     /// Get the tree cursor's current [`Node`].
     #[doc(alias = "ts_tree_cursor_current_node")]
     #[must_use]
-    pub fn node(&self) -> Node<'cursor> {
+    pub fn node(&self) -> Node<'tree> {
         Node(
             unsafe { ffi::ts_tree_cursor_current_node(&self.0) },
             PhantomData,
@@ -2227,7 +2227,7 @@ impl<'cursor> TreeCursor<'cursor> {
     /// Re-initialize this tree cursor to start at the original node that the
     /// cursor was constructed with.
     #[doc(alias = "ts_tree_cursor_reset")]
-    pub fn reset(&mut self, node: Node<'cursor>) {
+    pub fn reset(&mut self, node: Node<'tree>) {
         unsafe { ffi::ts_tree_cursor_reset(&mut self.0, node.0) };
     }
 
@@ -3022,12 +3022,12 @@ impl QueryCursor {
         query: &'query Query,
         node: Node<'tree>,
         text_provider: T,
-        options: QueryCursorOptions,
+        options: QueryCursorOptions<'_>,
     ) -> QueryMatches<'query, 'tree, T, I> {
         unsafe extern "C" fn progress(state: *mut ffi::TSQueryCursorState) -> bool {
             let callback = (*state)
                 .payload
-                .cast::()
+                .cast::>()
                 .as_mut()
                 .unwrap();
             match callback(&QueryCursorState::from_raw(state)) {
@@ -3111,12 +3111,12 @@ impl QueryCursor {
         query: &'query Query,
         node: Node<'tree>,
         text_provider: T,
-        options: QueryCursorOptions,
+        options: QueryCursorOptions<'_>,
     ) -> QueryCaptures<'query, 'tree, T, I> {
         unsafe extern "C" fn progress(state: *mut ffi::TSQueryCursorState) -> bool {
             let callback = (*state)
                 .payload
-                .cast::()
+                .cast::>()
                 .as_mut()
                 .unwrap();
             match callback(&QueryCursorState::from_raw(state)) {
@@ -3404,7 +3404,7 @@ impl QueryProperty {
 /// Provide a `StreamingIterator` instead of the traditional `Iterator`, as the
 /// underlying object in the C library gets updated on each iteration. Copies would
 /// have their internal state overwritten, leading to Undefined Behavior
-impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> StreamingIterator
+impl<'query, 'tree, T: TextProvider, I: AsRef<[u8]>> StreamingIterator
     for QueryMatches<'query, 'tree, T, I>
 {
     type Item = QueryMatch<'query, 'tree>;
@@ -3435,15 +3435,13 @@ impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> StreamingIterator
     }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> StreamingIteratorMut
-    for QueryMatches<'query, 'tree, T, I>
-{
+impl, I: AsRef<[u8]>> StreamingIteratorMut for QueryMatches<'_, '_, T, I> {
     fn get_mut(&mut self) -> Option<&mut Self::Item> {
         self.current_match.as_mut()
     }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> StreamingIterator
+impl<'query, 'tree, T: TextProvider, I: AsRef<[u8]>> StreamingIterator
     for QueryCaptures<'query, 'tree, T, I>
 {
     type Item = (QueryMatch<'query, 'tree>, usize);
@@ -3480,9 +3478,7 @@ impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> StreamingIterator
     }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider, I: AsRef<[u8]>> StreamingIteratorMut
-    for QueryCaptures<'query, 'tree, T, I>
-{
+impl, I: AsRef<[u8]>> StreamingIteratorMut for QueryCaptures<'_, '_, T, I> {
     fn get_mut(&mut self) -> Option<&mut Self::Item> {
         self.current_match.as_mut()
     }
@@ -3521,7 +3517,7 @@ impl, I: AsRef<[u8]>> QueryCaptures<'_, '_, T, I> {
 }
 
 impl fmt::Debug for QueryMatch<'_, '_> {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(
             f,
             "QueryMatch {{ id: {}, pattern_index: {}, captures: {:?} }}",
@@ -3532,13 +3528,13 @@ impl fmt::Debug for QueryMatch<'_, '_> {
 
 impl TextProvider for F
 where
-    F: FnMut(Node) -> R,
+    F: FnMut(Node<'_>) -> R,
     R: Iterator,
     I: AsRef<[u8]>,
 {
     type I = R;
 
-    fn text(&mut self, node: Node) -> Self::I {
+    fn text(&mut self, node: Node<'_>) -> Self::I {
         (self)(node)
     }
 }
@@ -3546,7 +3542,7 @@ where
 impl<'a> TextProvider<&'a [u8]> for &'a [u8] {
     type I = iter::Once<&'a [u8]>;
 
-    fn text(&mut self, node: Node) -> Self::I {
+    fn text(&mut self, node: Node<'_>) -> Self::I {
         iter::once(&self[node.byte_range()])
     }
 }
@@ -3577,7 +3573,7 @@ impl Point {
 }
 
 impl fmt::Display for Point {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "({}, {})", self.row, self.column)
     }
 }
@@ -3622,8 +3618,8 @@ impl From for Range {
     }
 }
 
-impl From<&'_ InputEdit> for ffi::TSInputEdit {
-    fn from(val: &'_ InputEdit) -> Self {
+impl From<&InputEdit> for ffi::TSInputEdit {
+    fn from(val: &InputEdit) -> Self {
         Self {
             start_byte: val.start_byte as u32,
             old_end_byte: val.old_end_byte as u32,
@@ -3694,13 +3690,13 @@ const fn predicate_error(row: usize, message: String) -> QueryError {
 }
 
 impl fmt::Display for IncludedRangesError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         write!(f, "Incorrect range by index: {}", self.0)
     }
 }
 
 impl fmt::Display for LanguageError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Self::Version(version) => {
                 write!(
@@ -3717,7 +3713,7 @@ impl fmt::Display for LanguageError {
 }
 
 impl fmt::Display for QueryError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let msg = match self.kind {
             QueryErrorKind::Field => "Invalid field name ",
             QueryErrorKind::NodeType => "Invalid node type ",
diff --git a/lib/binding_rust/wasm_language.rs b/lib/binding_rust/wasm_language.rs
index 66df377a..a6baa21f 100644
--- a/lib/binding_rust/wasm_language.rs
+++ b/lib/binding_rust/wasm_language.rs
@@ -133,7 +133,7 @@ impl Drop for WasmStore {
 }
 
 impl fmt::Display for WasmError {
-    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         let kind = match self.kind {
             WasmErrorKind::Parse => "Failed to parse Wasm",
             WasmErrorKind::Compile => "Failed to compile Wasm",
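
The changes above all follow one pattern: with rust_2018_idioms denied at the workspace level (the lint group that contains elided_lifetimes_in_paths), any type that carries a lifetime parameter must make that parameter visible in paths, at least as the anonymous lifetime '_. A minimal sketch of the rule outside this repository (the Wrapper type below is illustrative and not part of the diff; member crates are assumed to inherit the workspace lints via a "[lints] workspace = true" table):

    // Workspace Cargo.toml, as in the first hunk of this diff:
    //
    //     [workspace.lints.rust]
    //     rust_2018_idioms = "deny"
    //
    // Crate-level equivalent for a standalone file:
    #![deny(rust_2018_idioms)]

    use std::fmt;

    struct Wrapper<'a>(&'a str);

    impl fmt::Display for Wrapper<'_> {
        // Writing `f: &mut fmt::Formatter` here would be rejected by
        // `elided_lifetimes_in_paths`; `Formatter<'_>` spells out the
        // elided lifetime without having to name it.
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}", self.0)
        }
    }

    fn main() {
        println!("{}", Wrapper("hello"));
    }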