fix: Naming; Lifetimes; deny(rust_2018_idioms)
- One has to think about lifetimes when a type has one (see the sketch below):
- `<&'a Node<'tree>>::language` now returns `Language<'tree>` instead of
`Language<'a>`, as it should;
- Renamed `struct TreeCursor<'cursor>` to `struct TreeCursor<'tree>`,
to be consistent with its usages and to reduce confusion;
- Removed explicit "outlives" requirements from `QueryMatches`, `QueryCaptures`,
and their impl blocks, because they are inferred;
- TODO: should `'query` be renamed to `'cursor`?
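
A minimal sketch of the two signature changes described above, with bodies omitted (in the code the concrete return type is `LanguageRef<'tree>`):

// The language accessor is now tied to the tree's lifetime rather than to
// the borrow of `self`:
impl<'tree> Node<'tree> {
    pub fn language(&self) -> LanguageRef<'tree> { /* ... */ }
}

// The cursor's lifetime parameter is named after what it actually borrows:
pub struct TreeCursor<'tree>(ffi::TSTreeCursor, PhantomData<&'tree ()>);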
parent 630fa52717
commit ffd777ba65
29 changed files with 169 additions and 158 deletions
@@ -79,7 +79,7 @@ unused_self = "allow"
 used_underscore_items = "allow"
 
 [workspace.lints.rust]
-mismatched_lifetime_syntaxes = "allow"
+rust_2018_idioms = "deny"
 
 [profile.optimize]
 inherits = "release"
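For context: the `rust_2018_idioms` lint group contains `elided_lifetimes_in_paths`, which is what forces the `<'_>` annotations throughout the rest of this diff. A small before/after illustration, using the same `Display for Point` impl that appears near the end of the diff:

use std::fmt;

struct Point { row: usize, column: usize }

impl fmt::Display for Point {
    // Denied: `Formatter` silently hides a lifetime parameter.
    // fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
    // Accepted: the elided lifetime is spelled out with `<'_>`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "({}, {})", self.row, self.column)
    }
}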
@@ -4,7 +4,7 @@ use super::{scope_sequence::ScopeSequence, LOG_ENABLED, LOG_GRAPH_ENABLED};
 use crate::util;
 
 pub fn check_consistent_sizes(tree: &Tree, input: &[u8]) {
-fn check(node: Node, line_offsets: &[usize]) {
+fn check(node: Node<'_>, line_offsets: &[usize]) {
 let start_byte = node.start_byte();
 let end_byte = node.end_byte();
 let start_point = node.start_position();

@@ -1188,7 +1188,7 @@ fn generate_file(
 path: &Path,
 template: &str,
 language_name: &str,
-generate_opts: &GenerateOpts,
+generate_opts: &GenerateOpts<'_>,
 ) -> Result<()> {
 let filename = path.file_name().unwrap().to_str().unwrap();

@@ -11,11 +11,11 @@ pub fn paint(color: Option<impl Into<Color>>, text: &str) -> String {
 struct Logger;
 
 impl Log for Logger {
-fn enabled(&self, _: &Metadata) -> bool {
+fn enabled(&self, _: &Metadata<'_>) -> bool {
 true
 }
 
-fn log(&self, record: &Record) {
+fn log(&self, record: &Record<'_>) {
 match record.level() {
 Level::Error => eprintln!(
 "{} {}",
@@ -284,7 +284,7 @@ pub fn parse_file_at_path(
 path: &Path,
 name: &str,
 max_path_length: usize,
-opts: &mut ParseFileOptions,
+opts: &mut ParseFileOptions<'_>,
 ) -> Result<()> {
 let mut _log_session = None;
 parser.set_language(language)?;

@@ -774,7 +774,7 @@ pub fn render_cst<'a, 'b: 'a>(
 source_code: &[u8],
 tree: &'b Tree,
 cursor: &mut TreeCursor<'a>,
-opts: &ParseFileOptions,
+opts: &ParseFileOptions<'_>,
 out: &mut impl Write,
 ) -> Result<()> {
 let lossy_source_code = String::from_utf8_lossy(source_code);

@@ -841,9 +841,9 @@ fn render_node_text(source: &str) -> String {
 }
 
 fn write_node_text(
-opts: &ParseFileOptions,
+opts: &ParseFileOptions<'_>,
 out: &mut impl Write,
-cursor: &TreeCursor,
+cursor: &TreeCursor<'_>,
 is_named: bool,
 source: &str,
 color: Option<impl Into<Color> + Copy>,

@@ -906,7 +906,7 @@ fn write_node_text(
 Ok(())
 }
 
-fn render_line_feed(source: &str, opts: &ParseFileOptions) -> String {
+fn render_line_feed(source: &str, opts: &ParseFileOptions<'_>) -> String {
 if cfg!(windows) {
 source.replace("\r\n", &paint(opts.parse_theme.line_feed, "\r\n"))
 } else {

@@ -915,8 +915,8 @@ fn render_line_feed(source: &str, opts: &ParseFileOptions) -> String {
 }
 
 fn render_node_range(
-opts: &ParseFileOptions,
-cursor: &TreeCursor,
+opts: &ParseFileOptions<'_>,
+cursor: &TreeCursor<'_>,
 is_named: bool,
 is_multiline: bool,
 total_width: usize,

@@ -952,8 +952,8 @@ fn render_node_range(
 }
 
 fn cst_render_node(
-opts: &ParseFileOptions,
-cursor: &mut TreeCursor,
+opts: &ParseFileOptions<'_>,
+cursor: &TreeCursor<'_>,
 source_code: &[u8],
 out: &mut impl Write,
 total_width: usize,
@@ -14,7 +14,7 @@ pub struct Utf8Point {
 }
 
 impl std::fmt::Display for Utf8Point {
-fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 write!(f, "({}, {})", self.row, self.column)
 }
 }

@@ -603,7 +603,7 @@ impl std::fmt::Display for TestSummary {
 
 pub fn run_tests_at_path(
 parser: &mut Parser,
-opts: &TestOptions,
+opts: &TestOptions<'_>,
 test_summary: &mut TestSummary,
 ) -> Result<()> {
 let test_entry = parse_tests(&opts.path)?;

@@ -814,7 +814,7 @@ impl TestCorrection {
 fn run_tests(
 parser: &mut Parser,
 test_entry: TestEntry,
-opts: &TestOptions,
+opts: &TestOptions<'_>,
 test_summary: &mut TestSummary,
 corrected_entries: &mut Vec<TestCorrection>,
 is_root: bool,

@@ -1070,7 +1070,9 @@ fn run_tests(
 
 let mut ran_test_in_group = false;
 
-let matches_filter = |name: &str, file_name: &Option<String>, opts: &TestOptions| {
+let matches_filter = |name: &str,
+file_name: &Option<String>,
+opts: &TestOptions<'_>| {
 if let (Some(test_file_path), Some(filter_file_name)) = (file_name, &opts.file_name)
 {
 if !filter_file_name.eq(test_file_path) {
@@ -22,7 +22,7 @@ pub struct Failure {
 impl std::error::Error for Failure {}
 
 impl std::fmt::Display for Failure {
-fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 write!(
 f,
 "Failure - row: {}, column: {}, expected highlight '{}', actual highlights: ",

@@ -21,7 +21,7 @@ pub struct Failure {
 impl std::error::Error for Failure {}
 
 impl std::fmt::Display for Failure {
-fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 write!(
 f,
 "Failure - row: {}, column: {}, expected tag: '{}', actual tag: ",

@@ -75,7 +75,7 @@ impl Pattern {
 (pattern, pattern_start..pattern_end)
 }
 
-fn random_pattern_for_node(cursor: &mut TreeCursor, rng: &mut impl Rng) -> Self {
+fn random_pattern_for_node(cursor: &mut TreeCursor<'_>, rng: &mut impl Rng) -> Self {
 let node = cursor.node();
 
 // Sometimes specify the node's type, sometimes use a wildcard.

@@ -225,7 +225,7 @@ impl Pattern {
 }
 
 // Find every matching combination of child patterns and child nodes.
-let mut finished_matches = Vec::<Match>::new();
+let mut finished_matches = Vec::<Match<'_, 'tree>>::new();
 if cursor.goto_first_child() {
 let mut match_states = vec![(0, mat)];
 loop {
@@ -306,7 +306,7 @@ impl Ord for Match<'_, '_> {
 }
 }
 
-fn compare_depth_first(a: Node, b: Node) -> Ordering {
+fn compare_depth_first(a: Node<'_>, b: Node<'_>) -> Ordering {
 let a = a.byte_range();
 let b = b.byte_range();
 a.start.cmp(&b.start).then_with(|| b.end.cmp(&a.end))

@@ -1219,7 +1219,7 @@ private:
 );
 }
 
-fn get_all_nodes(tree: &Tree) -> Vec<Node> {
+fn get_all_nodes(tree: &Tree) -> Vec<Node<'_>> {
 let mut result = Vec::new();
 let mut visited_children = false;
 let mut cursor = tree.walk();

@@ -44,7 +44,7 @@ pub fn test_with_seed(args: TokenStream, input: TokenStream) -> TokenStream {
 }
 
 impl Parse for Args {
-fn parse(input: ParseStream) -> syn::Result<Self> {
+fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
 let mut retry = None;
 let mut seed = None;
 let mut seed_fn = None;

@@ -3971,7 +3971,7 @@ fn test_query_text_callback_returns_chunks() {
 parser.set_language(&language).unwrap();
 let tree = parser.parse(source, None).unwrap();
 let mut cursor = QueryCursor::new();
-let captures = cursor.captures(&query, tree.root_node(), |node: Node| {
+let captures = cursor.captures(&query, tree.root_node(), |node: Node<'_>| {
 chunks_in_range(node.byte_range())
 });
@@ -119,7 +119,7 @@ fn get_following_tokens(
 syntax_grammar: &SyntaxGrammar,
 lexical_grammar: &LexicalGrammar,
 inlines: &InlinedProductionMap,
-builder: &ParseItemSetBuilder,
+builder: &ParseItemSetBuilder<'_>,
 ) -> Vec<TokenSet> {
 let mut result = vec![TokenSet::new(); lexical_grammar.variables.len()];
 let productions = syntax_grammar

@@ -160,8 +160,8 @@ fn populate_error_state(
 parse_table: &mut ParseTable,
 syntax_grammar: &SyntaxGrammar,
 lexical_grammar: &LexicalGrammar,
-coincident_token_index: &CoincidentTokenIndex,
-token_conflict_map: &TokenConflictMap,
+coincident_token_index: &CoincidentTokenIndex<'_>,
+token_conflict_map: &TokenConflictMap<'_>,
 keywords: &TokenSet,
 ) {
 let state = &mut parse_table.states[0];

@@ -323,8 +323,8 @@ fn identify_keywords(
 lexical_grammar: &LexicalGrammar,
 parse_table: &ParseTable,
 word_token: Option<Symbol>,
-token_conflict_map: &TokenConflictMap,
-coincident_token_index: &CoincidentTokenIndex,
+token_conflict_map: &TokenConflictMap<'_>,
+coincident_token_index: &CoincidentTokenIndex<'_>,
 ) -> TokenSet {
 if word_token.is_none() {
 return TokenSet::new();

@@ -429,7 +429,7 @@ fn identify_keywords(
 fn mark_fragile_tokens(
 parse_table: &mut ParseTable,
 lexical_grammar: &LexicalGrammar,
-token_conflict_map: &TokenConflictMap,
+token_conflict_map: &TokenConflictMap<'_>,
 ) {
 let n = lexical_grammar.variables.len();
 let mut valid_tokens_mask = Vec::with_capacity(n);

@@ -543,7 +543,7 @@ fn report_state_info<'a>(
 }
 }
 
-fn all_chars_are_alphabetical(cursor: &NfaCursor) -> bool {
+fn all_chars_are_alphabetical(cursor: &NfaCursor<'_>) -> bool {
 cursor.transition_chars().all(|(chars, is_sep)| {
 if is_sep {
 true

@@ -27,8 +27,8 @@ pub fn build_lex_table(
 syntax_grammar: &SyntaxGrammar,
 lexical_grammar: &LexicalGrammar,
 keywords: &TokenSet,
-coincident_token_index: &CoincidentTokenIndex,
-token_conflict_map: &TokenConflictMap,
+coincident_token_index: &CoincidentTokenIndex<'_>,
+token_conflict_map: &TokenConflictMap<'_>,
 ) -> LexTables {
 let keyword_lex_table = if syntax_grammar.word_token.is_some() {
 let mut builder = LexTableBuilder::new(lexical_grammar);

@@ -284,8 +284,8 @@ fn merge_token_set(
 tokens: &mut TokenSet,
 other: &TokenSet,
 lexical_grammar: &LexicalGrammar,
-token_conflict_map: &TokenConflictMap,
-coincident_token_index: &CoincidentTokenIndex,
+token_conflict_map: &TokenConflictMap<'_>,
+coincident_token_index: &CoincidentTokenIndex<'_>,
 ) -> bool {
 for i in 0..lexical_grammar.variables.len() {
 let symbol = Symbol::terminal(i);
@@ -114,7 +114,7 @@ pub struct AmbiguousExtraError {
 }
 
 impl std::fmt::Display for ConflictError {
-fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 for symbol in &self.symbol_sequence {
 write!(f, " {symbol}")?;
 }

@@ -171,7 +171,7 @@ impl std::fmt::Display for ConflictError {
 }
 
 impl std::fmt::Display for Interpretation {
-fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 for symbol in &self.preceding_symbols {
 write!(f, " {symbol}")?;
 }

@@ -191,7 +191,7 @@ impl std::fmt::Display for Interpretation {
 }
 
 impl std::fmt::Display for Resolution {
-fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 match self {
 Self::Precedence { symbols } => {
 write!(f, "Specify a higher precedence in ")?;

@@ -227,7 +227,7 @@ impl std::fmt::Display for Resolution {
 }
 
 impl std::fmt::Display for AmbiguousExtraError {
-fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
+fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 for (i, symbol) in self.parent_symbols.iter().enumerate() {
 if i > 0 {
 write!(f, ", ")?;

@@ -695,7 +695,7 @@ impl<'a> ParseTableBuilder<'a> {
 
 fn handle_conflict(
 &mut self,
-item_set: &ParseItemSet,
+item_set: &ParseItemSet<'_>,
 state_id: ParseStateId,
 preceding_symbols: &SymbolSequence,
 preceding_auxiliary_symbols: &[AuxiliarySymbolInfo],

@@ -917,7 +917,7 @@ impl<'a> ParseTableBuilder<'a> {
 shift_items.sort_unstable();
 reduce_items.sort_unstable();
 
-let get_rule_names = |items: &[&ParseItem]| -> Vec<String> {
+let get_rule_names = |items: &[&ParseItem<'_>]| -> Vec<String> {
 let mut last_rule_id = None;
 let mut result = Vec::with_capacity(items.len());
 for item in items {

@@ -1030,7 +1030,7 @@ impl<'a> ParseTableBuilder<'a> {
 
 fn get_auxiliary_node_info(
 &self,
-item_set: &ParseItemSet,
+item_set: &ParseItemSet<'_>,
 symbol: Symbol,
 ) -> AuxiliarySymbolInfo {
 let parent_symbols = item_set

@@ -1053,7 +1053,7 @@ impl<'a> ParseTableBuilder<'a> {
 }
 }
 
-fn get_production_id(&mut self, item: &ParseItem) -> ProductionInfoId {
+fn get_production_id(&mut self, item: &ParseItem<'_>) -> ProductionInfoId {
 let mut production_info = ProductionInfo {
 alias_sequence: Vec::new(),
 field_map: BTreeMap::new(),
@@ -56,7 +56,7 @@ impl<'a> CoincidentTokenIndex<'a> {
 }
 
 impl fmt::Debug for CoincidentTokenIndex<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 writeln!(f, "CoincidentTokenIndex {{")?;
 
 writeln!(f, " entries: {{")?;

@@ -185,7 +185,7 @@ impl<'a> ParseItemSet<'a> {
 }
 
 impl fmt::Display for ParseItemDisplay<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
 if self.0.is_augmented() {
 write!(f, "START →")?;
 } else {

@@ -281,7 +281,7 @@ fn display_variable_name(source: &str) -> String {
 }
 
 impl fmt::Display for TokenSetDisplay<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
 write!(f, "[")?;
 for (i, symbol) in self.0.iter().enumerate() {
 if i > 0 {

@@ -306,7 +306,7 @@ impl fmt::Display for TokenSetDisplay<'_> {
 }
 
 impl fmt::Display for ParseItemSetDisplay<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
 for entry in &self.0.entries {
 write!(
 f,

@@ -342,7 +342,7 @@ impl<'a> ParseItemSetBuilder<'a> {
 }
 
 impl fmt::Debug for ParseItemSetBuilder<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 writeln!(f, "ParseItemSetBuilder {{")?;
 
 writeln!(f, " first_sets: {{")?;
@@ -19,7 +19,7 @@ pub fn minimize_parse_table(
 syntax_grammar: &SyntaxGrammar,
 lexical_grammar: &LexicalGrammar,
 simple_aliases: &AliasMap,
-token_conflict_map: &TokenConflictMap,
+token_conflict_map: &TokenConflictMap<'_>,
 keywords: &TokenSet,
 optimizations: OptLevel,
 ) {

@@ -146,7 +146,7 @@ impl<'a> TokenConflictMap<'a> {
 }
 
 impl fmt::Debug for TokenConflictMap<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 writeln!(f, "TokenConflictMap {{")?;
 
 let syntax_grammar = SyntaxGrammar::default();

@@ -205,7 +205,7 @@ const fn matrix_index(variable_count: usize, i: usize, j: usize) -> usize {
 variable_count * i + j
 }
 
-fn get_starting_chars(cursor: &mut NfaCursor, grammar: &LexicalGrammar) -> Vec<CharacterSet> {
+fn get_starting_chars(cursor: &mut NfaCursor<'_>, grammar: &LexicalGrammar) -> Vec<CharacterSet> {
 let mut result = Vec::with_capacity(grammar.variables.len());
 for variable in &grammar.variables {
 cursor.reset(vec![variable.start_state]);

@@ -237,7 +237,7 @@ fn get_following_chars(
 }
 
 fn compute_conflict_status(
-cursor: &mut NfaCursor,
+cursor: &mut NfaCursor<'_>,
 grammar: &LexicalGrammar,
 following_chars: &[CharacterSet],
 i: usize,

@@ -264,7 +264,7 @@ impl InlinedProductionMap {
 }
 
 impl fmt::Display for PrecedenceEntry {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
 Self::Name(n) => write!(f, "'{n}'"),
 Self::Symbol(s) => write!(f, "$.{s}"),

@@ -409,7 +409,7 @@ impl PartialOrd for CharacterSet {
 }
 
 impl fmt::Debug for CharacterSet {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(f, "CharacterSet [")?;
 let mut set = self.clone();
 if self.contains(char::MAX) {

@@ -440,7 +440,7 @@ impl Nfa {
 }
 
 impl fmt::Debug for Nfa {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 writeln!(f, "Nfa {{ states: {{")?;
 for (i, state) in self.states.iter().enumerate() {
 writeln!(f, " {i}: {state:?},")?;
@@ -15,11 +15,11 @@ use super::{IoError, JSError, JSResult};
 const DSL: &[u8] = include_bytes!("dsl.js");
 
 trait JSResultExt<T> {
-fn or_js_error(self, ctx: &Ctx) -> JSResult<T>;
+fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult<T>;
 }
 
 impl<T> JSResultExt<T> for Result<T, rquickjs::Error> {
-fn or_js_error(self, ctx: &Ctx) -> JSResult<T> {
+fn or_js_error(self, ctx: &Ctx<'_>) -> JSResult<T> {
 match self {
 Ok(v) => Ok(v),
 Err(rquickjs::Error::Exception) => Err(format_js_exception(ctx.catch())),

@@ -28,7 +28,7 @@ impl<T> JSResultExt<T> for Result<T, rquickjs::Error> {
 }
 }
 
-fn format_js_exception(v: Value) -> JSError {
+fn format_js_exception(v: Value<'_>) -> JSError {
 let Some(exception) = v.into_exception() else {
 return JSError::QuickJS("Expected a JS exception".to_string());
 };

@@ -215,11 +215,11 @@ fn try_resolve_path(path: &Path) -> rquickjs::Result<PathBuf> {
 }
 
 #[allow(clippy::needless_pass_by_value)]
-fn require_from_module<'a>(
-ctx: Ctx<'a>,
+fn require_from_module<'js>(
+ctx: Ctx<'js>,
 module_path: String,
 from_module: &str,
-) -> rquickjs::Result<Value<'a>> {
+) -> rquickjs::Result<Value<'js>> {
 let current_module = PathBuf::from(from_module);
 let current_dir = if current_module.is_file() {
 current_module.parent().unwrap_or(Path::new("."))

@@ -234,13 +234,13 @@ fn require_from_module<'a>(
 load_module_from_content(&ctx, &resolved_path, &contents)
 }
 
-fn load_module_from_content<'a>(
-ctx: &Ctx<'a>,
+fn load_module_from_content<'js>(
+ctx: &Ctx<'js>,
 path: &Path,
 contents: &str,
-) -> rquickjs::Result<Value<'a>> {
+) -> rquickjs::Result<Value<'js>> {
 if path.extension().is_some_and(|ext| ext == "json") {
-return ctx.eval::<Value, _>(format!("JSON.parse({contents:?})"));
+return ctx.eval::<Value<'js>, _>(format!("JSON.parse({contents:?})"));
 }
 
 let exports = Object::new(ctx.clone())?;

@@ -256,7 +256,7 @@ fn load_module_from_content<'a>(
 let module_path = filename.clone();
 let require = Function::new(
 ctx.clone(),
-move |ctx_inner: Ctx<'a>, target_path: String| -> rquickjs::Result<Value<'a>> {
+move |ctx_inner: Ctx<'js>, target_path: String| -> rquickjs::Result<Value<'js>> {
 require_from_module(ctx_inner, target_path, &module_path)
 },
 )?;

@@ -264,8 +264,8 @@ fn load_module_from_content<'a>(
 let wrapper =
 format!("(function(exports, require, module, __filename, __dirname) {{ {contents} }})");
 
-let module_func = ctx.eval::<Function, _>(wrapper)?;
-module_func.call::<_, Value>((exports, require, module_obj.clone(), filename, dirname))?;
+let module_func = ctx.eval::<Function<'js>, _>(wrapper)?;
+module_func.call::<_, Value<'js>>((exports, require, module_obj.clone(), filename, dirname))?;
 
 module_obj.get("exports")
 }

@@ -318,7 +318,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
 let grammar_path_string = grammar_path.to_string_lossy().to_string();
 let main_require = Function::new(
 ctx.clone(),
-move |ctx_inner, target_path: String| -> rquickjs::Result<Value> {
+move |ctx_inner, target_path: String| -> rquickjs::Result<Value<'_>> {
 require_from_module(ctx_inner, target_path, &grammar_path_string)
 },
 )?;

@@ -328,7 +328,7 @@ pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
 promise.finish::<()>().or_js_error(&ctx)?;
 
 let grammar_json = ctx
-.eval::<rquickjs::String, _>("globalThis.output")
+.eval::<rquickjs::String<'_>, _>("globalThis.output")
 .map(|s| s.to_string())
-.or_js_error(&ctx)?
+.or_js_error(&ctx)?;

@@ -437,8 +437,8 @@ mod tests {
 const pkg = require('./package.json');
 module.exports = grammar({
 name: 'json_test',
-rules: {
-source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
+rules: {
+source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
 }
 });
 ",
@@ -189,7 +189,7 @@ struct HighlightIterLayer<'a> {
 depth: usize,
 }
 
-pub struct _QueryCaptures<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> {
+pub struct _QueryCaptures<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> {
 ptr: *mut ffi::TSQueryCursor,
 query: &'query Query,
 text_provider: T,

@@ -225,7 +225,7 @@ impl<'tree> _QueryMatch<'_, 'tree> {
 }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
+impl<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
 for _QueryCaptures<'query, 'tree, T, I>
 {
 type Item = (QueryMatch<'query, 'tree>, usize);

@@ -240,7 +240,10 @@ impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> Iterator
 m.as_mut_ptr(),
 core::ptr::addr_of_mut!(capture_index),
 ) {
-let result = std::mem::transmute::<_QueryMatch, QueryMatch>(_QueryMatch::new(
+let result = std::mem::transmute::<
+_QueryMatch<'query, 'tree>,
+QueryMatch<'query, 'tree>,
+>(_QueryMatch::new(
 &m.assume_init(),
 self.ptr,
 ));

@@ -594,6 +597,7 @@ impl<'a> HighlightIterLayer<'a> {
 }
 }
 
+// SAFETY:
 // The `captures` iterator borrows the `Tree` and the `QueryCursor`, which
 // prevents them from being moved. But both of these values are really just
 // pointers, so it's actually ok to move them.

@@ -601,12 +605,18 @@ impl<'a> HighlightIterLayer<'a> {
 let cursor_ref = unsafe {
 mem::transmute::<&mut QueryCursor, &'static mut QueryCursor>(&mut cursor)
 };
-let captures = unsafe {
-std::mem::transmute::<QueryCaptures<_, _>, _QueryCaptures<_, _>>(
-cursor_ref.captures(&config.query, tree_ref.root_node(), source),
-)
-}
-.peekable();
+let captures =
+unsafe {
+std::mem::transmute::<
+QueryCaptures<'_, '_, _, _>,
+_QueryCaptures<'_, '_, _, _>,
+>(cursor_ref.captures(
+&config.query,
+tree_ref.root_node(),
+source,
+))
+}
+.peekable();
 
 result.push(HighlightIterLayer {
 highlight_end_stack: Vec::new(),

@@ -648,7 +658,7 @@ impl<'a> HighlightIterLayer<'a> {
 // of their children.
 fn intersect_ranges(
 parent_ranges: &[Range],
-nodes: &[Node],
+nodes: &[Node<'_>],
 includes_children: bool,
 ) -> Vec<Range> {
 let mut cursor = nodes[0].walk();
@@ -765,7 +765,7 @@ impl Loader {
 }
 
 #[must_use]
-pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration, &Path)> {
+pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration<'static>, &Path)> {
 self.language_configurations
 .iter()
 .map(|c| (c, self.languages_by_id[c.language_id].0.as_ref()))

@@ -775,7 +775,7 @@ impl Loader {
 pub fn language_configuration_for_scope(
 &self,
 scope: &str,
-) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
+) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
 for configuration in &self.language_configurations {
 if configuration.scope.as_ref().is_some_and(|s| s == scope) {
 let language = self.language_for_id(configuration.language_id)?;

@@ -788,7 +788,7 @@ impl Loader {
 pub fn language_configuration_for_first_line_regex(
 &self,
 path: &Path,
-) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
+) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
 self.language_configuration_ids_by_first_line_regex
 .iter()
 .try_fold(None, |_, (regex, ids)| {

@@ -817,7 +817,7 @@ impl Loader {
 pub fn language_configuration_for_file_name(
 &self,
 path: &Path,
-) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
+) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
 // Find all the language configurations that match this file name
 // or a suffix of the file name.
 let configuration_ids = path

@@ -889,7 +889,7 @@ impl Loader {
 pub fn language_configuration_for_injection_string(
 &self,
 string: &str,
-) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
+) -> LoaderResult<Option<(Language, &LanguageConfiguration<'static>)>> {
 let mut best_match_length = 0;
 let mut best_match_position = None;
 for (i, configuration) in self.language_configurations.iter().enumerate() {

@@ -915,7 +915,7 @@ impl Loader {
 
 pub fn language_for_configuration(
 &self,
-configuration: &LanguageConfiguration,
+configuration: &LanguageConfiguration<'_>,
 ) -> LoaderResult<Language> {
 self.language_for_id(configuration.language_id)
 }

@@ -946,7 +946,7 @@ impl Loader {
 self.load_language_at_path(config).map(|_| ())
 }
 
-pub fn load_language_at_path(&self, mut config: CompileConfig) -> LoaderResult<Language> {
+pub fn load_language_at_path(&self, mut config: CompileConfig<'_>) -> LoaderResult<Language> {
 let grammar_path = config.src_path.join("grammar.json");
 config.name = Self::grammar_json_name(&grammar_path)?;
 self.load_language_at_path_with_name(config)

@@ -954,7 +954,7 @@ impl Loader {
 
 pub fn load_language_at_path_with_name(
 &self,
-mut config: CompileConfig,
+mut config: CompileConfig<'_>,
 ) -> LoaderResult<Language> {
 let mut lib_name = config.name.clone();
 let language_fn_name = format!("tree_sitter_{}", config.name.replace('-', "_"));

@@ -1128,7 +1128,7 @@ impl Loader {
 })?;
 let language = unsafe {
 let language_fn = library
-.get::<Symbol<unsafe extern "C" fn() -> Language>>(function_name.as_bytes())
+.get::<Symbol<'_, unsafe extern "C" fn() -> Language>>(function_name.as_bytes())
 .map_err(|e| {
 LoaderError::Symbol(SymbolError {
 error: e,

@@ -1144,7 +1144,7 @@ impl Loader {
 
 fn compile_parser_to_dylib(
 &self,
-config: &CompileConfig,
+config: &CompileConfig<'_>,
 lock_file: &fs::File,
 lock_path: &Path,
 ) -> LoaderResult<()> {

@@ -1534,7 +1534,9 @@ impl Loader {
 }
 
 #[must_use]
-pub fn get_language_configuration_in_current_path(&self) -> Option<&LanguageConfiguration> {
+pub fn get_language_configuration_in_current_path(
+&self,
+) -> Option<&LanguageConfiguration<'static>> {
 self.language_configuration_in_current_path
 .map(|i| &self.language_configurations[i])
 }

@@ -1543,7 +1545,7 @@ impl Loader {
 &mut self,
 parser_path: &Path,
 set_current_path_config: bool,
-) -> LoaderResult<&[LanguageConfiguration]> {
+) -> LoaderResult<&[LanguageConfiguration<'static>]> {
 let initial_language_configuration_count = self.language_configurations.len();
 
 match TreeSitterJSON::from_file(parser_path) {
@@ -313,6 +313,7 @@ impl TagsContext {
 )
 .ok_or(Error::Cancelled)?;
 
+// SAFETY:
 // The `matches` iterator borrows the `Tree`, which prevents it from being
 // moved. But the tree is really just a pointer, so it's actually ok to
 // move it.
@@ -231,7 +231,7 @@ impl<'a> ParseOptions<'a> {
 /// This is useful when you need to reuse parse options multiple times, e.g., calling
 /// [`Parser::parse_with_options`] multiple times with the same options.
 #[must_use]
-pub fn reborrow(&mut self) -> ParseOptions {
+pub fn reborrow(&mut self) -> ParseOptions<'_> {
 ParseOptions {
 progress_callback: match &mut self.progress_callback {
 Some(cb) => Some(*cb),

@@ -266,7 +266,7 @@ impl<'a> QueryCursorOptions<'a> {
 /// This is useful when you need to reuse query cursor options multiple times, e.g., calling
 /// [`QueryCursor::matches`] multiple times with the same options.
 #[must_use]
-pub fn reborrow(&mut self) -> QueryCursorOptions {
+pub fn reborrow(&mut self) -> QueryCursorOptions<'_> {
 QueryCursorOptions {
 progress_callback: match &mut self.progress_callback {
 Some(cb) => Some(*cb),
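As the doc comments say, `reborrow` lets a single options value be handed to several calls; with the return type now written as `ParseOptions<'_>`, each call borrows the options only for its own duration. A rough usage sketch (parser, callback, and options construction are placeholders, not taken from this diff):

// Hypothetical setup: `parser` is a configured Parser, `opts` a ParseOptions
// value, and `read_chunk` a FnMut(usize, Point) -> impl AsRef<[u8]> callback.
let first = parser.parse_with_options(&mut read_chunk, None, Some(opts.reborrow()));
let second = parser.parse_with_options(&mut read_chunk, first.as_ref(), Some(opts.reborrow()));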
@@ -283,7 +283,7 @@ impl Drop for QueryCursorOptionsDrop {
 unsafe {
 if !(*self.0).payload.is_null() {
 drop(Box::from_raw(
-(*self.0).payload.cast::<QueryProgressCallback>(),
+(*self.0).payload.cast::<QueryProgressCallback<'_>>(),
 ));
 }
 drop(Box::from_raw(self.0));

@@ -317,7 +317,7 @@ pub trait Decode {
 
 /// A stateful object for walking a syntax [`Tree`] efficiently.
 #[doc(alias = "TSTreeCursor")]
-pub struct TreeCursor<'cursor>(ffi::TSTreeCursor, PhantomData<&'cursor ()>);
+pub struct TreeCursor<'tree>(ffi::TSTreeCursor, PhantomData<&'tree ()>);
 
 /// A set of patterns that match nodes in a syntax tree.
 #[doc(alias = "TSQuery")]
@@ -392,7 +392,7 @@ pub struct QueryMatch<'cursor, 'tree> {
 }
 
 /// A sequence of [`QueryMatch`]es associated with a given [`QueryCursor`].
-pub struct QueryMatches<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> {
+pub struct QueryMatches<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> {
 ptr: *mut ffi::TSQueryCursor,
 query: &'query Query,
 text_provider: T,

@@ -407,7 +407,7 @@ pub struct QueryMatches<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]
 ///
 /// During iteration, each element contains a [`QueryMatch`] and index. The index can
 /// be used to access the new capture inside of the [`QueryMatch::captures`]'s [`captures`].
-pub struct QueryCaptures<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> {
+pub struct QueryCaptures<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> {
 ptr: *mut ffi::TSQueryCursor,
 query: &'query Query,
 text_provider: T,
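Dropping the explicit `'tree: 'query` bounds on these structs is safe because outlives requirements are inferred from the field types of a type definition. A self-contained illustration with hypothetical types (not the crate's own):

struct Wrapper<'tree>(&'tree str);

struct Matches<'query, 'tree> {
    // This field type only makes sense if `'tree: 'query`, so the compiler
    // infers that bound; writing it out is redundant.
    current: &'query Wrapper<'tree>,
}

fn main() {
    let text = String::from("tree");
    let wrapper = Wrapper(&text);
    let matches = Matches { current: &wrapper };
    println!("{}", matches.current.0);
}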
@@ -423,7 +423,7 @@ where
 I: AsRef<[u8]>,
 {
 type I: Iterator<Item = I>;
-fn text(&mut self, node: Node) -> Self::I;
+fn text(&mut self, node: Node<'_>) -> Self::I;
 }
 
 /// A particular [`Node`] that has been captured with a particular name within a

@@ -755,17 +755,17 @@ impl Parser {
 /// Get the parser's current logger.
 #[doc(alias = "ts_parser_logger")]
 #[must_use]
-pub fn logger(&self) -> Option<&Logger> {
+pub fn logger(&self) -> Option<&Logger<'_>> {
 let logger = unsafe { ffi::ts_parser_logger(self.0.as_ptr()) };
-unsafe { logger.payload.cast::<Logger>().as_ref() }
+unsafe { logger.payload.cast::<Logger<'_>>().as_ref() }
 }
 
 /// Set the logging callback that the parser should use during parsing.
 #[doc(alias = "ts_parser_set_logger")]
-pub fn set_logger(&mut self, logger: Option<Logger>) {
+pub fn set_logger(&mut self, logger: Option<Logger<'_>>) {
 let prev_logger = unsafe { ffi::ts_parser_logger(self.0.as_ptr()) };
 if !prev_logger.payload.is_null() {
-drop(unsafe { Box::from_raw(prev_logger.payload.cast::<Logger>()) });
+drop(unsafe { Box::from_raw(prev_logger.payload.cast::<Logger<'_>>()) });
 }
 
 let c_logger = if let Some(logger) = logger {

@@ -776,7 +776,7 @@ impl Parser {
 c_log_type: ffi::TSLogType,
 c_message: *const c_char,
 ) {
-let callback = payload.cast::<Logger>().as_mut().unwrap();
+let callback = payload.cast::<Logger<'_>>().as_mut().unwrap();
 if let Ok(message) = CStr::from_ptr(c_message).to_str() {
 let log_type = if c_log_type == ffi::TSLogTypeParse {
 LogType::Parse

@@ -878,7 +878,7 @@ impl Parser {
 &mut self,
 callback: &mut F,
 old_tree: Option<&Tree>,
-options: Option<ParseOptions>,
+options: Option<ParseOptions<'_>>,
 ) -> Option<Tree> {
 type Payload<'a, F, T> = (&'a mut F, Option<T>);

@@ -886,7 +886,7 @@ impl Parser {
 unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
 let callback = (*state)
 .payload
-.cast::<ParseProgressCallback>()
+.cast::<ParseProgressCallback<'_>>()
 .as_mut()
 .unwrap();
 match callback(&ParseState::from_raw(state)) {

@@ -902,7 +902,7 @@ impl Parser {
 position: ffi::TSPoint,
 bytes_read: *mut u32,
 ) -> *const c_char {
-let (callback, text) = payload.cast::<Payload<F, T>>().as_mut().unwrap();
+let (callback, text) = payload.cast::<Payload<'_, F, T>>().as_mut().unwrap();
 *text = Some(callback(byte_offset as usize, position.into()));
 let slice = text.as_ref().unwrap().as_ref();
 *bytes_read = slice.len() as u32;

@@ -934,7 +934,7 @@ impl Parser {
 // 1. A reference to the rust `callback`.
 // 2. The text that was returned from the previous call to `callback`. This allows the
 // callback to return owned values like vectors.
-let mut payload: Payload<F, T> = (callback, None);
+let mut payload: Payload<'_, F, T> = (callback, None);
 
 let c_input = ffi::TSInput {
 payload: ptr::addr_of_mut!(payload).cast::<c_void>(),

@@ -992,14 +992,14 @@ impl Parser {
 &mut self,
 callback: &mut F,
 old_tree: Option<&Tree>,
-options: Option<ParseOptions>,
+options: Option<ParseOptions<'_>>,
 ) -> Option<Tree> {
 type Payload<'a, F, T> = (&'a mut F, Option<T>);
 
 unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
 let callback = (*state)
 .payload
-.cast::<ParseProgressCallback>()
+.cast::<ParseProgressCallback<'_>>()
 .as_mut()
 .unwrap();
 match callback(&ParseState::from_raw(state)) {

@@ -1015,7 +1015,7 @@ impl Parser {
 position: ffi::TSPoint,
 bytes_read: *mut u32,
 ) -> *const c_char {
-let (callback, text) = payload.cast::<Payload<F, T>>().as_mut().unwrap();
+let (callback, text) = payload.cast::<Payload<'_, F, T>>().as_mut().unwrap();
 *text = Some(callback(
 (byte_offset / 2) as usize,
 Point {

@@ -1053,7 +1053,7 @@ impl Parser {
 // 1. A reference to the rust `callback`.
 // 2. The text that was returned from the previous call to `callback`. This allows the
 // callback to return owned values like vectors.
-let mut payload: Payload<F, T> = (callback, None);
+let mut payload: Payload<'_, F, T> = (callback, None);
 
 let c_input = ffi::TSInput {
 payload: core::ptr::addr_of_mut!(payload).cast::<c_void>(),

@@ -1111,7 +1111,7 @@ impl Parser {
 &mut self,
 callback: &mut F,
 old_tree: Option<&Tree>,
-options: Option<ParseOptions>,
+options: Option<ParseOptions<'_>>,
 ) -> Option<Tree> {
 type Payload<'a, F, T> = (&'a mut F, Option<T>);

@@ -1119,7 +1119,7 @@ impl Parser {
 unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
 let callback = (*state)
 .payload
-.cast::<ParseProgressCallback>()
+.cast::<ParseProgressCallback<'_>>()
 .as_mut()
 .unwrap();
 match callback(&ParseState::from_raw(state)) {

@@ -1135,7 +1135,7 @@ impl Parser {
 position: ffi::TSPoint,
 bytes_read: *mut u32,
 ) -> *const c_char {
-let (callback, text) = payload.cast::<Payload<F, T>>().as_mut().unwrap();
+let (callback, text) = payload.cast::<Payload<'_, F, T>>().as_mut().unwrap();
 *text = Some(callback(
 (byte_offset / 2) as usize,
 Point {

@@ -1173,7 +1173,7 @@ impl Parser {
 // 1. A reference to the rust `callback`.
 // 2. The text that was returned from the previous call to `callback`. This allows the
 // callback to return owned values like vectors.
-let mut payload: Payload<F, T> = (callback, None);
+let mut payload: Payload<'_, F, T> = (callback, None);
 
 let c_input = ffi::TSInput {
 payload: core::ptr::addr_of_mut!(payload).cast::<c_void>(),

@@ -1215,14 +1215,14 @@ impl Parser {
 &mut self,
 callback: &mut F,
 old_tree: Option<&Tree>,
-options: Option<ParseOptions>,
+options: Option<ParseOptions<'_>>,
 ) -> Option<Tree> {
 type Payload<'a, F, T> = (&'a mut F, Option<T>);
 
 unsafe extern "C" fn progress(state: *mut ffi::TSParseState) -> bool {
 let callback = (*state)
 .payload
-.cast::<ParseProgressCallback>()
+.cast::<ParseProgressCallback<'_>>()
 .as_mut()
 .unwrap();
 match callback(&ParseState::from_raw(state)) {

@@ -1251,7 +1251,7 @@ impl Parser {
 position: ffi::TSPoint,
 bytes_read: *mut u32,
 ) -> *const c_char {
-let (callback, text) = payload.cast::<Payload<F, T>>().as_mut().unwrap();
+let (callback, text) = payload.cast::<Payload<'_, F, T>>().as_mut().unwrap();
 *text = Some(callback(byte_offset as usize, position.into()));
 let slice = text.as_ref().unwrap().as_ref();
 *bytes_read = slice.len() as u32;

@@ -1283,7 +1283,7 @@ impl Parser {
 // 1. A reference to the rust `callback`.
 // 2. The text that was returned from the previous call to `callback`. This allows the
 // callback to return owned values like vectors.
-let mut payload: Payload<F, T> = (callback, None);
+let mut payload: Payload<'_, F, T> = (callback, None);
 
 let c_input = ffi::TSInput {
 payload: core::ptr::addr_of_mut!(payload).cast::<c_void>(),
@@ -1395,7 +1395,7 @@ impl Tree {
 /// Get the root node of the syntax tree.
 #[doc(alias = "ts_tree_root_node")]
 #[must_use]
-pub fn root_node(&self) -> Node {
+pub fn root_node(&self) -> Node<'_> {
 Node::new(unsafe { ffi::ts_tree_root_node(self.0.as_ptr()) }).unwrap()
 }

@@ -1403,7 +1403,7 @@ impl Tree {
 /// forward by the given offset.
 #[doc(alias = "ts_tree_root_node_with_offset")]
 #[must_use]
-pub fn root_node_with_offset(&self, offset_bytes: usize, offset_extent: Point) -> Node {
+pub fn root_node_with_offset(&self, offset_bytes: usize, offset_extent: Point) -> Node<'_> {
 Node::new(unsafe {
 ffi::ts_tree_root_node_with_offset(
 self.0.as_ptr(),

@@ -1417,7 +1417,7 @@ impl Tree {
 /// Get the language that was used to parse the syntax tree.
 #[doc(alias = "ts_tree_language")]
 #[must_use]
-pub fn language(&self) -> LanguageRef {
+pub fn language(&self) -> LanguageRef<'_> {
 LanguageRef(
 unsafe { ffi::ts_tree_language(self.0.as_ptr()) },
 PhantomData,

@@ -1437,7 +1437,7 @@ impl Tree {
 
 /// Create a new [`TreeCursor`] starting from the root of the tree.
 #[must_use]
-pub fn walk(&self) -> TreeCursor {
+pub fn walk(&self) -> TreeCursor<'_> {
 self.root_node().walk()
 }

@@ -1507,7 +1507,7 @@ impl Tree {
 }
 
 impl fmt::Debug for Tree {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(f, "{{Tree {:?}}}", self.root_node())
 }
 }

@@ -1581,7 +1581,7 @@ impl<'tree> Node<'tree> {
 /// Get the [`Language`] that was used to parse this node's syntax tree.
 #[doc(alias = "ts_node_language")]
 #[must_use]
-pub fn language(&self) -> LanguageRef {
+pub fn language(&self) -> LanguageRef<'tree> {
 LanguageRef(unsafe { ffi::ts_node_language(self.0) }, PhantomData)
 }

@@ -2058,7 +2058,7 @@ impl hash::Hash for Node<'_> {
 }
 
 impl fmt::Debug for Node<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(
 f,
 "{{Node {} {} - {}}}",

@@ -2070,7 +2070,7 @@ impl fmt::Debug for Node<'_> {
 }
 
 impl fmt::Display for Node<'_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 let sexp = self.to_sexp();
 if sexp.is_empty() {
 write!(f, "")
@@ -2082,11 +2082,11 @@ impl fmt::Display for Node<'_> {
 }
 }
 
-impl<'cursor> TreeCursor<'cursor> {
+impl<'tree> TreeCursor<'tree> {
 /// Get the tree cursor's current [`Node`].
 #[doc(alias = "ts_tree_cursor_current_node")]
 #[must_use]
-pub fn node(&self) -> Node<'cursor> {
+pub fn node(&self) -> Node<'tree> {
 Node(
 unsafe { ffi::ts_tree_cursor_current_node(&self.0) },
 PhantomData,

@@ -2227,7 +2227,7 @@ impl<'cursor> TreeCursor<'cursor> {
 /// Re-initialize this tree cursor to start at the original node that the
 /// cursor was constructed with.
 #[doc(alias = "ts_tree_cursor_reset")]
-pub fn reset(&mut self, node: Node<'cursor>) {
+pub fn reset(&mut self, node: Node<'tree>) {
 unsafe { ffi::ts_tree_cursor_reset(&mut self.0, node.0) };
 }
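The rename is purely cosmetic, but `'tree` describes what a node obtained from the cursor actually borrows: the tree, not the cursor. A small illustrative helper (assuming the surrounding tree-sitter API; not part of this diff):

fn root_kind(tree: &Tree) -> String {
    // The cursor is dropped when this function returns, but the node it
    // produced borrows the tree, so reading its kind here is fine.
    let cursor = tree.walk();
    let node = cursor.node();
    node.kind().to_string()
}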
@@ -3022,12 +3022,12 @@ impl QueryCursor {
 query: &'query Query,
 node: Node<'tree>,
 text_provider: T,
-options: QueryCursorOptions,
+options: QueryCursorOptions<'_>,
 ) -> QueryMatches<'query, 'tree, T, I> {
 unsafe extern "C" fn progress(state: *mut ffi::TSQueryCursorState) -> bool {
 let callback = (*state)
 .payload
-.cast::<QueryProgressCallback>()
+.cast::<QueryProgressCallback<'_>>()
 .as_mut()
 .unwrap();
 match callback(&QueryCursorState::from_raw(state)) {

@@ -3111,12 +3111,12 @@ impl QueryCursor {
 query: &'query Query,
 node: Node<'tree>,
 text_provider: T,
-options: QueryCursorOptions,
+options: QueryCursorOptions<'_>,
 ) -> QueryCaptures<'query, 'tree, T, I> {
 unsafe extern "C" fn progress(state: *mut ffi::TSQueryCursorState) -> bool {
 let callback = (*state)
 .payload
-.cast::<QueryProgressCallback>()
+.cast::<QueryProgressCallback<'_>>()
 .as_mut()
 .unwrap();
 match callback(&QueryCursorState::from_raw(state)) {

@@ -3404,7 +3404,7 @@ impl QueryProperty {
 /// Provide a `StreamingIterator` instead of the traditional `Iterator`, as the
 /// underlying object in the C library gets updated on each iteration. Copies would
 /// have their internal state overwritten, leading to Undefined Behavior
-impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIterator
+impl<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIterator
 for QueryMatches<'query, 'tree, T, I>
 {
 type Item = QueryMatch<'query, 'tree>;

@@ -3435,15 +3435,13 @@ impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIterato
 }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIteratorMut
-for QueryMatches<'query, 'tree, T, I>
-{
+impl<T: TextProvider<I>, I: AsRef<[u8]>> StreamingIteratorMut for QueryMatches<'_, '_, T, I> {
 fn get_mut(&mut self) -> Option<&mut Self::Item> {
 self.current_match.as_mut()
 }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIterator
+impl<'query, 'tree, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIterator
 for QueryCaptures<'query, 'tree, T, I>
 {
 type Item = (QueryMatch<'query, 'tree>, usize);

@@ -3480,9 +3478,7 @@ impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIterato
 }
 }
 
-impl<'query, 'tree: 'query, T: TextProvider<I>, I: AsRef<[u8]>> StreamingIteratorMut
-for QueryCaptures<'query, 'tree, T, I>
-{
+impl<T: TextProvider<I>, I: AsRef<[u8]>> StreamingIteratorMut for QueryCaptures<'_, '_, T, I> {
 fn get_mut(&mut self) -> Option<&mut Self::Item> {
 self.current_match.as_mut()
 }

@@ -3521,7 +3517,7 @@ impl<T: TextProvider<I>, I: AsRef<[u8]>> QueryCaptures<'_, '_, T, I> {
 }
 
 impl fmt::Debug for QueryMatch<'_, '_> {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(
 f,
 "QueryMatch {{ id: {}, pattern_index: {}, captures: {:?} }}",
@@ -3532,13 +3528,13 @@ impl fmt::Debug for QueryMatch<'_, '_> {
 
 impl<F, R, I> TextProvider<I> for F
 where
-F: FnMut(Node) -> R,
+F: FnMut(Node<'_>) -> R,
 R: Iterator<Item = I>,
 I: AsRef<[u8]>,
 {
 type I = R;
 
-fn text(&mut self, node: Node) -> Self::I {
+fn text(&mut self, node: Node<'_>) -> Self::I {
 (self)(node)
 }
 }

@@ -3546,7 +3542,7 @@ where
 impl<'a> TextProvider<&'a [u8]> for &'a [u8] {
 type I = iter::Once<&'a [u8]>;
 
-fn text(&mut self, node: Node) -> Self::I {
+fn text(&mut self, node: Node<'_>) -> Self::I {
 iter::once(&self[node.byte_range()])
 }
 }
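This blanket impl is what lets a plain closure act as the text provider, as in the query test earlier in the diff; with the lint enabled, the closure's parameter is written `Node<'_>`. A short usage sketch (assuming `source: &[u8]` is the text the tree was parsed from, with `cursor`, `query`, and `tree` set up as in that test):

let provider = |node: Node<'_>| std::iter::once(&source[node.byte_range()]);
let mut matches = cursor.matches(&query, tree.root_node(), provider);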
@@ -3577,7 +3573,7 @@ impl Point {
 }
 
 impl fmt::Display for Point {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(f, "({}, {})", self.row, self.column)
 }
 }

@@ -3622,8 +3618,8 @@ impl From<ffi::TSRange> for Range {
 }
 }
 
-impl From<&'_ InputEdit> for ffi::TSInputEdit {
-fn from(val: &'_ InputEdit) -> Self {
+impl From<&InputEdit> for ffi::TSInputEdit {
+fn from(val: &InputEdit) -> Self {
 Self {
 start_byte: val.start_byte as u32,
 old_end_byte: val.old_end_byte as u32,

@@ -3694,13 +3690,13 @@ const fn predicate_error(row: usize, message: String) -> QueryError {
 }
 
 impl fmt::Display for IncludedRangesError {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 write!(f, "Incorrect range by index: {}", self.0)
 }
 }
 
 impl fmt::Display for LanguageError {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
 Self::Version(version) => {
 write!(

@@ -3717,7 +3713,7 @@ impl fmt::Display for LanguageError {
 }
 
 impl fmt::Display for QueryError {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 let msg = match self.kind {
 QueryErrorKind::Field => "Invalid field name ",
 QueryErrorKind::NodeType => "Invalid node type ",

@@ -133,7 +133,7 @@ impl Drop for WasmStore {
 }
 
 impl fmt::Display for WasmError {
-fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 let kind = match self.kind {
 WasmErrorKind::Parse => "Failed to parse Wasm",
 WasmErrorKind::Compile => "Failed to compile Wasm",