Always call deserialize on the external scanner before scanning
Remembering the last token that the external scanner produced is not worth the complexity.
This commit is contained in:
parent
c285fbef38
commit
f3977ec213
3 changed files with 9 additions and 24 deletions
|
|
@ -110,7 +110,6 @@ void ts_lexer_init(Lexer *self) {
|
|||
.payload = NULL,
|
||||
.log = NULL
|
||||
},
|
||||
.last_external_token = NULL,
|
||||
};
|
||||
ts_lexer_reset(self, length_zero());
|
||||
}
|
||||
|
|
@ -134,7 +133,6 @@ static inline void ts_lexer__reset(Lexer *self, Length position) {
|
|||
// Install a new input source on the lexer, then restore it to a clean
// starting state: position rewound to zero and no remembered external token.
// NOTE(review): the reset and the token clear look independent, but the
// original order (input, reset, clear token) is preserved here.
void ts_lexer_set_input(Lexer *self, TSInput input) {
  self->input = input;
  ts_lexer__reset(self, length_zero());
  ts_lexer_set_last_external_token(self, NULL);
}
|
||||
|
||||
void ts_lexer_reset(Lexer *self, Length position) {
|
||||
|
|
@ -157,9 +155,3 @@ void ts_lexer_start(Lexer *self) {
|
|||
// Consume the remaining input: keep advancing (without marking token
// boundaries — hence `false`) until the lookahead character is 0,
// which is how this lexer represents end-of-input.
void ts_lexer_advance_to_end(Lexer *self) {
  for (;;) {
    if (self->data.lookahead == 0) break;
    ts_lexer__advance(self, false);
  }
}
|
||||
|
||||
// Replace the lexer's remembered external-scanner token, adjusting
// reference counts. Passing NULL simply drops the current token.
//
// The incoming token is retained BEFORE the old one is released: if the
// caller passes the token that is already held, releasing first could
// drop its refcount to zero and free it before we re-retain it.
void ts_lexer_set_last_external_token(Lexer *self, Tree *token) {
  if (token != NULL) {
    ts_tree_retain(token);
  }
  Tree *previous = self->last_external_token;
  if (previous != NULL) {
    ts_tree_release(previous);
  }
  self->last_external_token = token;
}
|
||||
|
|
|
|||
|
|
@ -24,7 +24,6 @@ typedef struct {
|
|||
TSInput input;
|
||||
TSLogger logger;
|
||||
char debug_buffer[TREE_SITTER_SERIALIZATION_BUFFER_SIZE];
|
||||
Tree *last_external_token;
|
||||
} Lexer;
|
||||
|
||||
void ts_lexer_init(Lexer *);
|
||||
|
|
@ -32,7 +31,6 @@ void ts_lexer_set_input(Lexer *, TSInput);
|
|||
void ts_lexer_reset(Lexer *, Length);
|
||||
void ts_lexer_start(Lexer *);
|
||||
void ts_lexer_advance_to_end(Lexer *);
|
||||
void ts_lexer_set_last_external_token(Lexer *, Tree *);
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
|
|
|
|||
|
|
@ -250,18 +250,15 @@ static CondenseResult parser__condense_stack(Parser *self) {
|
|||
}
|
||||
|
||||
// Restore the external scanner's state to what it was when `external_token`
// was produced, so scanning can resume deterministically from that point.
//
// Per this commit's intent ("Always call deserialize on external scanner
// before scanning"), deserialize is invoked unconditionally — the previous
// ts_tree_external_token_state_eq() short-circuit is gone, and the scraped
// diff's duplicated old/new bodies are collapsed into the single new one.
//
// `external_token` may be NULL, meaning "no prior external token": the
// scanner is reset by deserializing an empty (NULL, 0) buffer.
static void parser__restore_external_scanner(Parser *self, Tree *external_token) {
  ts_lexer_set_last_external_token(&self->lexer, external_token);
  LOG("restore_external_scanner");
  if (external_token) {
    // Replay the serialized scanner state captured alongside the token.
    self->language->external_scanner.deserialize(
      self->external_scanner_payload,
      ts_external_token_state_data(&external_token->external_token_state),
      external_token->external_token_state.length
    );
  } else {
    // Reset the scanner to its initial state.
    self->language->external_scanner.deserialize(self->external_scanner_payload, NULL, 0);
  }
}
|
||||
|
||||
|
|
@ -378,7 +375,6 @@ static Tree *parser__lex(Parser *self, StackVersion version) {
|
|||
self->lexer.debug_buffer
|
||||
);
|
||||
ts_external_token_state_init(&result->external_token_state, self->lexer.debug_buffer, length);
|
||||
ts_lexer_set_last_external_token(&self->lexer, result);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1320,7 +1316,6 @@ void parser_destroy(Parser *self) {
|
|||
array_delete(&self->tree_path1);
|
||||
if (self->tree_path2.contents)
|
||||
array_delete(&self->tree_path2);
|
||||
ts_lexer_set_last_external_token(&self->lexer, NULL);
|
||||
parser_set_language(self, NULL);
|
||||
}
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue