Fix parse error when reusing a node at the end of an included range

This commit is contained in:
parent 618146260e
commit d07f864815

2 changed files with 46 additions and 25 deletions
@@ -104,13 +104,16 @@ static void ts_lexer__get_lookahead(Lexer *self) {
 static void ts_lexer_goto(Lexer *self, Length position) {
   self->current_position = position;
-  bool found_included_range = false;
+
+  // Move to the first valid position at or after the given position.
+  bool found_included_range = false;
   for (unsigned i = 0; i < self->included_range_count; i++) {
     TSRange *included_range = &self->included_ranges[i];
-    if (included_range->end_byte > position.bytes) {
-      if (included_range->start_byte >= position.bytes) {
+    if (
+      included_range->end_byte > self->current_position.bytes &&
+      included_range->end_byte > included_range->start_byte
+    ) {
+      if (included_range->start_byte >= self->current_position.bytes) {
         self->current_position = (Length) {
           .bytes = included_range->start_byte,
           .extent = included_range->start_point,
|
@@ -127,8 +130,8 @@ static void ts_lexer_goto(Lexer *self, Length position) {
   // If the current position is outside of the current chunk of text,
   // then clear out the current chunk of text.
   if (self->chunk && (
-    position.bytes < self->chunk_start ||
-    position.bytes >= self->chunk_start + self->chunk_size
+    self->current_position.bytes < self->chunk_start ||
+    self->current_position.bytes >= self->chunk_start + self->chunk_size
   )) {
     ts_lexer__clear_chunk(self);
   }
|
@@ -164,27 +167,31 @@ static void ts_lexer__do_advance(Lexer *self, bool skip) {
     }
   }

-  const TSRange *current_range = NULL;
-  if (self->current_included_range_index < self->included_range_count) {
-    current_range = &self->included_ranges[self->current_included_range_index];
-    if (self->current_position.bytes == current_range->end_byte) {
-      self->current_included_range_index++;
-      if (self->current_included_range_index < self->included_range_count) {
-        current_range++;
-        self->current_position = (Length) {
-          current_range->start_byte,
-          current_range->start_point,
-        };
-      } else {
-        current_range = NULL;
-      }
-    }
+  const TSRange *current_range = &self->included_ranges[self->current_included_range_index];
+  while (
+    self->current_position.bytes >= current_range->end_byte ||
+    current_range->end_byte == current_range->start_byte
+  ) {
+    self->current_included_range_index++;
+    if (self->current_included_range_index < self->included_range_count) {
+      current_range++;
+      self->current_position = (Length) {
+        current_range->start_byte,
+        current_range->start_point,
+      };
+    } else {
+      current_range = NULL;
+      break;
+    }
   }

   if (skip) self->token_start_position = self->current_position;

   if (current_range) {
-    if (self->current_position.bytes >= self->chunk_start + self->chunk_size) {
+    if (
+      self->current_position.bytes < self->chunk_start ||
+      self->current_position.bytes >= self->chunk_start + self->chunk_size
+    ) {
       ts_lexer__get_chunk(self);
     }
     ts_lexer__get_lookahead(self);
|
|||
Loading…
Add table
Add a link
Reference in a new issue