2015-11-20 12:55:01 -08:00
|
|
|
#include <assert.h>
|
2014-07-10 13:14:52 -07:00
|
|
|
#include <stdio.h>
|
2016-06-02 14:04:48 -07:00
|
|
|
#include <limits.h>
|
2014-10-09 14:02:03 -07:00
|
|
|
#include <stdbool.h>
|
2014-07-10 13:14:52 -07:00
|
|
|
#include "tree_sitter/runtime.h"
|
2018-05-10 15:11:14 -07:00
|
|
|
#include "runtime/subtree.h"
|
2014-07-30 23:40:02 -07:00
|
|
|
#include "runtime/lexer.h"
|
2014-09-26 16:15:07 -07:00
|
|
|
#include "runtime/length.h"
|
2016-02-17 20:41:29 -08:00
|
|
|
#include "runtime/array.h"
|
2015-11-20 12:00:49 -08:00
|
|
|
#include "runtime/language.h"
|
2016-01-15 15:08:42 -08:00
|
|
|
#include "runtime/alloc.h"
|
2018-05-10 22:22:37 -07:00
|
|
|
#include "runtime/stack.h"
|
|
|
|
|
#include "runtime/reusable_node.h"
|
2016-05-09 14:31:44 -07:00
|
|
|
#include "runtime/reduce_action.h"
|
2016-08-31 10:51:59 -07:00
|
|
|
#include "runtime/error_costs.h"
|
2018-05-10 22:22:37 -07:00
|
|
|
#include "runtime/string_input.h"
|
|
|
|
|
#include "runtime/tree.h"
|
2014-07-10 13:14:52 -07:00
|
|
|
|
2017-07-17 17:12:36 -07:00
|
|
|
// Format a message into the lexer's debug buffer and emit it via
// ts_parser__log (logger callback and/or dot-graph file).
// Wrapped in do/while(0) so the macro behaves as a single statement
// and cannot mis-bind a following `else` (dangling-else hazard).
#define LOG(...)                                                                             \
  do {                                                                                       \
    if (self->lexer.logger.log || self->dot_graph_file) {                                    \
      snprintf(self->lexer.debug_buffer, TREE_SITTER_SERIALIZATION_BUFFER_SIZE, __VA_ARGS__); \
      ts_parser__log(self);                                                                  \
    }                                                                                        \
  } while (0)
|
2016-02-23 09:45:27 -08:00
|
|
|
|
2018-05-11 12:43:04 -07:00
|
|
|
// Dump the current parse stack as a dot graph, if dot-graph debugging is
// enabled. Wrapped in do/while(0) so the macro is a single statement and
// cannot mis-bind a following `else` (dangling-else hazard).
#define LOG_STACK()                                                                  \
  do {                                                                               \
    if (self->dot_graph_file) {                                                      \
      ts_stack_print_dot_graph(self->stack, self->language, self->dot_graph_file);   \
      fputs("\n\n", self->dot_graph_file);                                           \
    }                                                                                \
  } while (0)
|
|
|
|
|
|
2018-05-11 12:43:04 -07:00
|
|
|
// Dump the finished tree as a dot graph, if dot-graph debugging is enabled.
// Wrapped in do/while(0) so the macro is a single statement and cannot
// mis-bind a following `else` (dangling-else hazard).
#define LOG_TREE()                                                                       \
  do {                                                                                   \
    if (self->dot_graph_file) {                                                          \
      ts_subtree_print_dot_graph(self->finished_tree, self->language, self->dot_graph_file); \
      fputs("\n", self->dot_graph_file);                                                 \
    }                                                                                    \
  } while (0)
|
|
|
|
|
|
2016-03-02 09:55:25 -08:00
|
|
|
#define SYM_NAME(symbol) ts_language_symbol_name(self->language, symbol)
|
2014-10-13 21:20:08 -07:00
|
|
|
|
2017-09-12 16:20:06 -07:00
|
|
|
// Tuning constants for the parser's error recovery.
// NOTE(review): MAX_VERSION_COUNT and MAX_SUMMARY_DEPTH are not referenced
// in this chunk — presumably they cap concurrent stack versions and error
// summary depth respectively; confirm against the rest of the file.
static const unsigned MAX_VERSION_COUNT = 6;
static const unsigned MAX_SUMMARY_DEPTH = 16;
// Cost gap at which one stack version is taken outright over another
// (see ts_parser__compare_versions).
static const unsigned MAX_COST_DIFFERENCE = 16 * ERROR_COST_PER_SKIPPED_TREE;
|
2017-06-29 14:58:20 -07:00
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
// Caches the most recently lexed token so that it can be reused when another
// stack version requests a token at the same byte position with an equal
// external scanner state (see ts_parser__get_cached_token /
// ts_parser__set_cached_token).
typedef struct {
  const Subtree *token;                // cached lookahead token (retained)
  const Subtree *last_external_token;  // external scanner state the token was lexed under (retained)
  uint32_t byte_index;                 // byte position at which the token was lexed
} TokenCache;
|
|
|
|
|
|
|
|
|
|
// The parser's full state. One instance drives one parse at a time.
struct TSParser {
  Lexer lexer;                    // lexer state; also owns the shared debug/serialization buffer
  Stack *stack;                   // the (possibly multi-version) parse stack
  SubtreePool tree_pool;          // allocation pool for subtrees
  const TSLanguage *language;     // grammar tables: lex modes, parse actions, symbol names
  ReduceActionSet reduce_actions; // scratch set of reduce actions — used by the reduce path; confirm against ts_parser__reduce
  const Subtree *finished_tree;   // best completed parse tree so far (compared by error cost)
  Subtree scratch_tree;           // scratch node used by ts_parser__replace_children
  TokenCache token_cache;         // most recently lexed token, for cross-version reuse
  ReusableNode reusable_node;     // cursor over the old tree's nodes during incremental reuse
  void *external_scanner_payload; // opaque state passed to the language's external scanner
  FILE *dot_graph_file;           // when non-NULL, debug dot graphs are written here
  unsigned accept_count;          // NOTE(review): not used in this chunk — verify semantics elsewhere
  size_t operation_limit;         // NOTE(review): not used in this chunk — presumably a cancellation budget; confirm
  volatile bool enabled;          // volatile: looks like it is cleared from outside to halt parsing — confirm
  bool halt_on_error;             // NOTE(review): not used in this chunk — confirm behavior at call sites
};
|
|
|
|
|
|
2016-03-07 20:06:46 -08:00
|
|
|
// Summary of a stack version's error-related standing, used to compare
// competing versions during error recovery (see ts_parser__version_status
// and ts_parser__compare_versions).
typedef struct {
  unsigned cost;          // accumulated error cost (ts_stack_error_cost, plus a penalty if paused)
  unsigned node_count;    // nodes pushed since the last error (ts_stack_node_count_since_error)
  int dynamic_precedence; // dynamic precedence reported by the stack for this version
  bool is_in_error;       // version is paused or currently in the ERROR_STATE
} ErrorStatus;
|
|
|
|
|
|
|
|
|
|
// Outcome of comparing two stack versions' error statuses, from the
// perspective of the (left, right) pair passed to
// ts_parser__compare_versions. "Take" is a decisive choice; "Prefer" is a
// weaker preference (see usage in ts_parser__better_version_exists).
typedef enum {
  ErrorComparisonTakeLeft,
  ErrorComparisonPreferLeft,
  ErrorComparisonNone,
  ErrorComparisonPreferRight,
  ErrorComparisonTakeRight,
} ErrorComparison;
|
2016-03-03 10:21:57 -08:00
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
// Parser - Private
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Emit the message currently in the lexer's debug buffer: forward it to the
// user-supplied logger callback (if any), and/or write it as the label of a
// small dot graph to the debug graph file (if enabled).
static void ts_parser__log(TSParser *self) {
  if (self->lexer.logger.log) {
    self->lexer.logger.log(
      self->lexer.logger.payload,
      TSLogTypeParse,
      self->lexer.debug_buffer
    );
  }

  if (self->dot_graph_file) {
    fprintf(self->dot_graph_file, "graph {\nlabel=\"");
    // Escape double quotes so the message is a valid dot string literal.
    for (char *c = &self->lexer.debug_buffer[0]; *c != 0; c++) {
      if (*c == '"') fputc('\\', self->dot_graph_file);
      fputc(*c, self->dot_graph_file);
    }
    fprintf(self->dot_graph_file, "\"\n}\n\n");
  }
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Repeatedly pop the topmost "pending" node of the given stack version and
// push its children back onto the stack individually, recomputing parse
// states along the way. Returns true if at least one node was broken down.
static bool ts_parser__breakdown_top_of_stack(TSParser *self, StackVersion version) {
  bool did_break_down = false;
  bool pending = false;

  do {
    // Pop the top node only if it was pushed as "pending" (still breakable).
    StackSliceArray pop = ts_stack_pop_pending(self->stack, version);
    if (!pop.size) break;

    did_break_down = true;
    pending = false;
    for (uint32_t i = 0; i < pop.size; i++) {
      StackSlice slice = pop.contents[i];
      TSStateId state = ts_stack_state(self->stack, slice.version);
      const Subtree *parent = *array_front(&slice.subtrees);

      // Push each child of the popped node, advancing the parse state for
      // every non-extra child; an error child forces the error state.
      for (uint32_t j = 0; j < parent->children.size; j++) {
        const Subtree *child = parent->children.contents[j];
        pending = child->children.size > 0;

        if (child->symbol == ts_builtin_sym_error) {
          state = ERROR_STATE;
        } else if (!child->extra) {
          state = ts_language_next_state(self->language, state, child->symbol);
        }

        ts_subtree_retain(child);
        ts_stack_push(self->stack, slice.version, child, pending, state);
      }

      // Re-push any additional subtrees that were popped along with the parent.
      for (uint32_t j = 1; j < slice.subtrees.size; j++) {
        const Subtree *tree = slice.subtrees.contents[j];
        ts_stack_push(self->stack, slice.version, tree, false, state);
      }

      // Capture the symbol BEFORE releasing the parent: the release may free
      // the node, making the subsequent LOG read a use-after-free.
      TSSymbol parent_symbol = parent->symbol;
      ts_subtree_release(&self->tree_pool, parent);
      array_delete(&slice.subtrees);

      LOG("breakdown_top_of_stack tree:%s", SYM_NAME(parent_symbol));
      LOG_STACK();
    }
  } while (pending);

  return did_break_down;
}
|
|
|
|
|
|
2018-05-11 15:06:13 -07:00
|
|
|
// If the reusable node was created in a different parse state than the one we
// are currently in, descend into its children until we reach a node whose
// parse state matches (or a leaf). When we descend, the current lookahead is
// replaced with the smaller node that was reached.
static void ts_parser__breakdown_lookahead(TSParser *self, const Subtree **lookahead,
                                           TSStateId state, ReusableNode *reusable_node) {
  bool did_descend = false;
  const Subtree *tree = reusable_node_tree(reusable_node);
  while (tree->children.size > 0 && tree->parse_state != state) {
    LOG("state_mismatch sym:%s", SYM_NAME(tree->symbol));
    reusable_node_descend(reusable_node);
    tree = reusable_node_tree(reusable_node);
    did_descend = true;
  }

  if (did_descend) {
    // Swap the lookahead: drop our reference to the old tree and take a
    // reference to the node we descended to.
    ts_subtree_release(&self->tree_pool, *lookahead);
    *lookahead = tree;
    ts_subtree_retain(*lookahead);
  }
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Decide which of two stack versions (summarized by their error statuses)
// should be kept or favored. "Take" results are decisive; "Prefer" results
// are weaker preferences.
static ErrorComparison ts_parser__compare_versions(TSParser *self, ErrorStatus a, ErrorStatus b) {
  // A version that is not in an error state beats one that is; the win is
  // decisive only when it is also cheaper.
  if (!a.is_in_error && b.is_in_error) {
    return (a.cost < b.cost) ? ErrorComparisonTakeLeft : ErrorComparisonPreferLeft;
  }
  if (a.is_in_error && !b.is_in_error) {
    return (b.cost < a.cost) ? ErrorComparisonTakeRight : ErrorComparisonPreferRight;
  }

  // Same error state on both sides: compare costs, weighting the cost gap by
  // how many nodes the cheaper version has accumulated since its error.
  if (a.cost < b.cost) {
    unsigned weighted_gap = (b.cost - a.cost) * (1 + a.node_count);
    return (weighted_gap > MAX_COST_DIFFERENCE) ? ErrorComparisonTakeLeft
                                                : ErrorComparisonPreferLeft;
  }
  if (b.cost < a.cost) {
    unsigned weighted_gap = (a.cost - b.cost) * (1 + b.node_count);
    return (weighted_gap > MAX_COST_DIFFERENCE) ? ErrorComparisonTakeRight
                                                : ErrorComparisonPreferRight;
  }

  // Costs are equal: break the tie on dynamic precedence.
  if (a.dynamic_precedence > b.dynamic_precedence) return ErrorComparisonPreferLeft;
  if (b.dynamic_precedence > a.dynamic_precedence) return ErrorComparisonPreferRight;
  return ErrorComparisonNone;
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Summarize a stack version's error standing for comparison purposes.
// A paused version is charged one extra skipped-tree penalty.
static ErrorStatus ts_parser__version_status(TSParser *self, StackVersion version) {
  bool paused = ts_stack_is_paused(self->stack, version);
  unsigned total_cost = ts_stack_error_cost(self->stack, version);
  if (paused) total_cost += ERROR_COST_PER_SKIPPED_TREE;

  ErrorStatus status = {
    .cost = total_cost,
    .node_count = ts_stack_node_count_since_error(self->stack, version),
    .dynamic_precedence = ts_stack_dynamic_precedence(self->stack, version),
    .is_in_error = paused || ts_stack_state(self->stack, version) == ERROR_STATE,
  };
  return status;
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Return true if some other stack version (or the already-finished tree) is
// good enough that this version, with the given hypothetical error status,
// is not worth pursuing.
static bool ts_parser__better_version_exists(TSParser *self, StackVersion version,
                                             bool is_in_error, unsigned cost) {
  // A completed tree that is at least as cheap always wins.
  if (self->finished_tree && self->finished_tree->error_cost <= cost) return true;

  Length position = ts_stack_position(self->stack, version);
  ErrorStatus status = {
    .cost = cost,
    .is_in_error = is_in_error,
    .dynamic_precedence = ts_stack_dynamic_precedence(self->stack, version),
    .node_count = ts_stack_node_count_since_error(self->stack, version),
  };

  for (StackVersion i = 0, n = ts_stack_version_count(self->stack); i < n; i++) {
    // Only compare against active versions that have reached at least the
    // same byte position.
    if (i == version ||
        !ts_stack_is_active(self->stack, i) ||
        ts_stack_position(self->stack, i).bytes < position.bytes) continue;
    ErrorStatus status_i = ts_parser__version_status(self, i);
    switch (ts_parser__compare_versions(self, status, status_i)) {
      case ErrorComparisonTakeRight:
        return true;
      case ErrorComparisonPreferRight:
        // Only decisive if the two versions could be merged anyway.
        if (ts_stack_can_merge(self->stack, i, version)) return true;
        // fallthrough
      default:
        break;
    }
  }

  return false;
}
|
|
|
|
|
|
2018-05-11 15:06:13 -07:00
|
|
|
// Restore the external scanner to the state it had after producing the given
// token, or to its blank initial state when there is no previous token.
static void ts_parser__restore_external_scanner(TSParser *self, const Subtree *external_token) {
  if (!external_token) {
    // No prior external token: deserialize from an empty buffer.
    self->language->external_scanner.deserialize(self->external_scanner_payload, NULL, 0);
    return;
  }

  self->language->external_scanner.deserialize(
    self->external_scanner_payload,
    ts_external_scanner_state_data(&external_token->external_scanner_state),
    external_token->external_scanner_state.length
  );
}
|
2016-06-21 07:28:04 -07:00
|
|
|
|
2018-05-18 18:04:42 -07:00
|
|
|
// Decide whether the first leaf of `tree` can be reused as the next token in
// the given parse state, based on the lex mode it was originally created in
// and the parse table's reusability flag.
static bool ts_parser__can_reuse_first_leaf(TSParser *self, TSStateId state, const Subtree *tree,
                                            TableEntry *table_entry) {
  TSLexMode current_lex_mode = self->language->lex_modes[state];

  // If the token was created in a state with the same set of lookaheads, it is reusable.
  // (Keyword-capture tokens additionally require the exact same parse state.)
  if (tree->first_leaf.lex_mode.lex_state == current_lex_mode.lex_state &&
      tree->first_leaf.lex_mode.external_lex_state == current_lex_mode.external_lex_state &&
      (tree->first_leaf.symbol != self->language->keyword_capture_token ||
       (!tree->is_keyword && tree->parse_state == state))) return true;

  // Empty tokens are not reusable in states with different lookaheads.
  if (tree->size.bytes == 0 && tree->symbol != ts_builtin_sym_end) return false;

  // If the current state allows external tokens or other tokens that conflict with this
  // token, this token is not reusable.
  return current_lex_mode.external_lex_state == 0 && table_entry->is_reusable;
}
|
|
|
|
|
|
2018-05-11 15:06:13 -07:00
|
|
|
// Lex one token starting at the given stack version's position, in the lex
// mode dictated by `parse_state`. Tries the external scanner first (when the
// state enables external tokens), then the internal lexer. On repeated
// failure it switches to the error-state lex mode, and as a last resort
// skips unrecognized characters, producing an error node. Returns a newly
// created leaf (or error) subtree owned by the caller.
static const Subtree *ts_parser__lex(TSParser *self, StackVersion version, TSStateId parse_state) {
  Length start_position = ts_stack_position(self->stack, version);
  const Subtree *external_token = ts_stack_last_external_token(self->stack, version);
  TSLexMode lex_mode = self->language->lex_modes[parse_state];
  const bool *valid_external_tokens = ts_language_enabled_external_tokens(
    self->language,
    lex_mode.external_lex_state
  );

  bool found_external_token = false;
  bool error_mode = parse_state == ERROR_STATE;
  bool skipped_error = false;
  int32_t first_error_character = 0;
  Length error_start_position = length_zero();
  Length error_end_position = length_zero();
  // Tracks how far ahead the lexers looked, for invalidation bookkeeping.
  uint32_t last_byte_scanned = start_position.bytes;
  ts_lexer_reset(&self->lexer, start_position);

  for (;;) {
    Length current_position = self->lexer.current_position;

    // First, give the external scanner a chance (when enabled in this mode).
    if (valid_external_tokens) {
      LOG(
        "lex_external state:%d, row:%u, column:%u",
        lex_mode.external_lex_state,
        current_position.extent.row,
        current_position.extent.column
      );
      ts_lexer_start(&self->lexer);
      ts_parser__restore_external_scanner(self, external_token);
      if (self->language->external_scanner.scan(
        self->external_scanner_payload,
        &self->lexer.data,
        valid_external_tokens
      )) {
        // Scanner may not have marked an end; default to current position.
        if (length_is_undefined(self->lexer.token_end_position)) {
          self->lexer.token_end_position = self->lexer.current_position;
        }

        // In error mode, only accept external tokens that consumed input,
        // to guarantee forward progress.
        if (!error_mode || self->lexer.token_end_position.bytes > current_position.bytes) {
          found_external_token = true;
          break;
        }
      }

      if (self->lexer.current_position.bytes > last_byte_scanned) {
        last_byte_scanned = self->lexer.current_position.bytes;
      }
      // Rewind so the internal lexer starts from the same place.
      ts_lexer_reset(&self->lexer, current_position);
    }

    LOG(
      "lex_internal state:%d, row:%u, column:%u",
      lex_mode.lex_state,
      current_position.extent.row,
      current_position.extent.column
    );
    ts_lexer_start(&self->lexer);
    if (self->language->lex_fn(&self->lexer.data, lex_mode.lex_state)) {
      break;
    }

    // Lexing failed in the normal mode: retry once from the start in the
    // error-state lex mode, which accepts a broader token set.
    if (!error_mode) {
      error_mode = true;
      lex_mode = self->language->lex_modes[ERROR_STATE];
      valid_external_tokens = ts_language_enabled_external_tokens(
        self->language,
        lex_mode.external_lex_state
      );
      if (self->lexer.current_position.bytes > last_byte_scanned) {
        last_byte_scanned = self->lexer.current_position.bytes;
      }
      ts_lexer_reset(&self->lexer, start_position);
      continue;
    }

    // Even the error mode failed: skip one character and try again,
    // remembering where the skipped region began.
    if (!skipped_error) {
      LOG("skip_unrecognized_character");
      skipped_error = true;
      error_start_position = self->lexer.token_start_position;
      error_end_position = self->lexer.token_start_position;
      first_error_character = self->lexer.data.lookahead;
    }

    if (self->lexer.current_position.bytes == error_end_position.bytes) {
      // Lookahead of 0 marks end of input: emit an error token and stop.
      if (self->lexer.data.lookahead == 0) {
        self->lexer.data.result_symbol = ts_builtin_sym_error;
        break;
      }
      self->lexer.data.advance(&self->lexer, false);
    }

    error_end_position = self->lexer.current_position;
  }

  if (self->lexer.current_position.bytes > last_byte_scanned) {
    last_byte_scanned = self->lexer.current_position.bytes;
  }

  Subtree *result;
  if (skipped_error) {
    // Build an error node covering the skipped characters.
    Length padding = length_sub(error_start_position, start_position);
    Length size = length_sub(error_end_position, error_start_position);
    result = ts_subtree_new_error(&self->tree_pool, size, padding, first_error_character, self->language);
  } else {
    // Guard against a scanner that marked an end before the start.
    if (self->lexer.token_end_position.bytes < self->lexer.token_start_position.bytes) {
      self->lexer.token_start_position = self->lexer.token_end_position;
    }

    bool is_keyword = false;
    TSSymbol symbol = self->lexer.data.result_symbol;
    Length padding = length_sub(self->lexer.token_start_position, start_position);
    Length size = length_sub(self->lexer.token_end_position, self->lexer.token_start_position);

    if (found_external_token) {
      // External scanners report their own symbol ids; map to grammar ids.
      symbol = self->language->external_scanner.symbol_map[symbol];
    } else if (symbol == self->language->keyword_capture_token && symbol != 0) {
      // Re-lex the captured word with the keyword lexer; if it matches the
      // whole token and the keyword is valid here, substitute it.
      uint32_t end_byte = self->lexer.token_end_position.bytes;
      ts_lexer_reset(&self->lexer, self->lexer.token_start_position);
      ts_lexer_start(&self->lexer);
      if (
        self->language->keyword_lex_fn(&self->lexer.data, 0) &&
        self->lexer.token_end_position.bytes == end_byte &&
        ts_language_has_actions(self->language, parse_state, self->lexer.data.result_symbol)
      ) {
        is_keyword = true;
        symbol = self->lexer.data.result_symbol;
      }
    }

    result = ts_subtree_new_leaf(&self->tree_pool, symbol, padding, size, self->language);
    result->is_keyword = is_keyword;

    if (found_external_token) {
      // Persist the scanner's serialized state on the token so it can be
      // restored later (debug_buffer doubles as the serialization buffer).
      result->has_external_tokens = true;
      unsigned length = self->language->external_scanner.serialize(
        self->external_scanner_payload,
        self->lexer.debug_buffer
      );
      ts_external_scanner_state_init(&result->external_scanner_state, self->lexer.debug_buffer, length);
    }
  }

  result->bytes_scanned = last_byte_scanned - start_position.bytes + 1;
  result->parse_state = parse_state;
  result->first_leaf.lex_mode = lex_mode;

  LOG("lexed_lookahead sym:%s, size:%u", SYM_NAME(result->symbol), result->size.bytes);
  return result;
}
|
|
|
|
|
|
2018-05-18 18:04:42 -07:00
|
|
|
static const Subtree *ts_parser__get_cached_token(TSParser *self, TSStateId state,
|
|
|
|
|
size_t position,
|
|
|
|
|
const Subtree *last_external_token,
|
|
|
|
|
TableEntry *table_entry) {
|
2017-08-30 17:35:12 -07:00
|
|
|
TokenCache *cache = &self->token_cache;
|
2018-05-18 18:04:42 -07:00
|
|
|
if (
|
|
|
|
|
cache->token && cache->byte_index == position &&
|
|
|
|
|
ts_subtree_external_scanner_state_eq(cache->last_external_token, last_external_token)
|
|
|
|
|
) {
|
|
|
|
|
ts_language_table_entry(self->language, state, cache->token->first_leaf.symbol, table_entry);
|
|
|
|
|
if (ts_parser__can_reuse_first_leaf(self, state, cache->token, table_entry)) {
|
|
|
|
|
ts_subtree_retain(cache->token);
|
|
|
|
|
return cache->token;
|
|
|
|
|
}
|
2017-08-30 17:35:12 -07:00
|
|
|
}
|
2018-05-18 18:04:42 -07:00
|
|
|
return NULL;
|
2017-08-30 17:35:12 -07:00
|
|
|
}
|
|
|
|
|
|
2018-05-11 15:06:13 -07:00
|
|
|
// Replace the contents of the token cache with the given token (may be NULL)
// and the external scanner token it was lexed under.
static void ts_parser__set_cached_token(TSParser *self, size_t byte_index,
                                        const Subtree *last_external_token,
                                        const Subtree *token) {
  TokenCache *cache = &self->token_cache;
  // Retain the new entries BEFORE releasing the old ones: if a subtree is
  // both the old and new value, releasing first could free it prematurely.
  if (token) ts_subtree_retain(token);
  if (last_external_token) ts_subtree_retain(last_external_token);
  if (cache->token) ts_subtree_release(&self->tree_pool, cache->token);
  if (cache->last_external_token) ts_subtree_release(&self->tree_pool, cache->last_external_token);
  cache->token = token;
  cache->byte_index = byte_index;
  cache->last_external_token = last_external_token;
}
|
|
|
|
|
|
2018-05-18 18:04:42 -07:00
|
|
|
// Advance the reusable-node cursor over the old tree until it yields a node
// that starts exactly at `position` and is safe to reuse in the current
// parse state. Returns a retained subtree on success; returns NULL when the
// cursor has moved past all candidates or the next candidate starts beyond
// `position`. May break down the top of the stack (and update `*state`) when
// a damaged node cannot be descended into.
static const Subtree *ts_parser__reuse_node(TSParser *self, StackVersion version,
                                            TSStateId *state, uint32_t position,
                                            const Subtree *last_external_token,
                                            TableEntry *table_entry) {
  const Subtree *result;
  while ((result = reusable_node_tree(&self->reusable_node))) {
    uint32_t byte_offset = reusable_node_byte_offset(&self->reusable_node);
    // Candidate starts after the requested position: nothing reusable here.
    if (byte_offset > position) {
      LOG("before_reusable_node symbol:%s", SYM_NAME(result->symbol));
      break;
    }

    // Candidate starts before the requested position: skip past it.
    if (byte_offset < position) {
      LOG("past_reusable_node symbol:%s", SYM_NAME(result->symbol));
      reusable_node_advance(&self->reusable_node);
      continue;
    }

    // External scanner state must match, or tokens after this node could
    // have been lexed differently.
    if (!ts_subtree_external_scanner_state_eq(self->reusable_node.last_external_token, last_external_token)) {
      LOG("reusable_node_has_different_external_scanner_state symbol:%s", SYM_NAME(result->symbol));
      reusable_node_advance(&self->reusable_node);
      continue;
    }

    // Nodes that were edited, are errors/missing, or are fragile cannot be
    // reused wholesale — try their children instead.
    const char *reason = NULL;
    if (result->has_changes) {
      reason = "has_changes";
    } else if (result->symbol == ts_builtin_sym_error) {
      reason = "is_error";
    } else if (result->is_missing) {
      reason = "is_missing";
    } else if (result->fragile_left || result->fragile_right) {
      reason = "is_fragile";
    }

    if (reason) {
      LOG("cant_reuse_node_%s tree:%s", reason, SYM_NAME(result->symbol));
      if (!reusable_node_descend(&self->reusable_node)) {
        // Leaf with no children to descend into: skip it and break down the
        // stack so parsing can resume from smaller pieces.
        reusable_node_advance(&self->reusable_node);
        ts_parser__breakdown_top_of_stack(self, version);
        *state = ts_stack_state(self->stack, version);
      }
      continue;
    }

    ts_language_table_entry(self->language, *state, result->first_leaf.symbol, table_entry);
    if (!ts_parser__can_reuse_first_leaf(self, *state, result, table_entry)) {
      LOG(
        "cant_reuse_node symbol:%s, first_leaf_symbol:%s",
        SYM_NAME(result->symbol),
        SYM_NAME(result->first_leaf.symbol)
      );
      reusable_node_advance_past_leaf(&self->reusable_node);
      break;
    }

    LOG("reuse_node symbol:%s", SYM_NAME(result->symbol));
    ts_subtree_retain(result);
    return result;
  }

  return NULL;
}
|
2015-10-06 16:22:58 -07:00
|
|
|
|
2018-05-11 15:06:13 -07:00
|
|
|
// Choose between two competing subtrees covering the same range. Returns
// true when `right` should replace `left`. Preference order: lower error
// cost, then higher dynamic precedence, then (for error-free trees) the
// structural comparison from ts_subtree_compare; ties keep `left`.
// (Removed unreachable `break` statements that followed `return` in the
// switch cases.)
static bool ts_parser__select_tree(TSParser *self, const Subtree *left, const Subtree *right) {
  if (!left) return true;
  if (!right) return false;

  if (right->error_cost < left->error_cost) {
    LOG("select_smaller_error symbol:%s, over_symbol:%s",
        SYM_NAME(right->symbol), SYM_NAME(left->symbol));
    return true;
  }

  if (left->error_cost < right->error_cost) {
    LOG("select_smaller_error symbol:%s, over_symbol:%s",
        SYM_NAME(left->symbol), SYM_NAME(right->symbol));
    return false;
  }

  if (right->dynamic_precedence > left->dynamic_precedence) {
    LOG("select_higher_precedence symbol:%s, prec:%u, over_symbol:%s, other_prec:%u",
        SYM_NAME(right->symbol), right->dynamic_precedence, SYM_NAME(left->symbol),
        left->dynamic_precedence);
    return true;
  }

  if (left->dynamic_precedence > right->dynamic_precedence) {
    LOG("select_higher_precedence symbol:%s, prec:%u, over_symbol:%s, other_prec:%u",
        SYM_NAME(left->symbol), left->dynamic_precedence, SYM_NAME(right->symbol),
        right->dynamic_precedence);
    return false;
  }

  // Trees with equal, non-zero error cost: prefer the newer one.
  if (left->error_cost > 0) return true;

  int comparison = ts_subtree_compare(left, right);
  switch (comparison) {
    case -1:
      LOG("select_earlier symbol:%s, over_symbol:%s", SYM_NAME(left->symbol),
          SYM_NAME(right->symbol));
      return false;
    case 1:
      LOG("select_earlier symbol:%s, over_symbol:%s", SYM_NAME(right->symbol),
          SYM_NAME(left->symbol));
      return true;
    default:
      LOG("select_existing symbol:%s, over_symbol:%s", SYM_NAME(left->symbol),
          SYM_NAME(right->symbol));
      return false;
  }
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Push the lookahead token onto the given stack version in the given state.
// If the desired `extra` flag differs from the token's current flag, a
// mutable copy is made first. Also records the last external token when the
// pushed subtree contains external tokens.
static void ts_parser__shift(TSParser *self, StackVersion version, TSStateId state,
                             const Subtree *lookahead, bool extra) {
  const Subtree *subtree_to_push;
  if (extra != lookahead->extra) {
    // Copy-on-write: only clone when the extra flag actually changes.
    Subtree *result = ts_subtree_make_mut(&self->tree_pool, lookahead);
    result->extra = extra;
    subtree_to_push = result;
  } else {
    subtree_to_push = lookahead;
  }

  // A subtree with children can still be broken down later ("pending").
  bool is_pending = subtree_to_push->children.size > 0;
  ts_stack_push(self->stack, version, subtree_to_push, is_pending, state);
  if (subtree_to_push->has_external_tokens) {
    ts_stack_set_last_external_token(
      self->stack, version, ts_subtree_last_external_token(subtree_to_push)
    );
  }
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Build a candidate version of `tree` that has `children` as its children
// (using the parser's scratch slot to avoid an allocation), and commit the
// candidate only if it compares favorably against the original tree.
// Returns true when the replacement was performed.
static bool ts_parser__replace_children(TSParser *self, Subtree *tree, SubtreeArray *children) {
  self->scratch_tree = *tree;
  self->scratch_tree.children.size = 0;
  ts_subtree_set_children(&self->scratch_tree, children, self->language);

  bool candidate_preferred = ts_parser__select_tree(self, tree, &self->scratch_tree);
  if (candidate_preferred) {
    *tree = self->scratch_tree;
  }
  return candidate_preferred;
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Pop `count` subtrees off the given stack version and combine them into a new
// parent node with the given `symbol`, pushing the parent back onto the stack.
// Because the stack is a graph, one pop may produce several slices; each slice
// gets its own parent node (or shares one, when slices collapse to the same
// version). Returns the array of popped slices so the caller can inspect them.
// NOTE(review): this function transfers ownership of the popped subtrees into
// the new parent nodes; slices that lose the `select` comparison are deleted.
static StackSliceArray ts_parser__reduce(TSParser *self, StackVersion version, TSSymbol symbol,
                                         uint32_t count, int dynamic_precedence,
                                         uint16_t alias_sequence_id, bool fragile) {
  uint32_t initial_version_count = ts_stack_version_count(self->stack);

  StackSliceArray pop = ts_stack_pop_count(self->stack, version, count);

  for (uint32_t i = 0; i < pop.size; i++) {
    StackSlice slice = pop.contents[i];

    // Extra tokens on top of the stack should not be included in this new parent
    // node. They will be re-pushed onto the stack after the parent node is
    // created and pushed.
    SubtreeArray children = slice.subtrees;
    while (children.size > 0 && children.contents[children.size - 1]->extra) {
      children.size--;
    }

    Subtree *parent = ts_subtree_new_node(&self->tree_pool,
      symbol, &children, alias_sequence_id, self->language
    );

    // This pop operation may have caused multiple stack versions to collapse
    // into one, because they all diverged from a common state. In that case,
    // choose one of the arrays of trees to be the parent node's children, and
    // delete the rest of the tree arrays.
    while (i + 1 < pop.size) {
      StackSlice next_slice = pop.contents[i + 1];
      if (next_slice.version != slice.version) break;
      i++;

      // Shadows the outer `children` deliberately: this is the candidate
      // child array from the next slice, trimmed of trailing extras.
      SubtreeArray children = next_slice.subtrees;
      while (children.size > 0 && children.contents[children.size - 1]->extra) {
        children.size--;
      }

      // Keep whichever child array produces the preferred parent; delete the
      // subtrees of the slice that loses the comparison.
      if (ts_parser__replace_children(self, parent, &children)) {
        ts_subtree_array_delete(&self->tree_pool, &slice.subtrees);
        slice = next_slice;
      } else {
        ts_subtree_array_delete(&self->tree_pool, &next_slice.subtrees);
      }
    }

    parent->dynamic_precedence += dynamic_precedence;
    parent->alias_sequence_id = alias_sequence_id;

    TSStateId state = ts_stack_state(self->stack, slice.version);
    TSStateId next_state = ts_language_next_state(self->language, state, symbol);
    // A reduction is fragile (re-parseable) when requested explicitly, or when
    // the stack was ambiguous: multiple slices or multiple pre-existing versions.
    if (fragile || pop.size > 1 || initial_version_count > 1) {
      parent->fragile_left = true;
      parent->fragile_right = true;
      parent->parse_state = TS_TREE_STATE_NONE;
    } else {
      parent->parse_state = state;
    }

    // Push the parent node onto the stack, along with any extra tokens that
    // were previously on top of the stack.
    ts_stack_push(self->stack, slice.version, parent, false, next_state);
    for (uint32_t j = parent->children.size; j < slice.subtrees.size; j++) {
      ts_stack_push(self->stack, slice.version, slice.subtrees.contents[j], false, next_state);
    }

    // Too many stack versions: discard the remaining slices and any versions
    // beyond the current one, then stop processing.
    if (ts_stack_version_count(self->stack) > MAX_VERSION_COUNT) {
      i++;
      while (i < pop.size) {
        // Shadows the outer `slice` deliberately: the outer `slice.version`
        // is still needed by the removal loop below.
        StackSlice slice = pop.contents[i];
        ts_subtree_array_delete(&self->tree_pool, &slice.subtrees);
        ts_stack_halt(self->stack, slice.version);
        i++;
      }
      while (ts_stack_version_count(self->stack) > slice.version + 1) {
        ts_stack_remove_version(self->stack, slice.version + 1);
      }
      break;
    }
  }

  // Merge any newly created stack versions that have become identical.
  for (StackVersion i = initial_version_count; i < ts_stack_version_count(self->stack); i++) {
    for (StackVersion j = initial_version_count; j < i; j++) {
      if (ts_stack_merge(self->stack, j, i)) {
        i--;
        break;
      }
    }
  }

  return pop;
}
|
|
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
// Reset all parser state in preparation for a new parse of `input`.
// `previous_tree` is non-NULL when re-parsing after an edit, in which case its
// nodes become candidates for reuse.
static void ts_parser__start(TSParser *self, TSInput input, const Subtree *previous_tree) {
  LOG("%s", previous_tree ? "parse_after_edit" : "new_parse");

  // Reset the external scanner to its initial (empty) state, if the language
  // has one.
  if (self->language->external_scanner.deserialize) {
    self->language->external_scanner.deserialize(self->external_scanner_payload, NULL, 0);
  }

  ts_lexer_set_input(&self->lexer, input);
  ts_stack_clear(self->stack);
  ts_parser__set_cached_token(self, 0, NULL, NULL);
  reusable_node_reset(&self->reusable_node, previous_tree);

  // Drop any tree left over from a previous parse.
  if (self->finished_tree) {
    ts_subtree_release(&self->tree_pool, self->finished_tree);
    self->finished_tree = NULL;
  }
  self->accept_count = 0;
}
|
|
|
|
|
|
2018-05-11 15:06:13 -07:00
|
|
|
// Finish parsing on the given stack version: pop the entire stack, construct
// the root node of the syntax tree, and record it as a finished-tree candidate.
// `lookahead` must be the end-of-input token. The stack version is halted.
static void ts_parser__accept(TSParser *self, StackVersion version, const Subtree *lookahead) {
  assert(lookahead->symbol == ts_builtin_sym_end);
  ts_stack_push(self->stack, version, lookahead, false, 1);

  StackSliceArray pop = ts_stack_pop_all(self->stack, version);
  for (uint32_t i = 0; i < pop.size; i++) {
    SubtreeArray trees = pop.contents[i].subtrees;

    // Find the topmost non-extra tree: that is the root. Splice its children
    // (plus any surrounding extras) into a fresh node with the same symbol,
    // retaining each child since `trees` now references them too.
    const Subtree *root = NULL;
    for (uint32_t j = trees.size - 1; j + 1 > 0; j--) {
      const Subtree *child = trees.contents[j];
      if (!child->extra) {
        for (uint32_t k = 0; k < child->children.size; k++) {
          ts_subtree_retain(child->children.contents[k]);
        }
        array_splice(&trees, j, 1, &child->children);
        root = ts_subtree_new_node(
          &self->tree_pool, child->symbol, &trees,
          child->alias_sequence_id, self->language
        );
        // The old root's contents were moved into the new node; drop it.
        ts_subtree_release(&self->tree_pool, child);
        break;
      }
    }

    assert(root && root->ref_count > 0);
    self->accept_count++;

    // If another stack version already finished, keep whichever tree the
    // selection heuristic prefers and release the other.
    if (self->finished_tree) {
      if (ts_parser__select_tree(self, self->finished_tree, root)) {
        ts_subtree_release(&self->tree_pool, self->finished_tree);
        self->finished_tree = root;
      } else {
        ts_subtree_release(&self->tree_pool, root);
      }
    } else {
      self->finished_tree = root;
    }
  }

  // Discard the version created by the pop, then halt the accepted version.
  ts_stack_remove_version(self->stack, pop.contents[0].version);
  ts_stack_halt(self->stack, version);
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
static bool ts_parser__do_all_potential_reductions(TSParser *self, StackVersion starting_version,
|
2017-12-28 15:48:35 -08:00
|
|
|
TSSymbol lookahead_symbol) {
|
2018-04-06 09:35:17 -07:00
|
|
|
uint32_t initial_version_count = ts_stack_version_count(self->stack);
|
|
|
|
|
|
|
|
|
|
bool can_shift_lookahead_symbol = false;
|
|
|
|
|
StackVersion version = starting_version;
|
|
|
|
|
for (unsigned i = 0; true; i++) {
|
2017-12-28 14:00:59 -08:00
|
|
|
uint32_t version_count = ts_stack_version_count(self->stack);
|
|
|
|
|
if (version >= version_count) break;
|
|
|
|
|
|
2018-04-06 09:35:17 -07:00
|
|
|
bool merged = false;
|
|
|
|
|
for (StackVersion i = initial_version_count; i < version; i++) {
|
|
|
|
|
if (ts_stack_merge(self->stack, i, version)) {
|
|
|
|
|
merged = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (merged) continue;
|
|
|
|
|
|
2018-03-29 17:37:54 -07:00
|
|
|
TSStateId state = ts_stack_state(self->stack, version);
|
2017-12-28 14:00:59 -08:00
|
|
|
bool has_shift_action = false;
|
|
|
|
|
array_clear(&self->reduce_actions);
|
|
|
|
|
|
2017-12-28 15:48:35 -08:00
|
|
|
TSSymbol first_symbol, end_symbol;
|
|
|
|
|
if (lookahead_symbol != 0) {
|
|
|
|
|
first_symbol = lookahead_symbol;
|
|
|
|
|
end_symbol = lookahead_symbol + 1;
|
|
|
|
|
} else {
|
|
|
|
|
first_symbol = 1;
|
|
|
|
|
end_symbol = self->language->token_count;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for (TSSymbol symbol = first_symbol; symbol < end_symbol; symbol++) {
|
2017-12-28 14:00:59 -08:00
|
|
|
TableEntry entry;
|
|
|
|
|
ts_language_table_entry(self->language, state, symbol, &entry);
|
|
|
|
|
for (uint32_t i = 0; i < entry.action_count; i++) {
|
|
|
|
|
TSParseAction action = entry.actions[i];
|
|
|
|
|
switch (action.type) {
|
|
|
|
|
case TSParseActionTypeShift:
|
|
|
|
|
case TSParseActionTypeRecover:
|
2018-04-06 09:35:17 -07:00
|
|
|
if (!action.params.extra && !action.params.repetition) has_shift_action = true;
|
2017-12-28 14:00:59 -08:00
|
|
|
break;
|
|
|
|
|
case TSParseActionTypeReduce:
|
|
|
|
|
if (action.params.child_count > 0)
|
|
|
|
|
ts_reduce_action_set_add(&self->reduce_actions, (ReduceAction){
|
|
|
|
|
.symbol = action.params.symbol,
|
|
|
|
|
.count = action.params.child_count,
|
|
|
|
|
.dynamic_precedence = action.params.dynamic_precedence,
|
|
|
|
|
.alias_sequence_id = action.params.alias_sequence_id,
|
|
|
|
|
});
|
|
|
|
|
default:
|
|
|
|
|
break;
|
|
|
|
|
}
|
2016-05-29 22:36:47 -07:00
|
|
|
}
|
2016-03-10 11:57:33 -08:00
|
|
|
}
|
|
|
|
|
|
2017-12-28 14:00:59 -08:00
|
|
|
for (uint32_t i = 0; i < self->reduce_actions.size; i++) {
|
|
|
|
|
ReduceAction action = self->reduce_actions.contents[i];
|
2018-04-06 09:35:17 -07:00
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
ts_parser__reduce(
|
2017-12-28 14:00:59 -08:00
|
|
|
self, version, action.symbol, action.count,
|
|
|
|
|
action.dynamic_precedence, action.alias_sequence_id,
|
|
|
|
|
true
|
|
|
|
|
);
|
|
|
|
|
}
|
2016-05-09 14:31:44 -07:00
|
|
|
|
2017-12-28 15:48:35 -08:00
|
|
|
if (has_shift_action) {
|
2018-04-06 09:35:17 -07:00
|
|
|
can_shift_lookahead_symbol = true;
|
|
|
|
|
} else if (self->reduce_actions.size > 0 && i < MAX_VERSION_COUNT) {
|
|
|
|
|
ts_stack_renumber_version(self->stack, version_count, version);
|
|
|
|
|
continue;
|
|
|
|
|
} else if (lookahead_symbol != 0) {
|
|
|
|
|
ts_stack_remove_version(self->stack, version);
|
2017-12-28 14:00:59 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (version == starting_version) {
|
|
|
|
|
version = version_count;
|
2016-08-29 09:34:08 -07:00
|
|
|
} else {
|
2017-12-28 14:00:59 -08:00
|
|
|
version++;
|
2016-08-29 09:34:08 -07:00
|
|
|
}
|
|
|
|
|
}
|
2018-04-06 09:35:17 -07:00
|
|
|
|
|
|
|
|
return can_shift_lookahead_symbol;
|
2016-08-29 09:34:08 -07:00
|
|
|
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
static void ts_parser__handle_error(TSParser *self, StackVersion version,
|
|
|
|
|
TSSymbol lookahead_symbol) {
|
2017-12-28 15:48:35 -08:00
|
|
|
// Perform any reductions that could have happened in this state, regardless of the lookahead.
|
2016-11-14 12:15:24 -08:00
|
|
|
uint32_t previous_version_count = ts_stack_version_count(self->stack);
|
2018-05-10 15:16:24 -07:00
|
|
|
ts_parser__do_all_potential_reductions(self, version, 0);
|
2017-12-28 15:48:35 -08:00
|
|
|
uint32_t version_count = ts_stack_version_count(self->stack);
|
2016-05-29 22:36:47 -07:00
|
|
|
|
2016-11-14 17:25:55 -08:00
|
|
|
// Push a discontinuity onto the stack. Merge all of the stack versions that
|
|
|
|
|
// were created in the previous step.
|
2017-12-28 15:48:35 -08:00
|
|
|
bool did_insert_missing_token = false;
|
|
|
|
|
for (StackVersion v = version; v < version_count;) {
|
|
|
|
|
if (!did_insert_missing_token) {
|
2018-03-29 17:37:54 -07:00
|
|
|
TSStateId state = ts_stack_state(self->stack, v);
|
2017-12-28 15:48:35 -08:00
|
|
|
for (TSSymbol missing_symbol = 1;
|
|
|
|
|
missing_symbol < self->language->token_count;
|
|
|
|
|
missing_symbol++) {
|
|
|
|
|
TSStateId state_after_missing_symbol = ts_language_next_state(
|
|
|
|
|
self->language, state, missing_symbol
|
|
|
|
|
);
|
|
|
|
|
if (state_after_missing_symbol == 0) continue;
|
|
|
|
|
|
|
|
|
|
if (ts_language_has_reduce_action(
|
|
|
|
|
self->language,
|
|
|
|
|
state_after_missing_symbol,
|
|
|
|
|
lookahead_symbol
|
|
|
|
|
)) {
|
|
|
|
|
StackVersion version_with_missing_tree = ts_stack_copy_version(self->stack, v);
|
2018-05-11 15:06:13 -07:00
|
|
|
const Subtree *missing_tree = ts_subtree_new_missing_leaf(
|
|
|
|
|
&self->tree_pool, missing_symbol, self->language
|
|
|
|
|
);
|
2017-12-28 15:48:35 -08:00
|
|
|
ts_stack_push(
|
|
|
|
|
self->stack, version_with_missing_tree,
|
|
|
|
|
missing_tree, false,
|
|
|
|
|
state_after_missing_symbol
|
|
|
|
|
);
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
if (ts_parser__do_all_potential_reductions(
|
2017-12-28 15:48:35 -08:00
|
|
|
self, version_with_missing_tree,
|
|
|
|
|
lookahead_symbol
|
|
|
|
|
)) {
|
2018-04-06 09:35:17 -07:00
|
|
|
LOG(
|
|
|
|
|
"recover_with_missing symbol:%s, state:%u",
|
|
|
|
|
SYM_NAME(missing_symbol),
|
|
|
|
|
ts_stack_state(self->stack, version_with_missing_tree)
|
|
|
|
|
);
|
2017-12-28 15:48:35 -08:00
|
|
|
did_insert_missing_token = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ts_stack_push(self->stack, v, NULL, false, ERROR_STATE);
|
|
|
|
|
v = (v == version) ? previous_version_count : v + 1;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for (unsigned i = previous_version_count; i < version_count; i++) {
|
2018-04-02 11:57:26 -07:00
|
|
|
assert(ts_stack_merge(self->stack, version, previous_version_count));
|
2016-05-29 22:36:47 -07:00
|
|
|
}
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
|
2017-09-12 12:00:00 -07:00
|
|
|
ts_stack_record_summary(self->stack, version, MAX_SUMMARY_DEPTH);
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
LOG_STACK();
|
2016-05-09 14:31:44 -07:00
|
|
|
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Abandon the parse entirely: consume all remaining input as one invisible
// error node, wrap the stack contents in a root-level error node, and accept
// that tree with a zero-width end-of-input token.
static void ts_parser__halt_parse(TSParser *self) {
  LOG("halting_parse");
  LOG_STACK();

  // Skip to the end of the input and measure how much text was left unparsed.
  ts_lexer_advance_to_end(&self->lexer);
  Length skipped_length = length_sub(
    self->lexer.current_position,
    ts_stack_position(self->stack, 0)
  );

  // Cover the skipped input with an error node that won't appear in the tree.
  Subtree *invisible_error = ts_subtree_new_error(&self->tree_pool, skipped_length, length_zero(), 0, self->language);
  invisible_error->visible = false;
  ts_stack_push(self->stack, 0, invisible_error, false, 0);

  // Push an empty error node to act as the root of the halted parse.
  SubtreeArray no_children = array_new();
  Subtree *root_error = ts_subtree_new_error_node(&self->tree_pool, &no_children, self->language);
  ts_stack_push(self->stack, 0, root_error, false, 0);

  // Accept the result using a zero-width end-of-input token.
  Subtree *end_token = ts_subtree_new_leaf(&self->tree_pool, ts_builtin_sym_end, length_zero(), length_zero(), self->language);
  ts_parser__accept(self, 0, end_token);
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Recover from an error by popping `depth` entries off the given stack version
// and wrapping the popped subtrees in an error node, provided the uncovered
// state matches `goal_state`. Slices whose uncovered state differs are halted
// and discarded. Returns true if at least one slice recovered successfully.
static bool ts_parser__recover_to_state(TSParser *self, StackVersion version, unsigned depth,
                                        TSStateId goal_state) {
  StackSliceArray pop = ts_stack_pop_count(self->stack, version, depth);
  StackVersion previous_version = STACK_VERSION_NONE;

  for (unsigned i = 0; i < pop.size; i++) {
    StackSlice slice = pop.contents[i];

    // Multiple slices may collapse onto the same version; keep only the first.
    if (slice.version == previous_version) {
      ts_subtree_array_delete(&self->tree_pool, &slice.subtrees);
      array_erase(&pop, i--);
      continue;
    }

    // The pop must have uncovered exactly the goal state; otherwise this
    // slice is not a valid recovery path.
    if (ts_stack_state(self->stack, slice.version) != goal_state) {
      ts_stack_halt(self->stack, slice.version);
      ts_subtree_array_delete(&self->tree_pool, &slice.subtrees);
      array_erase(&pop, i--);
      continue;
    }

    // If an earlier error node sits just below the goal state, unwrap it:
    // prepend its children (retaining each, since they gain a new reference)
    // so they are folded into the new error node being built.
    SubtreeArray error_trees = ts_stack_pop_error(self->stack, slice.version);
    if (error_trees.size > 0) {
      assert(error_trees.size == 1);
      array_splice(&slice.subtrees, 0, 0, &error_trees.contents[0]->children);
      for (unsigned j = 0; j < error_trees.contents[0]->children.size; j++) {
        ts_subtree_retain(slice.subtrees.contents[j]);
      }
      ts_subtree_array_delete(&self->tree_pool, &error_trees);
    }

    // Trailing extra tokens stay outside the error node and are re-pushed after it.
    SubtreeArray trailing_extras = ts_subtree_array_remove_trailing_extras(&slice.subtrees);

    if (slice.subtrees.size > 0) {
      Subtree *error = ts_subtree_new_error_node(&self->tree_pool, &slice.subtrees, self->language);
      error->extra = true;
      ts_stack_push(self->stack, slice.version, error, false, goal_state);
    } else {
      // Nothing was skipped; just release the empty array.
      array_delete(&slice.subtrees);
    }

    for (unsigned j = 0; j < trailing_extras.size; j++) {
      const Subtree *tree = trailing_extras.contents[j];
      ts_stack_push(self->stack, slice.version, tree, false, goal_state);
    }

    previous_version = slice.version;
    array_delete(&trailing_extras);
  }

  return previous_version != STACK_VERSION_NONE;
}
|
|
|
|
|
|
2018-05-11 15:06:13 -07:00
|
|
|
static void ts_parser__recover(TSParser *self, StackVersion version, const Subtree *lookahead) {
|
2017-09-12 16:20:06 -07:00
|
|
|
bool did_recover = false;
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
unsigned previous_version_count = ts_stack_version_count(self->stack);
|
2018-03-29 17:37:54 -07:00
|
|
|
Length position = ts_stack_position(self->stack, version);
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
StackSummary *summary = ts_stack_get_summary(self->stack, version);
|
2018-04-06 09:35:17 -07:00
|
|
|
unsigned node_count_since_error = ts_stack_node_count_since_error(self->stack, version);
|
|
|
|
|
unsigned current_error_cost = ts_stack_error_cost(self->stack, version);
|
|
|
|
|
|
|
|
|
|
if (summary && lookahead->symbol != ts_builtin_sym_error) {
|
|
|
|
|
for (unsigned i = 0; i < summary->size; i++) {
|
|
|
|
|
StackSummaryEntry entry = summary->contents[i];
|
|
|
|
|
|
|
|
|
|
if (entry.state == ERROR_STATE) continue;
|
|
|
|
|
if (entry.position.bytes == position.bytes) continue;
|
|
|
|
|
unsigned depth = entry.depth;
|
|
|
|
|
if (node_count_since_error > 0) depth++;
|
|
|
|
|
|
|
|
|
|
bool would_merge = false;
|
|
|
|
|
for (unsigned j = 0; j < previous_version_count; j++) {
|
|
|
|
|
if (
|
|
|
|
|
ts_stack_state(self->stack, j) == entry.state &&
|
|
|
|
|
ts_stack_position(self->stack, j).bytes == position.bytes
|
|
|
|
|
) {
|
|
|
|
|
would_merge = true;
|
|
|
|
|
break;
|
|
|
|
|
}
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
}
|
|
|
|
|
|
2018-04-06 09:35:17 -07:00
|
|
|
if (would_merge) continue;
|
|
|
|
|
|
|
|
|
|
unsigned new_cost =
|
|
|
|
|
current_error_cost +
|
|
|
|
|
entry.depth * ERROR_COST_PER_SKIPPED_TREE +
|
|
|
|
|
(position.bytes - entry.position.bytes) * ERROR_COST_PER_SKIPPED_CHAR +
|
|
|
|
|
(position.extent.row - entry.position.extent.row) * ERROR_COST_PER_SKIPPED_LINE;
|
2018-05-10 15:16:24 -07:00
|
|
|
if (ts_parser__better_version_exists(self, version, false, new_cost)) break;
|
2018-04-06 09:35:17 -07:00
|
|
|
|
|
|
|
|
if (ts_language_has_actions(self->language, entry.state, lookahead->symbol)) {
|
2018-05-10 15:16:24 -07:00
|
|
|
if (ts_parser__recover_to_state(self, version, depth, entry.state)) {
|
2018-04-06 09:35:17 -07:00
|
|
|
did_recover = true;
|
|
|
|
|
LOG("recover_to_previous state:%u, depth:%u", entry.state, depth);
|
|
|
|
|
LOG_STACK();
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
break;
|
|
|
|
|
}
|
|
|
|
|
}
|
2018-04-06 09:35:17 -07:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for (unsigned i = previous_version_count; i < ts_stack_version_count(self->stack); i++) {
|
|
|
|
|
if (!ts_stack_is_active(self->stack, i)) {
|
2018-04-02 09:47:01 -07:00
|
|
|
ts_stack_remove_version(self->stack, i--);
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2017-09-12 16:20:06 -07:00
|
|
|
if (did_recover && ts_stack_version_count(self->stack) > MAX_VERSION_COUNT) {
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
ts_stack_halt(self->stack, version);
|
2018-05-11 15:06:13 -07:00
|
|
|
ts_subtree_release(&self->tree_pool, lookahead);
|
Simplify error recovery; eliminate recovery states
The previous approach to error recovery relied on special error-recovery
states in the parse table. For each token T, there was an error recovery
state in which the parser looked for *any* token that could follow T.
Unfortunately, sometimes the set of tokens that could follow T contained
conflicts. For example, in JS, the token '}' can be followed by the
open-ended 'template_chars' token, but also by ordinary tokens like
'identifier'. So with the old algorithm, when recovering from an
unexpected '}' token, the lexer had no way to distinguish identifiers
from template_chars.
This commit drops the error recovery states. Instead, when we encounter
an unexpected token T, we recover from the error by finding a previous
state S in the stack in which T would be valid, popping all of the nodes
after S, and wrapping them in an error.
This way, the lexer is always invoked in a normal parse state, in which
it is looking for a non-conflicting set of tokens. Eliminating the error
recovery states also shrinks the lex state machine significantly.
Signed-off-by: Rick Winfrey <rewinfrey@github.com>
2017-09-11 15:22:52 -07:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2016-06-02 14:04:48 -07:00
|
|
|
if (lookahead->symbol == ts_builtin_sym_end) {
|
2016-06-22 22:36:11 -07:00
|
|
|
LOG("recover_eof");
|
2018-05-10 15:11:14 -07:00
|
|
|
SubtreeArray children = array_new();
|
2018-05-11 15:06:13 -07:00
|
|
|
const Subtree *parent = ts_subtree_new_error_node(&self->tree_pool, &children, self->language);
|
2017-08-29 16:22:27 -07:00
|
|
|
ts_stack_push(self->stack, version, parent, false, 1);
|
2018-05-10 15:16:24 -07:00
|
|
|
ts_parser__accept(self, version, lookahead);
|
2017-06-29 14:58:20 -07:00
|
|
|
return;
|
2016-06-02 14:04:48 -07:00
|
|
|
}
|
|
|
|
|
|
2018-04-06 09:35:17 -07:00
|
|
|
unsigned new_cost =
|
|
|
|
|
current_error_cost + ERROR_COST_PER_SKIPPED_TREE +
|
2018-05-10 15:11:14 -07:00
|
|
|
ts_subtree_total_bytes(lookahead) * ERROR_COST_PER_SKIPPED_CHAR +
|
|
|
|
|
ts_subtree_total_size(lookahead).extent.row * ERROR_COST_PER_SKIPPED_LINE;
|
2018-04-06 09:35:17 -07:00
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
if (ts_parser__better_version_exists(self, version, false, new_cost)) {
|
2018-04-06 09:35:17 -07:00
|
|
|
ts_stack_halt(self->stack, version);
|
2018-05-11 15:06:13 -07:00
|
|
|
ts_subtree_release(&self->tree_pool, lookahead);
|
2018-04-06 09:35:17 -07:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2017-09-14 12:07:46 -07:00
|
|
|
unsigned n;
|
|
|
|
|
const TSParseAction *actions = ts_language_actions(self->language, 1, lookahead->symbol, &n);
|
2018-04-06 09:35:17 -07:00
|
|
|
if (n > 0 && actions[n - 1].type == TSParseActionTypeShift && actions[n - 1].params.extra) {
|
2018-05-11 15:06:13 -07:00
|
|
|
Subtree *mutable_lookahead = ts_subtree_make_mut(&self->tree_pool, lookahead);
|
|
|
|
|
mutable_lookahead->extra = true;
|
|
|
|
|
lookahead = mutable_lookahead;
|
2018-04-06 09:35:17 -07:00
|
|
|
}
|
2017-07-03 12:27:23 -07:00
|
|
|
|
2018-04-06 09:35:17 -07:00
|
|
|
LOG("skip_token symbol:%s", SYM_NAME(lookahead->symbol));
|
2018-05-10 15:11:14 -07:00
|
|
|
SubtreeArray children = array_new();
|
2018-04-09 18:09:54 -07:00
|
|
|
array_reserve(&children, 1);
|
2018-04-06 09:35:17 -07:00
|
|
|
array_push(&children, lookahead);
|
2018-05-11 15:06:13 -07:00
|
|
|
const Subtree *error_repeat = ts_subtree_new_node(
|
2018-04-06 09:35:17 -07:00
|
|
|
&self->tree_pool,
|
|
|
|
|
ts_builtin_sym_error_repeat,
|
|
|
|
|
&children,
|
|
|
|
|
0,
|
|
|
|
|
self->language
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
if (node_count_since_error > 0) {
|
|
|
|
|
StackSliceArray pop = ts_stack_pop_count(self->stack, version, 1);
|
|
|
|
|
assert(pop.size == 1);
|
2018-05-10 15:11:14 -07:00
|
|
|
assert(pop.contents[0].subtrees.size == 1);
|
2018-04-06 09:35:17 -07:00
|
|
|
ts_stack_renumber_version(self->stack, pop.contents[0].version, version);
|
2018-05-10 15:11:14 -07:00
|
|
|
array_push(&pop.contents[0].subtrees, error_repeat);
|
2018-05-11 13:02:12 -07:00
|
|
|
error_repeat = ts_subtree_new_node(
|
2018-04-06 09:35:17 -07:00
|
|
|
&self->tree_pool,
|
|
|
|
|
ts_builtin_sym_error_repeat,
|
2018-05-10 15:11:14 -07:00
|
|
|
&pop.contents[0].subtrees,
|
2018-04-06 09:35:17 -07:00
|
|
|
0,
|
|
|
|
|
self->language
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ts_stack_push(self->stack, version, error_repeat, false, ERROR_STATE);
|
|
|
|
|
|
|
|
|
|
if (lookahead->has_external_tokens) {
|
|
|
|
|
ts_stack_set_last_external_token(
|
2018-05-10 15:11:14 -07:00
|
|
|
self->stack, version, ts_subtree_last_external_token(lookahead)
|
2018-04-06 09:35:17 -07:00
|
|
|
);
|
2017-06-30 17:45:05 -07:00
|
|
|
}
|
2016-03-07 20:06:46 -08:00
|
|
|
}
|
|
|
|
|
|
2018-05-18 18:04:42 -07:00
|
|
|
// Advance one stack version by a single lookahead token: obtain a lookahead
// (by reusing a node from the old tree, reusing a cached token, or lexing),
// then apply the parse-table actions for it (shift / reduce / accept /
// recover). Returns after consuming the lookahead or pausing the version.
static void ts_parser__advance(TSParser *self, StackVersion version, bool allow_node_reuse) {
  TSStateId state = ts_stack_state(self->stack, version);
  uint32_t position = ts_stack_position(self->stack, version).bytes;
  const Subtree *last_external_token = ts_stack_last_external_token(self->stack, version);

  bool did_reuse = true;
  const Subtree *lookahead = NULL;
  TableEntry table_entry;

  // If possible, reuse a node from the previous syntax tree.
  if (allow_node_reuse) {
    lookahead = ts_parser__reuse_node(
      self, version, &state, position, last_external_token, &table_entry
    );
  }

  // Otherwise, try to reuse the token previously returned by the lexer.
  if (!lookahead) {
    did_reuse = false;
    lookahead = ts_parser__get_cached_token(
      self, state, position, last_external_token, &table_entry
    );
  }

  // Otherwise, re-run the lexer.
  if (!lookahead) {
    lookahead = ts_parser__lex(self, version, state);
    ts_parser__set_cached_token(self, position, last_external_token, lookahead);
    ts_language_table_entry(self->language, state, lookahead->symbol, &table_entry);
  }

  // Process parse actions until the lookahead is consumed (shift / accept /
  // recover all return), or until no action applies and the version pauses.
  for (;;) {
    StackVersion last_reduction_version = STACK_VERSION_NONE;

    for (uint32_t i = 0; i < table_entry.action_count; i++) {
      TSParseAction action = table_entry.actions[i];

      switch (action.type) {
        case TSParseActionTypeShift: {
          if (action.params.repetition) break;
          TSStateId next_state;
          if (action.params.extra) {
            // TODO remove when TREE_SITTER_LANGUAGE_VERSION 9 is out.
            if (state == ERROR_STATE) continue;

            // 'extra' tokens (e.g. comments) do not change the parse state.
            next_state = state;
            LOG("shift_extra");
          } else {
            next_state = action.params.state;
            LOG("shift state:%u", next_state);
          }

          // A reused lookahead with children must be broken down into leaf
          // tokens before shifting; recompute the goto state afterwards.
          if (lookahead->children.size > 0) {
            ts_parser__breakdown_lookahead(self, &lookahead, state, &self->reusable_node);
            next_state = ts_language_next_state(self->language, state, lookahead->symbol);
          }

          ts_parser__shift(self, version, next_state, lookahead, action.params.extra);
          if (did_reuse) reusable_node_advance(&self->reusable_node);
          return;
        }

        case TSParseActionTypeReduce: {
          // Multiple actions for one (state, symbol) pair indicate a
          // conflict, so this reduction is fragile.
          bool is_fragile = table_entry.action_count > 1;
          LOG("reduce sym:%s, child_count:%u", SYM_NAME(action.params.symbol), action.params.child_count);
          StackSliceArray reduction = ts_parser__reduce(
            self, version, action.params.symbol, action.params.child_count,
            action.params.dynamic_precedence, action.params.alias_sequence_id,
            is_fragile
          );
          StackSlice slice = *array_front(&reduction);
          last_reduction_version = slice.version;
          break;
        }

        case TSParseActionTypeAccept: {
          LOG("accept");
          ts_parser__accept(self, version, lookahead);
          return;
        }

        case TSParseActionTypeRecover: {
          if (lookahead->children.size > 0) {
            ts_parser__breakdown_lookahead(self, &lookahead, ERROR_STATE, &self->reusable_node);
          }

          ts_parser__recover(self, version, lookahead);
          if (did_reuse) reusable_node_advance(&self->reusable_node);
          return;
        }
      }
    }

    // After a reduction, retry the same lookahead from the new state.
    if (last_reduction_version != STACK_VERSION_NONE) {
      ts_stack_renumber_version(self->stack, last_reduction_version, version);
      LOG_STACK();
      state = ts_stack_state(self->stack, version);
      ts_language_table_entry(self->language, state, lookahead->first_leaf.symbol, &table_entry);
      continue;
    }

    // No action matched a keyword token; retry it as the generic word token
    // that the grammar's keyword extraction captured it from.
    if (lookahead->is_keyword && lookahead->symbol != self->language->keyword_capture_token) {
      ts_language_table_entry(self->language, state, self->language->keyword_capture_token, &table_entry);
      if (table_entry.action_count > 0) {
        LOG(
          "switch from_keyword:%s, to_word_token:%s",
          SYM_NAME(lookahead->symbol),
          SYM_NAME(self->language->keyword_capture_token)
        );

        Subtree *mutable_lookahead = ts_subtree_make_mut(&self->tree_pool, lookahead);
        mutable_lookahead->symbol = self->language->keyword_capture_token;
        mutable_lookahead->first_leaf.symbol = self->language->keyword_capture_token;
        lookahead = mutable_lookahead;
        continue;
      }
    }

    // Already in the error state with no usable action: recover directly.
    if (state == ERROR_STATE) {
      ts_parser__recover(self, version, lookahead);
      return;
    }

    // Try decomposing the node on top of the stack into smaller nodes,
    // which may expose a state that accepts the lookahead.
    if (ts_parser__breakdown_top_of_stack(self, version)) {
      continue;
    }

    // Nothing worked: pause this version at the offending leaf symbol so
    // error handling can resume it later, and drop the lookahead.
    LOG("detect_error");
    ts_stack_pause(self->stack, version, lookahead->first_leaf.symbol);
    ts_subtree_release(&self->tree_pool, lookahead);
    return;
  }
}
|
|
|
|
|
|
2018-05-10 15:16:24 -07:00
|
|
|
// Prune and merge the stack's versions after an advance pass:
//  * drop halted versions,
//  * compare every pair of versions and remove/merge/reorder per
//    ts_parser__compare_versions,
//  * cap the total number of versions at MAX_VERSION_COUNT,
//  * resume at most one paused version so error handling can proceed.
// Returns the minimum error cost among the surviving non-error versions
// (UINT_MAX if there is none).
static unsigned ts_parser__condense_stack(TSParser *self) {
  bool made_changes = false;
  unsigned min_error_cost = UINT_MAX;
  for (StackVersion i = 0; i < ts_stack_version_count(self->stack); i++) {
    // Removing version i shifts later versions down, hence the i-- pattern.
    if (ts_stack_is_halted(self->stack, i)) {
      ts_stack_remove_version(self->stack, i);
      i--;
      continue;
    }

    ErrorStatus status_i = ts_parser__version_status(self, i);
    if (!status_i.is_in_error && status_i.cost < min_error_cost) {
      min_error_cost = status_i.cost;
    }

    // Compare version i against every earlier version j.
    for (StackVersion j = 0; j < i; j++) {
      ErrorStatus status_j = ts_parser__version_status(self, j);

      switch (ts_parser__compare_versions(self, status_j, status_i)) {
        case ErrorComparisonTakeLeft:
          // j is strictly better: discard i and stop comparing it.
          made_changes = true;
          ts_stack_remove_version(self->stack, i);
          i--;
          j = i;  // terminate the inner loop
          break;
        case ErrorComparisonPreferLeft:
        case ErrorComparisonNone:
          if (ts_stack_merge(self->stack, j, i)) {
            made_changes = true;
            i--;
            j = i;  // i was absorbed into j; terminate the inner loop
          }
          break;
        case ErrorComparisonPreferRight:
          made_changes = true;
          if (ts_stack_merge(self->stack, j, i)) {
            i--;
            j = i;  // terminate the inner loop
          } else {
            // Can't merge; at least put the preferred version first.
            ts_stack_swap_versions(self->stack, i, j);
          }
          break;
        case ErrorComparisonTakeRight:
          // i is strictly better: discard j; indices above j shift down.
          made_changes = true;
          ts_stack_remove_version(self->stack, j);
          i--;
          j--;
          break;
      }
    }
  }

  // Enforce the hard cap on simultaneous versions.
  while (ts_stack_version_count(self->stack) > MAX_VERSION_COUNT) {
    ts_stack_remove_version(self->stack, MAX_VERSION_COUNT);
    made_changes = true;
  }

  if (ts_stack_version_count(self->stack) > 0) {
    // Resume at most one paused version (the best-ranked one), and only
    // while we haven't already produced MAX_VERSION_COUNT finished trees.
    bool has_unpaused_version = false;
    for (StackVersion i = 0, n = ts_stack_version_count(self->stack); i < n; i++) {
      if (ts_stack_is_paused(self->stack, i)) {
        if (!has_unpaused_version && self->accept_count < MAX_VERSION_COUNT) {
          LOG("resume version:%u", i);
          min_error_cost = ts_stack_error_cost(self->stack, i);
          TSSymbol lookahead_symbol = ts_stack_resume(self->stack, i);
          ts_parser__handle_error(self, i, lookahead_symbol);
          has_unpaused_version = true;
        } else {
          ts_stack_remove_version(self->stack, i);
          i--;
          n--;
        }
      } else {
        has_unpaused_version = true;
      }
    }
  }

  if (made_changes) {
    LOG("condense");
    LOG_STACK();
  }

  return min_error_cost;
}
|
|
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
// Parser - Public
|
|
|
|
|
|
|
|
|
|
// Allocate and initialize a new parser. All fields start zeroed (ts_calloc),
// then the lexer, reduce-action scratch array, subtree pool, stack, and
// reusable-node cursor are set up explicitly. The caller owns the result
// and must release it with ts_parser_delete.
TSParser *ts_parser_new() {
  TSParser *self = ts_calloc(1, sizeof(TSParser));
  ts_lexer_init(&self->lexer);
  array_init(&self->reduce_actions);
  array_reserve(&self->reduce_actions, 4);
  self->tree_pool = ts_subtree_pool_new(32);
  self->stack = ts_stack_new(&self->tree_pool);
  self->finished_tree = NULL;
  self->reusable_node = reusable_node_new();
  self->dot_graph_file = NULL;
  self->halt_on_error = false;
  self->enabled = true;
  // No limit on parse operations by default; see ts_parser_set_operation_limit.
  self->operation_limit = SIZE_MAX;
  // Clear the cached-token slot.
  ts_parser__set_cached_token(self, 0, NULL, NULL);
  return self;
}
|
|
|
|
|
|
|
|
|
|
// Destroy a parser created with ts_parser_new, releasing the stack, the
// reduce-action array, the subtree pool, the reusable-node cursor, and the
// external scanner (torn down via ts_parser_set_language(self, NULL))
// before freeing the parser itself.
void ts_parser_delete(TSParser *self) {
  if (self->stack) ts_stack_delete(self->stack);
  if (self->reduce_actions.contents) array_delete(&self->reduce_actions);
  ts_subtree_pool_delete(&self->tree_pool);
  reusable_node_delete(&self->reusable_node);
  ts_parser_set_language(self, NULL);
  ts_free(self);
}
|
|
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
const TSLanguage *ts_parser_language(const TSParser *self) {
|
|
|
|
|
return self->language;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Assign a language to the parser. Fails (returns false) when the language
// was generated for a different TREE_SITTER_LANGUAGE_VERSION. Passing NULL
// clears the language. Any existing external-scanner payload is destroyed
// before a new one is created for the incoming language.
bool ts_parser_set_language(TSParser *self, const TSLanguage *language) {
  if (language && language->version != TREE_SITTER_LANGUAGE_VERSION) return false;

  // Tear down the previous language's external scanner state, if any.
  if (self->external_scanner_payload && self->language->external_scanner.destroy) {
    self->language->external_scanner.destroy(self->external_scanner_payload);
  }

  // Create fresh scanner state when the new language provides a scanner.
  self->external_scanner_payload =
    (language && language->external_scanner.create)
      ? language->external_scanner.create()
      : NULL;

  self->language = language;
  return true;
}
|
|
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
TSLogger ts_parser_logger(const TSParser *self) {
|
|
|
|
|
return self->lexer.logger;
|
2015-08-16 19:53:34 -07:00
|
|
|
}
|
|
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
// Attach a logger to the parser; log output is routed through the lexer.
void ts_parser_set_logger(TSParser *self, TSLogger logger) {
  self->lexer.logger = logger;
}
|
|
|
|
|
|
2018-05-11 12:43:04 -07:00
|
|
|
// Enable (non-NULL file) or disable (NULL) dot-graph debug output of the
// parse stack and finished trees.
void ts_parser_print_dot_graphs(TSParser *self, FILE *file) {
  self->dot_graph_file = file;
}
|
|
|
|
|
|
|
|
|
|
// Configure whether parsing should stop as soon as any error cost is
// incurred (see the halt_on_error check in ts_parser_resume).
void ts_parser_halt_on_error(TSParser *self, bool should_halt_on_error) {
  self->halt_on_error = should_halt_on_error;
}
|
|
|
|
|
|
2018-05-23 14:30:23 -07:00
|
|
|
bool ts_parser_enabled(const TSParser *self) {
|
2018-05-16 17:42:38 -07:00
|
|
|
return self->enabled;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Enable or disable the parser; disabling causes an in-progress
// ts_parser_resume loop to return NULL.
void ts_parser_set_enabled(TSParser *self, bool enabled) {
  self->enabled = enabled;
}
|
|
|
|
|
|
2018-05-23 14:30:23 -07:00
|
|
|
size_t ts_parser_operation_limit(const TSParser *self) {
|
|
|
|
|
return self->operation_limit;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Cap the number of advance operations a single ts_parser_resume call may
// perform before returning NULL (used to bound parse time).
void ts_parser_set_operation_limit(TSParser *self, size_t limit) {
  self->operation_limit = limit;
}
|
|
|
|
|
|
|
|
|
|
// Run the main parse loop until a tree is finished, the operation limit is
// hit, or the parser is disabled. Each iteration advances every stack
// version until it progresses past the previous high-water position, then
// condenses the stack. Returns a new TSTree (ownership transferred to the
// caller) or NULL when no language/input is set or the parse was cut short.
TSTree *ts_parser_resume(TSParser *self) {
  if (!self->language || !self->lexer.input.read) return NULL;

  uint32_t position = 0, last_position = 0, version_count = 0;
  size_t operation_count = 0;

  do {
    // Note: version_count is re-read each iteration because advancing a
    // version can create or destroy versions.
    for (StackVersion version = 0;
         version_count = ts_stack_version_count(self->stack), version < version_count;
         version++) {
      operation_count++;
      // Bail out (returning NULL) if the caller's budget is exhausted or
      // the parser was disabled from another context.
      if (operation_count > self->operation_limit || !self->enabled) return NULL;

      // Node reuse from the old tree is only sound with a single version.
      bool allow_node_reuse = version_count == 1;
      while (ts_stack_is_active(self->stack, version)) {
        LOG("process version:%d, version_count:%u, state:%d, row:%u, col:%u",
            version, ts_stack_version_count(self->stack),
            ts_stack_state(self->stack, version),
            ts_stack_position(self->stack, version).extent.row,
            ts_stack_position(self->stack, version).extent.column);

        ts_parser__advance(self, version, allow_node_reuse);
        LOG_STACK();

        // Stop advancing this version once it has moved past the furthest
        // position reached so far (or caught up to it, for versions > 0),
        // so the versions stay roughly in lockstep.
        position = ts_stack_position(self->stack, version).bytes;
        if (position > last_position || (version > 0 && position == last_position)) {
          last_position = position;
          break;
        }
      }
    }

    unsigned min_error_cost = ts_parser__condense_stack(self);
    // Done when a finished tree beats every remaining in-progress version.
    if (self->finished_tree && self->finished_tree->error_cost < min_error_cost) {
      break;
    } else if (self->halt_on_error && min_error_cost > 0) {
      ts_parser__halt_parse(self);
      break;
    }
  } while (version_count != 0);

  ts_subtree_balance(self->finished_tree, &self->tree_pool, self->language);
  LOG("done");
  LOG_TREE();

  // Hand the finished tree off to the caller and reset the parser so it can
  // be reused for another parse.
  TSTree *result = ts_tree_new(self->finished_tree, self->language);
  self->finished_tree = NULL;
  ts_stack_clear(self->stack);
  ts_parser__set_cached_token(self, 0, NULL, NULL);
  ts_lexer_set_input(&self->lexer, (TSInput) { NULL, NULL, NULL, 0 });
  return result;
}
|
|
|
|
|
|
2018-05-23 14:30:23 -07:00
|
|
|
// Parse the given input, optionally reusing nodes from a previous tree for
// incremental parsing. Returns NULL if no language has been set.
TSTree *ts_parser_parse(TSParser *self, const TSTree *old_tree, TSInput input) {
  if (!self->language) return NULL;
  const Subtree *previous_root = old_tree ? old_tree->root : NULL;
  ts_parser__start(self, input, previous_root);
  return ts_parser_resume(self);
}
|
|
|
|
|
|
2018-05-10 22:22:37 -07:00
|
|
|
// Convenience wrapper around ts_parser_parse for in-memory strings: wraps
// the buffer in a TSStringInput and delegates to ts_parser_parse.
TSTree *ts_parser_parse_string(TSParser *self, const TSTree *old_tree,
                               const char *string, uint32_t length) {
  TSStringInput string_input;
  ts_string_input_init(&string_input, string, length);
  return ts_parser_parse(self, old_tree, string_input.input);
}
|