clang-format

parent 27bce56ef2
commit 08d50c25ae

9 changed files with 90 additions and 85 deletions
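The hunks below show the effect of running clang-format over the C and C++ sources: statements that overflow the 80-column limit are rewrapped, wrapped arguments are aligned under the opening parenthesis or the broken assignment, and braced initializer lists gain interior spaces (e.g. `{ 0, false }`). The project's actual .clang-format options are not part of this commit view, so the following is only a rough, hypothetical illustration of that style (not code from this repository), assuming a Google/LLVM-derived configuration with 2-space indents and an 80-column limit:

    #include <stddef.h>
    #include <stdio.h>

    typedef struct {
      size_t bytes, chars, rows, columns;
    } Length;

    /* With an 80-column limit, clang-format rewraps the long return statement
     * below and keeps a space inside each braced initializer list. */
    static Length length_sub(Length a, Length b) {
      if (b.bytes > a.bytes)
        return (Length){ 0, 0, 0, 0 };
      return (Length){ a.bytes - b.bytes, a.chars - b.chars, a.rows - b.rows,
                       a.columns - b.columns };
    }

    int main(void) {
      Length total = { 120, 118, 3, 14 };
      Length consumed = { 20, 19, 1, 4 };
      Length remaining = length_sub(total, consumed);
      printf("bytes:%zu chars:%zu rows:%zu columns:%zu\n", remaining.bytes,
             remaining.chars, remaining.rows, remaining.columns);
      return 0;
    }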
@@ -55,7 +55,8 @@ class LexTableBuilder {
       parse_state.lex_state_id = add_lex_state(item_set);
     }

-    LexItemSet error_item_set = build_lex_item_set(parse_table->all_symbols(), true);
+    LexItemSet error_item_set =
+        build_lex_item_set(parse_table->all_symbols(), true);
     populate_lex_state(error_item_set, LexTable::ERROR_STATE_ID);

     return lex_table;
@@ -86,13 +87,15 @@ class LexTableBuilder {
     for (const rule_ptr &rule : rules)
       for (const rule_ptr &separator_rule : separator_rules)
         result.entries.insert(LexItem(
-          symbol, rules::Metadata::build(
+          symbol,
+          rules::Metadata::build(
             rules::Seq::build({
-              rules::Metadata::build(separator_rule, {{rules::START_TOKEN, 1}}),
-              rules::Metadata::build(rule, {{rules::PRECEDENCE, 0}}),
-            }), {
-              {rules::PRECEDENCE, INT_MIN},
-              {rules::IS_ACTIVE, true},
+              rules::Metadata::build(separator_rule,
+                                     { { rules::START_TOKEN, 1 } }),
+              rules::Metadata::build(rule, { { rules::PRECEDENCE, 0 } }),
+            }),
+            {
+              { rules::PRECEDENCE, INT_MIN }, { rules::IS_ACTIVE, true },
             })));
   }

@@ -136,8 +139,9 @@ class LexTableBuilder {
     LexItem::CompletionStatus completion_status = item.completion_status();
     if (completion_status.is_done) {
       auto current_action = lex_table.state(state_id).default_action;
-      auto action = LexAction::Accept(item.lhs, completion_status.precedence.max,
-                                      completion_status.is_string);
+      auto action =
+          LexAction::Accept(item.lhs, completion_status.precedence.max,
+                            completion_status.is_string);
       if (conflict_manager.resolve(action, current_action))
         lex_table.state(state_id).default_action = action;
     }

@@ -73,7 +73,8 @@ class LexItemTransitions : public rules::RuleFn<void> {
       { new_char_set, { new_item_set, new_precedence_range } });
   }

-  map<rules::MetadataKey, int> activate_precedence(map<rules::MetadataKey, int> metadata) {
+  map<rules::MetadataKey, int> activate_precedence(
+      map<rules::MetadataKey, int> metadata) {
     if (metadata.find(rules::PRECEDENCE) != metadata.end())
       metadata.insert({ rules::IS_ACTIVE, 1 });
     return metadata;
@@ -126,7 +127,8 @@ class LexItemTransitions : public rules::RuleFn<void> {
   void apply_to(const rules::Metadata *rule) {
     LexItemSet::TransitionMap content_transitions;
     auto precedence = rule->value_for(rules::PRECEDENCE);
-    bool has_active_precedence = precedence.second && rule->value_for(rules::IS_ACTIVE).second;
+    bool has_active_precedence =
+        precedence.second && rule->value_for(rules::IS_ACTIVE).second;
     if (has_active_precedence)
       precedence_stack->push_back(precedence.first);

@@ -135,7 +137,8 @@ class LexItemTransitions : public rules::RuleFn<void> {
     merge_transition(
       transitions, pair.first,
       transform_item_set(pair.second.first, [this, &rule](rule_ptr item_rule) {
-        return rules::Metadata::build(item_rule, activate_precedence(rule->value));
+        return rules::Metadata::build(item_rule,
+                                      activate_precedence(rule->value));
       }), pair.second.second);

     if (has_active_precedence)

@@ -40,15 +40,16 @@ rule_ptr Metadata::copy() const {
 pair<int, bool> Metadata::value_for(MetadataKey key) const {
   auto entry = value.find(key);
   if (entry == value.end())
-    return {0, false};
+    return { 0, false };
   else
-    return {entry->second, true};
+    return { entry->second, true };
 }

 std::string Metadata::to_string() const {
   auto precedence = value_for(rules::PRECEDENCE);
   if (precedence.second && value_for(rules::IS_ACTIVE).second)
-    return "(metadata prec:" + std::to_string(precedence.first) + " " + rule->to_string() + ")";
+    return "(metadata prec:" + std::to_string(precedence.first) + " " +
+           rule->to_string() + ")";
   else
     return "(metadata " + rule->to_string() + ")";
 }

@@ -59,14 +59,12 @@ static inline TSLength ts_length_sub(TSLength len1, TSLength len2) {
 }

 static inline TSLength ts_length_zero() {
-  return (TSLength){0, 0, 0, 0};
+  return (TSLength){ 0, 0, 0, 0 };
 }

 static inline bool ts_length_eq(TSLength self, TSLength other) {
-  return self.bytes == other.bytes &&
-         self.chars == other.chars &&
-         self.rows == other.rows &&
-         self.columns == other.columns;
+  return self.bytes == other.bytes && self.chars == other.chars &&
+         self.rows == other.rows && self.columns == other.columns;
 }

 #endif

@@ -24,7 +24,8 @@ static void ts_lexer__get_chunk(TSLexer *self) {
   TSInput input = self->input;
   if (!self->chunk ||
       self->current_position.bytes != self->chunk_start + self->chunk_size)
-    input.seek_fn(input.payload, self->current_position.chars, self->current_position.bytes);
+    input.seek_fn(input.payload, self->current_position.chars,
+                  self->current_position.bytes);

   self->chunk_start = self->current_position.bytes;
   self->chunk = input.read_fn(input.payload, &self->chunk_size);
@@ -51,7 +52,9 @@ static void ts_lexer__start(TSLexer *self, TSStateId lex_state) {
 }

 static void ts_lexer__start_token(TSLexer *self) {
-  LOG("start_token chars:%lu, rows:%lu, columns:%lu", self->current_position.chars, self->current_position.rows, self->current_position.columns);
+  LOG("start_token chars:%lu, rows:%lu, columns:%lu",
+      self->current_position.chars, self->current_position.rows,
+      self->current_position.columns);
   self->token_start_position = self->current_position;
 }

@@ -83,8 +86,10 @@ static bool ts_lexer__advance(TSLexer *self, TSStateId state) {
 static TSTree *ts_lexer__accept(TSLexer *self, TSSymbol symbol,
                                 TSSymbolMetadata metadata,
                                 const char *symbol_name) {
-  TSLength size = ts_length_sub(self->current_position, self->token_start_position);
-  TSLength padding = ts_length_sub(self->token_start_position, self->token_end_position);
+  TSLength size =
+      ts_length_sub(self->current_position, self->token_start_position);
+  TSLength padding =
+      ts_length_sub(self->token_start_position, self->token_end_position);
   self->token_end_position = self->current_position;

   if (symbol == ts_builtin_sym_error) {
@@ -135,5 +140,5 @@ void ts_lexer_set_input(TSLexer *self, TSInput input) {
 void ts_lexer_reset(TSLexer *self, TSLength position) {
   if (!ts_length_eq(position, self->current_position))
     ts_lexer__reset(self, position);
-    return;
+  return;
 }

@@ -4,7 +4,7 @@
 #include "runtime/document.h"

 TSNode ts_node_make(const TSTree *tree, size_t chars, size_t byte, size_t row) {
-  return (TSNode){.data = tree, .offset = {chars, byte, row} };
+  return (TSNode){.data = tree, .offset = { chars, byte, row } };
 }

 /*
@@ -36,7 +36,8 @@ static inline bool ts_node__is_relevant(TSNode self, bool include_anonymous) {
   return include_anonymous ? tree->options.visible : tree->options.named;
 }

-static inline size_t ts_node__relevant_child_count(TSNode self, bool include_anonymous) {
+static inline size_t ts_node__relevant_child_count(TSNode self,
+                                                   bool include_anonymous) {
   const TSTree *tree = ts_node__tree(self);
   return include_anonymous ? tree->visible_child_count : tree->named_child_count;
 }
@@ -44,22 +45,18 @@ static inline size_t ts_node__relevant_child_count(TSNode self, bool include_anonymous) {
 static inline TSNode ts_node__direct_parent(TSNode self, size_t *index) {
   const TSTree *tree = ts_node__tree(self);
   *index = tree->context.index;
-  return ts_node_make(
-    tree->context.parent,
-    ts_node__offset_char(self) - tree->context.offset.chars,
-    ts_node__offset_byte(self) - tree->context.offset.bytes,
-    ts_node__offset_row(self) - tree->context.offset.rows
-  );
+  return ts_node_make(tree->context.parent,
+                      ts_node__offset_char(self) - tree->context.offset.chars,
+                      ts_node__offset_byte(self) - tree->context.offset.bytes,
+                      ts_node__offset_row(self) - tree->context.offset.rows);
 }

 static inline TSNode ts_node__direct_child(TSNode self, size_t i) {
   const TSTree *child_tree = ts_node__tree(self)->children[i];
   return ts_node_make(
-    child_tree,
-    ts_node__offset_char(self) + child_tree->context.offset.chars,
+    child_tree, ts_node__offset_char(self) + child_tree->context.offset.chars,
     ts_node__offset_byte(self) + child_tree->context.offset.bytes,
-    ts_node__offset_row(self) + child_tree->context.offset.rows
-  );
+    ts_node__offset_row(self) + child_tree->context.offset.rows);
 }

 static inline TSNode ts_node__child(TSNode self, size_t child_index,
@@ -108,7 +105,8 @@ static inline TSNode ts_node__prev_sibling(TSNode self, bool include_anonymous)
       TSNode child = ts_node__direct_child(result, i);
       if (ts_node__is_relevant(child, include_anonymous))
         return child;
-      size_t grandchild_count = ts_node__relevant_child_count(child, include_anonymous);
+      size_t grandchild_count =
+          ts_node__relevant_child_count(child, include_anonymous);
       if (grandchild_count > 0)
         return ts_node__child(child, grandchild_count - 1, include_anonymous);
     }
@@ -130,7 +128,8 @@ static inline TSNode ts_node__next_sibling(TSNode self, bool include_anonymous)
       TSNode child = ts_node__direct_child(result, i);
       if (ts_node__is_relevant(child, include_anonymous))
         return child;
-      size_t grandchild_count = ts_node__relevant_child_count(child, include_anonymous);
+      size_t grandchild_count =
+          ts_node__relevant_child_count(child, include_anonymous);
       if (grandchild_count > 0)
         return ts_node__child(child, 0, include_anonymous);
     }
@@ -188,18 +187,15 @@ size_t ts_node_end_byte(TSNode self) {

 TSPoint ts_node_start_point(TSNode self) {
   const TSTree *tree = ts_node__tree(self);
-  return (TSPoint){
-    ts_node__offset_row(self) + tree->padding.rows,
-    ts_tree_start_column(tree)
-  };
+  return (TSPoint){ ts_node__offset_row(self) + tree->padding.rows,
+                    ts_tree_start_column(tree) };
 }

 TSPoint ts_node_end_point(TSNode self) {
   const TSTree *tree = ts_node__tree(self);
-  return (TSPoint){
-    ts_node__offset_row(self) + tree->padding.rows + tree->size.rows,
-    ts_tree_end_column(tree)
-  };
+  return (TSPoint){ ts_node__offset_row(self) + tree->padding.rows +
+                        tree->size.rows,
+                    ts_tree_end_column(tree) };
 }

 TSSymbol ts_node_symbol(TSNode self) {
@@ -218,8 +214,7 @@ const char *ts_node_string(TSNode self, const TSDocument *document) {
 bool ts_node_eq(TSNode self, TSNode other) {
   return ts_tree_eq(ts_node__tree(self), ts_node__tree(other)) &&
          self.offset[0] == other.offset[0] &&
-         self.offset[1] == other.offset[1] &&
-         self.offset[2] == other.offset[2];
+         self.offset[1] == other.offset[1] && self.offset[2] == other.offset[2];
 }

 bool ts_node_is_named(TSNode self) {

@@ -162,7 +162,7 @@ static bool ts_parser__shift_extra(TSParser *self, int head, TSStateId state,
 }

 static bool ts_parser__reduce(TSParser *self, int head, TSSymbol symbol,
-    int child_count, bool extra, bool count_extra) {
+                              int child_count, bool extra, bool count_extra) {
   vector_clear(&self->reduce_parents);
   const TSSymbolMetadata *all_metadata = self->language->symbol_metadata;
   TSSymbolMetadata metadata = all_metadata[symbol];
@@ -202,8 +202,9 @@ static bool ts_parser__reduce(TSParser *self, int head, TSSymbol symbol,
       break;
     }

-    parent = ts_tree_make_node(symbol, pop_result->tree_count - trailing_extra_count,
-                               pop_result->trees, metadata);
+    parent =
+        ts_tree_make_node(symbol, pop_result->tree_count - trailing_extra_count,
+                          pop_result->trees, metadata);
   }
   vector_push(&self->reduce_parents, &parent);

@@ -232,7 +233,8 @@ static bool ts_parser__reduce(TSParser *self, int head, TSSymbol symbol,
     }

     LOG("split_during_reduce new_head:%d", new_head);
-    LookaheadState *lookahead_state = vector_get(&self->lookahead_states, head);
+    LookaheadState *lookahead_state =
+        vector_get(&self->lookahead_states, head);
     vector_push(&self->lookahead_states, lookahead_state);
   }

@@ -271,7 +273,8 @@ static bool ts_parser__reduce(TSParser *self, int head, TSSymbol symbol,
     if (trailing_extra_count > 0) {
       for (size_t j = 0; j < trailing_extra_count; j++) {
         size_t index = pop_result->tree_count - trailing_extra_count + j;
-        if (ts_stack_push(self->stack, new_head, state, pop_result->trees[index])) {
+        if (ts_stack_push(self->stack, new_head, state,
+                          pop_result->trees[index])) {
           vector_erase(&self->lookahead_states, new_head);
           removed_heads++;
           continue;
@@ -297,12 +300,13 @@ static bool ts_parser__reduce_fragile(TSParser *self, int head, TSSymbol symbol,

 static void ts_parser__reduce_error(TSParser *self, int head,
                                     size_t child_count, TSTree *lookahead) {
-  bool result = ts_parser__reduce(self, head, ts_builtin_sym_error,
-                                  child_count, false, true);
+  bool result = ts_parser__reduce(self, head, ts_builtin_sym_error, child_count,
+                                  false, true);
   if (result) {
     TSTree **parent = vector_back(&self->reduce_parents);
     StackEntry *stack_entry = ts_stack_head(self->stack, head);
-    stack_entry->position = ts_length_add(stack_entry->position, lookahead->padding);
+    stack_entry->position =
+        ts_length_add(stack_entry->position, lookahead->padding);
     (*parent)->size = ts_length_add((*parent)->size, lookahead->padding);
     lookahead->padding = ts_length_zero();
     ts_tree_set_fragile_left(*parent);
@@ -377,8 +381,7 @@ static void ts_parser__start(TSParser *self, TSInput input,
   ts_stack_clear(self->stack);

   LookaheadState lookahead_state = {
-    .reusable_subtree = previous_tree,
-    .reusable_subtree_pos = 0,
+    .reusable_subtree = previous_tree, .reusable_subtree_pos = 0,
   };
   vector_clear(&self->lookahead_states);
   vector_push(&self->lookahead_states, &lookahead_state);
@@ -393,20 +396,18 @@ static TSTree *ts_parser__finish(TSParser *self) {
     TSTree *root = pop_result->trees[i];
     size_t leading_extra_count = i;
     size_t trailing_extra_count = pop_result->tree_count - 1 - i;
-    TSTree **new_children = malloc((root->child_count + leading_extra_count + trailing_extra_count) * sizeof(TSTree *));
-    memcpy(
-      new_children,
-      pop_result->trees,
-      leading_extra_count * sizeof(TSTree *));
-    memcpy(
-      new_children + leading_extra_count,
-      root->children,
-      root->child_count * sizeof(TSTree *));
-    memcpy(
-      new_children + leading_extra_count + root->child_count,
-      pop_result->trees + leading_extra_count + 1,
-      trailing_extra_count * sizeof(TSTree *));
-    size_t new_count = root->child_count + leading_extra_count + trailing_extra_count;
+    TSTree **new_children =
+        malloc((root->child_count + leading_extra_count + trailing_extra_count) *
+               sizeof(TSTree *));
+    memcpy(new_children, pop_result->trees,
+           leading_extra_count * sizeof(TSTree *));
+    memcpy(new_children + leading_extra_count, root->children,
+           root->child_count * sizeof(TSTree *));
+    memcpy(new_children + leading_extra_count + root->child_count,
+           pop_result->trees + leading_extra_count + 1,
+           trailing_extra_count * sizeof(TSTree *));
+    size_t new_count =
+        root->child_count + leading_extra_count + trailing_extra_count;
     ts_tree_set_children(root, new_count, new_children);
     ts_tree_assign_parents(root);
     return root;
@@ -485,15 +486,15 @@ static ConsumeResult ts_parser__consume_lookahead(TSParser *self, int head,

       case TSParseActionTypeReduceExtra:
         LOG("reduce_extra sym:%s", SYM_NAME(action.data.symbol));
-        ts_parser__reduce(self, current_head, action.data.symbol, 1,
-                          true, false);
+        ts_parser__reduce(self, current_head, action.data.symbol, 1, true,
+                          false);
         break;

       case TSParseActionTypeReduceFragile:
         LOG("reduce_fragile sym:%s, count:%u", SYM_NAME(action.data.symbol),
             action.data.child_count);
         if (!ts_parser__reduce_fragile(self, current_head, action.data.symbol,
-            action.data.child_count))
+                                       action.data.child_count))
           if (!next_action)
             return ConsumeResultRemoved;
         break;

@@ -132,10 +132,7 @@ static StackNode *stack_node_new(StackNode *next, TSStateId state, TSTree *tree)
     .ref_count = 1,
     .successor_count = 1,
     .successors = { next, NULL, NULL },
-    .entry =
-      {
-        .state = state, .tree = tree, .position = position
-      },
+    .entry = {.state = state, .tree = tree, .position = position },
   };
   return self;
 }
@@ -203,7 +200,8 @@ static bool ts_stack__merge_head(Stack *self, int head_index, TSStateId state,
                                  TSTree *tree, TSLength position) {
   for (int i = 0; i < head_index; i++) {
     StackNode *head = self->heads[i];
-    if (head->entry.state == state && ts_length_eq(head->entry.position, position)) {
+    if (head->entry.state == state &&
+        ts_length_eq(head->entry.position, position)) {
      if (head->entry.tree != tree) {
        head->entry.tree = self->tree_selection_callback.callback(
          self->tree_selection_callback.data, head->entry.tree, tree);

@@ -33,10 +33,10 @@ TSTree *ts_tree_make_leaf(TSSymbol sym, TSLength padding, TSLength size,
 }

 TSTree *ts_tree_make_error(TSLength size, TSLength padding, char lookahead_char) {
-  TSTree *result =
-    ts_tree_make_leaf(ts_builtin_sym_error, padding, size, (TSSymbolMetadata){
-      .visible = true, .named = true,
-    });
+  TSTree *result = ts_tree_make_leaf(ts_builtin_sym_error, padding, size,
+                                     (TSSymbolMetadata){
+                                       .visible = true, .named = true,
+                                     });
   result->lookahead_char = lookahead_char;
   return result;
 }