lib: refactor polymorphic Array(T) type

This adds a monomorphic type, _ArrayMeta, to hold the size and capacity
information of Array(T) objects.  This lets us take and pass pointers to
_ArrayMeta objects regardless of the type parameter T of the Array(T) of which
they are a part.

Step 1/N for removing strict-aliasing violations from lib/src/array.h.
This commit is contained in:
Nathaniel Wesley Filardo 2026-01-04 01:44:28 +00:00
parent dd60d5cff0
commit 17b06db99c
10 changed files with 295 additions and 292 deletions

View file

@ -21,34 +21,38 @@ extern "C" {
#pragma GCC diagnostic ignored "-Wunused-variable"
#endif
#define Array(T) \
struct { \
T *contents; \
uint32_t size; \
uint32_t capacity; \
typedef struct {
uint32_t size;
uint32_t capacity;
} _ArrayMeta;
#define Array(T) \
struct { \
T *contents; \
_ArrayMeta meta; \
}
/// Initialize an array.
#define array_init(self) \
((self)->size = 0, (self)->capacity = 0, (self)->contents = NULL)
((self)->meta.size = 0, (self)->meta.capacity = 0, (self)->contents = NULL)
/// Create an empty array.
#define array_new() \
{ NULL, 0, 0 }
{ NULL, { 0, 0 } }
/// Get a pointer to the element at a given `index` in the array.
#define array_get(self, _index) \
(ts_assert((uint32_t)(_index) < (self)->size), &(self)->contents[_index])
(ts_assert((uint32_t)(_index) < (self)->meta.size), &(self)->contents[_index])
/// Get a pointer to the first element in the array.
#define array_front(self) array_get(self, 0)
/// Get a pointer to the last element in the array.
#define array_back(self) array_get(self, (self)->size - 1)
#define array_back(self) array_get(self, (self)->meta.size - 1)
/// Clear the array, setting its size to zero. Note that this does not free any
/// memory allocated for the array's contents.
#define array_clear(self) ((self)->size = 0)
#define array_clear(self) ((self)->meta.size = 0)
/// Reserve `new_capacity` elements of space in the array. If `new_capacity` is
/// less than the array's current capacity, this function has no effect.
@ -62,7 +66,7 @@ extern "C" {
/// Push a new `element` onto the end of the array.
#define array_push(self, element) \
(_array__grow((Array *)(self), 1, array_elem_size(self)), \
(self)->contents[(self)->size++] = (element))
(self)->contents[(self)->meta.size++] = (element))
/// Increase the array's size by `count` elements.
/// New elements are zero-initialized.
@ -70,19 +74,19 @@ extern "C" {
do { \
if ((count) == 0) break; \
_array__grow((Array *)(self), count, array_elem_size(self)); \
memset((self)->contents + (self)->size, 0, (count) * array_elem_size(self)); \
(self)->size += (count); \
memset((self)->contents + (self)->meta.size, 0, (count) * array_elem_size(self)); \
(self)->meta.size += (count); \
} while (0)
/// Append all elements from one array to the end of another.
#define array_push_all(self, other) \
array_extend((self), (other)->size, (other)->contents)
array_extend((self), (other)->meta.size, (other)->contents)
/// Append `count` elements to the end of the array, reading their values from the
/// `contents` pointer.
#define array_extend(self, count, contents) \
_array__splice( \
(Array *)(self), array_elem_size(self), (self)->size, \
(Array *)(self), array_elem_size(self), (self)->meta.size, \
0, count, contents \
)
@ -104,7 +108,7 @@ extern "C" {
_array__erase((Array *)(self), array_elem_size(self), _index)
/// Pop the last element off the array, returning the element by value.
#define array_pop(self) ((self)->contents[--(self)->size])
#define array_pop(self) ((self)->contents[--(self)->meta.size])
/// Assign the contents of one array to another, reallocating if necessary.
#define array_assign(self, other) \
@ -164,38 +168,38 @@ static inline void _array__delete(Array *self) {
if (self->contents) {
ts_free(self->contents);
self->contents = NULL;
self->size = 0;
self->capacity = 0;
self->meta.size = 0;
self->meta.capacity = 0;
}
}
/// This is not what you're looking for, see `array_erase`.
static inline void _array__erase(Array *self, size_t element_size,
uint32_t index) {
ts_assert(index < self->size);
ts_assert(index < self->meta.size);
char *contents = (char *)self->contents;
memmove(contents + index * element_size, contents + (index + 1) * element_size,
(self->size - index - 1) * element_size);
self->size--;
(self->meta.size - index - 1) * element_size);
self->meta.size--;
}
/// This is not what you're looking for, see `array_reserve`.
static inline void _array__reserve(Array *self, size_t element_size, uint32_t new_capacity) {
if (new_capacity > self->capacity) {
if (new_capacity > self->meta.capacity) {
if (self->contents) {
self->contents = ts_realloc(self->contents, new_capacity * element_size);
} else {
self->contents = ts_malloc(new_capacity * element_size);
}
self->capacity = new_capacity;
self->meta.capacity = new_capacity;
}
}
/// This is not what you're looking for, see `array_assign`.
static inline void _array__assign(Array *self, const Array *other, size_t element_size) {
_array__reserve(self, element_size, other->size);
self->size = other->size;
memcpy(self->contents, other->contents, self->size * element_size);
_array__reserve(self, element_size, other->meta.size);
self->meta.size = other->meta.size;
memcpy(self->contents, other->contents, self->meta.size * element_size);
}
/// This is not what you're looking for, see `array_swap`.
@ -207,9 +211,9 @@ static inline void _array__swap(Array *self, Array *other) {
/// This is not what you're looking for, see `array_push` or `array_grow_by`.
static inline void _array__grow(Array *self, uint32_t count, size_t element_size) {
uint32_t new_size = self->size + count;
if (new_size > self->capacity) {
uint32_t new_capacity = self->capacity * 2;
uint32_t new_size = self->meta.size + count;
if (new_size > self->meta.capacity) {
uint32_t new_capacity = self->meta.capacity * 2;
if (new_capacity < 8) new_capacity = 8;
if (new_capacity < new_size) new_capacity = new_size;
_array__reserve(self, element_size, new_capacity);
@ -220,19 +224,19 @@ static inline void _array__grow(Array *self, uint32_t count, size_t element_size
static inline void _array__splice(Array *self, size_t element_size,
uint32_t index, uint32_t old_count,
uint32_t new_count, const void *elements) {
uint32_t new_size = self->size + new_count - old_count;
uint32_t new_size = self->meta.size + new_count - old_count;
uint32_t old_end = index + old_count;
uint32_t new_end = index + new_count;
ts_assert(old_end <= self->size);
ts_assert(old_end <= self->meta.size);
_array__reserve(self, element_size, new_size);
char *contents = (char *)self->contents;
if (self->size > old_end) {
if (self->meta.size > old_end) {
memmove(
contents + new_end * element_size,
contents + old_end * element_size,
(self->size - old_end) * element_size
(self->meta.size - old_end) * element_size
);
}
if (new_count > 0) {
@ -250,7 +254,7 @@ static inline void _array__splice(Array *self, size_t element_size,
);
}
}
self->size += new_count - old_count;
self->meta.size += new_count - old_count;
}
/// A binary search routine, based on Rust's `std::slice::binary_search_by`.
@ -259,7 +263,7 @@ static inline void _array__splice(Array *self, size_t element_size,
do { \
*(_index) = start; \
*(_exists) = false; \
uint32_t size = (self)->size - *(_index); \
uint32_t size = (self)->meta.size - *(_index); \
if (size == 0) break; \
int comparison; \
while (size > 1) { \

View file

@ -12,7 +12,7 @@ static void ts_range_array_add(
Length start,
Length end
) {
if (self->size > 0) {
if (self->meta.size > 0) {
TSRange *last_range = array_back(self);
if (start.bytes <= last_range->end_byte) {
last_range->end_byte = end.bytes;
@ -33,7 +33,7 @@ bool ts_range_array_intersects(
uint32_t start_byte,
uint32_t end_byte
) {
for (unsigned i = start_index; i < self->size; i++) {
for (unsigned i = start_index; i < self->meta.size; i++) {
TSRange *range = array_get(self, i);
if (range->end_byte > start_byte) {
if (range->start_byte >= end_byte) break;
@ -167,7 +167,7 @@ static Iterator iterator_new(
}
static bool iterator_done(Iterator *self) {
return self->cursor.stack.size == 0;
return self->cursor.stack.meta.size == 0;
}
static Length iterator_start_position(Iterator *self) {
@ -192,8 +192,8 @@ static Length iterator_end_position(Iterator *self) {
static bool iterator_tree_is_visible(const Iterator *self) {
TreeCursorEntry entry = *array_back(&self->cursor.stack);
if (ts_subtree_visible(*entry.subtree)) return true;
if (self->cursor.stack.size > 1) {
Subtree parent = *array_get(&self->cursor.stack, self->cursor.stack.size - 2)->subtree;
if (self->cursor.stack.meta.size > 1) {
Subtree parent = *array_get(&self->cursor.stack, self->cursor.stack.meta.size - 2)->subtree;
return ts_language_alias_at(
self->language,
parent.ptr->production_id,
@ -209,7 +209,7 @@ static void iterator_get_visible_state(
TSSymbol *alias_symbol,
uint32_t *start_byte
) {
uint32_t i = self->cursor.stack.size - 1;
uint32_t i = self->cursor.stack.meta.size - 1;
if (self->in_padding) {
if (i == 0) return;
@ -240,7 +240,7 @@ static void iterator_ascend(Iterator *self) {
if (iterator_done(self)) return;
if (iterator_tree_is_visible(self) && !self->in_padding) self->visible_depth--;
if (array_back(&self->cursor.stack)->child_index > 0) self->in_padding = false;
self->cursor.stack.size--;
self->cursor.stack.meta.size--;
}
static bool iterator_descend(Iterator *self, uint32_t goal_position) {
@ -530,7 +530,7 @@ unsigned ts_subtree_get_changed_ranges(
// Keep track of the current position in the included range differences
// array in order to avoid scanning the entire array on each iteration.
while (included_range_difference_index < included_range_differences->size) {
while (included_range_difference_index < included_range_differences->meta.size) {
const TSRange *range = array_get(included_range_differences,
included_range_difference_index
);
@ -553,5 +553,5 @@ unsigned ts_subtree_get_changed_ranges(
*cursor1 = old_iter.cursor;
*cursor2 = new_iter.cursor;
*ranges = results.contents;
return results.size;
return results.meta.size;
}

View file

@ -182,11 +182,11 @@ static bool ts_parser__breakdown_top_of_stack(
do {
StackSliceArray pop = ts_stack_pop_pending(self->stack, version);
if (!pop.size) break;
if (!pop.meta.size) break;
did_break_down = true;
pending = false;
for (uint32_t i = 0; i < pop.size; i++) {
for (uint32_t i = 0; i < pop.meta.size; i++) {
StackSlice slice = *array_get(&pop, i);
TSStateId state = ts_stack_state(self->stack, slice.version);
Subtree parent = *array_front(&slice.subtrees);
@ -205,7 +205,7 @@ static bool ts_parser__breakdown_top_of_stack(
ts_stack_push(self->stack, slice.version, child, pending, state);
}
for (uint32_t j = 1; j < slice.subtrees.size; j++) {
for (uint32_t j = 1; j < slice.subtrees.meta.size; j++) {
Subtree tree = *array_get(&slice.subtrees, j);
ts_stack_push(self->stack, slice.version, tree, false, state);
}
@ -948,7 +948,7 @@ static StackVersion ts_parser__reduce(
StackSliceArray pop = ts_stack_pop_count(self->stack, version, count);
uint32_t removed_version_count = 0;
uint32_t halted_version_count = ts_stack_halted_version_count(self->stack);
for (uint32_t i = 0; i < pop.size; i++) {
for (uint32_t i = 0; i < pop.meta.size; i++) {
StackSlice slice = *array_get(&pop, i);
StackVersion slice_version = slice.version - removed_version_count;
@ -960,7 +960,7 @@ static StackVersion ts_parser__reduce(
ts_stack_remove_version(self->stack, slice_version);
ts_subtree_array_delete(&self->tree_pool, &slice.subtrees);
removed_version_count++;
while (i + 1 < pop.size) {
while (i + 1 < pop.meta.size) {
LOG("aborting reduce with too many versions")
StackSlice next_slice = *array_get(&pop, i + 1);
if (next_slice.version != slice.version) break;
@ -984,7 +984,7 @@ static StackVersion ts_parser__reduce(
// into one, because they all diverged from a common state. In that case,
// choose one of the arrays of trees to be the parent node's children, and
// delete the rest of the tree arrays.
while (i + 1 < pop.size) {
while (i + 1 < pop.meta.size) {
StackSlice next_slice = *array_get(&pop, i + 1);
if (next_slice.version != slice.version) break;
i++;
@ -1014,7 +1014,7 @@ static StackVersion ts_parser__reduce(
if (end_of_non_terminal_extra && next_state == state) {
parent.ptr->extra = true;
}
if (is_fragile || pop.size > 1 || initial_version_count > 1) {
if (is_fragile || pop.meta.size > 1 || initial_version_count > 1) {
parent.ptr->fragile_left = true;
parent.ptr->fragile_right = true;
parent.ptr->parse_state = TS_TREE_STATE_NONE;
@ -1026,7 +1026,7 @@ static StackVersion ts_parser__reduce(
// Push the parent node onto the stack, along with any extra tokens that
// were previously on top of the stack.
ts_stack_push(self->stack, slice_version, ts_subtree_from_mut(parent), false, next_state);
for (uint32_t j = 0; j < self->trailing_extras.size; j++) {
for (uint32_t j = 0; j < self->trailing_extras.meta.size; j++) {
ts_stack_push(self->stack, slice_version, *array_get(&self->trailing_extras, j), false, next_state);
}
@ -1054,11 +1054,11 @@ static void ts_parser__accept(
ts_stack_push(self->stack, version, lookahead, false, 1);
StackSliceArray pop = ts_stack_pop_all(self->stack, version);
for (uint32_t i = 0; i < pop.size; i++) {
for (uint32_t i = 0; i < pop.meta.size; i++) {
SubtreeArray trees = array_get(&pop, i)->subtrees;
Subtree root = NULL_SUBTREE;
for (uint32_t j = trees.size - 1; j + 1 > 0; j--) {
for (uint32_t j = trees.meta.size - 1; j + 1 > 0; j--) {
Subtree tree = *array_get(&trees, j);
if (!ts_subtree_extra(tree)) {
ts_assert(!tree.data.is_inline);
@ -1159,7 +1159,7 @@ static bool ts_parser__do_all_potential_reductions(
}
StackVersion reduction_version = STACK_VERSION_NONE;
for (uint32_t j = 0; j < self->reduce_actions.size; j++) {
for (uint32_t j = 0; j < self->reduce_actions.meta.size; j++) {
ReduceAction action = *array_get(&self->reduce_actions, j);
reduction_version = ts_parser__reduce(
@ -1197,7 +1197,7 @@ static bool ts_parser__recover_to_state(
StackSliceArray pop = ts_stack_pop_count(self->stack, version, depth);
StackVersion previous_version = STACK_VERSION_NONE;
for (unsigned i = 0; i < pop.size; i++) {
for (unsigned i = 0; i < pop.meta.size; i++) {
StackSlice slice = *array_get(&pop, i);
if (slice.version == previous_version) {
@ -1214,8 +1214,8 @@ static bool ts_parser__recover_to_state(
}
SubtreeArray error_trees = ts_stack_pop_error(self->stack, slice.version);
if (error_trees.size > 0) {
ts_assert(error_trees.size == 1);
if (error_trees.meta.size > 0) {
ts_assert(error_trees.meta.size == 1);
Subtree error_tree = *array_get(&error_trees, 0);
uint32_t error_child_count = ts_subtree_child_count(error_tree);
if (error_child_count > 0) {
@ -1229,14 +1229,14 @@ static bool ts_parser__recover_to_state(
ts_subtree_array_remove_trailing_extras(&slice.subtrees, &self->trailing_extras);
if (slice.subtrees.size > 0) {
if (slice.subtrees.meta.size > 0) {
Subtree error = ts_subtree_new_error_node(&slice.subtrees, true, self->language);
ts_stack_push(self->stack, slice.version, error, false, goal_state);
} else {
array_delete(&slice.subtrees);
}
for (unsigned j = 0; j < self->trailing_extras.size; j++) {
for (unsigned j = 0; j < self->trailing_extras.meta.size; j++) {
Subtree tree = *array_get(&self->trailing_extras, j);
ts_stack_push(self->stack, slice.version, tree, false, goal_state);
}
@ -1272,7 +1272,7 @@ static void ts_parser__recover(
// of the previous parse states and their depths. Look at each state in the summary, to see
// if the current lookahead token would be valid in that state.
if (summary && !ts_subtree_is_error(lookahead)) {
for (unsigned i = 0; i < summary->size; i++) {
for (unsigned i = 0; i < summary->meta.size; i++) {
StackSummaryEntry entry = *array_get(summary, i);
if (entry.state == ERROR_STATE) continue;
@ -1398,8 +1398,8 @@ static void ts_parser__recover(
// See https://github.com/atom/atom/issues/18450#issuecomment-439579778
// If multiple stack versions have merged at this point, just pick one of the errors
// arbitrarily and discard the rest.
if (pop.size > 1) {
for (unsigned i = 1; i < pop.size; i++) {
if (pop.meta.size > 1) {
for (unsigned i = 1; i < pop.meta.size; i++) {
ts_subtree_array_delete(&self->tree_pool, &array_get(&pop, i)->subtrees);
}
while (ts_stack_version_count(self->stack) > array_get(&pop, 0)->version + 1) {
@ -1877,13 +1877,13 @@ static bool ts_parser__balance_subtree(TSParser *self) {
}
}
while (self->tree_pool.tree_stack.size > 0) {
while (self->tree_pool.tree_stack.meta.size > 0) {
if (!ts_parser__check_progress(self, NULL, NULL, 1)) {
return false;
}
MutableSubtree tree = *array_get(&self->tree_pool.tree_stack,
self->tree_pool.tree_stack.size - 1
self->tree_pool.tree_stack.meta.size - 1
);
if (tree.ptr->repeat_depth > 0) {
@ -2108,7 +2108,7 @@ TSTree *ts_parser_parse(
reusable_node_reset(&self->reusable_node, old_tree->root);
LOG("parse_after_edit");
LOG_TREE(self->old_tree);
for (unsigned i = 0; i < self->included_range_differences.size; i++) {
for (unsigned i = 0; i < self->included_range_differences.meta.size; i++) {
TSRange *range = array_get(&self->included_range_differences, i);
LOG("different_included_range %u - %u", range->start_byte, range->end_byte);
}
@ -2165,7 +2165,7 @@ TSTree *ts_parser_parse(
break;
}
while (self->included_range_difference_index < self->included_range_differences.size) {
while (self->included_range_difference_index < self->included_range_differences.meta.size) {
TSRange *range = array_get(&self->included_range_differences, self->included_range_difference_index);
if (range->end_byte <= position) {
self->included_range_difference_index++;

View file

@ -428,41 +428,41 @@ static CaptureListPool capture_list_pool_new(void) {
}
static void capture_list_pool_reset(CaptureListPool *self) {
for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) {
for (uint16_t i = 0; i < (uint16_t)self->list.meta.size; i++) {
// This invalid size means that the list is not in use.
array_get(&self->list, i)->size = UINT32_MAX;
array_get(&self->list, i)->meta.size = UINT32_MAX;
}
self->free_capture_list_count = self->list.size;
self->free_capture_list_count = self->list.meta.size;
}
static void capture_list_pool_delete(CaptureListPool *self) {
for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) {
for (uint16_t i = 0; i < (uint16_t)self->list.meta.size; i++) {
array_delete(array_get(&self->list, i));
}
array_delete(&self->list);
}
static const CaptureList *capture_list_pool_get(const CaptureListPool *self, uint16_t id) {
if (id >= self->list.size) return &self->empty_list;
if (id >= self->list.meta.size) return &self->empty_list;
return array_get(&self->list, id);
}
static CaptureList *capture_list_pool_get_mut(CaptureListPool *self, uint16_t id) {
ts_assert(id < self->list.size);
ts_assert(id < self->list.meta.size);
return array_get(&self->list, id);
}
static bool capture_list_pool_is_empty(const CaptureListPool *self) {
// The capture list pool is empty if all allocated lists are in use, and we
// have reached the maximum allowed number of allocated lists.
return self->free_capture_list_count == 0 && self->list.size >= self->max_capture_list_count;
return self->free_capture_list_count == 0 && self->list.meta.size >= self->max_capture_list_count;
}
static uint16_t capture_list_pool_acquire(CaptureListPool *self) {
// First see if any already allocated capture list is currently unused.
if (self->free_capture_list_count > 0) {
for (uint16_t i = 0; i < (uint16_t)self->list.size; i++) {
if (array_get(&self->list, i)->size == UINT32_MAX) {
for (uint16_t i = 0; i < (uint16_t)self->list.meta.size; i++) {
if (array_get(&self->list, i)->meta.size == UINT32_MAX) {
array_clear(array_get(&self->list, i));
self->free_capture_list_count--;
return i;
@ -472,7 +472,7 @@ static uint16_t capture_list_pool_acquire(CaptureListPool *self) {
// Otherwise allocate and initialize a new capture list, as long as that
// doesn't put us over the requested maximum.
uint32_t i = self->list.size;
uint32_t i = self->list.meta.size;
if (i >= self->max_capture_list_count) {
return NONE;
}
@ -483,8 +483,8 @@ static uint16_t capture_list_pool_acquire(CaptureListPool *self) {
}
static void capture_list_pool_release(CaptureListPool *self, uint16_t id) {
if (id >= self->list.size) return;
array_get(&self->list, id)->size = UINT32_MAX;
if (id >= self->list.meta.size) return;
array_get(&self->list, id)->meta.size = UINT32_MAX;
self->free_capture_list_count++;
}
@ -684,7 +684,7 @@ static TSQuantifier capture_quantifier_for_id(
const CaptureQuantifiers *self,
uint16_t id
) {
return (self->size <= id) ? TSQuantifierZero : (TSQuantifier) *array_get(self, id);
return (self->meta.size <= id) ? TSQuantifierZero : (TSQuantifier) *array_get(self, id);
}
// Add the given quantifier to the current value for id
@ -693,8 +693,8 @@ static void capture_quantifiers_add_for_id(
uint16_t id,
TSQuantifier quantifier
) {
if (self->size <= id) {
array_grow_by(self, id + 1 - self->size);
if (self->meta.size <= id) {
array_grow_by(self, id + 1 - self->meta.size);
}
uint8_t *own_quantifier = array_get(self, id);
*own_quantifier = (uint8_t) quantifier_add((TSQuantifier) *own_quantifier, quantifier);
@ -705,10 +705,10 @@ static void capture_quantifiers_add_all(
CaptureQuantifiers *self,
CaptureQuantifiers *quantifiers
) {
if (self->size < quantifiers->size) {
array_grow_by(self, quantifiers->size - self->size);
if (self->meta.size < quantifiers->meta.size) {
array_grow_by(self, quantifiers->meta.size - self->meta.size);
}
for (uint16_t id = 0; id < (uint16_t)quantifiers->size; id++) {
for (uint16_t id = 0; id < (uint16_t)quantifiers->meta.size; id++) {
uint8_t *quantifier = array_get(quantifiers, id);
uint8_t *own_quantifier = array_get(self, id);
*own_quantifier = (uint8_t) quantifier_add((TSQuantifier) *own_quantifier, (TSQuantifier) *quantifier);
@ -720,7 +720,7 @@ static void capture_quantifiers_mul(
CaptureQuantifiers *self,
TSQuantifier quantifier
) {
for (uint16_t id = 0; id < (uint16_t)self->size; id++) {
for (uint16_t id = 0; id < (uint16_t)self->meta.size; id++) {
uint8_t *own_quantifier = array_get(self, id);
*own_quantifier = (uint8_t) quantifier_mul((TSQuantifier) *own_quantifier, quantifier);
}
@ -731,15 +731,15 @@ static void capture_quantifiers_join_all(
CaptureQuantifiers *self,
CaptureQuantifiers *quantifiers
) {
if (self->size < quantifiers->size) {
array_grow_by(self, quantifiers->size - self->size);
if (self->meta.size < quantifiers->meta.size) {
array_grow_by(self, quantifiers->meta.size - self->meta.size);
}
for (uint32_t id = 0; id < quantifiers->size; id++) {
for (uint32_t id = 0; id < quantifiers->meta.size; id++) {
uint8_t *quantifier = array_get(quantifiers, id);
uint8_t *own_quantifier = array_get(self, id);
*own_quantifier = (uint8_t) quantifier_join((TSQuantifier) *own_quantifier, (TSQuantifier) *quantifier);
}
for (uint32_t id = quantifiers->size; id < self->size; id++) {
for (uint32_t id = quantifiers->meta.size; id < self->meta.size; id++) {
uint8_t *own_quantifier = array_get(self, id);
*own_quantifier = (uint8_t) quantifier_join((TSQuantifier) *own_quantifier, TSQuantifierZero);
}
@ -766,7 +766,7 @@ static int symbol_table_id_for_name(
const char *name,
uint32_t length
) {
for (unsigned i = 0; i < self->slices.size; i++) {
for (unsigned i = 0; i < self->slices.meta.size; i++) {
Slice slice = *array_get(&self->slices, i);
if (
slice.length == length &&
@ -794,14 +794,14 @@ static uint16_t symbol_table_insert_name(
int id = symbol_table_id_for_name(self, name, length);
if (id >= 0) return (uint16_t)id;
Slice slice = {
.offset = self->characters.size,
.offset = self->characters.meta.size,
.length = length,
};
array_grow_by(&self->characters, length + 1);
memcpy(array_get(&self->characters, slice.offset), name, length);
*array_get(&self->characters, self->characters.size - 1) = 0;
*array_get(&self->characters, self->characters.meta.size - 1) = 0;
array_push(&self->slices, slice);
return self->slices.size - 1;
return self->slices.meta.size - 1;
}
/************
@ -969,7 +969,7 @@ static inline AnalysisState *analysis_state_pool__clone_or_reuse(
AnalysisState *borrowed_item
) {
AnalysisState *new_item;
if (self->size) {
if (self->meta.size) {
new_item = array_pop(self);
} else {
new_item = ts_malloc(sizeof(AnalysisState));
@ -1023,7 +1023,7 @@ static inline void analysis_state_set__clear(AnalysisStateSet *self, AnalysisSta
// Releases all memory that is managed with this state set, including any items currently present.
// After calling this function, the set is no longer suitable for use.
static inline void analysis_state_set__delete(AnalysisStateSet *self) {
for (unsigned i = 0; i < self->size; i++) {
for (unsigned i = 0; i < self->meta.size; i++) {
ts_free(self->contents[i]);
}
array_delete(self);
@ -1094,7 +1094,7 @@ static inline bool ts_query__pattern_map_search(
uint32_t *result
) {
uint32_t base_index = self->wildcard_root_pattern_count;
uint32_t size = self->pattern_map.size - base_index;
uint32_t size = self->pattern_map.meta.size - base_index;
if (size == 0) {
*result = base_index;
return false;
@ -1115,7 +1115,7 @@ static inline bool ts_query__pattern_map_search(
if (needle > symbol) {
base_index++;
if (base_index < self->pattern_map.size) {
if (base_index < self->pattern_map.meta.size) {
symbol = array_get(&self->steps,
array_get(&self->pattern_map, base_index)->step_index
)->symbol;
@ -1140,7 +1140,7 @@ static inline void ts_query__pattern_map_insert(
// by pattern_index. This way, states for earlier patterns will be
// initiated first, which allows the ordering of the states array
// to be maintained more efficiently.
while (index < self->pattern_map.size) {
while (index < self->pattern_map.meta.size) {
PatternEntry *entry = array_get(&self->pattern_map, index);
if (
array_get(&self->steps, entry->step_index)->symbol == symbol &&
@ -1175,11 +1175,11 @@ static void ts_query__perform_analysis(
#ifdef DEBUG_ANALYZE_QUERY
printf("Iteration: %u. Final step indices:", iteration);
for (unsigned j = 0; j < analysis->final_step_indices.size; j++) {
for (unsigned j = 0; j < analysis->final_step_indices.meta.size; j++) {
printf(" %4u", *array_get(&analysis->final_step_indices, j));
}
printf("\n");
for (unsigned j = 0; j < analysis->states.size; j++) {
for (unsigned j = 0; j < analysis->states.meta.size; j++) {
AnalysisState *state = *array_get(&analysis->states, j);
printf(" %3u: step: %u, stack: [", j, state->step_index);
for (unsigned k = 0; k < state->depth; k++) {
@ -1201,16 +1201,16 @@ static void ts_query__perform_analysis(
// bump the depth limit by one, and continue to process the states the exceeded the
// limit. But only allow this if progress has been made since the last time the depth
// limit was increased.
if (analysis->states.size == 0) {
if (analysis->states.meta.size == 0) {
if (
analysis->deeper_states.size > 0 &&
analysis->final_step_indices.size > prev_final_step_count
analysis->deeper_states.meta.size > 0 &&
analysis->final_step_indices.meta.size > prev_final_step_count
) {
#ifdef DEBUG_ANALYZE_QUERY
printf("Increase recursion depth limit to %u\n", recursion_depth_limit + 1);
#endif
prev_final_step_count = analysis->final_step_indices.size;
prev_final_step_count = analysis->final_step_indices.meta.size;
recursion_depth_limit++;
AnalysisStateSet _states = analysis->states;
analysis->states = analysis->deeper_states;
@ -1222,7 +1222,7 @@ static void ts_query__perform_analysis(
}
analysis_state_set__clear(&analysis->next_states, &analysis->state_pool);
for (unsigned j = 0; j < analysis->states.size; j++) {
for (unsigned j = 0; j < analysis->states.meta.size; j++) {
AnalysisState * const state = *array_get(&analysis->states, j);
// For efficiency, it's important to avoid processing the same analysis state more
@ -1230,7 +1230,7 @@ static void ts_query__perform_analysis(
// their hypothetical syntax trees. In each iteration of this loop, start by advancing
// the states that have made the least progress. Avoid advancing states that have already
// made more progress.
if (analysis->next_states.size > 0) {
if (analysis->next_states.meta.size > 0) {
int comparison = analysis_state__compare(
&state,
array_back(&analysis->next_states)
@ -1242,7 +1242,7 @@ static void ts_query__perform_analysis(
#ifdef DEBUG_ANALYZE_QUERY
printf("Terminate iteration at state %u\n", j);
#endif
while (j < analysis->states.size) {
while (j < analysis->states.meta.size) {
analysis_state_set__push(
&analysis->next_states,
&analysis->state_pool,
@ -1298,7 +1298,7 @@ static void ts_query__perform_analysis(
analysis_subgraph_node__compare, &successor,
&node_index, &exists
);
while (node_index < subgraph->nodes.size) {
while (node_index < subgraph->nodes.meta.size) {
AnalysisSubgraphNode *node = array_get(&subgraph->nodes, node_index);
node_index++;
if (node->state != successor.state || node->child_index != successor.child_index) break;
@ -1467,7 +1467,7 @@ static void ts_query__perform_analysis(
static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
Array(uint16_t) non_rooted_pattern_start_steps = array_new();
for (unsigned i = 0; i < self->pattern_map.size; i++) {
for (unsigned i = 0; i < self->pattern_map.meta.size; i++) {
PatternEntry *pattern = array_get(&self->pattern_map, i);
if (!pattern->is_rooted) {
QueryStep *step = array_get(&self->steps, pattern->step_index);
@ -1482,7 +1482,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// captures, and record the indices of all of the steps that have child steps.
Array(uint32_t) parent_step_indices = array_new();
bool all_patterns_are_valid = true;
for (unsigned i = 0; i < self->steps.size; i++) {
for (unsigned i = 0; i < self->steps.meta.size; i++) {
QueryStep *step = array_get(&self->steps, i);
if (step->depth == PATTERN_DONE_MARKER) {
step->parent_pattern_guaranteed = true;
@ -1493,7 +1493,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
bool has_children = false;
bool is_wildcard = step->symbol == WILDCARD_SYMBOL;
step->contains_captures = step->capture_ids[0] != NONE;
for (unsigned j = i + 1; j < self->steps.size; j++) {
for (unsigned j = i + 1; j < self->steps.meta.size; j++) {
QueryStep *next_step = array_get(&self->steps, j);
if (
next_step->depth == PATTERN_DONE_MARKER ||
@ -1521,7 +1521,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
&subtype_length
);
for (unsigned j = i + 1; j < self->steps.size; j++) {
for (unsigned j = i + 1; j < self->steps.meta.size; j++) {
QueryStep *child_step = array_get(&self->steps, j);
if (child_step->depth == PATTERN_DONE_MARKER || child_step->depth <= step->depth) {
break;
@ -1536,7 +1536,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
}
if (!is_valid_subtype) {
for (unsigned offset_idx = 0; offset_idx < self->step_offsets.size; offset_idx++) {
for (unsigned offset_idx = 0; offset_idx < self->step_offsets.meta.size; offset_idx++) {
StepOffset *step_offset = array_get(&self->step_offsets, offset_idx);
if (step_offset->step_index >= j) {
*error_offset = step_offset->byte_offset;
@ -1560,7 +1560,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// one of the parent nodes, such that their children appear to belong to the
// parent.
AnalysisSubgraphArray subgraphs = array_new();
for (unsigned i = 0; i < parent_step_indices.size; i++) {
for (unsigned i = 0; i < parent_step_indices.meta.size; i++) {
uint32_t parent_step_index = *array_get(&parent_step_indices, i);
TSSymbol parent_symbol = array_get(&self->steps, parent_step_index)->symbol;
AnalysisSubgraph subgraph = { .symbol = parent_symbol };
@ -1605,7 +1605,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
);
if (exists) {
AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index);
if (subgraph->nodes.size == 0 || array_back(&subgraph->nodes)->state != state) {
if (subgraph->nodes.meta.size == 0 || array_back(&subgraph->nodes)->state != state) {
array_push(&subgraph->nodes, ((AnalysisSubgraphNode) {
.state = state,
.production_id = action->reduce.production_id,
@ -1643,7 +1643,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
if (exists) {
AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index);
if (
subgraph->start_states.size == 0 ||
subgraph->start_states.meta.size == 0 ||
*array_back(&subgraph->start_states) != state
)
array_push(&subgraph->start_states, state);
@ -1657,16 +1657,16 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// For each subgraph, compute the preceding states by walking backward
// from the end states using the predecessor map.
Array(AnalysisSubgraphNode) next_nodes = array_new();
for (unsigned i = 0; i < subgraphs.size; i++) {
for (unsigned i = 0; i < subgraphs.meta.size; i++) {
AnalysisSubgraph *subgraph = array_get(&subgraphs, i);
if (subgraph->nodes.size == 0) {
if (subgraph->nodes.meta.size == 0) {
array_delete(&subgraph->start_states);
array_erase(&subgraphs, i);
i--;
continue;
}
array_assign(&next_nodes, &subgraph->nodes);
while (next_nodes.size > 0) {
while (next_nodes.meta.size > 0) {
AnalysisSubgraphNode node = array_pop(&next_nodes);
if (node.child_index > 1) {
unsigned predecessor_count;
@ -1698,16 +1698,16 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
#ifdef DEBUG_ANALYZE_QUERY
printf("\nSubgraphs:\n");
for (unsigned i = 0; i < subgraphs.size; i++) {
for (unsigned i = 0; i < subgraphs.meta.size; i++) {
AnalysisSubgraph *subgraph = array_get(&subgraphs, i);
printf(" %u, %s:\n", subgraph->symbol, ts_language_symbol_name(self->language, subgraph->symbol));
for (unsigned j = 0; j < subgraph->start_states.size; j++) {
for (unsigned j = 0; j < subgraph->start_states.meta.size; j++) {
printf(
" {state: %u}\n",
*array_get(&subgraph->start_states, j)
);
}
for (unsigned j = 0; j < subgraph->nodes.size; j++) {
for (unsigned j = 0; j < subgraph->nodes.meta.size; j++) {
AnalysisSubgraphNode *node = array_get(&subgraph->nodes, j);
printf(
" {state: %u, child_index: %u, production_id: %u, done: %d}\n",
@ -1721,7 +1721,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// For each non-terminal pattern, determine if the pattern can successfully match,
// and identify all of the possible children within the pattern where matching could fail.
QueryAnalysis analysis = query_analysis__new();
for (unsigned i = 0; i < parent_step_indices.size; i++) {
for (unsigned i = 0; i < parent_step_indices.meta.size; i++) {
uint16_t parent_step_index = *array_get(&parent_step_indices, i);
uint16_t parent_depth = array_get(&self->steps, parent_step_index)->depth;
TSSymbol parent_symbol = array_get(&self->steps, parent_step_index)->symbol;
@ -1746,7 +1746,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
AnalysisSubgraph *subgraph = array_get(&subgraphs, subgraph_index);
analysis_state_set__clear(&analysis.states, &analysis.state_pool);
analysis_state_set__clear(&analysis.deeper_states, &analysis.state_pool);
for (unsigned j = 0; j < subgraph->start_states.size; j++) {
for (unsigned j = 0; j < subgraph->start_states.meta.size; j++) {
TSStateId parse_state = *array_get(&subgraph->start_states, j);
analysis_state_set__push(&analysis.states, &analysis.state_pool, &((AnalysisState) {
.step_index = parent_step_index + 1,
@ -1777,7 +1777,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// If this pattern could not be fully analyzed, then every step should
// be considered fallible.
if (analysis.did_abort) {
for (unsigned j = parent_step_index + 1; j < self->steps.size; j++) {
for (unsigned j = parent_step_index + 1; j < self->steps.meta.size; j++) {
QueryStep *step = array_get(&self->steps, j);
if (
step->depth <= parent_depth ||
@ -1793,9 +1793,9 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// If this pattern cannot match, store the pattern index so that it can be
// returned to the caller.
if (analysis.finished_parent_symbols.size == 0) {
if (analysis.finished_parent_symbols.meta.size == 0) {
uint16_t impossible_step_index;
if (analysis.final_step_indices.size > 0) {
if (analysis.final_step_indices.meta.size > 0) {
impossible_step_index = *array_back(&analysis.final_step_indices);
} else {
// If there isn't a final step, then that means the parent step itself is unreachable.
@ -1803,7 +1803,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
}
uint32_t j, impossible_exists;
array_search_sorted_by(&self->step_offsets, .step_index, impossible_step_index, &j, &impossible_exists);
if (j >= self->step_offsets.size) j = self->step_offsets.size - 1;
if (j >= self->step_offsets.meta.size) j = self->step_offsets.meta.size - 1;
*error_offset = array_get(&self->step_offsets, j)->byte_offset;
all_patterns_are_valid = false;
break;
@ -1811,7 +1811,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// Mark as fallible any step where a match terminated.
// Later, this property will be propagated to all of the step's predecessors.
for (unsigned j = 0; j < analysis.final_step_indices.size; j++) {
for (unsigned j = 0; j < analysis.final_step_indices.meta.size; j++) {
uint32_t final_step_index = *array_get(&analysis.final_step_indices, j);
QueryStep *step = array_get(&self->steps, final_step_index);
if (
@ -1827,7 +1827,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// Mark as indefinite any step with captures that are used in predicates.
Array(uint16_t) predicate_capture_ids = array_new();
for (unsigned i = 0; i < self->patterns.size; i++) {
for (unsigned i = 0; i < self->patterns.meta.size; i++) {
QueryPattern *pattern = array_get(&self->patterns, i);
// Gather all of the captures that are used in predicates for this pattern.
@ -1866,10 +1866,10 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// Propagate fallibility. If a pattern is fallible at a given step, then it is
// fallible at all of its preceding steps.
bool done = self->steps.size == 0;
bool done = self->steps.meta.size == 0;
while (!done) {
done = true;
for (unsigned i = self->steps.size - 1; i > 0; i--) {
for (unsigned i = self->steps.meta.size - 1; i > 0; i--) {
QueryStep *step = array_get(&self->steps, i);
if (step->depth == PATTERN_DONE_MARKER) continue;
@ -1903,7 +1903,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
#ifdef DEBUG_ANALYZE_QUERY
printf("Steps:\n");
for (unsigned i = 0; i < self->steps.size; i++) {
for (unsigned i = 0; i < self->steps.meta.size; i++) {
QueryStep *step = array_get(&self->steps, i);
if (step->depth == PATTERN_DONE_MARKER) {
printf(" %u: DONE\n", i);
@ -1927,18 +1927,18 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
// of matching non-rooted patterns in this query. These repetition symbols
// prevent certain optimizations with range restrictions.
analysis.did_abort = false;
for (uint32_t i = 0; i < non_rooted_pattern_start_steps.size; i++) {
for (uint32_t i = 0; i < non_rooted_pattern_start_steps.meta.size; i++) {
uint16_t pattern_entry_index = *array_get(&non_rooted_pattern_start_steps, i);
PatternEntry *pattern_entry = array_get(&self->pattern_map, pattern_entry_index);
analysis_state_set__clear(&analysis.states, &analysis.state_pool);
analysis_state_set__clear(&analysis.deeper_states, &analysis.state_pool);
for (unsigned j = 0; j < subgraphs.size; j++) {
for (unsigned j = 0; j < subgraphs.meta.size; j++) {
AnalysisSubgraph *subgraph = array_get(&subgraphs, j);
TSSymbolMetadata metadata = ts_language_symbol_metadata(self->language, subgraph->symbol);
if (metadata.visible || metadata.named) continue;
for (uint32_t k = 0; k < subgraph->start_states.size; k++) {
for (uint32_t k = 0; k < subgraph->start_states.meta.size; k++) {
TSStateId parse_state = *array_get(&subgraph->start_states, k);
analysis_state_set__push(&analysis.states, &analysis.state_pool, &((AnalysisState) {
.step_index = pattern_entry->step_index,
@ -1967,21 +1967,21 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
&analysis
);
if (analysis.finished_parent_symbols.size > 0) {
if (analysis.finished_parent_symbols.meta.size > 0) {
array_get(&self->patterns, pattern_entry->pattern_index)->is_non_local = true;
}
for (unsigned k = 0; k < analysis.finished_parent_symbols.size; k++) {
for (unsigned k = 0; k < analysis.finished_parent_symbols.meta.size; k++) {
TSSymbol symbol = *array_get(&analysis.finished_parent_symbols, k);
array_insert_sorted_by(&self->repeat_symbols_with_rootless_patterns, , symbol);
}
}
#ifdef DEBUG_ANALYZE_QUERY
if (self->repeat_symbols_with_rootless_patterns.size > 0) {
if (self->repeat_symbols_with_rootless_patterns.meta.size > 0) {
printf("\nRepetition symbols with rootless patterns:\n");
printf("aborted analysis: %d\n", analysis.did_abort);
for (unsigned i = 0; i < self->repeat_symbols_with_rootless_patterns.size; i++) {
for (unsigned i = 0; i < self->repeat_symbols_with_rootless_patterns.meta.size; i++) {
TSSymbol symbol = *array_get(&self->repeat_symbols_with_rootless_patterns, i);
printf(" %u, %s\n", symbol, ts_language_symbol_name(self->language, symbol));
}
@ -1990,7 +1990,7 @@ static bool ts_query__analyze_patterns(TSQuery *self, unsigned *error_offset) {
#endif
// Cleanup
for (unsigned i = 0; i < subgraphs.size; i++) {
for (unsigned i = 0; i < subgraphs.meta.size; i++) {
array_delete(&array_get(&subgraphs, i)->start_states);
array_delete(&array_get(&subgraphs, i)->nodes);
}
@ -2020,7 +2020,7 @@ static void ts_query__add_negated_fields(
bool failed_match = false;
unsigned match_count = 0;
unsigned start_i = 0;
for (unsigned i = 0; i < self->negated_fields.size; i++) {
for (unsigned i = 0; i < self->negated_fields.meta.size; i++) {
TSFieldId existing_field_id = *array_get(&self->negated_fields, i);
// At each zero value, terminate the match attempt. If we've exactly
@ -2054,7 +2054,7 @@ static void ts_query__add_negated_fields(
}
}
step->negated_field_list_id = self->negated_fields.size;
step->negated_field_list_id = self->negated_fields.meta.size;
array_extend(&self->negated_fields, field_count, field_ids);
array_push(&self->negated_fields, 0);
}
@ -2186,7 +2186,7 @@ static TSQueryError ts_query__parse_predicate(
uint16_t query_id = symbol_table_insert_name(
&self->predicate_values,
self->string_buffer.contents,
self->string_buffer.size
self->string_buffer.meta.size
);
array_push(&self->predicate_steps, ((TSQueryPredicateStep) {
.type = TSQueryPredicateStepTypeString,
@ -2236,11 +2236,11 @@ static TSQueryError ts_query__parse_pattern(
if (stream->next == 0) return TSQueryErrorSyntax;
if (stream->next == ')' || stream->next == ']') return PARENT_DONE;
const uint32_t starting_step_index = self->steps.size;
const uint32_t starting_step_index = self->steps.meta.size;
// Store the byte offset of each step in the query.
if (
self->step_offsets.size == 0 ||
self->step_offsets.meta.size == 0 ||
array_back(&self->step_offsets)->step_index != starting_step_index
) {
array_push(&self->step_offsets, ((StepOffset) {
@ -2258,7 +2258,7 @@ static TSQueryError ts_query__parse_pattern(
Array(uint32_t) branch_step_indices = array_new();
CaptureQuantifiers branch_capture_quantifiers = capture_quantifiers_new();
for (;;) {
uint32_t start_index = self->steps.size;
uint32_t start_index = self->steps.meta.size;
TSQueryError e = ts_query__parse_pattern(
self,
stream,
@ -2268,7 +2268,7 @@ static TSQueryError ts_query__parse_pattern(
);
if (e == PARENT_DONE) {
if (stream->next == ']' && branch_step_indices.size > 0) {
if (stream->next == ']' && branch_step_indices.meta.size > 0) {
stream_advance(stream);
break;
}
@ -2294,13 +2294,13 @@ static TSQueryError ts_query__parse_pattern(
// For all of the branches except for the last one, add the subsequent branch as an
// alternative, and link the end of the branch to the current end of the steps.
for (unsigned i = 0; i < branch_step_indices.size - 1; i++) {
for (unsigned i = 0; i < branch_step_indices.meta.size - 1; i++) {
uint32_t step_index = *array_get(&branch_step_indices, i);
uint32_t next_step_index = *array_get(&branch_step_indices, i + 1);
QueryStep *start_step = array_get(&self->steps, step_index);
QueryStep *end_step = array_get(&self->steps, next_step_index - 1);
start_step->alternative_index = next_step_index;
end_step->alternative_index = self->steps.size;
end_step->alternative_index = self->steps.meta.size;
end_step->is_dead_end = true;
}
@ -2401,7 +2401,7 @@ static TSQueryError ts_query__parse_pattern(
symbol = ts_language_symbol_for_name(
self->language,
self->string_buffer.contents,
self->string_buffer.size,
self->string_buffer.meta.size,
false
);
if (!symbol) {
@ -2476,7 +2476,7 @@ static TSQueryError ts_query__parse_pattern(
step->symbol = ts_language_symbol_for_name(
self->language,
self->string_buffer.contents,
self->string_buffer.size,
self->string_buffer.meta.size,
false
);
} else {
@ -2563,7 +2563,7 @@ static TSQueryError ts_query__parse_pattern(
stream_skip_whitespace(stream);
}
uint16_t step_index = self->steps.size;
uint16_t step_index = self->steps.meta.size;
TSQueryError e = ts_query__parse_pattern(
self,
stream,
@ -2573,7 +2573,7 @@ static TSQueryError ts_query__parse_pattern(
);
// In the event we only parsed a predicate, meaning no new steps were added,
// then subtract one so we're not indexing past the end of the array
if (step_index == self->steps.size) step_index--;
if (step_index == self->steps.meta.size) step_index--;
if (e == PARENT_DONE) {
if (stream->next == ')') {
if (child_is_immediate) {
@ -2586,13 +2586,13 @@ static TSQueryError ts_query__parse_pattern(
last_child_step->is_last_child = true;
if (
last_child_step->alternative_index != NONE &&
last_child_step->alternative_index < self->steps.size
last_child_step->alternative_index < self->steps.meta.size
) {
QueryStep *alternative_step = array_get(&self->steps, last_child_step->alternative_index);
alternative_step->is_last_child = true;
while (
alternative_step->alternative_index != NONE &&
alternative_step->alternative_index < self->steps.size
alternative_step->alternative_index < self->steps.meta.size
) {
alternative_step = array_get(&self->steps, alternative_step->alternative_index);
alternative_step->is_last_child = true;
@ -2648,7 +2648,7 @@ static TSQueryError ts_query__parse_pattern(
TSSymbol symbol = ts_language_symbol_for_name(
self->language,
self->string_buffer.contents,
self->string_buffer.size,
self->string_buffer.meta.size,
false
);
if (!symbol) {
@ -2706,7 +2706,7 @@ static TSQueryError ts_query__parse_pattern(
if (
step->alternative_index != NONE &&
step->alternative_index > step_index &&
step->alternative_index < self->steps.size
step->alternative_index < self->steps.meta.size
) {
step_index = step->alternative_index;
step = array_get(&self->steps, step_index);
@ -2778,7 +2778,7 @@ static TSQueryError ts_query__parse_pattern(
if (
step->alternative_index != NONE &&
step->alternative_index > step_index &&
step->alternative_index < self->steps.size
step->alternative_index < self->steps.meta.size
) {
step_index = step->alternative_index;
} else {
@ -2814,17 +2814,17 @@ static TSQueryError ts_query__parse_pattern(
// `repeat_step` or beyond. Note that having just been pushed,
// `repeat_step` occupies slot `self->steps.size - 1`.
step = array_get(&self->steps, starting_step_index);
while (step->alternative_index != NONE && step->alternative_index < self->steps.size - 1) {
while (step->alternative_index != NONE && step->alternative_index < self->steps.meta.size - 1) {
step = array_get(&self->steps, step->alternative_index);
}
step->alternative_index = self->steps.size;
step->alternative_index = self->steps.meta.size;
break;
case TSQuantifierZeroOrOne:
step = array_get(&self->steps, starting_step_index);
while (step->alternative_index != NONE && step->alternative_index < self->steps.size) {
while (step->alternative_index != NONE && step->alternative_index < self->steps.meta.size) {
step = array_get(&self->steps, step->alternative_index);
}
step->alternative_index = self->steps.size;
step->alternative_index = self->steps.meta.size;
break;
default:
break;
@ -2874,9 +2874,9 @@ TSQuery *ts_query_new(
Stream stream = stream_new(source, source_len);
stream_skip_whitespace(&stream);
while (stream.input < stream.end) {
uint32_t pattern_index = self->patterns.size;
uint32_t start_step_index = self->steps.size;
uint32_t start_predicate_step_index = self->predicate_steps.size;
uint32_t pattern_index = self->patterns.meta.size;
uint32_t start_step_index = self->steps.meta.size;
uint32_t start_predicate_step_index = self->predicate_steps.meta.size;
array_push(&self->patterns, ((QueryPattern) {
.steps = (Slice) {.offset = start_step_index},
.predicate_steps = (Slice) {.offset = start_predicate_step_index},
@ -2888,8 +2888,8 @@ TSQuery *ts_query_new(
array_push(&self->steps, query_step__new(0, PATTERN_DONE_MARKER, false));
QueryPattern *pattern = array_back(&self->patterns);
pattern->steps.length = self->steps.size - start_step_index;
pattern->predicate_steps.length = self->predicate_steps.size - start_predicate_step_index;
pattern->steps.length = self->steps.meta.size - start_step_index;
pattern->predicate_steps.length = self->predicate_steps.meta.size - start_predicate_step_index;
pattern->end_byte = stream_offset(&stream);
// If any pattern could not be parsed, then report the error information
@ -2929,7 +2929,7 @@ TSQuery *ts_query_new(
// error node.
uint32_t start_depth = step->depth;
bool is_rooted = start_depth == 0;
for (uint32_t step_index = start_step_index + 1; step_index < self->steps.size; step_index++) {
for (uint32_t step_index = start_step_index + 1; step_index < self->steps.meta.size; step_index++) {
QueryStep *child_step = array_get(&self->steps, step_index);
if (child_step->is_dead_end) break;
if (child_step->depth == start_depth) {
@ -2983,7 +2983,7 @@ void ts_query_delete(TSQuery *self) {
ts_language_delete(self->language);
symbol_table_delete(&self->captures);
symbol_table_delete(&self->predicate_values);
for (uint32_t index = 0; index < self->capture_quantifiers.size; index++) {
for (uint32_t index = 0; index < self->capture_quantifiers.meta.size; index++) {
CaptureQuantifiers *capture_quantifiers = array_get(&self->capture_quantifiers, index);
capture_quantifiers_delete(capture_quantifiers);
}
@ -2993,15 +2993,15 @@ void ts_query_delete(TSQuery *self) {
}
// Return the number of patterns in this query.
// Note: reads the size through the monomorphic `_ArrayMeta` member, per the
// Array(T) refactor; the old `self->patterns.size` access no longer exists.
uint32_t ts_query_pattern_count(const TSQuery *self) {
  return self->patterns.meta.size;
}
// Return the number of distinct capture names in this query.
// This is the number of slices in the capture symbol table; size is read via
// the `_ArrayMeta` member introduced by the Array(T) refactor.
uint32_t ts_query_capture_count(const TSQuery *self) {
  return self->captures.slices.meta.size;
}
// Return the number of distinct string literals used in this query's
// predicates. Size is read via the `_ArrayMeta` member introduced by the
// Array(T) refactor.
uint32_t ts_query_string_count(const TSQuery *self) {
  return self->predicate_values.slices.meta.size;
}
const char *ts_query_capture_name_for_id(
@ -3058,7 +3058,7 @@ bool ts_query_is_pattern_rooted(
const TSQuery *self,
uint32_t pattern_index
) {
for (unsigned i = 0; i < self->pattern_map.size; i++) {
for (unsigned i = 0; i < self->pattern_map.meta.size; i++) {
PatternEntry *entry = array_get(&self->pattern_map, i);
if (entry->pattern_index == pattern_index) {
if (!entry->is_rooted) return false;
@ -3071,7 +3071,7 @@ bool ts_query_is_pattern_non_local(
const TSQuery *self,
uint32_t pattern_index
) {
if (pattern_index < self->patterns.size) {
if (pattern_index < self->patterns.meta.size) {
return array_get(&self->patterns, pattern_index)->is_non_local;
} else {
return false;
@ -3083,12 +3083,12 @@ bool ts_query_is_pattern_guaranteed_at_step(
uint32_t byte_offset
) {
uint32_t step_index = UINT32_MAX;
for (unsigned i = 0; i < self->step_offsets.size; i++) {
for (unsigned i = 0; i < self->step_offsets.meta.size; i++) {
StepOffset *step_offset = array_get(&self->step_offsets, i);
if (step_offset->byte_offset > byte_offset) break;
step_index = step_offset->step_index;
}
if (step_index < self->steps.size) {
if (step_index < self->steps.meta.size) {
return array_get(&self->steps, step_index)->root_pattern_guaranteed;
} else {
return false;
@ -3099,7 +3099,7 @@ bool ts_query__step_is_fallible(
const TSQuery *self,
uint16_t step_index
) {
ts_assert((uint32_t)step_index + 1 < self->steps.size);
ts_assert((uint32_t)step_index + 1 < self->steps.meta.size);
QueryStep *step = array_get(&self->steps, step_index);
QueryStep *next_step = array_get(&self->steps, step_index + 1);
return (
@ -3118,7 +3118,7 @@ void ts_query_disable_capture(
// captured with the given name.
int id = symbol_table_id_for_name(&self->captures, name, length);
if (id != -1) {
for (unsigned i = 0; i < self->steps.size; i++) {
for (unsigned i = 0; i < self->steps.meta.size; i++) {
QueryStep *step = array_get(&self->steps, i);
query_step__remove_capture(step, id);
}
@ -3131,7 +3131,7 @@ void ts_query_disable_pattern(
) {
// Remove the given pattern from the pattern map. Its steps will still
// be in the `steps` array, but they will never be read.
for (unsigned i = 0; i < self->pattern_map.size; i++) {
for (unsigned i = 0; i < self->pattern_map.meta.size; i++) {
PatternEntry *pattern = array_get(&self->pattern_map, i);
if (pattern->pattern_index == pattern_index) {
array_erase(&self->pattern_map, i);
@ -3206,7 +3206,7 @@ void ts_query_cursor_exec(
) {
if (query) {
LOG("query steps:\n");
for (unsigned i = 0; i < query->steps.size; i++) {
for (unsigned i = 0; i < query->steps.meta.size; i++) {
QueryStep *step = array_get(&query->steps, i);
LOG(" %u: {", i);
if (step->depth == PATTERN_DONE_MARKER) {
@ -3338,7 +3338,7 @@ static bool ts_query_cursor__first_in_progress_capture(
*state_index = UINT32_MAX;
*byte_offset = UINT32_MAX;
*pattern_index = UINT32_MAX;
for (unsigned i = 0; i < self->states.size; i++) {
for (unsigned i = 0; i < self->states.meta.size; i++) {
QueryState *state = array_get(&self->states, i);
if (state->dead) continue;
@ -3346,7 +3346,7 @@ static bool ts_query_cursor__first_in_progress_capture(
&self->capture_list_pool,
state->capture_list_id
);
if (state->consumed_capture_count >= captures->size) {
if (state->consumed_capture_count >= captures->meta.size) {
continue;
}
@ -3420,8 +3420,8 @@ void ts_query_cursor__compare_captures(
*right_contains_left = true;
unsigned i = 0, j = 0;
for (;;) {
if (i < left_captures->size) {
if (j < right_captures->size) {
if (i < left_captures->meta.size) {
if (j < right_captures->meta.size) {
TSQueryCapture *left = array_get(left_captures, i);
TSQueryCapture *right = array_get(right_captures, j);
if (left->node.id == right->node.id && left->index == right->index) {
@ -3450,7 +3450,7 @@ void ts_query_cursor__compare_captures(
break;
}
} else {
if (j < right_captures->size) {
if (j < right_captures->meta.size) {
*left_contains_right = false;
}
break;
@ -3484,7 +3484,7 @@ static void ts_query_cursor__add_state(
// pattern while another state for the same pattern is already in progress.
// If there are multiple patterns like this in a query, then this loop will
// need to execute in order to keep the states ordered by pattern_index.
uint32_t index = self->states.size;
uint32_t index = self->states.meta.size;
while (index > 0) {
QueryState *prev_state = array_get(&self->states, index - 1);
if (prev_state->start_depth < start_depth) break;
@ -3591,7 +3591,7 @@ static void ts_query_cursor__capture(
ts_node_type(node),
state->pattern_index,
capture_id,
capture_list->size
capture_list->meta.size
);
}
}
@ -3635,7 +3635,7 @@ static inline bool ts_query_cursor__should_descend(
// If there are in-progress matches whose remaining steps occur
// deeper in the tree, then descend.
for (unsigned i = 0; i < self->states.size; i++) {
for (unsigned i = 0; i < self->states.meta.size; i++) {
QueryState *state = array_get(&self->states, i);
QueryStep *next_step = array_get(&self->query->steps, state->step_index);
if (
@ -3714,7 +3714,7 @@ static inline bool ts_query_cursor__advance(
bool did_match = false;
for (;;) {
if (self->halted) {
while (self->states.size > 0) {
while (self->states.meta.size > 0) {
QueryState state = array_pop(&self->states);
capture_list_pool_release(
&self->capture_list_pool,
@ -3754,7 +3754,7 @@ static inline bool ts_query_cursor__advance(
// After leaving a node, remove any states that cannot make further progress.
uint32_t deleted_count = 0;
for (unsigned i = 0, n = self->states.size; i < n; i++) {
for (unsigned i = 0, n = self->states.meta.size; i < n; i++) {
QueryState *state = array_get(&self->states, i);
QueryStep *step = array_get(&self->query->steps, state->step_index);
@ -3792,7 +3792,7 @@ static inline bool ts_query_cursor__advance(
*array_get(&self->states, i - deleted_count) = *state;
}
}
self->states.size -= deleted_count;
self->states.meta.size -= deleted_count;
}
// Leave this node by stepping to its next sibling or to its parent.
@ -3872,8 +3872,8 @@ static inline bool ts_query_cursor__advance(
ts_node_type(node),
ts_language_field_name_for_id(self->query->language, field_id),
ts_node_start_point(node).row,
self->states.size,
self->finished_states.size
self->states.meta.size,
self->finished_states.meta.size
);
bool node_is_error = symbol == ts_builtin_sym_error;
@ -3925,14 +3925,14 @@ static inline bool ts_query_cursor__advance(
// Advance to the next pattern whose root node matches this node.
i++;
if (i == self->query->pattern_map.size) break;
if (i == self->query->pattern_map.meta.size) break;
pattern = array_get(&self->query->pattern_map, i);
step = array_get(&self->query->steps, pattern->step_index);
} while (step->symbol == symbol);
}
// Update all of the in-progress states with current node.
for (unsigned j = 0, copy_count = 0; j < self->states.size; j += 1 + copy_count) {
for (unsigned j = 0, copy_count = 0; j < self->states.meta.size; j += 1 + copy_count) {
QueryState *state = array_get(&self->states, j);
QueryStep *step = array_get(&self->query->steps, state->step_index);
state->has_in_progress_alternatives = false;
@ -4135,7 +4135,7 @@ static inline bool ts_query_cursor__advance(
copy->step_index,
next_step->alternative_index,
next_step->alternative_is_immediate,
capture_list_pool_get(&self->capture_list_pool, copy->capture_list_id)->size
capture_list_pool_get(&self->capture_list_pool, copy->capture_list_id)->meta.size
);
end_index++;
copy_count++;
@ -4148,7 +4148,7 @@ static inline bool ts_query_cursor__advance(
}
}
for (unsigned j = 0; j < self->states.size; j++) {
for (unsigned j = 0; j < self->states.meta.size; j++) {
QueryState *state = array_get(&self->states, j);
if (state->dead) {
array_erase(&self->states, j);
@ -4160,7 +4160,7 @@ static inline bool ts_query_cursor__advance(
// repeated nodes, this is necessary to avoid multiple redundant states, where
// one state has a strict subset of another state's captures.
bool did_remove = false;
for (unsigned k = j + 1; k < self->states.size; k++) {
for (unsigned k = j + 1; k < self->states.meta.size; k++) {
QueryState *other_state = array_get(&self->states, k);
// Query states are kept in ascending order of start_depth and pattern_index.
@ -4219,7 +4219,7 @@ static inline bool ts_query_cursor__advance(
state->pattern_index,
state->start_depth,
state->step_index,
capture_list_pool_get(&self->capture_list_pool, state->capture_list_id)->size
capture_list_pool_get(&self->capture_list_pool, state->capture_list_id)->meta.size
);
QueryStep *next_step = array_get(&self->query->steps, state->step_index);
if (next_step->depth == PATTERN_DONE_MARKER) {
@ -4260,7 +4260,7 @@ bool ts_query_cursor_next_match(
TSQueryCursor *self,
TSQueryMatch *match
) {
if (self->finished_states.size == 0) {
if (self->finished_states.meta.size == 0) {
if (!ts_query_cursor__advance(self, false)) {
return false;
}
@ -4275,7 +4275,7 @@ bool ts_query_cursor_next_match(
state->capture_list_id
);
match->captures = captures->contents;
match->capture_count = captures->size;
match->capture_count = captures->meta.size;
capture_list_pool_release(&self->capture_list_pool, state->capture_list_id);
array_erase(&self->finished_states, 0);
return true;
@ -4285,7 +4285,7 @@ void ts_query_cursor_remove_match(
TSQueryCursor *self,
uint32_t match_id
) {
for (unsigned i = 0; i < self->finished_states.size; i++) {
for (unsigned i = 0; i < self->finished_states.meta.size; i++) {
const QueryState *state = array_get(&self->finished_states, i);
if (state->id == match_id) {
capture_list_pool_release(
@ -4299,7 +4299,7 @@ void ts_query_cursor_remove_match(
// Remove unfinished query states as well to prevent future
// captures for a match being removed.
for (unsigned i = 0; i < self->states.size; i++) {
for (unsigned i = 0; i < self->states.meta.size; i++) {
const QueryState *state = array_get(&self->states, i);
if (state->id == match_id) {
capture_list_pool_release(
@ -4339,7 +4339,7 @@ bool ts_query_cursor_next_capture(
QueryState *first_finished_state = NULL;
uint32_t first_finished_capture_byte = first_unfinished_capture_byte;
uint32_t first_finished_pattern_index = first_unfinished_pattern_index;
for (unsigned i = 0; i < self->finished_states.size;) {
for (unsigned i = 0; i < self->finished_states.meta.size;) {
QueryState *state = array_get(&self->finished_states, i);
const CaptureList *captures = capture_list_pool_get(
&self->capture_list_pool,
@ -4347,7 +4347,7 @@ bool ts_query_cursor_next_capture(
);
// Remove states whose captures are all consumed.
if (state->consumed_capture_count >= captures->size) {
if (state->consumed_capture_count >= captures->meta.size) {
capture_list_pool_release(
&self->capture_list_pool,
state->capture_list_id
@ -4410,7 +4410,7 @@ bool ts_query_cursor_next_capture(
state->capture_list_id
);
match->captures = captures->contents;
match->capture_count = captures->size;
match->capture_count = captures->meta.size;
*capture_index = state->consumed_capture_count;
state->consumed_capture_count++;
return true;
@ -4434,7 +4434,7 @@ bool ts_query_cursor_next_capture(
// continue finding more matches.
if (
!ts_query_cursor__advance(self, true) &&
self->finished_states.size == 0
self->finished_states.meta.size == 0
) return false;
}
}

View file

@ -19,7 +19,7 @@ typedef Array(ReduceAction) ReduceActionSet;
static inline void ts_reduce_action_set_add(ReduceActionSet *self,
ReduceAction new_action) {
for (uint32_t i = 0; i < self->size; i++) {
for (uint32_t i = 0; i < self->meta.size; i++) {
ReduceAction action = self->contents[i];
if (action.symbol == new_action.symbol && action.count == new_action.count)
return;

View file

@ -21,14 +21,14 @@ static inline void reusable_node_clear(ReusableNode *self) {
}
// Return the subtree on top of the reusable-node stack, or NULL_SUBTREE when
// the stack is empty. The duplicated pre-refactor `self->stack.size` lines
// (a diff-extraction artifact) are removed; only the `.meta.size` access from
// the Array(T)/_ArrayMeta refactor remains.
static inline Subtree reusable_node_tree(ReusableNode *self) {
  return self->stack.meta.size > 0
    ? self->stack.contents[self->stack.meta.size - 1].tree
    : NULL_SUBTREE;
}
// Return the byte offset recorded for the top entry of the reusable-node
// stack, or UINT32_MAX when the stack is empty. The duplicated pre-refactor
// `self->stack.size` lines (a diff-extraction artifact) are removed; only the
// `.meta.size` access from the Array(T)/_ArrayMeta refactor remains.
static inline uint32_t reusable_node_byte_offset(ReusableNode *self) {
  return self->stack.meta.size > 0
    ? self->stack.contents[self->stack.meta.size - 1].byte_offset
    : UINT32_MAX;
}
@ -48,7 +48,7 @@ static inline void reusable_node_advance(ReusableNode *self) {
do {
StackEntry popped_entry = array_pop(&self->stack);
next_index = popped_entry.child_index + 1;
if (self->stack.size == 0) return;
if (self->stack.meta.size == 0) return;
tree = array_back(&self->stack)->tree;
} while (ts_subtree_child_count(tree) <= next_index);

View file

@ -109,7 +109,7 @@ recur:
first_predecessor = self->links[0].node;
}
if (pool->size < MAX_NODE_POOL_SIZE) {
if (pool->meta.size < MAX_NODE_POOL_SIZE) {
array_push(pool, self);
} else {
ts_free(self);
@ -142,7 +142,7 @@ static StackNode *stack_node_new(
TSStateId state,
StackNodeArray *pool
) {
StackNode *node = pool->size > 0
StackNode *node = pool->meta.size > 0
? array_pop(pool)
: ts_malloc(sizeof(StackNode));
*node = (StackNode) {
@ -298,7 +298,7 @@ static StackVersion ts_stack__add_version(
array_push(&self->heads, head);
stack_node_retain(node);
if (head.last_external_token.ptr) ts_subtree_retain(head.last_external_token);
return (StackVersion)(self->heads.size - 1);
return (StackVersion)(self->heads.meta.size - 1);
}
static void ts_stack__add_slice(
@ -307,7 +307,7 @@ static void ts_stack__add_slice(
StackNode *node,
SubtreeArray *subtrees
) {
for (uint32_t i = self->slices.size - 1; i + 1 > 0; i--) {
for (uint32_t i = self->slices.meta.size - 1; i + 1 > 0; i--) {
StackVersion version = array_get(&self->slices, i)->version;
if (array_get(&self->heads, version)->node == node) {
StackSlice slice = {*subtrees, version};
@ -347,8 +347,8 @@ static StackSliceArray stack__iter(
array_push(&self->iterators, new_iterator);
while (self->iterators.size > 0) {
for (uint32_t i = 0, size = self->iterators.size; i < size; i++) {
while (self->iterators.meta.size > 0) {
for (uint32_t i = 0, size = self->iterators.meta.size; i < size; i++) {
StackIterator *iterator = array_get(&self->iterators, i);
StackNode *node = iterator->node;
@ -386,7 +386,7 @@ static StackSliceArray stack__iter(
link = node->links[0];
next_iterator = array_get(&self->iterators, i);
} else {
if (self->iterators.size >= MAX_ITERATOR_COUNT) continue;
if (self->iterators.meta.size >= MAX_ITERATOR_COUNT) continue;
link = node->links[j];
StackIterator current_iterator = *array_get(&self->iterators, i);
array_push(&self->iterators, current_iterator);
@ -443,12 +443,12 @@ void ts_stack_delete(Stack *self) {
if (self->iterators.contents)
array_delete(&self->iterators);
stack_node_release(self->base_node, &self->node_pool, self->subtree_pool);
for (uint32_t i = 0; i < self->heads.size; i++) {
for (uint32_t i = 0; i < self->heads.meta.size; i++) {
stack_head_delete(array_get(&self->heads, i), &self->node_pool, self->subtree_pool);
}
array_clear(&self->heads);
if (self->node_pool.contents) {
for (uint32_t i = 0; i < self->node_pool.size; i++)
for (uint32_t i = 0; i < self->node_pool.meta.size; i++)
ts_free(*array_get(&self->node_pool, i));
array_delete(&self->node_pool);
}
@ -457,12 +457,12 @@ void ts_stack_delete(Stack *self) {
}
uint32_t ts_stack_version_count(const Stack *self) {
return self->heads.size;
return self->heads.meta.size;
}
uint32_t ts_stack_halted_version_count(Stack *self) {
uint32_t count = 0;
for (uint32_t i = 0; i < self->heads.size; i++) {
for (uint32_t i = 0; i < self->heads.meta.size; i++) {
StackHead *head = array_get(&self->heads, i);
if (head->status == StackStatusHalted) {
count++;
@ -551,7 +551,7 @@ forceinline StackAction pop_pending_callback(void *payload, const StackIterator
StackSliceArray ts_stack_pop_pending(Stack *self, StackVersion version) {
StackSliceArray pop = stack__iter(self, version, pop_pending_callback, NULL, 0);
if (pop.size > 0) {
if (pop.meta.size > 0) {
ts_stack_renumber_version(self, array_get(&pop, 0)->version, version);
array_get(&pop, 0)->version = version;
}
@ -559,7 +559,7 @@ StackSliceArray ts_stack_pop_pending(Stack *self, StackVersion version) {
}
forceinline StackAction pop_error_callback(void *payload, const StackIterator *iterator) {
if (iterator->subtrees.size > 0) {
if (iterator->subtrees.meta.size > 0) {
bool *found_error = payload;
if (!*found_error && ts_subtree_is_error(*array_get(&iterator->subtrees, 0))) {
*found_error = true;
@ -578,15 +578,15 @@ SubtreeArray ts_stack_pop_error(Stack *self, StackVersion version) {
if (node->links[i].subtree.ptr && ts_subtree_is_error(node->links[i].subtree)) {
bool found_error = false;
StackSliceArray pop = stack__iter(self, version, pop_error_callback, &found_error, 1);
if (pop.size > 0) {
ts_assert(pop.size == 1);
if (pop.meta.size > 0) {
ts_assert(pop.meta.size == 1);
ts_stack_renumber_version(self, array_get(&pop, 0)->version, version);
return array_get(&pop, 0)->subtrees;
}
break;
}
}
return (SubtreeArray) {.size = 0};
return (SubtreeArray) {.meta = {.size = 0}};
}
forceinline StackAction pop_all_callback(void *payload, const StackIterator *iterator) {
@ -608,7 +608,7 @@ forceinline StackAction summarize_stack_callback(void *payload, const StackItera
TSStateId state = iterator->node->state;
unsigned depth = iterator->subtree_count;
if (depth > session->max_depth) return StackActionStop;
for (unsigned i = session->summary->size - 1; i + 1 > 0; i--) {
for (unsigned i = session->summary->meta.size - 1; i + 1 > 0; i--) {
StackSummaryEntry entry = *array_get(session->summary, i);
if (entry.depth < depth) break;
if (entry.depth == depth && entry.state == state) return StackActionNone;
@ -676,7 +676,7 @@ void ts_stack_remove_version(Stack *self, StackVersion version) {
void ts_stack_renumber_version(Stack *self, StackVersion v1, StackVersion v2) {
if (v1 == v2) return;
ts_assert(v2 < v1);
ts_assert((uint32_t)v1 < self->heads.size);
ts_assert((uint32_t)v1 < self->heads.meta.size);
StackHead *source_head = array_get(&self->heads, v1);
StackHead *target_head = array_get(&self->heads, v2);
if (target_head->summary && !source_head->summary) {
@ -695,14 +695,14 @@ void ts_stack_swap_versions(Stack *self, StackVersion v1, StackVersion v2) {
}
StackVersion ts_stack_copy_version(Stack *self, StackVersion version) {
ts_assert(version < self->heads.size);
ts_assert(version < self->heads.meta.size);
StackHead version_head = *array_get(&self->heads, version);
array_push(&self->heads, version_head);
StackHead *head = array_back(&self->heads);
stack_node_retain(head->node);
if (head->last_external_token.ptr) ts_subtree_retain(head->last_external_token);
head->summary = NULL;
return self->heads.size - 1;
return self->heads.meta.size - 1;
}
bool ts_stack_merge(Stack *self, StackVersion version1, StackVersion version2) {
@ -765,7 +765,7 @@ Subtree ts_stack_resume(Stack *self, StackVersion version) {
void ts_stack_clear(Stack *self) {
stack_node_retain(self->base_node);
for (uint32_t i = 0; i < self->heads.size; i++) {
for (uint32_t i = 0; i < self->heads.meta.size; i++) {
stack_head_delete(array_get(&self->heads, i), &self->node_pool, self->subtree_pool);
}
array_clear(&self->heads);
@ -788,7 +788,7 @@ bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f)
Array(StackNode *) visited_nodes = array_new();
array_clear(&self->iterators);
for (uint32_t i = 0; i < self->heads.size; i++) {
for (uint32_t i = 0; i < self->heads.meta.size; i++) {
StackHead *head = array_get(&self->heads, i);
if (head->status == StackStatusHalted) continue;
@ -807,7 +807,7 @@ bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f)
if (head->summary) {
fprintf(f, "\nsummary:");
for (uint32_t j = 0; j < head->summary->size; j++) fprintf(f, " %u", array_get(head->summary, j)->state);
for (uint32_t j = 0; j < head->summary->meta.size; j++) fprintf(f, " %u", array_get(head->summary, j)->state);
}
if (head->last_external_token.ptr) {
@ -827,11 +827,11 @@ bool ts_stack_print_dot_graph(Stack *self, const TSLanguage *language, FILE *f)
while (!all_iterators_done) {
all_iterators_done = true;
for (uint32_t i = 0; i < self->iterators.size; i++) {
for (uint32_t i = 0; i < self->iterators.meta.size; i++) {
StackIterator iterator = *array_get(&self->iterators, i);
StackNode *node = iterator.node;
for (uint32_t j = 0; j < visited_nodes.size; j++) {
for (uint32_t j = 0; j < visited_nodes.meta.size; j++) {
if (*array_get(&visited_nodes, j) == node) {
node = NULL;
break;

View file

@ -66,20 +66,19 @@ bool ts_external_scanner_state_eq(const ExternalScannerState *self, const char *
// SubtreeArray
void ts_subtree_array_copy(SubtreeArray self, SubtreeArray *dest) {
dest->size = self.size;
dest->capacity = self.capacity;
dest->meta = self.meta;
dest->contents = self.contents;
if (self.capacity > 0) {
dest->contents = ts_calloc(self.capacity, sizeof(Subtree));
memcpy(dest->contents, self.contents, self.size * sizeof(Subtree));
for (uint32_t i = 0; i < self.size; i++) {
if (self.meta.capacity > 0) {
dest->contents = ts_calloc(self.meta.capacity, sizeof(Subtree));
memcpy(dest->contents, self.contents, self.meta.size * sizeof(Subtree));
for (uint32_t i = 0; i < self.meta.size; i++) {
ts_subtree_retain(*array_get(dest, i));
}
}
}
void ts_subtree_array_clear(SubtreePool *pool, SubtreeArray *self) {
for (uint32_t i = 0; i < self->size; i++) {
for (uint32_t i = 0; i < self->meta.size; i++) {
ts_subtree_release(pool, *array_get(self, i));
}
array_clear(self);
@ -95,10 +94,10 @@ void ts_subtree_array_remove_trailing_extras(
SubtreeArray *destination
) {
array_clear(destination);
while (self->size > 0) {
Subtree last = *array_get(self, self->size - 1);
while (self->meta.size > 0) {
Subtree last = *array_back(self);
if (ts_subtree_extra(last)) {
self->size--;
self->meta.size--;
array_push(destination, last);
} else {
break;
@ -108,8 +107,8 @@ void ts_subtree_array_remove_trailing_extras(
}
void ts_subtree_array_reverse(SubtreeArray *self) {
for (uint32_t i = 0, limit = self->size / 2; i < limit; i++) {
size_t reverse_index = self->size - 1 - i;
for (uint32_t i = 0, limit = self->meta.size / 2; i < limit; i++) {
size_t reverse_index = self->meta.size - 1 - i;
Subtree swap = *array_get(self, i);
*array_get(self, i) = *array_get(self, reverse_index);
*array_get(self, reverse_index) = swap;
@ -126,7 +125,7 @@ SubtreePool ts_subtree_pool_new(uint32_t capacity) {
void ts_subtree_pool_delete(SubtreePool *self) {
if (self->free_trees.contents) {
for (unsigned i = 0; i < self->free_trees.size; i++) {
for (unsigned i = 0; i < self->free_trees.meta.size; i++) {
ts_free(array_get(&self->free_trees, i)->ptr);
}
array_delete(&self->free_trees);
@ -135,7 +134,7 @@ void ts_subtree_pool_delete(SubtreePool *self) {
}
static SubtreeHeapData *ts_subtree_pool_allocate(SubtreePool *self) {
if (self->free_trees.size > 0) {
if (self->free_trees.meta.size > 0) {
return array_pop(&self->free_trees).ptr;
} else {
return ts_malloc(sizeof(SubtreeHeapData));
@ -143,7 +142,7 @@ static SubtreeHeapData *ts_subtree_pool_allocate(SubtreePool *self) {
}
static void ts_subtree_pool_free(SubtreePool *self, SubtreeHeapData *tree) {
if (self->free_trees.capacity > 0 && self->free_trees.size + 1 <= TS_MAX_TREE_POOL_SIZE) {
if (self->free_trees.meta.capacity > 0 && self->free_trees.meta.size + 1 <= TS_MAX_TREE_POOL_SIZE) {
array_push(&self->free_trees, (MutableSubtree) {.ptr = tree});
} else {
ts_free(tree);
@ -295,7 +294,7 @@ void ts_subtree_compress(
const TSLanguage *language,
MutableSubtreeArray *stack
) {
unsigned initial_stack_size = stack->size;
unsigned initial_stack_size = stack->meta.size;
MutableSubtree tree = self;
TSSymbol symbol = tree.ptr->symbol;
@ -325,7 +324,7 @@ void ts_subtree_compress(
tree = grandchild;
}
while (stack->size > initial_stack_size) {
while (stack->meta.size > initial_stack_size) {
tree = array_pop(stack);
MutableSubtree child = ts_subtree_to_mut_unsafe(ts_subtree_children(tree)[0]);
MutableSubtree grandchild = ts_subtree_to_mut_unsafe(ts_subtree_children(child)[child.ptr->child_count - 1]);
@ -487,17 +486,17 @@ MutableSubtree ts_subtree_new_node(
bool fragile = symbol == ts_builtin_sym_error || symbol == ts_builtin_sym_error_repeat;
// Allocate the node's data at the end of the array of children.
size_t new_byte_size = ts_subtree_alloc_size(children->size);
if (children->capacity * sizeof(Subtree) < new_byte_size) {
size_t new_byte_size = ts_subtree_alloc_size(children->meta.size);
if (children->meta.capacity * sizeof(Subtree) < new_byte_size) {
children->contents = ts_realloc(children->contents, new_byte_size);
children->capacity = (uint32_t)(new_byte_size / sizeof(Subtree));
children->meta.capacity = (uint32_t)(new_byte_size / sizeof(Subtree));
}
SubtreeHeapData *data = (SubtreeHeapData *)&children->contents[children->size];
SubtreeHeapData *data = (SubtreeHeapData *)&children->contents[children->meta.size];
*data = (SubtreeHeapData) {
.ref_count = 1,
.symbol = symbol,
.child_count = children->size,
.child_count = children->meta.size,
.visible = metadata.visible,
.named = metadata.named,
.has_changes = false,
@ -571,7 +570,7 @@ void ts_subtree_release(SubtreePool *pool, Subtree self) {
array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(self));
}
while (pool->tree_stack.size > 0) {
while (pool->tree_stack.meta.size > 0) {
MutableSubtree tree = array_pop(&pool->tree_stack);
if (tree.ptr->child_count > 0) {
Subtree *children = ts_subtree_children(tree);
@ -597,7 +596,7 @@ int ts_subtree_compare(Subtree left, Subtree right, SubtreePool *pool) {
array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(left));
array_push(&pool->tree_stack, ts_subtree_to_mut_unsafe(right));
while (pool->tree_stack.size > 0) {
while (pool->tree_stack.meta.size > 0) {
right = ts_subtree_from_mut(array_pop(&pool->tree_stack));
left = ts_subtree_from_mut(array_pop(&pool->tree_stack));
@ -646,7 +645,7 @@ Subtree ts_subtree_edit(Subtree self, const TSInputEdit *input_edit, SubtreePool
},
}));
while (stack.size) {
while (stack.meta.size) {
EditEntry entry = array_pop(&stack);
Edit edit = entry.edit;
bool is_noop = edit.old_end.bytes == edit.start.bytes && edit.new_end.bytes == edit.start.bytes;

View file

@ -42,7 +42,7 @@ static inline CursorChildIterator ts_tree_cursor_iterate_children(const TreeCurs
);
uint32_t descendant_index = last_entry->descendant_index;
if (ts_tree_cursor_is_entry_visible(self, self->stack.size - 1)) {
if (ts_tree_cursor_is_entry_visible(self, self->stack.meta.size - 1)) {
descendant_index += 1;
}
@ -262,7 +262,7 @@ static inline int64_t ts_tree_cursor_goto_first_child_for_byte_and_point(
TSPoint goal_point
) {
TreeCursor *self = (TreeCursor *)_self;
uint32_t initial_size = self->stack.size;
uint32_t initial_size = self->stack.meta.size;
uint32_t visible_child_index = 0;
bool did_descend;
@ -294,7 +294,7 @@ static inline int64_t ts_tree_cursor_goto_first_child_for_byte_and_point(
}
} while (did_descend);
self->stack.size = initial_size;
self->stack.meta.size = initial_size;
return -1;
}
@ -311,9 +311,9 @@ TreeCursorStep ts_tree_cursor_goto_sibling_internal(
bool (*advance)(CursorChildIterator *, TreeCursorEntry *, bool *)
) {
TreeCursor *self = (TreeCursor *)_self;
uint32_t initial_size = self->stack.size;
uint32_t initial_size = self->stack.meta.size;
while (self->stack.size > 1) {
while (self->stack.meta.size > 1) {
TreeCursorEntry entry = array_pop(&self->stack);
CursorChildIterator iterator = ts_tree_cursor_iterate_children(self);
iterator.child_index = entry.child_index;
@ -323,7 +323,7 @@ TreeCursorStep ts_tree_cursor_goto_sibling_internal(
bool visible = false;
advance(&iterator, &entry, &visible);
if (visible && self->stack.size + 1 < initial_size) break;
if (visible && self->stack.meta.size + 1 < initial_size) break;
while (advance(&iterator, &entry, &visible)) {
if (visible) {
@ -338,7 +338,7 @@ TreeCursorStep ts_tree_cursor_goto_sibling_internal(
}
}
self->stack.size = initial_size;
self->stack.meta.size = initial_size;
return TreeCursorStepNone;
}
@ -374,7 +374,7 @@ TreeCursorStep ts_tree_cursor_goto_previous_sibling_internal(TSTreeCursor *_self
return step;
// restore position from the parent node
const TreeCursorEntry *parent = array_get(&self->stack, self->stack.size - 2);
const TreeCursorEntry *parent = array_get(&self->stack, self->stack.meta.size - 2);
Length position = parent->position;
uint32_t child_index = array_back(&self->stack)->child_index;
const Subtree *children = ts_subtree_children((*(parent->subtree)));
@ -407,9 +407,9 @@ bool ts_tree_cursor_goto_previous_sibling(TSTreeCursor *self) {
bool ts_tree_cursor_goto_parent(TSTreeCursor *_self) {
TreeCursor *self = (TreeCursor *)_self;
for (unsigned i = self->stack.size - 2; i + 1 > 0; i--) {
for (unsigned i = self->stack.meta.size - 2; i + 1 > 0; i--) {
if (ts_tree_cursor_is_entry_visible(self, i)) {
self->stack.size = i + 1;
self->stack.meta.size = i + 1;
return true;
}
}
@ -424,7 +424,7 @@ void ts_tree_cursor_goto_descendant(
// Ascend to the lowest ancestor that contains the goal node.
for (;;) {
uint32_t i = self->stack.size - 1;
uint32_t i = self->stack.meta.size - 1;
TreeCursorEntry *entry = array_get(&self->stack, i);
uint32_t next_descendant_index =
entry->descendant_index +
@ -435,10 +435,10 @@ void ts_tree_cursor_goto_descendant(
(next_descendant_index > goal_descendant_index)
) {
break;
} else if (self->stack.size <= 1) {
} else if (self->stack.meta.size <= 1) {
return;
} else {
self->stack.size--;
self->stack.meta.size--;
}
}
@ -478,8 +478,8 @@ TSNode ts_tree_cursor_current_node(const TSTreeCursor *_self) {
TreeCursorEntry *last_entry = array_back(&self->stack);
bool is_extra = ts_subtree_extra(*last_entry->subtree);
TSSymbol alias_symbol = is_extra ? 0 : self->root_alias_symbol;
if (self->stack.size > 1 && !is_extra) {
TreeCursorEntry *parent_entry = array_get(&self->stack, self->stack.size - 2);
if (self->stack.meta.size > 1 && !is_extra) {
TreeCursorEntry *parent_entry = array_get(&self->stack, self->stack.meta.size - 2);
alias_symbol = ts_language_alias_at(
self->tree->language,
parent_entry->subtree->ptr->production_id,
@ -515,7 +515,7 @@ void ts_tree_cursor_current_status(
// Walk up the tree, visiting the current node and its invisible ancestors,
// because fields can refer to nodes through invisible *wrapper* nodes,
for (unsigned i = self->stack.size - 1; i > 0; i--) {
for (unsigned i = self->stack.meta.size - 1; i > 0; i--) {
TreeCursorEntry *entry = array_get(&self->stack, i);
TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1);
@ -542,7 +542,7 @@ void ts_tree_cursor_current_status(
self->tree->language,
entry_symbol
);
if (i != self->stack.size - 1 && entry_metadata.visible) break;
if (i != self->stack.meta.size - 1 && entry_metadata.visible) break;
// Record any supertypes
if (entry_metadata.supertype && *supertype_count < max_supertypes) {
@ -619,7 +619,7 @@ void ts_tree_cursor_current_status(
uint32_t ts_tree_cursor_current_depth(const TSTreeCursor *_self) {
const TreeCursor *self = (const TreeCursor *)_self;
uint32_t depth = 0;
for (unsigned i = 1; i < self->stack.size; i++) {
for (unsigned i = 1; i < self->stack.meta.size; i++) {
if (ts_tree_cursor_is_entry_visible(self, i)) {
depth++;
}
@ -629,7 +629,7 @@ uint32_t ts_tree_cursor_current_depth(const TSTreeCursor *_self) {
TSNode ts_tree_cursor_parent_node(const TSTreeCursor *_self) {
const TreeCursor *self = (const TreeCursor *)_self;
for (int i = (int)self->stack.size - 2; i >= 0; i--) {
for (int i = (int)self->stack.meta.size - 2; i >= 0; i--) {
TreeCursorEntry *entry = array_get(&self->stack, i);
bool is_visible = true;
TSSymbol alias_symbol = 0;
@ -658,13 +658,13 @@ TSFieldId ts_tree_cursor_current_field_id(const TSTreeCursor *_self) {
const TreeCursor *self = (const TreeCursor *)_self;
// Walk up the tree, visiting the current node and its invisible ancestors.
for (unsigned i = self->stack.size - 1; i > 0; i--) {
for (unsigned i = self->stack.meta.size - 1; i > 0; i--) {
TreeCursorEntry *entry = array_get(&self->stack, i);
TreeCursorEntry *parent_entry = array_get(&self->stack, i - 1);
// Stop walking up when another visible node is found.
if (
i != self->stack.size - 1 &&
i != self->stack.meta.size - 1 &&
ts_tree_cursor_is_entry_visible(self, i)
) break;

View file

@ -408,7 +408,7 @@ static void *copy_strings(
} else {
const uint8_t *string = &data[address];
uint32_t len = strlen((const char *)string);
result[i] = (const char *)(uintptr_t)string_data->size;
result[i] = (const char *)(uintptr_t)string_data->meta.size;
array_extend(string_data, len + 1, string);
}
}
@ -946,7 +946,7 @@ void ts_wasm_store_delete(TSWasmStore *self) {
wasm_globaltype_delete(self->const_i32_type);
wasmtime_store_delete(self->store);
wasm_engine_delete(self->engine);
for (unsigned i = 0; i < self->language_instances.size; i++) {
for (unsigned i = 0; i < self->language_instances.meta.size; i++) {
LanguageWasmInstance *instance = array_get(&self->language_instances, i);
language_id_delete(instance->language_id);
}
@ -956,7 +956,7 @@ void ts_wasm_store_delete(TSWasmStore *self) {
size_t ts_wasm_store_language_count(const TSWasmStore *self) {
size_t result = 0;
for (unsigned i = 0; i < self->language_instances.size; i++) {
for (unsigned i = 0; i < self->language_instances.meta.size; i++) {
const WasmLanguageId *id = array_get(&self->language_instances, i)->language_id;
if (!id->is_language_deleted) {
result++;
@ -1451,7 +1451,7 @@ const TSLanguage *ts_wasm_store_load_language(
language->keyword_lex_fn = (bool (*)(TSLexer *, TSStateId))language_module;
// Clear out any instances of languages that have been deleted.
for (unsigned i = 0; i < self->language_instances.size; i++) {
for (unsigned i = 0; i < self->language_instances.meta.size; i++) {
WasmLanguageId *id = array_get(&self->language_instances, i)->language_id;
if (id->is_language_deleted) {
language_id_delete(id);
@ -1492,7 +1492,7 @@ bool ts_wasm_store_add_language(
// Search for this store's instance of the language module. Also clear out any
// instances of languages that have been deleted.
bool exists = false;
for (unsigned i = 0; i < self->language_instances.size; i++) {
for (unsigned i = 0; i < self->language_instances.meta.size; i++) {
WasmLanguageId *id = array_get(&self->language_instances, i)->language_id;
if (id->is_language_deleted) {
language_id_delete(id);
@ -1507,7 +1507,7 @@ bool ts_wasm_store_add_language(
// If the language module has not been instantiated in this store, then add
// it to this store.
if (!exists) {
*index = self->language_instances.size;
*index = self->language_instances.meta.size;
char *message;
wasmtime_instance_t instance;
int32_t language_address;