2014-08-28 18:35:30 -07:00
|
|
|
#include "runtime/lexer.h"
|
2014-07-10 13:14:52 -07:00
|
|
|
#include "tree_sitter/parser.h"
|
2014-07-17 23:29:11 -07:00
|
|
|
#include "runtime/tree.h"
|
2014-09-26 16:15:07 -07:00
|
|
|
#include "runtime/length.h"
|
2014-09-13 00:15:24 -07:00
|
|
|
#include "utf8proc.h"
|
2014-07-10 13:14:52 -07:00
|
|
|
|
/* Sentinel chunk stored when the input is exhausted; advance() compares
 * against it by pointer identity to detect end-of-input. */
static const char *empty_chunk = "";
2014-09-13 00:15:24 -07:00
|
|
|
|
2014-10-03 16:06:08 -07:00
|
|
|
/*
 * Consume the current lookahead character and decode the next one.
 *
 * Returns false when the lexer has already reached the end of the
 * input (signalled by the chunk pointer being the empty-chunk
 * sentinel); otherwise advances the position, refills the chunk from
 * the input when needed, decodes the next unicode character into
 * `lookahead`, and returns true.
 */
static bool advance(TSLexer *lexer) {
  /* Already at end-of-input? Sentinel is compared by identity. */
  if (lexer->chunk == empty_chunk)
    return false;

  /* Step the position past the previously decoded character, if any. */
  if (lexer->lookahead_size) {
    lexer->current_position.bytes += lexer->lookahead_size;
    lexer->current_position.chars += 1;
  }

  /* Pull a new chunk from the input once the current one is spent. */
  if (lexer->current_position.bytes >= lexer->chunk_start + lexer->chunk_size) {
    lexer->chunk_start += lexer->chunk_size;
    lexer->chunk = lexer->input.read_fn(lexer->input.data, &lexer->chunk_size);
    if (!lexer->chunk_size)
      lexer->chunk = empty_chunk;
  }

  /*
   * Decode the next unicode character at the current offset.
   * NOTE(review): the `+ 1` allows utf8proc to look one byte past
   * chunk_size — presumably chunks delivered by read_fn are
   * NUL-terminated; confirm against the Input implementations, since
   * otherwise this is a one-byte overread at a chunk boundary.
   */
  size_t offset = lexer->current_position.bytes - lexer->chunk_start;
  lexer->lookahead_size = utf8proc_iterate(
      (const uint8_t *)lexer->chunk + offset,
      lexer->chunk_size - offset + 1, &lexer->lookahead);

  return true;
}
|
|
|
|
|
|
2014-07-31 13:11:39 -07:00
|
|
|
/*
 * Finish the token currently being scanned and package it as a tree
 * node.
 *
 * `size` spans from the token's start to the current position;
 * `padding` is the gap between the previous token's end and this
 * token's start (presumably skipped whitespace — confirm). Error
 * symbols produce a dedicated error node carrying the offending
 * lookahead character.
 */
static TSTree *accept(TSLexer *lexer, TSSymbol symbol, int is_hidden) {
  TSLength size =
      ts_length_sub(lexer->current_position, lexer->token_start_position);
  TSLength padding =
      ts_length_sub(lexer->token_start_position, lexer->token_end_position);

  /* Remember where this token ended, for the next token's padding. */
  lexer->token_end_position = lexer->current_position;

  if (symbol == ts_builtin_sym_error)
    return ts_tree_make_error(size, padding, ts_lexer_lookahead_char(lexer));
  return ts_tree_make_leaf(symbol, size, padding, is_hidden);
}
|
2014-07-30 23:40:02 -07:00
|
|
|
|
2014-08-31 16:24:27 -07:00
|
|
|
/*
|
|
|
|
|
* The `advance` and `accept` methods are stored as fields on the Lexer so
|
|
|
|
|
* that generated parsers can call them without needing to be linked against
|
|
|
|
|
* this library.
|
|
|
|
|
*/
|
2014-07-30 23:40:02 -07:00
|
|
|
TSLexer ts_lexer_make() {
|
2014-10-05 16:56:50 -07:00
|
|
|
TSLexer result = (TSLexer) { .advance_fn = advance,
|
|
|
|
|
.accept_fn = accept,
|
|
|
|
|
.debug = 0,
|
|
|
|
|
.chunk = NULL,
|
|
|
|
|
.chunk_start = 0,
|
|
|
|
|
.chunk_size = 0,
|
|
|
|
|
.current_position = ts_length_zero(),
|
|
|
|
|
.token_start_position = ts_length_zero(),
|
|
|
|
|
.token_end_position = ts_length_zero(),
|
|
|
|
|
.lookahead = 0,
|
|
|
|
|
.lookahead_size = 0, };
|
2014-09-11 13:12:06 -07:00
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
2014-10-05 16:56:50 -07:00
|
|
|
/*
 * Reposition the lexer at `position`, discarding all buffered state.
 *
 * Seeks the underlying input, clears the lookahead, starts a brand-new
 * chunk at the target byte offset, and decodes the first character via
 * ts_lexer_advance.
 */
void ts_lexer_reset(TSLexer *lexer, TSLength position) {
  lexer->input.seek_fn(lexer->input.data, position);

  lexer->current_position = position;
  lexer->token_end_position = position;
  lexer->lookahead = 0;
  lexer->lookahead_size = 0;

  /* Begin an empty chunk at the seek target, then fill it. */
  lexer->chunk_start = position.bytes;
  lexer->chunk_size = 0;
  lexer->chunk = lexer->input.read_fn(lexer->input.data, &lexer->chunk_size);

  /* NOTE(review): token_start_position is deliberately(?) left
     untouched — confirm callers always set it before the next
     accept(). */
  ts_lexer_advance(lexer);
}
|