2014-10-17 17:52:54 -07:00
|
|
|
#include <stdio.h>
|
2014-08-28 18:35:30 -07:00
|
|
|
#include "runtime/lexer.h"
|
2014-07-10 13:14:52 -07:00
|
|
|
#include "tree_sitter/parser.h"
|
2014-07-17 23:29:11 -07:00
|
|
|
#include "runtime/tree.h"
|
2014-09-26 16:15:07 -07:00
|
|
|
#include "runtime/length.h"
|
2014-10-14 22:50:24 -07:00
|
|
|
#include "runtime/debugger.h"
|
2014-09-13 00:15:24 -07:00
|
|
|
#include "utf8proc.h"
|
2014-07-10 13:14:52 -07:00
|
|
|
|
2014-10-17 17:52:54 -07:00
|
|
|
/*
 * Route a formatted message to the attached debugger, if any.
 * Formats into the lexer's scratch buffer with snprintf, then hands the
 * text to debugger.debug_fn tagged as TSDebugTypeLex.
 *
 * Wrapped in do/while(0) so the macro acts as a single statement and is
 * safe inside unbraced if/else bodies (the original bare `if { ... }`
 * form is a dangling-else hazard).
 */
#define DEBUG(...)                                                         \
  do {                                                                     \
    if (lexer->debugger.debug_fn) {                                        \
      snprintf(lexer->debug_buffer, TS_DEBUG_BUFFER_SIZE, __VA_ARGS__);    \
      lexer->debugger.debug_fn(lexer->debugger.data, TSDebugTypeLex,       \
                               lexer->debug_buffer);                       \
    }                                                                      \
  } while (0)
|
|
|
|
|
|
2015-07-27 18:29:48 -07:00
|
|
|
/*
 * Log the current lookahead character: codes 1..255 are printed as a
 * character literal, anything else (including EOF/0 and wide code points)
 * as a decimal value.
 *
 * No trailing semicolon in the expansion — callers supply their own.
 * (The original definition ended with `;`, producing `;;` at call sites
 * and breaking use in unbraced if/else bodies.)
 */
#define DEBUG_LOOKAHEAD()                                   \
  DEBUG((0 < lexer->lookahead && lexer->lookahead < 256)    \
            ? "lookahead char:'%c'"                         \
            : "lookahead char:%d",                          \
        lexer->lookahead)
|
|
|
|
|
|
2014-10-03 15:44:49 -07:00
|
|
|
/* Sentinel chunk used once the input is exhausted; ts_lexer__advance
 * compares the chunk pointer against this to detect end-of-input. */
static const char *empty_chunk = "";
|
2014-09-13 00:15:24 -07:00
|
|
|
|
2015-08-16 19:53:34 -07:00
|
|
|
/*
 * Pull the next chunk of text from the input source into the lexer.
 * If the lexer's current position is not exactly at the end of the
 * previous chunk (e.g. after a reset), the input is re-seeked first.
 * An empty read installs the empty_chunk sentinel so end-of-input can
 * be detected by pointer comparison.
 */
static void ts_lexer__get_chunk(TSLexer *lexer) {
  TSInput source = lexer->input;

  /* Only seek when we are not already positioned at the chunk boundary. */
  if (lexer->current_position.bytes != lexer->chunk_start + lexer->chunk_size) {
    source.seek_fn(source.data, lexer->current_position);
  }

  lexer->chunk_start = lexer->current_position.bytes;
  lexer->chunk = source.read_fn(source.data, &lexer->chunk_size);
  if (lexer->chunk_size == 0) {
    lexer->chunk = empty_chunk;
  }
}
|
|
|
|
|
|
2015-08-16 19:53:34 -07:00
|
|
|
/*
 * Decode the UTF-8 code point at the lexer's current position into
 * lexer->lookahead, recording its encoded byte length in
 * lexer->lookahead_size.
 */
static void ts_lexer__get_lookahead(TSLexer *lexer) {
  size_t offset = lexer->current_position.bytes - lexer->chunk_start;
  const uint8_t *next = (const uint8_t *)lexer->chunk + offset;

  /* NOTE(review): the `+ 1` presumably lets utf8proc see a terminating
   * byte past the chunk's reported size — confirm the input source
   * guarantees that extra byte is readable. */
  lexer->lookahead_size =
      utf8proc_iterate(next, lexer->chunk_size - offset + 1, &lexer->lookahead);
  DEBUG_LOOKAHEAD();
}
|
|
|
|
|
|
2015-08-16 19:53:34 -07:00
|
|
|
/*
 * Debug hook invoked when a lexing pass begins: logs the lex state and
 * the current lookahead character. Purely diagnostic — mutates no
 * lexer state.
 */
static void ts_lexer__start(TSLexer *lexer, TSStateId lex_state) {
  DEBUG("start_lex state:%d", lex_state);
  DEBUG_LOOKAHEAD();
}
|
|
|
|
|
|
2015-08-16 19:53:34 -07:00
|
|
|
/*
 * Mark the current position as the start of a new token, so its size
 * and padding can be computed when the token is accepted.
 */
static void ts_lexer__start_token(TSLexer *lexer) {
  lexer->token_start_position = lexer->current_position;
  DEBUG("start_token chars:%lu", lexer->current_position.chars);
}
|
|
|
|
|
|
2015-08-16 19:53:34 -07:00
|
|
|
/*
 * Consume the current lookahead character and decode the next one.
 * Returns false (without advancing) once the input is exhausted,
 * true otherwise.
 */
static bool ts_lexer__advance(TSLexer *lexer, TSStateId state) {
  DEBUG("advance state:%d", state);

  /* Nothing left to consume once the sentinel chunk is installed. */
  if (lexer->chunk == empty_chunk) {
    return false;
  }

  /* Step past the character decoded by the previous lookahead. */
  if (lexer->lookahead_size) {
    lexer->current_position.bytes += lexer->lookahead_size;
    lexer->current_position.chars += 1;
  }

  /* Refill when the position has run off the end of the current chunk. */
  if (lexer->current_position.bytes >= lexer->chunk_start + lexer->chunk_size) {
    ts_lexer__get_chunk(lexer);
  }

  ts_lexer__get_lookahead(lexer);
  return true;
}
|
|
|
|
|
|
2015-08-16 19:53:34 -07:00
|
|
|
/*
 * Finish the token in progress and build a tree node for it.
 * `content` spans from the token's start to the current position;
 * `padding` is the whitespace consumed between the previous token's end
 * and this token's start. The builtin error symbol produces an error
 * node carrying the offending lookahead character; everything else
 * produces an ordinary leaf.
 */
static TSTree *ts_lexer__accept(TSLexer *lexer, TSSymbol symbol, int is_hidden,
                                const char *symbol_name) {
  TSLength content =
      ts_length_sub(lexer->current_position, lexer->token_start_position);
  TSLength padding =
      ts_length_sub(lexer->token_start_position, lexer->token_end_position);
  lexer->token_end_position = lexer->current_position;

  if (symbol != ts_builtin_sym_error) {
    DEBUG("accept_token sym:%s", symbol_name);
    return ts_tree_make_leaf(symbol, content, padding, is_hidden);
  }

  DEBUG("error_char");
  return ts_tree_make_error(content, padding, lexer->lookahead);
}
|
2014-07-30 23:40:02 -07:00
|
|
|
|
2014-08-31 16:24:27 -07:00
|
|
|
/*
|
2014-10-17 16:20:01 -07:00
|
|
|
* The lexer's methods are stored as struct fields so that generated parsers
|
|
|
|
|
* can call them without needing to be linked against this library.
|
2014-08-31 16:24:27 -07:00
|
|
|
*/
|
2014-10-17 16:20:01 -07:00
|
|
|
|
2014-07-30 23:40:02 -07:00
|
|
|
TSLexer ts_lexer_make() {
|
2015-08-16 19:53:34 -07:00
|
|
|
TSLexer result = (TSLexer){.start_fn = ts_lexer__start,
|
|
|
|
|
.start_token_fn = ts_lexer__start_token,
|
|
|
|
|
.advance_fn = ts_lexer__advance,
|
|
|
|
|
.accept_fn = ts_lexer__accept,
|
2015-07-27 18:29:48 -07:00
|
|
|
.chunk = NULL,
|
|
|
|
|
.chunk_start = 0,
|
|
|
|
|
.chunk_size = 0,
|
|
|
|
|
.current_position = ts_length_zero(),
|
|
|
|
|
.token_start_position = ts_length_zero(),
|
|
|
|
|
.token_end_position = ts_length_zero(),
|
|
|
|
|
.lookahead = 0,
|
|
|
|
|
.lookahead_size = 0,
|
|
|
|
|
.debugger = ts_debugger_null() };
|
2014-09-11 13:12:06 -07:00
|
|
|
return result;
|
|
|
|
|
}
|
|
|
|
|
|
2014-10-05 16:56:50 -07:00
|
|
|
/*
 * Reposition the lexer at `position` and prime it for lexing: both the
 * current position and the previous token's end are moved there, then a
 * chunk is (re)loaded and the lookahead character decoded.
 */
void ts_lexer_reset(TSLexer *lexer, TSLength position) {
  lexer->current_position = position;
  lexer->token_end_position = position;

  /* Chunk must be fetched before the lookahead can be decoded from it. */
  ts_lexer__get_chunk(lexer);
  ts_lexer__get_lookahead(lexer);
}
|