Fix memory leaks

parent 9bc7d51074
commit 12331d66f5

7 changed files with 24 additions and 14 deletions
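Taken together, the hunks below: wire `ts_lr_parser_free` into the generated parser constructors and implement it; make `ts_document_free` call the parser's `free_fn` and the input's `release_fn`; add a missing `closedir` in a test helper; and update the language specs so each test constructs its own parser instead of sharing one instance.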
```diff
@@ -75,7 +75,7 @@ static const ts_tree * ts_parse(void *data, ts_input input, ts_input_edit *edit)
 ts_parser constructor_name() { \
   return (ts_parser) { \
     .parse_fn = ts_parse, \
-    .free_fn = NULL, \
+    .free_fn = ts_lr_parser_free, \
     .symbol_names = ts_symbol_names, \
     .data = ts_lr_parser_make( \
       SYMBOL_COUNT, \
```
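Every parser built by this constructor macro now carries a real destructor instead of `NULL`, so whoever owns the parser can release `data`. A minimal self-contained sketch of the pattern, in illustrative names rather than tree-sitter API:

```c
#include <stdlib.h>

/* Sketch of the pattern this hunk enables, not tree-sitter API: a
 * value-type handle carries an owned heap pointer together with the
 * function that knows how to free it. handle/handle_make/handle_dispose
 * are illustrative names. */
typedef struct {
  void *data;
  void (*free_fn)(void *);
} handle;

static handle handle_make(void) {
  return (handle) {
    .data = malloc(64),
    .free_fn = free,  /* before this commit the analogous slot was NULL, so data leaked */
  };
}

static void handle_dispose(handle *h) {
  if (h->free_fn)     /* guard, mirroring ts_document_free later in this commit */
    h->free_fn(h->data);
}

int main(void) {
  handle h = handle_make();
  handle_dispose(&h);
  return 0;
}
```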
```diff
@@ -56,6 +56,7 @@ ts_lr_parser * ts_lr_parser_make(size_t symbol_count,
                                  ts_tree * (* lex_fn)(ts_lexer *, ts_state_id),
                                  const int *hidden_symbol_flags,
                                  const int *ubiquitous_symbol_flags);
+void ts_lr_parser_free(void *data);
 void ts_lr_parser_initialize(ts_lr_parser *parser, ts_input input, ts_input_edit *edit);
 ts_tree * ts_lr_parser_parse(ts_lr_parser *parser, const char **symbol_names);
 
```
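This header hunk simply publishes the new destructor alongside the other `ts_lr_parser` entry points, so the constructor macro in the previous hunk can name it.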
```diff
@@ -89,6 +89,8 @@ static vector<string> list_directory(string dir_name) {
     if (name != "." && name != "..")
       result.push_back(dir_name + "/" + name);
   }
+
+  closedir(dir);
   return result;
 }
 
```
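The rule this hunk enforces: every successful `opendir()` must be paired with `closedir()`, or each call leaks a `DIR` handle and its file descriptor. A C analogue of the C++ test helper above, with an illustrative name:

```c
#include <dirent.h>
#include <stdio.h>

/* print_directory is an illustrative stand-in for the C++ helper above. */
static int print_directory(const char *dir_name) {
  DIR *dir = opendir(dir_name);
  if (!dir)
    return -1;
  for (struct dirent *entry = readdir(dir); entry; entry = readdir(dir)) {
    const char *name = entry->d_name;
    if (name[0] == '.' && (name[1] == '\0' || (name[1] == '.' && name[2] == '\0')))
      continue;  /* skip "." and "..", as the helper does */
    printf("%s/%s\n", dir_name, name);
  }
  closedir(dir);  /* the call this hunk adds */
  return 0;
}

int main(void) {
  return print_directory(".");
}
```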
```diff
@@ -21,11 +21,6 @@ static int spy_seek(void *data, size_t position) {
   return 0;
 }
 
-static void spy_release(void *data) {
-  SpyReader *reader = static_cast<SpyReader *>(data);
-  delete reader;
-}
-
 SpyReader::SpyReader(string content, size_t chunk_size) :
   content(content),
   position(0),
@@ -34,5 +29,5 @@ SpyReader::SpyReader(string content, size_t chunk_size) :
     this,
     spy_read,
     spy_seek,
-    spy_release,
+    nullptr,
   }) {}
```
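A plausible reading of these two hunks: now that `ts_document_free` honors the input's `release_fn` (see the `ts_document_free` hunk below), a `SpyReader` that the test itself owns would be destroyed twice if the input still carried `spy_release`, so the destructor is dropped and `nullptr` registered instead. A self-contained C sketch of that ownership rule, in illustrative names:

```c
#include <stdlib.h>

/* Once the consumer starts honoring release_fn (as ts_document_free now
 * does), a caller that keeps ownership must register NULL instead of a
 * real destructor, or the buffer is freed twice. */
typedef struct {
  void *data;
  void (*release_fn)(void *);
} input;

static void dispose_input(input *in) {
  if (in->release_fn)
    in->release_fn(in->data);
}

int main(void) {
  /* caller-owned: release_fn is NULL, the caller frees */
  char *buffer = malloc(16);
  input caller_owned = { .data = buffer, .release_fn = NULL };
  dispose_input(&caller_owned);  /* no-op */
  free(buffer);

  /* consumer-owned: hand over the destructor and do not free here */
  input consumer_owned = { .data = malloc(16), .release_fn = free };
  dispose_input(&consumer_owned);
  return 0;
}
```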
```diff
@@ -19,12 +19,12 @@ describe("Languages", [&]() {
     ts_document_free(doc);
   });
 
-  auto run_tests_for_language = [&](string language, ts_parser parser) {
+  auto run_tests_for_language = [&](string language, ts_parser (parser_constructor)()) {
     describe(language.c_str(), [&]() {
       before_each([&]() {
-        ts_document_set_parser(doc, parser);
+        ts_document_set_parser(doc, parser_constructor());
       });
 
       for (auto &entry : test_entries_for_language(language)) {
         it(entry.description.c_str(), [&]() {
           ts_document_set_input_string(doc, entry.input.c_str());
@@ -34,10 +34,10 @@ describe("Languages", [&]() {
     });
   };
 
-  run_tests_for_language("json", ts_parser_json());
-  run_tests_for_language("arithmetic", ts_parser_arithmetic());
-  run_tests_for_language("javascript", ts_parser_javascript());
-  run_tests_for_language("golang", ts_parser_golang());
+  run_tests_for_language("json", ts_parser_json);
+  run_tests_for_language("arithmetic", ts_parser_arithmetic);
+  run_tests_for_language("javascript", ts_parser_javascript);
+  run_tests_for_language("golang", ts_parser_golang);
   });
 
 END_TEST
```
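The specs now pass the constructor function itself and call it inside `before_each`, so every test gets a fresh parser. Presumably this is forced by the `ts_document_free` change below: a single pre-built parser shared across tests would be freed by the first document that owned it and then reused dangling. A self-contained C sketch of the factory pattern, in illustrative names:

```c
#include <stdio.h>

/* Pass a factory and call it once per test, rather than sharing one
 * pre-built value across every test. All names here are illustrative
 * stand-ins, not the real test harness. */
typedef struct { const char *name; } parser;

static parser make_json_parser(void) { return (parser) { "json" }; }

static void run_tests_for_language(const char *language, parser (*constructor)(void)) {
  for (int i = 0; i < 2; i++) {
    parser p = constructor();  /* fresh parser per test case */
    printf("[%s] case %d uses parser %s\n", language, i, p.name);
  }
}

int main(void) {
  run_tests_for_language("json", make_json_parser);
  return 0;
}
```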
```diff
@@ -13,6 +13,10 @@ ts_document * ts_document_make() {
 }
 
 void ts_document_free(ts_document *document) {
+  if (document->parser.free_fn)
+    document->parser.free_fn(document->parser.data);
+  if (document->input.release_fn)
+    document->input.release_fn(document->input.data);
   free(document);
 }
 
```
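With these guards, a document tears down whatever its hooks point at, and a caller that keeps ownership passes `NULL` hooks (as the spy-reader hunks do). A hedged sketch of the lifecycle this enables; only the function names appear in this commit, the exact signatures are assumptions:

```c
/* Usage sketch; signatures assumed from call sites elsewhere in this commit. */
ts_document *doc = ts_document_make();
ts_document_set_parser(doc, ts_parser_json());   /* doc takes ownership of parser.data */
ts_document_set_input_string(doc, "[1, 2, 3]");
/* ... parse and inspect the tree ... */
ts_document_free(doc);  /* runs parser.free_fn, then input.release_fn, then free(doc) */
```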
```diff
@@ -176,6 +176,14 @@ ts_lr_parser * ts_lr_parser_make(size_t symbol_count,
   return result;
 }
 
+void ts_lr_parser_free(void *data) {
+  ts_lr_parser *parser = (ts_lr_parser *)data;
+  if (parser->lookahead) ts_tree_release(parser->lookahead);
+  if (parser->next_lookahead) ts_tree_release(parser->next_lookahead);
+  ts_stack_delete(&parser->stack);
+  free(parser);
+}
+
 void ts_lr_parser_initialize(ts_lr_parser *parser, ts_input input, ts_input_edit *edit) {
   if (!edit) ts_stack_shrink(&parser->stack, 0);
   parser->lookahead = NULL;
```
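This is the destructor the first hunk installs as `free_fn`: it drops the parser's references to any pending lookahead trees (the `ts_tree_release` name suggests trees are reference-counted), tears down the parse stack, and finally frees the parser struct itself. The null checks matter because `ts_lr_parser_initialize` resets `lookahead` to `NULL`, so either pointer may be empty at destruction time.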