Remove unnecessary test helpers

This commit is contained in:
Max Brunsfeld 2015-09-06 17:07:04 -07:00
parent f9316933ad
commit 557c8c7f28
6 changed files with 36 additions and 89 deletions

View file

@@ -1,11 +1,26 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/build_tables/rule_transitions.h"
#include "compiler/rules/metadata.h"
#include "compiler/helpers/containers.h"
using namespace rules;
using namespace build_tables;
// Map from keys to grammar rules with deep (value-based) equality.
// std::map's own operator== would compare the rule_ptr pointers
// themselves; this subclass dereferences and compares the rules.
template<typename K>
class rule_map : public std::map<K, rule_ptr> {
 public:
  bool operator==(const std::map<K, rule_ptr> &other) const {
    if (other.size() != this->size())
      return false;
    for (auto iter = this->begin(); iter != this->end(); ++iter) {
      auto counterpart = other.find(iter->first);
      if (counterpart == other.end())
        return false;
      if (!iter->second->operator==(*counterpart->second))
        return false;
    }
    return true;
  }

  rule_map(const std::initializer_list<std::pair<const K, rule_ptr>> &list)
      : std::map<K, rule_ptr>(list) {}
};
START_TEST
describe("sym_transitions", []() {

View file

@@ -1,63 +0,0 @@
#ifndef HELPERS_CONTAINERS_H_
#define HELPERS_CONTAINERS_H_
#include <map>
#include <vector>
#include <initializer_list>
#include "tree_sitter/compiler.h"
#include "compiler/rule.h"
using std::map;
using std::vector;
using std::initializer_list;
using std::pair;
using tree_sitter::rule_ptr;
// Map of keys to rule pointers that compares by rule *value* rather than
// by pointer identity (map's default operator== would compare pointers).
template<typename K>
class rule_map : public map<K, rule_ptr> {
 public:
  bool operator==(const map<K, rule_ptr> &other) const {
    if (other.size() != this->size())
      return false;
    for (auto iter = this->begin(); iter != this->end(); ++iter) {
      auto counterpart = other.find(iter->first);
      if (counterpart == other.end())
        return false;
      if (!iter->second->operator==(*counterpart->second))
        return false;
    }
    return true;
  }

  rule_map(const initializer_list<pair<const K, rule_ptr>> &list)
      : map<K, rule_ptr>(list) {}
};
// Vector wrapper whose operator== compares element-by-element via each
// element's own operator==, mirroring std::vector's semantics but making
// the comparison explicit for test assertions.
template<typename T>
class eq_vector : public vector<T> {
 public:
  bool operator==(const vector<T> &other) const {
    if (other.size() != this->size())
      return false;
    size_t index = 0;
    for (const T &element : *this) {
      if (!(element == other[index]))
        return false;
      index++;
    }
    return true;
  }

  eq_vector(const initializer_list<T> &list) : vector<T>(list) {}
};
// Vector of rule pointers with deep equality: two vectors are equal when
// they have the same length and each pair of corresponding rules compares
// equal by value (not by pointer identity).
class rule_vector : public vector<rule_ptr> {
 public:
  bool operator==(const vector<rule_ptr> &other) const {
    if (this->size() != other.size()) return false;
    for (size_t i = 0; i < this->size(); i++) {
      auto rule = this->operator[](i);
      auto other_rule = other[i];
      // BUG FIX: the original wrote rule->operator==(*rule), comparing each
      // rule against *itself* — trivially true — so only the sizes were ever
      // checked. Compare against the corresponding rule from `other`.
      if (!rule->operator==(*other_rule))
        return false;
    }
    return true;
  }

  rule_vector(const initializer_list<rule_ptr> &list) :
      vector<rule_ptr>(list) {}
};
#endif // HELPERS_CONTAINERS_H_

View file

@@ -1,7 +1,6 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/prepared_grammar.h"
#include "compiler/prepare_grammar/expand_repeats.h"
#include "compiler/helpers/containers.h"
START_TEST
@@ -20,7 +19,7 @@ describe("expand_repeats", []() {
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
AssertThat(match.rules, Equals(vector<RuleEntry>({
{
"rule0",
choice({ i_sym(1), blank() }),
@@ -51,7 +50,7 @@ describe("expand_repeats", []() {
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
AssertThat(match.rules, Equals(vector<RuleEntry>({
{
"rule0",
seq({
@@ -82,7 +81,7 @@ describe("expand_repeats", []() {
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
AssertThat(match.rules, Equals(vector<RuleEntry>({
{
"rule0",
choice({ i_token(10), i_sym(1), blank() }),
@@ -118,7 +117,7 @@ describe("expand_repeats", []() {
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
AssertThat(match.rules, Equals(vector<RuleEntry>({
{
"rule0",
choice({
@@ -157,7 +156,7 @@ describe("expand_repeats", []() {
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
AssertThat(match.rules, Equals(vector<RuleEntry>({
{
"rule0",
seq({
@@ -201,7 +200,7 @@ describe("expand_repeats", []() {
auto match = expand_repeats(grammar);
AssertThat(match.rules, Equals(eq_vector<RuleEntry>({
AssertThat(match.rules, Equals(vector<RuleEntry>({
{
"rule0",
choice({ i_sym(2), blank() }),

View file

@@ -1,6 +1,5 @@
#include "compiler/compiler_spec_helper.h"
#include "compiler/prepared_grammar.h"
#include "compiler/helpers/containers.h"
#include "compiler/prepare_grammar/expand_tokens.h"
START_TEST
@@ -26,7 +25,7 @@ describe("expand_tokens", []() {
auto result = expand_tokens(grammar);
AssertThat(result.second, Equals((const GrammarError *)nullptr));
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
AssertThat(result.first.rules, Equals(vector<RuleEntry>({
{
"rule_A",
seq({
@@ -57,7 +56,7 @@ describe("expand_tokens", []() {
auto result = expand_tokens(grammar);
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
AssertThat(result.first.rules, Equals(vector<RuleEntry>({
{
"rule_A",
metadata(seq({
@@ -91,7 +90,7 @@ describe("expand_tokens", []() {
auto result = expand_tokens(grammar);
AssertThat(result.second, Equals((const GrammarError *)nullptr));
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
AssertThat(result.first.rules, Equals(vector<RuleEntry>({
{
"rule_A",
seq({
@@ -115,7 +114,7 @@ describe("expand_tokens", []() {
auto result = expand_tokens(grammar);
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
AssertThat(result.first.rules, Equals(vector<RuleEntry>({
{
"rule_A",
repeat(character({ 945, 946, 947, 948 }, false)),

View file

@@ -2,7 +2,6 @@
#include "compiler/prepared_grammar.h"
#include "compiler/prepare_grammar/interned_grammar.h"
#include "compiler/prepare_grammar/extract_tokens.h"
#include "compiler/helpers/containers.h"
START_TEST
@@ -49,7 +48,7 @@ describe("extract_tokens", []() {
AssertThat(error, Equals<const GrammarError *>(nullptr));
AssertThat(syntax_grammar.rules, Equals(eq_vector<RuleEntry>({
AssertThat(syntax_grammar.rules, Equals(vector<RuleEntry>({
{
"rule_A",
repeat(seq({
@@ -87,7 +86,7 @@ describe("extract_tokens", []() {
}
})));
AssertThat(lexical_grammar.rules, Equals(eq_vector<RuleEntry>({
AssertThat(lexical_grammar.rules, Equals(vector<RuleEntry>({
// Strings become anonymous rules.
{
@@ -143,7 +142,7 @@ describe("extract_tokens", []() {
SyntaxGrammar &syntax_grammar = get<0>(result);
LexicalGrammar &lexical_grammar = get<1>(result);
AssertThat(syntax_grammar.rules, Equals(eq_vector<RuleEntry>({
AssertThat(syntax_grammar.rules, Equals(vector<RuleEntry>({
{
"rule_A",
seq({ i_token(0), i_sym(0), i_token(0) }),
@@ -151,7 +150,7 @@ describe("extract_tokens", []() {
}
})));
AssertThat(lexical_grammar.rules, Equals(eq_vector<RuleEntry>({
AssertThat(lexical_grammar.rules, Equals(vector<RuleEntry>({
{
"ab",
str("ab"),
@@ -182,7 +181,7 @@ describe("extract_tokens", []() {
SyntaxGrammar &syntax_grammar = get<0>(result);
LexicalGrammar &lexical_grammar = get<1>(result);
AssertThat(syntax_grammar.rules, Equals(eq_vector<RuleEntry>({
AssertThat(syntax_grammar.rules, Equals(vector<RuleEntry>({
{
"rule_A",
seq({ i_sym(1), i_token(0) }),
@@ -200,7 +199,7 @@ describe("extract_tokens", []() {
},
})));
AssertThat(lexical_grammar.rules, Equals(eq_vector<RuleEntry>({
AssertThat(lexical_grammar.rules, Equals(vector<RuleEntry>({
{
"ab",
str("ab"),
@@ -261,10 +260,9 @@ describe("extract_tokens", []() {
AssertThat(get<2>(result), Equals<const GrammarError *>(nullptr));
AssertThat(get<1>(result).separators, Equals(rule_vector({
pattern("\\s+"),
str("y"),
})));
AssertThat(get<1>(result).separators.size(), Equals<size_t>(2));
AssertThat(get<1>(result).separators[0], EqualsPointer(pattern("\\s+")));
AssertThat(get<1>(result).separators[1], EqualsPointer(str("y")));
AssertThat(get<0>(result).ubiquitous_tokens, IsEmpty());
});

View file

@@ -2,7 +2,6 @@
#include "compiler/prepare_grammar/intern_symbols.h"
#include "compiler/rules/named_symbol.h"
#include "compiler/rules/symbol.h"
#include "compiler/helpers/containers.h"
START_TEST
@@ -20,7 +19,7 @@ describe("intern_symbols", []() {
auto result = intern_symbols(grammar);
AssertThat(result.second, Equals((GrammarError *)nullptr));
AssertThat(result.first.rules, Equals(eq_vector<RuleEntry>({
AssertThat(result.first.rules, Equals(vector<RuleEntry>({
{
"x",
choice({ i_sym(1), i_sym(2) }),