Moved wesley to header-only
@@ -11,12 +11,9 @@ find_package( Boost 1.36.0 COMPONENTS regex unit_test_framework)
 if(Boost_FOUND)
 include_directories(${Boost_INCLUDE_DIRS})

-add_executable(langkit_test main.cpp langkit_parser.cpp)
+add_executable(langkit_test main.cpp)
 target_link_libraries(langkit_test ${Boost_LIBRARIES})

-add_library(langkit SHARED langkit_parser.cpp)
-target_link_libraries(langkit ${Boost_LIBRARIES})
-
 add_executable(langkit_unittest unittest.cpp)
 target_link_libraries(langkit_unittest ${Boost_LIBRARIES})
 endif()
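The add_library target for langkit disappears because the parser implementation now lives entirely in its header. A minimal, self-contained sketch (not taken from this repository) of why header-only code needs no separate library target: definitions written in the class body and function templates are implicitly inline, so any number of translation units can include the same header without violating the one-definition rule.

// header_only_sketch.cpp -- hypothetical illustration, not part of langkit/wesley.
#include <iostream>
#include <string>

struct Greeter {
    // Defined inside the class body -> implicitly inline, safe to keep in a header.
    std::string greet(const std::string &name) { return "hello, " + name; }
};

// A function template is also header-safe: it is instantiated where it is used.
template <typename T>
T twice(const T &t) { return t + t; }

int main() {
    Greeter g;
    std::cout << g.greet("wesley") << "\n";   // prints: hello, wesley
    std::cout << twice(21) << "\n";           // prints: 42
}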
@@ -1,6 +0,0 @@
-// This file is distributed under the BSD License.
-// See LICENSE.TXT for details.
-
-#include <iostream>
-#include "langkit_lexer.hpp"
-
@@ -48,12 +48,12 @@ struct Lexer {
 Pattern multiline_comment_end_pattern;
 Pattern singleline_comment_pattern;

-Lexer Lexer::operator<<(const Pattern &p) {
+Lexer operator<<(const Pattern &p) {
 lex_patterns.push_back(p);
 return *this;
 }

-std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename) {
+std::vector<TokenPtr> lex(const std::string &input, const char *filename) {
 std::vector<Pattern>::iterator iter, end, iter2, end2;
 std::vector<TokenPtr> retval;
 bool found;
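Dropping the Lexer:: qualifier moves these definitions into the class body, which is what makes the lexer header-only. A self-contained sketch of the chaining style this enables, with simplified stand-in types rather than the project's Lexer/Pattern (and returning a reference from operator<< to keep the example minimal, where the project returns the Lexer by value):

// lexer_chain_sketch.cpp -- simplified illustration, not the project's code.
#include <iostream>
#include <string>
#include <vector>

struct Pattern {
    std::string regex;
    int identifier;
    Pattern(const std::string &r, int id) : regex(r), identifier(id) {}
};

struct Lexer {
    std::vector<Pattern> lex_patterns;

    // Defined in the class body (header-only friendly); returns the lexer for chaining.
    Lexer &operator<<(const Pattern &p) {
        lex_patterns.push_back(p);
        return *this;
    }
};

int main() {
    Lexer lexer;
    lexer << Pattern("[A-Za-z_]+", 1) << Pattern("[0-9]+", 2);
    std::cout << lexer.lex_patterns.size() << " patterns registered\n";  // prints: 2
}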
@@ -9,356 +9,4 @@
 #include "langkit_lexer.hpp"
 #include "langkit_parser.hpp"

std::pair<Token_Iterator, bool> String_Rule(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, const std::string &val) {
|
|
||||||
//std::cout << "S";
|
|
||||||
if (iter != end) {
|
|
||||||
if ((*iter)->text == val) {
|
|
||||||
if (keep) {
|
|
||||||
parent->children.push_back(*iter);
|
|
||||||
}
|
|
||||||
return std::pair<Token_Iterator, bool>(++iter, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Type_Rule(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, const int val) {
|
|
||||||
//std::cout << "I";
|
|
||||||
if (iter != end) {
|
|
||||||
if ((*iter)->identifier == val) {
|
|
||||||
if (keep) {
|
|
||||||
parent->children.push_back(*iter);
|
|
||||||
}
|
|
||||||
return std::pair<Token_Iterator, bool>(++iter, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Or_Rule(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, Rule lhs, Rule rhs) {
|
|
||||||
Token_Iterator new_iter;
|
|
||||||
unsigned int prev_size;
|
|
||||||
TokenPtr prev_parent = parent;
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_size = parent->children.size();
|
|
||||||
|
|
||||||
if (iter != end) {
|
|
||||||
std::pair<Token_Iterator, bool> result = lhs(iter, end, parent);
|
|
||||||
|
|
||||||
if (result.second) {
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
return std::pair<Token_Iterator, bool>(result.first, true);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
if (parent->children.size() != prev_size) {
|
|
||||||
//Clear out the partial matches
|
|
||||||
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
|
||||||
}
|
|
||||||
|
|
||||||
result = rhs(iter, end, parent);
|
|
||||||
if (result.second) {
|
|
||||||
if (new_id != -1) {
|
|
||||||
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
return std::pair<Token_Iterator, bool>(result.first, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (parent->children.size() != prev_size) {
|
|
||||||
//Clear out the partial matches
|
|
||||||
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> And_Rule(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, Rule lhs, Rule rhs) {
|
|
||||||
Token_Iterator lhs_iter, rhs_iter;
|
|
||||||
unsigned int prev_size;
|
|
||||||
TokenPtr prev_parent = parent;
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_size = parent->children.size();
|
|
||||||
|
|
||||||
if (iter != end) {
|
|
||||||
std::pair<Token_Iterator, bool> result = lhs(iter, end, parent);
|
|
||||||
|
|
||||||
if (result.second) {
|
|
||||||
result = rhs(result.first, end, parent);
|
|
||||||
if (result.second) {
|
|
||||||
if (new_id != -1) {
|
|
||||||
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
return std::pair<Token_Iterator, bool>(result.first, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (parent->children.size() != prev_size) {
|
|
||||||
//Clear out the partial matches
|
|
||||||
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Kleene_Rule
|
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, Rule rule) {
|
|
||||||
|
|
||||||
TokenPtr prev_parent = parent;
|
|
||||||
std::pair<Token_Iterator, bool> result;
|
|
||||||
Token_Iterator new_iter = iter;
|
|
||||||
|
|
||||||
if (iter != end) {
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
result.second = true;
|
|
||||||
while (result.second == true) {
|
|
||||||
result = rule(new_iter, end, parent);
|
|
||||||
new_iter = result.first;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
return std::pair<Token_Iterator, bool>(result.first, true);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Plus_Rule
|
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, Rule rule) {
|
|
||||||
|
|
||||||
unsigned int prev_size;
|
|
||||||
TokenPtr prev_parent = parent;
|
|
||||||
Token_Iterator loop_iter = iter;
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_size = parent->children.size();
|
|
||||||
|
|
||||||
if (iter != end) {
|
|
||||||
std::pair<Token_Iterator, bool> result;
|
|
||||||
result = rule(loop_iter, end, parent);
|
|
||||||
|
|
||||||
if (result.second == true) {
|
|
||||||
loop_iter = result.first;
|
|
||||||
result.second = true;
|
|
||||||
while ((loop_iter != end) && (result.second == true)) {
|
|
||||||
result = rule(loop_iter, end, parent);
|
|
||||||
loop_iter = result.first;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(result.first, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (parent->children.size() != prev_size) {
|
|
||||||
//Clear out the partial matches
|
|
||||||
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, false);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Optional_Rule
|
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, Rule rule) {
|
|
||||||
|
|
||||||
TokenPtr prev_parent = parent;
|
|
||||||
Token_Iterator new_iter = iter;
|
|
||||||
|
|
||||||
if (iter != end) {
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> result;
|
|
||||||
result.second = true;
|
|
||||||
if ((new_iter != end) && (result.second == true)) {
|
|
||||||
result = rule(new_iter, end, parent);
|
|
||||||
new_iter = result.first;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
return std::pair<Token_Iterator, bool>(result.first, true);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, true);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Epsilon_Rule
|
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, Rule rule) {
|
|
||||||
|
|
||||||
TokenPtr prev_parent = parent;
|
|
||||||
Token_Iterator new_iter = iter;
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> result;
|
|
||||||
if ((new_iter != end)) {
|
|
||||||
result = rule(new_iter, end, parent);
|
|
||||||
new_iter = result.first;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(iter, result.second);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Wrap_Rule
|
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, Rule rule) {
|
|
||||||
|
|
||||||
TokenPtr prev_parent = parent;
|
|
||||||
Token_Iterator new_iter = iter;
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
|
||||||
}
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> result;
|
|
||||||
if ((new_iter != end)) {
|
|
||||||
result = rule(new_iter, end, parent);
|
|
||||||
new_iter = result.first;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (new_id != -1) {
|
|
||||||
parent->filename = (*iter)->filename;
|
|
||||||
parent->start = (*iter)->start;
|
|
||||||
if (result.first == iter) {
|
|
||||||
parent->end = (*iter)->start;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
parent->end = (*(result.first - 1))->end;
|
|
||||||
}
|
|
||||||
|
|
||||||
prev_parent->children.push_back(parent);
|
|
||||||
}
|
|
||||||
|
|
||||||
return std::pair<Token_Iterator, bool>(result.first, result.second);
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule Str(const std::string &text, bool keep) {
|
|
||||||
return Rule(boost::bind(String_Rule, _1, _2, _3, _4, _5, text), keep);
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule Id(int id, bool keep) {
|
|
||||||
return Rule(boost::bind(Type_Rule, _1, _2, _3, _4, _5, id), keep);
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule Str(const std::string &text) {
|
|
||||||
return Rule(boost::bind(String_Rule, _1, _2, _3, _4, _5, text));
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule Id(int id) {
|
|
||||||
return Rule(boost::bind(Type_Rule, _1, _2, _3, _4, _5, id));
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule Ign(Rule rule) {
|
|
||||||
rule.impl->keep = false;
|
|
||||||
|
|
||||||
return rule;
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule Epsilon(Rule rule) {
|
|
||||||
return Rule(boost::bind(Epsilon_Rule, _1, _2, _3, _4, _5, rule));
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule Wrap(Rule rule) {
|
|
||||||
return Rule(boost::bind(Wrap_Rule, _1, _2, _3, _4, _5, rule));
|
|
||||||
}
|
|
||||||
|
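The rule functions removed above (String_Rule, Type_Rule, Or_Rule, And_Rule, Kleene_Rule, Plus_Rule, ...) reappear as templates in langkit_parser.hpp in the next hunk. A self-contained, simplified sketch of the combinator shape they share, using plain C++11 std::function over a vector of ints instead of TokenPtr trees and boost::bind; the real rules also attach kept tokens to a parent node and erase partial children when a branch fails, which this sketch omits:

// combinator_sketch.cpp -- illustrative only, not the project's code.
#include <functional>
#include <iostream>
#include <utility>
#include <vector>

typedef std::vector<int>::const_iterator Iter;
// A rule consumes tokens from 'iter' and reports (next position, matched?).
typedef std::function<std::pair<Iter, bool>(Iter, Iter)> Rule;

// Match a single token equal to 'val' (the shape of String_Rule / Type_Rule).
Rule match(int val) {
    return [val](Iter iter, Iter end) -> std::pair<Iter, bool> {
        if (iter != end && *iter == val) {
            return std::make_pair(iter + 1, true);
        }
        return std::make_pair(iter, false);
    };
}

// Or_Rule shape: try lhs; if it fails, retry rhs from the same starting position.
Rule either(Rule lhs, Rule rhs) {
    return [lhs, rhs](Iter iter, Iter end) -> std::pair<Iter, bool> {
        std::pair<Iter, bool> r = lhs(iter, end);
        if (r.second) return r;
        return rhs(iter, end);   // backtrack: rhs starts where lhs started
    };
}

// And_Rule shape: lhs must match, then rhs continues from where lhs stopped.
Rule both(Rule lhs, Rule rhs) {
    return [lhs, rhs](Iter iter, Iter end) -> std::pair<Iter, bool> {
        std::pair<Iter, bool> r = lhs(iter, end);
        if (r.second) {
            std::pair<Iter, bool> r2 = rhs(r.first, end);
            if (r2.second) return r2;
        }
        return std::make_pair(iter, false);  // report failure at the original position
    };
}

int main() {
    std::vector<int> tokens = {1, 3};
    Rule rule = both(match(1), either(match(2), match(3)));  // "1 then (2 or 3)"
    std::pair<Iter, bool> result = rule(tokens.begin(), tokens.end());
    std::cout << (result.second ? "matched" : "no match") << "\n";  // prints: matched
}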
@@ -29,32 +29,347 @@ struct RuleImpl {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> String_Rule
|
//struct Rule;
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, const std::string &val);
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Type_Rule
|
template <typename T_Iter>
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, const int val);
|
std::pair<T_Iter, bool> String_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, const std::string &val) {
|
||||||
|
if (iter != end) {
|
||||||
|
if ((*iter)->text == val) {
|
||||||
|
if (keep) {
|
||||||
|
parent->children.push_back(*iter);
|
||||||
|
}
|
||||||
|
return std::pair<T_Iter, bool>(++iter, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Or_Rule
|
return std::pair<T_Iter, bool>(iter, false);
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, struct Rule lhs, struct Rule rhs);
|
}
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> And_Rule
|
template <typename T_Iter>
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, struct Rule lhs, struct Rule rhs);
|
std::pair<T_Iter, bool> Id_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, const int val) {
|
||||||
|
if (iter != end) {
|
||||||
|
if ((*iter)->identifier == val) {
|
||||||
|
if (keep) {
|
||||||
|
parent->children.push_back(*iter);
|
||||||
|
}
|
||||||
|
return std::pair<T_Iter, bool>(++iter, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Kleene_Rule
|
return std::pair<T_Iter, bool>(iter, false);
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, struct Rule rule);
|
}
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Plus_Rule
|
template <typename T_Iter, typename R_Type>
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, struct Rule rule);
|
std::pair<T_Iter, bool> Or_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type lhs, R_Type rhs) {
|
||||||
|
T_Iter new_iter;
|
||||||
|
unsigned int prev_size;
|
||||||
|
TokenPtr prev_parent = parent;
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Optional_Rule
|
if (new_id != -1) {
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, struct Rule rule);
|
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||||
|
}
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Epsilon_Rule
|
prev_size = parent->children.size();
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, struct Rule rule);
|
|
||||||
|
|
||||||
std::pair<Token_Iterator, bool> Wrap_Rule
|
if (iter != end) {
|
||||||
(Token_Iterator iter, Token_Iterator end, TokenPtr parent, bool keep, int new_id, struct Rule rule);
|
std::pair<T_Iter, bool> result = lhs(iter, end, parent);
|
||||||
|
|
||||||
|
if (result.second) {
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
return std::pair<T_Iter, bool>(result.first, true);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
if (parent->children.size() != prev_size) {
|
||||||
|
//Clear out the partial matches
|
||||||
|
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
||||||
|
}
|
||||||
|
|
||||||
|
result = rhs(iter, end, parent);
|
||||||
|
if (result.second) {
|
||||||
|
if (new_id != -1) {
|
||||||
|
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
return std::pair<T_Iter, bool>(result.first, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parent->children.size() != prev_size) {
|
||||||
|
//Clear out the partial matches
|
||||||
|
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::pair<T_Iter, bool>(iter, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T_Iter, typename R_Type>
|
||||||
|
std::pair<Token_Iterator, bool> And_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type lhs, R_Type rhs) {
|
||||||
|
T_Iter lhs_iter, rhs_iter;
|
||||||
|
unsigned int prev_size;
|
||||||
|
TokenPtr prev_parent = parent;
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_size = parent->children.size();
|
||||||
|
|
||||||
|
if (iter != end) {
|
||||||
|
std::pair<T_Iter, bool> result = lhs(iter, end, parent);
|
||||||
|
|
||||||
|
if (result.second) {
|
||||||
|
result = rhs(result.first, end, parent);
|
||||||
|
if (result.second) {
|
||||||
|
if (new_id != -1) {
|
||||||
|
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
return std::pair<Token_Iterator, bool>(result.first, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parent->children.size() != prev_size) {
|
||||||
|
//Clear out the partial matches
|
||||||
|
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::pair<T_Iter, bool>(iter, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T_Iter, typename R_Type>
|
||||||
|
std::pair<T_Iter, bool> Kleene_Rule
|
||||||
|
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||||
|
|
||||||
|
TokenPtr prev_parent = parent;
|
||||||
|
std::pair<T_Iter, bool> result;
|
||||||
|
T_Iter new_iter = iter;
|
||||||
|
|
||||||
|
if (iter != end) {
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||||
|
}
|
||||||
|
|
||||||
|
result.second = true;
|
||||||
|
while (result.second == true) {
|
||||||
|
result = rule(new_iter, end, parent);
|
||||||
|
new_iter = result.first;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
return std::pair<T_Iter, bool>(result.first, true);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return std::pair<T_Iter, bool>(iter, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T_Iter, typename R_Type>
|
||||||
|
std::pair<T_Iter, bool> Plus_Rule
|
||||||
|
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||||
|
|
||||||
|
unsigned int prev_size;
|
||||||
|
TokenPtr prev_parent = parent;
|
||||||
|
T_Iter loop_iter = iter;
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_size = parent->children.size();
|
||||||
|
|
||||||
|
if (iter != end) {
|
||||||
|
std::pair<T_Iter, bool> result;
|
||||||
|
result = rule(loop_iter, end, parent);
|
||||||
|
|
||||||
|
if (result.second == true) {
|
||||||
|
loop_iter = result.first;
|
||||||
|
result.second = true;
|
||||||
|
while ((loop_iter != end) && (result.second == true)) {
|
||||||
|
result = rule(loop_iter, end, parent);
|
||||||
|
loop_iter = result.first;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::pair<T_Iter, bool>(result.first, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (parent->children.size() != prev_size) {
|
||||||
|
//Clear out the partial matches
|
||||||
|
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::pair<T_Iter, bool>(iter, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T_Iter, typename R_Type>
|
||||||
|
std::pair<T_Iter, bool> Optional_Rule
|
||||||
|
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||||
|
|
||||||
|
TokenPtr prev_parent = parent;
|
||||||
|
T_Iter new_iter = iter;
|
||||||
|
|
||||||
|
if (iter != end) {
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<T_Iter, bool> result;
|
||||||
|
result.second = true;
|
||||||
|
if ((new_iter != end) && (result.second == true)) {
|
||||||
|
result = rule(new_iter, end, parent);
|
||||||
|
new_iter = result.first;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
return std::pair<T_Iter, bool>(result.first, true);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return std::pair<T_Iter, bool>(iter, true);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T_Iter, typename R_Type>
|
||||||
|
std::pair<T_Iter, bool> Epsilon_Rule
|
||||||
|
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||||
|
|
||||||
|
TokenPtr prev_parent = parent;
|
||||||
|
T_Iter new_iter = iter;
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<T_Iter, bool> result;
|
||||||
|
if ((new_iter != end)) {
|
||||||
|
result = rule(new_iter, end, parent);
|
||||||
|
new_iter = result.first;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::pair<T_Iter, bool>(iter, result.second);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T_Iter, typename R_Type>
|
||||||
|
std::pair<T_Iter, bool> Wrap_Rule
|
||||||
|
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||||
|
|
||||||
|
TokenPtr prev_parent = parent;
|
||||||
|
T_Iter new_iter = iter;
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::pair<T_Iter, bool> result;
|
||||||
|
if ((new_iter != end)) {
|
||||||
|
result = rule(new_iter, end, parent);
|
||||||
|
new_iter = result.first;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (new_id != -1) {
|
||||||
|
parent->filename = (*iter)->filename;
|
||||||
|
parent->start = (*iter)->start;
|
||||||
|
if (result.first == iter) {
|
||||||
|
parent->end = (*iter)->start;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
parent->end = (*(result.first - 1))->end;
|
||||||
|
}
|
||||||
|
|
||||||
|
prev_parent->children.push_back(parent);
|
||||||
|
}
|
||||||
|
|
||||||
|
return std::pair<T_Iter, bool>(result.first, result.second);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename T_Iter, typename R_Type>
|
||||||
|
std::pair<T_Iter, bool> Ignore_Rule
|
||||||
|
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||||
|
|
||||||
|
rule.impl->keep = false;
|
||||||
|
|
||||||
|
return rule(iter, end, parent);
|
||||||
|
}
|
||||||
|
|
||||||
struct Rule {
|
struct Rule {
|
||||||
RuleImplPtr impl;
|
RuleImplPtr impl;
|
||||||
@@ -76,36 +391,96 @@ struct Rule {
|
|||||||
return *this;
|
return *this;
|
||||||
}
|
}
|
||||||
|
|
||||||
Rule operator|(const Rule &rhs) {
|
};
|
||||||
return Rule(boost::bind(Or_Rule, _1, _2, _3, _4, _5, *this, rhs));
|
|
||||||
|
inline Rule operator>>(const Rule &lhs, const Rule &rhs) {
|
||||||
|
return Rule(boost::bind(And_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, lhs, rhs));
|
||||||
|
}
|
||||||
|
|
||||||
|
inline Rule operator|(const Rule &lhs, const Rule &rhs) {
|
||||||
|
return Rule(boost::bind(Or_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, lhs, rhs));
|
||||||
|
}
|
||||||
|
|
||||||
|
inline Rule operator*(const Rule &operand) {
|
||||||
|
return Rule(boost::bind(Kleene_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, operand));
|
||||||
|
}
|
||||||
|
|
||||||
|
inline Rule operator+(const Rule &operand) {
|
||||||
|
return Rule(boost::bind(Plus_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, operand));
|
||||||
|
}
|
||||||
|
|
||||||
|
inline Rule operator~(const Rule &operand) {
|
||||||
|
return Rule(boost::bind(Optional_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, operand));
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
template<typename ItrType, typename ParamType,
|
||||||
|
std::pair<ItrType,bool> (*Function)(ItrType, ItrType, TokenPtr, bool, int, ParamType)>
|
||||||
|
struct Rule_Builder
|
||||||
|
{
|
||||||
|
Rule_Builder(ParamType p, bool t_keep = true)
|
||||||
|
: m_p(p), m_keep(t_keep)
|
||||||
|
{
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Rule operator>>(const Rule &rhs) {
|
// Auto conversion operator is the glue here.
|
||||||
return Rule(boost::bind(And_Rule, _1, _2, _3, _4, _5, *this, rhs));
|
// In one sense this option cleans up the impl quite a bit, with much fewer code
|
||||||
|
// repeats in all the rule builders.
|
||||||
|
// In another sense, it might take a couple of tries to get it right.
|
||||||
|
operator Rule() {
|
||||||
|
return Rule(boost::bind(Function, _1, _2, _3, _4, _5, m_p), m_keep);
|
||||||
}
|
}
|
||||||
|
|
||||||
Rule operator*() {
|
ParamType m_p;
|
||||||
return Rule(boost::bind(Kleene_Rule, _1, _2, _3, _4, _5, *this));
|
bool m_keep;
|
||||||
}
|
|
||||||
|
|
||||||
Rule operator+() {
|
|
||||||
return Rule(boost::bind(Plus_Rule, _1, _2, _3, _4, _5, *this));
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule operator~() {
|
|
||||||
return Rule(boost::bind(Optional_Rule, _1, _2, _3, _4, _5, *this));
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
Rule Str(const std::string &text, bool keep);
|
typedef Rule_Builder<Token_Iterator, Rule, &Epsilon_Rule<Token_Iterator, Rule> > Epsilon;
|
||||||
Rule Id(int id, bool keep);
|
typedef Rule_Builder<Token_Iterator, Rule, &Wrap_Rule<Token_Iterator, Rule> > Wrap;
|
||||||
|
typedef Rule_Builder<Token_Iterator, Rule, &Ignore_Rule<Token_Iterator, Rule> > Ign;
|
||||||
|
typedef Rule_Builder<Token_Iterator, int, &Id_Rule<Token_Iterator> > Id;
|
||||||
|
typedef Rule_Builder<Token_Iterator, const std::string&, &String_Rule<Token_Iterator> > Str;
|
||||||
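The Rule_Builder template and the typedefs above replace the old Str/Id/Ign/Wrap/Epsilon factory functions: an expression such as Str("break") now constructs a small builder object, and its conversion operator produces the bound Rule on demand. A self-contained sketch of that conversion-operator technique with simplified stand-in types (std::function and plain function pointers rather than boost::bind and TokenPtr; not the project's code):

// rule_builder_sketch.cpp -- illustrative only.
#include <functional>
#include <iostream>
#include <string>

typedef std::function<bool(const std::string &)> Rule;   // stand-in for the real Rule

// Stand-ins for the underlying rule functions (String_Rule, Id_Rule, ...).
bool equals_text(const std::string &input, std::string expected) {
    return input == expected;
}
bool has_length(const std::string &input, int expected) {
    return static_cast<int>(input.size()) == expected;
}

// One builder covers every rule kind: the extra parameter type and the rule
// function are template parameters, and the conversion operator does the binding.
template <typename ParamType, bool (*Function)(const std::string &, ParamType)>
struct Rule_Builder {
    Rule_Builder(ParamType p) : m_p(p) {}

    operator Rule() {                       // the implicit conversion is the glue
        ParamType p = m_p;
        return [p](const std::string &input) { return Function(input, p); };
    }

    ParamType m_p;
};

// User-facing names become typedefs instead of overloaded factory functions.
typedef Rule_Builder<std::string, &equals_text> Str;
typedef Rule_Builder<int, &has_length> Len;

int main() {
    Rule r1 = Str("break");   // the builder converts to Rule at the assignment
    Rule r2 = Len(5);
    std::cout << r1("break") << " " << r2("hello") << "\n";   // prints: 1 1
}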
|
|
||||||
Rule Str(const std::string &text);
|
/*
|
||||||
Rule Id(int id);
|
template <typename R>
|
||||||
|
R Str(const std::string &text, bool keep) {
|
||||||
|
return Rule(boost::bind(String_Rule, _1, _2, _3, _4, _5, text), keep);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename R>
|
||||||
|
R Id(int id, bool keep) {
|
||||||
|
return Rule(boost::bind(Type_Rule, _1, _2, _3, _4, _5, id), keep);
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename R>
|
||||||
|
R Str(const std::string &text) {
|
||||||
|
return Rule(boost::bind(String_Rule, _1, _2, _3, _4, _5, text));
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename R>
|
||||||
|
R Id(int id) {
|
||||||
|
return Rule(boost::bind(Type_Rule, _1, _2, _3, _4, _5, id));
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename R>
|
||||||
|
R Ign(R rule) {
|
||||||
|
rule.impl->keep = false;
|
||||||
|
|
||||||
|
return rule;
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename R>
|
||||||
|
R Epsilon(R rule) {
|
||||||
|
return Rule(boost::bind(Epsilon_Rule, _1, _2, _3, _4, _5, rule));
|
||||||
|
}
|
||||||
|
|
||||||
|
template <typename R>
|
||||||
|
R Wrap(R rule) {
|
||||||
|
return Rule(boost::bind(Wrap_Rule, _1, _2, _3, _4, _5, rule));
|
||||||
|
}
|
||||||
|
*/
|
||||||
|
|
||||||
Rule Ign(Rule rule);
|
|
||||||
Rule Epsilon(Rule rule);
|
|
||||||
Rule Wrap(Rule rule);
|
|
||||||
|
|
||||||
#endif /* LANGKIT_PARSER_HPP_ */
|
#endif /* LANGKIT_PARSER_HPP_ */
|
||||||
|
@@ -95,11 +95,12 @@ void parse(std::vector<TokenPtr> &tokens, const char *filename) {
 block = Ign(Str("{")) >> ~return_statement >> Ign(Str("}"));
 return_statement = Ign(Str("return")) >> expression;
 */
+/*
 Rule rule(TokenType::Equation);
 rule = Wrap(Str("break"));

 std::cout << "Check: " << rule.impl->new_id << std::endl;
+*/
 /*
 Rule rule = *(expression >> *Ign(Id(TokenType::Semicolon)));
 expression = term >> *((Str("+") >> term) | (Str("-") >> term));
@@ -110,9 +111,9 @@ void parse(std::vector<TokenPtr> &tokens, const char *filename) {

 value = funcall | Id(TokenType::Identifier) | Id(TokenType::Number) | Id(TokenType::Quoted_String) | Id(TokenType::Single_Quoted_String);
 */
-/*
-Rule rule = Str("x") << Id(TokenType::Semicolon);
-*/
+
+Rule rule = Str("x") >> Id(TokenType::Semicolon);
+

 /*
 Rule rule;
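The grammar experiments in this file compose Rule objects with the overloaded operators from langkit_parser.hpp: ">>" sequences two sub-rules, "|" tries alternatives, "*" is Kleene star and "~" marks a sub-rule optional. A hypothetical usage sketch of that calling convention; the Token/Rule signatures and TokenType names are taken from elsewhere in this diff, and the snippet is illustrative rather than a file from the commit:

// rule_usage_sketch -- assumes this repository's headers and a TokenType enum
// like the one used in this file (Identifier, Semicolon, File, ...).
#include <iostream>
#include "langkit_lexer.hpp"
#include "langkit_parser.hpp"

void try_rule(std::vector<TokenPtr> &tokens, const char *filename) {
    // Compose a rule: match the text "x" followed by a Semicolon token.
    Rule rule = Str("x") >> Id(TokenType::Semicolon);

    Token_Iterator iter = tokens.begin(), end = tokens.end();
    TokenPtr parent(new Token("Root", TokenType::File, filename));

    // A Rule is callable: it reports where matching stopped and whether it
    // succeeded, attaching any kept tokens to 'parent' (the same convention
    // the parse() functions in this diff use).
    std::pair<Token_Iterator, bool> result = rule(iter, end, parent);
    std::cout << (result.second ? "matched" : "no match") << std::endl;
}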
@@ -14,7 +14,7 @@ if(Boost_FOUND)

 add_executable(wesley_test main.cpp)

-target_link_libraries(wesley_test ${Boost_LIBRARIES} langkit)
+target_link_libraries(wesley_test ${Boost_LIBRARIES})

 endif()

698 wesley/main.cpp
@@ -1,3 +1,4 @@
+/*
 #include <boost/bind.hpp>
 #include <boost/shared_ptr.hpp>
 #include <boost/function.hpp>
@@ -5,690 +6,16 @@
 #include <iostream>
 #include <map>
 #include <fstream>
+*/

-#include "boxedcpp.hpp"
-#include "bootstrap.hpp"
-#include "bootstrap_stl.hpp"
+#include <iostream>

-#include "langkit_lexer.hpp"
-#include "langkit_parser.hpp"
+#include "wesley.hpp"

class TokenType { public: enum Type { File, Whitespace, Identifier, Integer, Operator, Parens_Open, Parens_Close,
|
|
||||||
Square_Open, Square_Close, Curly_Open, Curly_Close, Comma, Quoted_String, Single_Quoted_String, Carriage_Return, Semicolon,
|
|
||||||
Function_Def, Scoped_Block, Statement, Equation, Return, Expression, Term, Factor, Negate, Comment,
|
|
||||||
Value, Fun_Call, Method_Call, Comparison, If_Block, While_Block, Boolean, Real_Number, Array_Call, Variable_Decl, Array_Init,
|
|
||||||
For_Block, Prefix, Break }; };
|
|
||||||
|
|
||||||
const char *tokentype_to_string(int tokentype) {
|
|
||||||
const char *token_types[] = {"File", "Whitespace", "Identifier", "Integer", "Operator", "Parens_Open", "Parens_Close",
|
|
||||||
"Square_Open", "Square_Close", "Curly_Open", "Curly_Close", "Comma", "Quoted_String", "Single_Quoted_String", "Carriage_Return", "Semicolon",
|
|
||||||
"Function_Def", "Scoped_Block", "Statement", "Equation", "Return", "Expression", "Term", "Factor", "Negate", "Comment",
|
|
||||||
"Value", "Fun_Call", "Method_Call", "Comparison", "If_Block", "While_Block", "Boolean", "Real Number", "Array_Call", "Variable_Decl", "Array_Init",
|
|
||||||
"For_Block", "Prefix", "Break" };
|
|
||||||
|
|
||||||
return token_types[tokentype];
|
|
||||||
}
|
|
||||||
|
|
||||||
struct ParserError {
|
|
||||||
std::string reason;
|
|
||||||
TokenPtr location;
|
|
||||||
|
|
||||||
ParserError(const std::string &why, const TokenPtr where) : reason(why), location(where){ }
|
|
||||||
};
|
|
||||||
|
|
||||||
struct EvalError {
|
|
||||||
std::string reason;
|
|
||||||
TokenPtr location;
|
|
||||||
|
|
||||||
EvalError(const std::string &why, const TokenPtr where) : reason(why), location(where) { }
|
|
||||||
};
|
|
||||||
|
|
||||||
struct ReturnValue {
|
|
||||||
Boxed_Value retval;
|
|
||||||
TokenPtr location;
|
|
||||||
|
|
||||||
ReturnValue(const Boxed_Value &return_value, const TokenPtr where) : retval(return_value), location(where) { }
|
|
||||||
};
|
|
||||||
|
|
||||||
struct BreakLoop {
|
|
||||||
TokenPtr location;
|
|
||||||
|
|
||||||
BreakLoop(const TokenPtr where) : location(where) { }
|
|
||||||
};
|
|
||||||
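ReturnValue and BreakLoop above are thrown inside eval_token and caught again at function-call and loop boundaries, so "return" and "break" unwind the tree walk through ordinary C++ exception handling. A self-contained sketch of that technique with simplified stand-in types (ints instead of Boxed_Value and TokenPtr; not the project's code):

// control_flow_sketch.cpp -- illustrative only.
#include <iostream>

struct ReturnSignal { int value; ReturnSignal(int v) : value(v) {} };
struct BreakSignal {};

// Stand-in for evaluating a function or loop body.
int eval_body(int x) {
    if (x > 10) throw ReturnSignal(x * 2);  // like 'throw ReturnValue(retval, node)'
    if (x < 0)  throw BreakSignal();        // like 'throw BreakLoop(node)'
    return x;
}

// Function-call boundary: turn the thrown 'return' back into a normal value,
// mirroring the 'catch (ReturnValue &rv) { retval = rv.retval; }' cases above.
int call_function(int x) {
    try { return eval_body(x); }
    catch (ReturnSignal &rv) { return rv.value; }
}

int main() {
    std::cout << call_function(21) << "\n";   // prints 42: early return via throw

    // Loop boundary: BreakSignal unwinds out of the body and stops the loop,
    // mirroring 'catch (BreakLoop &bl) { cond = false; }'.
    bool running = true;
    while (running) {
        try { eval_body(-1); }
        catch (BreakSignal &) { running = false; }
    }
    std::cout << "loop exited\n";
}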
|
|
||||||
Boxed_Value eval_token(BoxedCPP_System &ss, TokenPtr node);
|
|
||||||
Boxed_Value evaluate_string(Lexer &lexer, Rule &parser, BoxedCPP_System &ss, const std::string &input, const char *filename);
|
|
||||||
|
|
||||||
void debug_print(TokenPtr token, std::string prepend) {
|
|
||||||
std::cout << prepend << "Token: " << token->text << "(" << tokentype_to_string(token->identifier) << ") @ " << token->filename
|
|
||||||
<< ": (" << token->start.line << ", " << token->start.column << ") to ("
|
|
||||||
<< token->end.line << ", " << token->end.column << ") " << std::endl;
|
|
||||||
|
|
||||||
for (unsigned int i = 0; i < token->children.size(); ++i) {
|
|
||||||
debug_print(token->children[i], prepend + " ");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
void debug_print(std::vector<TokenPtr> &tokens) {
|
|
||||||
for (unsigned int i = 0; i < tokens.size(); ++i) {
|
|
||||||
debug_print(tokens[i], "");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
//A function that prints any string passed to it
|
|
||||||
|
|
||||||
template <typename T>
|
|
||||||
void print(const T &t)
|
|
||||||
{
|
|
||||||
std::cout << t << std::endl;
|
|
||||||
}
|
|
||||||
|
|
||||||
template<> void print<bool>(const bool &t)
|
|
||||||
{
|
|
||||||
if (t) {
|
|
||||||
std::cout << "true" << std::endl;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
std::cout << "false" << std::endl;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
std::string concat_string(const std::string &s1, const std::string &s2) {
|
|
||||||
return s1+s2;
|
|
||||||
}
|
|
||||||
|
|
||||||
const Boxed_Value add_two(BoxedCPP_System &ss, const std::vector<Boxed_Value> &vals) {
|
|
||||||
return dispatch(ss.get_function("+"), vals);
|
|
||||||
}
|
|
||||||
|
|
||||||
const Boxed_Value eval(Lexer &lexer, Rule &parser, BoxedCPP_System &ss, const std::vector<Boxed_Value> &vals) {
|
|
||||||
std::string val;
|
|
||||||
|
|
||||||
try {
|
|
||||||
val = Cast_Helper<std::string &>()(vals[0]);
|
|
||||||
}
|
|
||||||
catch (std::exception &e) {
|
|
||||||
throw EvalError("Can not evaluate string: " + val, TokenPtr());
|
|
||||||
}
|
|
||||||
catch (EvalError &ee) {
|
|
||||||
throw EvalError("Can not evaluate string: " + val + " reason: " + ee.reason, TokenPtr());
|
|
||||||
}
|
|
||||||
return evaluate_string(lexer, parser, ss, val, "__EVAL__");
|
|
||||||
}
|
|
||||||
|
|
||||||
std::string load_file(const char *filename) {
|
|
||||||
std::ifstream infile (filename, std::ios::in | std::ios::ate);
|
|
||||||
|
|
||||||
if (!infile.is_open()) {
|
|
||||||
std::cerr << "Can not open " << filename << std::endl;
|
|
||||||
exit(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
std::streampos size = infile.tellg();
|
|
||||||
infile.seekg(0, std::ios::beg);
|
|
||||||
|
|
||||||
std::vector<char> v(size);
|
|
||||||
infile.read(&v[0], size);
|
|
||||||
|
|
||||||
std::string ret_val (v.empty() ? std::string() : std::string (v.begin(), v.end()).c_str());
|
|
||||||
|
|
||||||
return ret_val;
|
|
||||||
}
|
|
||||||
|
|
||||||
const Boxed_Value eval_function (BoxedCPP_System &ss, TokenPtr node, const std::vector<std::string> ¶m_names, const std::vector<Boxed_Value> &vals) {
|
|
||||||
for (unsigned int i = 0; i < param_names.size(); ++i) {
|
|
||||||
ss.add_object(param_names[i], vals[i]);
|
|
||||||
}
|
|
||||||
return eval_token(ss, node);
|
|
||||||
}
|
|
||||||
|
|
||||||
Lexer build_lexer() {
|
|
||||||
Lexer lexer;
|
|
||||||
lexer.set_skip(Pattern("[ \\t]+", TokenType::Whitespace));
|
|
||||||
lexer.set_line_sep(Pattern("\\n|\\r\\n", TokenType::Carriage_Return));
|
|
||||||
lexer.set_command_sep(Pattern(";|\\r\\n|\\n", TokenType::Semicolon));
|
|
||||||
lexer.set_multiline_comment(Pattern("/\\*", TokenType::Comment), Pattern("\\*/", TokenType::Comment));
|
|
||||||
lexer.set_singleline_comment(Pattern("//", TokenType::Comment));
|
|
||||||
|
|
||||||
lexer << Pattern("[A-Za-z_]+", TokenType::Identifier);
|
|
||||||
lexer << Pattern("[0-9]+\\.[0-9]+", TokenType::Real_Number);
|
|
||||||
lexer << Pattern("[0-9]+", TokenType::Integer);
|
|
||||||
lexer << Pattern("[!@#$%^&*|\\-+=<>.]+|/[!@#$%^&|\\-+=<>]*", TokenType::Operator);
|
|
||||||
lexer << Pattern("\\(", TokenType::Parens_Open);
|
|
||||||
lexer << Pattern("\\)", TokenType::Parens_Close);
|
|
||||||
lexer << Pattern("\\[", TokenType::Square_Open);
|
|
||||||
lexer << Pattern("\\]", TokenType::Square_Close);
|
|
||||||
lexer << Pattern("\\{", TokenType::Curly_Open);
|
|
||||||
lexer << Pattern("\\}", TokenType::Curly_Close);
|
|
||||||
lexer << Pattern(",", TokenType::Comma);
|
|
||||||
lexer << Pattern("\"(?:[^\"\\\\]|\\\\.)*\"", TokenType::Quoted_String);
|
|
||||||
lexer << Pattern("'(?:[^'\\\\]|\\\\.)*'", TokenType::Single_Quoted_String);
|
|
||||||
|
|
||||||
return lexer;
|
|
||||||
}
|
|
||||||
|
|
||||||
Rule build_parser_rules() {
|
|
||||||
Rule params;
|
|
||||||
Rule block(TokenType::Scoped_Block);
|
|
||||||
Rule fundef(TokenType::Function_Def);
|
|
||||||
Rule statement;
|
|
||||||
Rule equation(TokenType::Equation);
|
|
||||||
Rule boolean(TokenType::Boolean);
|
|
||||||
Rule comparison(TokenType::Comparison);
|
|
||||||
Rule expression(TokenType::Expression);
|
|
||||||
Rule term(TokenType::Term);
|
|
||||||
Rule factor(TokenType::Factor);
|
|
||||||
Rule negate(TokenType::Negate);
|
|
||||||
Rule prefix(TokenType::Prefix);
|
|
||||||
|
|
||||||
Rule funcall(TokenType::Fun_Call);
|
|
||||||
Rule methodcall(TokenType::Method_Call);
|
|
||||||
Rule if_block(TokenType::If_Block);
|
|
||||||
Rule while_block(TokenType::While_Block);
|
|
||||||
Rule for_block(TokenType::For_Block);
|
|
||||||
Rule arraycall(TokenType::Array_Call);
|
|
||||||
Rule vardecl(TokenType::Variable_Decl);
|
|
||||||
Rule arrayinit(TokenType::Array_Init);
|
|
||||||
|
|
||||||
Rule return_statement(TokenType::Return);
|
|
||||||
Rule break_statement(TokenType::Break);
|
|
||||||
|
|
||||||
Rule value;
|
|
||||||
Rule statements;
|
|
||||||
Rule for_conditions;
|
|
||||||
Rule source_elem;
|
|
||||||
Rule source_elems;
|
|
||||||
Rule statement_list;
|
|
||||||
Rule paren_block;
|
|
||||||
|
|
||||||
Rule rule = *(Ign(Id(TokenType::Semicolon))) >> source_elems >> *(Ign(Id(TokenType::Semicolon)));
|
|
||||||
|
|
||||||
source_elems = source_elem >> *(+Ign(Id(TokenType::Semicolon)) >> source_elem);
|
|
||||||
source_elem = fundef | statement;
|
|
||||||
statement_list = statement >> *(+Ign(Id(TokenType::Semicolon)) >> statement);
|
|
||||||
statement = if_block | while_block | for_block | equation;
|
|
||||||
|
|
||||||
if_block = Ign(Str("if")) >> boolean >> block >> *(*Ign(Id(TokenType::Semicolon)) >> Str("elseif") >> boolean >> block) >> ~(*Ign(Id(TokenType::Semicolon)) >> Str("else") >> block);
|
|
||||||
while_block = Ign(Str("while")) >> boolean >> block;
|
|
||||||
for_block = Ign(Str("for")) >> for_conditions >> block;
|
|
||||||
for_conditions = Ign(Id(TokenType::Parens_Open)) >> ~equation >> Ign(Str(";")) >> boolean >> Ign(Str(";")) >> equation >> Ign(Id(TokenType::Parens_Close));
|
|
||||||
|
|
||||||
fundef = Ign(Str("def")) >> Id(TokenType::Identifier) >> ~(Ign(Id(TokenType::Parens_Open)) >> ~params >> Ign(Id(TokenType::Parens_Close))) >>
|
|
||||||
block;
|
|
||||||
params = Id(TokenType::Identifier) >> *(Ign(Str(",")) >> Id(TokenType::Identifier));
|
|
||||||
block = *(Ign(Id(TokenType::Semicolon))) >> Ign(Id(TokenType::Curly_Open)) >> *(Ign(Id(TokenType::Semicolon))) >> ~statement_list >> *(Ign(Id(TokenType::Semicolon))) >> Ign(Id(TokenType::Curly_Close));
|
|
||||||
|
|
||||||
equation = *(((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("=")) |
|
|
||||||
((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("+=")) |
|
|
||||||
((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("-=")) |
|
|
||||||
((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("*=")) |
|
|
||||||
((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("/="))) >> boolean;
|
|
||||||
boolean = comparison >> *((Str("&&") >> comparison) | (Str("||") >> comparison));
|
|
||||||
comparison = expression >> *((Str("==") >> expression) | (Str("!=") >> expression) | (Str("<") >> expression) |
|
|
||||||
(Str("<=") >> expression) |(Str(">") >> expression) | (Str(">=") >> expression));
|
|
||||||
expression = term >> *((Str("+") >> term) | (Str("-") >> term));
|
|
||||||
term = factor >> *((Str("*") >> factor) | (Str("/") >> factor));
|
|
||||||
factor = methodcall | arraycall | value | negate | prefix | (Ign(Str("+")) >> value);
|
|
||||||
value = vardecl | arrayinit | block | paren_block | return_statement | break_statement |
|
|
||||||
funcall | Id(TokenType::Identifier) | Id(TokenType::Real_Number) | Id(TokenType::Integer) | Id(TokenType::Quoted_String) |
|
|
||||||
Id(TokenType::Single_Quoted_String) ;
|
|
||||||
|
|
||||||
funcall = Id(TokenType::Identifier) >> Ign(Id(TokenType::Parens_Open)) >> ~(boolean >> *(Ign(Str("," )) >> boolean)) >> Ign(Id(TokenType::Parens_Close));
|
|
||||||
methodcall = value >> +(Ign(Str(".")) >> funcall);
|
|
||||||
negate = Ign(Str("-")) >> boolean;
|
|
||||||
prefix = (Str("++") >> (boolean | arraycall)) | (Str("--") >> (boolean | arraycall));
|
|
||||||
arraycall = value >> +((Ign(Id(TokenType::Square_Open)) >> boolean >> Ign(Id(TokenType::Square_Close))));
|
|
||||||
|
|
||||||
arrayinit = Ign(Id(TokenType::Square_Open)) >> ~(boolean >> *(Ign(Str(",")) >> boolean)) >> Ign(Id(TokenType::Square_Close));
|
|
||||||
vardecl = Ign(Str("var")) >> Id(TokenType::Identifier);
|
|
||||||
return_statement = Ign(Str("return")) >> ~boolean;
|
|
||||||
break_statement = Wrap(Ign(Str("break")));
|
|
||||||
paren_block = (Ign(Id(TokenType::Parens_Open)) >> equation >> Ign(Id(TokenType::Parens_Close)));
|
|
||||||
|
|
||||||
return rule;
|
|
||||||
}
|
|
||||||
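A hypothetical driver sketch showing how the pieces defined in this (removed) file fit together: build_lexer supplies the token patterns, build_parser_rules the operator-composed grammar, build_eval_system the dispatch table, and parse/eval_token turn source text into a token tree and evaluate it. The single include is an assumption (the new main.cpp above includes "wesley.hpp", where this code presumably now lives); the snippet is illustrative, not part of the commit:

#include <iostream>
#include <string>
#include <vector>

#include "wesley.hpp"   // assumed header-only home of the functions used below

int run_script(const char *filename) {
    Lexer lexer = build_lexer();                 // regex patterns -> token ids
    Rule parser = build_parser_rules();          // operator-composed grammar
    BoxedCPP_System ss = build_eval_system(lexer, parser);  // registers built-ins

    std::string input = load_file(filename);
    std::vector<TokenPtr> tokens = lexer.lex(input, filename);

    try {
        TokenPtr root = parse(parser, tokens, filename);  // builds the token tree
        eval_token(ss, root);                             // walks and evaluates it
    }
    catch (EvalError &ee) {
        std::cerr << "Error: " << ee.reason << std::endl;
        return 1;
    }
    return 0;
}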
|
|
||||||
|
|
||||||
BoxedCPP_System build_eval_system(Lexer &lexer, Rule &parser) {
|
|
||||||
BoxedCPP_System ss;
|
|
||||||
bootstrap(ss);
|
|
||||||
bootstrap_vector<std::vector<int> >(ss, "VectorInt");
|
|
||||||
bootstrap_vector<std::vector<Boxed_Value> >(ss, "Vector");
|
|
||||||
// dump_system(ss);
|
|
||||||
|
|
||||||
//Register a new function, this one with typing for us, so we don't have to ubox anything
|
|
||||||
//right here
|
|
||||||
register_function(ss, &print<bool>, "print");
|
|
||||||
register_function(ss, &print<std::string>, "print");
|
|
||||||
register_function(ss, &print<double>, "print");
|
|
||||||
register_function(ss, &print<size_t>, "print");
|
|
||||||
register_function(ss, &concat_string, "concat_string");
|
|
||||||
register_function(ss, &print<int>, "print");
|
|
||||||
|
|
||||||
ss.register_function(boost::function<void ()>(boost::bind(&dump_system, boost::ref(ss))), "dump_system");
|
|
||||||
ss.register_function(boost::function<void (Boxed_Value)>(boost::bind(&dump_object, _1)), "dump_object");
|
|
||||||
|
|
||||||
|
|
||||||
ss.register_function(boost::shared_ptr<Proxy_Function>(
|
|
||||||
new Dynamic_Proxy_Function(boost::bind(&add_two, boost::ref(ss), _1), 2)), "add_two");
|
|
||||||
|
|
||||||
ss.register_function(boost::shared_ptr<Proxy_Function>(
|
|
||||||
new Dynamic_Proxy_Function(boost::bind(&eval, boost::ref(lexer), boost::ref(parser),
|
|
||||||
boost::ref(ss), _1), 1)), "eval");
|
|
||||||
|
|
||||||
|
|
||||||
return ss;
|
|
||||||
}
|
|
||||||
|
|
||||||
Boxed_Value eval_token(BoxedCPP_System &ss, TokenPtr node) {
|
|
||||||
Boxed_Value retval;
|
|
||||||
unsigned int i, j;
|
|
||||||
|
|
||||||
switch (node->identifier) {
|
|
||||||
case (TokenType::Value) :
|
|
||||||
case (TokenType::File) :
|
|
||||||
for (i = 0; i < node->children.size(); ++i) {
|
|
||||||
retval = eval_token(ss, node->children[i]);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Identifier) :
|
|
||||||
if (node->text == "true") {
|
|
||||||
retval = Boxed_Value(true);
|
|
||||||
}
|
|
||||||
else if (node->text == "false") {
|
|
||||||
retval = Boxed_Value(false);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
try {
|
|
||||||
retval = ss.get_object(node->text);
|
|
||||||
}
|
|
||||||
catch (std::exception &e) {
|
|
||||||
throw EvalError("Can not find object: " + node->text, node);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Real_Number) :
|
|
||||||
retval = Boxed_Value(double(atof(node->text.c_str())));
|
|
||||||
break;
|
|
||||||
case (TokenType::Integer) :
|
|
||||||
retval = Boxed_Value(atoi(node->text.c_str()));
|
|
||||||
break;
|
|
||||||
case (TokenType::Quoted_String) :
|
|
||||||
retval = Boxed_Value(node->text);
|
|
||||||
break;
|
|
||||||
case (TokenType::Single_Quoted_String) :
|
|
||||||
retval = Boxed_Value(node->text);
|
|
||||||
break;
|
|
||||||
case (TokenType::Equation) :
|
|
||||||
retval = eval_token(ss, node->children.back());
|
|
||||||
if (node->children.size() > 1) {
|
|
||||||
for (i = node->children.size()-3; ((int)i) >= 0; i -= 2) {
|
|
||||||
Param_List_Builder plb;
|
|
||||||
plb << eval_token(ss, node->children[i]);
|
|
||||||
plb << retval;
|
|
||||||
try {
|
|
||||||
retval = dispatch(ss.get_function(node->children[i+1]->text), plb);
|
|
||||||
}
|
|
||||||
catch(std::exception &e){
|
|
||||||
throw EvalError("Can not find appropriate '" + node->children[i+1]->text + "'", node->children[i+1]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Variable_Decl): {
|
|
||||||
ss.set_object(node->children[0]->text, Boxed_Value());
|
|
||||||
retval = ss.get_object(node->children[0]->text);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Factor) :
|
|
||||||
case (TokenType::Expression) :
|
|
||||||
case (TokenType::Term) :
|
|
||||||
case (TokenType::Boolean) :
|
|
||||||
case (TokenType::Comparison) : {
|
|
||||||
retval = eval_token(ss, node->children[0]);
|
|
||||||
if (node->children.size() > 1) {
|
|
||||||
for (i = 1; i < node->children.size(); i += 2) {
|
|
||||||
Param_List_Builder plb;
|
|
||||||
plb << retval;
|
|
||||||
plb << eval_token(ss, node->children[i + 1]);
|
|
||||||
|
|
||||||
try {
|
|
||||||
retval = dispatch(ss.get_function(node->children[i]->text), plb);
|
|
||||||
}
|
|
||||||
catch(std::exception &e){
|
|
||||||
throw EvalError("Can not find appropriate '" + node->children[i]->text + "'", node->children[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Array_Call) : {
|
|
||||||
retval = eval_token(ss, node->children[0]);
|
|
||||||
for (i = 1; i < node->children.size(); ++i) {
|
|
||||||
Param_List_Builder plb;
|
|
||||||
plb << retval;
|
|
||||||
plb << eval_token(ss, node->children[i]);
|
|
||||||
try {
|
|
||||||
retval = dispatch(ss.get_function("[]"), plb);
|
|
||||||
}
|
|
||||||
catch(std::exception &e){
|
|
||||||
throw EvalError("Can not find appropriate array lookup '[]'", node->children[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Negate) : {
|
|
||||||
retval = eval_token(ss, node->children[0]);
|
|
||||||
Param_List_Builder plb;
|
|
||||||
plb << retval;
|
|
||||||
//plb << Boxed_Value(-1);
|
|
||||||
|
|
||||||
try {
|
|
||||||
retval = dispatch(ss.get_function("-"), plb);
|
|
||||||
}
|
|
||||||
catch(std::exception &e){
|
|
||||||
throw EvalError("Can not find appropriate negation", node->children[0]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Prefix) : {
|
|
||||||
retval = eval_token(ss, node->children[1]);
|
|
||||||
Param_List_Builder plb;
|
|
||||||
plb << retval;
|
|
||||||
|
|
||||||
try {
|
|
||||||
retval = dispatch(ss.get_function(node->children[0]->text), plb);
|
|
||||||
}
|
|
||||||
catch(std::exception &e){
|
|
||||||
throw EvalError("Can not find appropriate prefix", node->children[0]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Array_Init) : {
|
|
||||||
try {
|
|
||||||
retval = dispatch(ss.get_function("Vector"), Param_List_Builder());
|
|
||||||
for (i = 0; i < node->children.size(); ++i) {
|
|
||||||
try {
|
|
||||||
Boxed_Value tmp = eval_token(ss, node->children[i]);
|
|
||||||
dispatch(ss.get_function("push_back"), Param_List_Builder() << retval << tmp);
|
|
||||||
}
|
|
||||||
catch (std::exception inner_e) {
|
|
||||||
throw EvalError("Can not find appropriate 'push_back'", node->children[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (std::exception e) {
|
|
||||||
throw EvalError("Can not find appropriate 'Vector()'", node);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case (TokenType::Fun_Call) : {
|
|
||||||
Param_List_Builder plb;
|
|
||||||
for (i = 1; i < node->children.size(); ++i) {
|
|
||||||
plb << eval_token(ss, node->children[i]);
|
|
||||||
}
|
|
||||||
try {
|
|
||||||
                retval = dispatch(ss.get_function(node->children[0]->text), plb);
            }
            catch(EvalError &ee) {
                throw EvalError(ee.reason, node->children[0]);
            }
            catch(std::exception &e){
                throw EvalError("Can not find appropriate '" + node->children[0]->text + "'", node->children[0]);
            }
            catch(ReturnValue &rv) {
                retval = rv.retval;
            }
        }
        break;
        case (TokenType::Method_Call) : {
            retval = eval_token(ss, node->children[0]);
            if (node->children.size() > 1) {
                for (i = 1; i < node->children.size(); ++i) {
                    Param_List_Builder plb;
                    plb << retval;

                    for (j = 1; j < node->children[i]->children.size(); ++j) {
                        plb << eval_token(ss, node->children[i]->children[j]);
                    }

                    try {
                        retval = dispatch(ss.get_function(node->children[i]->children[0]->text), plb);
                    }
                    catch(EvalError &ee) {
                        throw EvalError(ee.reason, node->children[0]);
                    }
                    catch(std::exception &e){
                        throw EvalError("Can not find appropriate '" + node->children[i]->children[0]->text + "'", node->children[0]);
                    }
                    catch(ReturnValue &rv) {
                        retval = rv.retval;
                    }
                }
            }
        }
        break;
        case(TokenType::If_Block) : {
            retval = eval_token(ss, node->children[0]);
            bool cond;
            try {
                cond = Cast_Helper<bool &>()(retval);
            }
            catch (std::exception &e) {
                throw EvalError("If condition not boolean", node->children[0]);
            }
            if (cond) {
                retval = eval_token(ss, node->children[1]);
            }
            else {
                if (node->children.size() > 2) {
                    i = 2;
                    while ((!cond) && (i < node->children.size())) {
                        if (node->children[i]->text == "else") {
                            retval = eval_token(ss, node->children[i+1]);
                            cond = true;
                        }
                        else if (node->children[i]->text == "elseif") {
                            retval = eval_token(ss, node->children[i+1]);
                            try {
                                cond = Cast_Helper<bool &>()(retval);
                            }
                            catch (std::exception &e) {
                                throw EvalError("Elseif condition not boolean", node->children[i+1]);
                            }
                            if (cond) {
                                retval = eval_token(ss, node->children[i+2]);
                            }
                        }
                        i = i + 3;
                    }
                }
            }
        }
        break;
        case(TokenType::While_Block) : {
            retval = eval_token(ss, node->children[0]);
            bool cond;
            try {
                cond = Cast_Helper<bool &>()(retval);
            }
            catch (std::exception) {
                throw EvalError("While condition not boolean", node->children[0]);
            }
            while (cond) {
                try {
                    eval_token(ss, node->children[1]);
                    retval = eval_token(ss, node->children[0]);
                    try {
                        cond = Cast_Helper<bool &>()(retval);
                    }
                    catch (std::exception) {
                        throw EvalError("While condition not boolean", node->children[0]);
                    }
                }
                catch (BreakLoop &bl) {
                    cond = false;
                }
            }
            retval = Boxed_Value();
        }
        break;
        case(TokenType::For_Block) : {
            Boxed_Value condition;
            bool cond;

            try {
                if (node->children.size() == 4) {
                    eval_token(ss, node->children[0]);
                    condition = eval_token(ss, node->children[1]);
                }
                else if (node->children.size() == 3){
                    condition = eval_token(ss, node->children[0]);
                }
                cond = Cast_Helper<bool &>()(condition);
            }
            catch (std::exception &e) {
                throw EvalError("For condition not boolean", node);
            }
            while (cond) {
                try {
                    if (node->children.size() == 4) {
                        eval_token(ss, node->children[3]);
                        eval_token(ss, node->children[2]);
                        condition = eval_token(ss, node->children[1]);
                    }
                    else if (node->children.size() == 3) {
                        eval_token(ss, node->children[2]);
                        eval_token(ss, node->children[1]);
                        condition = eval_token(ss, node->children[0]);
                    }
                    cond = Cast_Helper<bool &>()(condition);
                }
                catch (std::exception &e) {
                    throw EvalError("For condition not boolean", node);
                }
                catch (BreakLoop &bl) {
                    cond = false;
                }
            }
            retval = Boxed_Value();
        }
        break;
        case (TokenType::Function_Def) : {
            unsigned int num_args = node->children.size() - 2;
            std::vector<std::string> param_names;
            for (i = 0; i < num_args; ++i) {
                param_names.push_back(node->children[i+1]->text);
            }

            ss.register_function(boost::shared_ptr<Proxy_Function>(
                new Dynamic_Proxy_Function(boost::bind(&eval_function, boost::ref(ss), node->children.back(), param_names, _1))), node->children[0]->text);
        }
        break;
        case (TokenType::Scoped_Block) : {
            ss.new_scope();
            for (i = 0; i < node->children.size(); ++i) {
                retval = eval_token(ss, node->children[i]);
            }
            ss.pop_scope();
        }
        break;
        case (TokenType::Return) : {
            if (node->children.size() > 0) {
                retval = eval_token(ss, node->children[0]);
            }
            else {
                retval = Boxed_Value();
            }
            throw ReturnValue(retval, node);
        }
        break;
        case (TokenType::Break) : {
            throw BreakLoop(node);
        }
        break;
        case (TokenType::Statement) :
        case (TokenType::Carriage_Return) :
        case (TokenType::Semicolon) :
        case (TokenType::Comment) :
        case (TokenType::Operator) :
        case (TokenType::Whitespace) :
        case (TokenType::Parens_Open) :
        case (TokenType::Parens_Close) :
        case (TokenType::Square_Open) :
        case (TokenType::Square_Close) :
        case (TokenType::Curly_Open) :
        case (TokenType::Curly_Close) :
        case (TokenType::Comma) :
        break;
    }

    return retval;
}


TokenPtr parse(Rule &rule, std::vector<TokenPtr> &tokens, const char *filename) {

    Token_Iterator iter = tokens.begin(), end = tokens.end();
    TokenPtr parent(new Token("Root", TokenType::File, filename));

    std::pair<Token_Iterator, bool> results = rule(iter, end, parent);

    if ((results.second) && (results.first == end)) {
        //debug_print(parent, "");
        return parent;
    }
    else {
        throw ParserError("Parse failed to complete", *(results.first));
        //throw ParserError("Parse failed to complete at: " + (*(results.first))->text , *(results.first));
    }
}

Boxed_Value evaluate_string(Lexer &lexer, Rule &parser, BoxedCPP_System &ss, const std::string &input, const char *filename) {
    std::vector<TokenPtr> tokens = lexer.lex(input, filename);
    Boxed_Value value;

    for (unsigned int i = 0; i < tokens.size(); ++i) {
        if ((tokens[i]->identifier == TokenType::Quoted_String) || (tokens[i]->identifier == TokenType::Single_Quoted_String)) {
            tokens[i]->text = tokens[i]->text.substr(1, tokens[i]->text.size()-2);
        }
    }

    //debug_print(tokens);
    try {
        TokenPtr parent = parse(parser, tokens, filename);
        value = eval_token(ss, parent);
    }
    catch (ParserError &pe) {
        if (filename != std::string("__EVAL__")) {
            std::cout << "Parsing error: \"" << pe.reason << "\" in '" << pe.location->filename << "' line: " << pe.location->start.line+1 << std::endl;
        }
        else {
            std::cout << "Parsing error: \"" << pe.reason << "\"" << std::endl;
        }
    }
    catch (EvalError &ee) {
        if (filename != std::string("__EVAL__")) {
            std::cout << "Eval error: \"" << ee.reason << "\" in '" << ee.location->filename << "' line: " << ee.location->start.line+1 << std::endl;
        }
        else {
            std::cout << "Eval error: \"" << ee.reason << "\"" << std::endl;
        }
    }
    catch (std::exception &e) {
        std::cout << "Exception: " << e.what() << std::endl;
    }

    return value;
}

  int main(int argc, char *argv[]) {
      std::string input;

-     Lexer lexer = build_lexer();
+     Wesley_Engine we;
-     Rule parser = build_parser_rules();
-     BoxedCPP_System ss = build_eval_system(lexer, parser);

      if (argc < 2) {
          std::cout << "eval> ";
@@ -696,16 +23,16 @@ int main(int argc, char *argv[]) {
          while (input != "quit") {
              Boxed_Value val;
              try {
-                 val = evaluate_string(lexer, parser, ss, input, "__EVAL__");
+                 val = we.evaluate_string(input);
              }
              catch (const ReturnValue &rv) {
                  val = rv.retval;
              }
              if (val.get_type_info().m_bare_type_info && *(val.get_type_info().m_bare_type_info) != typeid(void)) {
                  try {
-                     Boxed_Value printeval = dispatch(ss.get_function("to_string"), Param_List_Builder() << val);
+                     Boxed_Value printeval = dispatch(we.get_engine().get_function("to_string"), Param_List_Builder() << val);
                      std::cout << "result: ";
-                     dispatch(ss.get_function("print"), Param_List_Builder() << printeval);
+                     dispatch(we.get_engine().get_function("print"), Param_List_Builder() << printeval);
                  } catch (const std::runtime_error &e) {
                      //std::cout << "unhandled type: " << val.get_type_info().m_type_info->name() << std::endl;
                  }
@@ -716,7 +43,14 @@ int main(int argc, char *argv[]) {
      }
      else {
          for (int i = 1; i < argc; ++i) {
-             Boxed_Value val = evaluate_string(lexer, parser, ss, load_file(argv[i]), argv[i]);
+             //Boxed_Value val = we.evaluate_string(we.load_file(argv[i]), argv[i]);
+             try {
+                 Boxed_Value val = we.evaluate_file(argv[i]);
+             }
+             catch (std::exception &e) {
+                 std::cerr << "Could not open: " << argv[i] << std::endl;
+                 exit(1);
+             }
          }
      }
  }
41  wesley/wesley.hpp  Normal file
@@ -0,0 +1,41 @@
// This file is distributed under the BSD License.
// See LICENSE.TXT for details.

#ifndef WESLEY_HPP_
#define WESLEY_HPP_

#include <boost/bind.hpp>
#include <boost/shared_ptr.hpp>
#include <boost/function.hpp>

#include <iostream>
#include <map>
#include <fstream>

class TokenType { public: enum Type { File, Whitespace, Identifier, Integer, Operator, Parens_Open, Parens_Close,
    Square_Open, Square_Close, Curly_Open, Curly_Close, Comma, Quoted_String, Single_Quoted_String, Carriage_Return, Semicolon,
    Function_Def, Scoped_Block, Statement, Equation, Return, Expression, Term, Factor, Negate, Comment,
    Value, Fun_Call, Method_Call, Comparison, If_Block, While_Block, Boolean, Real_Number, Array_Call, Variable_Decl, Array_Init,
    For_Block, Prefix, Break }; };

const char *tokentype_to_string(int tokentype) {
    const char *token_types[] = {"File", "Whitespace", "Identifier", "Integer", "Operator", "Parens_Open", "Parens_Close",
        "Square_Open", "Square_Close", "Curly_Open", "Curly_Close", "Comma", "Quoted_String", "Single_Quoted_String", "Carriage_Return", "Semicolon",
        "Function_Def", "Scoped_Block", "Statement", "Equation", "Return", "Expression", "Term", "Factor", "Negate", "Comment",
        "Value", "Fun_Call", "Method_Call", "Comparison", "If_Block", "While_Block", "Boolean", "Real Number", "Array_Call", "Variable_Decl", "Array_Init",
        "For_Block", "Prefix", "Break" };

    return token_types[tokentype];
}

#include "boxedcpp.hpp"
#include "bootstrap.hpp"
#include "bootstrap_stl.hpp"

#include "langkit_lexer.hpp"
#include "langkit_parser.hpp"

#include "wesley_eval.hpp"
#include "wesley_engine.hpp"

#endif /* WESLEY_HPP_ */
291  wesley/wesley_engine.hpp  Normal file
@@ -0,0 +1,291 @@
// This file is distributed under the BSD License.
// See LICENSE.TXT for details.

#ifndef WESLEY_ENGINE_HPP_
#define WESLEY_ENGINE_HPP_

#include <exception>

//A function that prints any string passed to it

template <typename T>
void print(const T &t)
{
    std::cout << t << std::endl;
}

template<> void print<bool>(const bool &t)
{
    if (t) {
        std::cout << "true" << std::endl;
    }
    else {
        std::cout << "false" << std::endl;
    }
}

template <typename String_Type>
String_Type concat_string(const String_Type &s1, const String_Type &s2) {
    return s1+s2;
}

template <typename Eval_Engine>
class Wesley_System {
    Lexer lexer;
    Rule parser;
    Eval_Engine engine;

public:
    Wesley_System() : lexer(build_lexer()), parser(build_parser_rules()), engine(build_eval_system(lexer, parser)) {

    }

    Eval_Engine &get_engine() {
        return engine;
    }

    const Boxed_Value eval(const std::vector<Boxed_Value> &vals) {
        std::string val;

        try {
            val = Cast_Helper<std::string &>()(vals[0]);
        }
        catch (std::exception &e) {
            throw EvalError("Can not evaluate string: " + val, TokenPtr());
        }
        catch (EvalError &ee) {
            throw EvalError("Can not evaluate string: " + val + " reason: " + ee.reason, TokenPtr());
        }
        return evaluate_string(val);
    }

    std::string load_file(const char *filename) {
        std::ifstream infile (filename, std::ios::in | std::ios::ate);

        if (!infile.is_open()) {
            std::string fname = filename;
            throw std::runtime_error("Can not open: " + fname);
        }

        std::streampos size = infile.tellg();
        infile.seekg(0, std::ios::beg);

        std::vector<char> v(size);
        infile.read(&v[0], size);

        std::string ret_val (v.empty() ? std::string() : std::string (v.begin(), v.end()).c_str());

        return ret_val;
    }

    void debug_print(TokenPtr token, std::string prepend) {
        std::cout << prepend << "Token: " << token->text << "(" << tokentype_to_string(token->identifier) << ") @ " << token->filename
            << ": (" << token->start.line << ", " << token->start.column << ") to ("
            << token->end.line << ", " << token->end.column << ") " << std::endl;

        for (unsigned int i = 0; i < token->children.size(); ++i) {
            debug_print(token->children[i], prepend + " ");
        }
    }

    void debug_print(std::vector<TokenPtr> &tokens) {
        for (unsigned int i = 0; i < tokens.size(); ++i) {
            debug_print(tokens[i], "");
        }
    }

    Lexer build_lexer() {
        Lexer lexer;
        lexer.set_skip(Pattern("[ \\t]+", TokenType::Whitespace));
        lexer.set_line_sep(Pattern("\\n|\\r\\n", TokenType::Carriage_Return));
        lexer.set_command_sep(Pattern(";|\\r\\n|\\n", TokenType::Semicolon));
        lexer.set_multiline_comment(Pattern("/\\*", TokenType::Comment), Pattern("\\*/", TokenType::Comment));
        lexer.set_singleline_comment(Pattern("//", TokenType::Comment));

        lexer << Pattern("[A-Za-z_]+", TokenType::Identifier);
        lexer << Pattern("[0-9]+\\.[0-9]+", TokenType::Real_Number);
        lexer << Pattern("[0-9]+", TokenType::Integer);
        lexer << Pattern("[!@#$%^&*|\\-+=<>.]+|/[!@#$%^&|\\-+=<>]*", TokenType::Operator);
        lexer << Pattern("\\(", TokenType::Parens_Open);
        lexer << Pattern("\\)", TokenType::Parens_Close);
        lexer << Pattern("\\[", TokenType::Square_Open);
        lexer << Pattern("\\]", TokenType::Square_Close);
        lexer << Pattern("\\{", TokenType::Curly_Open);
        lexer << Pattern("\\}", TokenType::Curly_Close);
        lexer << Pattern(",", TokenType::Comma);
        lexer << Pattern("\"(?:[^\"\\\\]|\\\\.)*\"", TokenType::Quoted_String);
        lexer << Pattern("'(?:[^'\\\\]|\\\\.)*'", TokenType::Single_Quoted_String);

        return lexer;
    }

    Rule build_parser_rules() {
        Rule params;
        Rule block(TokenType::Scoped_Block);
        Rule fundef(TokenType::Function_Def);
        Rule statement;
        Rule equation(TokenType::Equation);
        Rule boolean(TokenType::Boolean);
        Rule comparison(TokenType::Comparison);
        Rule expression(TokenType::Expression);
        Rule term(TokenType::Term);
        Rule factor(TokenType::Factor);
        Rule negate(TokenType::Negate);
        Rule prefix(TokenType::Prefix);

        Rule funcall(TokenType::Fun_Call);
        Rule methodcall(TokenType::Method_Call);
        Rule if_block(TokenType::If_Block);
        Rule while_block(TokenType::While_Block);
        Rule for_block(TokenType::For_Block);
        Rule arraycall(TokenType::Array_Call);
        Rule vardecl(TokenType::Variable_Decl);
        Rule arrayinit(TokenType::Array_Init);

        Rule return_statement(TokenType::Return);
        Rule break_statement(TokenType::Break);

        Rule value;
        Rule statements;
        Rule for_conditions;
        Rule source_elem;
        Rule source_elems;
        Rule statement_list;
        Rule paren_block;

        Rule rule = *(Ign(Id(TokenType::Semicolon))) >> source_elems >> *(Ign(Id(TokenType::Semicolon)));

        source_elems = source_elem >> *(+Ign(Id(TokenType::Semicolon)) >> source_elem);
        source_elem = fundef | statement;
        statement_list = statement >> *(+Ign(Id(TokenType::Semicolon)) >> statement);
        statement = if_block | while_block | for_block | equation;

        if_block = Ign(Str("if")) >> boolean >> block >> *(*Ign(Id(TokenType::Semicolon)) >> Str("elseif") >> boolean >> block) >> ~(*Ign(Id(TokenType::Semicolon)) >> Str("else") >> block);
        while_block = Ign(Str("while")) >> boolean >> block;
        for_block = Ign(Str("for")) >> for_conditions >> block;
        for_conditions = Ign(Id(TokenType::Parens_Open)) >> ~equation >> Ign(Str(";")) >> boolean >> Ign(Str(";")) >> equation >> Ign(Id(TokenType::Parens_Close));

        fundef = Ign(Str("def")) >> Id(TokenType::Identifier) >> ~(Ign(Id(TokenType::Parens_Open)) >> ~params >> Ign(Id(TokenType::Parens_Close))) >>
            block;
        params = Id(TokenType::Identifier) >> *(Ign(Str(",")) >> Id(TokenType::Identifier));
        block = *(Ign(Id(TokenType::Semicolon))) >> Ign(Id(TokenType::Curly_Open)) >> *(Ign(Id(TokenType::Semicolon))) >> ~statement_list >> *(Ign(Id(TokenType::Semicolon))) >> Ign(Id(TokenType::Curly_Close));

        equation = *(((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("=")) |
            ((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("+=")) |
            ((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("-=")) |
            ((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("*=")) |
            ((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("/="))) >> boolean;
        boolean = comparison >> *((Str("&&") >> comparison) | (Str("||") >> comparison));
        comparison = expression >> *((Str("==") >> expression) | (Str("!=") >> expression) | (Str("<") >> expression) |
            (Str("<=") >> expression) |(Str(">") >> expression) | (Str(">=") >> expression));
        expression = term >> *((Str("+") >> term) | (Str("-") >> term));
        term = factor >> *((Str("*") >> factor) | (Str("/") >> factor));
        factor = methodcall | arraycall | value | negate | prefix | (Ign(Str("+")) >> value);
        value = vardecl | arrayinit | block | paren_block | return_statement | break_statement |
            funcall | Id(TokenType::Identifier) | Id(TokenType::Real_Number) | Id(TokenType::Integer) | Id(TokenType::Quoted_String) |
            Id(TokenType::Single_Quoted_String) ;

        funcall = Id(TokenType::Identifier) >> Ign(Id(TokenType::Parens_Open)) >> ~(boolean >> *(Ign(Str("," )) >> boolean)) >> Ign(Id(TokenType::Parens_Close));
        methodcall = value >> +(Ign(Str(".")) >> funcall);
        negate = Ign(Str("-")) >> boolean;
        prefix = (Str("++") >> (boolean | arraycall)) | (Str("--") >> (boolean | arraycall));
        arraycall = value >> +((Ign(Id(TokenType::Square_Open)) >> boolean >> Ign(Id(TokenType::Square_Close))));

        arrayinit = Ign(Id(TokenType::Square_Open)) >> ~(boolean >> *(Ign(Str(",")) >> boolean)) >> Ign(Id(TokenType::Square_Close));
        vardecl = Ign(Str("var")) >> Id(TokenType::Identifier);
        return_statement = Ign(Str("return")) >> ~boolean;
        break_statement = Wrap(Ign(Str("break")));
        paren_block = (Ign(Id(TokenType::Parens_Open)) >> equation >> Ign(Id(TokenType::Parens_Close)));

        return rule;
    }

    Eval_Engine build_eval_system(Lexer &lexer, Rule &parser) {
        Eval_Engine ss;
        bootstrap(ss);
        bootstrap_vector<std::vector<int> >(ss, "VectorInt");
        bootstrap_vector<std::vector<Boxed_Value> >(ss, "Vector");
        // dump_system(ss);

        //Register a new function, this one with typing for us, so we don't have to ubox anything
        //right here
        register_function(ss, &print<bool>, "print");
        register_function(ss, &print<std::string>, "print");
        register_function(ss, &print<double>, "print");
        register_function(ss, &print<size_t>, "print");
        register_function(ss, &concat_string<std::string>, "concat_string");
        register_function(ss, &print<int>, "print");

        ss.register_function(boost::function<void ()>(boost::bind(&dump_system, boost::ref(ss))), "dump_system");
        ss.register_function(boost::function<void (Boxed_Value)>(boost::bind(&dump_object, _1)), "dump_object");

        ss.register_function(boost::shared_ptr<Proxy_Function>(
            new Dynamic_Proxy_Function(boost::bind(&Wesley_System<Eval_Engine>::eval, boost::ref(*this), _1), 1)), "eval");

        return ss;
    }

    TokenPtr parse(Rule &rule, std::vector<TokenPtr> &tokens, const char *filename) {

        Token_Iterator iter = tokens.begin(), end = tokens.end();
        TokenPtr parent(new Token("Root", TokenType::File, filename));

        std::pair<Token_Iterator, bool> results = rule(iter, end, parent);

        if ((results.second) && (results.first == end)) {
            //debug_print(parent, "");
            return parent;
        }
        else {
            throw ParserError("Parse failed to complete", *(results.first));
            //throw ParserError("Parse failed to complete at: " + (*(results.first))->text , *(results.first));
        }
    }

    Boxed_Value evaluate_string(const std::string &input, const char *filename = "__EVAL__") {
        std::vector<TokenPtr> tokens = lexer.lex(input, filename);
        Boxed_Value value;

        for (unsigned int i = 0; i < tokens.size(); ++i) {
            if ((tokens[i]->identifier == TokenType::Quoted_String) || (tokens[i]->identifier == TokenType::Single_Quoted_String)) {
                tokens[i]->text = tokens[i]->text.substr(1, tokens[i]->text.size()-2);
            }
        }

        //debug_print(tokens);
        try {
            TokenPtr parent = parse(parser, tokens, filename);
            value = eval_token<Eval_Engine>(engine, parent);
        }
        catch (ParserError &pe) {
            if (filename != std::string("__EVAL__")) {
                std::cout << "Parsing error: \"" << pe.reason << "\" in '" << pe.location->filename << "' line: " << pe.location->start.line+1 << std::endl;
            }
            else {
                std::cout << "Parsing error: \"" << pe.reason << "\"" << std::endl;
            }
        }
        catch (EvalError &ee) {
            if (filename != std::string("__EVAL__")) {
                std::cout << "Eval error: \"" << ee.reason << "\" in '" << ee.location->filename << "' line: " << ee.location->start.line+1 << std::endl;
            }
            else {
                std::cout << "Eval error: \"" << ee.reason << "\"" << std::endl;
            }
        }
        catch (std::exception &e) {
            std::cout << "Exception: " << e.what() << std::endl;
        }

        return value;
    }

    Boxed_Value evaluate_file(const char *filename) {
        return evaluate_string(load_file(filename), filename);
    }
};

typedef Wesley_System<BoxedCPP_System> Wesley_Engine;

#endif /* WESLEY_ENGINE_HPP_ */
390  wesley/wesley_eval.hpp  Normal file
@@ -0,0 +1,390 @@
// This file is distributed under the BSD License.
// See LICENSE.TXT for details.

#ifndef WESLEY_EVAL_HPP_
#define WESLEY_EVAL_HPP_

struct ParserError {
    std::string reason;
    TokenPtr location;

    ParserError(const std::string &why, const TokenPtr where) : reason(why), location(where){ }
};

struct EvalError {
    std::string reason;
    TokenPtr location;

    EvalError(const std::string &why, const TokenPtr where) : reason(why), location(where) { }
};

struct ReturnValue {
    Boxed_Value retval;
    TokenPtr location;

    ReturnValue(const Boxed_Value &return_value, const TokenPtr where) : retval(return_value), location(where) { }
};

struct BreakLoop {
    TokenPtr location;

    BreakLoop(const TokenPtr where) : location(where) { }
};

template <typename Eval_System>
const Boxed_Value eval_function (Eval_System &ss, TokenPtr node, const std::vector<std::string> &param_names, const std::vector<Boxed_Value> &vals) {
    for (unsigned int i = 0; i < param_names.size(); ++i) {
        ss.add_object(param_names[i], vals[i]);
    }
    return eval_token(ss, node);
}

template <typename Eval_System>
Boxed_Value eval_token(Eval_System &ss, TokenPtr node) {
    Boxed_Value retval;
    unsigned int i, j;

    switch (node->identifier) {
        case (TokenType::Value) :
        case (TokenType::File) :
            for (i = 0; i < node->children.size(); ++i) {
                retval = eval_token(ss, node->children[i]);
            }
        break;
        case (TokenType::Identifier) :
            if (node->text == "true") {
                retval = Boxed_Value(true);
            }
            else if (node->text == "false") {
                retval = Boxed_Value(false);
            }
            else {
                try {
                    retval = ss.get_object(node->text);
                }
                catch (std::exception &e) {
                    throw EvalError("Can not find object: " + node->text, node);
                }
            }
        break;
        case (TokenType::Real_Number) :
            retval = Boxed_Value(double(atof(node->text.c_str())));
        break;
        case (TokenType::Integer) :
            retval = Boxed_Value(atoi(node->text.c_str()));
        break;
        case (TokenType::Quoted_String) :
            retval = Boxed_Value(node->text);
        break;
        case (TokenType::Single_Quoted_String) :
            retval = Boxed_Value(node->text);
        break;
        case (TokenType::Equation) :
            retval = eval_token(ss, node->children.back());
            if (node->children.size() > 1) {
                for (i = node->children.size()-3; ((int)i) >= 0; i -= 2) {
                    Param_List_Builder plb;
                    plb << eval_token(ss, node->children[i]);
                    plb << retval;
                    try {
                        retval = dispatch(ss.get_function(node->children[i+1]->text), plb);
                    }
                    catch(std::exception &e){
                        throw EvalError("Can not find appropriate '" + node->children[i+1]->text + "'", node->children[i+1]);
                    }
                }
            }
        break;
        case (TokenType::Variable_Decl): {
            ss.set_object(node->children[0]->text, Boxed_Value());
            retval = ss.get_object(node->children[0]->text);
        }
        break;
        case (TokenType::Factor) :
        case (TokenType::Expression) :
        case (TokenType::Term) :
        case (TokenType::Boolean) :
        case (TokenType::Comparison) : {
            retval = eval_token(ss, node->children[0]);
            if (node->children.size() > 1) {
                for (i = 1; i < node->children.size(); i += 2) {
                    Param_List_Builder plb;
                    plb << retval;
                    plb << eval_token(ss, node->children[i + 1]);

                    try {
                        retval = dispatch(ss.get_function(node->children[i]->text), plb);
                    }
                    catch(std::exception &e){
                        throw EvalError("Can not find appropriate '" + node->children[i]->text + "'", node->children[i]);
                    }
                }
            }
        }
        break;
        case (TokenType::Array_Call) : {
            retval = eval_token(ss, node->children[0]);
            for (i = 1; i < node->children.size(); ++i) {
                Param_List_Builder plb;
                plb << retval;
                plb << eval_token(ss, node->children[i]);
                try {
                    retval = dispatch(ss.get_function("[]"), plb);
                }
                catch(std::exception &e){
                    throw EvalError("Can not find appropriate array lookup '[]'", node->children[i]);
                }
            }
        }
        break;
        case (TokenType::Negate) : {
            retval = eval_token(ss, node->children[0]);
            Param_List_Builder plb;
            plb << retval;
            //plb << Boxed_Value(-1);

            try {
                retval = dispatch(ss.get_function("-"), plb);
            }
            catch(std::exception &e){
                throw EvalError("Can not find appropriate negation", node->children[0]);
            }
        }
        break;
        case (TokenType::Prefix) : {
            retval = eval_token(ss, node->children[1]);
            Param_List_Builder plb;
            plb << retval;

            try {
                retval = dispatch(ss.get_function(node->children[0]->text), plb);
            }
            catch(std::exception &e){
                throw EvalError("Can not find appropriate prefix", node->children[0]);
            }
        }
        break;
        case (TokenType::Array_Init) : {
            try {
                retval = dispatch(ss.get_function("Vector"), Param_List_Builder());
                for (i = 0; i < node->children.size(); ++i) {
                    try {
                        Boxed_Value tmp = eval_token(ss, node->children[i]);
                        dispatch(ss.get_function("push_back"), Param_List_Builder() << retval << tmp);
                    }
                    catch (std::exception inner_e) {
                        throw EvalError("Can not find appropriate 'push_back'", node->children[i]);
                    }
                }
            }
            catch (std::exception e) {
                throw EvalError("Can not find appropriate 'Vector()'", node);
            }
        }
        break;
        case (TokenType::Fun_Call) : {
            Param_List_Builder plb;
            for (i = 1; i < node->children.size(); ++i) {
                plb << eval_token(ss, node->children[i]);
            }
            try {
                retval = dispatch(ss.get_function(node->children[0]->text), plb);
            }
            catch(EvalError &ee) {
                throw EvalError(ee.reason, node->children[0]);
            }
            catch(std::exception &e){
                throw EvalError("Can not find appropriate '" + node->children[0]->text + "'", node->children[0]);
            }
            catch(ReturnValue &rv) {
                retval = rv.retval;
            }
        }
        break;
        case (TokenType::Method_Call) : {
            retval = eval_token(ss, node->children[0]);
            if (node->children.size() > 1) {
                for (i = 1; i < node->children.size(); ++i) {
                    Param_List_Builder plb;
                    plb << retval;

                    for (j = 1; j < node->children[i]->children.size(); ++j) {
                        plb << eval_token(ss, node->children[i]->children[j]);
                    }

                    try {
                        retval = dispatch(ss.get_function(node->children[i]->children[0]->text), plb);
                    }
                    catch(EvalError &ee) {
                        throw EvalError(ee.reason, node->children[0]);
                    }
                    catch(std::exception &e){
                        throw EvalError("Can not find appropriate '" + node->children[i]->children[0]->text + "'", node->children[0]);
                    }
                    catch(ReturnValue &rv) {
                        retval = rv.retval;
                    }
                }
            }
        }
        break;
        case(TokenType::If_Block) : {
            retval = eval_token(ss, node->children[0]);
            bool cond;
            try {
                cond = Cast_Helper<bool &>()(retval);
            }
            catch (std::exception &e) {
                throw EvalError("If condition not boolean", node->children[0]);
            }
            if (cond) {
                retval = eval_token(ss, node->children[1]);
            }
            else {
                if (node->children.size() > 2) {
                    i = 2;
                    while ((!cond) && (i < node->children.size())) {
                        if (node->children[i]->text == "else") {
                            retval = eval_token(ss, node->children[i+1]);
                            cond = true;
                        }
                        else if (node->children[i]->text == "elseif") {
                            retval = eval_token(ss, node->children[i+1]);
                            try {
                                cond = Cast_Helper<bool &>()(retval);
                            }
                            catch (std::exception &e) {
                                throw EvalError("Elseif condition not boolean", node->children[i+1]);
                            }
                            if (cond) {
                                retval = eval_token(ss, node->children[i+2]);
                            }
                        }
                        i = i + 3;
                    }
                }
            }
        }
        break;
        case(TokenType::While_Block) : {
            retval = eval_token(ss, node->children[0]);
            bool cond;
            try {
                cond = Cast_Helper<bool &>()(retval);
            }
            catch (std::exception) {
                throw EvalError("While condition not boolean", node->children[0]);
            }
            while (cond) {
                try {
                    eval_token(ss, node->children[1]);
                    retval = eval_token(ss, node->children[0]);
                    try {
                        cond = Cast_Helper<bool &>()(retval);
                    }
                    catch (std::exception) {
                        throw EvalError("While condition not boolean", node->children[0]);
                    }
                }
                catch (BreakLoop &bl) {
                    cond = false;
                }
            }
            retval = Boxed_Value();
        }
        break;
        case(TokenType::For_Block) : {
            Boxed_Value condition;
            bool cond;

            try {
                if (node->children.size() == 4) {
                    eval_token(ss, node->children[0]);
                    condition = eval_token(ss, node->children[1]);
                }
                else if (node->children.size() == 3){
                    condition = eval_token(ss, node->children[0]);
                }
                cond = Cast_Helper<bool &>()(condition);
            }
            catch (std::exception &e) {
                throw EvalError("For condition not boolean", node);
            }
            while (cond) {
                try {
                    if (node->children.size() == 4) {
                        eval_token(ss, node->children[3]);
                        eval_token(ss, node->children[2]);
                        condition = eval_token(ss, node->children[1]);
                    }
                    else if (node->children.size() == 3) {
                        eval_token(ss, node->children[2]);
                        eval_token(ss, node->children[1]);
                        condition = eval_token(ss, node->children[0]);
                    }
                    cond = Cast_Helper<bool &>()(condition);
                }
                catch (std::exception &e) {
                    throw EvalError("For condition not boolean", node);
                }
                catch (BreakLoop &bl) {
                    cond = false;
                }
            }
            retval = Boxed_Value();
        }
        break;
        case (TokenType::Function_Def) : {
            unsigned int num_args = node->children.size() - 2;
            std::vector<std::string> param_names;
            for (i = 0; i < num_args; ++i) {
                param_names.push_back(node->children[i+1]->text);
            }

            ss.register_function(boost::shared_ptr<Proxy_Function>(
                new Dynamic_Proxy_Function(boost::bind(&eval_function<Eval_System>, boost::ref(ss), node->children.back(), param_names, _1))), node->children[0]->text);
        }
        break;
        case (TokenType::Scoped_Block) : {
            ss.new_scope();
            for (i = 0; i < node->children.size(); ++i) {
                retval = eval_token(ss, node->children[i]);
            }
            ss.pop_scope();
        }
        break;
        case (TokenType::Return) : {
            if (node->children.size() > 0) {
                retval = eval_token(ss, node->children[0]);
            }
            else {
                retval = Boxed_Value();
            }
            throw ReturnValue(retval, node);
        }
        break;
        case (TokenType::Break) : {
            throw BreakLoop(node);
        }
        break;
        case (TokenType::Statement) :
        case (TokenType::Carriage_Return) :
        case (TokenType::Semicolon) :
        case (TokenType::Comment) :
        case (TokenType::Operator) :
        case (TokenType::Whitespace) :
        case (TokenType::Parens_Open) :
        case (TokenType::Parens_Close) :
        case (TokenType::Square_Open) :
        case (TokenType::Square_Close) :
        case (TokenType::Curly_Open) :
        case (TokenType::Curly_Close) :
        case (TokenType::Comma) :
        break;
    }

    return retval;
}

#endif /* WESLEY_EVAL_HPP_ */
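
Usage note (not part of the commit): a minimal sketch of how the header-only engine added above is meant to be embedded, using only the interfaces introduced in this commit (the Wesley_Engine typedef, evaluate_string, evaluate_file, get_engine); the sample script text and the file name "example.wes" are hypothetical.

// Hypothetical embedding sketch; assumes wesley.hpp from this commit is on the include path.
#include <iostream>
#include "wesley.hpp"

int main() {
    // Wesley_Engine is the Wesley_System<BoxedCPP_System> typedef; its constructor
    // builds the default lexer, parser rules, and bootstrapped eval system.
    Wesley_Engine we;

    try {
        // Evaluate an inline script; the value of the last statement comes back as a Boxed_Value.
        Boxed_Value val = we.evaluate_string("var x = 2 + 3; x * 2");

        // Functions registered during bootstrap (e.g. "print") stay reachable
        // through the underlying BoxedCPP_System, exactly as main.cpp does above.
        dispatch(we.get_engine().get_function("print"), Param_List_Builder() << val);

        // Whole scripts go through evaluate_file, which wraps load_file + evaluate_string.
        we.evaluate_file("example.wes");   // hypothetical file name
    }
    catch (std::exception &e) {
        // load_file throws std::runtime_error if the file cannot be opened.
        std::cerr << e.what() << std::endl;
        return 1;
    }
    return 0;
}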