Remove old langkit project
This commit is contained in:
parent
b1ae26e9c1
commit
bce2a1ffdb
@ -1,21 +0,0 @@
|
||||
cmake_minimum_required(VERSION 2.6)

enable_testing()

project(langkit)

SET (CMAKE_BUILD_TYPE gdb)
SET (CMAKE_C_FLAGS_GDB " -Wall -ggdb")
SET (CMAKE_CXX_FLAGS_GDB " -Wall -ggdb")

find_package( Boost 1.36.0 COMPONENTS regex unit_test_framework)

if(Boost_FOUND)
  include_directories(${Boost_INCLUDE_DIRS})

  add_executable(langkit_test main.cpp)
  target_link_libraries(langkit_test ${Boost_LIBRARIES})

  add_executable(langkit_unittest unittest.cpp)
  target_link_libraries(langkit_unittest ${Boost_LIBRARIES})

  # Register the unit test only when the executable is actually built.
  # Previously add_test() ran unconditionally, so a missing Boost left a
  # registered test pointing at a target that was never created.
  add_test(langkit_unittest ${EXECUTABLE_OUTPUT_PATH}/langkit_unittest)
endif()
|
@ -1,206 +0,0 @@
|
||||
// This file is distributed under the BSD License.
|
||||
// See LICENSE.TXT for details.
|
||||
|
||||
#ifndef LANGKIT_LEXER_HPP_
|
||||
#define LANGKIT_LEXER_HPP_
|
||||
|
||||
#include <iostream>
#include <string>
#include <vector>

#include <tr1/memory>

#include <boost/regex.hpp>
|
||||
|
||||
namespace langkit
|
||||
{
|
||||
// A (line, column) position inside a source file.
struct File_Position {
    int line;
    int column;

    // Default-constructed positions refer to line 0, column 0.
    File_Position() : line(0), column(0) { }

    // Build a position from an explicit line/column pair.
    File_Position(int file_line, int file_column)
        : line(file_line), column(file_column) { }
};
|
||||
|
||||
struct Pattern {
|
||||
boost::regex regex;
|
||||
int identifier;
|
||||
|
||||
Pattern() { }
|
||||
Pattern(const std::string ®exp, int id) : regex(regexp), identifier(id) { }
|
||||
};
|
||||
|
||||
// Tokens are shared between the lexer output and the parse tree, so
// they are passed around by reference-counted pointer.
typedef std::tr1::shared_ptr<struct Token> TokenPtr;

// One lexed token: its text, numeric type identifier, originating file
// name and source range.  The parser reuses Token as a parse-tree
// node, so it also carries a list of child tokens.
struct Token {
    std::string text;
    int identifier;
    const char *filename;   // not owned; caller keeps the string alive
    File_Position start, end;

    std::vector<TokenPtr> children;

    Token(const std::string &token_text, int id, const char *fname) : text(token_text), identifier(id), filename(fname) { }
};
|
||||
|
||||
// Regex-driven tokenizer.  Token patterns are registered with
// operator<<; whitespace, separators and comment delimiters are
// configured through the set_* helpers before lex() is called.
struct Lexer {
    std::vector<Pattern> lex_patterns;
    Pattern skip_pattern;
    Pattern command_sep_pattern;
    Pattern line_sep_pattern;
    Pattern multiline_comment_start_pattern;
    Pattern multiline_comment_end_pattern;
    Pattern singleline_comment_pattern;

    // Append a token pattern.  NOTE(review): this returns a copy of
    // the lexer, not a reference, so `lexer << a << b` would push `b`
    // into a discarded temporary -- existing callers use one statement
    // per pattern, which works; confirm before relying on chaining.
    Lexer operator<<(const Pattern &p) {
        lex_patterns.push_back(p);
        return *this;
    }

    // Tokenize `input`, tagging every token with `filename` and its
    // start/end line+column.  Comment and skip-pattern text produces
    // no tokens.  On an incomplete block comment or an unrecognized
    // character sequence, the tokens lexed so far are returned.
    std::vector<TokenPtr> lex(const std::string &input, const char *filename) {
        std::vector<Pattern>::iterator iter, end, iter2, end2;
        std::vector<TokenPtr> retval;
        bool found;
        std::string::const_iterator input_iter = input.begin(), input_end = input.end();

        int current_col = 0;
        int current_line = 0;
        boost::match_results<std::string::const_iterator> what;

        while (input_iter != input_end) {
            found = false;

            // Single-line comment: swallow everything up to and
            // including the next line separator.
            if (regex_search(input_iter, input_end, what, singleline_comment_pattern.regex, boost::match_continuous)) {
                std::string comment_start(what[0]);
                input_iter += comment_start.size();

                bool found_eol = false;

                while ((!found_eol) && (input_iter != input_end)) {
                    boost::match_results<std::string::const_iterator> eol_delim;
                    if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
                        std::string comment_end(eol_delim[0]);
                        input_iter += comment_end.size();
                        ++current_line;
                        current_col = 0;
                        found_eol = true;
                        break;
                    }
                    if ((!found_eol) && (input_iter != input_end)) {
                        ++input_iter;
                    }
                }
            }
            // Block comment: scan forward for the end marker, keeping
            // the line counter in step with embedded line separators.
            else if (regex_search(input_iter, input_end, what, multiline_comment_start_pattern.regex, boost::match_continuous)) {
                std::string comment_start(what[0]);
                input_iter += comment_start.size();

                bool found_eoc = false;

                while ((!found_eoc) && (input_iter != input_end)) {
                    boost::match_results<std::string::const_iterator> eol_delim;
                    if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
                        std::string comment_end(eol_delim[0]);
                        input_iter += comment_end.size();
                        ++current_line;
                        current_col = 0;
                    }
                    boost::match_results<std::string::const_iterator> eoc_delim;
                    if (regex_search(input_iter, input_end, eoc_delim, multiline_comment_end_pattern.regex, boost::match_continuous)) {
                        std::string comment_end(eoc_delim[0]);
                        input_iter += comment_end.size();
                        current_col += comment_end.size();
                        found_eoc = true;
                        break;
                    }
                    if ((!found_eoc) && (input_iter != input_end)) {
                        ++input_iter;
                        ++current_col;
                    }
                }

                // Reached end of input without the closing delimiter.
                if (!found_eoc) {
                    std::cout << "Incomplete comment block! Add exceptions!" << std::endl;
                    return retval;
                }
            }
            // Skip pattern (typically whitespace): consumed, no token.
            else if (regex_search(input_iter, input_end, what, skip_pattern.regex, boost::match_continuous)) {
                std::string whitespace(what[0]);
                input_iter += whitespace.size();
                current_col += whitespace.size();
                found = true;
            }
            // Line separator: if the same text also matches the
            // command separator (e.g. "\n" matching both), emit it as
            // a command-separator token before advancing the line.
            else if (regex_search(input_iter, input_end, what, line_sep_pattern.regex, boost::match_continuous)) {
                const std::string cr(what[0]);

                boost::match_results<std::string::const_iterator> if_delim;
                if (regex_search(cr.begin(), cr.end(), if_delim, command_sep_pattern.regex, boost::match_continuous)) {
                    TokenPtr t(new Token(if_delim[0], command_sep_pattern.identifier, filename));
                    t->start.column = current_col;
                    t->start.line = current_line;
                    current_col += t->text.size();
                    t->end.column = current_col;
                    t->end.line = current_line;
                    retval.push_back(t);
                }

                input_iter += cr.size();
                ++current_line;
                current_col = 0;
                found = true;
            }
            // Explicit command separator (e.g. ';').
            else if (regex_search(input_iter, input_end, what, command_sep_pattern.regex, boost::match_continuous)) {
                TokenPtr t(new Token(what[0], command_sep_pattern.identifier, filename));
                t->start.column = current_col;
                t->start.line = current_line;
                current_col += t->text.size();
                t->end.column = current_col;
                t->end.line = current_line;
                retval.push_back(t);
                input_iter += t->text.size();
                found = true;
            }
            // Ordinary token patterns, tried in registration order;
            // the first match wins.
            else {
                for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
                    if (regex_search(input_iter, input_end, what, iter->regex, boost::match_continuous)) {
                        TokenPtr t(new Token(what[0], iter->identifier, filename));
                        t->start.column = current_col;
                        t->start.line = current_line;
                        current_col += t->text.size();
                        t->end.column = current_col;
                        t->end.line = current_line;
                        retval.push_back(t);
                        input_iter += t->text.size();
                        found = true;
                        break;
                    }
                }

                // Nothing matched at this position: report and bail
                // out with whatever has been lexed so far.
                if (!found) {
                    const std::string err(input_iter, input_end);
                    std::cout << "Unknown string at: " << err << std::endl;
                    return retval;
                }
            }
        }
        return retval;
    }

    // Pattern consumed silently between tokens (usually whitespace).
    void set_skip(const Pattern &p) {
        skip_pattern = p;
    }
    // Pattern that ends a source line (drives line/column tracking).
    void set_line_sep(const Pattern &p) {
        line_sep_pattern = p;
    }
    // Pattern that separates commands (emitted as a token).
    void set_command_sep(const Pattern &p) {
        command_sep_pattern = p;
    }
    // Delimiters for block comments.
    void set_multiline_comment(const Pattern &start, const Pattern &end) {
        multiline_comment_start_pattern = start;
        multiline_comment_end_pattern = end;
    }
    // Opening delimiter for line comments (runs to end of line).
    void set_singleline_comment(const Pattern &p) {
        singleline_comment_pattern = p;
    }
};
|
||||
|
||||
}
|
||||
#endif /* LANGKIT_LEXER_HPP_ */
|
@ -1,449 +0,0 @@
|
||||
// This file is distributed under the BSD License.
|
||||
// See LICENSE.TXT for details.
|
||||
|
||||
#ifndef LANGKIT_PARSER_HPP_
|
||||
#define LANGKIT_PARSER_HPP_
|
||||
|
||||
#include <boost/function.hpp>
|
||||
|
||||
#include "langkit_lexer.hpp"
|
||||
|
||||
namespace langkit
|
||||
{
|
||||
struct RuleImpl;

// Parser-combinator plumbing.  A rule function receives the token
// range, the parse-tree parent, a keep flag (attach matches to the
// parent?) and the id of any synthesized node; it returns the
// iterator it advanced to and whether it matched.
typedef std::vector<TokenPtr>::iterator Token_Iterator;
typedef boost::function<std::pair<Token_Iterator, bool>(Token_Iterator, Token_Iterator, TokenPtr, bool, int)> RuleFun;
typedef std::tr1::shared_ptr<RuleImpl> RuleImplPtr;

// Shared implementation of one grammar rule: the bound match function
// plus the keep/new_id settings applied when the rule fires.
struct RuleImpl {
    RuleFun rule;
    bool keep;    // attach matched tokens to the parent node?
    int new_id;   // identifier for a synthesized parse node, or -1

    RuleImpl() : keep(true), new_id(-1) {}
    RuleImpl(int id) : keep(true), new_id(id) {}
    RuleImpl(RuleFun fun) : rule(fun), keep(true), new_id(-1) {}
    RuleImpl(RuleFun fun, bool keep_match) : rule(fun), keep(keep_match), new_id(-1) {}

    // Invoke the stored rule with this impl's keep/new_id settings.
    std::pair<Token_Iterator, bool> operator()(Token_Iterator iter, Token_Iterator end, TokenPtr parent) {
        return rule(iter, end, parent, keep, new_id);
    }
};
|
||||
|
||||
//struct Rule;
|
||||
|
||||
// Leaf rule: matches exactly one token whose text equals `val`.
// On success the token is attached to `parent` (if keep) and the
// iterator advances by one; otherwise the iterator is unchanged.
template <typename T_Iter>
std::pair<T_Iter, bool> String_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, const std::string &val) {
    if (iter != end) {
        if ((*iter)->text == val) {
            if (keep) {
                parent->children.push_back(*iter);
            }
            return std::pair<T_Iter, bool>(++iter, true);
        }
    }

    return std::pair<T_Iter, bool>(iter, false);
}
|
||||
|
||||
// Leaf rule: matches exactly one token whose identifier equals `val`.
// Mirrors String_Rule but compares the numeric token type rather than
// the token text.
template <typename T_Iter>
std::pair<T_Iter, bool> Id_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, const int val) {
    if (iter != end) {
        if ((*iter)->identifier == val) {
            if (keep) {
                parent->children.push_back(*iter);
            }
            return std::pair<T_Iter, bool>(++iter, true);
        }
    }

    return std::pair<T_Iter, bool>(iter, false);
}
|
||||
|
||||
// Alternation: tries `lhs`; if it fails, erases lhs's partial matches
// from the parent and tries `rhs`.  When new_id != -1 a fresh node is
// synthesized to hold the winning branch's children and is attached to
// the original parent only on success, with its source range taken
// from the consumed tokens.  (`new_iter` and `keep` are unused here.)
template <typename T_Iter, typename R_Type>
std::pair<T_Iter, bool> Or_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type lhs, R_Type rhs) {
    T_Iter new_iter;
    unsigned int prev_size;
    TokenPtr prev_parent = parent;

    if (new_id != -1) {
        parent = TokenPtr(new Token("", new_id, parent->filename));
    }

    // Children present before we started, so partial matches can be
    // rolled back on failure.
    prev_size = parent->children.size();

    if (iter != end) {
        std::pair<T_Iter, bool> result = lhs(iter, end, parent);

        if (result.second) {
            if (new_id != -1) {
                // Stamp the synthesized node with the matched range.
                parent->filename = (*iter)->filename;
                parent->start = (*iter)->start;
                if (result.first == iter) {
                    parent->end = (*iter)->start;
                }
                else {
                    parent->end = (*(result.first - 1))->end;
                }
                prev_parent->children.push_back(parent);
            }
            return std::pair<T_Iter, bool>(result.first, true);
        }
        else {
            if (parent->children.size() != prev_size) {
                //Clear out the partial matches
                parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
            }

            result = rhs(iter, end, parent);
            if (result.second) {
                if (new_id != -1) {
                    parent->filename = (*iter)->filename;
                    parent->start = (*iter)->start;
                    if (result.first == iter) {
                        parent->end = (*iter)->start;
                    }
                    else {
                        parent->end = (*(result.first - 1))->end;
                    }

                    prev_parent->children.push_back(parent);
                }
                return std::pair<T_Iter, bool>(result.first, true);
            }
        }
    }

    if (parent->children.size() != prev_size) {
        //Clear out the partial matches
        parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
    }

    return std::pair<T_Iter, bool>(iter, false);
}
|
||||
|
||||
template <typename T_Iter, typename R_Type>
|
||||
std::pair<Token_Iterator, bool> And_Rule(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type lhs, R_Type rhs) {
|
||||
T_Iter lhs_iter, rhs_iter;
|
||||
unsigned int prev_size;
|
||||
TokenPtr prev_parent = parent;
|
||||
|
||||
if (new_id != -1) {
|
||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||
}
|
||||
|
||||
prev_size = parent->children.size();
|
||||
|
||||
if (iter != end) {
|
||||
std::pair<T_Iter, bool> result = lhs(iter, end, parent);
|
||||
|
||||
if (result.second) {
|
||||
result = rhs(result.first, end, parent);
|
||||
if (result.second) {
|
||||
if (new_id != -1) {
|
||||
|
||||
parent->filename = (*iter)->filename;
|
||||
parent->start = (*iter)->start;
|
||||
if (result.first == iter) {
|
||||
parent->end = (*iter)->start;
|
||||
}
|
||||
else {
|
||||
parent->end = (*(result.first - 1))->end;
|
||||
}
|
||||
|
||||
prev_parent->children.push_back(parent);
|
||||
}
|
||||
return std::pair<Token_Iterator, bool>(result.first, true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (parent->children.size() != prev_size) {
|
||||
//Clear out the partial matches
|
||||
parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
|
||||
}
|
||||
|
||||
return std::pair<T_Iter, bool>(iter, false);
|
||||
}
|
||||
|
||||
// Kleene star: applies `rule` zero or more times and always succeeds.
// When new_id != -1 the repetitions' children are collected under a
// synthesized node that is attached to the original parent.
template <typename T_Iter, typename R_Type>
std::pair<T_Iter, bool> Kleene_Rule
    (T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {

    TokenPtr prev_parent = parent;
    std::pair<T_Iter, bool> result;
    T_Iter new_iter = iter;

    if (iter != end) {
        if (new_id != -1) {
            parent = TokenPtr(new Token("", new_id, parent->filename));
        }

        // Repeat until the inner rule stops matching; the final
        // (failed) application leaves result.first at the last
        // successfully consumed position.
        result.second = true;
        while (result.second == true) {
            result = rule(new_iter, end, parent);
            new_iter = result.first;
        }

        if (new_id != -1) {
            // Stamp the synthesized node with the matched range.
            parent->filename = (*iter)->filename;
            parent->start = (*iter)->start;
            if (result.first == iter) {
                parent->end = (*iter)->start;
            }
            else {
                parent->end = (*(result.first - 1))->end;
            }

            prev_parent->children.push_back(parent);
        }
        return std::pair<T_Iter, bool>(result.first, true);
    }
    else {
        // Zero repetitions at end of input still counts as a match.
        return std::pair<T_Iter, bool>(iter, true);
    }
}
|
||||
|
||||
// One-or-more: like Kleene_Rule but the first application of `rule`
// must succeed; otherwise partial matches are rolled back and the
// whole rule fails at the original position.
template <typename T_Iter, typename R_Type>
std::pair<T_Iter, bool> Plus_Rule
    (T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {

    unsigned int prev_size;
    TokenPtr prev_parent = parent;
    T_Iter loop_iter = iter;

    if (new_id != -1) {
        parent = TokenPtr(new Token("", new_id, parent->filename));
    }

    // Remember the child count so partial matches can be rolled back.
    prev_size = parent->children.size();

    if (iter != end) {
        std::pair<T_Iter, bool> result;
        result = rule(loop_iter, end, parent);

        if (result.second == true) {
            // First match succeeded; greedily consume further matches.
            loop_iter = result.first;
            result.second = true;
            while ((loop_iter != end) && (result.second == true)) {
                result = rule(loop_iter, end, parent);
                loop_iter = result.first;
            }

            if (new_id != -1) {
                // Stamp the synthesized node with the matched range.
                parent->filename = (*iter)->filename;
                parent->start = (*iter)->start;
                if (result.first == iter) {
                    parent->end = (*iter)->start;
                }
                else {
                    parent->end = (*(result.first - 1))->end;
                }

                prev_parent->children.push_back(parent);
            }

            return std::pair<T_Iter, bool>(result.first, true);
        }
    }

    if (parent->children.size() != prev_size) {
        //Clear out the partial matches
        parent->children.erase(parent->children.begin() + prev_size, parent->children.end());
    }

    return std::pair<T_Iter, bool>(iter, false);
}
|
||||
|
||||
// Optional: applies `rule` at most once and always succeeds, whether
// or not the inner rule matched.  When new_id != -1 a synthesized
// node is attached to the original parent (even for an empty match).
template <typename T_Iter, typename R_Type>
std::pair<T_Iter, bool> Optional_Rule
    (T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {

    TokenPtr prev_parent = parent;
    T_Iter new_iter = iter;

    if (iter != end) {
        if (new_id != -1) {
            parent = TokenPtr(new Token("", new_id, parent->filename));
        }

        // Try the inner rule once; its success or failure does not
        // affect the overall result.
        std::pair<T_Iter, bool> result;
        result.second = true;
        if ((new_iter != end) && (result.second == true)) {
            result = rule(new_iter, end, parent);
            new_iter = result.first;
        }

        if (new_id != -1) {
            // Stamp the synthesized node with the matched range.
            parent->filename = (*iter)->filename;
            parent->start = (*iter)->start;
            if (result.first == iter) {
                parent->end = (*iter)->start;
            }
            else {
                parent->end = (*(result.first - 1))->end;
            }

            prev_parent->children.push_back(parent);
        }
        return std::pair<T_Iter, bool>(result.first, true);
    }
    else {
        // Nothing to consume; an optional match of nothing succeeds.
        return std::pair<T_Iter, bool>(iter, true);
    }
}
|
||||
|
||||
template <typename T_Iter, typename R_Type>
|
||||
std::pair<T_Iter, bool> Epsilon_Rule
|
||||
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||
|
||||
TokenPtr prev_parent = parent;
|
||||
T_Iter new_iter = iter;
|
||||
|
||||
if (new_id != -1) {
|
||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||
}
|
||||
|
||||
std::pair<T_Iter, bool> result;
|
||||
if ((new_iter != end)) {
|
||||
result = rule(new_iter, end, parent);
|
||||
new_iter = result.first;
|
||||
}
|
||||
|
||||
if (new_id != -1) {
|
||||
parent->filename = (*iter)->filename;
|
||||
parent->start = (*iter)->start;
|
||||
if (result.first == iter) {
|
||||
parent->end = (*iter)->start;
|
||||
}
|
||||
else {
|
||||
parent->end = (*(result.first - 1))->end;
|
||||
}
|
||||
|
||||
prev_parent->children.push_back(parent);
|
||||
}
|
||||
|
||||
return std::pair<T_Iter, bool>(iter, result.second);
|
||||
}
|
||||
|
||||
template <typename T_Iter, typename R_Type>
|
||||
std::pair<T_Iter, bool> Wrap_Rule
|
||||
(T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {
|
||||
|
||||
TokenPtr prev_parent = parent;
|
||||
T_Iter new_iter = iter;
|
||||
|
||||
if (new_id != -1) {
|
||||
parent = TokenPtr(new Token("", new_id, parent->filename));
|
||||
}
|
||||
|
||||
std::pair<T_Iter, bool> result;
|
||||
if ((new_iter != end)) {
|
||||
result = rule(new_iter, end, parent);
|
||||
new_iter = result.first;
|
||||
}
|
||||
|
||||
if (new_id != -1) {
|
||||
parent->filename = (*iter)->filename;
|
||||
parent->start = (*iter)->start;
|
||||
if (result.first == iter) {
|
||||
parent->end = (*iter)->start;
|
||||
}
|
||||
else {
|
||||
parent->end = (*(result.first - 1))->end;
|
||||
}
|
||||
|
||||
prev_parent->children.push_back(parent);
|
||||
}
|
||||
|
||||
return std::pair<T_Iter, bool>(result.first, result.second);
|
||||
}
|
||||
|
||||
// Runs `rule` with its keep flag cleared, so whatever it matches is
// consumed but not attached to the parse tree.  NOTE(review): this
// mutates the shared RuleImpl, so the flag change is permanent for
// every user of that rule object -- confirm that is intended.
template <typename T_Iter, typename R_Type>
std::pair<T_Iter, bool> Ignore_Rule
    (T_Iter iter, T_Iter end, TokenPtr parent, bool keep, int new_id, R_Type rule) {

    rule.impl->keep = false;

    return rule(iter, end, parent);
}
|
||||
|
||||
// Handle to a grammar rule.  Rules share their RuleImpl through a
// shared_ptr, which is what allows recursive grammars: a Rule can be
// referenced inside a production before it is assigned.
struct Rule {
    RuleImplPtr impl;

    Rule() : impl(new RuleImpl()) {}
    Rule(int id) : impl(new RuleImpl(id)) {}
    Rule(RuleFun fun) : impl(new RuleImpl(fun)) {}
    Rule(RuleFun fun, bool keep) : impl(new RuleImpl(fun, keep)) {}

    // Apply this rule to a token range.
    std::pair<Token_Iterator, bool> operator()(Token_Iterator iter, Token_Iterator end, TokenPtr parent) {
        return (*impl)(iter, end, parent);
    }

    // Copy the right-hand side's implementation into our shared impl
    // (so existing references see the new production) while keeping
    // this rule's own node id.
    Rule &operator=(const Rule &rule) {
        int prev_id = impl->new_id;
        *impl = *(rule.impl);
        impl->new_id = prev_id;

        return *this;
    }

};
|
||||
|
||||
// Sequencing: `lhs >> rhs` matches lhs followed by rhs (And_Rule).
inline Rule operator>>(const Rule &lhs, const Rule &rhs) {
    return Rule(boost::bind(And_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, lhs, rhs));
}
|
||||
|
||||
// Alternation: `lhs | rhs` tries lhs, then rhs (Or_Rule).
inline Rule operator|(const Rule &lhs, const Rule &rhs) {
    return Rule(boost::bind(Or_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, lhs, rhs));
}
|
||||
|
||||
// Kleene star: `*rule` matches zero or more repetitions (Kleene_Rule).
inline Rule operator*(const Rule &operand) {
    return Rule(boost::bind(Kleene_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, operand));
}
|
||||
|
||||
// One-or-more: `+rule` matches at least one repetition (Plus_Rule).
inline Rule operator+(const Rule &operand) {
    return Rule(boost::bind(Plus_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, operand));
}
|
||||
|
||||
// Optional: `~rule` matches zero or one occurrence (Optional_Rule).
inline Rule operator~(const Rule &operand) {
    return Rule(boost::bind(Optional_Rule<Token_Iterator, Rule>, _1, _2, _3, _4, _5, operand));
}
|
||||
|
||||
|
||||
// Generic factory for leaf rules: binds a rule function and its extra
// parameter (a string, token id, or inner Rule) and converts to Rule
// on demand.  Instantiated below as Epsilon/Wrap/Ign/Id/Str.
template<typename ItrType, typename ParamType,
    std::pair<ItrType,bool> (*Function)(ItrType, ItrType, TokenPtr, bool, int, ParamType)>
struct Rule_Builder
{
    Rule_Builder(ParamType p, bool t_keep = true)
        : m_p(p), m_keep(t_keep)
    {

    }

    // Auto conversion operator is the glue here.
    // In one sense this option cleans up the impl quite a bit, with much fewer code
    // repeats in all the rule builders.
    // In another sense, it might take a couple of tries to get it right.
    operator Rule() {
        return Rule(boost::bind(Function, _1, _2, _3, _4, _5, m_p), m_keep);
    }

    ParamType m_p;   // the bound extra parameter for Function
    bool m_keep;     // forwarded to the Rule's keep flag
};
|
||||
|
||||
|
||||
// Convenience names for the common leaf-rule builders used in grammars.
typedef Rule_Builder<Token_Iterator, Rule, &Epsilon_Rule<Token_Iterator, Rule> > Epsilon;
typedef Rule_Builder<Token_Iterator, Rule, &Wrap_Rule<Token_Iterator, Rule> > Wrap;
typedef Rule_Builder<Token_Iterator, Rule, &Ignore_Rule<Token_Iterator, Rule> > Ign;
typedef Rule_Builder<Token_Iterator, int, &Id_Rule<Token_Iterator> > Id;
typedef Rule_Builder<Token_Iterator, const std::string&, &String_Rule<Token_Iterator> > Str;
|
||||
}
|
||||
|
||||
#endif /* LANGKIT_PARSER_HPP_ */
|
205
langkit/main.cpp
205
langkit/main.cpp
@ -1,205 +0,0 @@
|
||||
// This file is distributed under the BSD License.
|
||||
// See LICENSE.TXT for details.
|
||||
|
||||
#include <boost/bind.hpp>
|
||||
|
||||
#include <iostream>
|
||||
#include <map>
|
||||
#include <fstream>
|
||||
|
||||
#include "langkit_lexer.hpp"
|
||||
#include "langkit_parser.hpp"
|
||||
|
||||
// Numeric token/parse-node types.  The inline //N comments mark the
// zero-based index of the preceding enumerator; the order must stay in
// sync with the name table in tokentype_to_string().
class TokenType { public: enum Type { File, Whitespace, Identifier, Number, Operator, Parens_Open, Parens_Close, //6
    Square_Open, Square_Close, Curly_Open, Curly_Close, Comma, Quoted_String, Single_Quoted_String, Carriage_Return, Semicolon, //15
    Function_Def, Scoped_Block, Statement, Equation, Return, Expression, Term, Factor, Add, Subtract, Multiply, Divide, Negate, Comment, //29
    Value, Fun_Call }; };
|
||||
|
||||
// Map a TokenType::Type value to its printable name.  The table must
// stay in sync with the TokenType enum.  Returns "Unknown" for
// out-of-range values -- the original indexed the array unchecked,
// which was undefined behavior for unexpected identifiers.
const char *tokentype_to_string(int tokentype) {
    // static: built once instead of on every call.
    static const char *token_types[] = {"File", "Whitespace", "Identifier", "Number", "Operator", "Parens_Open", "Parens_Close", //6
        "Square_Open", "Square_Close", "Curly_Open", "Curly_Close", "Comma", "Quoted_String", "Single_Quoted_String", "Carriage_Return", "Semicolon", //15
        "Function_Def", "Scoped_Block", "Statement", "Equation", "Return", "Expression", "Term", "Factor", "Add", "Subtract", "Multiply", "Divide", "Negate", "Comment", //29
        "Value", "Fun_Call" };

    if (tokentype < 0 || tokentype >= (int)(sizeof(token_types) / sizeof(token_types[0]))) {
        return "Unknown";
    }

    return token_types[tokentype];
}
|
||||
|
||||
// Recursively pretty-print one token (and its children) to stdout:
// text, type name, file, and start/end source range, indenting each
// nesting level by appending to `prepend`.
void debug_print(langkit::TokenPtr token, std::string prepend) {
    std::cout << prepend << "Token: " << token->text << "(" << tokentype_to_string(token->identifier) << ") @ " << token->filename
        << ": (" << token->start.line << ", " << token->start.column << ") to ("
        << token->end.line << ", " << token->end.column << ") " << std::endl;

    for (unsigned int i = 0; i < token->children.size(); ++i) {
        debug_print(token->children[i], prepend + " ");
    }
}
|
||||
|
||||
// Dump every top-level token (and, via the single-token overload, its
// children) to stdout with no initial indentation.
void debug_print(std::vector<langkit::TokenPtr> &tokens) {
    typedef std::vector<langkit::TokenPtr>::size_type index_type;
    for (index_type idx = 0; idx != tokens.size(); ++idx) {
        debug_print(tokens[idx], "");
    }
}
|
||||
|
||||
// Read the entire contents of `filename` into a string.  Exits the
// process with a failure status when the file cannot be opened.
//
// Fixes vs. original: exit(0) on failure falsely signalled success;
// infile.read(&v[0], ...) on an empty vector was undefined behavior
// for zero-byte files; and the intermediate .c_str() round-trip
// silently truncated the result at the first embedded NUL byte.
std::string load_file(const char *filename) {
    // ios::ate opens at end-of-file so tellg() yields the file size.
    std::ifstream infile (filename, std::ios::in | std::ios::ate);

    if (!infile.is_open()) {
        std::cerr << "Can not open " << filename << std::endl;
        exit(1);
    }

    std::streampos size = infile.tellg();
    infile.seekg(0, std::ios::beg);

    if (size <= 0) {
        return std::string();
    }

    std::vector<char> v(size);
    infile.read(&v[0], size);

    return std::string(v.begin(), v.end());
}
|
||||
|
||||
// Run the (experimental) grammar over a lexed token stream and print
// the resulting parse tree.  Most alternative grammars are kept here
// commented out as development scratch work; the live grammar is the
// single `Str("x") >> Id(Semicolon)` rule below.
void parse(std::vector<langkit::TokenPtr> &tokens, const char *filename) {
    using namespace langkit;

    /*
    Rule lhs;
    Rule rhs;
    Rule rule = lhs >> rhs;
    lhs = Str("def", true);
    rhs = Id(TokenType::Identifier, true);

    //Rule rule(TokenType::Function_Def);
    //rule = Str("def") | Str("int");

    //Rule rule = Str("def", false) >> Id(TokenType::Identifier);
    */

    //Example: "def add(x,y) { return x+y }"

    // Rule handles are declared up front so recursive productions can
    // refer to each other before being assigned.
    Rule params;
    Rule block(TokenType::Scoped_Block);
    //Rule rule(TokenType::Function_Def);
    Rule statement(TokenType::Statement);
    Rule return_statement(TokenType::Return);
    Rule expression(TokenType::Expression);
    Rule term(TokenType::Term);
    Rule factor(TokenType::Factor);
    Rule negate(TokenType::Negate);
    Rule funcall(TokenType::Fun_Call);
    Rule value;

    /*
    Rule rule = Ign(Str("def")) >> Id(TokenType::Identifier) >> ~(Ign(Str("(")) >> ~params >> Ign(Str(")"))) >> block;
    params = Id(TokenType::Identifier) >> *(Ign(Str(",")) >> Id(TokenType::Identifier));
    block = Ign(Str("{")) >> ~return_statement >> Ign(Str("}"));
    return_statement = Ign(Str("return")) >> expression;
    */
    /*
    Rule rule(TokenType::Equation);
    rule = Wrap(Str("break"));

    std::cout << "Check: " << rule.impl->new_id << std::endl;
    */
    /*
    Rule rule = *(expression >> *Ign(Id(TokenType::Semicolon)));
    expression = term >> *((Str("+") >> term) | (Str("-") >> term));
    term = factor >> *((Str("*") >> factor) | (Str("/") >> factor));
    factor = value | negate | (Ign(Str("+")) >> value);
    funcall = Id(TokenType::Identifier) >> Ign(Id(TokenType::Parens_Open)) >> ~(expression >> *(Ign(Str("," )) >> expression)) >> Ign(Id(TokenType::Parens_Close));
    negate = Ign(Str("-")) >> factor;

    value = funcall | Id(TokenType::Identifier) | Id(TokenType::Number) | Id(TokenType::Quoted_String) | Id(TokenType::Single_Quoted_String);
    */

    // Currently-active grammar: the literal "x" followed by a
    // semicolon token.
    Rule rule = Str("x") >> Id(TokenType::Semicolon);

    /*
    Rule rule;
    Rule rule2;
    rule = Nop(rule2);
    rule2 = Str("Bob");
    */

    /*
    Rule rule(3);
    rule = Ign(Str("Bob")) >> Str("Fred");
    */

    /*
    statement = equation;
    equation = Id(TokenType::Identifier) >> Str("+") >> Id(TokenType::Identifier);
    */

    /*
    Rule rule(TokenType::Function_Def);
    rule = +Str("Bob");
    */

    // Apply the grammar once from the start of the token stream,
    // collecting matches under a synthetic root node.
    Token_Iterator iter = tokens.begin(), end = tokens.end();
    TokenPtr parent(new Token("Root", TokenType::File, filename));

    std::pair<Token_Iterator, bool> results = rule(iter, end, parent);

    /*
    while (results.second) {
        results = rule(results.first + 1, end, parent);
        //debug_print(parent, "");
    }
    */

    if (results.second) {
        std::cout << "Parse successful: " << std::endl;
        debug_print(parent, "");
    }
    else {
        std::cout << "Parse failed: " << std::endl;
        debug_print(parent, "");
    }
}
|
||||
|
||||
|
||||
// Entry point: configures the lexer's skip/separator/comment patterns
// and token patterns, then either runs an interactive expression loop
// (no argument) or lexes+parses the file named by argv[1].
int main(int argc, char *argv[]) {
    using namespace langkit;
    std::string input;

    Lexer lexer;
    lexer.set_skip(Pattern("[ \\t]+", TokenType::Whitespace));
    lexer.set_line_sep(Pattern("\\n|\\r\\n", TokenType::Carriage_Return));
    lexer.set_command_sep(Pattern(";|\\r\\n|\\n", TokenType::Semicolon));
    lexer.set_multiline_comment(Pattern("/\\*", TokenType::Comment), Pattern("\\*/", TokenType::Comment));
    lexer.set_singleline_comment(Pattern("//", TokenType::Comment));

    // Token patterns, tried in registration order (first match wins).
    lexer << Pattern("[A-Za-z_]+", TokenType::Identifier);
    lexer << Pattern("[0-9]+(\\.[0-9]+)?", TokenType::Number);
    lexer << Pattern("[!@#$%^&*\\-+=<>]+|/[!@#$%^&\\-+=<>]*", TokenType::Operator);
    lexer << Pattern("\\(", TokenType::Parens_Open);
    lexer << Pattern("\\)", TokenType::Parens_Close);
    lexer << Pattern("\\[", TokenType::Square_Open);
    lexer << Pattern("\\]", TokenType::Square_Close);
    lexer << Pattern("\\{", TokenType::Curly_Open);
    lexer << Pattern("\\}", TokenType::Curly_Close);
    lexer << Pattern(",", TokenType::Comma);
    lexer << Pattern("\"(?:[^\"\\\\]|\\\\.)*\"", TokenType::Quoted_String);
    lexer << Pattern("'(?:[^'\\\\]|\\\\.)*'", TokenType::Single_Quoted_String);

    if (argc < 2) {
        // Interactive mode: lex and parse each line until "quit".
        std::cout << "Expression> ";
        std::getline(std::cin, input);
        while (input != "quit") {
            std::vector<TokenPtr> tokens = lexer.lex(input, "INPUT");
            debug_print(tokens);
            parse(tokens, "INPUT");

            std::cout << "Expression> ";
            std::getline(std::cin, input);
        }
    }
    else {
        // File mode: lex and parse the whole file given on the
        // command line.
        std::vector<TokenPtr> tokens = lexer.lex(load_file(argv[1]), argv[1]);
        debug_print(tokens);
        parse(tokens, argv[1]);
    }
}
|
@ -1,5 +0,0 @@
|
||||
3+4
|
||||
7 + 9
|
||||
|
||||
|
||||
5 + 7
|
@ -1 +0,0 @@
|
||||
def add(x, y) { return x+y }
|
@ -1,8 +0,0 @@
|
||||
// Minimal Boost.Test module for the langkit build.
#define BOOST_TEST_DYN_LINK
#define BOOST_TEST_MODULE boxedcpp_unittests
#include <boost/test/unit_test.hpp>

// Placeholder case: verifies only that the test harness itself runs.
BOOST_AUTO_TEST_CASE( add_operators )
{
    BOOST_CHECK_EQUAL(2, 2);
}
|
Loading…
x
Reference in New Issue
Block a user