First step in moving to all-header implementation
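For context on the title: an "all-header" (header-only) implementation keeps the member-function definitions in the header itself, so consumers only have to include it and there is no separate .cpp to compile or link. A generic sketch of the pattern (illustration only, not this project's code):

    // example_counter.hpp -- a header-only component: the definitions live in
    // the header, so including it is all a consumer has to do (nothing to link).
    #ifndef EXAMPLE_COUNTER_HPP
    #define EXAMPLE_COUNTER_HPP

    struct Counter {
      int value;

      Counter() : value(0) {}

      // Defined inside the class body, hence implicitly inline: the header can
      // be included from many translation units without duplicate-symbol errors.
      void add(int n) { value += n; }
      int get() const { return value; }
    };

    #endif

That is the shape langkit is moving toward below: langkit_lexer.cpp drops out of the build targets and the Lexer definitions migrate into the header.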
@@ -11,10 +11,10 @@ find_package( Boost 1.36.0 COMPONENTS regex unit_test_framework)
if(Boost_FOUND)
  include_directories(${Boost_INCLUDE_DIRS})

  add_executable(langkit_test main.cpp langkit_lexer.cpp langkit_parser.cpp)
  add_executable(langkit_test main.cpp langkit_parser.cpp)
  target_link_libraries(langkit_test ${Boost_LIBRARIES})

  add_library(langkit SHARED langkit_lexer.cpp langkit_parser.cpp)
  add_library(langkit SHARED langkit_parser.cpp)
  target_link_libraries(langkit ${Boost_LIBRARIES})

  add_executable(langkit_unittest unittest.cpp)

@@ -4,136 +4,3 @@
#include <iostream>
#include "langkit_lexer.hpp"

Lexer Lexer::operator<<(const Pattern &p) {
  lex_patterns.push_back(p);
  return *this;
}

std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename) {
  std::vector<Pattern>::iterator iter, end, iter2, end2;
  std::vector<TokenPtr> retval;
  bool found;
  std::string::const_iterator input_iter = input.begin(), input_end = input.end();

  int current_col = 0;
  int current_line = 0;
  boost::match_results<std::string::const_iterator> what;

  while (input_iter != input_end) {
    found = false;

    if (regex_search(input_iter, input_end, what, singleline_comment_pattern.regex, boost::match_continuous)) {
      std::string comment_start(what[0]);
      input_iter += comment_start.size();

      bool found_eol = false;

      while ((!found_eol) && (input_iter != input_end)) {
        boost::match_results<std::string::const_iterator> eol_delim;
        if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
          std::string comment_end(eol_delim[0]);
          input_iter += comment_end.size();
          ++current_line;
          current_col = 0;
          found_eol = true;
          break;
        }
        if ((!found_eol) && (input_iter != input_end)) {
          ++input_iter;
        }
      }
    }
    else if (regex_search(input_iter, input_end, what, multiline_comment_start_pattern.regex, boost::match_continuous)) {
      std::string comment_start(what[0]);
      input_iter += comment_start.size();

      bool found_eoc = false;

      while ((!found_eoc) && (input_iter != input_end)) {
        boost::match_results<std::string::const_iterator> eol_delim;
        if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
          std::string comment_end(eol_delim[0]);
          input_iter += comment_end.size();
          ++current_line;
          current_col = 0;
        }
        boost::match_results<std::string::const_iterator> eoc_delim;
        if (regex_search(input_iter, input_end, eoc_delim, multiline_comment_end_pattern.regex, boost::match_continuous)) {
          std::string comment_end(eoc_delim[0]);
          input_iter += comment_end.size();
          current_col += comment_end.size();
          found_eoc = true;
          break;
        }
        if ((!found_eoc) && (input_iter != input_end)) {
          ++input_iter;
          ++current_col;
        }
      }

      if (!found_eoc) {
        std::cout << "Incomplete comment block! Add exceptions!" << std::endl;
        return retval;
      }
    }
    else if (regex_search(input_iter, input_end, what, skip_pattern.regex, boost::match_continuous)) {
      std::string whitespace(what[0]);
      input_iter += whitespace.size();
      current_col += whitespace.size();
      found = true;
    }
    else if (regex_search(input_iter, input_end, what, line_sep_pattern.regex, boost::match_continuous)) {
      const std::string cr(what[0]);

      boost::match_results<std::string::const_iterator> if_delim;
      if (regex_search(cr.begin(), cr.end(), if_delim, command_sep_pattern.regex, boost::match_continuous)) {
        TokenPtr t(new Token(if_delim[0], command_sep_pattern.identifier, filename));
        t->start.column = current_col;
        t->start.line = current_line;
        current_col += t->text.size();
        t->end.column = current_col;
        t->end.line = current_line;
        retval.push_back(t);
      }

      input_iter += cr.size();
      ++current_line;
      current_col = 0;
      found = true;
    }
    else if (regex_search(input_iter, input_end, what, command_sep_pattern.regex, boost::match_continuous)) {
      TokenPtr t(new Token(what[0], command_sep_pattern.identifier, filename));
      t->start.column = current_col;
      t->start.line = current_line;
      current_col += t->text.size();
      t->end.column = current_col;
      t->end.line = current_line;
      retval.push_back(t);
      input_iter += t->text.size();
      found = true;
    }
    else {
      for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
        if (regex_search(input_iter, input_end, what, iter->regex, boost::match_continuous)) {
          TokenPtr t(new Token(what[0], iter->identifier, filename));
          t->start.column = current_col;
          t->start.line = current_line;
          current_col += t->text.size();
          t->end.column = current_col;
          t->end.line = current_line;
          retval.push_back(t);
          input_iter += t->text.size();
          found = true;
          break;
        }
      }

      if (!found) {
        const std::string err(input_iter, input_end);
        std::cout << "Unknown string at: " << err << std::endl;
        return retval;
      }
    }
  }
  return retval;
}

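A note on the implementation removed above (and on its new home in the header below): the tokenizing loop relies on boost::regex_search with the boost::match_continuous flag, which only accepts a match that starts exactly at the current iterator position; the lexer then advances the iterator by the length of the matched text. A self-contained sketch of that technique with simplified stand-in patterns (not the project's Pattern class):

    #include <boost/regex.hpp>
    #include <iostream>
    #include <string>

    int main() {
      const std::string input = "foo 42 bar";
      // simplified stand-ins for the lexer's Pattern regexes
      const boost::regex word("[a-zA-Z]+"), number("[0-9]+"), space("\\s+");

      std::string::const_iterator it = input.begin(), end = input.end();
      boost::match_results<std::string::const_iterator> what;

      while (it != end) {
        // match_continuous: the match must begin exactly at 'it'
        if (boost::regex_search(it, end, what, word, boost::match_continuous)) {
          std::cout << "word:   " << what[0] << '\n';
        } else if (boost::regex_search(it, end, what, number, boost::match_continuous)) {
          std::cout << "number: " << what[0] << '\n';
        } else if (boost::regex_search(it, end, what, space, boost::match_continuous)) {
          // skip whitespace, emit nothing
        } else {
          std::cout << "unknown input at: " << std::string(it, end) << '\n';
          break;
        }
        it += what[0].length();  // advance past the matched text
      }
    }

As in the CMake hunk above, this needs to link against Boost.Regex (${Boost_LIBRARIES}).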
@@ -48,8 +48,139 @@ struct Lexer {
  Pattern multiline_comment_end_pattern;
  Pattern singleline_comment_pattern;

  Lexer operator<<(const Pattern &p);
  std::vector<TokenPtr> lex(const std::string &input, const char *fname);
  Lexer Lexer::operator<<(const Pattern &p) {
    lex_patterns.push_back(p);
    return *this;
  }

  std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename) {
    std::vector<Pattern>::iterator iter, end, iter2, end2;
    std::vector<TokenPtr> retval;
    bool found;
    std::string::const_iterator input_iter = input.begin(), input_end = input.end();

    int current_col = 0;
    int current_line = 0;
    boost::match_results<std::string::const_iterator> what;

    while (input_iter != input_end) {
      found = false;

      if (regex_search(input_iter, input_end, what, singleline_comment_pattern.regex, boost::match_continuous)) {
        std::string comment_start(what[0]);
        input_iter += comment_start.size();

        bool found_eol = false;

        while ((!found_eol) && (input_iter != input_end)) {
          boost::match_results<std::string::const_iterator> eol_delim;
          if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
            std::string comment_end(eol_delim[0]);
            input_iter += comment_end.size();
            ++current_line;
            current_col = 0;
            found_eol = true;
            break;
          }
          if ((!found_eol) && (input_iter != input_end)) {
            ++input_iter;
          }
        }
      }
      else if (regex_search(input_iter, input_end, what, multiline_comment_start_pattern.regex, boost::match_continuous)) {
        std::string comment_start(what[0]);
        input_iter += comment_start.size();

        bool found_eoc = false;

        while ((!found_eoc) && (input_iter != input_end)) {
          boost::match_results<std::string::const_iterator> eol_delim;
          if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
            std::string comment_end(eol_delim[0]);
            input_iter += comment_end.size();
            ++current_line;
            current_col = 0;
          }
          boost::match_results<std::string::const_iterator> eoc_delim;
          if (regex_search(input_iter, input_end, eoc_delim, multiline_comment_end_pattern.regex, boost::match_continuous)) {
            std::string comment_end(eoc_delim[0]);
            input_iter += comment_end.size();
            current_col += comment_end.size();
            found_eoc = true;
            break;
          }
          if ((!found_eoc) && (input_iter != input_end)) {
            ++input_iter;
            ++current_col;
          }
        }

        if (!found_eoc) {
          std::cout << "Incomplete comment block! Add exceptions!" << std::endl;
          return retval;
        }
      }
      else if (regex_search(input_iter, input_end, what, skip_pattern.regex, boost::match_continuous)) {
        std::string whitespace(what[0]);
        input_iter += whitespace.size();
        current_col += whitespace.size();
        found = true;
      }
      else if (regex_search(input_iter, input_end, what, line_sep_pattern.regex, boost::match_continuous)) {
        const std::string cr(what[0]);

        boost::match_results<std::string::const_iterator> if_delim;
        if (regex_search(cr.begin(), cr.end(), if_delim, command_sep_pattern.regex, boost::match_continuous)) {
          TokenPtr t(new Token(if_delim[0], command_sep_pattern.identifier, filename));
          t->start.column = current_col;
          t->start.line = current_line;
          current_col += t->text.size();
          t->end.column = current_col;
          t->end.line = current_line;
          retval.push_back(t);
        }

        input_iter += cr.size();
        ++current_line;
        current_col = 0;
        found = true;
      }
      else if (regex_search(input_iter, input_end, what, command_sep_pattern.regex, boost::match_continuous)) {
        TokenPtr t(new Token(what[0], command_sep_pattern.identifier, filename));
        t->start.column = current_col;
        t->start.line = current_line;
        current_col += t->text.size();
        t->end.column = current_col;
        t->end.line = current_line;
        retval.push_back(t);
        input_iter += t->text.size();
        found = true;
      }
      else {
        for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
          if (regex_search(input_iter, input_end, what, iter->regex, boost::match_continuous)) {
            TokenPtr t(new Token(what[0], iter->identifier, filename));
            t->start.column = current_col;
            t->start.line = current_line;
            current_col += t->text.size();
            t->end.column = current_col;
            t->end.line = current_line;
            retval.push_back(t);
            input_iter += t->text.size();
            found = true;
            break;
          }
        }

        if (!found) {
          const std::string err(input_iter, input_end);
          std::cout << "Unknown string at: " << err << std::endl;
          return retval;
        }
      }
    }
    return retval;
  }

  void set_skip(const Pattern &p) {
    skip_pattern = p;

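Two details of the definitions just moved into the class body are worth flagging (an observation, not part of the commit): standard C++ does not allow the extra `Lexer::` qualification on a member defined inside its own class, and because `operator<<` returns `Lexer` by value, a chained `lexer << a << b` appends `b` to a temporary copy rather than to `lexer`. A minimal sketch of the conventional unqualified, reference-returning form, using a stand-in Pattern type rather than the project's:

    #include <vector>

    struct Pattern {};  // stand-in for the project's Pattern

    struct Lexer {
      std::vector<Pattern> lex_patterns;

      // No Lexer:: qualifier inside the class, and *this is returned by
      // reference so that every link in a chained call mutates the same Lexer.
      Lexer &operator<<(const Pattern &p) {
        lex_patterns.push_back(p);
        return *this;
      }
    };

    int main() {
      Lexer lexer;
      lexer << Pattern() << Pattern();  // both patterns land in lexer
      return lexer.lex_patterns.size() == 2 ? 0 : 1;
    }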
@@ -199,6 +199,7 @@ Rule build_parser_rules() {
  Rule source_elem;
  Rule source_elems;
  Rule statement_list;
  Rule paren_block;

  Rule rule = *(Ign(Id(TokenType::Semicolon))) >> source_elems >> *(Ign(Id(TokenType::Semicolon)));

@@ -216,6 +217,7 @@ Rule build_parser_rules() {
    block;
  params = Id(TokenType::Identifier) >> *(Ign(Str(",")) >> Id(TokenType::Identifier));
  block = *(Ign(Id(TokenType::Semicolon))) >> Ign(Id(TokenType::Curly_Open)) >> *(Ign(Id(TokenType::Semicolon))) >> ~statement_list >> *(Ign(Id(TokenType::Semicolon))) >> Ign(Id(TokenType::Curly_Close));

  equation = *(((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("=")) |
    ((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("+=")) |
    ((vardecl | arraycall | Id(TokenType::Identifier)) >> Str("-=")) |
@@ -227,18 +229,21 @@ Rule build_parser_rules() {
  expression = term >> *((Str("+") >> term) | (Str("-") >> term));
  term = factor >> *((Str("*") >> factor) | (Str("/") >> factor));
  factor = methodcall | arraycall | value | negate | prefix | (Ign(Str("+")) >> value);
  value = vardecl | arrayinit | block | paren_block | return_statement | break_statement |
    funcall | Id(TokenType::Identifier) | Id(TokenType::Real_Number) | Id(TokenType::Integer) | Id(TokenType::Quoted_String) |
    Id(TokenType::Single_Quoted_String) ;

  funcall = Id(TokenType::Identifier) >> Ign(Id(TokenType::Parens_Open)) >> ~(boolean >> *(Ign(Str("," )) >> boolean)) >> Ign(Id(TokenType::Parens_Close));
  methodcall = value >> +(Ign(Str(".")) >> funcall);
  negate = Ign(Str("-")) >> boolean;
  prefix = (Str("++") >> (boolean | arraycall)) | (Str("--") >> (boolean | arraycall));
  arraycall = value >> +((Ign(Id(TokenType::Square_Open)) >> boolean >> Ign(Id(TokenType::Square_Close))));
  value = vardecl | arrayinit | block | (Ign(Id(TokenType::Parens_Open)) >> equation >> Ign(Id(TokenType::Parens_Close))) | return_statement | break_statement |
    funcall | Id(TokenType::Identifier) | Id(TokenType::Real_Number) | Id(TokenType::Integer) | Id(TokenType::Quoted_String) |
    Id(TokenType::Single_Quoted_String) ;

  arrayinit = Ign(Id(TokenType::Square_Open)) >> ~(boolean >> *(Ign(Str(",")) >> boolean)) >> Ign(Id(TokenType::Square_Close));
  vardecl = Ign(Str("var")) >> Id(TokenType::Identifier);
  return_statement = Ign(Str("return")) >> ~boolean;
  break_statement = Wrap(Ign(Str("break")));
  paren_block = (Ign(Id(TokenType::Parens_Open)) >> equation >> Ign(Id(TokenType::Parens_Close)));

  return rule;
}
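The grammar above is written in an operator-overloading DSL: `>>` sequences sub-rules, `|` chooses between alternatives, `*` and `+` repeat (zero-or-more and one-or-more, by convention), and by the look of it `~` marks an optional part while `Ign(...)` discards a matched token. A toy sketch of how such a rule DSL can be assembled with overloaded operators (generic illustration; none of these names are the project's actual classes):

    #include <iostream>
    #include <memory>
    #include <string>

    // Toy rule tree: each combinator just records its shape so it can be printed.
    struct Rule {
      std::string kind;   // "tok", "seq", "alt", or "many"
      std::string name;   // token name for leaves
      std::shared_ptr<Rule> lhs, rhs;
    };

    Rule Tok(const std::string &name) { return Rule{"tok", name, nullptr, nullptr}; }

    Rule operator>>(const Rule &a, const Rule &b) {        // sequence
      return Rule{"seq", "", std::make_shared<Rule>(a), std::make_shared<Rule>(b)};
    }
    Rule operator|(const Rule &a, const Rule &b) {         // alternative
      return Rule{"alt", "", std::make_shared<Rule>(a), std::make_shared<Rule>(b)};
    }
    Rule operator*(const Rule &a) {                        // zero-or-more, as in *(Ign(...))
      return Rule{"many", "", std::make_shared<Rule>(a), nullptr};
    }

    void print(const Rule &r) {
      if (r.kind == "tok")  { std::cout << r.name; return; }
      if (r.kind == "many") { std::cout << "*("; print(*r.lhs); std::cout << ")"; return; }
      std::cout << "(";
      print(*r.lhs);
      std::cout << (r.kind == "seq" ? " >> " : " | ");
      print(*r.rhs);
      std::cout << ")";
    }

    int main() {
      Rule rule = *Tok("Semicolon") >> (Tok("Identifier") | Tok("Number"));
      print(rule);  // prints: (*(Semicolon) >> (Identifier | Number))
      std::cout << '\n';
    }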