Added comment lexing. Removed previous optimization. Flattened lex grammar.

Jonathan Turner
2009-06-01 13:40:24 +00:00
parent f692834fa8
commit 307e557e5b
3 changed files with 127 additions and 123 deletions
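The "previous optimization" this commit removes combined every lex pattern into one alternation regex and then checked which capture group matched to recover the pattern. The following is a minimal standalone sketch of that technique, not the project's code: it uses std::regex in place of boost::regex, and the pattern table, identifiers, and input are made up for illustration.

#include <iostream>
#include <regex>
#include <string>
#include <utility>
#include <vector>

int main() {
    // Hypothetical pattern table: regex string plus a token identifier.
    // (Assumes the individual patterns contain no capture groups of their own,
    // since the group index is what identifies the pattern that matched.)
    std::vector<std::pair<std::string, std::string>> patterns = {
        {"[0-9]+", "number"},
        {"[A-Za-z_]+", "identifier"},
        {"\\+|-", "operator"}
    };

    // Build "(p1)|(p2)|(p3)" so a single search tries every pattern at once.
    std::string master;
    for (std::size_t i = 0; i < patterns.size(); ++i) {
        if (i > 0) master += "|";
        master += "(" + patterns[i].first + ")";
    }
    std::regex master_re(master);

    std::string input = "foo+42";
    auto it = input.cbegin();
    std::smatch what;
    // match_continuous plays the role of boost::match_continuous here: the
    // match must begin exactly at the current position.
    while (it != input.cend() &&
           std::regex_search(it, input.cend(), what, master_re,
                             std::regex_constants::match_continuous)) {
        for (std::size_t i = 0; i < patterns.size(); ++i) {
            if (what[i + 1].matched) {   // the group that captured tells us which pattern fired
                std::cout << patterns[i].second << ": " << what[i + 1] << "\n";
                break;
            }
        }
        it += what[0].length();
    }
    return 0;
}

The trade-off is one big regex and a group-index scan per token versus the simpler per-pattern loop the diff below goes back to.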


@@ -10,53 +10,112 @@ Lexer Lexer::operator<<(const Pattern &p) {
}
std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename) {
std::vector<Pattern>::iterator iter, end, iter2;
std::vector<Pattern>::iterator iter, end, iter2, end2;
std::vector<TokenPtr> retval;
bool found;
std::string::const_iterator input_iter = input.begin();
std::string::const_iterator input_iter = input.begin(), input_end = input.end();
int current_col = 0;
int current_line = 0;
boost::match_results<std::string::const_iterator> what;
std::string master_lex_pattern;
unsigned int i = 0;
for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
if (i > 0) {
master_lex_pattern += "|";
}
master_lex_pattern += "(" + iter->regex_string + ")";
++i;
}
boost::regex lex_regex(master_lex_pattern);
while (input_iter != input.end()) {
while (input_iter != input_end) {
found = false;
/*
for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
boost::match_results<std::string::const_iterator> what;
if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
TokenPtr t(new Token(what[0], iter->identifier, filename));
if (regex_search(input_iter, input_end, what, singleline_comment_pattern.regex, boost::match_continuous)) {
std::string comment_start(what[0]);
input_iter += comment_start.size();
bool found_eol = false;
while ((!found_eol) && (input_iter != input_end)) {
boost::match_results<std::string::const_iterator> eol_delim;
if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
std::string comment_end(eol_delim[0]);
input_iter += comment_end.size();
++current_line;
current_col = 0;
found_eol = true;
break;
}
if ((!found_eol) && (input_iter != input_end)) {
++input_iter;
}
}
}
else if (regex_search(input_iter, input_end, what, multiline_comment_start_pattern.regex, boost::match_continuous)) {
std::string comment_start(what[0]);
input_iter += comment_start.size();
bool found_eoc = false;
while ((!found_eoc) && (input_iter != input_end)) {
boost::match_results<std::string::const_iterator> eol_delim;
if (regex_search(input_iter, input_end, eol_delim, line_sep_pattern.regex, boost::match_continuous)) {
std::string comment_end(eol_delim[0]);
input_iter += comment_end.size();
++current_line;
current_col = 0;
break;
}
boost::match_results<std::string::const_iterator> eoc_delim;
if (regex_search(input_iter, input_end, eoc_delim, multiline_comment_end_pattern.regex, boost::match_continuous)) {
std::string comment_end(eoc_delim[0]);
input_iter += comment_end.size();
current_col += comment_end.size();
found_eoc = true;
break;
}
if ((!found_eoc) && (input_iter != input_end)) {
++input_iter;
}
}
if (!found_eoc) {
std::cout << "Incomplete comment block! Add exceptions!" << std::endl;
return retval;
}
}
else if (regex_search(input_iter, input_end, what, skip_pattern.regex, boost::match_continuous)) {
std::string whitespace(what[0]);
input_iter += whitespace.size();
current_col += whitespace.size();
found = true;
}
else if (regex_search(input_iter, input_end, what, line_sep_pattern.regex, boost::match_continuous)) {
const std::string cr(what[0]);
boost::match_results<std::string::const_iterator> if_delim;
if (regex_search(cr.begin(), cr.end(), if_delim, command_sep_pattern.regex, boost::match_continuous)) {
TokenPtr t(new Token(if_delim[0], command_sep_pattern.identifier, filename));
t->start.column = current_col;
t->start.line = current_line;
current_col += t->text.size();
t->end.column = current_col;
t->end.line = current_line;
retval.push_back(t);
input_iter += t->text.size();
found = true;
break;
}
}
*/
boost::match_results<std::string::const_iterator> what;
if (regex_search(input_iter, input.end(), what, lex_regex, boost::match_continuous)) {
for (i = 0; i < lex_patterns.size(); ++i) {
if (!(std::string(what[i+1])).empty()) {
TokenPtr t(new Token(what[i+1], lex_patterns[i].identifier, filename));
input_iter += cr.size();
++current_line;
current_col = 0;
found = true;
}
else if (regex_search(input_iter, input_end, what, command_sep_pattern.regex, boost::match_continuous)) {
TokenPtr t(new Token(what[0], command_sep_pattern.identifier, filename));
t->start.column = current_col;
t->start.line = current_line;
current_col += t->text.size();
t->end.column = current_col;
t->end.line = current_line;
retval.push_back(t);
input_iter += t->text.size();
found = true;
}
else {
for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
if (regex_search(input_iter, input_end, what, iter->regex, boost::match_continuous)) {
TokenPtr t(new Token(what[0], iter->identifier, filename));
t->start.column = current_col;
t->start.line = current_line;
current_col += t->text.size();
@@ -68,87 +127,13 @@ std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename)
break;
}
}
}
if (!found) {
for (iter = skip_patterns.begin(), end = skip_patterns.end(); iter != end; ++iter) {
boost::match_results<std::string::const_iterator> what;
if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
std::string whitespace(what[0]);
input_iter += whitespace.size();
current_col += whitespace.size();
found = true;
break;
}
}
if (!found) {
for (iter = line_sep_patterns.begin(), end = line_sep_patterns.end(); iter != end; ++iter) {
boost::match_results<std::string::const_iterator> what;
if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
const std::string cr(what[0]);
for (iter2 = command_sep_patterns.begin(), end = command_sep_patterns.end(); iter2 != end; ++iter2) {
boost::match_results<std::string::const_iterator> if_delim;
if (regex_search(cr.begin(), cr.end(), if_delim, iter2->regex, boost::match_continuous)) {
TokenPtr t(new Token(if_delim[0], iter2->identifier, filename));
t->start.column = current_col;
t->start.line = current_line;
current_col += t->text.size();
t->end.column = current_col;
t->end.line = current_line;
retval.push_back(t);
break;
}
}
input_iter += cr.size();
found = true;
++current_line;
current_col = 0;
break;
}
}
if (!found) {
for (iter = command_sep_patterns.begin(), end = command_sep_patterns.end(); iter != end; ++iter) {
boost::match_results<std::string::const_iterator> what;
if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
TokenPtr t(new Token(what[0], iter->identifier, filename));
t->start.column = current_col;
t->start.line = current_line;
current_col += t->text.size();
t->end.column = current_col;
t->end.line = current_line;
retval.push_back(t);
input_iter += t->text.size();
found = true;
break;
}
}
if (!found) {
const std::string err(input_iter, input.end());
std::cout << "Unknown string at: " << err << std::endl;
return retval;
}
}
const std::string err(input_iter, input_end);
std::cout << "Unknown string at: " << err << std::endl;
return retval;
}
}
}
return retval;
}
void Lexer::set_skip(const Pattern &p) {
skip_patterns.push_back(p);
}
void Lexer::set_line_sep(const Pattern &p) {
line_sep_patterns.push_back(p);
}
void Lexer::set_command_sep(const Pattern &p) {
command_sep_patterns.push_back(p);
}
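For reference, here is a self-contained sketch of the flattened lexing loop the new code implements: comments are tried first, then whitespace, line separators, and finally the ordinary token patterns. It is written with std::regex and made-up pattern names rather than the project's Pattern/Token types, so it only mirrors the control flow shown in the diff above.

#include <iostream>
#include <regex>
#include <string>

int main() {
    // Illustrative stand-ins for the patterns the real lexer is configured with
    // (the actual Pattern objects and identifiers are not shown in this diff).
    std::regex line_comment("//");      // cf. singleline_comment_pattern
    std::regex block_open("/\\*");      // cf. multiline_comment_start_pattern
    std::regex block_close("\\*/");     // cf. multiline_comment_end_pattern
    std::regex line_sep("\n");          // cf. line_sep_pattern
    std::regex skip("[ \t]+");          // cf. skip_pattern
    std::regex word("[A-Za-z0-9_]+");   // stands in for one ordinary lex pattern

    std::string input = "foo // trailing\nbar /* block\ncomment */ baz\n";
    auto it = input.cbegin(), end = input.cend();
    std::smatch m;
    auto at = [&](const std::regex &re) {
        return std::regex_search(it, end, m, re,
                                 std::regex_constants::match_continuous);
    };

    int line = 0;
    while (it != end) {
        if (at(line_comment)) {                  // skip ahead to the next line separator
            it += m[0].length();
            while (it != end && !at(line_sep)) ++it;
        } else if (at(block_open)) {             // skip ahead to the closing */
            it += m[0].length();
            bool closed = false;
            while (it != end) {
                if (at(block_close)) { it += m[0].length(); closed = true; break; }
                if (at(line_sep)) ++line;        // keep counting lines inside the block
                ++it;
            }
            if (!closed) { std::cout << "unterminated block comment\n"; break; }
        } else if (at(skip)) {                   // whitespace
            it += m[0].length();
        } else if (at(line_sep)) {               // newline: bump the line counter
            it += m[0].length();
            ++line;
        } else if (at(word)) {                   // an ordinary token
            std::cout << "token on line " << line << ": " << m[0] << "\n";
            it += m[0].length();
        } else {
            std::cout << "unknown string at: " << std::string(it, end) << "\n";
            break;
        }
    }
    return 0;
}

Trying the comment and separator patterns before the ordinary lex patterns is what keeps "//" and "/*" from being tokenized as operators, which is the point of the flattened ordering in this commit.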