Added command delimiters
@@ -10,7 +10,7 @@ Lexer Lexer::operator<<(const Pattern &p) {
 }
 
 std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename) {
-    std::vector<Pattern>::iterator iter, end;
+    std::vector<Pattern>::iterator iter, end, iter2;
     std::vector<TokenPtr> retval;
     bool found;
     std::string::const_iterator input_iter = input.begin();
@@ -51,11 +51,44 @@ std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename)
         for (iter = line_sep_patterns.begin(), end = line_sep_patterns.end(); iter != end; ++iter) {
             boost::match_results<std::string::const_iterator> what;
             if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
-                std::string cr(what[0]);
+                const std::string cr(what[0]);
+
+                for (iter2 = command_sep_patterns.begin(), end = command_sep_patterns.end(); iter2 != end; ++iter2) {
+                    boost::match_results<std::string::const_iterator> if_delim;
+                    if (regex_search(cr.begin(), cr.end(), if_delim, iter2->regex, boost::match_continuous)) {
+                        TokenPtr t(new Token(if_delim[0], iter2->identifier, filename));
+                        t->start.column = current_col;
+                        t->start.line = current_line;
+                        current_col += t->text.size();
+                        t->end.column = current_col;
+                        t->end.line = current_line;
+                        retval.push_back(t);
+                        break;
+                    }
+                }
+
                 input_iter += cr.size();
                 found = true;
                 ++current_line;
                 current_col = 0;
+
                 break;
             }
         }
+
+        if (!found) {
+            for (iter = command_sep_patterns.begin(), end = command_sep_patterns.end(); iter != end; ++iter) {
+                boost::match_results<std::string::const_iterator> what;
+                if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
+                    TokenPtr t(new Token(what[0], iter->identifier, filename));
+                    t->start.column = current_col;
+                    t->start.line = current_line;
+                    current_col += t->text.size();
+                    t->end.column = current_col;
+                    t->end.line = current_line;
+                    retval.push_back(t);
+                    input_iter += t->text.size();
+                    found = true;
+                    break;
+                }
+            }
@@ -68,6 +101,7 @@ std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename)
                 }
             }
+        }
         }
     }
     return retval;
 }
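The heart of the change: when a line separator matches, its matched text (cr) is itself re-checked against the command separator patterns, so a line-separator match that begins with a command delimiter also emits that delimiter as its own Token; the new if (!found) branch handles command separators that stand alone at the cursor. Making cr const also means cr.begin() yields the std::string::const_iterator that the declared match_results type expects. Both searches pass boost::match_continuous, which anchors the match at the start iterator instead of searching ahead; a minimal standalone sketch of that flag (not part of this commit):

#include <boost/regex.hpp>
#include <iostream>
#include <string>

int main() {
    const std::string input = "foo;bar";
    std::string::const_iterator pos = input.begin();

    boost::regex semicolon(";");
    boost::match_results<std::string::const_iterator> what;

    // With match_continuous the match must begin exactly at 'pos',
    // which is what makes regex_search usable as a lexing step.
    std::cout << std::boolalpha
              << boost::regex_search(pos, input.end(), what, semicolon,
                                     boost::match_continuous) << '\n';   // false: "foo" is at pos

    pos += 3;   // advance the cursor past "foo"
    std::cout << boost::regex_search(pos, input.end(), what, semicolon,
                                     boost::match_continuous) << '\n';   // true: ";" is at pos
}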
@@ -92,12 +92,6 @@ struct Rule {
     Rule operator~() {
         return Rule(boost::bind(Optional_Rule, _1, _2, _3, _4, _5, *this));
     }
 
-
-    //const RuleImplPtr get_impl() const { return impl; }
-
-    //private:
-    //RuleImplPtr impl;
-
 };
 
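A note on the surrounding idiom (the hunk itself only deletes commented-out remnants of an impl pointer): operator~ makes a rule optional by returning a new Rule whose implementation is Optional_Rule with the current rule bound as its trailing argument and the five call-time arguments left as bind placeholders. A minimal self-contained sketch of the same shape (toy stand-ins, not the project's actual Rule; two placeholders instead of five):

#include <boost/bind/bind.hpp>
#include <boost/function.hpp>
#include <iostream>
#include <string>

struct Rule;
static bool Optional_Rule(const std::string &s, std::size_t &pos, Rule r);

// Toy Rule: 'impl' tries to consume input at 'pos' and reports success.
struct Rule {
    boost::function<bool (const std::string &, std::size_t &)> impl;

    explicit Rule(const boost::function<bool (const std::string &, std::size_t &)> &f)
        : impl(f) { }

    // Same shape as the commit's operator~: rebind this rule under
    // Optional_Rule, leaving the eventual call arguments as placeholders.
    Rule operator~() {
        using namespace boost::placeholders;
        return Rule(boost::bind(&Optional_Rule, _1, _2, *this));
    }
};

// An optional rule never fails; it consumes input only if the wrapped rule does.
static bool Optional_Rule(const std::string &s, std::size_t &pos, Rule r) {
    r.impl(s, pos);
    return true;
}

// Hypothetical leaf rule: match one literal character.
static bool Char_Rule(char want, const std::string &s, std::size_t &pos) {
    if (pos < s.size() && s[pos] == want) { ++pos; return true; }
    return false;
}

static Rule Ch(char c) {
    using namespace boost::placeholders;
    return Rule(boost::bind(&Char_Rule, c, _1, _2));
}

int main() {
    Rule opt_semi = ~Ch(';');
    std::size_t pos = 0;
    std::cout << std::boolalpha
              << opt_semi.impl("abc", pos) << " pos=" << pos << '\n';   // true, pos=0
    pos = 0;
    std::cout << opt_semi.impl(";abc", pos) << " pos=" << pos << '\n';  // true, pos=1
}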
@@ -65,7 +65,7 @@ void parse(std::vector<TokenPtr> &tokens) {
 
 
     //Example: "def add(x,y)"
-
+    /*
     Rule params;
     Rule block(TokenType::Scoped_Block);
     Rule rule(TokenType::Function_Def);
@@ -76,6 +76,9 @@ void parse(std::vector<TokenPtr> &tokens) {
     params = Id(TokenType::Identifier) << *(Ign(Str(",")) << Id(TokenType::Identifier));
     block = Ign(Str("{")) << ~return_statement << Ign(Str("}"));
     return_statement = Ign(Str("return")) << Id(TokenType::Identifier) << Str("+") << Id(TokenType::Identifier);
+    */
+
+    Rule rule = Str("x") << Id(TokenType::Semicolon);
 
     /*
     Rule rule;
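The temporary grammar now reduces to a single sequence: a literal "x" token followed by a semicolon token. In the same toy style as the Rule sketch above (these lines assume that sketch's Rule and Ch; the project's real operator<< is not shown in this diff), sequencing can be bound the same way:

// Appended to the toy sketch above. '<<' sequences two rules: succeed
// only if the right-hand rule matches immediately after the left-hand one.
static bool Seq_Rule(const std::string &s, std::size_t &pos, Rule lhs, Rule rhs) {
    std::size_t saved = pos;
    if (lhs.impl(s, pos) && rhs.impl(s, pos)) { return true; }
    pos = saved;   // backtrack on failure
    return false;
}

Rule operator<<(Rule lhs, Rule rhs) {
    using namespace boost::placeholders;
    return Rule(boost::bind(&Seq_Rule, _1, _2, lhs, rhs));
}

// Ch('x') << Ch(';') then plays the role of Str("x") << Id(TokenType::Semicolon),
// but over characters instead of tokens.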
@@ -112,7 +115,6 @@ void parse(std::vector<TokenPtr> &tokens) {
         std::cout << "Parse failed: " << std::endl;
         debug_print(parent, "");
     }
-
 }
 
 
@@ -142,7 +144,7 @@ int main(int argc, char *argv[]) {
         std::getline(std::cin, input);
         while (input != "quit") {
             std::vector<TokenPtr> tokens = lexer.lex(input, "INPUT");
-            //debug_print(tokens);
+            debug_print(tokens);
             parse(tokens);
 
             std::cout << "Expression> ";
@@ -151,6 +153,7 @@ int main(int argc, char *argv[]) {
         }
         else {
             std::vector<TokenPtr> tokens = lexer.lex(load_file(argv[1]), argv[1]);
+            debug_print(tokens);
             parse(tokens);
             //debug_print(tokens);
         }
     }
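One loose end for anyone rebuilding this: main() calls a load_file helper whose definition is outside this diff. A plausible minimal version (an assumption, not the commit's actual code) reads the whole file into a string:

#include <fstream>
#include <sstream>
#include <string>

// Hypothetical stand-in for the load_file referenced in main();
// the real definition is not part of this diff.
std::string load_file(const char *filename) {
    std::ifstream infile(filename);
    std::ostringstream contents;
    contents << infile.rdbuf();   // slurp the entire stream
    return contents.str();
}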