Switch Token to smart pointer
This commit is contained in:
@@ -9,9 +9,9 @@ Lexer Lexer::operator<<(const Pattern &p) {
|
|||||||
return *this;
|
return *this;
|
||||||
}
|
}
|
||||||
|
|
||||||
std::vector<Token> Lexer::lex(const std::string &input, const char *filename) {
|
std::vector<TokenPtr> Lexer::lex(const std::string &input, const char *filename) {
|
||||||
std::vector<Pattern>::iterator iter, end;
|
std::vector<Pattern>::iterator iter, end;
|
||||||
std::vector<Token> retval;
|
std::vector<TokenPtr> retval;
|
||||||
bool found;
|
bool found;
|
||||||
std::string::const_iterator input_iter = input.begin();
|
std::string::const_iterator input_iter = input.begin();
|
||||||
|
|
||||||
@@ -23,14 +23,14 @@ std::vector<Token> Lexer::lex(const std::string &input, const char *filename) {
|
|||||||
for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
|
for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
|
||||||
boost::match_results<std::string::const_iterator> what;
|
boost::match_results<std::string::const_iterator> what;
|
||||||
if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
|
if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
|
||||||
Token t(what[0], iter->identifier, filename);
|
TokenPtr t(new Token(what[0], iter->identifier, filename));
|
||||||
t.start.column = current_col;
|
t->start.column = current_col;
|
||||||
t.start.line = current_line;
|
t->start.line = current_line;
|
||||||
current_col += t.text.size();
|
current_col += t->text.size();
|
||||||
t.end.column = current_col;
|
t->end.column = current_col;
|
||||||
t.end.line = current_line;
|
t->end.line = current_line;
|
||||||
retval.push_back(t);
|
retval.push_back(t);
|
||||||
input_iter += t.text.size();
|
input_iter += t->text.size();
|
||||||
found = true;
|
found = true;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@@ -4,8 +4,9 @@
|
|||||||
#ifndef LANGKIT_LEXER_HPP_
|
#ifndef LANGKIT_LEXER_HPP_
|
||||||
#define LANGKIT_LEXER_HPP_
|
#define LANGKIT_LEXER_HPP_
|
||||||
|
|
||||||
#include <string>
|
|
||||||
#include <boost/regex.hpp>
|
#include <boost/regex.hpp>
|
||||||
|
#include <tr1/memory>
|
||||||
|
#include <string>
|
||||||
|
|
||||||
struct File_Position {
|
struct File_Position {
|
||||||
int line;
|
int line;
|
||||||
@@ -24,13 +25,15 @@ struct Pattern {
|
|||||||
Pattern(const std::string &regexp, int id) : regex(regexp), identifier(id) { }
|
Pattern(const std::string &regexp, int id) : regex(regexp), identifier(id) { }
|
||||||
};
|
};
|
||||||
|
|
||||||
|
typedef std::tr1::shared_ptr<struct Token> TokenPtr;
|
||||||
|
|
||||||
struct Token {
|
struct Token {
|
||||||
std::string text;
|
std::string text;
|
||||||
int identifier;
|
int identifier;
|
||||||
const char *filename;
|
const char *filename;
|
||||||
File_Position start, end;
|
File_Position start, end;
|
||||||
|
|
||||||
std::vector<Token> children;
|
std::vector<TokenPtr> children;
|
||||||
|
|
||||||
Token(const std::string &token_text, int id, const char *fname) : text(token_text), identifier(id), filename(fname) { }
|
Token(const std::string &token_text, int id, const char *fname) : text(token_text), identifier(id), filename(fname) { }
|
||||||
};
|
};
|
||||||
@@ -42,7 +45,7 @@ struct Lexer {
|
|||||||
std::vector<Pattern> line_sep_patterns;
|
std::vector<Pattern> line_sep_patterns;
|
||||||
|
|
||||||
Lexer operator<<(const Pattern &p);
|
Lexer operator<<(const Pattern &p);
|
||||||
std::vector<Token> lex(const std::string &input, const char *fname);
|
std::vector<TokenPtr> lex(const std::string &input, const char *fname);
|
||||||
|
|
||||||
void set_skip(const Pattern &p);
|
void set_skip(const Pattern &p);
|
||||||
void set_line_sep(const Pattern &p);
|
void set_line_sep(const Pattern &p);
|
||||||
|
@@ -6,43 +6,43 @@
|
|||||||
#include "langkit_lexer.hpp"
|
#include "langkit_lexer.hpp"
|
||||||
#include "langkit_parser.hpp"
|
#include "langkit_parser.hpp"
|
||||||
|
|
||||||
std::pair<Token_Iterator, Token> String_Rule(Token_Iterator iter, Token_Iterator end, const std::string &val) {
|
std::pair<Token_Iterator, TokenPtr> String_Rule(Token_Iterator iter, Token_Iterator end, const std::string &val) {
|
||||||
if (iter != end) {
|
if (iter != end) {
|
||||||
if (iter->text == val) {
|
if ((*iter)->text == val) {
|
||||||
return std::pair<Token_Iterator, Token>(++iter, *iter);
|
return std::pair<Token_Iterator, TokenPtr>(++iter, *iter);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return std::pair<Token_Iterator, Token>(iter, *iter);
|
return std::pair<Token_Iterator, TokenPtr>(iter, *iter);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<Token_Iterator, Token> Type_Rule(Token_Iterator iter, Token_Iterator end, const int val) {
|
std::pair<Token_Iterator, TokenPtr> Type_Rule(Token_Iterator iter, Token_Iterator end, const int val) {
|
||||||
if (iter != end) {
|
if (iter != end) {
|
||||||
if (iter->identifier == val) {
|
if ((*iter)->identifier == val) {
|
||||||
return std::pair<Token_Iterator, Token>(++iter, *iter);
|
return std::pair<Token_Iterator, TokenPtr>(++iter, *iter);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return std::pair<Token_Iterator, Token>(iter, *iter);
|
return std::pair<Token_Iterator, TokenPtr>(iter, *iter);
|
||||||
}
|
}
|
||||||
|
|
||||||
std::pair<Token_Iterator, Token> Or_Rule(Token_Iterator iter, Token_Iterator end, const Rule &lhs, const Rule &rhs) {
|
std::pair<Token_Iterator, TokenPtr> Or_Rule(Token_Iterator iter, Token_Iterator end, const Rule &lhs, const Rule &rhs) {
|
||||||
Token_Iterator new_iter;
|
Token_Iterator new_iter;
|
||||||
|
|
||||||
if (iter != end) {
|
if (iter != end) {
|
||||||
new_iter = lhs.rule(iter, end).first;
|
new_iter = lhs.rule(iter, end).first;
|
||||||
|
|
||||||
if (new_iter != iter) {
|
if (new_iter != iter) {
|
||||||
return std::pair<Token_Iterator, Token>(new_iter, *iter);
|
return std::pair<Token_Iterator, TokenPtr>(new_iter, *iter);
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
new_iter = rhs.rule(iter, end).first;
|
new_iter = rhs.rule(iter, end).first;
|
||||||
if (new_iter != iter) {
|
if (new_iter != iter) {
|
||||||
return std::pair<Token_Iterator, Token>(new_iter, *iter);
|
return std::pair<Token_Iterator, TokenPtr>(new_iter, *iter);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return std::pair<Token_Iterator, Token>(iter, *iter);
|
return std::pair<Token_Iterator, TokenPtr>(iter, *iter);
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@@ -64,6 +64,6 @@ std::pair<Token_Iterator, Token> And_Rule(Token_Iterator iter, Token_Iterator en
|
|||||||
}
|
}
|
||||||
*/
|
*/
|
||||||
|
|
||||||
std::pair<Token_Iterator, Token> Rule::operator()(Token_Iterator iter, Token_Iterator end) {
|
std::pair<Token_Iterator, TokenPtr> Rule::operator()(Token_Iterator iter, Token_Iterator end) {
|
||||||
return this->rule(iter, end);
|
return this->rule(iter, end);
|
||||||
}
|
}
|
||||||
|
@@ -8,20 +8,21 @@
|
|||||||
|
|
||||||
#include "langkit_lexer.hpp"
|
#include "langkit_lexer.hpp"
|
||||||
|
|
||||||
typedef std::vector<Token>::iterator Token_Iterator;
|
|
||||||
|
typedef std::vector<TokenPtr>::iterator Token_Iterator;
|
||||||
|
|
||||||
struct Rule {
|
struct Rule {
|
||||||
int identifier;
|
int identifier;
|
||||||
boost::function<std::pair<Token_Iterator, Token>(Token_Iterator iter, Token_Iterator end)> rule;
|
boost::function<std::pair<Token_Iterator, TokenPtr>(Token_Iterator iter, Token_Iterator end)> rule;
|
||||||
std::pair<Token_Iterator, Token> operator()(Token_Iterator iter, Token_Iterator end);
|
std::pair<Token_Iterator, TokenPtr> operator()(Token_Iterator iter, Token_Iterator end);
|
||||||
|
|
||||||
Rule() : identifier(-1) {}
|
Rule() : identifier(-1) {}
|
||||||
Rule(int id) : identifier(id) {}
|
Rule(int id) : identifier(id) {}
|
||||||
};
|
};
|
||||||
|
|
||||||
std::pair<Token_Iterator, Token> String_Rule(Token_Iterator iter, Token_Iterator end, const std::string &val);
|
std::pair<Token_Iterator, TokenPtr> String_Rule(Token_Iterator iter, Token_Iterator end, const std::string &val);
|
||||||
std::pair<Token_Iterator, Token> Type_Rule(Token_Iterator iter, Token_Iterator end, const int val);
|
std::pair<Token_Iterator, TokenPtr> Type_Rule(Token_Iterator iter, Token_Iterator end, const int val);
|
||||||
std::pair<Token_Iterator, Token> Or_Rule(Token_Iterator iter, Token_Iterator end, const Rule &lhs, const Rule &rhs);
|
std::pair<Token_Iterator, TokenPtr> Or_Rule(Token_Iterator iter, Token_Iterator end, const Rule &lhs, const Rule &rhs);
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
@@ -13,11 +13,11 @@
|
|||||||
class TokenType { public: enum Type { Whitespace, Identifier, Number, Operator, Parens_Open, Parens_Close,
|
class TokenType { public: enum Type { Whitespace, Identifier, Number, Operator, Parens_Open, Parens_Close,
|
||||||
Square_Open, Square_Close, Curly_Open, Curly_Close, Comma, Quoted_String, Single_Quoted_String, Carriage_Return, Semicolon }; };
|
Square_Open, Square_Close, Curly_Open, Curly_Close, Comma, Quoted_String, Single_Quoted_String, Carriage_Return, Semicolon }; };
|
||||||
|
|
||||||
void debug_print(Token &token) {
|
void debug_print(TokenPtr token) {
|
||||||
std::cout << "Token: " << token.text << "(" << token.identifier << ") @ " << token.filename << ": (" << token.start.column
|
std::cout << "Token: " << token->text << "(" << token->identifier << ") @ " << token->filename << ": (" << token->start.column
|
||||||
<< ", " << token.start.line << ") to (" << token.end.column << ", " << token.end.line << ") " << std::endl;
|
<< ", " << token->start.line << ") to (" << token->end.column << ", " << token->end.line << ") " << std::endl;
|
||||||
}
|
}
|
||||||
void debug_print(std::vector<Token> &tokens) {
|
void debug_print(std::vector<TokenPtr> &tokens) {
|
||||||
for (unsigned int i = 0; i < tokens.size(); ++i) {
|
for (unsigned int i = 0; i < tokens.size(); ++i) {
|
||||||
debug_print(tokens[i]);
|
debug_print(tokens[i]);
|
||||||
}
|
}
|
||||||
@@ -42,14 +42,13 @@ std::string load_file(const char *filename) {
|
|||||||
return ret_val;
|
return ret_val;
|
||||||
}
|
}
|
||||||
|
|
||||||
void parse(std::vector<Token> &tokens) {
|
void parse(std::vector<TokenPtr> &tokens) {
|
||||||
Rule rule;
|
Rule rule;
|
||||||
rule.rule = boost::bind(String_Rule, _1, _2, "def");
|
rule.rule = boost::bind(String_Rule, _1, _2, "def");
|
||||||
|
|
||||||
Token_Iterator iter = tokens.begin(), end = tokens.end();
|
Token_Iterator iter = tokens.begin(), end = tokens.end();
|
||||||
|
|
||||||
|
std::pair<Token_Iterator, TokenPtr> results = rule(iter, end);
|
||||||
std::pair<Token_Iterator, Token> results = rule(iter, end);
|
|
||||||
|
|
||||||
if (results.first != iter) {
|
if (results.first != iter) {
|
||||||
std::cout << "Parse successful: " << std::endl;
|
std::cout << "Parse successful: " << std::endl;
|
||||||
@@ -84,7 +83,7 @@ int main(int argc, char *argv[]) {
|
|||||||
std::cout << "Expression> ";
|
std::cout << "Expression> ";
|
||||||
std::getline(std::cin, input);
|
std::getline(std::cin, input);
|
||||||
while (input != "quit") {
|
while (input != "quit") {
|
||||||
std::vector<Token> tokens = lexer.lex(input, "INPUT");
|
std::vector<TokenPtr> tokens = lexer.lex(input, "INPUT");
|
||||||
debug_print(tokens);
|
debug_print(tokens);
|
||||||
parse(tokens);
|
parse(tokens);
|
||||||
|
|
||||||
@@ -93,7 +92,7 @@ int main(int argc, char *argv[]) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
std::vector<Token> tokens = lexer.lex(load_file(argv[1]), argv[1]);
|
std::vector<TokenPtr> tokens = lexer.lex(load_file(argv[1]), argv[1]);
|
||||||
debug_print(tokens);
|
debug_print(tokens);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
Reference in New Issue
Block a user