Added filenames to tokens, removed from File_Position
@@ -9,7 +9,7 @@ Lexer Lexer::operator<<(const Pattern &p) {
     return *this;
 }
 
-std::vector<Token> Lexer::lex(const std::string &input) {
+std::vector<Token> Lexer::lex(const std::string &input, const char *filename) {
     std::vector<Pattern>::iterator iter, end;
     std::vector<Token> retval;
     bool found;
@@ -23,7 +23,7 @@ std::vector<Token> Lexer::lex(const std::string &input) {
     for (iter = lex_patterns.begin(), end = lex_patterns.end(); iter != end; ++iter) {
         boost::match_results<std::string::const_iterator> what;
         if (regex_search(input_iter, input.end(), what, iter->regex, boost::match_continuous)) {
-            Token t(what[0], iter->identifier);
+            Token t(what[0], iter->identifier, filename);
             t.start.column = current_col;
             t.start.line = current_line;
             current_col += t.text.size();
@@ -10,12 +10,11 @@
 struct File_Position {
     int line;
     int column;
-    char *filename;
 
-    File_Position(int file_line, int file_column, char *fname)
-        : line(file_line), column(file_column), filename(fname) { }
+    File_Position(int file_line, int file_column)
+        : line(file_line), column(file_column) { }
 
-    File_Position() : line(0), column(0), filename(NULL) { }
+    File_Position() : line(0), column(0) { }
 };
 
 struct Pattern {
@@ -28,11 +27,12 @@ struct Pattern {
 struct Token {
     std::string text;
     int identifier;
+    const char *filename;
     File_Position start, end;
 
     std::vector<Token> children;
 
-    Token(const std::string &token_text, int id) : text(token_text), identifier(id) { }
+    Token(const std::string &token_text, int id, const char *fname) : text(token_text), identifier(id), filename(fname) { }
 };
 
 struct Lexer {
@@ -42,7 +42,7 @@ struct Lexer {
     std::vector<Pattern> line_sep_patterns;
 
     Lexer operator<<(const Pattern &p);
-    std::vector<Token> lex(const std::string &input);
+    std::vector<Token> lex(const std::string &input, const char *fname);
 
     void set_skip(const Pattern &p);
     void set_line_sep(const Pattern &p);
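A note on the header changes above: Token stores filename as a non-owning const char * (the characters are never copied), so the pointer passed to lex() has to outlive the tokens built from it. The call sites in this commit, a string literal ("INPUT") and argv[1], satisfy that. A caller that builds the name dynamically would need to keep the owning string alive, roughly as in this hypothetical sketch (make_source_name is not part of the codebase):

    // Hypothetical caller; make_source_name() does not exist in this codebase.
    // Token::filename is a non-owning pointer, so the std::string that owns
    // the characters must stay in scope for as long as the tokens are used.
    std::string fname = make_source_name();
    std::vector<Token> tokens = lexer.lex(source, fname.c_str());
    debug_print(tokens);   // safe: fname is still alive here
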
@@ -14,7 +14,7 @@ class TokenType { public: enum Type { Whitespace, Identifier, Number, Operator,
     Square_Open, Square_Close, Curly_Open, Curly_Close, Comma, Quoted_String, Single_Quoted_String, Carriage_Return, Semicolon }; };
 
 void debug_print(Token &token) {
-    std::cout << "Token: " << token.text << "(" << token.identifier << ") @ (" << token.start.column
+    std::cout << "Token: " << token.text << "(" << token.identifier << ") @ " << token.filename << ": (" << token.start.column
         << ", " << token.start.line << ") to (" << token.end.column << ", " << token.end.line << ") " << std::endl;
 }
 void debug_print(std::vector<Token> &tokens) {
@@ -84,7 +84,7 @@ int main(int argc, char *argv[]) {
         std::cout << "Expression> ";
         std::getline(std::cin, input);
         while (input != "quit") {
-            std::vector<Token> tokens = lexer.lex(input);
+            std::vector<Token> tokens = lexer.lex(input, "INPUT");
             debug_print(tokens);
             parse(tokens);
 
@@ -93,7 +93,7 @@ int main(int argc, char *argv[]) {
         }
     }
     else {
-        std::vector<Token> tokens = lexer.lex(load_file(argv[1]));
+        std::vector<Token> tokens = lexer.lex(load_file(argv[1]), argv[1]);
         debug_print(tokens);
     }
 }
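
For reference, a minimal sketch of the new call path. Only lex(), Token::filename/text/identifier, and the File_Position members come from the diff; the header name lexer.hpp and the helper itself are assumptions, and the Lexer is presumed to be configured with patterns as in main():

    #include <iostream>
    #include <string>
    #include <vector>

    #include "lexer.hpp"  // assumed header name; not shown in this diff

    // Lex a string and print each token together with the filename it now carries.
    // Assumes `lexer` has already been set up with patterns, as in main().
    void lex_and_report(Lexer &lexer, const std::string &source, const char *fname) {
        std::vector<Token> tokens = lexer.lex(source, fname);
        for (std::vector<Token>::iterator it = tokens.begin(); it != tokens.end(); ++it) {
            std::cout << it->filename << ": '" << it->text << "' ("
                      << it->identifier << ") at line " << it->start.line
                      << ", column " << it->start.column << std::endl;
        }
    }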