Make the Token ignore property a dynamic property.

Signed-off-by: FrancisANDRE <zosrothko@orange.fr>
Author: FrancisANDRE, 2016-01-20 17:01:41 +01:00
Parent: 59b5b4e46e
Commit: 1c826040e5
6 changed files with 110 additions and 32 deletions
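The ignore flag, previously stored per token in StreamTokenizer's private TokenInfo struct and fixed at addToken() time, now lives on the Token itself and can be changed at any point. A minimal usage sketch, not part of the commit and modeled on the new testTokenizer8 case further down (the main() scaffolding is mine):

	#include "Poco/StreamTokenizer.h"
	#include "Poco/Token.h"
	#include <sstream>
	#include <cassert>

	int main()
	{
		using Poco::StreamTokenizer;
		using Poco::Token;
		using Poco::WhitespaceToken;

		std::istringstream istr("   ");
		StreamTokenizer tokenizer(istr);

		// The tokenizer takes ownership and deletes the token in its destructor.
		Token* ws = new WhitespaceToken;
		tokenizer.addToken(ws);          // marks whitespace as ignored by default

		// New in this commit: the flag can be flipped after registration,
		// not only at addToken() time.
		ws->ignore(false);
		assert(tokenizer.next()->tokenClass() == Token::WHITESPACE_TOKEN);
		assert(tokenizer.next()->tokenClass() == Token::EOF_TOKEN);
		return 0;
	}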

View File

@@ -78,13 +78,7 @@ public:
 		/// You must not delete the token returned by next().
 
 private:
-	struct TokenInfo
-	{
-		Token* pToken;
-		bool ignore;
-	};
-
-	typedef std::vector<TokenInfo> TokenVec;
+	typedef std::vector<Token*> TokenVec;
 
 	TokenVec _tokens;
 	std::istream* _pIstr;

View File

@@ -53,7 +53,7 @@ public:
 		USER_TOKEN
 	};
 
-	Token();
+	Token(bool ignore = false);
 		/// Creates the Token.
 
 	virtual ~Token();
@@ -106,10 +106,22 @@ public:
 		/// Returns a char representation of the token.
 
 	bool is(Class tokenClass) const;
 		/// Returns true iff the token has the given class.
 
+	void ignore(bool ignored);
+		/// If ignored is true, the token will be marked
+		/// as ignorable, which means that next() will
+		/// not return it.
+		/// If ignored is false, the token will be marked
+		/// as acceptable, which means that next() will
+		/// return it.
+
+	bool ignored() const;
+		/// Returns true iff the token is marked as ignored.
+
 protected:
 	std::string _value;
+	bool _ignored;
 
 private:
 	Token(const Token&);
@@ -118,7 +130,7 @@ private:
 
 class Foundation_API InvalidToken: public Token
-	/// This token class is used for signalling that
+	/// This token class is used for signaling that
 	/// an invalid character sequence has been encountered
 	/// in the input stream.
 {
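Because the initial ignore state is now passed through the Token(bool) constructor, custom token subclasses can declare themselves ignorable up front, just as WhitespaceToken now does with Token(true). A hypothetical sketch; LineCommentToken and its '#'-comment syntax are my own illustration, not part of POCO or this commit:

	#include "Poco/Token.h"
	#include <istream>
	#include <string>

	// Hypothetical '#' line-comment token. Passing true to the new
	// Token(bool) constructor marks it as ignorable from the start,
	// mirroring WhitespaceToken's new Token(true) initialization.
	class LineCommentToken: public Poco::Token
	{
	public:
		LineCommentToken(): Poco::Token(true)
		{
		}

		Poco::Token::Class tokenClass() const
		{
			return Poco::Token::COMMENT_TOKEN;
		}

		bool start(char c, std::istream& istr)
		{
			_value.clear();
			if (c != '#') return false;
			_value += c;
			return true;
		}

		void finish(std::istream& istr)
		{
			// Consume up to, but not including, the end of the line.
			int c = istr.peek();
			while (c != std::char_traits<char>::eof() && c != '\n')
			{
				_value += static_cast<char>(istr.get());
				c = istr.peek();
			}
		}
	};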

View File

@@ -15,6 +15,7 @@
 #include "Poco/StreamTokenizer.h"
 #include "Poco/Token.h"
 
 namespace Poco {
@@ -36,7 +37,7 @@ StreamTokenizer::~StreamTokenizer()
 {
 	for (TokenVec::iterator it = _tokens.begin(); it != _tokens.end(); ++it)
 	{
-		delete it->pToken;
+		delete *it;
 	}
 }
@@ -51,10 +52,8 @@ void StreamTokenizer::addToken(Token* pToken)
 {
 	poco_check_ptr (pToken);
 
-	TokenInfo ti;
-	ti.pToken = pToken;
-	ti.ignore = (pToken->tokenClass() == Token::COMMENT_TOKEN || pToken->tokenClass() == Token::WHITESPACE_TOKEN);
-	_tokens.push_back(ti);
+	pToken->ignore(pToken->tokenClass() == Token::COMMENT_TOKEN || pToken->tokenClass() == Token::WHITESPACE_TOKEN);
+	_tokens.push_back(pToken);
 }
@@ -62,10 +61,8 @@ void StreamTokenizer::addToken(Token* pToken, bool ignore)
 {
 	poco_check_ptr (pToken);
 
-	TokenInfo ti;
-	ti.pToken = pToken;
-	ti.ignore = ignore;
-	_tokens.push_back(ti);
+	pToken->ignore(ignore);
+	_tokens.push_back(pToken);
 }
@@ -79,16 +76,16 @@ const Token* StreamTokenizer::next()
 	TokenVec::const_iterator it = _tokens.begin();
 	while (first != eof && it != _tokens.end())
 	{
-		const TokenInfo& ti = *it;
-		if (ti.pToken->start((char) first, *_pIstr))
+		Token* ti = *it;
+		if (ti->start((char) first, *_pIstr))
 		{
-			ti.pToken->finish(*_pIstr);
-			if (ti.ignore)
+			ti->finish(*_pIstr);
+			if (ti->ignored())
 			{
 				first = _pIstr->get();
 				it = _tokens.begin();
 			}
-			else return ti.pToken;
+			else return *it;
 		}
 		else ++it;
 	}

View File

@@ -22,7 +22,7 @@
 namespace Poco {
 
-Token::Token()
+Token::Token(bool ignore) : _ignored(ignore)
 {
 }
@@ -94,6 +94,17 @@ char Token::asChar() const
 }
 
 
+void Token::ignore(bool ignored)
+{
+	_ignored = ignored;
+}
+
+
+bool Token::ignored() const
+{
+	return _ignored;
+}
+
 InvalidToken::InvalidToken()
 {
 }
@@ -126,7 +137,7 @@ Token::Class EOFToken::tokenClass() const
 }
 
 
-WhitespaceToken::WhitespaceToken()
+WhitespaceToken::WhitespaceToken() : Token(true)
 {
 }

View File

@@ -251,20 +251,80 @@ void StreamTokenizerTest::testTokenizer7()
 	tokenizer.addToken(new WhitespaceToken());
 	tokenizer.addToken(new IdentifierToken());
 	tokenizer.addToken(new IntLiteralToken());
 
 	const Token* next = tokenizer.next();
-	assert (next->tokenClass() == Token::IDENTIFIER_TOKEN);
-	assert (next->tokenString() == "foo");
+	assert(next->tokenClass() == Token::IDENTIFIER_TOKEN);
+	assert(next->tokenString() == "foo");
 
 	next = tokenizer.next();
-	assert (next->tokenClass() == Token::INTEGER_LITERAL_TOKEN);
-	assert (next->asInteger() == 123);
+	assert(next->tokenClass() == Token::INTEGER_LITERAL_TOKEN);
+	assert(next->asInteger() == 123);
 
 	next = tokenizer.next();
-	assert (next->tokenClass() == Token::EOF_TOKEN);
+	assert(next->tokenClass() == Token::EOF_TOKEN);
 }
 
 
+void StreamTokenizerTest::testTokenizer8()
+{
+	std::string data = " foo 123 ";
+	std::istringstream istr(data);
+	StreamTokenizer tokenizer(istr);
+	Token* whitespace = new WhitespaceToken();
+	tokenizer.addToken(whitespace);
+	tokenizer.addToken(new IdentifierToken());
+	tokenizer.addToken(new IntLiteralToken());
+
+	const Token* next = tokenizer.next();
+	assert(next->tokenClass() == Token::IDENTIFIER_TOKEN);
+	assert(next->tokenString() == "foo");
+
+	whitespace->ignore(false);
+	next = tokenizer.next();
+	assert(next->tokenClass() == Token::WHITESPACE_TOKEN);
+	assert(next->asString() == " ");
+
+	next = tokenizer.next();
+	assert(next->tokenClass() == Token::INTEGER_LITERAL_TOKEN);
+	assert(next->asInteger() == 123);
+
+	next = tokenizer.next();
+	assert(next->tokenClass() == Token::WHITESPACE_TOKEN);
+	assert(next->asString() == " ");
+
+	next = tokenizer.next();
+	assert(next->tokenClass() == Token::EOF_TOKEN);
+}
+
+
+void StreamTokenizerTest::testTokenizer9()
+{
+	std::string data = " foo 123 ";
+	std::istringstream istr(data);
+	StreamTokenizer tokenizer(istr);
+	Token* whitespace = new WhitespaceToken();
+	tokenizer.addToken(whitespace);
+	tokenizer.addToken(new IdentifierToken());
+	tokenizer.addToken(new IntLiteralToken());
+
+	const Token* next = tokenizer.next();
+	assert(next->tokenClass() == Token::IDENTIFIER_TOKEN);
+	assert(next->tokenString() == "foo");
+
+	whitespace->ignore(false);
+	next = tokenizer.next();
+	assert(next->tokenClass() == Token::WHITESPACE_TOKEN);
+	assert(next->asString() == " ");
+
+	next = tokenizer.next();
+	assert(next->tokenClass() == Token::INTEGER_LITERAL_TOKEN);
+	assert(next->asInteger() == 123);
+
+	whitespace->ignore(true);
+	next = tokenizer.next();
+	assert(next->tokenClass() == Token::EOF_TOKEN);
+}
+
+
 void StreamTokenizerTest::setUp()
 {
 }
@@ -286,6 +346,8 @@ CppUnit::Test* StreamTokenizerTest::suite()
 	CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer5);
 	CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer6);
 	CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer7);
+	CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer8);
+	CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer9);
 
 	return pSuite;
 }

View File

@@ -33,6 +33,8 @@ public:
 	void testTokenizer5();
 	void testTokenizer6();
 	void testTokenizer7();
+	void testTokenizer8();
+	void testTokenizer9();
 
 	void setUp();
 	void tearDown();