Merge pull request #1131 from Kampbell/DynamicIgnoredToken

Make the Token ignore property a dynamic property.
This commit is contained in:
Günter Obiltschnig
2016-05-05 10:51:37 +02:00
7 changed files with 113 additions and 33 deletions

View File

@@ -78,13 +78,7 @@ public:
/// You must not delete the token returned by next(). /// You must not delete the token returned by next().
private: private:
struct TokenInfo typedef std::vector<Token*> TokenVec;
{
Token* pToken;
bool ignore;
};
typedef std::vector<TokenInfo> TokenVec;
TokenVec _tokens; TokenVec _tokens;
std::istream* _pIstr; std::istream* _pIstr;

View File

@@ -53,7 +53,7 @@ public:
USER_TOKEN USER_TOKEN
}; };
Token(); Token(bool ignore = false);
/// Creates the Token. /// Creates the Token.
virtual ~Token(); virtual ~Token();
@@ -108,8 +108,20 @@ public:
bool is(Class tokenClass) const; bool is(Class tokenClass) const;
/// Returns true iff the token has the given class. /// Returns true iff the token has the given class.
void ignore(bool ignored);
/// If ignored is true, the token will be marked
/// as ignorable, which means that next() will
/// not return it.
/// If ignored is false, the token will be marked
/// as acceptable, which means that next() will
/// return it.
bool ignored() const;
/// Returns true iff the token is marked as ignorable.
protected: protected:
std::string _value; std::string _value;
bool _ignored;
private: private:
Token(const Token&); Token(const Token&);
@@ -118,7 +130,7 @@ private:
class Foundation_API InvalidToken: public Token class Foundation_API InvalidToken: public Token
/// This token class is used for signalling that /// This token class is used for signaling that
/// an invalid character sequence has been encountered /// an invalid character sequence has been encountered
/// in the input stream. /// in the input stream.
{ {

View File

@@ -15,6 +15,7 @@
#include "Poco/StreamTokenizer.h" #include "Poco/StreamTokenizer.h"
#include "Poco/Token.h"
namespace Poco { namespace Poco {
@@ -36,7 +37,7 @@ StreamTokenizer::~StreamTokenizer()
{ {
for (TokenVec::iterator it = _tokens.begin(); it != _tokens.end(); ++it) for (TokenVec::iterator it = _tokens.begin(); it != _tokens.end(); ++it)
{ {
delete it->pToken; delete *it;
} }
} }
@@ -51,10 +52,8 @@ void StreamTokenizer::addToken(Token* pToken)
{ {
poco_check_ptr (pToken); poco_check_ptr (pToken);
TokenInfo ti; pToken->ignore(pToken->tokenClass() == Token::COMMENT_TOKEN || pToken->tokenClass() == Token::WHITESPACE_TOKEN);
ti.pToken = pToken; _tokens.push_back(pToken);
ti.ignore = (pToken->tokenClass() == Token::COMMENT_TOKEN || pToken->tokenClass() == Token::WHITESPACE_TOKEN);
_tokens.push_back(ti);
} }
@@ -62,10 +61,8 @@ void StreamTokenizer::addToken(Token* pToken, bool ignore)
{ {
poco_check_ptr (pToken); poco_check_ptr (pToken);
TokenInfo ti; pToken->ignore(ignore);
ti.pToken = pToken; _tokens.push_back(pToken);
ti.ignore = ignore;
_tokens.push_back(ti);
} }
@@ -79,16 +76,16 @@ const Token* StreamTokenizer::next()
TokenVec::const_iterator it = _tokens.begin(); TokenVec::const_iterator it = _tokens.begin();
while (first != eof && it != _tokens.end()) while (first != eof && it != _tokens.end())
{ {
const TokenInfo& ti = *it; Token* ti = *it;
if (ti.pToken->start((char) first, *_pIstr)) if (ti->start((char) first, *_pIstr))
{ {
ti.pToken->finish(*_pIstr); ti->finish(*_pIstr);
if (ti.ignore) if (ti->ignored())
{ {
first = _pIstr->get(); first = _pIstr->get();
it = _tokens.begin(); it = _tokens.begin();
} }
else return ti.pToken; else return *it;
} }
else ++it; else ++it;
} }

View File

@@ -22,7 +22,7 @@
namespace Poco { namespace Poco {
Token::Token() Token::Token(bool ignore) : _ignored(ignore)
{ {
} }
@@ -94,6 +94,17 @@ char Token::asChar() const
} }
/// Marks the token as ignorable (true) or acceptable (false).
/// StreamTokenizer::next() skips tokens whose ignored() flag is set.
void Token::ignore(bool ignored)
{
_ignored = ignored;
}
/// Returns the current ignore flag, as set by the constructor
/// or by a later call to ignore().
bool Token::ignored() const
{
return _ignored;
}
InvalidToken::InvalidToken() InvalidToken::InvalidToken()
{ {
} }
@@ -126,7 +137,7 @@ Token::Class EOFToken::tokenClass() const
} }
WhitespaceToken::WhitespaceToken() WhitespaceToken::WhitespaceToken() : Token(true)
{ {
} }

View File

@@ -265,6 +265,66 @@ void StreamTokenizerTest::testTokenizer7()
} }
void StreamTokenizerTest::testTokenizer8()
{
std::string data = " foo 123 ";
std::istringstream istr(data);
StreamTokenizer tokenizer(istr);
Token* whitespace = new WhitespaceToken();
tokenizer.addToken(whitespace);
tokenizer.addToken(new IdentifierToken());
tokenizer.addToken(new IntLiteralToken());
const Token* next = tokenizer.next();
assert(next->tokenClass() == Token::IDENTIFIER_TOKEN);
assert(next->tokenString() == "foo");
whitespace->ignore(false);
next = tokenizer.next();
assert(next->tokenClass() == Token::WHITESPACE_TOKEN);
assert(next->asString() == " ");
next = tokenizer.next();
assert(next->tokenClass() == Token::INTEGER_LITERAL_TOKEN);
assert(next->asInteger() == 123);
next = tokenizer.next();
assert(next->tokenClass() == Token::WHITESPACE_TOKEN);
assert(next->asString() == " ");
next = tokenizer.next();
assert(next->tokenClass() == Token::EOF_TOKEN);
}
void StreamTokenizerTest::testTokenizer9()
{
std::string data = " foo 123 ";
std::istringstream istr(data);
StreamTokenizer tokenizer(istr);
Token* whitespace = new WhitespaceToken();
tokenizer.addToken(whitespace);
tokenizer.addToken(new IdentifierToken());
tokenizer.addToken(new IntLiteralToken());
const Token* next = tokenizer.next();
assert(next->tokenClass() == Token::IDENTIFIER_TOKEN);
assert(next->tokenString() == "foo");
whitespace->ignore(false);
next = tokenizer.next();
assert(next->tokenClass() == Token::WHITESPACE_TOKEN);
assert(next->asString() == " ");
next = tokenizer.next();
assert(next->tokenClass() == Token::INTEGER_LITERAL_TOKEN);
assert(next->asInteger() == 123);
whitespace->ignore(true);
next = tokenizer.next();
assert(next->tokenClass() == Token::EOF_TOKEN);
}
void StreamTokenizerTest::setUp() void StreamTokenizerTest::setUp()
{ {
} }
@@ -286,6 +346,8 @@ CppUnit::Test* StreamTokenizerTest::suite()
CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer5); CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer5);
CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer6); CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer6);
CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer7); CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer7);
CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer8);
CppUnit_addTest(pSuite, StreamTokenizerTest, testTokenizer9);
return pSuite; return pSuite;
} }

View File

@@ -33,6 +33,8 @@ public:
void testTokenizer5(); void testTokenizer5();
void testTokenizer6(); void testTokenizer6();
void testTokenizer7(); void testTokenizer7();
void testTokenizer8();
void testTokenizer9();
void setUp(); void setUp();
void tearDown(); void tearDown();

View File

@@ -57,9 +57,11 @@ namespace
do do
{ {
n = ws.receiveFrame(pBuffer.get(), _bufSize, flags); n = ws.receiveFrame(pBuffer.get(), _bufSize, flags);
if (n == 0)
break;
ws.sendFrame(pBuffer.get(), n, flags); ws.sendFrame(pBuffer.get(), n, flags);
} }
while (n > 0 || (flags & WebSocket::FRAME_OP_BITMASK) != WebSocket::FRAME_OP_CLOSE); while ((flags & WebSocket::FRAME_OP_BITMASK) != WebSocket::FRAME_OP_CLOSE);
} }
catch (WebSocketException& exc) catch (WebSocketException& exc)
{ {