Mirror of https://github.com/pocoproject/poco.git (synced 2025-10-16 18:56:52 +02:00)
added StringTokenizer::find()
@@ -58,13 +58,14 @@ public:
    enum Options
    {
        TOK_IGNORE_EMPTY = 1, /// ignore empty tokens
        TOK_TRIM = 2 /// remove leading and trailing whitespace from tokens
    };

-   typedef std::vector<std::string>::const_iterator Iterator;
+   typedef std::vector<std::string> TokenVec;
+   typedef TokenVec::const_iterator Iterator;

    StringTokenizer(const std::string& str, const std::string& separators, int options = 0);
        /// Splits the given string into tokens. The tokens are expected to be
        /// separated by one of the separator characters given in separators.
        /// Additionally, options can be specified:
        /// * TOK_IGNORE_EMPTY: empty tokens are ignored
@@ -78,20 +79,40 @@ public:
        /// Destroys the tokenizer.

    Iterator begin() const;
    Iterator end() const;

    const std::string& operator [] (std::size_t index) const;
-       /// Returns the index'th token.
+       /// Returns const reference the index'th token.
        /// Throws a RangeException if the index is out of range.

-   std::size_t count() const;
-       /// Returns the number of tokens.
+   std::string& operator [] (std::size_t index);
+       /// Returns reference to the index'th token.
+       /// Throws a RangeException if the index is out of range.

+   bool has(const std::string& token) const;
+       /// Returns true if token exists, false otherwise.
+
+   std::size_t find(const std::string& token, std::size_t pos = 0) const;
+       /// Returns the index of the first occurence of the token
+       /// starting at position pos.
+       /// Throws a NotFoundException if the token is not found.
+
+   std::size_t replace(const std::string& oldToken, const std::string& newToken, std::size_t pos = 0);
+       /// Starting at position pos, replaces all subsequent tokens having value
+       /// equal to oldToken with newToken.
+       /// Returns the number of modified tokens.
+
+   std::size_t count() const;
+       /// Returns the total number of tokens.
+
+   std::size_t count(const std::string& token) const;
+       /// Returns the number of tokens equal to the specified token.
+
private:
    StringTokenizer(const StringTokenizer&);
    StringTokenizer& operator = (const StringTokenizer&);

-   std::vector<std::string> _tokens;
+   TokenVec _tokens;
};

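The hunk above adds has(), find(), replace(), and a per-token count() overload to the public interface of Poco::StringTokenizer. The following is a minimal usage sketch based only on the declarations and doc comments shown here; the input string and the values noted in the comments are illustrative and are not part of the commit.

#include "Poco/StringTokenizer.h"
#include "Poco/Exception.h"
#include <iostream>

int main()
{
    using Poco::StringTokenizer;

    StringTokenizer st("red, green, , blue, green", ",",
        StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
    // tokens: "red", "green", "blue", "green"

    std::cout << st.count() << std::endl;           // total number of tokens: 4
    std::cout << st.count("green") << std::endl;    // tokens equal to "green": 2
    std::cout << st.has("blue") << std::endl;       // 1 (true)
    std::cout << st.find("green") << std::endl;     // index of the first "green": 1
    std::cout << st.find("green", 2) << std::endl;  // first "green" at or after index 2: 3

    try
    {
        st.find("yellow"); // no such token
    }
    catch (Poco::NotFoundException&)
    {
        std::cout << "not found" << std::endl;
    }

    std::size_t n = st.replace("green", "teal"); // replace every "green" from index 0 on
    std::cout << n << std::endl;                 // 2 tokens modified
    return 0;
}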
@@ -112,9 +133,16 @@ inline StringTokenizer::Iterator StringTokenizer::end() const
}


+inline std::string& StringTokenizer::operator [] (std::size_t index)
+{
+   if (index >= _tokens.size()) throw RangeException();
+   return _tokens[index];
+}
+
+
inline const std::string& StringTokenizer::operator [] (std::size_t index) const
{
    if (index >= _tokens.size()) throw RangeException();
    return _tokens[index];
}

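This hunk implements the new non-const operator[], which returns a mutable reference so individual tokens can be overwritten in place; out-of-range access still throws RangeException. A small sketch, again illustrative rather than part of the commit:

#include "Poco/StringTokenizer.h"
#include <cassert>

int main()
{
    Poco::StringTokenizer st("a,b,c", ",");

    st[1] = "B";                  // assign through the non-const accessor
    assert (st[1] == "B");
    assert (st.count("b") == 0);  // the old value is gone
    assert (st.count("B") == 1);
    return 0;
}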
@@ -1,7 +1,7 @@
//
// StringTokenizerTest.cpp
//
-// $Id: //poco/1.4/Foundation/testsuite/src/StringTokenizerTest.cpp#1 $
+// $Id: //poco/svn/Foundation/testsuite/src/StringTokenizerTest.cpp#2 $
//
// Copyright (c) 2004-2006, Applied Informatics Software Engineering GmbH.
// and Contributors.
@@ -34,9 +34,12 @@
#include "CppUnit/TestCaller.h"
#include "CppUnit/TestSuite.h"
#include "Poco/StringTokenizer.h"
+#include "Poco/Exception.h"


using Poco::StringTokenizer;
+using Poco::RangeException;
+using Poco::NotFoundException;


StringTokenizerTest::StringTokenizerTest(const std::string& name): CppUnit::TestCase(name)
@@ -67,146 +70,178 @@ void StringTokenizerTest::testStringTokenizer()
        StringTokenizer st("", "", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        assert (st.begin() == st.end());
    }
    {
        StringTokenizer st("abc", "");
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("abc") == 0);
        assert (it != st.end());
        assert (*it++ == "abc");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc ", "", StringTokenizer::TOK_TRIM);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("abc") == 0);
        assert (it != st.end());
        assert (*it++ == "abc");
        assert (it == st.end());
    }
    {
        StringTokenizer st(" abc ", "", StringTokenizer::TOK_TRIM);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("abc") == 0);
        assert (it != st.end());
        assert (*it++ == "abc");
        assert (it == st.end());
    }
    {
        StringTokenizer st(" abc", "", StringTokenizer::TOK_TRIM);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("abc") == 0);
        assert (it != st.end());
        assert (*it++ == "abc");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc", "b");
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
+       assert (st.find("c") == 1);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it != st.end());
        assert (*it++ == "c");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc", "b", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
+       assert (st.find("c") == 1);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it != st.end());
        assert (*it++ == "c");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc", "bc");
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
+       assert (st.find("") == 1);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it != st.end());
        assert (*it++ == "");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc", "bc", StringTokenizer::TOK_TRIM);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
+       assert (st.find("") == 1);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it != st.end());
        assert (*it++ == "");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc", "bc", StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc", "bc", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc", "bc", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it == st.end());
    }
    {
        StringTokenizer st("a a,c c", ",");
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a a") == 0);
+       assert (st.find("c c") == 1);
        assert (it != st.end());
        assert (*it++ == "a a");
        assert (it != st.end());
        assert (*it++ == "c c");
        assert (it == st.end());
    }
    {
        StringTokenizer st("a a,c c", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a a") == 0);
+       assert (st.find("c c") == 1);
        assert (it != st.end());
        assert (*it++ == "a a");
        assert (it != st.end());
        assert (*it++ == "c c");
        assert (it == st.end());
    }
    {
        StringTokenizer st(" a a , , c c ", ",");
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find(" a a ") == 0);
+       assert (st.find(" ") == 1);
+       assert (st.find(" c c ") == 2);
        assert (it != st.end());
        assert (*it++ == " a a ");
        assert (it != st.end());
        assert (*it++ == " ");
        assert (it != st.end());
        assert (*it++ == " c c ");
        assert (it == st.end());
    }
    {
        StringTokenizer st(" a a , , c c ", ",", StringTokenizer::TOK_TRIM);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a a") == 0);
+       assert (st.find("") == 1);
+       assert (st.find("c c") == 2);
        assert (it != st.end());
        assert (*it++ == "a a");
        assert (it != st.end());
        assert (*it++ == "");
        assert (it != st.end());
        assert (*it++ == "c c");
        assert (it == st.end());
    }
    {
        StringTokenizer st(" a a , , c c ", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a a") == 0);
+       assert (st.find("c c") == 1);
        assert (it != st.end());
        assert (*it++ == "a a");
        assert (it != st.end());
        assert (*it++ == "c c");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc,def,,ghi , jk, l ", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("abc") == 0);
+       assert (st.find("def") == 1);
+       assert (st.find("ghi") == 2);
+       assert (st.find("jk") == 3);
+       assert (st.find("l") == 4);
        assert (it != st.end());
        assert (*it++ == "abc");
        assert (it != st.end());
        assert (*it++ == "def");
        assert (it != st.end());
        assert (*it++ == "ghi");
@@ -216,12 +251,17 @@ void StringTokenizerTest::testStringTokenizer()
        assert (*it++ == "l");
        assert (it == st.end());
    }
    {
        StringTokenizer st("abc,def,,ghi // jk, l ", ",/", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("abc") == 0);
+       assert (st.find("def") == 1);
+       assert (st.find("ghi") == 2);
+       assert (st.find("jk") == 3);
+       assert (st.find("l") == 4);
        assert (it != st.end());
        assert (*it++ == "abc");
        assert (it != st.end());
        assert (*it++ == "def");
        assert (it != st.end());
        assert (*it++ == "ghi");
@@ -231,12 +271,18 @@ void StringTokenizerTest::testStringTokenizer()
        assert (*it++ == "l");
        assert (it == st.end());
    }
    {
        StringTokenizer st("a/bc,def,,ghi // jk, l ", ",/", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("a") == 0);
+       assert (st.find("bc") == 1);
+       assert (st.find("def") == 2);
+       assert (st.find("ghi") == 3);
+       assert (st.find("jk") == 4);
+       assert (st.find("l") == 5);
        assert (it != st.end());
        assert (*it++ == "a");
        assert (it != st.end());
        assert (*it++ == "bc");
        assert (it != st.end());
        assert (*it++ == "def");
@@ -248,42 +294,125 @@ void StringTokenizerTest::testStringTokenizer()
        assert (*it++ == "l");
        assert (it == st.end());
    }
    {
        StringTokenizer st(",ab,cd,", ",");
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("") == 0);
+       assert (st.find("ab") == 1);
+       assert (st.find("cd") == 2);
        assert (it != st.end());
        assert (*it++ == "");
        assert (it != st.end());
        assert (*it++ == "ab");
        assert (it != st.end());
        assert (*it++ == "cd");
        assert (it == st.end());
    }
    {
        StringTokenizer st(",ab,cd,", ",", StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("ab") == 0);
+       assert (st.find("cd") == 1);
        assert (it != st.end());
        assert (*it++ == "ab");
        assert (it != st.end());
        assert (*it++ == "cd");
        assert (it == st.end());
    }
    {
        StringTokenizer st(" , ab , cd , ", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        StringTokenizer::Iterator it = st.begin();
+       assert (st.find("ab") == 0);
+       assert (st.find("cd") == 1);
        assert (it != st.end());
        assert (*it++ == "ab");
        assert (it != st.end());
        assert (*it++ == "cd");
        assert (it == st.end());
    }
    {
        StringTokenizer st("1 : 2 , : 3 ", ":,", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        assert (st.count() == 3);
        assert (st[0] == "1");
        assert (st[1] == "2");
        assert (st[2] == "3");
+       assert (st.find("1") == 0);
+       assert (st.find("2") == 1);
+       assert (st.find("3") == 2);
    }
+}
+
+
+void StringTokenizerTest::testFind()
+{
+   StringTokenizer st("0,1,2,3,3,2,1,0", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
+
+   assert (st.count() == 8);
+   assert (2 == st.count("0"));
+   assert (2 == st.count("1"));
+   assert (2 == st.count("2"));
+   assert (2 == st.count("3"));
+   assert (0 == st.count("4"));
+   assert (0 == st.count("5"));
+   assert (0 == st.count("6"));
+   assert (0 == st.count("7"));
+
+   assert (st[0] == "0");
+   assert (st[1] == "1");
+   assert (st[2] == "2");
+   assert (st[3] == "3");
+   assert (st[4] == "3");
+   assert (st[5] == "2");
+   assert (st[6] == "1");
+   assert (st[7] == "0");
+
+   assert (st.has("0"));
+   assert (st.has("1"));
+   assert (st.has("2"));
+   assert (st.has("3"));
+   assert (!st.has("4"));
+   assert (!st.has("5"));
+   assert (!st.has("6"));
+   assert (!st.has("7"));
+
+   assert (st.find("0") == 0);
+   assert (st.find("1") == 1);
+   assert (st.find("2") == 2);
+   assert (st.find("3") == 3);
+
+   assert (st.find("0", 1) == 7);
+   assert (st.find("1", 2) == 6);
+   assert (st.find("2", 3) == 5);
+   assert (st.find("3", 4) == 4);
+
+   try
+   {
+       std::size_t p = st.find("4");
+       fail ("must fail");
+   }
+   catch (NotFoundException&) { }
+
+   try
+   {
+       std::string s = st[8];
+       fail ("must fail");
+   }
+   catch (RangeException&) { }
+
+   st[0] = "1";
+   st[7] = "1";
+   assert (st[0] == "1");
+   assert (st[7] == "1");
+   assert (0 == st.count("0"));
+   assert (4 == st.count("1"));
+
+   st.replace("2", "5");
+   assert (0 == st.count("2"));
+   assert (2 == st.count("5"));
+
+   st.replace("3", "6", 4);
+   assert (1 == st.count("3"));
+   assert (1 == st.count("6"));
}


@@ -299,9 +428,10 @@ void StringTokenizerTest::tearDown()

CppUnit::Test* StringTokenizerTest::suite()
{
    CppUnit::TestSuite* pSuite = new CppUnit::TestSuite("StringTokenizerTest");

    CppUnit_addTest(pSuite, StringTokenizerTest, testStringTokenizer);
+   CppUnit_addTest(pSuite, StringTokenizerTest, testFind);

    return pSuite;
}