Mirror of https://github.com/pocoproject/poco.git (synced 2024-12-13 18:45:10 +01:00)

replaced indentation spaces with tabs

Commit: fae6d780b7
Parent: a68b9a114f
StringTokenizer.h:

@@ -58,14 +58,14 @@ public:
	enum Options
	{
		TOK_IGNORE_EMPTY = 1, /// ignore empty tokens
		TOK_TRIM         = 2  /// remove leading and trailing whitespace from tokens
	};

	typedef std::vector<std::string> TokenVec;
	typedef TokenVec::const_iterator Iterator;

	StringTokenizer(const std::string& str, const std::string& separators, int options = 0);
		/// Splits the given string into tokens. The tokens are expected to be
		/// separated by one of the separator characters given in separators.
		/// Additionally, options can be specified:
		///   * TOK_IGNORE_EMPTY: empty tokens are ignored
@@ -79,40 +79,40 @@ public:
		/// Destroys the tokenizer.

	Iterator begin() const;
	Iterator end() const;

	const std::string& operator [] (std::size_t index) const;
		/// Returns const reference to the index'th token.
		/// Throws a RangeException if the index is out of range.

	std::string& operator [] (std::size_t index);
		/// Returns reference to the index'th token.
		/// Throws a RangeException if the index is out of range.

	bool has(const std::string& token) const;
		/// Returns true if token exists, false otherwise.

	std::size_t find(const std::string& token, std::size_t pos = 0) const;
		/// Returns the index of the first occurrence of the token
		/// starting at position pos.
		/// Throws a NotFoundException if the token is not found.

	std::size_t replace(const std::string& oldToken, const std::string& newToken, std::size_t pos = 0);
		/// Starting at position pos, replaces all subsequent tokens having value
		/// equal to oldToken with newToken.
		/// Returns the number of modified tokens.

	std::size_t count() const;
		/// Returns the total number of tokens.

	std::size_t count(const std::string& token) const;
		/// Returns the number of tokens equal to the specified token.

private:
	StringTokenizer(const StringTokenizer&);
	StringTokenizer& operator = (const StringTokenizer&);

	TokenVec _tokens;
};
@@ -135,14 +135,14 @@ inline StringTokenizer::Iterator StringTokenizer::end() const

inline std::string& StringTokenizer::operator [] (std::size_t index)
{
	if (index >= _tokens.size()) throw RangeException();
	return _tokens[index];
}


inline const std::string& StringTokenizer::operator [] (std::size_t index) const
{
	if (index >= _tokens.size()) throw RangeException();
	return _tokens[index];
}
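As background for the header excerpt above, here is a minimal usage sketch (not part of the commit; it assumes the usual Poco/StringTokenizer.h include path and the option flags declared above):

#include <Poco/StringTokenizer.h>
#include <iostream>

int main()
{
	using Poco::StringTokenizer;

	// Split on ',' or ';', trim whitespace, and drop empty tokens.
	StringTokenizer tok("red, green ; ; blue", ",;",
		StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);

	for (StringTokenizer::Iterator it = tok.begin(); it != tok.end(); ++it)
		std::cout << *it << '\n';     // prints "red", "green", "blue"

	std::cout << tok.count() << '\n'; // 3
	return 0;
}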
StringTokenizer.cpp:

@@ -5,7 +5,7 @@
//
// Library: Foundation
// Package: Core
// Module: StringTokenizer
//
// Copyright (c) 2004-2006, Applied Informatics Software Engineering GmbH.
// and Contributors.
@@ -80,44 +80,44 @@ StringTokenizer::StringTokenizer(const std::string& str, const std::string& sepa

std::size_t StringTokenizer::count(const std::string& token) const
{
	std::size_t result = 0;
	TokenVec::const_iterator it = std::find(_tokens.begin(), _tokens.end(), token);
	while(it != _tokens.end())
	{
		result++;
		it = std::find(++it, _tokens.end(), token);
	}
	return result;
}


std::size_t StringTokenizer::find(const std::string& token, std::size_t pos) const
{
	TokenVec::const_iterator it = std::find(_tokens.begin() + pos, _tokens.end(), token);
	if ( it != _tokens.end() )
	{
		return it - _tokens.begin();
	}
	throw NotFoundException(token);
}


bool StringTokenizer::has(const std::string& token) const
{
	TokenVec::const_iterator it = std::find(_tokens.begin(), _tokens.end(), token);
	return it != _tokens.end();
}


std::size_t StringTokenizer::replace(const std::string& oldToken, const std::string& newToken, std::size_t pos)
{
	std::size_t result = 0;
	TokenVec::iterator it = std::find(_tokens.begin() + pos, _tokens.end(), oldToken);
	while(it != _tokens.end())
	{
		result++;
		*it = newToken;
		it = std::find(++it, _tokens.end(), oldToken);
	}
	return result;
}


StringTokenizer::~StringTokenizer()
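count(token) and replace() above share one scan idiom: call std::find, then resume the search just past each hit. The sketch below (illustrative only; the countOccurrences helper is not part of POCO) shows the same loop on a plain vector; for pure counting, std::count would return the same value:

#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

// Illustrative helper: count occurrences by repeatedly advancing std::find,
// the same loop shape used by StringTokenizer::count(token) above.
std::size_t countOccurrences(const std::vector<std::string>& v, const std::string& token)
{
	std::size_t result = 0;
	std::vector<std::string>::const_iterator it = std::find(v.begin(), v.end(), token);
	while (it != v.end())
	{
		result++;
		it = std::find(++it, v.end(), token); // continue after the current hit
	}
	return result; // same value as std::count(v.begin(), v.end(), token)
}

int main()
{
	std::vector<std::string> v;
	v.push_back("a");
	v.push_back("b");
	v.push_back("a");
	assert (countOccurrences(v, "a") == 2);
	assert (countOccurrences(v, "c") == 0);
	return 0;
}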
StringTokenizerTest.cpp:

@@ -70,178 +70,178 @@ void StringTokenizerTest::testStringTokenizer()
		StringTokenizer st("", "", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		assert (st.begin() == st.end());
	}
	{
		StringTokenizer st("abc", "");
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("abc") == 0);
		assert (it != st.end());
		assert (*it++ == "abc");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc ", "", StringTokenizer::TOK_TRIM);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("abc") == 0);
		assert (it != st.end());
		assert (*it++ == "abc");
		assert (it == st.end());
	}
	{
		StringTokenizer st(" abc ", "", StringTokenizer::TOK_TRIM);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("abc") == 0);
		assert (it != st.end());
		assert (*it++ == "abc");
		assert (it == st.end());
	}
	{
		StringTokenizer st(" abc", "", StringTokenizer::TOK_TRIM);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("abc") == 0);
		assert (it != st.end());
		assert (*it++ == "abc");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc", "b");
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (st.find("c") == 1);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it != st.end());
		assert (*it++ == "c");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc", "b", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (st.find("c") == 1);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it != st.end());
		assert (*it++ == "c");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc", "bc");
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (st.find("") == 1);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it != st.end());
		assert (*it++ == "");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc", "bc", StringTokenizer::TOK_TRIM);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (st.find("") == 1);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it != st.end());
		assert (*it++ == "");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc", "bc", StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc", "bc", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc", "bc", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it == st.end());
	}
	{
		StringTokenizer st("a a,c c", ",");
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a a") == 0);
		assert (st.find("c c") == 1);
		assert (it != st.end());
		assert (*it++ == "a a");
		assert (it != st.end());
		assert (*it++ == "c c");
		assert (it == st.end());
	}
	{
		StringTokenizer st("a a,c c", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a a") == 0);
		assert (st.find("c c") == 1);
		assert (it != st.end());
		assert (*it++ == "a a");
		assert (it != st.end());
		assert (*it++ == "c c");
		assert (it == st.end());
	}
	{
		StringTokenizer st(" a a , , c c ", ",");
		StringTokenizer::Iterator it = st.begin();
		assert (st.find(" a a ") == 0);
		assert (st.find(" ") == 1);
		assert (st.find(" c c ") == 2);
		assert (it != st.end());
		assert (*it++ == " a a ");
		assert (it != st.end());
		assert (*it++ == " ");
		assert (it != st.end());
		assert (*it++ == " c c ");
		assert (it == st.end());
	}
	{
		StringTokenizer st(" a a , , c c ", ",", StringTokenizer::TOK_TRIM);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a a") == 0);
		assert (st.find("") == 1);
		assert (st.find("c c") == 2);
		assert (it != st.end());
		assert (*it++ == "a a");
		assert (it != st.end());
		assert (*it++ == "");
		assert (it != st.end());
		assert (*it++ == "c c");
		assert (it == st.end());
	}
	{
		StringTokenizer st(" a a , , c c ", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a a") == 0);
		assert (st.find("c c") == 1);
		assert (it != st.end());
		assert (*it++ == "a a");
		assert (it != st.end());
		assert (*it++ == "c c");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc,def,,ghi , jk, l ", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("abc") == 0);
		assert (st.find("def") == 1);
		assert (st.find("ghi") == 2);
		assert (st.find("jk") == 3);
		assert (st.find("l") == 4);
		assert (it != st.end());
		assert (*it++ == "abc");
		assert (it != st.end());
		assert (*it++ == "def");
		assert (it != st.end());
		assert (*it++ == "ghi");
@@ -250,18 +250,18 @@ void StringTokenizerTest::testStringTokenizer()
		assert (it != st.end());
		assert (*it++ == "l");
		assert (it == st.end());
	}
	{
		StringTokenizer st("abc,def,,ghi // jk, l ", ",/", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("abc") == 0);
		assert (st.find("def") == 1);
		assert (st.find("ghi") == 2);
		assert (st.find("jk") == 3);
		assert (st.find("l") == 4);
		assert (it != st.end());
		assert (*it++ == "abc");
		assert (it != st.end());
		assert (*it++ == "def");
		assert (it != st.end());
		assert (*it++ == "ghi");
@@ -270,19 +270,19 @@ void StringTokenizerTest::testStringTokenizer()
		assert (it != st.end());
		assert (*it++ == "l");
		assert (it == st.end());
	}
	{
		StringTokenizer st("a/bc,def,,ghi // jk, l ", ",/", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("a") == 0);
		assert (st.find("bc") == 1);
		assert (st.find("def") == 2);
		assert (st.find("ghi") == 3);
		assert (st.find("jk") == 4);
		assert (st.find("l") == 5);
		assert (it != st.end());
		assert (*it++ == "a");
		assert (it != st.end());
		assert (*it++ == "bc");
		assert (it != st.end());
		assert (*it++ == "def");
@@ -294,126 +294,125 @@ void StringTokenizerTest::testStringTokenizer()
		assert (*it++ == "l");
		assert (it == st.end());
	}
	{
		StringTokenizer st(",ab,cd,", ",");
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("") == 0);
		assert (st.find("ab") == 1);
		assert (st.find("cd") == 2);
		assert (it != st.end());
		assert (*it++ == "");
		assert (it != st.end());
		assert (*it++ == "ab");
		assert (it != st.end());
		assert (*it++ == "cd");
		assert (it == st.end());
	}
	{
		StringTokenizer st(",ab,cd,", ",", StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("ab") == 0);
		assert (st.find("cd") == 1);
		assert (it != st.end());
		assert (*it++ == "ab");
		assert (it != st.end());
		assert (*it++ == "cd");
		assert (it == st.end());
	}
	{
		StringTokenizer st(" , ab , cd , ", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		StringTokenizer::Iterator it = st.begin();
		assert (st.find("ab") == 0);
		assert (st.find("cd") == 1);
		assert (it != st.end());
		assert (*it++ == "ab");
		assert (it != st.end());
		assert (*it++ == "cd");
		assert (it == st.end());
	}
	{
		StringTokenizer st("1 : 2 , : 3 ", ":,", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
		assert (st.count() == 3);
		assert (st[0] == "1");
		assert (st[1] == "2");
		assert (st[2] == "3");
		assert (st.find("1") == 0);
		assert (st.find("2") == 1);
		assert (st.find("3") == 2);
	}
}


void StringTokenizerTest::testFind()
{
	StringTokenizer st("0,1,2,3,3,2,1,0", ",", StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);

	assert (st.count() == 8);
	assert (2 == st.count("0"));
	assert (2 == st.count("1"));
	assert (2 == st.count("2"));
	assert (2 == st.count("3"));
	assert (0 == st.count("4"));
	assert (0 == st.count("5"));
	assert (0 == st.count("6"));
	assert (0 == st.count("7"));

	assert (st[0] == "0");
	assert (st[1] == "1");
	assert (st[2] == "2");
	assert (st[3] == "3");
	assert (st[4] == "3");
	assert (st[5] == "2");
	assert (st[6] == "1");
	assert (st[7] == "0");

	assert (st.has("0"));
	assert (st.has("1"));
	assert (st.has("2"));
	assert (st.has("3"));
	assert (!st.has("4"));
	assert (!st.has("5"));
	assert (!st.has("6"));
	assert (!st.has("7"));

	assert (st.find("0") == 0);
	assert (st.find("1") == 1);
	assert (st.find("2") == 2);
	assert (st.find("3") == 3);

	assert (st.find("0", 1) == 7);
	assert (st.find("1", 2) == 6);
	assert (st.find("2", 3) == 5);
	assert (st.find("3", 4) == 4);

	try
	{
		std::size_t p = st.find("4");
		fail ("must fail");
	}
	catch (NotFoundException&) { }

	try
	{
		std::string s = st[8];
		fail ("must fail");
	}
	catch (RangeException&) { }

	st[0] = "1";
	st[7] = "1";
	assert (st[0] == "1");
	assert (st[7] == "1");
	assert (0 == st.count("0"));
	assert (4 == st.count("1"));

	st.replace("2", "5");
	assert (0 == st.count("2"));
	assert (2 == st.count("5"));

	st.replace("3", "6", 4);
	assert (1 == st.count("3"));
	assert (1 == st.count("6"));
}
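Several testStringTokenizer cases above pin down how the two option flags interact: TOK_TRIM alone reduces a whitespace-only field to an empty token, while adding TOK_IGNORE_EMPTY drops it. A small self-contained check of that behavior (not part of the test suite; assumes the usual Poco include path):

#include <Poco/StringTokenizer.h>
#include <cassert>

int main()
{
	using Poco::StringTokenizer;

	// TOK_TRIM alone: the blank middle field becomes an empty token.
	StringTokenizer trimmed(" a a , , c c ", ",", StringTokenizer::TOK_TRIM);
	assert (trimmed.count() == 3);
	assert (trimmed[1] == "");

	// TOK_TRIM | TOK_IGNORE_EMPTY: the empty token is dropped entirely.
	StringTokenizer cleaned(" a a , , c c ", ",",
		StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
	assert (cleaned.count() == 2);
	assert (cleaned[1] == "c c");
	return 0;
}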
@@ -429,10 +428,10 @@ void StringTokenizerTest::tearDown()

CppUnit::Test* StringTokenizerTest::suite()
{
	CppUnit::TestSuite* pSuite = new CppUnit::TestSuite("StringTokenizerTest");

	CppUnit_addTest(pSuite, StringTokenizerTest, testStringTokenizer);
	CppUnit_addTest(pSuite, StringTokenizerTest, testFind);

	return pSuite;
}