Mirror of https://github.com/pocoproject/poco.git (synced 2025-10-26 18:42:41 +01:00)
fixed GH #1141: Poco::StringTokenizer::TOK_TRIM changes behavior between 1.4 and 1.6
Conflicts:
    Foundation/include/Poco/StringTokenizer.h
    Foundation/src/StringTokenizer.cpp
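The behavior restored here: with TOK_TRIM alone, a token that trims down to empty is still reported; it is only dropped when TOK_IGNORE_EMPTY is also requested. A minimal sketch of that semantics, mirroring the new test case below and assuming a build that includes this fix:

    // TOK_TRIM keeps an empty (fully trimmed) trailing token;
    // TOK_IGNORE_EMPTY is what drops it.
    #include <cassert>
    #include "Poco/StringTokenizer.h"

    int main()
    {
        using Poco::StringTokenizer;

        StringTokenizer st1(" 2- ", "-", StringTokenizer::TOK_TRIM);
        assert(st1.count() == 2);
        assert(st1[0] == "2");
        assert(st1[1] == "");

        StringTokenizer st2(" 2- ", "-",
            StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        assert(st2.count() == 1);
        assert(st2[0] == "2");

        return 0;
    }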
Foundation/include/Poco/StringTokenizer.h
@@ -68,12 +68,12 @@ public:
     bool has(const std::string& token) const;
         /// Returns true if token exists, false otherwise.
 
-    std::size_t find(const std::string& token, std::size_t pos = 0) const;
+    std::string::size_type find(const std::string& token, std::string::size_type pos = 0) const;
         /// Returns the index of the first occurrence of the token
         /// starting at position pos.
         /// Throws a NotFoundException if the token is not found.
 
-    std::size_t replace(const std::string& oldToken, const std::string& newToken, std::size_t pos = 0);
+    std::size_t replace(const std::string& oldToken, const std::string& newToken, std::string::size_type pos = 0);
         /// Starting at position pos, replaces all subsequent tokens having value
         /// equal to oldToken with newToken.
         /// Returns the number of modified tokens.
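A short usage sketch of the API as declared above (token values are illustrative; assumes a build with this fix): find() now takes and returns std::string::size_type and throws Poco::NotFoundException when the token is absent, while replace() still returns the number of tokens it rewrote as std::size_t.

    #include <iostream>
    #include "Poco/Exception.h"
    #include "Poco/StringTokenizer.h"

    int main()
    {
        Poco::StringTokenizer st("red,green,blue,green", ",");

        if (st.has("green"))
        {
            std::string::size_type first = st.find("green");            // 1
            std::string::size_type next  = st.find("green", first + 1); // 3
            std::cout << first << " " << next << "\n";
        }

        std::size_t changed = st.replace("green", "teal");  // rewrites 2 tokens
        std::cout << changed << " " << st[1] << " " << st[3] << "\n";

        try
        {
            st.find("green");  // every "green" was replaced, so this throws
        }
        catch (const Poco::NotFoundException&)
        {
            std::cout << "no more green tokens\n";
        }
        return 0;
    }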
Foundation/src/StringTokenizer.cpp
@@ -16,9 +16,9 @@
 
 #include "Poco/StringTokenizer.h"
 #include "Poco/Ascii.h"
 #include <algorithm>
 
 
 namespace Poco {
 
 
@@ -38,7 +38,7 @@ StringTokenizer::StringTokenizer(const std::string& str, const std::string& sepa
             if (doTrim) trim(token);
             if (!token.empty() || !ignoreEmpty) _tokens.push_back(token);
             if (!ignoreEmpty) lastToken = true;
-            token = "";
+            token.clear();
         }
         else
         {
@@ -50,9 +50,12 @@ StringTokenizer::StringTokenizer(const std::string& str, const std::string& sepa
     if (!token.empty())
     {
         if (doTrim) trim(token);
-        if (!token.empty()) _tokens.push_back(token);
+        if (!token.empty() || !ignoreEmpty) _tokens.push_back(token);
     }
-    else if (lastToken) _tokens.push_back("");
+    else if (lastToken)
+    {
+        _tokens.push_back(std::string());
+    }
 }
 
 
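With this change the tail handling mirrors the in-loop logic: after the last separator, a token is appended when it is non-empty after optional trimming, or when empty tokens are not being ignored; a string that ends exactly on a separator still yields one trailing empty token. A small sketch of the resulting counts, assuming a build with this fix:

    #include <cassert>
    #include "Poco/StringTokenizer.h"

    int main()
    {
        using Poco::StringTokenizer;

        // Ends on a separator: the else-if (lastToken) branch adds one empty token.
        StringTokenizer trailingSep("a,b,", ",");
        assert(trailingSep.count() == 3);   // "a", "b", ""

        // A whitespace tail trims to empty but is kept with TOK_TRIM alone...
        StringTokenizer kept("a,b,  ", ",", StringTokenizer::TOK_TRIM);
        assert(kept.count() == 3);          // "a", "b", ""

        // ...and dropped once TOK_IGNORE_EMPTY is added.
        StringTokenizer dropped("a,b,  ", ",",
            StringTokenizer::TOK_TRIM | StringTokenizer::TOK_IGNORE_EMPTY);
        assert(dropped.count() == 2);       // "a", "b"

        return 0;
    }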
@@ -63,7 +66,9 @@ StringTokenizer::~StringTokenizer()
 
 void StringTokenizer::trim(std::string& token)
 {
-    std::size_t front = 0, back = 0, length = token.length();
+    std::string::size_type front = 0;
+    std::string::size_type back = 0;
+    std::string::size_type length = token.length();
     std::string::const_iterator tIt = token.begin();
     std::string::const_iterator tEnd = token.end();
     for (; tIt != tEnd; ++tIt, ++front)
@@ -96,7 +101,7 @@ std::size_t StringTokenizer::count(const std::string& token) const
 }
 
 
-std::size_t StringTokenizer::find(const std::string& token, std::size_t pos) const
+std::string::size_type StringTokenizer::find(const std::string& token, std::string::size_type pos) const
 {
     TokenVec::const_iterator it = std::find(_tokens.begin() + pos, _tokens.end(), token);
     if (it != _tokens.end())
@@ -106,13 +111,15 @@ std::size_t StringTokenizer::find(const std::string& token, std::size_t pos) con
     throw NotFoundException(token);
 }
 
 
 bool StringTokenizer::has(const std::string& token) const
 {
     TokenVec::const_iterator it = std::find(_tokens.begin(), _tokens.end(), token);
     return it != _tokens.end();
 }
 
-std::size_t StringTokenizer::replace(const std::string& oldToken, const std::string& newToken, std::size_t pos)
+
+std::size_t StringTokenizer::replace(const std::string& oldToken, const std::string& newToken, std::string::size_type pos)
 {
     std::size_t result = 0;
     TokenVec::iterator it = std::find(_tokens.begin() + pos, _tokens.end(), oldToken);
Foundation/testsuite/src/StringTokenizerTest.cpp
@@ -326,6 +326,13 @@ void StringTokenizerTest::testStringTokenizer()
         assert (st.find("2") == 1);
         assert (st.find("3") == 2);
     }
+
+    {
+        Poco::StringTokenizer st(" 2- ","-", Poco::StringTokenizer::TOK_TRIM);
+        assert (st.count() == 2);
+        assert (st[0] == "2");
+        assert (st[1] == "");
+    }
 }
 
 