mirror of https://github.com/pocoproject/poco.git
replaced indentation spaces with tabs
@@ -58,14 +58,14 @@ public:
	enum Options
	{
		TOK_IGNORE_EMPTY = 1, /// ignore empty tokens
		TOK_TRIM         = 2  /// remove leading and trailing whitespace from tokens
	};

	typedef std::vector<std::string> TokenVec;
	typedef TokenVec::const_iterator Iterator;

	StringTokenizer(const std::string& str, const std::string& separators, int options = 0);
		/// Splits the given string into tokens. The tokens are expected to be
		/// separated by one of the separator characters given in separators.
		/// Additionally, options can be specified:
		///   * TOK_IGNORE_EMPTY: empty tokens are ignored
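Not part of the commit, but as orientation for the interface above: a minimal usage sketch of the constructor and options, assuming the usual Poco/StringTokenizer.h include path.

// Hypothetical caller, not part of this commit: tokenizes a comma-separated
// string using the options documented above.
#include <iostream>
#include "Poco/StringTokenizer.h"

int main()
{
	Poco::StringTokenizer tok("red, green, , blue", ",",
		Poco::StringTokenizer::TOK_IGNORE_EMPTY | Poco::StringTokenizer::TOK_TRIM);

	// Iterates over the surviving tokens: "red", "green", "blue".
	for (Poco::StringTokenizer::Iterator it = tok.begin(); it != tok.end(); ++it)
		std::cout << *it << std::endl;

	return 0;
}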
@@ -79,40 +79,40 @@ public:
		/// Destroys the tokenizer.

	Iterator begin() const;
	Iterator end() const;

	const std::string& operator [] (std::size_t index) const;
		/// Returns const reference to the index'th token.
		/// Throws a RangeException if the index is out of range.

	std::string& operator [] (std::size_t index);
		/// Returns reference to the index'th token.
		/// Throws a RangeException if the index is out of range.

	bool has(const std::string& token) const;
		/// Returns true if token exists, false otherwise.

	std::size_t find(const std::string& token, std::size_t pos = 0) const;
		/// Returns the index of the first occurrence of the token
		/// starting at position pos.
		/// Throws a NotFoundException if the token is not found.

	std::size_t replace(const std::string& oldToken, const std::string& newToken, std::size_t pos = 0);
		/// Starting at position pos, replaces all subsequent tokens having value
		/// equal to oldToken with newToken.
		/// Returns the number of modified tokens.

	std::size_t count() const;
		/// Returns the total number of tokens.

	std::size_t count(const std::string& token) const;
		/// Returns the number of tokens equal to the specified token.

private:
	StringTokenizer(const StringTokenizer&);
	StringTokenizer& operator = (const StringTokenizer&);

	TokenVec _tokens;
};
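Again not part of the commit: a short sketch of the query and replace members declared above, assuming they behave as their doc comments state.

// Hypothetical caller, not part of this commit: exercises the query and
// replace members declared in this hunk.
#include <iostream>
#include "Poco/StringTokenizer.h"

int main()
{
	Poco::StringTokenizer tok("a;b;a;c", ";");

	std::cout << tok.count() << std::endl;       // 4 tokens in total
	std::cout << tok.count("a") << std::endl;    // 2 tokens equal to "a"
	std::cout << tok.has("b") << std::endl;      // 1 (true), "b" exists
	std::cout << tok.find("a", 1) << std::endl;  // 2, first "a" at or after index 1

	// Replaces every token equal to "a" with "x" and reports how many changed.
	std::cout << tok.replace("a", "x") << std::endl;  // 2
	std::cout << tok[0] << std::endl;                 // "x"

	return 0;
}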
@@ -135,14 +135,14 @@ inline StringTokenizer::Iterator StringTokenizer::end() const
inline std::string& StringTokenizer::operator [] (std::size_t index)
{
	if (index >= _tokens.size()) throw RangeException();
	return _tokens[index];
}


inline const std::string& StringTokenizer::operator [] (std::size_t index) const
{
	if (index >= _tokens.size()) throw RangeException();
	return _tokens[index];
}
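One last sketch, not part of the commit: the bounds check in both operator [] overloads above surfaces as a Poco::RangeException that callers can catch.

// Hypothetical caller, not part of this commit: shows the RangeException
// thrown by the bounds-checked operator [] defined above.
#include <iostream>
#include "Poco/Exception.h"
#include "Poco/StringTokenizer.h"

int main()
{
	Poco::StringTokenizer tok("one two", " ");

	try
	{
		std::cout << tok[5] << std::endl;  // index 5 >= tok.count(), so this throws
	}
	catch (const Poco::RangeException& exc)
	{
		std::cout << exc.displayText() << std::endl;
	}

	return 0;
}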