Googletest export
Add a matcher `testing::ReturnRoundRobin` which, on each call, returns the next element in the sequence, restarting at the beginning once it has reached the end.

PiperOrigin-RevId: 276312136
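For context, a minimal sketch of how `ReturnRoundRobin` is typically used in a gMock expectation; the mock class and method names here (MockCounter, GetNext) are invented for illustration:

#include "gmock/gmock.h"
#include "gtest/gtest.h"

using ::testing::ReturnRoundRobin;

class MockCounter {
 public:
  MOCK_METHOD(int, GetNext, ());
};

TEST(ReturnRoundRobinDemo, CyclesThroughTheSequence) {
  MockCounter counter;
  // Each call returns 1, 2, 3, then wraps around to 1 again.
  EXPECT_CALL(counter, GetNext())
      .WillRepeatedly(ReturnRoundRobin({1, 2, 3}));
  for (int expected : {1, 2, 3, 1, 2}) {
    EXPECT_EQ(expected, counter.GetNext());
  }
}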
committed by Gennadiy Civil
parent 1110c471ca
commit 37f3227831
@@ -17,10 +17,7 @@
"""Generate an Abstract Syntax Tree (AST) for C++."""

__author__ = 'nnorwitz@google.com (Neal Norwitz)'


# TODO:
# FIXME:
#  * Tokens should never be exported, need to convert to Nodes
#    (return types, parameters, etc.)
#  * Handle static class data for templatized classes
@@ -338,7 +335,7 @@ class Class(_GenericDeclaration):
        # TODO(nnorwitz): handle namespaces, etc.
        if self.bases:
            for token_list in self.bases:
                # TODO(nnorwitz): bases are tokens, do name comparison.
                # TODO(nnorwitz): bases are tokens, do name comparision.
                for token in token_list:
                    if token.name == node.name:
                        return True
@@ -381,7 +378,7 @@ class Function(_GenericDeclaration):

    def Requires(self, node):
        if self.parameters:
            # TODO(nnorwitz): parameters are tokens, do name comparison.
            # TODO(nnorwitz): parameters are tokens, do name comparision.
            for p in self.parameters:
                if p.name == node.name:
                    return True
@@ -739,6 +736,14 @@ class AstBuilder(object):
        if token.token_type == tokenize.NAME:
            if (keywords.IsKeyword(token.name) and
                not keywords.IsBuiltinType(token.name)):
                if token.name == 'enum':
                    # Pop the next token and only put it back if it's not
                    # 'class'. This allows us to support the two-token
                    # 'enum class' keyword as if it were simply 'enum'.
                    next = self._GetNextToken()
                    if next.name != 'class':
                        self._AddBackToken(next)

                method = getattr(self, 'handle_' + token.name)
                return method()
            elif token.name == self.in_class_name_only:
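For illustration only (this header is not part of the patch), the change above lets the generator treat a scoped enumeration declared with the two-token 'enum class' keyword the same way as a plain 'enum', e.g.:

// Hypothetical input header for the mock generator.
class PaymentGateway {
 public:
  virtual ~PaymentGateway() = default;

  // Parsed as if it were a plain 'enum' by the change above.
  enum class Status { kPending, kSettled, kFailed };

  virtual Status Charge(int cents) = 0;
};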
@@ -754,7 +759,8 @@ class AstBuilder(object):
            # Handle data or function declaration/definition.
            syntax = tokenize.SYNTAX
            temp_tokens, last_token = \
                self._GetVarTokensUpTo(syntax, '(', ';', '{', '[')
                self._GetVarTokensUpToIgnoringTemplates(syntax,
                                                        '(', ';', '{', '[')
            temp_tokens.insert(0, token)
            if last_token.name == '(':
                # If there is an assignment before the paren,
@@ -858,7 +864,25 @@ class AstBuilder(object):
            last_token = self._GetNextToken()
        return tokens, last_token

    # TODO(nnorwitz): remove _IgnoreUpTo() it shouldn't be necessary.
    # Same as _GetVarTokensUpTo, but skips over '<...>' which could contain an
    # expected token.
    def _GetVarTokensUpToIgnoringTemplates(self, expected_token_type,
                                           *expected_tokens):
        last_token = self._GetNextToken()
        tokens = []
        nesting = 0
        while (nesting > 0 or
               last_token.token_type != expected_token_type or
               last_token.name not in expected_tokens):
            tokens.append(last_token)
            last_token = self._GetNextToken()
            if last_token.name == '<':
                nesting += 1
            elif last_token.name == '>':
                nesting -= 1
        return tokens, last_token

    # TODO(nnorwitz): remove _IgnoreUpTo() it shouldn't be necesary.
    def _IgnoreUpTo(self, token_type, token):
        unused_tokens = self._GetTokensUpTo(token_type, token)
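As a hypothetical example of why the '<...>' skipping matters (the interface below is invented, not taken from the patch): the '(' inside the template argument list of std::function<void(int)> would otherwise terminate the token scan before the method's real parameter list is reached.

#include <functional>

// Hypothetical input header; not part of the patch.
class EventSource {
 public:
  virtual ~EventSource() = default;

  // The '(' in 'void(int)' sits inside '<...>', so a scan that stops at the
  // first '(' would split the declaration in the wrong place.
  virtual std::function<void(int)> GetCallback() const = 0;
};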
@@ -1264,9 +1288,6 @@ class AstBuilder(object):
        return self._GetNestedType(Union)

    def handle_enum(self):
        token = self._GetNextToken()
        if not (token.token_type == tokenize.NAME and token.name == 'class'):
            self._AddBackToken(token)
        return self._GetNestedType(Enum)

    def handle_auto(self):
@@ -1298,7 +1319,8 @@ class AstBuilder(object):
        if token2.token_type == tokenize.SYNTAX and token2.name == '~':
            return self.GetMethod(FUNCTION_VIRTUAL + FUNCTION_DTOR, None)
        assert token.token_type == tokenize.NAME or token.name == '::', token
        return_type_and_name = self._GetTokensUpTo(tokenize.SYNTAX, '(')  # )
        return_type_and_name, _ = self._GetVarTokensUpToIgnoringTemplates(
            tokenize.SYNTAX, '(')  # )
        return_type_and_name.insert(0, token)
        if token2 is not token:
            return_type_and_name.insert(1, token2)