- added mini unit testing framework

- added unit tests for Value.isXYZ() and iteration over Value members to compute size
Baptiste Lepilleur 2009-11-21 18:07:09 +00:00
parent 932cfc798b
commit 45c499d310
10 changed files with 1342 additions and 2 deletions
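For orientation, here is a minimal usage sketch of the mini framework added by this commit, distilled from src/test_lib_json/jsontest.h and main.cpp below. The ExampleTest fixture and its test body are invented for illustration only and are not part of the commit.

#include "jsontest.h"

// Fixtures derive from JsonTest::TestCase (illustrative fixture, not in the commit).
struct ExampleTest : JsonTest::TestCase
{
};

// JSONTEST_FIXTURE declares a test case class and opens the body of its runTestCase().
JSONTEST_FIXTURE( ExampleTest, arithmetic )
{
   // Assertions are non-aborting; extra context can be appended with operator <<.
   JSONTEST_ASSERT_EQUAL( 4, 2 + 2 );
   JSONTEST_ASSERT( 2 + 2 == 4 ) << "unexpected sum: " << (2 + 2);
}

int main( int argc, const char *argv[] )
{
   JsonTest::Runner runner;
   JSONTEST_REGISTER_FIXTURE( runner, ExampleTest, arithmetic );
   // Supports --list-tests, --test <name> and --test-auto, as implemented in jsontest.cpp.
   return runner.runCommandLine( argc, argv );
}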

SConstruct

@@ -231,6 +231,12 @@ def buildJSONTests( env, target_sources, target_name ):
check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) )
env.AlwaysBuild( check_alias_target )
def buildUnitTests( env, target_sources, target_name ):
jsontests_node = buildJSONExample( env, target_sources, target_name )
check_alias_target = env.Alias( 'check', jsontests_node,
RunUnitTests( jsontests_node, jsontests_node ) )
env.AlwaysBuild( check_alias_target )
def buildLibrary( env, target_sources, target_name ):
static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}',
source=target_sources )
@@ -242,7 +248,7 @@ def buildLibrary( env, target_sources, target_name ):
env.Install( lib_dir, shared_lib )
env['SRCDIST_ADD']( source=[target_sources] )
Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests' )
Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' )
def buildProjectInDirectory( target_directory ):
global build_dir
@@ -266,6 +272,18 @@ import SCons.Action
ActionFactory = SCons.Action.ActionFactory
RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string )
def runUnitTests_action( target, source = None, env = None ):
# Add test scripts to python path
jsontest_path = Dir( '#test' ).abspath
sys.path.insert( 0, jsontest_path )
import rununittests
return rununittests.runAllTests( os.path.abspath(source[0].path) )
def runUnitTests_string( target, source = None, env = None ):
return 'RunUnitTests("%s")' % source[0]
RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string )
env.Alias( 'check' )
srcdist_cmd = env['SRCDIST_ADD']( source = """
@@ -275,6 +293,7 @@ env.Alias( 'src-dist', srcdist_cmd )
buildProjectInDirectory( 'src/jsontestrunner' )
buildProjectInDirectory( 'src/lib_json' )
buildProjectInDirectory( 'src/test_lib_json' )
buildProjectInDirectory( 'doc' )
#print env.Dump()

include/json/config.h

@@ -19,6 +19,9 @@
/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1
/// If defined, indicates that the JSON library uses exceptions to report invalid type manipulation
/// instead of the C assert macro.
# define JSON_USE_EXCEPTION 1
# ifdef JSON_IN_CPPTL
# include <cpptl/config.h>

Visual Studio 7.1 solution file

@@ -8,6 +8,11 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcproj
{B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}"
ProjectSection(ProjectDependencies) = postProject
{B84F7231-16CE-41D8-8C08-7B523FF4225B} = {B84F7231-16CE-41D8-8C08-7B523FF4225B}
EndProjectSection
EndProject
Global
GlobalSection(SolutionConfiguration) = preSolution
Debug = Debug
@@ -27,6 +32,12 @@ Global
{25AF2DD2-D396-4668-B188-488C33B8E620}.dummy.Build.0 = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.ActiveCfg = Release|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release.Build.0 = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug.Build.0 = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.dummy.Build.0 = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.ActiveCfg = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release.Build.0 = Release|Win32
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
EndGlobalSection

test_lib_json.vcproj (new file)

@@ -0,0 +1,130 @@
<?xml version="1.0" encoding="Windows-1252"?>
<VisualStudioProject
ProjectType="Visual C++"
Version="7.10"
Name="test_lib_json"
ProjectGUID="{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}"
RootNamespace="test_lib_json"
Keyword="Win32Proj">
<Platforms>
<Platform
Name="Win32"/>
</Platforms>
<Configurations>
<Configuration
Name="Debug|Win32"
OutputDirectory="../../build/vs71/debug/test_lib_json"
IntermediateDirectory="../../build/vs71/debug/test_lib_json"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCCLCompilerTool"
Optimization="0"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;_DEBUG;_CONSOLE"
MinimalRebuild="TRUE"
BasicRuntimeChecks="3"
RuntimeLibrary="1"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="4"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLinkerTool"
OutputFile="$(OutDir)/test_lib_json.exe"
LinkIncremental="2"
GenerateDebugInformation="TRUE"
ProgramDatabaseFile="$(OutDir)/test_lib_json.pdb"
SubSystem="1"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"
Description="Running all unit tests"
CommandLine="$(TargetPath)"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebDeploymentTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
<Configuration
Name="Release|Win32"
OutputDirectory="../../build/vs71/release/test_lib_json"
IntermediateDirectory="../../build/vs71/release/test_lib_json"
ConfigurationType="1"
CharacterSet="2">
<Tool
Name="VCCLCompilerTool"
AdditionalIncludeDirectories="../../include"
PreprocessorDefinitions="WIN32;NDEBUG;_CONSOLE"
RuntimeLibrary="0"
UsePrecompiledHeader="0"
WarningLevel="3"
Detect64BitPortabilityProblems="TRUE"
DebugInformationFormat="3"/>
<Tool
Name="VCCustomBuildTool"/>
<Tool
Name="VCLinkerTool"
OutputFile="$(OutDir)/test_lib_json.exe"
LinkIncremental="1"
GenerateDebugInformation="TRUE"
SubSystem="1"
OptimizeReferences="2"
EnableCOMDATFolding="2"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
<Tool
Name="VCPostBuildEventTool"
Description="Running all unit tests"
CommandLine="$(TargetPath)"/>
<Tool
Name="VCPreBuildEventTool"/>
<Tool
Name="VCPreLinkEventTool"/>
<Tool
Name="VCResourceCompilerTool"/>
<Tool
Name="VCWebServiceProxyGeneratorTool"/>
<Tool
Name="VCXMLDataGeneratorTool"/>
<Tool
Name="VCWebDeploymentTool"/>
<Tool
Name="VCManagedWrapperGeneratorTool"/>
<Tool
Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
</Configuration>
</Configurations>
<References>
</References>
<Files>
<File
RelativePath="..\..\src\test_lib_json\jsontest.cpp">
</File>
<File
RelativePath="..\..\src\test_lib_json\jsontest.h">
</File>
<File
RelativePath="..\..\src\test_lib_json\main.cpp">
</File>
</Files>
<Globals>
</Globals>
</VisualStudioProject>

src/test_lib_json/jsontest.cpp (new file)

@@ -0,0 +1,598 @@
#define _CRT_SECURE_NO_WARNINGS 1 // Prevents deprecation warning with MSVC
#include "jsontest.h"
#include <stdio.h>
#include <string>
#if defined(_MSC_VER)
// Used to install a report hook that prevents dialogs on assertions and errors.
# include <crtdbg.h>
#endif // if defined(_MSC_VER)
#if defined(_WIN32)
// Used to prevent dialogs on memory faults.
// Limits headers included by Windows.h
# define WIN32_LEAN_AND_MEAN
# define NOSERVICE
# define NOMCX
# define NOIME
# define NOSOUND
# define NOCOMM
# define NORPC
# define NOGDI
# define NOUSER
# define NODRIVERS
# define NOLOGERROR
# define NOPROFILER
# define NOMEMMGR
# define NOLFILEIO
# define NOOPENFILE
# define NORESOURCE
# define NOATOM
# define NOLANGUAGE
# define NOLSTRING
# define NODBCS
# define NOKEYBOARDINFO
# define NOGDICAPMASKS
# define NOCOLOR
# define NOGDIOBJ
# define NODRAWTEXT
# define NOTEXTMETRIC
# define NOSCALABLEFONT
# define NOBITMAP
# define NORASTEROPS
# define NOMETAFILE
# define NOSYSMETRICS
# define NOSYSTEMPARAMSINFO
# define NOMSG
# define NOWINSTYLES
# define NOWINOFFSETS
# define NOSHOWWINDOW
# define NODEFERWINDOWPOS
# define NOVIRTUALKEYCODES
# define NOKEYSTATES
# define NOWH
# define NOMENUS
# define NOSCROLL
# define NOCLIPBOARD
# define NOICONS
# define NOMB
# define NOSYSCOMMANDS
# define NOMDI
# define NOCTLMGR
# define NOWINMESSAGES
# include <windows.h>
#endif // if defined(_WIN32)
namespace JsonTest {
// class TestResult
// //////////////////////////////////////////////////////////////////
TestResult::TestResult()
: predicateId_( 1 )
, lastUsedPredicateId_( 0 )
, messageTarget_( 0 )
{
// The root predicate has id 0
rootPredicateNode_.id_ = 0;
rootPredicateNode_.next_ = 0;
predicateStackTail_ = &rootPredicateNode_;
}
void
TestResult::setTestName( const std::string &name )
{
name_ = name;
}
TestResult &
TestResult::addFailure( const char *file, unsigned int line,
const char *expr )
{
/// Walks the PredicateContext stack, adding each context to failures_ if not already added.
unsigned int nestingLevel = 0;
PredicateContext *lastNode = rootPredicateNode_.next_;
for ( ; lastNode != 0; lastNode = lastNode->next_ )
{
if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext
{
lastUsedPredicateId_ = lastNode->id_;
addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_,
nestingLevel );
// Link the PredicateContext to the failure for message target when
// popping the PredicateContext.
lastNode->failure_ = &( failures_.back() );
}
++nestingLevel;
}
// Adds the failed assertion
addFailureInfo( file, line, expr, nestingLevel );
messageTarget_ = &( failures_.back() );
return *this;
}
void
TestResult::addFailureInfo( const char *file, unsigned int line,
const char *expr, unsigned int nestingLevel )
{
Failure failure;
failure.file_ = file;
failure.line_ = line;
if ( expr )
{
failure.expr_ = expr;
}
failure.nestingLevel_ = nestingLevel;
failures_.push_back( failure );
}
TestResult &
TestResult::popPredicateContext()
{
PredicateContext *lastNode = &rootPredicateNode_;
while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 )
{
lastNode = lastNode->next_;
}
// Set message target to popped failure
PredicateContext *tail = lastNode->next_;
if ( tail != 0 && tail->failure_ != 0 )
{
messageTarget_ = tail->failure_;
}
// Remove tail from list
predicateStackTail_ = lastNode;
lastNode->next_ = 0;
return *this;
}
bool
TestResult::failed() const
{
return !failures_.empty();
}
unsigned int
TestResult::getAssertionNestingLevel() const
{
unsigned int level = 0;
const PredicateContext *lastNode = &rootPredicateNode_;
while ( lastNode->next_ != 0 )
{
lastNode = lastNode->next_;
++level;
}
return level;
}
void
TestResult::printFailure( bool printTestName ) const
{
if ( failures_.empty() )
{
return;
}
if ( printTestName )
{
printf( "* Detail of %s test failure:\n", name_.c_str() );
}
// Failures are stored outermost first, so a forward iteration displays the callstack in the right order
Failures::const_iterator itEnd = failures_.end();
for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it )
{
const Failure &failure = *it;
std::string indent( failure.nestingLevel_ * 2, ' ' );
if ( failure.file_ )
{
printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ );
}
if ( !failure.expr_.empty() )
{
printf( "%s\n", failure.expr_.c_str() );
}
else if ( failure.file_ )
{
printf( "\n" );
}
if ( !failure.message_.empty() )
{
std::string reindented = indentText( failure.message_, indent + " " );
printf( "%s\n", reindented.c_str() );
}
}
}
std::string
TestResult::indentText( const std::string &text,
const std::string &indent )
{
std::string reindented;
std::string::size_type lastIndex = 0;
while ( lastIndex < text.size() )
{
std::string::size_type nextIndex = text.find( '\n', lastIndex );
if ( nextIndex == std::string::npos )
{
nextIndex = text.size() - 1;
}
reindented += indent;
reindented += text.substr( lastIndex, nextIndex - lastIndex + 1 );
lastIndex = nextIndex + 1;
}
return reindented;
}
TestResult &
TestResult::addToLastFailure( const std::string &message )
{
if ( messageTarget_ != 0 )
{
messageTarget_->message_ += message;
}
return *this;
}
TestResult &
TestResult::operator << ( bool value )
{
return addToLastFailure( value ? "true" : "false" );
}
TestResult &
TestResult::operator << ( int value )
{
char buffer[32];
sprintf( buffer, "%d", value );
return addToLastFailure( buffer );
}
TestResult &
TestResult::operator << ( unsigned int value )
{
char buffer[32];
sprintf( buffer, "%u", value );
return addToLastFailure( buffer );
}
TestResult &
TestResult::operator << ( double value )
{
char buffer[32];
sprintf( buffer, "%16g", value );
return addToLastFailure( buffer );
}
TestResult &
TestResult::operator << ( const char *value )
{
return addToLastFailure( value ? value
: "<NULL>" );
}
TestResult &
TestResult::operator << ( const std::string &value )
{
return addToLastFailure( value );
}
// class TestCase
// //////////////////////////////////////////////////////////////////
TestCase::TestCase()
: result_( 0 )
{
}
void
TestCase::run( TestResult &result )
{
result_ = &result;
runTestCase();
}
// class Runner
// //////////////////////////////////////////////////////////////////
Runner::Runner()
{
}
Runner &
Runner::add( TestCaseFactory factory )
{
tests_.push_back( factory );
return *this;
}
unsigned int
Runner::testCount() const
{
return static_cast<unsigned int>( tests_.size() );
}
std::string
Runner::testNameAt( unsigned int index ) const
{
TestCase *test = tests_[index]();
std::string name = test->testName();
delete test;
return name;
}
void
Runner::runTestAt( unsigned int index, TestResult &result ) const
{
TestCase *test = tests_[index]();
result.setTestName( test->testName() );
printf( "Testing %s: ", test->testName() );
fflush( stdout );
#if JSON_USE_EXCEPTION
try
{
#endif // if JSON_USE_EXCEPTION
test->run( result );
#if JSON_USE_EXCEPTION
}
catch ( const std::exception &e )
{
result.addFailure( __FILE__, __LINE__,
"Unexpected exception caugth:" ) << e.what();
}
#endif // if JSON_USE_EXCEPTION
delete test;
const char *status = result.failed() ? "FAILED"
: "OK";
printf( "%s\n", status );
fflush( stdout );
}
bool
Runner::runAllTest( bool printSummary ) const
{
unsigned int count = testCount();
std::deque<TestResult> failures;
for ( unsigned int index = 0; index < count; ++index )
{
TestResult result;
runTestAt( index, result );
if ( result.failed() )
{
failures.push_back( result );
}
}
if ( failures.empty() )
{
if ( printSummary )
{
printf( "All %d tests passed\n", count );
}
return true;
}
else
{
for ( unsigned int index = 0; index < failures.size(); ++index )
{
TestResult &result = failures[index];
result.printFailure( count > 1 );
}
if ( printSummary )
{
unsigned int failedCount = static_cast<unsigned int>( failures.size() );
unsigned int passedCount = count - failedCount;
printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount );
}
return false;
}
}
bool
Runner::testIndex( const std::string &testName,
unsigned int &indexOut ) const
{
unsigned int count = testCount();
for ( unsigned int index = 0; index < count; ++index )
{
if ( testNameAt(index) == testName )
{
indexOut = index;
return true;
}
}
return false;
}
void
Runner::listTests() const
{
unsigned int count = testCount();
for ( unsigned int index = 0; index < count; ++index )
{
printf( "%s\n", testNameAt( index ).c_str() );
}
}
int
Runner::runCommandLine( int argc, const char *argv[] ) const
{
typedef std::deque<std::string> TestNames;
Runner subrunner;
for ( int index = 1; index < argc; ++index )
{
std::string opt = argv[index];
if ( opt == "--list-tests" )
{
listTests();
return 0;
}
else if ( opt == "--test-auto" )
{
preventDialogOnCrash();
}
else if ( opt == "--test" )
{
++index;
if ( index < argc )
{
unsigned int testNameIndex;
if ( testIndex( argv[index], testNameIndex ) )
{
subrunner.add( tests_[testNameIndex] );
}
else
{
fprintf( stderr, "Test '%s' does not exist!\n", argv[index] );
return 2;
}
}
else
{
printUsage( argv[0] );
return 2;
}
}
else
{
printUsage( argv[0] );
return 2;
}
}
bool succeeded;
if ( subrunner.testCount() > 0 )
{
succeeded = subrunner.runAllTest( subrunner.testCount() > 1 );
}
else
{
succeeded = runAllTest( true );
}
return succeeded ? 0
: 1;
}
#if defined(_MSC_VER)
// Hook MSVCRT assertions to prevent dialog from appearing
static int
msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
{
// The default CRT handling of errors and assertions is to display
// an error dialog to the user.
// Instead, when an error or an assertion occurs, we force the
// application to terminate using abort() after displaying
// the message on stderr.
if ( reportType == _CRT_ERROR ||
reportType == _CRT_ASSERT )
{
// Calling abort() causes the ReportHook to be called again.
// The following is used to detect this case and lets the
// error handler fall back on its default behaviour
// (displaying a warning message).
static volatile bool isAborting = false;
if ( isAborting )
{
return TRUE;
}
isAborting = true;
fprintf( stderr, "CRT Error/Assert:\n%s\n", message );
fflush( stderr );
abort();
}
// Let other report types (_CRT_WARNING) be handled as they would be by default
return FALSE;
}
#endif // if defined(_MSC_VER)
void
Runner::preventDialogOnCrash()
{
#if defined(_MSC_VER)
// Install a hook to prevent MSVCRT error and assertion from
// popping a dialog.
_CrtSetReportHook( &msvcrtSilentReportHook );
#endif // if defined(_MSC_VER)
// @todo investigate this handler (for buffer overflow)
// _set_security_error_handler
#if defined(_WIN32)
// Prevents the system from popping a dialog for debugging if the
// application fails due to invalid memory access.
SetErrorMode( SEM_FAILCRITICALERRORS
| SEM_NOGPFAULTERRORBOX
| SEM_NOOPENFILEERRORBOX );
#endif // if defined(_WIN32)
}
void
Runner::printUsage( const char *appName )
{
printf(
"Usage: %s [options]\n"
"\n"
"If --test is not specified, then all the test cases be run.\n"
"\n"
"Valid options:\n"
"--list-tests: print the name of all test cases on the standard\n"
" output and exit.\n"
"--test TESTNAME: executes the test case with the specified name.\n"
" May be repeated.\n"
"--test-auto: prevent dialog prompting for debugging on crash.\n"
, appName );
}
// Assertion functions
// //////////////////////////////////////////////////////////////////
TestResult &
checkStringEqual( TestResult &result,
const std::string &expected, const std::string &actual,
const char *file, unsigned int line, const char *expr )
{
if ( expected != actual )
{
result.addFailure( file, line, expr );
result << "Expected: '" << expected << "'\n";
result << "Actual : '" << actual << "'";
}
return result;
}
} // namespace JsonTest

src/test_lib_json/jsontest.h (new file)

@@ -0,0 +1,252 @@
#ifndef JSONTEST_H_INCLUDED
# define JSONTEST_H_INCLUDED
# include <json/config.h>
# include <stdio.h>
# include <deque>
# include <string>
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// Mini Unit Testing framework
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
/** \brief Unit testing framework.
* \warning: all assertions are non-aborting; test case execution will continue
* even if an assertion fails.
* This constraint is for portability: the framework needs to compile
* on Visual Studio 6 and must not require exception usage.
*/
namespace JsonTest {
class Failure
{
public:
const char *file_;
unsigned int line_;
std::string expr_;
std::string message_;
unsigned int nestingLevel_;
};
/// Context used to create the assertion callstack on failure.
/// Must be a POD to allow inline initialisation without stepping
/// into the debugger.
struct PredicateContext
{
typedef unsigned int Id;
Id id_;
const char *file_;
unsigned int line_;
const char *expr_;
PredicateContext *next_;
/// Related Failure, set when the PredicateContext is converted
/// into a Failure.
Failure *failure_;
};
class TestResult
{
public:
TestResult();
/// \internal Implementation detail for assertion macros
/// Not encapsulated, to prevent stepping into it when debugging failed assertions
/// Incremented by one on assertion predicate entry, decreased by one
/// by addPredicateContext().
PredicateContext::Id predicateId_;
/// \internal Implementation detail for predicate macros
PredicateContext *predicateStackTail_;
void setTestName( const std::string &name );
/// Adds an assertion failure.
TestResult &addFailure( const char *file, unsigned int line,
const char *expr = 0 );
/// Removes the last PredicateContext added to the predicate stack
/// chained list.
/// Next messages will be targeted at the PredicateContext that was removed.
TestResult &popPredicateContext();
bool failed() const;
void printFailure( bool printTestName ) const;
TestResult &operator << ( bool value );
TestResult &operator << ( int value );
TestResult &operator << ( unsigned int value );
TestResult &operator << ( double value );
TestResult &operator << ( const char *value );
TestResult &operator << ( const std::string &value );
private:
TestResult &addToLastFailure( const std::string &message );
unsigned int getAssertionNestingLevel() const;
/// Adds a failure or a predicate context
void addFailureInfo( const char *file, unsigned int line,
const char *expr, unsigned int nestingLevel );
static std::string indentText( const std::string &text,
const std::string &indent );
typedef std::deque<Failure> Failures;
Failures failures_;
std::string name_;
PredicateContext rootPredicateNode_;
PredicateContext::Id lastUsedPredicateId_;
/// Failure which is the target of the messages added using operator <<
Failure *messageTarget_;
};
class TestCase
{
public:
TestCase();
void run( TestResult &result );
virtual const char *testName() const = 0;
protected:
TestResult *result_;
private:
virtual void runTestCase() = 0;
};
/// Function pointer type for TestCase factory
typedef TestCase *(*TestCaseFactory)();
class Runner
{
public:
Runner();
/// Adds a test to the suite
Runner &add( TestCaseFactory factory );
/// Runs tests as specified on the command line.
/// If no command-line arguments are provided, run all tests.
/// If --list-tests is provided, then print the list of all test cases
/// If --test <testname> is provided, then run test testname.
int runCommandLine( int argc, const char *argv[] ) const;
/// Runs all the test cases
bool runAllTest( bool printSummary ) const;
/// Returns the number of test cases in the suite
unsigned int testCount() const;
/// Returns the name of the test case at the specified index
std::string testNameAt( unsigned int index ) const;
/// Runs the test case at the specified index using the specified TestResult
void runTestAt( unsigned int index, TestResult &result ) const;
static void printUsage( const char *appName );
private: // prevents copy construction and assignment
Runner( const Runner &other );
Runner &operator =( const Runner &other );
private:
void listTests() const;
bool testIndex( const std::string &testName, unsigned int &index ) const;
static void preventDialogOnCrash();
private:
typedef std::deque<TestCaseFactory> Factories;
Factories tests_;
};
template<typename T>
TestResult &
checkEqual( TestResult &result, const T &expected, const T &actual,
const char *file, unsigned int line, const char *expr )
{
if ( expected != actual )
{
result.addFailure( file, line, expr );
result << "Expected: " << expected << "\n";
result << "Actual : " << actual;
}
return result;
}
TestResult &
checkStringEqual( TestResult &result,
const std::string &expected, const std::string &actual,
const char *file, unsigned int line, const char *expr );
} // namespace JsonTest
/// \brief Asserts that the given expression is true.
/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
/// JSONTEST_ASSERT( x == y );
#define JSONTEST_ASSERT( expr ) \
if ( expr ) \
{ \
} \
else \
result_->addFailure( __FILE__, __LINE__, #expr )
/// \brief Asserts that the given predicate is true.
/// The predicate may do other assertions and be a member function of the fixture.
#define JSONTEST_ASSERT_PRED( expr ) \
{ \
JsonTest::PredicateContext _minitest_Context = { \
result_->predicateId_, __FILE__, __LINE__, #expr }; \
result_->predicateStackTail_->next_ = &_minitest_Context; \
result_->predicateId_ += 1; \
result_->predicateStackTail_ = &_minitest_Context; \
(expr); \
result_->popPredicateContext(); \
} \
*result_
/// \brief Asserts that two values are equal.
#define JSONTEST_ASSERT_EQUAL( expected, actual ) \
JsonTest::checkEqual( *result_, expected, actual, \
__FILE__, __LINE__, \
#expected " == " #actual )
/// \brief Asserts that two values are equal.
#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
JsonTest::checkStringEqual( *result_, \
std::string(expected), std::string(actual), \
#expected " == " #actual )
/// \brief Begin a fixture test case.
#define JSONTEST_FIXTURE( FixtureType, name ) \
class Test##FixtureType##name : public FixtureType \
{ \
public: \
static JsonTest::TestCase *factory() \
{ \
return new Test##FixtureType##name(); \
} \
public: /* overridden from TestCase */ \
virtual const char *testName() const \
{ \
return #FixtureType "/" #name; \
} \
virtual void runTestCase(); \
}; \
\
void Test##FixtureType##name::runTestCase()
#define JSONTEST_FIXTURE_FACTORY( FixtureType, name ) \
&Test##FixtureType##name::factory
#define JSONTEST_REGISTER_FIXTURE( runner, FixtureType, name ) \
(runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
#endif // ifndef JSONTEST_H_INCLUDED
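The JSONTEST_ASSERT_PRED machinery above lets a helper predicate run its own assertions while its failures are reported nested under the calling assertion's context. A small hypothetical sketch (PredExample and checkPositive are invented for illustration, not part of the commit):

struct PredExample : JsonTest::TestCase
{
   // A predicate: a member function of the fixture that performs its own assertions.
   void checkPositive( int value )
   {
      JSONTEST_ASSERT( value > 0 ) << "value=" << value;
   }
};

JSONTEST_FIXTURE( PredExample, allPositive )
{
   // Each call pushes a PredicateContext; a failure recorded inside checkPositive()
   // is printed indented under this call site by TestResult::printFailure().
   JSONTEST_ASSERT_PRED( checkPositive( 1 ) );
   JSONTEST_ASSERT_PRED( checkPositive( -1 ) );  // would fail and be reported nested
   // Registered like the ValueTest fixtures in main.cpp:
   // JSONTEST_REGISTER_FIXTURE( runner, PredExample, allPositive );
}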

src/test_lib_json/main.cpp (new file)

@@ -0,0 +1,244 @@
#include <json/json.h>
#include "jsontest.h"
// TODO:
// - boolean values report that they are integral. They should not.
// - unsigned integers within the signed integer range are not considered valid integers. The range should be checked.
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// Json Library test cases
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
struct ValueTest : JsonTest::TestCase
{
Json::Value null_;
Json::Value emptyArray_;
Json::Value emptyObject_;
Json::Value integer_;
Json::Value unsignedInteger_;
Json::Value smallUnsignedInteger_;
Json::Value real_;
Json::Value array1_;
Json::Value object1_;
Json::Value emptyString_;
Json::Value string1_;
Json::Value string_;
Json::Value true_;
Json::Value false_;
ValueTest()
: emptyArray_( Json::arrayValue )
, emptyObject_( Json::objectValue )
, integer_( 123456789 )
, unsignedInteger_( 34567890u )
, smallUnsignedInteger_( Json::Value::UInt( Json::Value::maxInt ) )
, real_( 1234.56789 )
, emptyString_( "" )
, string1_( "a" )
, string_( "sometext with space" )
, true_( true )
, false_( false )
{
array1_.append( 1234 );
object1_["id"] = 1234;
}
struct IsCheck
{
/// Initialize all checks to \c false by default.
IsCheck();
bool isObject_;
bool isArray_;
bool isBool_;
bool isDouble_;
bool isInt_;
bool isUInt_;
bool isIntegral_;
bool isNumeric_;
bool isString_;
bool isNull_;
};
void checkConstMemberCount( const Json::Value &value, unsigned int expectedCount );
void checkMemberCount( Json::Value &value, unsigned int expectedCount );
void checkIs( const Json::Value &value, const IsCheck &check );
};
JSONTEST_FIXTURE( ValueTest, size )
{
JSONTEST_ASSERT_PRED( checkMemberCount(emptyArray_, 0) );
JSONTEST_ASSERT_PRED( checkMemberCount(emptyObject_, 0) );
JSONTEST_ASSERT_PRED( checkMemberCount(array1_, 1) );
JSONTEST_ASSERT_PRED( checkMemberCount(object1_, 1) );
JSONTEST_ASSERT_PRED( checkMemberCount(null_, 0) );
JSONTEST_ASSERT_PRED( checkMemberCount(integer_, 0) );
JSONTEST_ASSERT_PRED( checkMemberCount(real_, 0) );
JSONTEST_ASSERT_PRED( checkMemberCount(emptyString_, 0) );
JSONTEST_ASSERT_PRED( checkMemberCount(string_, 0) );
JSONTEST_ASSERT_PRED( checkMemberCount(true_, 0) );
}
JSONTEST_FIXTURE( ValueTest, isObject )
{
IsCheck checks;
checks.isObject_ = true;
JSONTEST_ASSERT_PRED( checkIs( emptyObject_, checks ) );
JSONTEST_ASSERT_PRED( checkIs( object1_, checks ) );
}
JSONTEST_FIXTURE( ValueTest, isArray )
{
IsCheck checks;
checks.isArray_ = true;
JSONTEST_ASSERT_PRED( checkIs( emptyArray_, checks ) );
JSONTEST_ASSERT_PRED( checkIs( array1_, checks ) );
}
JSONTEST_FIXTURE( ValueTest, isNull )
{
IsCheck checks;
checks.isNull_ = true;
checks.isObject_ = true;
checks.isArray_ = true;
JSONTEST_ASSERT_PRED( checkIs( null_, checks ) );
}
JSONTEST_FIXTURE( ValueTest, isString )
{
IsCheck checks;
checks.isString_ = true;
JSONTEST_ASSERT_PRED( checkIs( emptyString_, checks ) );
JSONTEST_ASSERT_PRED( checkIs( string_, checks ) );
JSONTEST_ASSERT_PRED( checkIs( string1_, checks ) );
}
JSONTEST_FIXTURE( ValueTest, isBool )
{
IsCheck checks;
checks.isBool_ = true;
checks.isIntegral_ = true;
checks.isNumeric_ = true;
JSONTEST_ASSERT_PRED( checkIs( false_, checks ) );
JSONTEST_ASSERT_PRED( checkIs( true_, checks ) );
}
JSONTEST_FIXTURE( ValueTest, isDouble )
{
IsCheck checks;
checks.isDouble_ = true;
checks.isNumeric_ = true;
JSONTEST_ASSERT_PRED( checkIs( real_, checks ) );
}
JSONTEST_FIXTURE( ValueTest, isInt )
{
IsCheck checks;
checks.isInt_ = true;
checks.isNumeric_ = true;
checks.isIntegral_ = true;
JSONTEST_ASSERT_PRED( checkIs( integer_, checks ) );
}
JSONTEST_FIXTURE( ValueTest, isUInt )
{
IsCheck checks;
checks.isUInt_ = true;
checks.isNumeric_ = true;
checks.isIntegral_ = true;
JSONTEST_ASSERT_PRED( checkIs( unsignedInteger_, checks ) );
JSONTEST_ASSERT_PRED( checkIs( smallUnsignedInteger_, checks ) );
}
void
ValueTest::checkConstMemberCount( const Json::Value &value, unsigned int expectedCount )
{
unsigned int count = 0;
Json::Value::const_iterator itEnd = value.end();
for ( Json::Value::const_iterator it = value.begin(); it != itEnd; ++it )
{
++count;
}
JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::const_iterator";
}
void
ValueTest::checkMemberCount( Json::Value &value, unsigned int expectedCount )
{
JSONTEST_ASSERT_EQUAL( expectedCount, value.size() );
unsigned int count = 0;
Json::Value::iterator itEnd = value.end();
for ( Json::Value::iterator it = value.begin(); it != itEnd; ++it )
{
++count;
}
JSONTEST_ASSERT_EQUAL( expectedCount, count ) << "Json::Value::iterator";
JSONTEST_ASSERT_PRED( checkConstMemberCount(value, expectedCount) );
}
ValueTest::IsCheck::IsCheck()
: isObject_( false )
, isArray_( false )
, isBool_( false )
, isDouble_( false )
, isInt_( false )
, isUInt_( false )
, isIntegral_( false )
, isNumeric_( false )
, isString_( false )
, isNull_( false )
{
}
void
ValueTest::checkIs( const Json::Value &value, const IsCheck &check )
{
JSONTEST_ASSERT_EQUAL( check.isObject_, value.isObject() );
JSONTEST_ASSERT_EQUAL( check.isArray_, value.isArray() );
JSONTEST_ASSERT_EQUAL( check.isBool_, value.isBool() );
JSONTEST_ASSERT_EQUAL( check.isDouble_, value.isDouble() );
JSONTEST_ASSERT_EQUAL( check.isInt_, value.isInt() );
JSONTEST_ASSERT_EQUAL( check.isUInt_, value.isUInt() );
JSONTEST_ASSERT_EQUAL( check.isIntegral_, value.isIntegral() );
JSONTEST_ASSERT_EQUAL( check.isNumeric_, value.isNumeric() );
JSONTEST_ASSERT_EQUAL( check.isString_, value.isString() );
JSONTEST_ASSERT_EQUAL( check.isNull_, value.isNull() );
}
int main( int argc, const char *argv[] )
{
JsonTest::Runner runner;
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, size );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isObject );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isArray );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isBool );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isInt );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isUInt );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isDouble );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isString );
JSONTEST_REGISTER_FIXTURE( runner, ValueTest, isNull );
return runner.runCommandLine( argc, argv );
}

src/test_lib_json/sconscript (new file)

@@ -0,0 +1,10 @@
Import( 'env_testing buildUnitTests' )
buildUnitTests( env_testing, Split( """
main.cpp
jsontest.cpp
""" ),
'test_lib_json' )
# For 'check' to work, 'libs' must be built first.
env_testing.Depends('test_lib_json', '#libs')

test/runjsontests.py

@@ -116,7 +116,7 @@ def main():
options, args = parser.parse_args()
if len(args) < 1 or len(args) > 2:
options.error( 'Must provide at least the path to the jsontestrunner executable.' )
parser.error( 'Must provide at least the path to the jsontestrunner executable.' )
sys.exit( 1 )
jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) )

test/rununittests.py (new file)

@@ -0,0 +1,73 @@
import sys
import os
import os.path
import subprocess
from glob import glob
import optparse
VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
class TestProxy(object):
def __init__( self, test_exe_path, use_valgrind=False ):
self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
self.use_valgrind = use_valgrind
def run( self, options ):
if self.use_valgrind:
cmd = VALGRIND_CMD.split()
else:
cmd = []
cmd.extend( [self.test_exe_path, '--test-auto'] + options )
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
stdout = process.communicate()[0]
if process.returncode:
return False, stdout
return True, stdout
def runAllTests( exe_path, use_valgrind=False ):
test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
status, test_names = test_proxy.run( ['--list-tests'] )
if not status:
print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
return 1
test_names = [name.strip() for name in test_names.strip().split('\n')]
failures = []
for name in test_names:
print 'TESTING %s:' % name,
succeed, result = test_proxy.run( ['--test', name] )
if succeed:
print 'OK'
else:
failures.append( (name, result) )
print 'FAILED'
failed_count = len(failures)
pass_count = len(test_names) - failed_count
if failed_count:
print
for name, result in failures:
print result
print '%d/%d tests passed (%d failure(s))' % (
pass_count, len(test_names), failed_count)
return 1
else:
print 'All %d tests passed' % len(test_names)
return 0
def main():
from optparse import OptionParser
parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
parser.add_option("--valgrind",
action="store_true", dest="valgrind", default=False,
help="run all the tests using valgrind to detect memory leaks")
parser.enable_interspersed_args()
options, args = parser.parse_args()
if len(args) != 1:
parser.error( 'Must provide the path to the test_lib_json executable.' )
sys.exit( 1 )
exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
sys.exit( exit_code )
if __name__ == '__main__':
main()