Mirror of https://github.com/open-source-parsers/jsoncpp.git
Synced 2025-10-14 15:05:34 +02:00

Compare commits (3 commits: 15949af098, 8dc52b3cca, add941c1a9)

.gitignore (vendored), 2 changes
@@ -10,4 +10,4 @@
/libs/
/doc/doxyfile
/dist/
#/include/json/version.h
/include/json/version.h
CMakeLists.txt

@@ -17,20 +17,28 @@ IF(NOT WIN32)
ENDIF(NOT CMAKE_BUILD_TYPE)
ENDIF(NOT WIN32)

SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory")

SET(RUNTIME_INSTALL_DIR bin
    CACHE PATH "Install dir for executables and dlls")
SET(ARCHIVE_INSTALL_DIR lib${LIB_SUFFIX}
SET(ARCHIVE_INSTALL_DIR lib
    CACHE PATH "Install dir for static libraries")
SET(LIBRARY_INSTALL_DIR lib${LIB_SUFFIX}
SET(LIBRARY_INSTALL_DIR lib
    CACHE PATH "Install dir for shared libraries")
SET(INCLUDE_INSTALL_DIR include
    CACHE PATH "Install dir for headers")
SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake
SET(PACKAGE_INSTALL_DIR lib/cmake
    CACHE PATH "Install dir for cmake package config files")
MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR )

# This ensures shared DLL are in the same dir as executable on Windows.
# Put all executables / libraries are in a project global directory.
SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
    CACHE PATH "Single directory for all static libraries.")
SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
    CACHE PATH "Single directory for all dynamic libraries on Unix.")
SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin
    CACHE PATH "Single directory for all executable and dynamic libraries on Windows.")
MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY )

# Set variable named ${VAR_NAME} to value ${VALUE}
FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
    SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)

@@ -93,7 +101,7 @@ IF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
        "pkg-config/jsoncpp.pc"
        @ONLY)
    INSTALL(FILES "${CMAKE_BINARY_DIR}/pkg-config/jsoncpp.pc"
        DESTINATION "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/pkgconfig")
        DESTINATION "${CMAKE_INSTALL_PREFIX}/lib/pkgconfig")
ENDIF(JSONCPP_WITH_PKGCONFIG_SUPPORT)

IF(JSONCPP_WITH_CMAKE_PACKAGE)
amalgamate.py

@@ -56,7 +56,7 @@ def amalgamate_source( source_top_dir=None,
    target_source_path: output .cpp path
    header_include_path: generated header path relative to target_source_path.
    """
    print("Amalgating header...")
    print ("Amalgating header...")
    header = AmalgamationFile( source_top_dir )
    header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." )
    header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )

@@ -77,12 +77,12 @@ def amalgamate_source( source_top_dir=None,
    header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" )

    target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
    print("Writing amalgated header to %r" % target_header_path)
    print ("Writing amalgated header to %r" % target_header_path)
    header.write_to( target_header_path )

    base, ext = os.path.splitext( header_include_path )
    forward_header_include_path = base + "-forwards" + ext
    print("Amalgating forward header...")
    print ("Amalgating forward header...")
    header = AmalgamationFile( source_top_dir )
    header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." )
    header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path )

@@ -99,10 +99,10 @@ def amalgamate_source( source_top_dir=None,

    target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                               forward_header_include_path )
    print("Writing amalgated forward header to %r" % target_forward_header_path)
    print ("Writing amalgated forward header to %r" % target_forward_header_path)
    header.write_to( target_forward_header_path )

    print("Amalgating source...")
    print ("Amalgating source...")
    source = AmalgamationFile( source_top_dir )
    source.add_text( "/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/)." )
    source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )

@@ -118,7 +118,7 @@ def amalgamate_source( source_top_dir=None,
    source.add_file( os.path.join(lib_json, "json_value.cpp") )
    source.add_file( os.path.join(lib_json, "json_writer.cpp") )

    print("Writing amalgated source to %r" % target_source_path)
    print ("Writing amalgated source to %r" % target_source_path)
    source.write_to( target_source_path )

def main():

@@ -144,7 +144,7 @@ Generate a single amalgated source and header file from the sources.
        sys.stderr.write( msg + "\n" )
        sys.exit( 1 )
    else:
        print("Source succesfully amalagated")
        print ("Source succesfully amalagated")

if __name__ == "__main__":
    main()
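The repeated print pairs above are the heart of this compare: the 1.2.x sources use the Python 3 style print() function, while the 0.y.z branch returns to Python 2 print statements. A minimal sketch of why the `from __future__ import print_function` line matters; the file name and report() helper are hypothetical, for illustration only:

```python
# py_print_compat.py: hypothetical demo of the two print styles reconciled.
# With the future import, the same source runs on Python 2 and Python 3.
from __future__ import print_function

import sys

def report(path, ok):
    # print() is a function here even on Python 2, so keyword arguments
    # such as file= and end= are available on both interpreter lines.
    print("%s =>" % path, end=" ")
    print("ok" if ok else "FAILED", file=sys.stdout if ok else sys.stderr)

if __name__ == "__main__":
    report("include/json/json.h", True)
```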
antglob.py

@@ -2,7 +2,6 @@
# encoding: utf-8
# Baptiste Lepilleur, 2009

from __future__ import print_function
from dircache import listdir
import re
import fnmatch

@@ -191,12 +190,12 @@ if __name__ == "__main__":
            test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
        for ant_pattern, accepted_matches, rejected_matches in test_cases:
            rex = ant_pattern_to_re( ant_pattern )
            print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
            print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
            for accepted_match in accepted_matches:
                print('Accepted?:', accepted_match)
                self.assertTrue( rex.match( accepted_match ) is not None )
                print 'Accepted?:', accepted_match
                self.assert_( rex.match( accepted_match ) is not None )
            for rejected_match in rejected_matches:
                print('Rejected?:', rejected_match)
                self.assertTrue( rex.match( rejected_match ) is None )
                print 'Rejected?:', rejected_match
                self.assert_( rex.match( rejected_match ) is None )

    unittest.main()
batchbuild.py

@@ -1,281 +1,280 @@
from __future__ import print_function
import collections
import itertools
import json
import os
import os.path
import re
import shutil
import string
import subprocess
import sys
import cgi

class BuildDesc:
    def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None):
        self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ]
        self.variables = variables or []
        self.build_type = build_type
        self.generator = generator

    def merged_with( self, build_desc ):
        """Returns a new BuildDesc by merging field content.
        Prefer build_desc fields to self fields for single valued field.
        """
        return BuildDesc( self.prepend_envs + build_desc.prepend_envs,
                          self.variables + build_desc.variables,
                          build_desc.build_type or self.build_type,
                          build_desc.generator or self.generator )

    def env( self ):
        environ = os.environ.copy()
        for values_by_name in self.prepend_envs:
            for var, value in list(values_by_name.items()):
                var = var.upper()
                if type(value) is unicode:
                    value = value.encode( sys.getdefaultencoding() )
                if var in environ:
                    environ[var] = value + os.pathsep + environ[var]
                else:
                    environ[var] = value
        return environ

    def cmake_args( self ):
        args = ["-D%s" % var for var in self.variables]
        # skip build type for Visual Studio solution as it cause warning
        if self.build_type and 'Visual' not in self.generator:
            args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type )
        if self.generator:
            args.extend( ['-G', self.generator] )
        return args

    def __repr__( self ):
        return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type)

class BuildData:
    def __init__( self, desc, work_dir, source_dir ):
        self.desc = desc
        self.work_dir = work_dir
        self.source_dir = source_dir
        self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' )
        self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' )
        self.cmake_succeeded = False
        self.build_succeeded = False

    def execute_build(self):
        print('Build %s' % self.desc)
        self._make_new_work_dir( )
        self.cmake_succeeded = self._generate_makefiles( )
        if self.cmake_succeeded:
            self.build_succeeded = self._build_using_makefiles( )
        return self.build_succeeded

    def _generate_makefiles(self):
        print(' Generating makefiles: ', end=' ')
        cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
        print('done' if succeeded else 'FAILED')
        return succeeded

    def _build_using_makefiles(self):
        print(' Building:', end=' ')
        cmd = ['cmake', '--build', self.work_dir]
        if self.desc.build_type:
            cmd += ['--config', self.desc.build_type]
        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
        print('done' if succeeded else 'FAILED')
        return succeeded

    def _execute_build_subprocess(self, cmd, env, log_path):
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
                                    env=env )
        stdout, _ = process.communicate( )
        succeeded = (process.returncode == 0)
        with open( log_path, 'wb' ) as flog:
            log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
            flog.write( fix_eol( log ) )
        return succeeded

    def _make_new_work_dir(self):
        if os.path.isdir( self.work_dir ):
            print(' Removing work directory', self.work_dir)
            shutil.rmtree( self.work_dir, ignore_errors=True )
        if not os.path.isdir( self.work_dir ):
            os.makedirs( self.work_dir )

def fix_eol( stdout ):
    """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
    """
    return re.sub( '\r*\n', os.linesep, stdout )

def load_build_variants_from_config( config_path ):
    with open( config_path, 'rb' ) as fconfig:
        data = json.load( fconfig )
    variants = data[ 'cmake_variants' ]
    build_descs_by_axis = collections.defaultdict( list )
    for axis in variants:
        axis_name = axis["name"]
        build_descs = []
        if "generators" in axis:
            for generator_data in axis["generators"]:
                for generator in generator_data["generator"]:
                    build_desc = BuildDesc( generator=generator,
                                            prepend_envs=generator_data.get("env_prepend") )
                    build_descs.append( build_desc )
        elif "variables" in axis:
            for variables in axis["variables"]:
                build_desc = BuildDesc( variables=variables )
                build_descs.append( build_desc )
        elif "build_types" in axis:
            for build_type in axis["build_types"]:
                build_desc = BuildDesc( build_type=build_type )
                build_descs.append( build_desc )
        build_descs_by_axis[axis_name].extend( build_descs )
    return build_descs_by_axis

def generate_build_variants( build_descs_by_axis ):
    """Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
    axis_names = list(build_descs_by_axis.keys())
    build_descs = []
    for axis_name, axis_build_descs in list(build_descs_by_axis.items()):
        if len(build_descs):
            # for each existing build_desc and each axis build desc, create a new build_desc
            new_build_descs = []
            for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs):
                new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) )
            build_descs = new_build_descs
        else:
            build_descs = axis_build_descs
    return build_descs

HTML_TEMPLATE = string.Template('''<html>
<head>
    <title>$title</title>
    <style type="text/css">
    td.failed {background-color:#f08080;}
    td.ok {background-color:#c0eec0;}
    </style>
</head>
<body>
<table border="1">
<thead>
    <tr>
        <th>Variables</th>
        $th_vars
    </tr>
    <tr>
        <th>Build type</th>
        $th_build_types
    </tr>
</thead>
<tbody>
$tr_builds
</tbody>
</table>
</body></html>''')

def generate_html_report( html_report_path, builds ):
    report_dir = os.path.dirname( html_report_path )
    # Vertical axis: generator
    # Horizontal: variables, then build_type
    builds_by_generator = collections.defaultdict( list )
    variables = set()
    build_types_by_variable = collections.defaultdict( set )
    build_by_pos_key = {} # { (generator, var_key, build_type): build }
    for build in builds:
        builds_by_generator[build.desc.generator].append( build )
        var_key = tuple(sorted(build.desc.variables))
        variables.add( var_key )
        build_types_by_variable[var_key].add( build.desc.build_type )
        pos_key = (build.desc.generator, var_key, build.desc.build_type)
        build_by_pos_key[pos_key] = build
    variables = sorted( variables )
    th_vars = []
    th_build_types = []
    for variable in variables:
        build_types = sorted( build_types_by_variable[variable] )
        nb_build_type = len(build_types_by_variable[variable])
        th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) )
        for build_type in build_types:
            th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) )
    tr_builds = []
    for generator in sorted( builds_by_generator ):
        tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ]
        for variable in variables:
            build_types = sorted( build_types_by_variable[variable] )
            for build_type in build_types:
                pos_key = (generator, variable, build_type)
                build = build_by_pos_key.get(pos_key)
                if build:
                    cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
                    build_status = 'ok' if build.build_succeeded else 'FAILED'
                    cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
                    build_log_url = os.path.relpath( build.build_log_path, report_dir )
                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
                        build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
                    if build.cmake_succeeded:
                        td += '<br><a href="%s" class="%s">Build: %s</a>' % (
                            build_log_url, build_status.lower(), build_status)
                    td += '</td>'
                else:
                    td = '<td></td>'
                tds.append( td )
        tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
    html = HTML_TEMPLATE.substitute(
        title='Batch build report',
        th_vars=' '.join(th_vars),
        th_build_types=' '.join( th_build_types),
        tr_builds='\n'.join( tr_builds ) )
    with open( html_report_path, 'wt' ) as fhtml:
        fhtml.write( html )
    print('HTML report generated in:', html_report_path)

def main():
    usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
Build a given CMake based project located in SOURCE_DIR with multiple generators/options.dry_run
as described in CONFIG_JSON_PATH building in WORK_DIR.

Example of call:
python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = True
    # parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
    #     help="""Be verbose.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) < 3:
        parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
    work_dir = args[0]
    source_dir = args[1].rstrip('/\\')
    config_paths = args[2:]
    for config_path in config_paths:
        if not os.path.isfile( config_path ):
            parser.error( "Can not read: %r" % config_path )

    # generate build variants
    build_descs = []
    for config_path in config_paths:
        build_descs_by_axis = load_build_variants_from_config( config_path )
        build_descs.extend( generate_build_variants( build_descs_by_axis ) )
    print('Build variants (%d):' % len(build_descs))
    # assign build directory for each variant
    if not os.path.isdir( work_dir ):
        os.makedirs( work_dir )
    builds = []
    with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
        for index, build_desc in enumerate( build_descs ):
            build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
            builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
            fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
    for build in builds:
        build.execute_build()
    html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
    generate_html_report( html_report_path, builds )
    print('Done')


if __name__ == '__main__':
    main()

import collections
import itertools
import json
import os
import os.path
import re
import shutil
import string
import subprocess
import sys
import cgi

class BuildDesc:
    def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None):
        self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ]
        self.variables = variables or []
        self.build_type = build_type
        self.generator = generator

    def merged_with( self, build_desc ):
        """Returns a new BuildDesc by merging field content.
        Prefer build_desc fields to self fields for single valued field.
        """
        return BuildDesc( self.prepend_envs + build_desc.prepend_envs,
                          self.variables + build_desc.variables,
                          build_desc.build_type or self.build_type,
                          build_desc.generator or self.generator )

    def env( self ):
        environ = os.environ.copy()
        for values_by_name in self.prepend_envs:
            for var, value in values_by_name.items():
                var = var.upper()
                if type(value) is unicode:
                    value = value.encode( sys.getdefaultencoding() )
                if var in environ:
                    environ[var] = value + os.pathsep + environ[var]
                else:
                    environ[var] = value
        return environ

    def cmake_args( self ):
        args = ["-D%s" % var for var in self.variables]
        # skip build type for Visual Studio solution as it cause warning
        if self.build_type and 'Visual' not in self.generator:
            args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type )
        if self.generator:
            args.extend( ['-G', self.generator] )
        return args

    def __repr__( self ):
        return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type)

class BuildData:
    def __init__( self, desc, work_dir, source_dir ):
        self.desc = desc
        self.work_dir = work_dir
        self.source_dir = source_dir
        self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' )
        self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' )
        self.cmake_succeeded = False
        self.build_succeeded = False

    def execute_build(self):
        print 'Build %s' % self.desc
        self._make_new_work_dir( )
        self.cmake_succeeded = self._generate_makefiles( )
        if self.cmake_succeeded:
            self.build_succeeded = self._build_using_makefiles( )
        return self.build_succeeded

    def _generate_makefiles(self):
        print ' Generating makefiles: ',
        cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
        print 'done' if succeeded else 'FAILED'
        return succeeded

    def _build_using_makefiles(self):
        print ' Building:',
        cmd = ['cmake', '--build', self.work_dir]
        if self.desc.build_type:
            cmd += ['--config', self.desc.build_type]
        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
        print 'done' if succeeded else 'FAILED'
        return succeeded

    def _execute_build_subprocess(self, cmd, env, log_path):
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
                                    env=env )
        stdout, _ = process.communicate( )
        succeeded = (process.returncode == 0)
        with open( log_path, 'wb' ) as flog:
            log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
            flog.write( fix_eol( log ) )
        return succeeded

    def _make_new_work_dir(self):
        if os.path.isdir( self.work_dir ):
            print ' Removing work directory', self.work_dir
            shutil.rmtree( self.work_dir, ignore_errors=True )
        if not os.path.isdir( self.work_dir ):
            os.makedirs( self.work_dir )

def fix_eol( stdout ):
    """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
    """
    return re.sub( '\r*\n', os.linesep, stdout )

def load_build_variants_from_config( config_path ):
    with open( config_path, 'rb' ) as fconfig:
        data = json.load( fconfig )
    variants = data[ 'cmake_variants' ]
    build_descs_by_axis = collections.defaultdict( list )
    for axis in variants:
        axis_name = axis["name"]
        build_descs = []
        if "generators" in axis:
            for generator_data in axis["generators"]:
                for generator in generator_data["generator"]:
                    build_desc = BuildDesc( generator=generator,
                                            prepend_envs=generator_data.get("env_prepend") )
                    build_descs.append( build_desc )
        elif "variables" in axis:
            for variables in axis["variables"]:
                build_desc = BuildDesc( variables=variables )
                build_descs.append( build_desc )
        elif "build_types" in axis:
            for build_type in axis["build_types"]:
                build_desc = BuildDesc( build_type=build_type )
                build_descs.append( build_desc )
        build_descs_by_axis[axis_name].extend( build_descs )
    return build_descs_by_axis

def generate_build_variants( build_descs_by_axis ):
    """Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
    axis_names = build_descs_by_axis.keys()
    build_descs = []
    for axis_name, axis_build_descs in build_descs_by_axis.items():
        if len(build_descs):
            # for each existing build_desc and each axis build desc, create a new build_desc
            new_build_descs = []
            for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs):
                new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) )
            build_descs = new_build_descs
        else:
            build_descs = axis_build_descs
    return build_descs

HTML_TEMPLATE = string.Template('''<html>
<head>
    <title>$title</title>
    <style type="text/css">
    td.failed {background-color:#f08080;}
    td.ok {background-color:#c0eec0;}
    </style>
</head>
<body>
<table border="1">
<thead>
    <tr>
        <th>Variables</th>
        $th_vars
    </tr>
    <tr>
        <th>Build type</th>
        $th_build_types
    </tr>
</thead>
<tbody>
$tr_builds
</tbody>
</table>
</body></html>''')

def generate_html_report( html_report_path, builds ):
    report_dir = os.path.dirname( html_report_path )
    # Vertical axis: generator
    # Horizontal: variables, then build_type
    builds_by_generator = collections.defaultdict( list )
    variables = set()
    build_types_by_variable = collections.defaultdict( set )
    build_by_pos_key = {} # { (generator, var_key, build_type): build }
    for build in builds:
        builds_by_generator[build.desc.generator].append( build )
        var_key = tuple(sorted(build.desc.variables))
        variables.add( var_key )
        build_types_by_variable[var_key].add( build.desc.build_type )
        pos_key = (build.desc.generator, var_key, build.desc.build_type)
        build_by_pos_key[pos_key] = build
    variables = sorted( variables )
    th_vars = []
    th_build_types = []
    for variable in variables:
        build_types = sorted( build_types_by_variable[variable] )
        nb_build_type = len(build_types_by_variable[variable])
        th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) )
        for build_type in build_types:
            th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) )
    tr_builds = []
    for generator in sorted( builds_by_generator ):
        tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ]
        for variable in variables:
            build_types = sorted( build_types_by_variable[variable] )
            for build_type in build_types:
                pos_key = (generator, variable, build_type)
                build = build_by_pos_key.get(pos_key)
                if build:
                    cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
                    build_status = 'ok' if build.build_succeeded else 'FAILED'
                    cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
                    build_log_url = os.path.relpath( build.build_log_path, report_dir )
                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
                        build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
                    if build.cmake_succeeded:
                        td += '<br><a href="%s" class="%s">Build: %s</a>' % (
                            build_log_url, build_status.lower(), build_status)
                    td += '</td>'
                else:
                    td = '<td></td>'
                tds.append( td )
        tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
    html = HTML_TEMPLATE.substitute(
        title='Batch build report',
        th_vars=' '.join(th_vars),
        th_build_types=' '.join( th_build_types),
        tr_builds='\n'.join( tr_builds ) )
    with open( html_report_path, 'wt' ) as fhtml:
        fhtml.write( html )
    print 'HTML report generated in:', html_report_path

def main():
    usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
Build a given CMake based project located in SOURCE_DIR with multiple generators/options.dry_run
as described in CONFIG_JSON_PATH building in WORK_DIR.

Example of call:
python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = True
    # parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
    #     help="""Be verbose.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) < 3:
        parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
    work_dir = args[0]
    source_dir = args[1].rstrip('/\\')
    config_paths = args[2:]
    for config_path in config_paths:
        if not os.path.isfile( config_path ):
            parser.error( "Can not read: %r" % config_path )

    # generate build variants
    build_descs = []
    for config_path in config_paths:
        build_descs_by_axis = load_build_variants_from_config( config_path )
        build_descs.extend( generate_build_variants( build_descs_by_axis ) )
    print 'Build variants (%d):' % len(build_descs)
    # assign build directory for each variant
    if not os.path.isdir( work_dir ):
        os.makedirs( work_dir )
    builds = []
    with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
        for index, build_desc in enumerate( build_descs ):
            build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
            builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
            fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
    for build in builds:
        build.execute_build()
    html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
    generate_html_report( html_report_path, builds )
    print 'Done'


if __name__ == '__main__':
    main()
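Both full listings above implement the same build-matrix idea: each config axis (generator, variables, build type) contributes partial build descriptions, and generate_build_variants() folds the axes together with itertools.product. A self-contained sketch of that folding step, using simplified dict-based descriptions instead of the script's BuildDesc class (all names below are illustrative):

```python
# Hypothetical, simplified version of generate_build_variants() above:
# fold each axis into the running list with a cartesian product.
import itertools

def merge(a, b):
    # Later (axis) fields win, mirroring BuildDesc.merged_with().
    merged = dict(a)
    merged.update(b)
    return merged

def generate_variants(axes):
    variants = []
    for axis in axes:
        if variants:
            variants = [merge(proto, extra)
                        for proto, extra in itertools.product(variants, axis)]
        else:
            variants = list(axis)
    return variants

if __name__ == "__main__":
    generators = [{"generator": "Unix Makefiles"}]
    build_types = [{"build_type": "Debug"}, {"build_type": "Release"}]
    shared = [{"shared": "ON"}, {"shared": "OFF"}]
    for v in generate_variants([generators, build_types, shared]):
        print(v)  # 1 * 2 * 2 = 4 build variants
```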
fixeol.py

@@ -1,4 +1,3 @@
from __future__ import print_function
import os.path

def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):

@@ -7,8 +6,8 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError as msg:
        print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
        return False
    try:
        raw_lines = f.readlines()

@@ -16,7 +15,7 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print('%s =>' % path, end=' ')
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:

@@ -24,7 +23,7 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
            finally:
                f.close()
        if verbose:
            print(is_dry_run and ' NEED FIX' or ' FIXED')
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True
##
##
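fix_source_eol() normalizes line endings by stripping any trailing '\r'/'\n' and re-appending the requested eol; comparing raw_lines to fixed_lines is what makes the dry run cheap. A small standalone sketch of the same normalization (the function name is illustrative):

```python
# Hypothetical re-statement of the EOL-normalization core used above.
def normalize_eol(raw_lines, eol="\n"):
    fixed_lines = [line.rstrip("\r\n") + eol for line in raw_lines]
    # In a dry run the caller only needs to know whether anything would change.
    return fixed_lines, raw_lines != fixed_lines

if __name__ == "__main__":
    lines, needs_fix = normalize_eol(["a\r\n", "b\n", "c"])
    assert lines == ["a\n", "b\n", "c\n"] and needs_fix
```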
licenseupdater.py

@@ -1,6 +1,5 @@
"""Updates the license text in source file.
"""
from __future__ import print_function

# An existing license is found if the file starts with the string below,
# and ends with the first blank line.

@@ -35,11 +34,11 @@ def update_license( path, dry_run, show_diff ):
        if not dry_run:
            with open( path, 'wb' ) as fout:
                fout.write( new_text.replace('\n', newline ) )
        print('Updated', path)
        print 'Updated', path
        if show_diff:
            import difflib
            print('\n'.join( difflib.unified_diff( original_text.split('\n'),
                                                   new_text.split('\n') ) ))
            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
                                                   new_text.split('\n') ) )
        return True
    return False

@@ -84,7 +83,7 @@ python devtools\licenseupdater.py include src
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    update_license_in_source_directories( args, options.dry_run, options.show_diff )
    print('Done')
    print 'Done'

if __name__ == '__main__':
    import sys
doxybuild.py, 28 changes

@@ -1,12 +1,12 @@
"""Script to generate doxygen documentation.
"""
from __future__ import print_function
from devtools import tarball

import re
import os
import os.path
import sys
import shutil
from devtools import tarball

def find_program(*filenames):
    """find a program in folders path_lst, and sets env[var]

@@ -33,9 +33,9 @@ def do_subst_in_file(targetfile, sourcefile, dict):
        contents = f.read()
        f.close()
    except:
        print("Can't read source file %s"%sourcefile)
        print "Can't read source file %s"%sourcefile
        raise
    for (k,v) in list(dict.items()):
    for (k,v) in dict.items():
        v = v.replace('\\','\\\\')
        contents = re.sub(k, v, contents)
    try:

@@ -43,7 +43,7 @@ def do_subst_in_file(targetfile, sourcefile, dict):
        f.write(contents)
        f.close()
    except:
        print("Can't write target file %s"%targetfile)
        print "Can't write target file %s"%targetfile
        raise

def run_doxygen(doxygen_path, config_file, working_dir, is_silent):

@@ -53,12 +53,12 @@ def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
    try:
        os.chdir( working_dir )
        cmd = [doxygen_path, config_file]
        print('Running:', ' '.join( cmd ))
        print 'Running:', ' '.join( cmd )
        try:
            import subprocess
        except:
            if os.system( ' '.join( cmd ) ) != 0:
                print('Documentation generation failed')
                print 'Documentation generation failed'
                return False
        else:
            if is_silent:

@@ -67,8 +67,8 @@ def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
                process = subprocess.Popen( cmd )
            stdout, _ = process.communicate()
            if process.returncode:
                print('Documentation generation failed:')
                print(stdout)
                print 'Documentation generation failed:'
                print stdout
                return False
        return True
    finally:

@@ -107,7 +107,7 @@ def build_doc( options, make_release=False ):
        }

    if os.path.isdir( output_dir ):
        print('Deleting directory:', output_dir)
        print 'Deleting directory:', output_dir
        shutil.rmtree( output_dir )
    if not os.path.isdir( output_dir ):
        os.makedirs( output_dir )

@@ -115,15 +115,15 @@ def build_doc( options, make_release=False ):
    do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
    ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
    if not options.silent:
        print(open(warning_log_path, 'rb').read())
        print open(warning_log_path, 'rb').read()
    index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
    print('Generated documentation can be found in:')
    print(index_path)
    print 'Generated documentation can be found in:'
    print index_path
    if options.open:
        import webbrowser
        webbrowser.open( 'file://' + index_path )
    if options.make_tarball:
        print('Generating doc tarball to', tarball_path)
        print 'Generating doc tarball to', tarball_path
        tarball_sources = [
            output_dir,
            'README.txt',
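do_subst_in_file() treats every key of the substitution dict as a regular expression and rewrites the file contents with re.sub, which is how doc/doxyfile.in becomes doc/doxyfile. A compact sketch of that substitution step (the template text and keys below are illustrative, not taken from the real doxyfile.in):

```python
# Hypothetical cut-down of do_subst_in_file(): keys are regex patterns,
# values replace them throughout the template text.
import re

def subst_text(contents, subst_dict):
    for pattern, value in subst_dict.items():
        # Backslashes in the value must be doubled so re.sub does not
        # treat them as escapes, matching v.replace('\\','\\\\') above.
        contents = re.sub(pattern, value.replace('\\', '\\\\'), contents)
    return contents

if __name__ == "__main__":
    template = "HTML_OUTPUT = %HTML_OUTPUT%\nPROJECT_NUMBER = %JSONCPP_VERSION%\n"
    print(subst_text(template, {'%HTML_OUTPUT%': 'jsoncpp-api-html',
                                '%JSONCPP_VERSION%': '0.7.0'}))
```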
include/json/reader.h

@@ -187,6 +187,7 @@ private:

  typedef std::deque<ErrorInfo> Errors;

  bool expectToken(TokenType type, Token& token, const char* message);
  bool readToken(Token& token);
  void skipSpaces();
  bool match(Location pattern, int patternLength);
include/json/value.h

@@ -171,7 +171,7 @@
    CZString(const char* cstr, DuplicationPolicy allocate);
    CZString(const CZString& other);
    ~CZString();
    CZString& operator=(CZString other);
    CZString &operator=(const CZString &other);
    bool operator<(const CZString& other) const;
    bool operator==(const CZString& other) const;
    ArrayIndex index() const;

@@ -235,26 +235,25 @@ Json::Value obj_value(Json::objectValue); // {}
  Value(const CppTL::ConstString& value);
#endif
  Value(bool value);
  /// Deep copy.
  Value(const Value& other);
  ~Value();

  // Deep copy, then swap(other).
  Value& operator=(Value other);
  /// Swap everything.
  Value &operator=(const Value &other);
  /// Swap values.
  /// \note Currently, comments are intentionally not swapped, for
  /// both logic and efficiency.
  void swap(Value& other);
  /// Swap values but leave comments and source offsets in place.
  void swapPayload(Value& other);

  ValueType type() const;

  /// Compare payload only, not comments etc.
  bool operator<(const Value& other) const;
  bool operator<=(const Value& other) const;
  bool operator>=(const Value& other) const;
  bool operator>(const Value& other) const;

  bool operator==(const Value& other) const;
  bool operator!=(const Value& other) const;

  int compare(const Value& other) const;

  const char* asCString() const;

@@ -441,8 +440,6 @@ Json::Value obj_value(Json::objectValue); // {}
  size_t getOffsetLimit() const;

private:
  void initBasic(ValueType type, bool allocated = false);

  Value& resolveReference(const char* key, bool isStatic);

#ifdef JSON_VALUE_USE_INTERNAL_MAP
include/json/version.h

@@ -4,9 +4,9 @@
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED

# define JSONCPP_VERSION_STRING "1.2.0"
# define JSONCPP_VERSION_MAJOR 1
# define JSONCPP_VERSION_MINOR 2
# define JSONCPP_VERSION_STRING "0.7.0"
# define JSONCPP_VERSION_MAJOR 0
# define JSONCPP_VERSION_MINOR 7
# define JSONCPP_VERSION_PATCH 0
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
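The version header changes from 1.2.0 to 0.7.0, and JSONCPP_VERSION_HEXA packs major/minor/patch into one comparable integer, one byte per component. The same packing in a quick Python check:

```python
# Mirror of the JSONCPP_VERSION_HEXA macro: (major << 24) | (minor << 16) | (patch << 8).
def version_hexa(major, minor, patch):
    return (major << 24) | (minor << 16) | (patch << 8)

if __name__ == "__main__":
    assert version_hexa(0, 7, 0) == 0x00070000
    # Packed versions compare in version order:
    assert version_hexa(0, 7, 0) < version_hexa(1, 2, 0) == 0x01020000
```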
makerelease.py

@@ -14,7 +14,6 @@ python makerelease.py 0.5.0 0.6.0-dev
Note: This was for Subversion. Now that we are in GitHub, we do not
need to build versioned tarballs anymore, so makerelease.py is defunct.
"""
from __future__ import print_function
import os.path
import subprocess
import sys

@@ -47,7 +46,7 @@ class SVNError(Exception):

def svn_command( command, *args ):
    cmd = ['svn', '--non-interactive', command] + list(args)
    print('Running:', ' '.join( cmd ))
    print 'Running:', ' '.join( cmd )
    process = subprocess.Popen( cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT )

@@ -85,7 +84,7 @@ def svn_check_if_tag_exist( tag_url ):
    """
    try:
        list_stdout = svn_command( 'list', tag_url )
    except SVNError as e:
    except SVNError, e:
        if e.returncode != 1 or not str(e).find('tag_url'):
            raise e
        # otherwise ignore error, meaning tag does not exist

@@ -118,7 +117,7 @@ def svn_export( tag_url, export_dir ):
def fix_sources_eol( dist_dir ):
    """Set file EOL for tarball distribution.
    """
    print('Preparing exported source file EOL for distribution...')
    print 'Preparing exported source file EOL for distribution...'
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    win_sources = antglob.glob( dist_dir,
        includes = '**/*.sln **/*.vcproj',

@@ -149,7 +148,7 @@ def download( url, target_path ):

def check_compile( distcheck_top_dir, platform ):
    cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
    print('Running:', ' '.join( cmd ))
    print 'Running:', ' '.join( cmd )
    log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
    flog = open( log_path, 'wb' )
    try:

@@ -180,9 +179,9 @@ def run_sftp_batch( userhost, sftp, batch, retry=0 ):
    # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
    cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
    error = None
    for retry_index in range(0, max(1,retry)):
    for retry_index in xrange(0, max(1,retry)):
        heading = retry_index == 0 and 'Running:' or 'Retrying:'
        print(heading, ' '.join( cmd ))
        print heading, ' '.join( cmd )
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        stdout = process.communicate()[0]
        if process.returncode != 0:

@@ -220,21 +219,21 @@ exit
    upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
    paths_to_remove = existing_paths - upload_paths
    if paths_to_remove:
        print('Removing the following file from web:')
        print('\n'.join( paths_to_remove ))
        print 'Removing the following file from web:'
        print '\n'.join( paths_to_remove )
        stdout = run_sftp_batch( userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove) )
    print('Uploading %d files:' % len(upload_paths))
    print 'Uploading %d files:' % len(upload_paths)
    batch_size = 10
    upload_paths = list(upload_paths)
    start_time = time.time()
    for index in range(0,len(upload_paths),batch_size):
    for index in xrange(0,len(upload_paths),batch_size):
        paths = upload_paths[index:index+batch_size]
        file_per_sec = (time.time() - start_time) / (index+1)
        remaining_files = len(upload_paths) - index
        remaining_sec = file_per_sec * remaining_files
        print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
        print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
        run_sftp_batch( userhost, sftp, """cd htdocs
lcd %s
mput %s

@@ -298,7 +297,7 @@ Warning: --force should only be used when developping/testing the release script
    else:
        msg = check_no_pending_commit()
    if not msg:
        print('Setting version to', release_version)
        print 'Setting version to', release_version
        set_version( release_version )
        svn_commit( 'Release ' + release_version )
        tag_url = svn_join_url( SVN_TAG_ROOT, release_version )

@@ -306,11 +305,11 @@ Warning: --force should only be used when developping/testing the release script
            if options.retag_release:
                svn_remove_tag( tag_url, 'Overwriting previous tag' )
            else:
                print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
                print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
                sys.exit( 1 )
        svn_tag_sandbox( tag_url, 'Release ' + release_version )

        print('Generated doxygen document...')
        print 'Generated doxygen document...'
        ## doc_dirname = r'jsoncpp-api-html-0.5.0'
        ## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
        doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )

@@ -324,11 +323,11 @@ Warning: --force should only be used when developping/testing the release script

        source_dir = 'jsoncpp-src-' + release_version
        source_tarball_path = 'dist/%s.tar.gz' % source_dir
        print('Generating source tarball to', source_tarball_path)
        print 'Generating source tarball to', source_tarball_path
        tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )

        amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
        print('Generating amalgamation source tarball to', amalgamation_tarball_path)
        print 'Generating amalgamation source tarball to', amalgamation_tarball_path
        amalgamation_dir = 'dist/amalgamation'
        amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
        amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version

@@ -338,41 +337,41 @@ Warning: --force should only be used when developping/testing the release script
        # Decompress source tarball, download and install scons-local
        distcheck_dir = 'dist/distcheck'
        distcheck_top_dir = distcheck_dir + '/' + source_dir
        print('Decompressing source tarball to', distcheck_dir)
        print 'Decompressing source tarball to', distcheck_dir
        rmdir_if_exist( distcheck_dir )
        tarball.decompress( source_tarball_path, distcheck_dir )
        scons_local_path = 'dist/scons-local.tar.gz'
        print('Downloading scons-local to', scons_local_path)
        print 'Downloading scons-local to', scons_local_path
        download( SCONS_LOCAL_URL, scons_local_path )
        print('Decompressing scons-local to', distcheck_top_dir)
        print 'Decompressing scons-local to', distcheck_top_dir
        tarball.decompress( scons_local_path, distcheck_top_dir )

        # Run compilation
        print('Compiling decompressed tarball')
        print 'Compiling decompressed tarball'
        all_build_status = True
        for platform in options.platforms.split(','):
            print('Testing platform:', platform)
            print 'Testing platform:', platform
            build_status, log_path = check_compile( distcheck_top_dir, platform )
            print('see build log:', log_path)
            print(build_status and '=> ok' or '=> FAILED')
            print 'see build log:', log_path
            print build_status and '=> ok' or '=> FAILED'
            all_build_status = all_build_status and build_status
        if not build_status:
            print('Testing failed on at least one platform, aborting...')
            print 'Testing failed on at least one platform, aborting...'
            svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
            sys.exit(1)
        if options.user:
            if not options.no_web:
                print('Uploading documentation using user', options.user)
                print 'Uploading documentation using user', options.user
                sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
                print('Completed documentation upload')
                print('Uploading source and documentation tarballs for release using user', options.user)
                print 'Completed documentation upload'
                print 'Uploading source and documentation tarballs for release using user', options.user
                sourceforge_release_tarball( SOURCEFORGE_PROJECT,
                                             [source_tarball_path, doc_tarball_path],
                                             user=options.user, sftp=options.sftp )
                print('Source and doc release tarballs uploaded')
                print 'Source and doc release tarballs uploaded'
            else:
                print('No upload user specified. Web site and download tarbal were not uploaded.')
                print('Tarball can be found at:', doc_tarball_path)
                print 'No upload user specified. Web site and download tarbal were not uploaded.'
                print 'Tarball can be found at:', doc_tarball_path

        # Set next version number and commit
        set_version( next_version )
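The upload loop above batches files through run_sftp_batch() and estimates time remaining from the average seconds per file so far. That ETA arithmetic in isolation, with a stubbed upload step (all values and names below are made up for the demonstration):

```python
# Hypothetical standalone version of the upload loop's batching and ETA math.
import time

def upload_in_batches(paths, batch_size=10, upload=lambda batch: time.sleep(0.01)):
    start_time = time.time()
    for index in range(0, len(paths), batch_size):
        batch = paths[index:index + batch_size]
        sec_per_file = (time.time() - start_time) / (index + 1)
        remaining_sec = sec_per_file * (len(paths) - index)
        print('%d/%d, ETA=%.1fs' % (index + 1, len(paths), remaining_sec))
        upload(batch)

if __name__ == "__main__":
    upload_in_batches(['doc_%d.html' % i for i in range(25)])
```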
pkg-config/jsoncpp.pc.in

@@ -1,7 +1,7 @@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=${prefix}
libdir=${exec_prefix}/@LIBRARY_INSTALL_DIR@
includedir=${prefix}/@INCLUDE_INSTALL_DIR@
libdir=${exec_prefix}/lib
includedir=${prefix}/include

Name: jsoncpp
Description: A C++ library for interacting with JSON
scons-tools/substinfile.py

@@ -1,6 +1,5 @@
import re
from SCons.Script import * # the usual scons stuff you get in a SConscript
import collections

def generate(env):
    """

@@ -26,28 +25,28 @@ def generate(env):
            contents = f.read()
            f.close()
        except:
            raise SCons.Errors.UserError("Can't read source file %s"%sourcefile)
        for (k,v) in list(dict.items()):
            raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile
        for (k,v) in dict.items():
            contents = re.sub(k, v, contents)
        try:
            f = open(targetfile, 'wb')
            f.write(contents)
            f.close()
        except:
            raise SCons.Errors.UserError("Can't write target file %s"%targetfile)
            raise SCons.Errors.UserError, "Can't write target file %s"%targetfile
        return 0 # success

    def subst_in_file(target, source, env):
        if 'SUBST_DICT' not in env:
            raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.")
        if not env.has_key('SUBST_DICT'):
            raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set."
        d = dict(env['SUBST_DICT']) # copy it
        for (k,v) in list(d.items()):
            if isinstance(v, collections.Callable):
        for (k,v) in d.items():
            if callable(v):
                d[k] = env.subst(v()).replace('\\','\\\\')
            elif SCons.Util.is_String(v):
                d[k] = env.subst(v).replace('\\','\\\\')
            else:
                raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)))
                raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))
        for (t,s) in zip(target, source):
            return do_subst_in_file(str(t), str(s), d)

@@ -61,8 +60,8 @@ def generate(env):
        Returns original target, source tuple unchanged.
        """
        d = env['SUBST_DICT'].copy() # copy it
        for (k,v) in list(d.items()):
            if isinstance(v, collections.Callable):
        for (k,v) in d.items():
            if callable(v):
                d[k] = env.subst(v())
            elif SCons.Util.is_String(v):
                d[k]=env.subst(v)
src/lib_json/CMakeLists.txt

@@ -1,8 +1,4 @@
OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF)
IF(BUILD_SHARED_LIBS)
    SET(JSONCPP_LIB_BUILD_SHARED ON)
ENDIF(BUILD_SHARED_LIBS)

IF(JSONCPP_LIB_BUILD_SHARED)
    SET(JSONCPP_LIB_TYPE SHARED)
    ADD_DEFINITIONS( -DJSON_DLL_BUILD )

@@ -10,15 +6,9 @@ ELSE(JSONCPP_LIB_BUILD_SHARED)
    SET(JSONCPP_LIB_TYPE STATIC)
ENDIF(JSONCPP_LIB_BUILD_SHARED)


if( CMAKE_COMPILER_IS_GNUCXX )
    #Get compiler version.
    execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion
                     OUTPUT_VARIABLE GNUCXX_VERSION )

    #-Werror=* was introduced -after- GCC 4.1.2
    if( GNUCXX_VERSION VERSION_GREATER 4.1.2 )
        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
    endif()
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
endif( CMAKE_COMPILER_IS_GNUCXX )

SET( JSONCPP_INCLUDE_DIR ../../include )

@@ -49,15 +39,12 @@ ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE}
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} )

IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
    TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
        $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
        $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
        )
ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)

# Install instructions for this target
IF(JSONCPP_WITH_CMAKE_PACKAGE)
    TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib
        PUBLIC $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
        $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSON_INCLUDE_DIR}>
        )
    SET(INSTALL_EXPORT EXPORT jsoncpp)
ELSE(JSONCPP_WITH_CMAKE_PACKAGE)
    SET(INSTALL_EXPORT)
@@ -47,6 +47,23 @@ Features Features::strictMode() {
|
||||
// Implementation of class Reader
|
||||
// ////////////////////////////////
|
||||
|
||||
static inline bool in(Reader::Char c,
|
||||
Reader::Char c1,
|
||||
Reader::Char c2,
|
||||
Reader::Char c3,
|
||||
Reader::Char c4) {
|
||||
return c == c1 || c == c2 || c == c3 || c == c4;
|
||||
}
|
||||
|
||||
static inline bool in(Reader::Char c,
|
||||
Reader::Char c1,
|
||||
Reader::Char c2,
|
||||
Reader::Char c3,
|
||||
Reader::Char c4,
|
||||
Reader::Char c5) {
|
||||
return c == c1 || c == c2 || c == c3 || c == c4 || c == c5;
|
||||
}
|
||||
|
||||
static bool containsNewLine(Reader::Location begin, Reader::Location end) {
|
||||
for (; begin < end; ++begin)
|
||||
if (*begin == '\n' || *begin == '\r')
|
||||
@@ -163,36 +180,26 @@ bool Reader::readValue() {
|
||||
successful = decodeString(token);
|
||||
break;
|
||||
case tokenTrue:
|
||||
{
|
||||
Value v(true);
|
||||
currentValue().swapPayload(v);
|
||||
currentValue() = true;
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
}
|
||||
break;
|
||||
case tokenFalse:
|
||||
{
|
||||
Value v(false);
|
||||
currentValue().swapPayload(v);
|
||||
currentValue() = false;
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
}
|
||||
break;
|
||||
case tokenNull:
|
||||
{
|
||||
Value v;
|
||||
currentValue().swapPayload(v);
|
||||
currentValue() = Value();
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
}
|
||||
break;
|
||||
case tokenArraySeparator:
|
||||
if (features_.allowDroppedNullPlaceholders_) {
|
||||
// "Un-read" the current token and mark the current value as a null
|
||||
// token.
|
||||
current_--;
|
||||
Value v;
|
||||
currentValue().swapPayload(v);
|
||||
currentValue() = Value();
|
||||
currentValue().setOffsetStart(current_ - begin_ - 1);
|
||||
currentValue().setOffsetLimit(current_ - begin_);
|
||||
break;
|
||||
@@ -222,6 +229,13 @@ void Reader::skipCommentTokens(Token& token) {
|
||||
}
|
||||
}
|
||||
|
||||
bool Reader::expectToken(TokenType type, Token& token, const char* message) {
|
||||
readToken(token);
|
||||
if (token.type_ != type)
|
||||
return addError(message, token);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool Reader::readToken(Token& token) {
|
||||
skipSpaces();
|
||||
token.start_ = current_;
|
||||
@@ -367,24 +381,11 @@ bool Reader::readCppStyleComment() {
|
||||
}
|
||||
|
||||
void Reader::readNumber() {
|
||||
const char *p = current_;
|
||||
char c = '0'; // stopgap for already consumed character
|
||||
// integral part
|
||||
while (c >= '0' && c <= '9')
|
||||
c = (current_ = p) < end_ ? *p++ : 0;
|
||||
// fractional part
|
||||
if (c == '.') {
|
||||
c = (current_ = p) < end_ ? *p++ : 0;
|
||||
while (c >= '0' && c <= '9')
|
||||
c = (current_ = p) < end_ ? *p++ : 0;
|
||||
}
|
||||
// exponential part
|
||||
if (c == 'e' || c == 'E') {
|
||||
c = (current_ = p) < end_ ? *p++ : 0;
|
||||
if (c == '+' || c == '-')
|
||||
c = (current_ = p) < end_ ? *p++ : 0;
|
||||
while (c >= '0' && c <= '9')
|
||||
c = (current_ = p) < end_ ? *p++ : 0;
|
||||
while (current_ != end_) {
|
||||
if (!(*current_ >= '0' && *current_ <= '9') &&
|
||||
!in(*current_, '.', 'e', 'E', '+', '-'))
|
||||
break;
|
||||
++current_;
|
||||
}
|
||||
}
|
||||
|
@@ -403,8 +404,7 @@ bool Reader::readString() {

bool Reader::readObject(Token& tokenStart) {
  Token tokenName;
  std::string name;
  Value init(objectValue);
  currentValue().swapPayload(init);
  currentValue() = Value(objectValue);
  currentValue().setOffsetStart(tokenStart.start_ - begin_);
  while (readToken(tokenName)) {
    bool initialTokenOk = true;
@@ -457,8 +457,7 @@ bool Reader::readObject(Token& tokenStart) {
}

bool Reader::readArray(Token& tokenStart) {
  Value init(arrayValue);
  currentValue().swapPayload(init);
  currentValue() = Value(arrayValue);
  currentValue().setOffsetStart(tokenStart.start_ - begin_);
  skipSpaces();
  if (*current_ == ']') // empty array
@@ -498,13 +497,20 @@ bool Reader::decodeNumber(Token& token) {
  Value decoded;
  if (!decodeNumber(token, decoded))
    return false;
  currentValue().swapPayload(decoded);
  currentValue() = decoded;
  currentValue().setOffsetStart(token.start_ - begin_);
  currentValue().setOffsetLimit(token.end_ - begin_);
  return true;
}

bool Reader::decodeNumber(Token& token, Value& decoded) {
  bool isDouble = false;
  for (Location inspect = token.start_; inspect != token.end_; ++inspect) {
    isDouble = isDouble || in(*inspect, '.', 'e', 'E', '+') ||
               (*inspect == '-' && inspect != token.start_);
  }
  if (isDouble)
    return decodeDouble(token, decoded);
  // Attempts to parse the number as an integer. If the number is
  // larger than the maximum supported value of an integer then
  // we decode the number as a double.
@@ -512,7 +518,6 @@ bool Reader::decodeNumber(Token& token, Value& decoded) {
  bool isNegative = *current == '-';
  if (isNegative)
    ++current;
  // TODO: Help the compiler do the div and mod at compile time or get rid of them.
  Value::LargestUInt maxIntegerValue =
      isNegative ? Value::LargestUInt(-Value::minLargestInt)
                 : Value::maxLargestUInt;
@@ -521,7 +526,9 @@ bool Reader::decodeNumber(Token& token, Value& decoded) {
  while (current < token.end_) {
    Char c = *current++;
    if (c < '0' || c > '9')
      return decodeDouble(token, decoded);
      return addError("'" + std::string(token.start_, token.end_) +
                          "' is not a number.",
                      token);
    Value::UInt digit(c - '0');
    if (value >= threshold) {
      // We've hit or exceeded the max value divided by 10 (rounded down). If
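
The threshold logic entered here is the standard overflow-safe digit accumulation: once the running value reaches maxIntegerValue / 10, one more multiply-by-ten may overflow, so the parser bails out rather than wrap around. A self-contained sketch of the idea (illustrative names, not jsoncpp's code):

    #include <cstdint>

    // Accumulate decimal digits into a uint64_t, reporting overflow
    // instead of silently wrapping around.
    bool accumulate(const char* s, uint64_t maxValue, uint64_t& out) {
      const uint64_t threshold = maxValue / 10; // largest value safe to *10
      uint64_t value = 0;
      for (; *s; ++s) {
        if (*s < '0' || *s > '9')
          return false; // not a digit
        uint64_t digit = static_cast<uint64_t>(*s - '0');
        if (value > threshold ||
            (value == threshold && digit > maxValue % 10))
          return false; // value * 10 + digit would exceed maxValue
        value = value * 10 + digit;
      }
      out = value;
      return true;
    }
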
@@ -548,7 +555,7 @@ bool Reader::decodeDouble(Token& token) {
  Value decoded;
  if (!decodeDouble(token, decoded))
    return false;
  currentValue().swapPayload(decoded);
  currentValue() = decoded;
  currentValue().setOffsetStart(token.start_ - begin_);
  currentValue().setOffsetLimit(token.end_ - begin_);
  return true;
@@ -591,11 +598,10 @@ bool Reader::decodeDouble(Token& token, Value& decoded) {
}

bool Reader::decodeString(Token& token) {
  std::string decoded_string;
  if (!decodeString(token, decoded_string))
  std::string decoded;
  if (!decodeString(token, decoded))
    return false;
  Value decoded(decoded_string);
  currentValue().swapPayload(decoded);
  currentValue() = decoded;
  currentValue().setOffsetStart(token.start_ - begin_);
  currentValue().setOffsetLimit(token.end_ - begin_);
  return true;
@@ -825,9 +831,8 @@ std::vector<Reader::StructuredError> Reader::getStructuredErrors() const {
}

bool Reader::pushError(const Value& value, const std::string& message) {
  size_t length = end_ - begin_;
  if(value.getOffsetStart() > length
    || value.getOffsetLimit() > length)
  if(value.getOffsetStart() > end_ - begin_
    || value.getOffsetLimit() > end_ - begin_)
    return false;
  Token token;
  token.type_ = tokenError;
@@ -842,10 +847,9 @@ bool Reader::pushError(const Value& value, const std::string& message) {
}

bool Reader::pushError(const Value& value, const std::string& message, const Value& extra) {
  size_t length = end_ - begin_;
  if(value.getOffsetStart() > length
    || value.getOffsetLimit() > length
    || extra.getOffsetLimit() > length)
  if(value.getOffsetStart() > end_ - begin_
    || value.getOffsetLimit() > end_ - begin_
    || extra.getOffsetLimit() > end_ - begin_)
    return false;
  Token token;
  token.type_ = tokenError;

@@ -175,8 +175,7 @@ Value::CZString::CZString(const CZString& other)
          ? duplicateStringValue(other.cstr_)
          : other.cstr_),
      index_(other.cstr_
                 ? static_cast<ArrayIndex>(other.index_ == noDuplication
                                               ? noDuplication : duplicate)
                 ? (other.index_ == noDuplication ? noDuplication : duplicate)
                 : other.index_) {}

Value::CZString::~CZString() {
@@ -189,8 +188,9 @@ void Value::CZString::swap(CZString& other) {
  std::swap(index_, other.index_);
}

Value::CZString& Value::CZString::operator=(CZString other) {
  swap(other);
Value::CZString &Value::CZString::operator=(const CZString &other) {
  CZString temp(other);
  swap(temp);
  return *this;
}

@@ -226,8 +226,14 @@ bool Value::CZString::isStaticString() const { return index_ == noDuplication; }
 * memset( this, 0, sizeof(Value) )
 * This optimization is used in ValueInternalMap fast allocator.
 */
Value::Value(ValueType type) {
  initBasic(type);
Value::Value(ValueType type)
    : type_(type), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  switch (type) {
  case nullValue:
    break;
@@ -262,62 +268,130 @@ Value::Value(ValueType type) {
  }
}

Value::Value(Int value) {
  initBasic(intValue);
Value::Value(UInt value)
    : type_(uintValue), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.uint_ = value;
}

Value::Value(Int value)
    : type_(intValue), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.int_ = value;
}

Value::Value(UInt value) {
  initBasic(uintValue);
  value_.uint_ = value;
}
#if defined(JSON_HAS_INT64)
Value::Value(Int64 value) {
  initBasic(intValue);
Value::Value(Int64 value)
    : type_(intValue), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.int_ = value;
}
Value::Value(UInt64 value) {
  initBasic(uintValue);

Value::Value(UInt64 value)
    : type_(uintValue), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.uint_ = value;
}
#endif // defined(JSON_HAS_INT64)

Value::Value(double value) {
  initBasic(realValue);
Value::Value(double value)
    : type_(realValue), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.real_ = value;
}

Value::Value(const char* value) {
  initBasic(stringValue, true);
Value::Value(const char* value)
    : type_(stringValue), allocated_(true)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.string_ = duplicateStringValue(value);
}

Value::Value(const char* beginValue, const char* endValue) {
  initBasic(stringValue, true);
Value::Value(const char* beginValue, const char* endValue)
    : type_(stringValue), allocated_(true)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.string_ =
      duplicateStringValue(beginValue, (unsigned int)(endValue - beginValue));
}

Value::Value(const std::string& value) {
  initBasic(stringValue, true);
Value::Value(const std::string& value)
    : type_(stringValue), allocated_(true)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.string_ =
      duplicateStringValue(value.c_str(), (unsigned int)value.length());
}

Value::Value(const StaticString& value) {
  initBasic(stringValue);
Value::Value(const StaticString& value)
    : type_(stringValue), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.string_ = const_cast<char*>(value.c_str());
}

#ifdef JSON_USE_CPPTL
Value::Value(const CppTL::ConstString& value) {
  initBasic(stringValue, true);
Value::Value(const CppTL::ConstString& value)
    : type_(stringValue), allocated_(true)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.string_ = duplicateStringValue(value, value.length());
}
#endif

Value::Value(bool value) {
  initBasic(booleanValue);
Value::Value(bool value)
    : type_(booleanValue), allocated_(false)
#ifdef JSON_VALUE_USE_INTERNAL_MAP
      ,
      itemIsUsed_(0)
#endif
      ,
      comments_(0), start_(0), limit_(0) {
  value_.bool_ = value;
}
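
Every constructor in the hunk above repeats the same member setup (type_, allocated_, comments_, start_, limit_), which is exactly the duplication the removed initBasic helper had factored out (its definition is dropped further down in this diff). A condensed, self-contained illustration of that factoring pattern (hypothetical Node type, not jsoncpp code):

    #include <cstddef>

    // Every constructor funnels its common-field setup through one helper,
    // mirroring the initBasic(type, allocated) pattern this diff removes.
    struct Node {
      int type_;
      bool allocated_;
      const char* comments_;
      std::size_t start_, limit_;

      explicit Node(int type) { initBasic(type); }
      Node(int type, bool allocated) { initBasic(type, allocated); }

    private:
      void initBasic(int type, bool allocated = false) {
        type_ = type;
        allocated_ = allocated;
        comments_ = 0;
        start_ = 0;
        limit_ = 0;
      }
    };

    int main() { Node n(1); return n.allocated_ ? 1 : 0; }
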
|
@@ -405,12 +479,13 @@ Value::~Value() {
  delete[] comments_;
}

Value& Value::operator=(Value other) {
  swap(other);
Value &Value::operator=(const Value &other) {
  Value temp(other);
  swap(temp);
  return *this;
}

void Value::swapPayload(Value& other) {
void Value::swap(Value& other) {
  ValueType temp = type_;
  type_ = other.type_;
  other.type_ = temp;
@@ -418,11 +493,6 @@ void Value::swapPayload(Value& other) {
  int temp2 = allocated_;
  allocated_ = other.allocated_;
  other.allocated_ = temp2;
}

void Value::swap(Value& other) {
  swapPayload(other);
  std::swap(comments_, other.comments_);
  std::swap(start_, other.start_);
  std::swap(limit_, other.limit_);
}
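
Both operator= variants in the hunk above are forms of the copy-and-swap idiom: construct a copy, swap it into *this, and let the temporary's destructor release the old payload, which gives exception-safe assignment for free. A minimal self-contained illustration (generic Buffer class, not jsoncpp code):

    #include <algorithm>
    #include <cstring>

    class Buffer {
    public:
      explicit Buffer(const char* s) : data_(new char[std::strlen(s) + 1]) {
        std::strcpy(data_, s);
      }
      Buffer(const Buffer& other)
          : data_(new char[std::strlen(other.data_) + 1]) {
        std::strcpy(data_, other.data_);
      }
      ~Buffer() { delete[] data_; }

      // Copy-and-swap: the by-value parameter makes the copy, swap()
      // transfers it, and `other` destructs the old contents.
      Buffer& operator=(Buffer other) {
        swap(other);
        return *this;
      }
      void swap(Buffer& other) { std::swap(data_, other.data_); }

    private:
      char* data_;
    };

    int main() {
      Buffer a("hello"), b("world");
      a = b; // exception-safe assignment
      return 0;
    }
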
@@ -898,17 +968,6 @@ Value& Value::operator[](const char* key) {
  return resolveReference(key, false);
}

void Value::initBasic(ValueType type, bool allocated) {
  type_ = type;
  allocated_ = allocated;
#ifdef JSON_VALUE_USE_INTERNAL_MAP
  itemIsUsed_ = 0;
#endif
  comments_ = 0;
  start_ = 0;
  limit_ = 0;
}

Value& Value::resolveReference(const char* key, bool isStatic) {
  JSON_ASSERT_MESSAGE(
      type_ == nullValue || type_ == objectValue,

@@ -26,11 +26,6 @@
#pragma warning(disable : 4996)
#endif

#if defined(__sun) && defined(__SVR4) //Solaris
#include <ieeefp.h>
#define isfinite finite
#endif

namespace Json {

static bool containsControlCharacter(const char* str) {
@@ -87,13 +82,13 @@ std::string valueToString(double value) {
// visual studio 2005 to
// avoid warning.
#if defined(WINCE)
  len = _snprintf(buffer, sizeof(buffer), "%.17g", value);
  len = _snprintf(buffer, sizeof(buffer), "%.16g", value);
#else
  len = sprintf_s(buffer, sizeof(buffer), "%.17g", value);
  len = sprintf_s(buffer, sizeof(buffer), "%.16g", value);
#endif
#else
  if (isfinite(value)) {
    len = snprintf(buffer, sizeof(buffer), "%.17g", value);
    len = snprintf(buffer, sizeof(buffer), "%.16g", value);
  } else {
    // IEEE standard states that NaN values will not compare to themselves
    if (value != value) {
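
On the "%.17g" vs "%.16g" change above: 17 significant digits is the smallest precision guaranteed to round-trip every IEEE-754 double through text, while 16 digits yields shorter but occasionally lossy output. The test-expectation updates later in this diff (e.g. "1.2345678901234001" becoming "1.2345678901234") are the visible consequence. A quick standalone demonstration:

    #include <cstdio>

    int main() {
      const double value = 1.2345678901234;
      char buffer[32];
      snprintf(buffer, sizeof(buffer), "%.17g", value);
      printf("%s\n", buffer); // prints 1.2345678901234001
      snprintf(buffer, sizeof(buffer), "%.16g", value);
      printf("%s\n", buffer); // prints 1.2345678901234
      return 0;
    }
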
@@ -222,28 +217,28 @@ void FastWriter::writeValue(const Value& value) {
    document_ += valueToString(value.asBool());
    break;
  case arrayValue: {
    document_ += '[';
    document_ += "[";
    int size = value.size();
    for (int index = 0; index < size; ++index) {
      if (index > 0)
        document_ += ',';
        document_ += ",";
      writeValue(value[index]);
    }
    document_ += ']';
    document_ += "]";
  } break;
  case objectValue: {
    Value::Members members(value.getMemberNames());
    document_ += '{';
    document_ += "{";
    for (Value::Members::iterator it = members.begin(); it != members.end();
         ++it) {
      const std::string& name = *it;
      if (it != members.begin())
        document_ += ',';
        document_ += ",";
      document_ += valueToQuotedString(name.c_str());
      document_ += yamlCompatiblityEnabled_ ? ": " : ":";
      writeValue(value[name]);
    }
    document_ += '}';
    document_ += "}";
  } break;
  }
}
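
For reference, FastWriter (whose writeValue is touched above) serializes the whole document compactly, with no indentation or line breaks between elements; typical usage of the public API looks like this:

    #include <json/json.h>
    #include <iostream>

    int main() {
      Json::Value root;
      root["test"][0]["a"] = "aaa";
      Json::FastWriter writer;
      // Prints {"test":[{"a":"aaa"}]} followed by a newline.
      std::cout << writer.write(root);
      return 0;
    }
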
@@ -307,7 +302,7 @@ void StyledWriter::writeValue(const Value& value) {
        writeCommentAfterValueOnSameLine(childValue);
        break;
      }
      document_ += ',';
      document_ += ",";
      writeCommentAfterValueOnSameLine(childValue);
    }
    unindent();
@@ -341,7 +336,7 @@ void StyledWriter::writeArrayValue(const Value& value) {
        writeCommentAfterValueOnSameLine(childValue);
        break;
      }
      document_ += ',';
      document_ += ",";
      writeCommentAfterValueOnSameLine(childValue);
    }
    unindent();

@@ -926,7 +926,7 @@ JSONTEST_FIXTURE(ValueTest, integers) {
  JSONTEST_ASSERT_EQUAL(float(uint64ToDouble(Json::UInt64(1) << 63)),
                        val.asFloat());
  JSONTEST_ASSERT_EQUAL(true, val.asBool());
  JSONTEST_ASSERT_STRING_EQUAL("9.2233720368547758e+18",
  JSONTEST_ASSERT_STRING_EQUAL("9.223372036854776e+18",
                               normalizeFloatingPointStr(val.asString()));

  // int64 min
@@ -974,7 +974,7 @@ JSONTEST_FIXTURE(ValueTest, integers) {
  JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble());
  JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat());
  JSONTEST_ASSERT_EQUAL(true, val.asBool());
  JSONTEST_ASSERT_STRING_EQUAL("-9.2233720368547758e+18",
  JSONTEST_ASSERT_STRING_EQUAL("-9.223372036854776e+18",
                               normalizeFloatingPointStr(val.asString()));

  // 10^19
@@ -1065,7 +1065,7 @@ JSONTEST_FIXTURE(ValueTest, integers) {
  JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asDouble());
  JSONTEST_ASSERT_EQUAL(18446744073709551616.0, val.asFloat());
  JSONTEST_ASSERT_EQUAL(true, val.asBool());
  JSONTEST_ASSERT_STRING_EQUAL("1.8446744073709552e+19",
  JSONTEST_ASSERT_STRING_EQUAL("1.844674407370955e+19",
                               normalizeFloatingPointStr(val.asString()));
#endif
}
@@ -1217,7 +1217,7 @@ JSONTEST_FIXTURE(ValueTest, nonIntegers) {
                               normalizeFloatingPointStr(val.asString()));

  val = Json::Value(1.2345678901234);
  JSONTEST_ASSERT_STRING_EQUAL("1.2345678901234001",
  JSONTEST_ASSERT_STRING_EQUAL("1.2345678901234",
                               normalizeFloatingPointStr(val.asString()));

  // A 16-digit floating point number.

@@ -1,5 +1,4 @@
.={}
// Comment for array
.test=[]
.test[0]={}
.test[0].a="aaa"

@@ -1,6 +1,5 @@
{
  "test":
  // Comment for array
  [
    { "a" : "aaa" }, // Comment for a
    { "b" : "bbb" }, // Comment for b

@@ -11,13 +11,4 @@
// Multiline comment cpp-style
// Second line
.cpp-test.c=3
// Comment before double
.cpp-test.d=4.1
// Comment before string
.cpp-test.e="e-string"
// Comment before true
.cpp-test.f=true
// Comment before false
.cpp-test.g=false
// Comment before null
.cpp-test.h=null
.cpp-test.d=4

@@ -12,15 +12,6 @@
  // Multiline comment cpp-style
  // Second line
  "c" : 3,
  // Comment before double
  "d" : 4.1,
  // Comment before string
  "e" : "e-string",
  // Comment before true
  "f" : true,
  // Comment before false
  "g" : false,
  // Comment before null
  "h" : null
  "d" : 4
}
}

@@ -1,12 +1,11 @@
from __future__ import print_function
import glob
import os.path
for path in glob.glob( '*.json' ):
    text = file(path,'rt').read()
    target = os.path.splitext(path)[0] + '.expected'
    if os.path.exists( target ):
        print('skipping:', target)
        print 'skipping:', target
    else:
        print('creating:', target)
        print 'creating:', target
        file(target,'wt').write(text)

@@ -1,12 +1,12 @@
# Simple implementation of a json test runner to run the test against json-py.
from __future__ import print_function

import sys
import os.path
import json
import types

if len(sys.argv) != 2:
    print("Usage: %s input-json-file", sys.argv[0])
    print "Usage: %s input-json-file", sys.argv[0]
    sys.exit(3)

input_path = sys.argv[1]

@@ -1,36 +1,17 @@
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from glob import glob
import sys
import os
import os.path
from glob import glob
import optparse

VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes '

def getStatusOutput(cmd):
    """
    Return int, unicode (for both Python 2 and 3).
    Note: os.popen().close() would return None for 0.
    """
    pipe = os.popen(cmd)
    process_output = pipe.read()
    try:
        # We have been using os.popen(). When we read() the result
        # we get 'str' (bytes) in py2, and 'str' (unicode) in py3.
        # Ugh! There must be a better way to handle this.
        process_output = process_output.decode('utf-8')
    except AttributeError:
        pass # python3
    status = pipe.close()
    return status, process_output
def compareOutputs( expected, actual, message ):
    expected = expected.strip().replace('\r','').split('\n')
    actual = actual.strip().replace('\r','').split('\n')
    diff_line = 0
    max_line_to_compare = min( len(expected), len(actual) )
    for index in range(0,max_line_to_compare):
    for index in xrange(0,max_line_to_compare):
        if expected[index].strip() != actual[index].strip():
            diff_line = index + 1
            break
@@ -52,8 +33,8 @@ def compareOutputs( expected, actual, message ):

def safeReadFile( path ):
    try:
        return open( path, 'rt', encoding = 'utf-8' ).read()
    except IOError as e:
        return file( path, 'rt' ).read()
    except IOError, e:
        return '<File "%s" is missing: %s>' % (path,e)

def runAllTests( jsontest_executable_path, input_dir = None,
@@ -70,57 +51,58 @@ def runAllTests( jsontest_executable_path, input_dir = None,
    for input_path in tests + test_jsonchecker:
        expect_failure = os.path.basename( input_path ).startswith( 'fail' )
        is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
        print('TESTING:', input_path, end=' ')
        print 'TESTING:', input_path,
        options = is_json_checker_test and '--json-checker' or ''
        cmd = '%s%s %s "%s"' % (
        pipe = os.popen( "%s%s %s %s" % (
            valgrind_path, jsontest_executable_path, options,
            input_path)
        status, process_output = getStatusOutput(cmd)
            input_path) )
        process_output = pipe.read()
        status = pipe.close()
        if is_json_checker_test:
            if expect_failure:
                if not status:
                    print('FAILED')
                if status is None:
                    print 'FAILED'
                    failed_tests.append( (input_path, 'Parsing should have failed:\n%s' %
                                          safeReadFile(input_path)) )
                else:
                    print('OK')
                    print 'OK'
            else:
                if status:
                    print('FAILED')
                if status is not None:
                    print 'FAILED'
                    failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
                else:
                    print('OK')
                    print 'OK'
        else:
            base_path = os.path.splitext(input_path)[0]
            actual_output = safeReadFile( base_path + '.actual' )
            actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' )
            open(base_path + '.process-output', 'wt', encoding = 'utf-8').write( process_output )
            file(base_path + '.process-output','wt').write( process_output )
            if status:
                print('parsing failed')
                print 'parsing failed'
                failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
            else:
                expected_output_path = os.path.splitext(input_path)[0] + '.expected'
                expected_output = open( expected_output_path, 'rt', encoding = 'utf-8' ).read()
                expected_output = file( expected_output_path, 'rt' ).read()
                detail = ( compareOutputs( expected_output, actual_output, 'input' )
                           or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) )
                if detail:
                    print('FAILED')
                    print 'FAILED'
                    failed_tests.append( (input_path, detail) )
                else:
                    print('OK')
                    print 'OK'

    if failed_tests:
        print()
        print('Failure details:')
        print
        print 'Failure details:'
        for failed_test in failed_tests:
            print('* Test', failed_test[0])
            print(failed_test[1])
        print()
        print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
                                                       len(failed_tests) ))
            print '* Test', failed_test[0]
            print failed_test[1]
        print
        print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
                                                       len(failed_tests) )
        return 1
    else:
        print('All %d tests passed.' % len(tests))
        print 'All %d tests passed.' % len(tests)
        return 0

def main():

@@ -1,11 +1,8 @@
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from glob import glob
import sys
import os
import os.path
import subprocess
from glob import glob
import optparse

VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
@@ -21,11 +18,7 @@ class TestProxy(object):
        else:
            cmd = []
        cmd.extend( [self.test_exe_path, '--test-auto'] + options )
        try:
            process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        except:
            print(cmd)
            raise
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        stdout = process.communicate()[0]
        if process.returncode:
            return False, stdout
@@ -35,29 +28,29 @@ def runAllTests( exe_path, use_valgrind=False ):
    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
    status, test_names = test_proxy.run( ['--list-tests'] )
    if not status:
        print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr)
        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
        return 1
    test_names = [name.strip() for name in test_names.decode('utf-8').strip().split('\n')]
    test_names = [name.strip() for name in test_names.strip().split('\n')]
    failures = []
    for name in test_names:
        print('TESTING %s:' % name, end=' ')
        print 'TESTING %s:' % name,
        succeed, result = test_proxy.run( ['--test', name] )
        if succeed:
            print('OK')
            print 'OK'
        else:
            failures.append( (name, result) )
            print('FAILED')
            print 'FAILED'
    failed_count = len(failures)
    pass_count = len(test_names) - failed_count
    if failed_count:
        print()
        print
        for name, result in failures:
            print(result)
        print('%d/%d tests passed (%d failure(s))' % (
            pass_count, len(test_names), failed_count))
            print result
        print '%d/%d tests passed (%d failure(s))' % (
            pass_count, len(test_names), failed_count)
        return 1
    else:
        print('All %d tests passed' % len(test_names))
        print 'All %d tests passed' % len(test_names)
        return 0

def main():