Compare commits

16 Commits
0.7.0 ... 1.0.0

7165f6ac4c  Christopher Dunn  2014-11-20 08:45:58 -06:00
    1.0.0

37a9fa9f9d  Christopher Dunn  2014-11-20 00:20:51 -06:00
    1.0.0

83683da13f  xiaoyur347  2014-11-19 23:59:34 -06:00
    fix gcc warning when CXXFLAGS contains '-Wextra'
    json_value.cpp:179:26: warning: enumeral and non-enumeral type in conditional expression [enabled by default]
    https://github.com/open-source-parsers/jsoncpp/pull/84

e5de78db82  Christopher Dunn  2014-11-19 23:54:56 -06:00
    Merge pull request #87 from cdunn2001/master
    2to3 (but only the changes which should work with python2 also)

ffd7295ab8  Christopher Dunn  2014-11-19 23:35:56 -06:00
    simple

433876866d  Christopher Dunn  2014-11-19 23:34:15 -06:00
    ws

bd1e895287  Christopher Dunn  2014-11-19 23:30:47 -06:00
    simple py3 changes

9aa6144b2a  Christopher Dunn  2014-11-19 23:10:02 -06:00
    python except as

5fda247dab  Christopher Dunn  2014-11-18 00:14:06 -06:00
    Merge pull request #79 from ya1gaurav/patch-2
    Remove gcc compilation warnings in json_reader.cpp

767713be2b  Gaurav  2014-11-17 14:04:03 +05:30
    Remove gcc compilation warning in json_reader.cpp
    Submitting Patch for Issue : https://github.com/open-source-parsers/jsoncpp/issues/77
    It will fix warnings in json_reader.cpp

3e3a8d5bd2  Aaron Jacobs  2014-11-14 10:39:03 +11:00
    Merge pull request #74 from ya1gaurav/master
    Prefer appending character constants over string literals.

abc1e07543  Gaurav  2014-11-13 12:47:19 +05:30
    Prefer appending character constants over string literals - correct patch.
    Submitting correct patch for https://github.com/open-source-parsers/jsoncpp/issues/61

00b0a1b992  Christopher Dunn  2014-11-12 00:03:52 -06:00
    Merge pull request #70 from jmesmon/pkg-config-include-var
    pkg-config: support INCLUDE_INSTALL_DIR

1fe6c59827  Cody P Schafer  2014-11-11 16:09:05 -05:00
    pkg-config: support INCLUDE_INSTALL_DIR

20672ed02c  Aaron Jacobs  2014-11-10 20:23:52 +11:00
    Merge pull request #68 from BillyDonahue/refactor_ctor_boilerplate
    Json::Value: Refactor common code in all constructors to an initBasic() function.

8eb5d89db6  Billy Donahue  2014-11-10 01:35:42 -05:00
    Remove initInt and initUInt until they are needed.
19 changed files with 463 additions and 514 deletions

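The '-Wextra' fix above addresses gcc's "enumeral and non-enumeral type in conditional expression" diagnostic, which fires when the two arms of ?: mix an enum with an integer. A minimal repro and the cast that silences it (illustrative code, not from jsoncpp):

    enum Color { kRed = 0, kBlue = 1 };

    unsigned pick(bool flag, unsigned fallback) {
      // return flag ? kRed : fallback;  // warns under -Wextra: enum vs. unsigned
      return flag ? static_cast<unsigned>(kRed) : fallback;  // both arms unsigned
    }

The json_value.cpp hunk further down applies the same idea by casting the enum operand to ArrayIndex.
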
amalgamate.py

@@ -56,7 +56,7 @@ def amalgamate_source( source_top_dir=None,
         target_source_path: output .cpp path
         header_include_path: generated header path relative to target_source_path.
     """
-    print ("Amalgating header...")
+    print("Amalgating header...")
     header = AmalgamationFile( source_top_dir )
     header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." )
     header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
@@ -77,12 +77,12 @@ def amalgamate_source( source_top_dir=None,
     header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" )

     target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
-    print ("Writing amalgated header to %r" % target_header_path)
+    print("Writing amalgated header to %r" % target_header_path)
     header.write_to( target_header_path )

     base, ext = os.path.splitext( header_include_path )
     forward_header_include_path = base + "-forwards" + ext
-    print ("Amalgating forward header...")
+    print("Amalgating forward header...")
     header = AmalgamationFile( source_top_dir )
     header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." )
     header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path )
@@ -99,10 +99,10 @@ def amalgamate_source( source_top_dir=None,
     target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                                forward_header_include_path )
-    print ("Writing amalgated forward header to %r" % target_forward_header_path)
+    print("Writing amalgated forward header to %r" % target_forward_header_path)
     header.write_to( target_forward_header_path )

-    print ("Amalgating source...")
+    print("Amalgating source...")
     source = AmalgamationFile( source_top_dir )
     source.add_text( "/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/)." )
     source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
@@ -118,7 +118,7 @@ def amalgamate_source( source_top_dir=None,
     source.add_file( os.path.join(lib_json, "json_value.cpp") )
     source.add_file( os.path.join(lib_json, "json_writer.cpp") )
-    print ("Writing amalgated source to %r" % target_source_path)
+    print("Writing amalgated source to %r" % target_source_path)
     source.write_to( target_source_path )

 def main():
@@ -144,7 +144,7 @@ Generate a single amalgated source and header file from the sources.
         sys.stderr.write( msg + "\n" )
         sys.exit( 1 )
     else:
-        print ("Source succesfully amalagated")
+        print("Source succesfully amalagated")

 if __name__ == "__main__":
     main()

devtools/antglob.py

@@ -2,6 +2,7 @@
 # encoding: utf-8
 # Baptiste Lepilleur, 2009
+from __future__ import print_function
 from dircache import listdir
 import re
 import fnmatch
@@ -190,12 +191,12 @@ if __name__ == "__main__":
             test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
         for ant_pattern, accepted_matches, rejected_matches in test_cases:
             rex = ant_pattern_to_re( ant_pattern )
-            print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
+            print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
             for accepted_match in accepted_matches:
-                print 'Accepted?:', accepted_match
-                self.assert_( rex.match( accepted_match ) is not None )
+                print('Accepted?:', accepted_match)
+                self.assertTrue( rex.match( accepted_match ) is not None )
             for rejected_match in rejected_matches:
-                print 'Rejected?:', rejected_match
-                self.assert_( rex.match( rejected_match ) is None )
+                print('Rejected?:', rejected_match)
+                self.assertTrue( rex.match( rejected_match ) is None )
     unittest.main()

devtools/batchbuild.py

@@ -1,3 +1,4 @@
+from __future__ import print_function
 import collections
 import itertools
 import json
@@ -29,7 +30,7 @@ class BuildDesc:
     def env( self ):
         environ = os.environ.copy()
         for values_by_name in self.prepend_envs:
-            for var, value in values_by_name.items():
+            for var, value in list(values_by_name.items()):
                 var = var.upper()
                 if type(value) is unicode:
                     value = value.encode( sys.getdefaultencoding() )
@@ -62,7 +63,7 @@ class BuildData:
         self.build_succeeded = False

     def execute_build(self):
-        print 'Build %s' % self.desc
+        print('Build %s' % self.desc)
         self._make_new_work_dir( )
         self.cmake_succeeded = self._generate_makefiles( )
         if self.cmake_succeeded:
@@ -70,19 +71,19 @@ class BuildData:
         return self.build_succeeded

     def _generate_makefiles(self):
-        print ' Generating makefiles: ',
+        print(' Generating makefiles: ', end=' ')
         cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
         succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
-        print 'done' if succeeded else 'FAILED'
+        print('done' if succeeded else 'FAILED')
         return succeeded

     def _build_using_makefiles(self):
-        print ' Building:',
+        print(' Building:', end=' ')
         cmd = ['cmake', '--build', self.work_dir]
         if self.desc.build_type:
             cmd += ['--config', self.desc.build_type]
         succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
-        print 'done' if succeeded else 'FAILED'
+        print('done' if succeeded else 'FAILED')
         return succeeded

     def _execute_build_subprocess(self, cmd, env, log_path):
@@ -97,7 +98,7 @@ class BuildData:

     def _make_new_work_dir(self):
         if os.path.isdir( self.work_dir ):
-            print ' Removing work directory', self.work_dir
+            print(' Removing work directory', self.work_dir)
             shutil.rmtree( self.work_dir, ignore_errors=True )
         if not os.path.isdir( self.work_dir ):
             os.makedirs( self.work_dir )
@@ -134,9 +135,9 @@ def load_build_variants_from_config( config_path ):

 def generate_build_variants( build_descs_by_axis ):
     """Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
-    axis_names = build_descs_by_axis.keys()
+    axis_names = list(build_descs_by_axis.keys())
     build_descs = []
-    for axis_name, axis_build_descs in build_descs_by_axis.items():
+    for axis_name, axis_build_descs in list(build_descs_by_axis.items()):
         if len(build_descs):
             # for each existing build_desc and each axis build desc, create a new build_desc
             new_build_descs = []
@@ -227,7 +228,7 @@ def generate_html_report( html_report_path, builds ):
         tr_builds='\n'.join( tr_builds ) )
     with open( html_report_path, 'wt' ) as fhtml:
         fhtml.write( html )
-    print 'HTML report generated in:', html_report_path
+    print('HTML report generated in:', html_report_path)

 def main():
     usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
@@ -258,7 +259,7 @@ def main():
     for config_path in config_paths:
         build_descs_by_axis = load_build_variants_from_config( config_path )
         build_descs.extend( generate_build_variants( build_descs_by_axis ) )
-    print 'Build variants (%d):' % len(build_descs)
+    print('Build variants (%d):' % len(build_descs))
     # assign build directory for each variant
     if not os.path.isdir( work_dir ):
         os.makedirs( work_dir )
@@ -272,7 +273,7 @@ def main():
         build.execute_build()
     html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
     generate_html_report( html_report_path, builds )
-    print 'Done'
+    print('Done')

 if __name__ == '__main__':
     main()

devtools/fixeol.py

@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os.path

 def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
@@ -6,8 +7,8 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
         raise ValueError( 'Path "%s" is not a file' % path )
     try:
         f = open(path, 'rb')
-    except IOError, msg:
-        print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
+    except IOError as msg:
+        print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
         return False
     try:
         raw_lines = f.readlines()
@@ -15,7 +16,7 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
         f.close()
     fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
     if raw_lines != fixed_lines:
-        print '%s =>' % path,
+        print('%s =>' % path, end=' ')
         if not is_dry_run:
             f = open(path, "wb")
             try:
@@ -23,7 +24,7 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
             finally:
                 f.close()
         if verbose:
-            print is_dry_run and ' NEED FIX' or ' FIXED'
+            print(is_dry_run and ' NEED FIX' or ' FIXED')
     return True
 ##
 ##

devtools/licenseupdater.py

@@ -1,5 +1,6 @@
 """Updates the license text in source file.
 """
+from __future__ import print_function

 # An existing license is found if the file starts with the string below,
 # and ends with the first blank line.
@@ -34,11 +35,11 @@ def update_license( path, dry_run, show_diff ):
         if not dry_run:
             with open( path, 'wb' ) as fout:
                 fout.write( new_text.replace('\n', newline ) )
-        print 'Updated', path
+        print('Updated', path)
         if show_diff:
             import difflib
-            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
-                                                   new_text.split('\n') ) )
+            print('\n'.join( difflib.unified_diff( original_text.split('\n'),
+                                                   new_text.split('\n') ) ))
         return True
     return False
@@ -83,7 +84,7 @@ python devtools\licenseupdater.py include src
     parser.enable_interspersed_args()
     options, args = parser.parse_args()
     update_license_in_source_directories( args, options.dry_run, options.show_diff )
-    print 'Done'
+    print('Done')

 if __name__ == '__main__':
     import sys

doxybuild.py

@@ -1,12 +1,12 @@
 """Script to generate doxygen documentation.
 """
+from __future__ import print_function
+from devtools import tarball
 import re
 import os
 import os.path
 import sys
 import shutil
-from devtools import tarball

 def find_program(*filenames):
     """find a program in folders path_lst, and sets env[var]
@@ -33,9 +33,9 @@ def do_subst_in_file(targetfile, sourcefile, dict):
         contents = f.read()
         f.close()
     except:
-        print "Can't read source file %s"%sourcefile
+        print("Can't read source file %s"%sourcefile)
         raise
-    for (k,v) in dict.items():
+    for (k,v) in list(dict.items()):
         v = v.replace('\\','\\\\')
         contents = re.sub(k, v, contents)
     try:
@@ -43,7 +43,7 @@ def do_subst_in_file(targetfile, sourcefile, dict):
         f.write(contents)
         f.close()
     except:
-        print "Can't write target file %s"%targetfile
+        print("Can't write target file %s"%targetfile)
         raise

 def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
@@ -53,12 +53,12 @@ def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
     try:
         os.chdir( working_dir )
         cmd = [doxygen_path, config_file]
-        print 'Running:', ' '.join( cmd )
+        print('Running:', ' '.join( cmd ))
         try:
             import subprocess
         except:
             if os.system( ' '.join( cmd ) ) != 0:
-                print 'Documentation generation failed'
+                print('Documentation generation failed')
                 return False
         else:
             if is_silent:
@@ -67,8 +67,8 @@ def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
             process = subprocess.Popen( cmd )
             stdout, _ = process.communicate()
             if process.returncode:
-                print 'Documentation generation failed:'
-                print stdout
+                print('Documentation generation failed:')
+                print(stdout)
                 return False
         return True
     finally:
@@ -107,7 +107,7 @@ def build_doc( options, make_release=False ):
         }

     if os.path.isdir( output_dir ):
-        print 'Deleting directory:', output_dir
+        print('Deleting directory:', output_dir)
         shutil.rmtree( output_dir )
     if not os.path.isdir( output_dir ):
         os.makedirs( output_dir )
@@ -115,15 +115,15 @@ def build_doc( options, make_release=False ):
     do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
     ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
     if not options.silent:
-        print open(warning_log_path, 'rb').read()
+        print(open(warning_log_path, 'rb').read())
     index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
-    print 'Generated documentation can be found in:'
-    print index_path
+    print('Generated documentation can be found in:')
+    print(index_path)
     if options.open:
         import webbrowser
         webbrowser.open( 'file://' + index_path )
     if options.make_tarball:
-        print 'Generating doc tarball to', tarball_path
+        print('Generating doc tarball to', tarball_path)
         tarball_sources = [
             output_dir,
             'README.txt',

include/json/value.h

@@ -171,7 +171,7 @@ private:
     CZString(const char* cstr, DuplicationPolicy allocate);
     CZString(const CZString& other);
     ~CZString();
-    CZString &operator=(const CZString &other);
+    CZString& operator=(CZString other);
     bool operator<(const CZString& other) const;
     bool operator==(const CZString& other) const;
     ArrayIndex index() const;
@@ -238,7 +238,7 @@ Json::Value obj_value(Json::objectValue); // {}
   Value(const Value& other);
   ~Value();

-  Value &operator=(const Value &other);
+  Value& operator=(Value other);
   /// Swap values.
   /// \note Currently, comments are intentionally not swapped, for
   /// both logic and efficiency.
@@ -440,6 +440,8 @@ Json::Value obj_value(Json::objectValue); // {}
   size_t getOffsetLimit() const;

 private:
+  void initBasic(ValueType type, bool allocated = false);
+
   Value& resolveReference(const char* key, bool isStatic);

 #ifdef JSON_VALUE_USE_INTERNAL_MAP

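Both operator= declarations above move from a const-reference parameter to pass-by-value, the signature used by the copy-and-swap idiom that the matching json_value.cpp hunks implement. A minimal sketch of the idiom (illustrative, not jsoncpp code):

    #include <algorithm>
    #include <cstring>

    class Buffer {
    public:
      explicit Buffer(const char* s) : data_(new char[std::strlen(s) + 1]) {
        std::strcpy(data_, s);
      }
      Buffer(const Buffer& other) : data_(new char[std::strlen(other.data_) + 1]) {
        std::strcpy(data_, other.data_);
      }
      ~Buffer() { delete[] data_; }
      void swap(Buffer& other) { std::swap(data_, other.data_); }
      Buffer& operator=(Buffer other) {  // the copy is made when the argument is passed
        swap(other);                     // constant-time exchange of state
        return *this;                    // 'other' destroys the old state on exit
      }
    private:
      char* data_;
    };

Because the copy runs before the body, a failed allocation leaves the left-hand side untouched, and self-assignment needs no special case.
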
include/json/version.h

@@ -4,9 +4,9 @@
 #ifndef JSON_VERSION_H_INCLUDED
 # define JSON_VERSION_H_INCLUDED

-# define JSONCPP_VERSION_STRING "0.7.0"
-# define JSONCPP_VERSION_MAJOR 0
-# define JSONCPP_VERSION_MINOR 7
+# define JSONCPP_VERSION_STRING "1.0.0"
+# define JSONCPP_VERSION_MAJOR 1
+# define JSONCPP_VERSION_MINOR 0
 # define JSONCPP_VERSION_PATCH 0
 # define JSONCPP_VERSION_QUALIFIER
 # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))

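JSONCPP_VERSION_HEXA packs major, minor, and patch into one integer, one byte per component, so releases compare numerically; for the new 1.0.0 that is 0x01000000. A quick check (illustrative):

    #include <cstdio>

    int main() {
      // mirrors the JSONCPP_VERSION_HEXA expression for major=1, minor=0, patch=0
      unsigned hexa = (1u << 24) | (0u << 16) | (0u << 8);
      std::printf("0x%08X\n", hexa);  // prints 0x01000000
      return 0;
    }
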
makerelease.py

@@ -14,6 +14,7 @@ python makerelease.py 0.5.0 0.6.0-dev
 Note: This was for Subversion. Now that we are in GitHub, we do not
 need to build versioned tarballs anymore, so makerelease.py is defunct.
 """
+from __future__ import print_function
 import os.path
 import subprocess
 import sys
@@ -46,7 +47,7 @@ class SVNError(Exception):

 def svn_command( command, *args ):
     cmd = ['svn', '--non-interactive', command] + list(args)
-    print 'Running:', ' '.join( cmd )
+    print('Running:', ' '.join( cmd ))
     process = subprocess.Popen( cmd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT )
@@ -84,7 +85,7 @@ def svn_check_if_tag_exist( tag_url ):
     """
     try:
         list_stdout = svn_command( 'list', tag_url )
-    except SVNError, e:
+    except SVNError as e:
         if e.returncode != 1 or not str(e).find('tag_url'):
             raise e
         # otherwise ignore error, meaning tag does not exist
@@ -117,7 +118,7 @@ def svn_export( tag_url, export_dir ):
 def fix_sources_eol( dist_dir ):
     """Set file EOL for tarball distribution.
     """
-    print 'Preparing exported source file EOL for distribution...'
+    print('Preparing exported source file EOL for distribution...')
     prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
     win_sources = antglob.glob( dist_dir,
         includes = '**/*.sln **/*.vcproj',
@@ -148,7 +149,7 @@ def download( url, target_path ):

 def check_compile( distcheck_top_dir, platform ):
     cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
-    print 'Running:', ' '.join( cmd )
+    print('Running:', ' '.join( cmd ))
     log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
     flog = open( log_path, 'wb' )
     try:
@@ -179,9 +180,9 @@ def run_sftp_batch( userhost, sftp, batch, retry=0 ):
     # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
     cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
     error = None
-    for retry_index in xrange(0, max(1,retry)):
+    for retry_index in range(0, max(1,retry)):
         heading = retry_index == 0 and 'Running:' or 'Retrying:'
-        print heading, ' '.join( cmd )
+        print(heading, ' '.join( cmd ))
         process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
         stdout = process.communicate()[0]
         if process.returncode != 0:
@@ -219,21 +220,21 @@ exit
     upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
     paths_to_remove = existing_paths - upload_paths
     if paths_to_remove:
-        print 'Removing the following file from web:'
-        print '\n'.join( paths_to_remove )
+        print('Removing the following file from web:')
+        print('\n'.join( paths_to_remove ))
         stdout = run_sftp_batch( userhost, sftp, """cd htdocs
 rm %s
 exit""" % ' '.join(paths_to_remove) )
-    print 'Uploading %d files:' % len(upload_paths)
+    print('Uploading %d files:' % len(upload_paths))
     batch_size = 10
     upload_paths = list(upload_paths)
     start_time = time.time()
-    for index in xrange(0,len(upload_paths),batch_size):
+    for index in range(0,len(upload_paths),batch_size):
         paths = upload_paths[index:index+batch_size]
         file_per_sec = (time.time() - start_time) / (index+1)
         remaining_files = len(upload_paths) - index
         remaining_sec = file_per_sec * remaining_files
-        print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
+        print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
         run_sftp_batch( userhost, sftp, """cd htdocs
 lcd %s
 mput %s
@@ -297,7 +298,7 @@ Warning: --force should only be used when developping/testing the release script
     else:
         msg = check_no_pending_commit()
     if not msg:
-        print 'Setting version to', release_version
+        print('Setting version to', release_version)
         set_version( release_version )
         svn_commit( 'Release ' + release_version )
         tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
@@ -305,11 +306,11 @@ Warning: --force should only be used when developping/testing the release script
             if options.retag_release:
                 svn_remove_tag( tag_url, 'Overwriting previous tag' )
             else:
-                print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
+                print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
                 sys.exit( 1 )
         svn_tag_sandbox( tag_url, 'Release ' + release_version )

-        print 'Generated doxygen document...'
+        print('Generated doxygen document...')
 ##        doc_dirname = r'jsoncpp-api-html-0.5.0'
 ##        doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
         doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
@@ -323,11 +324,11 @@ Warning: --force should only be used when developping/testing the release script
         source_dir = 'jsoncpp-src-' + release_version
         source_tarball_path = 'dist/%s.tar.gz' % source_dir
-        print 'Generating source tarball to', source_tarball_path
+        print('Generating source tarball to', source_tarball_path)
         tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )

         amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
-        print 'Generating amalgamation source tarball to', amalgamation_tarball_path
+        print('Generating amalgamation source tarball to', amalgamation_tarball_path)
         amalgamation_dir = 'dist/amalgamation'
         amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
         amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
@@ -337,41 +338,41 @@ Warning: --force should only be used when developping/testing the release script
         # Decompress source tarball, download and install scons-local
         distcheck_dir = 'dist/distcheck'
         distcheck_top_dir = distcheck_dir + '/' + source_dir
-        print 'Decompressing source tarball to', distcheck_dir
+        print('Decompressing source tarball to', distcheck_dir)
         rmdir_if_exist( distcheck_dir )
         tarball.decompress( source_tarball_path, distcheck_dir )
         scons_local_path = 'dist/scons-local.tar.gz'
-        print 'Downloading scons-local to', scons_local_path
+        print('Downloading scons-local to', scons_local_path)
         download( SCONS_LOCAL_URL, scons_local_path )
-        print 'Decompressing scons-local to', distcheck_top_dir
+        print('Decompressing scons-local to', distcheck_top_dir)
         tarball.decompress( scons_local_path, distcheck_top_dir )
         # Run compilation
-        print 'Compiling decompressed tarball'
+        print('Compiling decompressed tarball')
         all_build_status = True
         for platform in options.platforms.split(','):
-            print 'Testing platform:', platform
+            print('Testing platform:', platform)
             build_status, log_path = check_compile( distcheck_top_dir, platform )
-            print 'see build log:', log_path
-            print build_status and '=> ok' or '=> FAILED'
+            print('see build log:', log_path)
+            print(build_status and '=> ok' or '=> FAILED')
             all_build_status = all_build_status and build_status
         if not build_status:
-            print 'Testing failed on at least one platform, aborting...'
+            print('Testing failed on at least one platform, aborting...')
             svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
             sys.exit(1)
         if options.user:
             if not options.no_web:
-                print 'Uploading documentation using user', options.user
+                print('Uploading documentation using user', options.user)
                 sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
-                print 'Completed documentation upload'
-            print 'Uploading source and documentation tarballs for release using user', options.user
+                print('Completed documentation upload')
+            print('Uploading source and documentation tarballs for release using user', options.user)
             sourceforge_release_tarball( SOURCEFORGE_PROJECT,
                                          [source_tarball_path, doc_tarball_path],
                                          user=options.user, sftp=options.sftp )
-            print 'Source and doc release tarballs uploaded'
+            print('Source and doc release tarballs uploaded')
         else:
-            print 'No upload user specified. Web site and download tarbal were not uploaded.'
-            print 'Tarball can be found at:', doc_tarball_path
+            print('No upload user specified. Web site and download tarbal were not uploaded.')
+            print('Tarball can be found at:', doc_tarball_path)

         # Set next version number and commit
         set_version( next_version )

pkg-config/jsoncpp.pc.in

@@ -1,7 +1,7 @@
 prefix=@CMAKE_INSTALL_PREFIX@
 exec_prefix=${prefix}
 libdir=${exec_prefix}/lib
-includedir=${prefix}/include
+includedir=${prefix}/@INCLUDE_INSTALL_DIR@

 Name: jsoncpp
 Description: A C++ library for interacting with JSON

scons-tools/substinfile.py

@@ -1,5 +1,6 @@
 import re
 from SCons.Script import *  # the usual scons stuff you get in a SConscript
+import collections

 def generate(env):
     """
@@ -25,28 +26,28 @@ def generate(env):
             contents = f.read()
             f.close()
         except:
-            raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile
-        for (k,v) in dict.items():
+            raise SCons.Errors.UserError("Can't read source file %s"%sourcefile)
+        for (k,v) in list(dict.items()):
             contents = re.sub(k, v, contents)
         try:
             f = open(targetfile, 'wb')
             f.write(contents)
             f.close()
         except:
-            raise SCons.Errors.UserError, "Can't write target file %s"%targetfile
+            raise SCons.Errors.UserError("Can't write target file %s"%targetfile)
         return 0 # success

     def subst_in_file(target, source, env):
-        if not env.has_key('SUBST_DICT'):
-            raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set."
+        if 'SUBST_DICT' not in env:
+            raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.")
         d = dict(env['SUBST_DICT']) # copy it
-        for (k,v) in d.items():
-            if callable(v):
+        for (k,v) in list(d.items()):
+            if isinstance(v, collections.Callable):
                 d[k] = env.subst(v()).replace('\\','\\\\')
             elif SCons.Util.is_String(v):
                 d[k] = env.subst(v).replace('\\','\\\\')
             else:
-                raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))
+                raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)))
         for (t,s) in zip(target, source):
             return do_subst_in_file(str(t), str(s), d)
@@ -60,8 +61,8 @@ def generate(env):
         Returns original target, source tuple unchanged.
         """
         d = env['SUBST_DICT'].copy() # copy it
-        for (k,v) in d.items():
-            if callable(v):
+        for (k,v) in list(d.items()):
+            if isinstance(v, collections.Callable):
                 d[k] = env.subst(v())
             elif SCons.Util.is_String(v):
                 d[k]=env.subst(v)

src/lib_json/json_reader.cpp

@@ -831,8 +831,9 @@ std::vector<Reader::StructuredError> Reader::getStructuredErrors() const {
 }

 bool Reader::pushError(const Value& value, const std::string& message) {
-  if(value.getOffsetStart() > end_ - begin_
-     || value.getOffsetLimit() > end_ - begin_)
+  size_t length = end_ - begin_;
+  if(value.getOffsetStart() > length
+     || value.getOffsetLimit() > length)
     return false;
   Token token;
   token.type_ = tokenError;
@@ -847,9 +848,10 @@ bool Reader::pushError(const Value& value, const std::string& message) {
 }

 bool Reader::pushError(const Value& value, const std::string& message, const Value& extra) {
-  if(value.getOffsetStart() > end_ - begin_
-     || value.getOffsetLimit() > end_ - begin_
-     || extra.getOffsetLimit() > end_ - begin_)
+  size_t length = end_ - begin_;
+  if(value.getOffsetStart() > length
+     || value.getOffsetLimit() > length
+     || extra.getOffsetLimit() > length)
     return false;
   Token token;
   token.type_ = tokenError;

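Both pushError() hunks hoist end_ - begin_ into a size_t length. A pointer difference has the signed type ptrdiff_t, and comparing it against the unsigned offset values is what drew gcc's signed/unsigned warnings; computing the length once gives every comparison a consistently typed operand. The shape of the fix (illustrative, not the jsoncpp signatures):

    #include <cstddef>

    bool offsetsInRange(const char* begin, const char* end,
                        size_t offsetStart, size_t offsetLimit) {
      size_t length = end - begin;  // single signed-to-unsigned conversion, done once
      return offsetStart <= length && offsetLimit <= length;
    }
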
src/lib_json/json_value.cpp

@@ -175,7 +175,8 @@ Value::CZString::CZString(const CZString& other)
? duplicateStringValue(other.cstr_) ? duplicateStringValue(other.cstr_)
: other.cstr_), : other.cstr_),
index_(other.cstr_ index_(other.cstr_
? (other.index_ == noDuplication ? noDuplication : duplicate) ? static_cast<ArrayIndex>(other.index_ == noDuplication
? noDuplication : duplicate)
: other.index_) {} : other.index_) {}
Value::CZString::~CZString() { Value::CZString::~CZString() {
@@ -188,9 +189,8 @@ void Value::CZString::swap(CZString& other) {
std::swap(index_, other.index_); std::swap(index_, other.index_);
} }
Value::CZString &Value::CZString::operator=(const CZString &other) { Value::CZString& Value::CZString::operator=(CZString other) {
CZString temp(other); swap(other);
swap(temp);
return *this; return *this;
} }
@@ -226,14 +226,8 @@ bool Value::CZString::isStaticString() const { return index_ == noDuplication; }
* memset( this, 0, sizeof(Value) ) * memset( this, 0, sizeof(Value) )
* This optimization is used in ValueInternalMap fast allocator. * This optimization is used in ValueInternalMap fast allocator.
*/ */
Value::Value(ValueType type) Value::Value(ValueType type) {
: type_(type), allocated_(false) initBasic(type);
#ifdef JSON_VALUE_USE_INTERNAL_MAP
,
itemIsUsed_(0)
#endif
,
comments_(0), start_(0), limit_(0) {
switch (type) { switch (type) {
case nullValue: case nullValue:
break; break;
@@ -268,130 +262,62 @@ Value::Value(ValueType type)
} }
} }
Value::Value(UInt value) Value::Value(Int value) {
-    : type_(uintValue), allocated_(false)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(Int value) {
+  initBasic(intValue);
+  value_.int_ = value;
+}
+
+Value::Value(UInt value) {
+  initBasic(uintValue);
   value_.uint_ = value;
 }
 
-Value::Value(Int value)
-    : type_(intValue), allocated_(false)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
-  value_.int_ = value;
-}
 
 #if defined(JSON_HAS_INT64)
-Value::Value(Int64 value)
-    : type_(intValue), allocated_(false)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(Int64 value) {
+  initBasic(intValue);
   value_.int_ = value;
 }
 
-Value::Value(UInt64 value)
-    : type_(uintValue), allocated_(false)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(UInt64 value) {
+  initBasic(uintValue);
   value_.uint_ = value;
 }
 #endif // defined(JSON_HAS_INT64)
 
-Value::Value(double value)
-    : type_(realValue), allocated_(false)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(double value) {
+  initBasic(realValue);
   value_.real_ = value;
 }
 
-Value::Value(const char* value)
-    : type_(stringValue), allocated_(true)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(const char* value) {
+  initBasic(stringValue, true);
   value_.string_ = duplicateStringValue(value);
 }
 
-Value::Value(const char* beginValue, const char* endValue)
-    : type_(stringValue), allocated_(true)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(const char* beginValue, const char* endValue) {
+  initBasic(stringValue, true);
   value_.string_ =
       duplicateStringValue(beginValue, (unsigned int)(endValue - beginValue));
 }
 
-Value::Value(const std::string& value)
-    : type_(stringValue), allocated_(true)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(const std::string& value) {
+  initBasic(stringValue, true);
   value_.string_ =
       duplicateStringValue(value.c_str(), (unsigned int)value.length());
 }
 
-Value::Value(const StaticString& value)
-    : type_(stringValue), allocated_(false)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(const StaticString& value) {
+  initBasic(stringValue);
   value_.string_ = const_cast<char*>(value.c_str());
 }
 
 #ifdef JSON_USE_CPPTL
-Value::Value(const CppTL::ConstString& value)
-    : type_(stringValue), allocated_(true)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(const CppTL::ConstString& value) {
+  initBasic(stringValue, true);
   value_.string_ = duplicateStringValue(value, value.length());
 }
 #endif
 
-Value::Value(bool value)
-    : type_(booleanValue), allocated_(false)
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-      ,
-      itemIsUsed_(0)
-#endif
-      ,
-      comments_(0), start_(0), limit_(0) {
+Value::Value(bool value) {
+  initBasic(booleanValue);
   value_.bool_ = value;
 }
@@ -479,9 +405,8 @@ Value::~Value() {
   delete[] comments_;
 }
 
-Value &Value::operator=(const Value &other) {
-  Value temp(other);
-  swap(temp);
+Value& Value::operator=(Value other) {
+  swap(other);
   return *this;
 }
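The new assignment operator takes its parameter by value, so the copy is made when the argument is passed, and swap() then exchanges it with *this; the old contents are released when the parameter goes out of scope. This is the copy-and-swap idiom, which gives the strong exception guarantee with no self-assignment check. A minimal sketch of the idiom on a hypothetical Widget class (illustrative, not jsoncpp code):

#include <string>
#include <utility>

class Widget {
public:
  // Pass by value: the argument *is* the copy.
  Widget& operator=(Widget other) {
    swap(other);   // take the copy's state
    return *this;  // 'other' destroys the old state on exit
  }
  void swap(Widget& other) {
    std::swap(text_, other.text_);
    std::swap(count_, other.count_);
  }
private:
  std::string text_;
  int count_;
};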
@@ -968,6 +893,17 @@ Value& Value::operator[](const char* key) {
   return resolveReference(key, false);
 }
 
+void Value::initBasic(ValueType type, bool allocated) {
+  type_ = type;
+  allocated_ = allocated;
+#ifdef JSON_VALUE_USE_INTERNAL_MAP
+  itemIsUsed_ = 0;
+#endif
+  comments_ = 0;
+  start_ = 0;
+  limit_ = 0;
+}
+
 Value& Value::resolveReference(const char* key, bool isStatic) {
   JSON_ASSERT_MESSAGE(
       type_ == nullValue || type_ == objectValue,
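With initBasic() in place, every constructor funnels through one initializer, so the shared bookkeeping fields (comments_, start_, limit_) can no longer drift apart between constructors, and a future field only needs to be added in one spot. A small usage sketch, assuming the amalgamated <json/json.h> header:

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value i(42);       // initBasic(intValue)
  Json::Value u(42u);      // initBasic(uintValue)
  Json::Value d(2.5);      // initBasic(realValue)
  Json::Value s("hello");  // initBasic(stringValue, true): copies the string
  Json::Value b(true);     // initBasic(booleanValue)
  std::cout << s.asString() << ' ' << i.asInt() << std::endl;
  return 0;
}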
View File
@@ -217,28 +217,28 @@ void FastWriter::writeValue(const Value& value) {
     document_ += valueToString(value.asBool());
     break;
   case arrayValue: {
-    document_ += "[";
+    document_ += '[';
     int size = value.size();
     for (int index = 0; index < size; ++index) {
       if (index > 0)
-        document_ += ",";
+        document_ += ',';
       writeValue(value[index]);
     }
-    document_ += "]";
+    document_ += ']';
   } break;
   case objectValue: {
     Value::Members members(value.getMemberNames());
-    document_ += "{";
+    document_ += '{';
     for (Value::Members::iterator it = members.begin(); it != members.end();
          ++it) {
       const std::string& name = *it;
       if (it != members.begin())
-        document_ += ",";
+        document_ += ',';
       document_ += valueToQuotedString(name.c_str());
       document_ += yamlCompatiblityEnabled_ ? ": " : ":";
       writeValue(value[name]);
     }
-    document_ += "}";
+    document_ += '}';
   } break;
   }
 }
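Appending a character constant selects std::string::operator+=(char), which appends a single byte directly, whereas a string literal goes through operator+=(const char*) and must first scan for the terminating NUL. A standalone illustration of the two overloads (not jsoncpp code):

#include <cassert>
#include <string>

int main() {
  std::string document;
  document += '[';     // operator+=(char): no length scan
  document += "null";  // operator+=(const char*): strlen-style scan
  document += ']';
  assert(document == "[null]");
  return 0;
}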
@@ -302,7 +302,7 @@ void StyledWriter::writeValue(const Value& value) {
         writeCommentAfterValueOnSameLine(childValue);
         break;
       }
-      document_ += ",";
+      document_ += ',';
       writeCommentAfterValueOnSameLine(childValue);
     }
     unindent();
@@ -336,7 +336,7 @@ void StyledWriter::writeArrayValue(const Value& value) {
         writeCommentAfterValueOnSameLine(childValue);
         break;
       }
-      document_ += ",";
+      document_ += ',';
       writeCommentAfterValueOnSameLine(childValue);
     }
     unindent();
View File
@@ -1,11 +1,12 @@
+from __future__ import print_function
 import glob
 import os.path
 for path in glob.glob( '*.json' ):
     text = file(path,'rt').read()
     target = os.path.splitext(path)[0] + '.expected'
     if os.path.exists( target ):
-        print 'skipping:', target
+        print('skipping:', target)
     else:
-        print 'creating:', target
+        print('creating:', target)
         file(target,'wt').write(text)
View File
@@ -1,12 +1,12 @@
 # Simple implementation of a json test runner to run the test against json-py.
+from __future__ import print_function
 import sys
 import os.path
 import json
 import types
 
 if len(sys.argv) != 2:
-    print "Usage: %s input-json-file", sys.argv[0]
+    print("Usage: %s input-json-file", sys.argv[0])
     sys.exit(3)
 
 input_path = sys.argv[1]
View File
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import sys
 import os
 import os.path
@@ -11,7 +12,7 @@ def compareOutputs( expected, actual, message ):
     actual = actual.strip().replace('\r','').split('\n')
     diff_line = 0
     max_line_to_compare = min( len(expected), len(actual) )
-    for index in xrange(0,max_line_to_compare):
+    for index in range(0,max_line_to_compare):
         if expected[index].strip() != actual[index].strip():
             diff_line = index + 1
             break
@@ -34,7 +35,7 @@ def compareOutputs( expected, actual, message ):
 def safeReadFile( path ):
     try:
         return file( path, 'rt' ).read()
-    except IOError, e:
+    except IOError as e:
         return '<File "%s" is missing: %s>' % (path,e)
 
 def runAllTests( jsontest_executable_path, input_dir = None,
@@ -51,7 +52,7 @@ def runAllTests( jsontest_executable_path, input_dir = None,
     for input_path in tests + test_jsonchecker:
         expect_failure = os.path.basename( input_path ).startswith( 'fail' )
         is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
-        print 'TESTING:', input_path,
+        print('TESTING:', input_path, end=' ')
         options = is_json_checker_test and '--json-checker' or ''
         pipe = os.popen( "%s%s %s %s" % (
             valgrind_path, jsontest_executable_path, options,
@@ -61,24 +62,24 @@ def runAllTests( jsontest_executable_path, input_dir = None,
         if is_json_checker_test:
             if expect_failure:
                 if status is None:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, 'Parsing should have failed:\n%s' %
                                           safeReadFile(input_path)) )
                 else:
-                    print 'OK'
+                    print('OK')
             else:
                 if status is not None:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
                 else:
-                    print 'OK'
+                    print('OK')
         else:
             base_path = os.path.splitext(input_path)[0]
             actual_output = safeReadFile( base_path + '.actual' )
             actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' )
             file(base_path + '.process-output','wt').write( process_output )
             if status:
-                print 'parsing failed'
+                print('parsing failed')
                 failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
             else:
                 expected_output_path = os.path.splitext(input_path)[0] + '.expected'
@@ -86,23 +87,23 @@ def runAllTests( jsontest_executable_path, input_dir = None,
                 detail = ( compareOutputs( expected_output, actual_output, 'input' )
                            or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) )
                 if detail:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, detail) )
                 else:
-                    print 'OK'
+                    print('OK')
 
     if failed_tests:
-        print
-        print 'Failure details:'
+        print()
+        print('Failure details:')
         for failed_test in failed_tests:
-            print '* Test', failed_test[0]
-            print failed_test[1]
-        print
-        print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
-                                                       len(failed_tests) )
+            print('* Test', failed_test[0])
+            print(failed_test[1])
+        print()
+        print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
+                                                       len(failed_tests) ))
         return 1
     else:
-        print 'All %d tests passed.' % len(tests)
+        print('All %d tests passed.' % len(tests))
         return 0
 
 def main():
View File
@@ -1,8 +1,9 @@
+from __future__ import print_function
+from glob import glob
 import sys
 import os
 import os.path
 import subprocess
-from glob import glob
 import optparse
 
 VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
@@ -28,29 +29,29 @@ def runAllTests( exe_path, use_valgrind=False ):
     test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
     status, test_names = test_proxy.run( ['--list-tests'] )
     if not status:
-        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
+        print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr)
         return 1
     test_names = [name.strip() for name in test_names.strip().split('\n')]
     failures = []
     for name in test_names:
-        print 'TESTING %s:' % name,
+        print('TESTING %s:' % name, end=' ')
         succeed, result = test_proxy.run( ['--test', name] )
         if succeed:
-            print 'OK'
+            print('OK')
         else:
             failures.append( (name, result) )
-            print 'FAILED'
+            print('FAILED')
     failed_count = len(failures)
     pass_count = len(test_names) - failed_count
     if failed_count:
-        print
+        print()
         for name, result in failures:
-            print result
-        print '%d/%d tests passed (%d failure(s))' % (
-            pass_count, len(test_names), failed_count)
+            print(result)
+        print('%d/%d tests passed (%d failure(s))' % (
+            pass_count, len(test_names), failed_count))
         return 1
     else:
-        print 'All %d tests passed' % len(test_names)
+        print('All %d tests passed' % len(test_names))
         return 0
 
 def main():
View File
@@ -1 +1 @@
-0.7.0
+1.0.0