mirror of https://github.com/open-source-parsers/jsoncpp.git
synced 2025-04-03 18:10:12 +02:00

simple py3 changes

parent 9aa6144b2a
commit bd1e895287
@@ -56,7 +56,7 @@ def amalgamate_source( source_top_dir=None,
     target_source_path: output .cpp path
     header_include_path: generated header path relative to target_source_path.
     """
-    print ("Amalgating header...")
+    print("Amalgating header...")
     header = AmalgamationFile( source_top_dir )
     header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." )
     header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
@@ -77,12 +77,12 @@ def amalgamate_source( source_top_dir=None,
     header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" )

     target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
-    print ("Writing amalgated header to %r" % target_header_path)
+    print("Writing amalgated header to %r" % target_header_path)
     header.write_to( target_header_path )

     base, ext = os.path.splitext( header_include_path )
     forward_header_include_path = base + "-forwards" + ext
-    print ("Amalgating forward header...")
+    print("Amalgating forward header...")
     header = AmalgamationFile( source_top_dir )
     header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." )
     header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path )
@@ -99,10 +99,10 @@ def amalgamate_source( source_top_dir=None,

     target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                                forward_header_include_path )
-    print ("Writing amalgated forward header to %r" % target_forward_header_path)
+    print("Writing amalgated forward header to %r" % target_forward_header_path)
     header.write_to( target_forward_header_path )

     base, ext = os.path.splitext( header_include_path )
@@ -118,7 +118,7 @@ def amalgamate_source( source_top_dir=None,
     source.add_file( os.path.join(lib_json, "json_value.cpp") )
     source.add_file( os.path.join(lib_json, "json_writer.cpp") )

-    print ("Writing amalgated source to %r" % target_source_path)
+    print("Writing amalgated source to %r" % target_source_path)
     source.write_to( target_source_path )

 def main():
@@ -144,7 +144,7 @@ Generate a single amalgated source and header file from the sources.
         sys.stderr.write( msg + "\n" )
         sys.exit( 1 )
     else:
-        print ("Source succesfully amalagated")
+        print("Source succesfully amalagated")

 if __name__ == "__main__":
     main()
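The pattern repeated throughout this commit is the mechanical Python 2 to 3 print conversion: `print` statements become `print()` calls, and each touched script gains `from __future__ import print_function` so the function form also works under Python 2. A minimal sketch of the idiom (the path value here is illustrative, not from the diff):

    from __future__ import print_function  # makes print() a function on Python 2 as well
    import sys

    path = "dist/jsoncpp.cpp"  # hypothetical value, for illustration only
    print("Writing amalgated source to %r" % path)  # ordinary call, works on 2 and 3
    print("warning:", path, file=sys.stderr)        # file= keyword needs the future import on py2
    print("TESTING:", path, end=' ')                # end=' ' replaces the py2 trailing comma
    print("done")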
@@ -2,6 +2,7 @@
 # encoding: utf-8
 # Baptiste Lepilleur, 2009

+from __future__ import print_function
 from dircache import listdir
 import re
 import fnmatch
@@ -190,12 +191,12 @@ if __name__ == "__main__":
             test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
         for ant_pattern, accepted_matches, rejected_matches in test_cases:
             rex = ant_pattern_to_re( ant_pattern )
-            print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
+            print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
             for accepted_match in accepted_matches:
-                print 'Accepted?:', accepted_match
-                self.assert_( rex.match( accepted_match ) is not None )
+                print('Accepted?:', accepted_match)
+                self.assertTrue( rex.match( accepted_match ) is not None )
             for rejected_match in rejected_matches:
-                print 'Rejected?:', rejected_match
-                self.assert_( rex.match( rejected_match ) is None )
+                print('Rejected?:', rejected_match)
+                self.assertTrue( rex.match( rejected_match ) is None )

     unittest.main()
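Two details in this hunk are worth noting. `TestCase.assert_` is a long-deprecated alias of `assertTrue` (the old aliases were eventually removed from unittest in recent Python 3 releases), so the rename is needed regardless. The untouched `from dircache import listdir` line, however, still blocks Python 3: the `dircache` module was removed in 3.0, with `os.listdir` as the usual (uncached) replacement. A hedged sketch of the substitution:

    import unittest
    from os import listdir  # dircache was removed in Python 3; os.listdir is the stock replacement

    class PatternTest(unittest.TestCase):
        def test_alias(self):
            # assertTrue is the supported spelling; assert_ was a deprecated alias.
            self.assertTrue(len(listdir('.')) >= 0)

    if __name__ == "__main__":
        unittest.main()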
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import os.path

 def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
@@ -7,7 +8,7 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
     try:
         f = open(path, 'rb')
     except IOError as msg:
-        print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
+        print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
         return False
     try:
         raw_lines = f.readlines()
@@ -15,7 +16,7 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
     f.close()
     fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
     if raw_lines != fixed_lines:
-        print '%s =>' % path,
+        print('%s =>' % path, end=' ')
         if not is_dry_run:
             f = open(path, "wb")
             try:
@@ -23,7 +24,7 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
             finally:
                 f.close()
         if verbose:
-            print is_dry_run and ' NEED FIX' or ' FIXED'
+            print(is_dry_run and ' NEED FIX' or ' FIXED')
     return True
 ##
 ##
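Two things this hunk does not fix: the error message formats the builtin `file` rather than `path` (a pre-existing bug carried through the conversion), and the file is opened in `'rb'`, so under Python 3 `readlines()` yields `bytes` and `line.rstrip('\r\n')` would raise `TypeError`. A Python 3-only sketch of a fully converted variant, under those assumptions:

    from __future__ import print_function
    import sys

    def fix_source_eol(path, is_dry_run=True, verbose=True, eol='\n'):
        # Sketch only: read as text with newline='' so '\r\n' survives intact (py3 open()).
        try:
            with open(path, 'r', newline='') as f:
                raw_lines = f.readlines()
        except IOError as msg:
            print("%s: I/O Error: %s" % (path, msg), file=sys.stderr)  # 'path', not the builtin 'file'
            return False
        fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
        if raw_lines != fixed_lines and not is_dry_run:
            with open(path, 'w', newline='') as f:
                f.writelines(fixed_lines)
        return True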
@@ -1,5 +1,6 @@
 """Updates the license text in source file.
 """
+from __future__ import print_function

 # An existing license is found if the file starts with the string below,
 # and ends with the first blank line.
@@ -34,11 +35,11 @@ def update_license( path, dry_run, show_diff ):
         if not dry_run:
             with open( path, 'wb' ) as fout:
                 fout.write( new_text.replace('\n', newline ) )
-        print 'Updated', path
+        print('Updated', path)
         if show_diff:
             import difflib
-            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
-                                                   new_text.split('\n') ) )
+            print('\n'.join( difflib.unified_diff( original_text.split('\n'),
+                                                   new_text.split('\n') ) ))
         return True
     return False

@@ -83,7 +84,7 @@ python devtools\licenseupdater.py include src
     parser.enable_interspersed_args()
     options, args = parser.parse_args()
     update_license_in_source_directories( args, options.dry_run, options.show_diff )
-    print 'Done'
+    print('Done')

 if __name__ == '__main__':
     import sys
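The multi-line `print` around `difflib.unified_diff` is the fiddliest conversion in the commit, since the closing parenthesis of the new call has to land after the continuation line. For reference, `difflib.unified_diff` takes two sequences of lines and returns an iterator of diff lines; the data below is illustrative:

    import difflib

    old = "line one\nline two\n".split('\n')
    new = "line one\nline 2\n".split('\n')
    # unified_diff yields the ---/+++ header and hunk lines as strings.
    print('\n'.join(difflib.unified_diff(old, new, fromfile='before', tofile='after')))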
doxybuild.py (28 changed lines)
@@ -1,12 +1,12 @@
 """Script to generate doxygen documentation.
 """
+from __future__ import print_function
+from devtools import tarball
 import re
 import os
 import os.path
 import sys
 import shutil
-from devtools import tarball

 def find_program(*filenames):
     """find a program in folders path_lst, and sets env[var]
@@ -33,9 +33,9 @@ def do_subst_in_file(targetfile, sourcefile, dict):
         contents = f.read()
         f.close()
     except:
-        print "Can't read source file %s"%sourcefile
+        print("Can't read source file %s"%sourcefile)
         raise
-    for (k,v) in dict.items():
+    for (k,v) in list(dict.items()):
         v = v.replace('\\','\\\\')
         contents = re.sub(k, v, contents)
     try:
@@ -43,7 +43,7 @@ def do_subst_in_file(targetfile, sourcefile, dict):
         f.write(contents)
         f.close()
     except:
-        print "Can't write target file %s"%targetfile
+        print("Can't write target file %s"%targetfile)
         raise

 def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
@@ -53,12 +53,12 @@ def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
     try:
         os.chdir( working_dir )
         cmd = [doxygen_path, config_file]
-        print 'Running:', ' '.join( cmd )
+        print('Running:', ' '.join( cmd ))
         try:
             import subprocess
         except:
             if os.system( ' '.join( cmd ) ) != 0:
-                print 'Documentation generation failed'
+                print('Documentation generation failed')
                 return False
         else:
             if is_silent:
@@ -67,8 +67,8 @@ def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
                 process = subprocess.Popen( cmd )
             stdout, _ = process.communicate()
             if process.returncode:
-                print 'Documentation generation failed:'
-                print stdout
+                print('Documentation generation failed:')
+                print(stdout)
                 return False
         return True
     finally:
@@ -107,7 +107,7 @@ def build_doc( options, make_release=False ):
         }

     if os.path.isdir( output_dir ):
-        print 'Deleting directory:', output_dir
+        print('Deleting directory:', output_dir)
         shutil.rmtree( output_dir )
     if not os.path.isdir( output_dir ):
         os.makedirs( output_dir )
@@ -115,15 +115,15 @@ def build_doc( options, make_release=False ):
     do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
     ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
     if not options.silent:
-        print open(warning_log_path, 'rb').read()
+        print(open(warning_log_path, 'rb').read())
     index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
-    print 'Generated documentation can be found in:'
-    print index_path
+    print('Generated documentation can be found in:')
+    print(index_path)
     if options.open:
         import webbrowser
         webbrowser.open( 'file://' + index_path )
     if options.make_tarball:
-        print 'Generating doc tarball to', tarball_path
+        print('Generating doc tarball to', tarball_path)
         tarball_sources = [
             output_dir,
             'README.txt',
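Wrapping `dict.items()` in `list(...)` is the standard 2to3 output: on Python 3, `items()` returns a live view, and `list()` restores the Python 2 snapshot semantics. That only matters when the dictionary is mutated during iteration; for a read-only loop like the one in `do_subst_in_file` the wrapper is harmless but unnecessary. A sketch of the case where it does matter:

    d = {'a': 1, 'b': 2}
    # Without list(), Python 3 raises RuntimeError: dictionary changed size during iteration.
    for k, v in list(d.items()):
        if v == 1:
            del d[k]  # safe: we iterate over a snapshot, not the live view
    print(d)          # {'b': 2}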
@@ -14,6 +14,7 @@ python makerelease.py 0.5.0 0.6.0-dev
 Note: This was for Subversion. Now that we are in GitHub, we do not
 need to build versioned tarballs anymore, so makerelease.py is defunct.
 """
+from __future__ import print_function
 import os.path
 import subprocess
 import sys
@@ -46,7 +47,7 @@ class SVNError(Exception):

 def svn_command( command, *args ):
     cmd = ['svn', '--non-interactive', command] + list(args)
-    print 'Running:', ' '.join( cmd )
+    print('Running:', ' '.join( cmd ))
     process = subprocess.Popen( cmd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.STDOUT )
@@ -117,7 +118,7 @@ def svn_export( tag_url, export_dir ):
 def fix_sources_eol( dist_dir ):
     """Set file EOL for tarball distribution.
     """
-    print 'Preparing exported source file EOL for distribution...'
+    print('Preparing exported source file EOL for distribution...')
     prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
     win_sources = antglob.glob( dist_dir,
         includes = '**/*.sln **/*.vcproj',
@@ -148,7 +149,7 @@ def download( url, target_path ):

 def check_compile( distcheck_top_dir, platform ):
     cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
-    print 'Running:', ' '.join( cmd )
+    print('Running:', ' '.join( cmd ))
     log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
     flog = open( log_path, 'wb' )
     try:
@@ -179,9 +180,9 @@ def run_sftp_batch( userhost, sftp, batch, retry=0 ):
     # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
     cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
     error = None
-    for retry_index in xrange(0, max(1,retry)):
+    for retry_index in range(0, max(1,retry)):
         heading = retry_index == 0 and 'Running:' or 'Retrying:'
-        print heading, ' '.join( cmd )
+        print(heading, ' '.join( cmd ))
         process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
         stdout = process.communicate()[0]
         if process.returncode != 0:
@@ -219,21 +220,21 @@ exit
     upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
     paths_to_remove = existing_paths - upload_paths
     if paths_to_remove:
-        print 'Removing the following file from web:'
-        print '\n'.join( paths_to_remove )
+        print('Removing the following file from web:')
+        print('\n'.join( paths_to_remove ))
         stdout = run_sftp_batch( userhost, sftp, """cd htdocs
 rm %s
 exit""" % ' '.join(paths_to_remove) )
-    print 'Uploading %d files:' % len(upload_paths)
+    print('Uploading %d files:' % len(upload_paths))
     batch_size = 10
     upload_paths = list(upload_paths)
     start_time = time.time()
-    for index in xrange(0,len(upload_paths),batch_size):
+    for index in range(0,len(upload_paths),batch_size):
         paths = upload_paths[index:index+batch_size]
         file_per_sec = (time.time() - start_time) / (index+1)
         remaining_files = len(upload_paths) - index
         remaining_sec = file_per_sec * remaining_files
-        print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
+        print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
         run_sftp_batch( userhost, sftp, """cd htdocs
 lcd %s
 mput %s
@@ -297,7 +298,7 @@ Warning: --force should only be used when developping/testing the release script
     else:
         msg = check_no_pending_commit()
     if not msg:
-        print 'Setting version to', release_version
+        print('Setting version to', release_version)
         set_version( release_version )
         svn_commit( 'Release ' + release_version )
         tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
@@ -305,11 +306,11 @@ Warning: --force should only be used when developping/testing the release script
         if options.retag_release:
             svn_remove_tag( tag_url, 'Overwriting previous tag' )
         else:
-            print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
+            print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
             sys.exit( 1 )
         svn_tag_sandbox( tag_url, 'Release ' + release_version )

-        print 'Generated doxygen document...'
+        print('Generated doxygen document...')
 ##        doc_dirname = r'jsoncpp-api-html-0.5.0'
 ##        doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
         doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
@@ -323,11 +324,11 @@ Warning: --force should only be used when developping/testing the release script

         source_dir = 'jsoncpp-src-' + release_version
         source_tarball_path = 'dist/%s.tar.gz' % source_dir
-        print 'Generating source tarball to', source_tarball_path
+        print('Generating source tarball to', source_tarball_path)
         tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )

         amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
-        print 'Generating amalgamation source tarball to', amalgamation_tarball_path
+        print('Generating amalgamation source tarball to', amalgamation_tarball_path)
         amalgamation_dir = 'dist/amalgamation'
         amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
         amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
@@ -337,41 +338,41 @@ Warning: --force should only be used when developping/testing the release script
         # Decompress source tarball, download and install scons-local
         distcheck_dir = 'dist/distcheck'
         distcheck_top_dir = distcheck_dir + '/' + source_dir
-        print 'Decompressing source tarball to', distcheck_dir
+        print('Decompressing source tarball to', distcheck_dir)
         rmdir_if_exist( distcheck_dir )
         tarball.decompress( source_tarball_path, distcheck_dir )
         scons_local_path = 'dist/scons-local.tar.gz'
-        print 'Downloading scons-local to', scons_local_path
+        print('Downloading scons-local to', scons_local_path)
         download( SCONS_LOCAL_URL, scons_local_path )
-        print 'Decompressing scons-local to', distcheck_top_dir
+        print('Decompressing scons-local to', distcheck_top_dir)
         tarball.decompress( scons_local_path, distcheck_top_dir )

         # Run compilation
-        print 'Compiling decompressed tarball'
+        print('Compiling decompressed tarball')
         all_build_status = True
         for platform in options.platforms.split(','):
-            print 'Testing platform:', platform
+            print('Testing platform:', platform)
             build_status, log_path = check_compile( distcheck_top_dir, platform )
-            print 'see build log:', log_path
-            print build_status and '=> ok' or '=> FAILED'
+            print('see build log:', log_path)
+            print(build_status and '=> ok' or '=> FAILED')
             all_build_status = all_build_status and build_status
         if not build_status:
-            print 'Testing failed on at least one platform, aborting...'
+            print('Testing failed on at least one platform, aborting...')
             svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
             sys.exit(1)
         if options.user:
             if not options.no_web:
-                print 'Uploading documentation using user', options.user
+                print('Uploading documentation using user', options.user)
                 sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
-                print 'Completed documentation upload'
+                print('Completed documentation upload')
-            print 'Uploading source and documentation tarballs for release using user', options.user
+            print('Uploading source and documentation tarballs for release using user', options.user)
             sourceforge_release_tarball( SOURCEFORGE_PROJECT,
                                          [source_tarball_path, doc_tarball_path],
                                          user=options.user, sftp=options.sftp )
-            print 'Source and doc release tarballs uploaded'
+            print('Source and doc release tarballs uploaded')
         else:
-            print 'No upload user specified. Web site and download tarbal were not uploaded.'
-            print 'Tarball can be found at:', doc_tarball_path
+            print('No upload user specified. Web site and download tarbal were not uploaded.')
+            print('Tarball can be found at:', doc_tarball_path)

         # Set next version number and commit
         set_version( next_version )
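`xrange` is gone in Python 3; `range` there is already lazy, so the rename is safe for loops like the retry and upload-batching code above. On Python 2 the renamed code allocates a real list, which only matters for very large ranges. A sketch with illustrative data:

    # Python 3: range() is a lazy sequence, so this loops without building a list.
    for retry_index in range(0, 3):
        print('attempt', retry_index + 1)

    # Batching pattern from the upload loop, with made-up file names:
    upload_paths = ['a.html', 'b.html', 'c.html']
    batch_size = 2
    for index in range(0, len(upload_paths), batch_size):
        print(upload_paths[index:index + batch_size])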
@@ -1,5 +1,6 @@
 import re
 from SCons.Script import * # the usual scons stuff you get in a SConscript
+import collections

 def generate(env):
     """
@@ -25,28 +26,28 @@ def generate(env):
             contents = f.read()
             f.close()
         except:
-            raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile
+            raise SCons.Errors.UserError("Can't read source file %s"%sourcefile)
-        for (k,v) in dict.items():
+        for (k,v) in list(dict.items()):
             contents = re.sub(k, v, contents)
         try:
             f = open(targetfile, 'wb')
             f.write(contents)
             f.close()
         except:
-            raise SCons.Errors.UserError, "Can't write target file %s"%targetfile
+            raise SCons.Errors.UserError("Can't write target file %s"%targetfile)
         return 0 # success

     def subst_in_file(target, source, env):
-        if not env.has_key('SUBST_DICT'):
+        if 'SUBST_DICT' not in env:
-            raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set."
+            raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.")
         d = dict(env['SUBST_DICT']) # copy it
-        for (k,v) in d.items():
+        for (k,v) in list(d.items()):
-            if callable(v):
+            if isinstance(v, collections.Callable):
                 d[k] = env.subst(v()).replace('\\','\\\\')
             elif SCons.Util.is_String(v):
                 d[k] = env.subst(v).replace('\\','\\\\')
             else:
-                raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))
+                raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v)))
         for (t,s) in zip(target, source):
             return do_subst_in_file(str(t), str(s), d)

@@ -60,8 +61,8 @@ def generate(env):
         Returns original target, source tuple unchanged.
         """
         d = env['SUBST_DICT'].copy() # copy it
-        for (k,v) in d.items():
+        for (k,v) in list(d.items()):
-            if callable(v):
+            if isinstance(v, collections.Callable):
                 d[k] = env.subst(v())
             elif SCons.Util.is_String(v):
                 d[k]=env.subst(v)
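Three distinct py2-isms are fixed here. `raise E, "msg"` is a syntax error on Python 3 and becomes the call form `raise E("msg")`; `d.has_key(k)` is gone and becomes `k in d`; and 2to3 rewrites `callable(v)` to `isinstance(v, collections.Callable)` because the `callable()` builtin was missing in Python 3.0/3.1. The builtin returned in 3.2, and `collections.Callable` itself moved to `collections.abc` (the old alias was dropped in 3.10), so on any modern interpreter the plain builtin is the better choice. A sketch, with an illustrative stand-in for the SCons environment:

    class SubstError(Exception):
        pass

    env = {'SUBST_DICT': {'%NAME%': lambda: 'jsoncpp'}}  # hypothetical stand-in for an SCons env

    if 'SUBST_DICT' not in env:                      # replaces env.has_key('SUBST_DICT')
        raise SubstError("SUBST_DICT must be set")   # call form works on both 2 and 3

    for k, v in list(env['SUBST_DICT'].items()):
        if callable(v):                              # fine on 3.2+; 2to3 only avoided 3.0/3.1
            print(k, '->', v())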
@@ -1,11 +1,12 @@
+from __future__ import print_function
 import glob
 import os.path
 for path in glob.glob( '*.json' ):
     text = file(path,'rt').read()
     target = os.path.splitext(path)[0] + '.expected'
     if os.path.exists( target ):
-        print 'skipping:', target
+        print('skipping:', target)
     else:
-        print 'creating:', target
+        print('creating:', target)
         file(target,'wt').write(text)
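The converted script still calls the `file` builtin, which was removed in Python 3, so it remains py2-only despite the new `print` calls. `open()` is the drop-in replacement on both versions; a hedged sketch of the fully converted loop:

    from __future__ import print_function
    import glob
    import os.path

    for path in glob.glob('*.json'):
        with open(path, 'rt') as f:  # open() replaces the removed file() builtin
            text = f.read()
        target = os.path.splitext(path)[0] + '.expected'
        if os.path.exists(target):
            print('skipping:', target)
        else:
            print('creating:', target)
            with open(target, 'wt') as f:
                f.write(text)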
@@ -1,12 +1,12 @@
 # Simple implementation of a json test runner to run the test against json-py.
+from __future__ import print_function
 import sys
 import os.path
 import json
 import types

 if len(sys.argv) != 2:
-    print "Usage: %s input-json-file", sys.argv[0]
+    print("Usage: %s input-json-file", sys.argv[0])
     sys.exit(3)

 input_path = sys.argv[1]
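Note that the usage line carries over a pre-existing bug: the comma means `%s` is never substituted, so the script prints the literal format string and `sys.argv[0]` as two separate values. The straight conversion preserves that behavior; the likely intent was `%` formatting:

    import sys

    # As converted: prints "Usage: %s input-json-file <argv0>" (two space-separated values).
    print("Usage: %s input-json-file", sys.argv[0])

    # Probable intent (illustrative fix, not part of the commit):
    print("Usage: %s input-json-file" % sys.argv[0])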
@@ -1,3 +1,4 @@
+from __future__ import print_function
 import sys
 import os
 import os.path
@@ -11,7 +12,7 @@ def compareOutputs( expected, actual, message ):
     actual = actual.strip().replace('\r','').split('\n')
     diff_line = 0
     max_line_to_compare = min( len(expected), len(actual) )
-    for index in xrange(0,max_line_to_compare):
+    for index in range(0,max_line_to_compare):
         if expected[index].strip() != actual[index].strip():
             diff_line = index + 1
             break
@@ -51,7 +52,7 @@ def runAllTests( jsontest_executable_path, input_dir = None,
     for input_path in tests + test_jsonchecker:
         expect_failure = os.path.basename( input_path ).startswith( 'fail' )
         is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
-        print 'TESTING:', input_path,
+        print('TESTING:', input_path, end=' ')
         options = is_json_checker_test and '--json-checker' or ''
         pipe = os.popen( "%s%s %s %s" % (
             valgrind_path, jsontest_executable_path, options,
@@ -61,24 +62,24 @@ def runAllTests( jsontest_executable_path, input_dir = None,
         if is_json_checker_test:
             if expect_failure:
                 if status is None:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, 'Parsing should have failed:\n%s' %
                                           safeReadFile(input_path)) )
                 else:
-                    print 'OK'
+                    print('OK')
             else:
                 if status is not None:
-                    print 'FAILED'
+                    print('FAILED')
                     failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
                 else:
-                    print 'OK'
+                    print('OK')
         else:
             base_path = os.path.splitext(input_path)[0]
             actual_output = safeReadFile( base_path + '.actual' )
             actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' )
             file(base_path + '.process-output','wt').write( process_output )
             if status:
-                print 'parsing failed'
+                print('parsing failed')
                 failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
             else:
                 expected_output_path = os.path.splitext(input_path)[0] + '.expected'
@@ -86,23 +87,23 @@ def runAllTests( jsontest_executable_path, input_dir = None,
             detail = ( compareOutputs( expected_output, actual_output, 'input' )
                        or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) )
             if detail:
-                print 'FAILED'
+                print('FAILED')
                 failed_tests.append( (input_path, detail) )
             else:
-                print 'OK'
+                print('OK')

     if failed_tests:
-        print
+        print()
-        print 'Failure details:'
+        print('Failure details:')
         for failed_test in failed_tests:
-            print '* Test', failed_test[0]
+            print('* Test', failed_test[0])
-            print failed_test[1]
+            print(failed_test[1])
-            print
+            print()
-        print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
-                                                       len(failed_tests) )
+        print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
+                                                       len(failed_tests) ))
         return 1
     else:
-        print 'All %d tests passed.' % len(tests)
+        print('All %d tests passed.' % len(tests))
         return 0

 def main():
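A bare py2 `print` statement emits an empty line; the function form is `print()` with no arguments. The trailing parenthesis added to the two-line `Test results:` call is the easiest thing to miss when converting by hand, since the expression spans a continuation line. A sketch of both conversions, with made-up data:

    from __future__ import print_function

    tests, failed_tests = ['t1', 't2', 't3'], ['t2']  # illustrative data
    print()                                           # bare py2 `print` -> empty line
    print('Test results: %d passed, %d failed.' % (len(tests) - len(failed_tests),
                                                   len(failed_tests)))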
@@ -1,8 +1,9 @@
+from __future__ import print_function
+from glob import glob
 import sys
 import os
 import os.path
 import subprocess
-from glob import glob
 import optparse

 VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
@@ -28,29 +29,29 @@ def runAllTests( exe_path, use_valgrind=False ):
     test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
     status, test_names = test_proxy.run( ['--list-tests'] )
     if not status:
-        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
+        print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr)
         return 1
     test_names = [name.strip() for name in test_names.strip().split('\n')]
     failures = []
     for name in test_names:
-        print 'TESTING %s:' % name,
+        print('TESTING %s:' % name, end=' ')
         succeed, result = test_proxy.run( ['--test', name] )
         if succeed:
-            print 'OK'
+            print('OK')
         else:
             failures.append( (name, result) )
-            print 'FAILED'
+            print('FAILED')
     failed_count = len(failures)
     pass_count = len(test_names) - failed_count
     if failed_count:
-        print
+        print()
         for name, result in failures:
-            print result
+            print(result)
-        print '%d/%d tests passed (%d failure(s))' % (
-            pass_count, len(test_names), failed_count)
+        print('%d/%d tests passed (%d failure(s))' % (
+            pass_count, len(test_names), failed_count))
         return 1
     else:
-        print 'All %d tests passed' % len(test_names)
+        print('All %d tests passed' % len(test_names))
         return 0

 def main():