Merge pull request #139 from cdunn2001/some-python-changes

Some python changes.

* Better messaging.
* Make `doxybuild.py` work with python3.4
Christopher Dunn 2015-01-24 16:24:12 -06:00
commit 2a46e295ec
17 changed files with 545 additions and 539 deletions
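For context, the Python 3.4 incompatibilities addressed below are mostly of the kind sketched here. This is a minimal illustration, not code from the diff, and the helper names write_text and list_files are made up. It shows two recurring patterns: encoding text explicitly before writing to a file opened in binary mode, and walking directories with os.walk rather than the removed os.path.walk.

import os

def write_text(path, text):
    # On Python 3, a file opened in 'wb' mode expects bytes, so encode explicitly.
    with open(path, 'wb') as f:
        f.write(text.encode('UTF-8'))

def list_files(top):
    # os.path.walk was removed in Python 3; os.walk exists on both 2 and 3.
    found = []
    for dirpath, dirnames, filenames in os.walk(top):
        for name in filenames:
            found.append(os.path.join(dirpath, name))
    return found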

View File

@@ -10,46 +10,46 @@ import os.path
import sys

class AmalgamationFile:
    def __init__(self, top_dir):
        self.top_dir = top_dir
        self.blocks = []
    def add_text(self, text):
        if not text.endswith("\n"):
            text += "\n"
        self.blocks.append(text)
    def add_file(self, relative_input_path, wrap_in_comment=False):
        def add_marker(prefix):
            self.add_text("")
            self.add_text("// " + "/"*70)
            self.add_text("// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")))
            self.add_text("// " + "/"*70)
            self.add_text("")
        add_marker("Beginning")
        f = open(os.path.join(self.top_dir, relative_input_path), "rt")
        content = f.read()
        if wrap_in_comment:
            content = "/*\n" + content + "\n*/"
        self.add_text(content)
        f.close()
        add_marker("End")
        self.add_text("\n\n\n\n")
    def get_value(self):
        return "".join(self.blocks).replace("\r\n","\n")
    def write_to(self, output_path):
        output_dir = os.path.dirname(output_path)
        if output_dir and not os.path.isdir(output_dir):
            os.makedirs(output_dir)
        f = open(output_path, "wb")
        f.write(str.encode(self.get_value(), 'UTF-8'))
        f.close()

def amalgamate_source(source_top_dir=None,
                      target_source_path=None,
                      header_include_path=None):
    """Produces amalgated source.
    Parameters:
        source_top_dir: top-directory
@@ -57,69 +57,69 @@ def amalgamate_source( source_top_dir=None,
        header_include_path: generated header path relative to target_source_path.
    """
    print("Amalgating header...")
    header = AmalgamationFile(source_top_dir)
    header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).")
    header.add_text("/// It is intented to be used with #include <%s>" % header_include_path)
    header.add_file("LICENSE", wrap_in_comment=True)
    header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED")
    header.add_text("# define JSON_AMALGATED_H_INCLUDED")
    header.add_text("/// If defined, indicates that the source file is amalgated")
    header.add_text("/// to prevent private header inclusion.")
    header.add_text("#define JSON_IS_AMALGAMATION")
    header.add_file("include/json/version.h")
    header.add_file("include/json/config.h")
    header.add_file("include/json/forwards.h")
    header.add_file("include/json/features.h")
    header.add_file("include/json/value.h")
    header.add_file("include/json/reader.h")
    header.add_file("include/json/writer.h")
    header.add_file("include/json/assertions.h")
    header.add_text("#endif //ifndef JSON_AMALGATED_H_INCLUDED")
    target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path)
    print("Writing amalgated header to %r" % target_header_path)
    header.write_to(target_header_path)
    base, ext = os.path.splitext(header_include_path)
    forward_header_include_path = base + "-forwards" + ext
    print("Amalgating forward header...")
    header = AmalgamationFile(source_top_dir)
    header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).")
    header.add_text("/// It is intented to be used with #include <%s>" % forward_header_include_path)
    header.add_text("/// This header provides forward declaration for all JsonCpp types.")
    header.add_file("LICENSE", wrap_in_comment=True)
    header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
    header.add_text("# define JSON_FORWARD_AMALGATED_H_INCLUDED")
    header.add_text("/// If defined, indicates that the source file is amalgated")
    header.add_text("/// to prevent private header inclusion.")
    header.add_text("#define JSON_IS_AMALGAMATION")
    header.add_file("include/json/config.h")
    header.add_file("include/json/forwards.h")
    header.add_text("#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
    target_forward_header_path = os.path.join(os.path.dirname(target_source_path),
                                              forward_header_include_path)
    print("Writing amalgated forward header to %r" % target_forward_header_path)
    header.write_to(target_forward_header_path)
    print("Amalgating source...")
    source = AmalgamationFile(source_top_dir)
    source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).")
    source.add_text("/// It is intented to be used with #include <%s>" % header_include_path)
    source.add_file("LICENSE", wrap_in_comment=True)
    source.add_text("")
    source.add_text("#include <%s>" % header_include_path)
    source.add_text("")
    lib_json = "src/lib_json"
    source.add_file(os.path.join(lib_json, "json_tool.h"))
    source.add_file(os.path.join(lib_json, "json_reader.cpp"))
    source.add_file(os.path.join(lib_json, "json_batchallocator.h"))
    source.add_file(os.path.join(lib_json, "json_valueiterator.inl"))
    source.add_file(os.path.join(lib_json, "json_value.cpp"))
    source.add_file(os.path.join(lib_json, "json_writer.cpp"))
    print("Writing amalgated source to %r" % target_source_path)
    source.write_to(target_source_path)

def main():
    usage = """%prog [options]
@@ -137,12 +137,12 @@ Generate a single amalgated source and header file from the sources.
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    msg = amalgamate_source(source_top_dir=options.top_dir,
                            target_source_path=options.target_source_path,
                            header_include_path=options.header_include_path)
    if msg:
        sys.stderr.write(msg + "\n")
        sys.exit(1)
    else:
        print("Source succesfully amalagated")

View File

@@ -54,9 +54,9 @@ LINKS = DIR_LINK | FILE_LINK
ALL_NO_LINK = DIR | FILE
ALL = DIR | FILE | LINKS

_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)')

def ant_pattern_to_re(ant_pattern):
    """Generates a regular expression from the ant pattern.
    Matching convention:
    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
@@ -65,30 +65,30 @@ def ant_pattern_to_re( ant_pattern ):
    """
    rex = ['^']
    next_pos = 0
    sep_rex = r'(?:/|%s)' % re.escape(os.path.sep)
    ## print 'Converting', ant_pattern
    for match in _ANT_RE.finditer(ant_pattern):
        ## print 'Matched', match.group()
        ## print match.start(0), next_pos
        if match.start(0) != next_pos:
            raise ValueError("Invalid ant pattern")
        if match.group(1): # /**/
            rex.append(sep_rex + '(?:.*%s)?' % sep_rex)
        elif match.group(2): # **/
            rex.append('(?:.*%s)?' % sep_rex)
        elif match.group(3): # /**
            rex.append(sep_rex + '.*')
        elif match.group(4): # *
            rex.append('[^/%s]*' % re.escape(os.path.sep))
        elif match.group(5): # /
            rex.append(sep_rex)
        else: # somepath
            rex.append(re.escape(match.group(6)))
        next_pos = match.end()
    rex.append('$')
    return re.compile(''.join(rex))

def _as_list(l):
    if isinstance(l, basestring):
        return l.split()
    return l
@@ -105,37 +105,37 @@ def glob(dir_path,
    dir_path = dir_path.replace('/',os.path.sep)
    entry_type_filter = entry_type
    def is_pruned_dir(dir_name):
        for pattern in prune_dirs:
            if fnmatch.fnmatch(dir_name, pattern):
                return True
        return False
    def apply_filter(full_path, filter_rexs):
        """Return True if at least one of the filter regular expression match full_path."""
        for rex in filter_rexs:
            if rex.match(full_path):
                return True
        return False
    def glob_impl(root_dir_path):
        child_dirs = [root_dir_path]
        while child_dirs:
            dir_path = child_dirs.pop()
            for entry in listdir(dir_path):
                full_path = os.path.join(dir_path, entry)
                ## print 'Testing:', full_path,
                is_dir = os.path.isdir(full_path)
                if is_dir and not is_pruned_dir(entry): # explore child directory ?
                    ## print '===> marked for recursion',
                    child_dirs.append(full_path)
                included = apply_filter(full_path, include_filter)
                rejected = apply_filter(full_path, exclude_filter)
                if not included or rejected: # do not include entry ?
                    ## print '=> not included or rejected'
                    continue
                link = os.path.islink(full_path)
                is_file = os.path.isfile(full_path)
                if not is_file and not is_dir:
                    ## print '=> unknown entry type'
                    continue
@@ -146,57 +146,57 @@ def glob(dir_path,
                ## print '=> type: %d' % entry_type,
                if (entry_type & entry_type_filter) != 0:
                    ## print ' => KEEP'
                    yield os.path.join(dir_path, entry)
                ## else:
                ##     print ' => TYPE REJECTED'
    return list(glob_impl(dir_path))

if __name__ == "__main__":
    import unittest

    class AntPatternToRETest(unittest.TestCase):
        ## def test_conversion(self):
        ##     self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern)
        def test_matching(self):
            test_cases = [ ('path',
                             ['path'],
                             ['somepath', 'pathsuffix', '/path', '/path']),
                           ('*.py',
                             ['source.py', 'source.ext.py', '.py'],
                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']),
                           ('**/path',
                             ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']),
                           ('path/**',
                             ['path/a', 'path/path/a', 'path//'],
                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']),
                           ('/**/path',
                             ['/path', '/a/path', '/a/b/path/path', '/path/path'],
                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath']),
                           ('a/b',
                             ['a/b'],
                             ['somea/b', 'a/bsuffix', 'a/b/c']),
                           ('**/*.py',
                             ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
                             ['script.pyc', 'script.pyo', 'a.py/b']),
                           ('src/**/*.py',
                             ['src/a.py', 'src/dir/a.py'],
                             ['a/src/a.py', '/src/a.py']),
                           ]
            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
                def local_path(paths):
                    return [ p.replace('/',os.path.sep) for p in paths ]
                test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches)))
            for ant_pattern, accepted_matches, rejected_matches in test_cases:
                rex = ant_pattern_to_re(ant_pattern)
                print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
                for accepted_match in accepted_matches:
                    print('Accepted?:', accepted_match)
                    self.assertTrue(rex.match(accepted_match) is not None)
                for rejected_match in rejected_matches:
                    print('Rejected?:', rejected_match)
                    self.assertTrue(rex.match(rejected_match) is None)

    unittest.main()

View File

@@ -18,62 +18,62 @@ class BuildDesc:
        self.build_type = build_type
        self.generator = generator
    def merged_with(self, build_desc):
        """Returns a new BuildDesc by merging field content.
        Prefer build_desc fields to self fields for single valued field.
        """
        return BuildDesc(self.prepend_envs + build_desc.prepend_envs,
                         self.variables + build_desc.variables,
                         build_desc.build_type or self.build_type,
                         build_desc.generator or self.generator)
    def env(self):
        environ = os.environ.copy()
        for values_by_name in self.prepend_envs:
            for var, value in list(values_by_name.items()):
                var = var.upper()
                if type(value) is unicode:
                    value = value.encode(sys.getdefaultencoding())
                if var in environ:
                    environ[var] = value + os.pathsep + environ[var]
                else:
                    environ[var] = value
        return environ
    def cmake_args(self):
        args = ["-D%s" % var for var in self.variables]
        # skip build type for Visual Studio solution as it cause warning
        if self.build_type and 'Visual' not in self.generator:
            args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type)
        if self.generator:
            args.extend(['-G', self.generator])
        return args
    def __repr__(self):
        return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type)

class BuildData:
    def __init__(self, desc, work_dir, source_dir):
        self.desc = desc
        self.work_dir = work_dir
        self.source_dir = source_dir
        self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log')
        self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log')
        self.cmake_succeeded = False
        self.build_succeeded = False
    def execute_build(self):
        print('Build %s' % self.desc)
        self._make_new_work_dir()
        self.cmake_succeeded = self._generate_makefiles()
        if self.cmake_succeeded:
            self.build_succeeded = self._build_using_makefiles()
        return self.build_succeeded
    def _generate_makefiles(self):
        print(' Generating makefiles: ', end=' ')
        cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)]
        succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path)
        print('done' if succeeded else 'FAILED')
        return succeeded
@@ -82,58 +82,58 @@ class BuildData:
        cmd = ['cmake', '--build', self.work_dir]
        if self.desc.build_type:
            cmd += ['--config', self.desc.build_type]
        succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path)
        print('done' if succeeded else 'FAILED')
        return succeeded
    def _execute_build_subprocess(self, cmd, env, log_path):
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
                                   env=env)
        stdout, _ = process.communicate()
        succeeded = (process.returncode == 0)
        with open(log_path, 'wb') as flog:
            log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
            flog.write(fix_eol(log))
        return succeeded
    def _make_new_work_dir(self):
        if os.path.isdir(self.work_dir):
            print(' Removing work directory', self.work_dir)
            shutil.rmtree(self.work_dir, ignore_errors=True)
        if not os.path.isdir(self.work_dir):
            os.makedirs(self.work_dir)

def fix_eol(stdout):
    """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
    """
    return re.sub('\r*\n', os.linesep, stdout)

def load_build_variants_from_config(config_path):
    with open(config_path, 'rb') as fconfig:
        data = json.load(fconfig)
    variants = data[ 'cmake_variants' ]
    build_descs_by_axis = collections.defaultdict(list)
    for axis in variants:
        axis_name = axis["name"]
        build_descs = []
        if "generators" in axis:
            for generator_data in axis["generators"]:
                for generator in generator_data["generator"]:
                    build_desc = BuildDesc(generator=generator,
                                           prepend_envs=generator_data.get("env_prepend"))
                    build_descs.append(build_desc)
        elif "variables" in axis:
            for variables in axis["variables"]:
                build_desc = BuildDesc(variables=variables)
                build_descs.append(build_desc)
        elif "build_types" in axis:
            for build_type in axis["build_types"]:
                build_desc = BuildDesc(build_type=build_type)
                build_descs.append(build_desc)
        build_descs_by_axis[axis_name].extend(build_descs)
    return build_descs_by_axis

def generate_build_variants(build_descs_by_axis):
    """Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
    axis_names = list(build_descs_by_axis.keys())
    build_descs = []
@@ -141,8 +141,8 @@ def generate_build_variants( build_descs_by_axis ):
        if len(build_descs):
            # for each existing build_desc and each axis build desc, create a new build_desc
            new_build_descs = []
            for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs):
                new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc))
            build_descs = new_build_descs
        else:
            build_descs = axis_build_descs
@@ -174,60 +174,57 @@ $tr_builds
</table>
</body></html>''')

def generate_html_report(html_report_path, builds):
    report_dir = os.path.dirname(html_report_path)
    # Vertical axis: generator
    # Horizontal: variables, then build_type
    builds_by_generator = collections.defaultdict(list)
    variables = set()
    build_types_by_variable = collections.defaultdict(set)
    build_by_pos_key = {} # { (generator, var_key, build_type): build }
    for build in builds:
        builds_by_generator[build.desc.generator].append(build)
        var_key = tuple(sorted(build.desc.variables))
        variables.add(var_key)
        build_types_by_variable[var_key].add(build.desc.build_type)
        pos_key = (build.desc.generator, var_key, build.desc.build_type)
        build_by_pos_key[pos_key] = build
    variables = sorted(variables)
    th_vars = []
    th_build_types = []
    for variable in variables:
        build_types = sorted(build_types_by_variable[variable])
        nb_build_type = len(build_types_by_variable[variable])
        th_vars.append('<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape(' '.join(variable))))
        for build_type in build_types:
            th_build_types.append('<th>%s</th>' % cgi.escape(build_type))
    tr_builds = []
    for generator in sorted(builds_by_generator):
        tds = [ '<td>%s</td>\n' % cgi.escape(generator) ]
        for variable in variables:
            build_types = sorted(build_types_by_variable[variable])
            for build_type in build_types:
                pos_key = (generator, variable, build_type)
                build = build_by_pos_key.get(pos_key)
                if build:
                    cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
                    build_status = 'ok' if build.build_succeeded else 'FAILED'
                    cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir)
                    build_log_url = os.path.relpath(build.build_log_path, report_dir)
                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
                    if build.cmake_succeeded:
                        td += '<br><a href="%s" class="%s">Build: %s</a>' % ( build_log_url, build_status.lower(), build_status)
                    td += '</td>'
                else:
                    td = '<td></td>'
                tds.append(td)
        tr_builds.append('<tr>%s</tr>' % '\n'.join(tds))
    html = HTML_TEMPLATE.substitute( title='Batch build report',
        th_vars=' '.join(th_vars),
        th_build_types=' '.join(th_build_types),
        tr_builds='\n'.join(tr_builds))
    with open(html_report_path, 'wt') as fhtml:
        fhtml.write(html)
    print('HTML report generated in:', html_report_path)

def main():
@@ -246,33 +243,33 @@ python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.j
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) < 3:
        parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.")
    work_dir = args[0]
    source_dir = args[1].rstrip('/\\')
    config_paths = args[2:]
    for config_path in config_paths:
        if not os.path.isfile(config_path):
            parser.error("Can not read: %r" % config_path)
    # generate build variants
    build_descs = []
    for config_path in config_paths:
        build_descs_by_axis = load_build_variants_from_config(config_path)
        build_descs.extend(generate_build_variants(build_descs_by_axis))
    print('Build variants (%d):' % len(build_descs))
    # assign build directory for each variant
    if not os.path.isdir(work_dir):
        os.makedirs(work_dir)
    builds = []
    with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap:
        for index, build_desc in enumerate(build_descs):
            build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1))
            builds.append(BuildData(build_desc, build_desc_work_dir, source_dir))
            fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc))
    for build in builds:
        build.execute_build()
    html_report_path = os.path.join(work_dir, 'batchbuild-report.html')
    generate_html_report(html_report_path, builds)
    print('Done')

View File

@@ -1,10 +1,10 @@
from __future__ import print_function
import os.path

def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile(path):
        raise ValueError('Path "%s" is not a file' % path)
    try:
        f = open(path, 'rb')
    except IOError as msg:
@@ -29,27 +29,27 @@ def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
##
##
##
##def _do_fix(is_dry_run = True):
##    from waftools import antglob
##    python_sources = antglob.glob('.',
##        includes = '**/*.py **/wscript **/wscript_build',
##        excludes = antglob.default_excludes + './waf.py',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
##    for path in python_sources:
##        _fix_python_source(path, is_dry_run)
##
##    cpp_sources = antglob.glob('.',
##        includes = '**/*.cpp **/*.h **/*.inl',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build')
##    for path in cpp_sources:
##        _fix_source_eol(path, is_dry_run)
##
##
##def dry_fix(context):
##    _do_fix(is_dry_run = True)
##
##def fix(context):
##    _do_fix(is_dry_run = False)
##
##def shutdown():
##    pass

View File

@@ -13,7 +13,7 @@ BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
""".replace('\r\n','\n')

def update_license(path, dry_run, show_diff):
    """Update the license statement in the specified file.
    Parameters:
      path: path of the C++ source file to update.
@@ -22,28 +22,28 @@ def update_license( path, dry_run, show_diff ):
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file.
    """
    with open(path, 'rt') as fin:
        original_text = fin.read().replace('\r\n','\n')
        newline = fin.newlines and fin.newlines[0] or '\n'
    if not original_text.startswith(LICENSE_BEGIN):
        # No existing license found => prepend it
        new_text = BRIEF_LICENSE + original_text
    else:
        license_end_index = original_text.index('\n\n') # search first blank line
        new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
    if original_text != new_text:
        if not dry_run:
            with open(path, 'wb') as fout:
                fout.write(new_text.replace('\n', newline))
        print('Updated', path)
        if show_diff:
            import difflib
            print('\n'.join(difflib.unified_diff(original_text.split('\n'),
                                                 new_text.split('\n'))))
        return True
    return False

def update_license_in_source_directories(source_dirs, dry_run, show_diff):
    """Updates license text in C++ source files found in directory source_dirs.
    Parameters:
      source_dirs: list of directory to scan for C++ sources. Directories are
@@ -56,11 +56,11 @@ def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
    from devtools import antglob
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    for source_dir in source_dirs:
        cpp_sources = antglob.glob(source_dir,
            includes = '''**/*.h **/*.cpp **/*.inl''',
            prune_dirs = prune_dirs)
        for source in cpp_sources:
            update_license(source, dry_run, show_diff)

def main():
    usage = """%prog DIR [DIR2...]
@@ -83,7 +83,7 @@ python devtools\licenseupdater.py include src
        help="""On update, show change made to the file.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    update_license_in_source_directories(args, options.dry_run, options.show_diff)
    print('Done')

if __name__ == '__main__':

View File

@@ -1,5 +1,5 @@
from contextlib import closing
import os
import tarfile

TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
@@ -13,41 +13,35 @@ def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
       prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
                   to make them child of root.
    """
    base_dir = os.path.normpath(os.path.abspath(base_dir))
    def archive_name(path):
        """Makes path relative to base_dir."""
        path = os.path.normpath(os.path.abspath(path))
        common_path = os.path.commonprefix((base_dir, path))
        archive_name = path[len(common_path):]
        if os.path.isabs(archive_name):
            archive_name = archive_name[1:]
        return os.path.join(prefix_dir, archive_name)
    def visit(tar, dirname, names):
        for name in names:
            path = os.path.join(dirname, name)
            if os.path.isfile(path):
                path_in_tar = archive_name(path)
                tar.add(path, path_in_tar)
    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
    with closing(tarfile.TarFile.open(tarball_path, 'w:gz',
                                      compresslevel=compression)) as tar:
        for source in sources:
            source_path = source
            if os.path.isdir(source):
                for dirpath, dirnames, filenames in os.walk(source_path):
                    visit(tar, dirpath, filenames)
            else:
                path_in_tar = archive_name(source_path)
                tar.add(source_path, path_in_tar) # filename, arcname

def decompress(tarball_path, base_dir):
    """Decompress the gzipped tarball into directory base_dir.
    """
    with closing(tarfile.TarFile.open(tarball_path)) as tar:
        tar.extractall(base_dir)

View File

@@ -1,20 +1,35 @@
"""Script to generate doxygen documentation.
"""
from __future__ import print_function
from __future__ import unicode_literals
from devtools import tarball
from contextlib import contextmanager
import subprocess
import traceback
import re
import os
import sys
import shutil

@contextmanager
def cd(newdir):
    """
    http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
    """
    prevdir = os.getcwd()
    os.chdir(newdir)
    try:
        yield
    finally:
        os.chdir(prevdir)

def find_program(*filenames):
    """find a program in folders path_lst, and sets env[var]
    @param filenames: a list of possible names of the program to search for
    @return: the full path of the filename if found, or '' if filename could not be found
    """
    paths = os.environ.get('PATH', '').split(os.pathsep)
    suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or ''
    for filename in filenames:
        for name in [filename+ext for ext in suffixes.split()]:
            for directory in paths:
@@ -28,53 +43,56 @@ def do_subst_in_file(targetfile, sourcefile, dict):
    For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
    then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
    """
    with open(sourcefile, 'r') as f:
        contents = f.read()
    for (k,v) in list(dict.items()):
        v = v.replace('\\','\\\\')
        contents = re.sub(k, v, contents)
    with open(targetfile, 'w') as f:
        f.write(contents)

def getstatusoutput(cmd):
    """cmd is a list.
    """
    try:
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        output, _ = process.communicate()
        status = process.returncode
    except:
        status = -1
        output = traceback.format_exc()
    return status, output

def run_cmd(cmd, silent=False):
    """Raise exception on failure.
    """
    info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
    print(info)
    sys.stdout.flush()
    if silent:
        status, output = getstatusoutput(cmd)
    else:
        status, output = os.system(' '.join(cmd)), ''
    if status:
        msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
        raise Exception(msg)

def assert_is_exe(path):
    if not path:
        raise Exception('path is empty.')
    if not os.path.isfile(path):
        raise Exception('%r is not a file.' %path)
    if not os.access(path, os.X_OK):
        raise Exception('%r is not executable by this user.' %path)

def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
    assert_is_exe(doxygen_path)
    config_file = os.path.abspath(config_file)
    with cd(working_dir):
        cmd = [doxygen_path, config_file]
        run_cmd(cmd, is_silent)

def build_doc(options, make_release=False):
    if make_release:
        options.make_tarball = True
        options.with_dot = True
@@ -83,45 +101,45 @@ def build_doc( options, make_release=False ):
        options.open = False
        options.silent = True

    version = open('version', 'rt').read().strip()
    output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    top_dir = os.path.abspath('.')
    html_output_dirname = 'jsoncpp-api-html-' + version
    tarball_path = os.path.join('dist', html_output_dirname + '.tar.gz')
    warning_log_path = os.path.join(output_dir, '../jsoncpp-doxygen-warning.log')
    html_output_path = os.path.join(output_dir, html_output_dirname)
    def yesno(bool):
        return bool and 'YES' or 'NO'
    subst_keys = {
        '%JSONCPP_VERSION%': version,
        '%DOC_TOPDIR%': '',
        '%TOPDIR%': top_dir,
        '%HTML_OUTPUT%': os.path.join('..', output_dir, html_output_dirname),
        '%HAVE_DOT%': yesno(options.with_dot),
        '%DOT_PATH%': os.path.split(options.dot_path)[0],
        '%HTML_HELP%': yesno(options.with_html_help),
        '%UML_LOOK%': yesno(options.with_uml_look),
        '%WARNING_LOG_PATH%': os.path.join('..', warning_log_path)
        }
    if os.path.isdir(output_dir):
        print('Deleting directory:', output_dir)
        shutil.rmtree(output_dir)
    if not os.path.isdir(output_dir):
        os.makedirs(output_dir)
    do_subst_in_file('doc/doxyfile', 'doc/doxyfile.in', subst_keys)
    run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent)
    if not options.silent:
        print(open(warning_log_path, 'r').read())
    index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
    print('Generated documentation can be found in:')
    print(index_path)
    if options.open:
        import webbrowser
        webbrowser.open('file://' + index_path)
    if options.make_tarball:
        print('Generating doc tarball to', tarball_path)
        tarball_sources = [
@@ -131,8 +149,8 @@ def build_doc( options, make_release=False ):
            'NEWS.txt',
            'version'
            ]
        tarball_basedir = os.path.join(output_dir, html_output_dirname)
        tarball.make_tarball(tarball_path, tarball_sources, tarball_basedir, html_output_dirname)
    return tarball_path, html_output_dirname

def main():
@@ -163,7 +181,7 @@ def main():
        help="""Hides doxygen output""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    build_doc(options)

if __name__ == '__main__':
    main()

View File

@ -34,57 +34,57 @@ SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
SOURCEFORGE_PROJECT = 'jsoncpp' SOURCEFORGE_PROJECT = 'jsoncpp'
def set_version( version ): def set_version(version):
with open('version','wb') as f: with open('version','wb') as f:
f.write( version.strip() ) f.write(version.strip())
def rmdir_if_exist( dir_path ): def rmdir_if_exist(dir_path):
if os.path.isdir( dir_path ): if os.path.isdir(dir_path):
shutil.rmtree( dir_path ) shutil.rmtree(dir_path)
class SVNError(Exception): class SVNError(Exception):
pass pass
def svn_command( command, *args ): def svn_command(command, *args):
cmd = ['svn', '--non-interactive', command] + list(args) cmd = ['svn', '--non-interactive', command] + list(args)
print('Running:', ' '.join( cmd )) print('Running:', ' '.join(cmd))
process = subprocess.Popen( cmd, process = subprocess.Popen(cmd,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT ) stderr=subprocess.STDOUT)
stdout = process.communicate()[0] stdout = process.communicate()[0]
if process.returncode: if process.returncode:
error = SVNError( 'SVN command failed:\n' + stdout ) error = SVNError('SVN command failed:\n' + stdout)
error.returncode = process.returncode error.returncode = process.returncode
raise error raise error
return stdout return stdout
def check_no_pending_commit(): def check_no_pending_commit():
"""Checks that there is no pending commit in the sandbox.""" """Checks that there is no pending commit in the sandbox."""
stdout = svn_command( 'status', '--xml' ) stdout = svn_command('status', '--xml')
etree = ElementTree.fromstring( stdout ) etree = ElementTree.fromstring(stdout)
msg = [] msg = []
for entry in etree.getiterator( 'entry' ): for entry in etree.getiterator('entry'):
path = entry.get('path') path = entry.get('path')
status = entry.find('wc-status').get('item') status = entry.find('wc-status').get('item')
if status != 'unversioned' and path != 'version': if status != 'unversioned' and path != 'version':
msg.append( 'File "%s" has pending change (status="%s")' % (path, status) ) msg.append('File "%s" has pending change (status="%s")' % (path, status))
if msg: if msg:
msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' ) msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!')
return '\n'.join( msg ) return '\n'.join(msg)
def svn_join_url( base_url, suffix ): def svn_join_url(base_url, suffix):
if not base_url.endswith('/'): if not base_url.endswith('/'):
base_url += '/' base_url += '/'
if suffix.startswith('/'): if suffix.startswith('/'):
suffix = suffix[1:] suffix = suffix[1:]
return base_url + suffix return base_url + suffix
def svn_check_if_tag_exist( tag_url ): def svn_check_if_tag_exist(tag_url):
"""Checks if a tag exist. """Checks if a tag exist.
Returns: True if the tag exist, False otherwise. Returns: True if the tag exist, False otherwise.
""" """
try: try:
list_stdout = svn_command( 'list', tag_url ) list_stdout = svn_command('list', tag_url)
except SVNError as e: except SVNError as e:
if e.returncode != 1 or not str(e).find('tag_url'): if e.returncode != 1 or not str(e).find('tag_url'):
raise e raise e
@ -92,82 +92,82 @@ def svn_check_if_tag_exist( tag_url ):
return False return False
return True return True
def svn_commit( message ): def svn_commit(message):
"""Commit the sandbox, providing the specified comment. """Commit the sandbox, providing the specified comment.
""" """
svn_command( 'ci', '-m', message ) svn_command('ci', '-m', message)
def svn_tag_sandbox( tag_url, message ): def svn_tag_sandbox(tag_url, message):
"""Makes a tag based on the sandbox revisions. """Makes a tag based on the sandbox revisions.
""" """
svn_command( 'copy', '-m', message, '.', tag_url ) svn_command('copy', '-m', message, '.', tag_url)
def svn_remove_tag( tag_url, message ): def svn_remove_tag(tag_url, message):
"""Removes an existing tag. """Removes an existing tag.
""" """
svn_command( 'delete', '-m', message, tag_url ) svn_command('delete', '-m', message, tag_url)
def svn_export( tag_url, export_dir ): def svn_export(tag_url, export_dir):
"""Exports the tag_url revision to export_dir. """Exports the tag_url revision to export_dir.
The target directory, including its parents, is created if it does not exist. The target directory, including its parents, is created if it does not exist.
If the directory export_dir exists, it is deleted before the export proceeds. If the directory export_dir exists, it is deleted before the export proceeds.
""" """
rmdir_if_exist( export_dir ) rmdir_if_exist(export_dir)
svn_command( 'export', tag_url, export_dir ) svn_command('export', tag_url, export_dir)
def fix_sources_eol( dist_dir ): def fix_sources_eol(dist_dir):
"""Set file EOL for tarball distribution. """Set file EOL for tarball distribution.
""" """
print('Preparing exported source file EOL for distribution...') print('Preparing exported source file EOL for distribution...')
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
win_sources = antglob.glob( dist_dir, win_sources = antglob.glob(dist_dir,
includes = '**/*.sln **/*.vcproj', includes = '**/*.sln **/*.vcproj',
prune_dirs = prune_dirs ) prune_dirs = prune_dirs)
unix_sources = antglob.glob( dist_dir, unix_sources = antglob.glob(dist_dir,
includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
sconscript *.json *.expected AUTHORS LICENSE''', sconscript *.json *.expected AUTHORS LICENSE''',
excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
prune_dirs = prune_dirs ) prune_dirs = prune_dirs)
for path in win_sources: for path in win_sources:
fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n')
for path in unix_sources: for path in unix_sources:
fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n')
def download( url, target_path ): def download(url, target_path):
"""Download file represented by url to target_path. """Download file represented by url to target_path.
""" """
f = urllib2.urlopen( url ) f = urllib2.urlopen(url)
try: try:
data = f.read() data = f.read()
finally: finally:
f.close() f.close()
fout = open( target_path, 'wb' ) fout = open(target_path, 'wb')
try: try:
fout.write( data ) fout.write(data)
finally: finally:
fout.close() fout.close()
def check_compile( distcheck_top_dir, platform ): def check_compile(distcheck_top_dir, platform):
cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
print('Running:', ' '.join( cmd )) print('Running:', ' '.join(cmd))
log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform ) log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform)
flog = open( log_path, 'wb' ) flog = open(log_path, 'wb')
try: try:
process = subprocess.Popen( cmd, process = subprocess.Popen(cmd,
stdout=flog, stdout=flog,
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
cwd=distcheck_top_dir ) cwd=distcheck_top_dir)
stdout = process.communicate()[0] stdout = process.communicate()[0]
status = (process.returncode == 0) status = (process.returncode == 0)
finally: finally:
flog.close() flog.close()
return (status, log_path) return (status, log_path)
def write_tempfile( content, **kwargs ): def write_tempfile(content, **kwargs):
fd, path = tempfile.mkstemp( **kwargs ) fd, path = tempfile.mkstemp(**kwargs)
f = os.fdopen( fd, 'wt' ) f = os.fdopen(fd, 'wt')
try: try:
f.write( content ) f.write(content)
finally: finally:
f.close() f.close()
return path return path
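write_tempfile returns the path of a temporary file that holds the given content; callers are expected to delete it when done. A short sketch of typical use, assuming the function above (not part of this commit):

    import os
    path = write_tempfile('cd htdocs\ndir\nexit\n', suffix='.sftp', text=True)
    try:
        print(open(path).read())   # prints the three-line sftp batch
    finally:
        os.remove(path)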
@@ -175,34 +175,34 @@ def write_tempfile( content, **kwargs ):
class SFTPError(Exception): class SFTPError(Exception):
pass pass
def run_sftp_batch( userhost, sftp, batch, retry=0 ): def run_sftp_batch(userhost, sftp, batch, retry=0):
path = write_tempfile( batch, suffix='.sftp', text=True ) path = write_tempfile(batch, suffix='.sftp', text=True)
# psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
error = None error = None
for retry_index in range(0, max(1,retry)): for retry_index in range(0, max(1,retry)):
heading = retry_index == 0 and 'Running:' or 'Retrying:' heading = retry_index == 0 and 'Running:' or 'Retrying:'
print(heading, ' '.join( cmd )) print(heading, ' '.join(cmd))
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout = process.communicate()[0] stdout = process.communicate()[0]
if process.returncode != 0: if process.returncode != 0:
error = SFTPError( 'SFTP batch failed:\n' + stdout ) error = SFTPError('SFTP batch failed:\n' + stdout)
else: else:
break break
if error: if error:
raise error raise error
return stdout return stdout
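run_sftp_batch writes the batch text to a temporary .sftp file and re-runs the whole batch up to max(1, retry) attempts in total, raising SFTPError on failure. A hedged call sketch with a hypothetical user and batch (not part of this commit):

    out = run_sftp_batch('someuser,jsoncpp@web.sourceforge.net', 'psftp',
                         'cd htdocs\ndir\nexit\n', retry=3)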
def sourceforge_web_synchro( sourceforge_project, doc_dir, def sourceforge_web_synchro(sourceforge_project, doc_dir,
user=None, sftp='sftp' ): user=None, sftp='sftp'):
"""Notes: does not synchronize sub-directory of doc-dir. """Notes: does not synchronize sub-directory of doc-dir.
""" """
userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
stdout = run_sftp_batch( userhost, sftp, """ stdout = run_sftp_batch(userhost, sftp, """
cd htdocs cd htdocs
dir dir
exit exit
""" ) """)
existing_paths = set() existing_paths = set()
collect = 0 collect = 0
for line in stdout.split('\n'): for line in stdout.split('\n'):
@@ -216,15 +216,15 @@ exit
elif collect == 2: elif collect == 2:
path = line.strip().split()[-1:] path = line.strip().split()[-1:]
if path and path[0] not in ('.', '..'): if path and path[0] not in ('.', '..'):
existing_paths.add( path[0] ) existing_paths.add(path[0])
upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] ) upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
paths_to_remove = existing_paths - upload_paths paths_to_remove = existing_paths - upload_paths
if paths_to_remove: if paths_to_remove:
print('Removing the following files from the web site:') print('Removing the following files from the web site:')
print('\n'.join( paths_to_remove )) print('\n'.join(paths_to_remove))
stdout = run_sftp_batch( userhost, sftp, """cd htdocs stdout = run_sftp_batch(userhost, sftp, """cd htdocs
rm %s rm %s
exit""" % ' '.join(paths_to_remove) ) exit""" % ' '.join(paths_to_remove))
print('Uploading %d files:' % len(upload_paths)) print('Uploading %d files:' % len(upload_paths))
batch_size = 10 batch_size = 10
upload_paths = list(upload_paths) upload_paths = list(upload_paths)
@@ -235,17 +235,17 @@ exit""" % ' '.join(paths_to_remove) )
remaining_files = len(upload_paths) - index remaining_files = len(upload_paths) - index
remaining_sec = file_per_sec * remaining_files remaining_sec = file_per_sec * remaining_files
print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)) print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
run_sftp_batch( userhost, sftp, """cd htdocs run_sftp_batch(userhost, sftp, """cd htdocs
lcd %s lcd %s
mput %s mput %s
exit""" % (doc_dir, ' '.join(paths) ), retry=3 ) exit""" % (doc_dir, ' '.join(paths)), retry=3)
def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ): def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'):
userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
run_sftp_batch( userhost, sftp, """ run_sftp_batch(userhost, sftp, """
mput %s mput %s
exit exit
""" % (' '.join(paths),) ) """ % (' '.join(paths),))
def main(): def main():
@@ -286,12 +286,12 @@ Warning: --force should only be used when developing/testing the release script
options, args = parser.parse_args() options, args = parser.parse_args()
if len(args) != 2: if len(args) != 2:
parser.error( 'release_version missing on command-line.' ) parser.error('release_version missing on command-line.')
release_version = args[0] release_version = args[0]
next_version = args[1] next_version = args[1]
if not options.platforms and not options.no_test: if not options.platforms and not options.no_test:
parser.error( 'You must specify either --platform or --no-test option.' ) parser.error('You must specify either --platform or --no-test option.')
if options.ignore_pending_commit: if options.ignore_pending_commit:
msg = '' msg = ''
@@ -299,86 +299,86 @@ Warning: --force should only be used when developing/testing the release script
msg = check_no_pending_commit() msg = check_no_pending_commit()
if not msg: if not msg:
print('Setting version to', release_version) print('Setting version to', release_version)
set_version( release_version ) set_version(release_version)
svn_commit( 'Release ' + release_version ) svn_commit('Release ' + release_version)
tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) tag_url = svn_join_url(SVN_TAG_ROOT, release_version)
if svn_check_if_tag_exist( tag_url ): if svn_check_if_tag_exist(tag_url):
if options.retag_release: if options.retag_release:
svn_remove_tag( tag_url, 'Overwriting previous tag' ) svn_remove_tag(tag_url, 'Overwriting previous tag')
else: else:
print('Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url) print('Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url)
sys.exit( 1 ) sys.exit(1)
svn_tag_sandbox( tag_url, 'Release ' + release_version ) svn_tag_sandbox(tag_url, 'Release ' + release_version)
print('Generating doxygen documentation...') print('Generating doxygen documentation...')
## doc_dirname = r'jsoncpp-api-html-0.5.0' ## doc_dirname = r'jsoncpp-api-html-0.5.0'
## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' ## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True ) doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True)
doc_distcheck_dir = 'dist/doccheck' doc_distcheck_dir = 'dist/doccheck'
tarball.decompress( doc_tarball_path, doc_distcheck_dir ) tarball.decompress(doc_tarball_path, doc_distcheck_dir)
doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname ) doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname)
export_dir = 'dist/export' export_dir = 'dist/export'
svn_export( tag_url, export_dir ) svn_export(tag_url, export_dir)
fix_sources_eol( export_dir ) fix_sources_eol(export_dir)
source_dir = 'jsoncpp-src-' + release_version source_dir = 'jsoncpp-src-' + release_version
source_tarball_path = 'dist/%s.tar.gz' % source_dir source_tarball_path = 'dist/%s.tar.gz' % source_dir
print('Generating source tarball to', source_tarball_path) print('Generating source tarball to', source_tarball_path)
tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir ) tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir)
amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
print('Generating amalgamation source tarball to', amalgamation_tarball_path) print('Generating amalgamation source tarball to', amalgamation_tarball_path)
amalgamation_dir = 'dist/amalgamation' amalgamation_dir = 'dist/amalgamation'
amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' ) amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h')
amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir], tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir],
amalgamation_dir, prefix_dir=amalgamation_source_dir ) amalgamation_dir, prefix_dir=amalgamation_source_dir)
# Decompress source tarball, download and install scons-local # Decompress source tarball, download and install scons-local
distcheck_dir = 'dist/distcheck' distcheck_dir = 'dist/distcheck'
distcheck_top_dir = distcheck_dir + '/' + source_dir distcheck_top_dir = distcheck_dir + '/' + source_dir
print('Decompressing source tarball to', distcheck_dir) print('Decompressing source tarball to', distcheck_dir)
rmdir_if_exist( distcheck_dir ) rmdir_if_exist(distcheck_dir)
tarball.decompress( source_tarball_path, distcheck_dir ) tarball.decompress(source_tarball_path, distcheck_dir)
scons_local_path = 'dist/scons-local.tar.gz' scons_local_path = 'dist/scons-local.tar.gz'
print('Downloading scons-local to', scons_local_path) print('Downloading scons-local to', scons_local_path)
download( SCONS_LOCAL_URL, scons_local_path ) download(SCONS_LOCAL_URL, scons_local_path)
print('Decompressing scons-local to', distcheck_top_dir) print('Decompressing scons-local to', distcheck_top_dir)
tarball.decompress( scons_local_path, distcheck_top_dir ) tarball.decompress(scons_local_path, distcheck_top_dir)
# Run compilation # Run compilation
print('Compiling decompressed tarball') print('Compiling decompressed tarball')
all_build_status = True all_build_status = True
for platform in options.platforms.split(','): for platform in options.platforms.split(','):
print('Testing platform:', platform) print('Testing platform:', platform)
build_status, log_path = check_compile( distcheck_top_dir, platform ) build_status, log_path = check_compile(distcheck_top_dir, platform)
print('see build log:', log_path) print('see build log:', log_path)
print(build_status and '=> ok' or '=> FAILED') print(build_status and '=> ok' or '=> FAILED')
all_build_status = all_build_status and build_status all_build_status = all_build_status and build_status
if not build_status: if not build_status:
print('Testing failed on at least one platform, aborting...') print('Testing failed on at least one platform, aborting...')
svn_remove_tag( tag_url, 'Removing tag due to failed testing' ) svn_remove_tag(tag_url, 'Removing tag due to failed testing')
sys.exit(1) sys.exit(1)
if options.user: if options.user:
if not options.no_web: if not options.no_web:
print('Uploading documentation using user', options.user) print('Uploading documentation using user', options.user)
sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp ) sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp)
print('Completed documentation upload') print('Completed documentation upload')
print('Uploading source and documentation tarballs for release using user', options.user) print('Uploading source and documentation tarballs for release using user', options.user)
sourceforge_release_tarball( SOURCEFORGE_PROJECT, sourceforge_release_tarball(SOURCEFORGE_PROJECT,
[source_tarball_path, doc_tarball_path], [source_tarball_path, doc_tarball_path],
user=options.user, sftp=options.sftp ) user=options.user, sftp=options.sftp)
print('Source and doc release tarballs uploaded') print('Source and doc release tarballs uploaded')
else: else:
print('No upload user specified. Web site and download tarball were not uploaded.') print('No upload user specified. Web site and download tarball were not uploaded.')
print('Tarball can be found at:', doc_tarball_path) print('Tarball can be found at:', doc_tarball_path)
# Set next version number and commit # Set next version number and commit
set_version( next_version ) set_version(next_version)
svn_commit( 'Released ' + release_version ) svn_commit('Released ' + release_version)
else: else:
sys.stderr.write( msg + '\n' ) sys.stderr.write(msg + '\n')
if __name__ == '__main__': if __name__ == '__main__':
main() main()
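End to end, main() sets the version, commits, tags, builds the doxygen docs, exports the tag, builds the source and amalgamation tarballs, runs a scons check per platform, and optionally uploads to SourceForge. A hypothetical invocation of this release script (script name, platform names, and flag spelling are assumptions, not shown in this diff):

    python makerelease.py --platform=msvc71,linux-gcc 0.6.0 0.7.0-dev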

View File

@@ -1,9 +1,9 @@
import fnmatch import fnmatch
import os import os
def generate( env ): def generate(env):
def Glob( env, includes = None, excludes = None, dir = '.' ): def Glob(env, includes = None, excludes = None, dir = '.'):
"""Adds Glob( includes = Split( '*' ), excludes = None, dir = '.') """Adds Glob(includes = Split('*'), excludes = None, dir = '.')
helper function to environment. helper function to environment.
Glob both the file-system files. Glob both the file-system files.
@@ -12,36 +12,36 @@ def generate( env ):
excludes: list of file name patterns excluded from the return list. excludes: list of file name patterns excluded from the return list.
Example: Example:
sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" ) sources = env.Glob(("*.cpp", '*.h'), "~*.cpp", "#src")
""" """
def filterFilename(path): def filterFilename(path):
abs_path = os.path.join( dir, path ) abs_path = os.path.join(dir, path)
if not os.path.isfile(abs_path): if not os.path.isfile(abs_path):
return 0 return 0
fn = os.path.basename(path) fn = os.path.basename(path)
match = 0 match = 0
for include in includes: for include in includes:
if fnmatch.fnmatchcase( fn, include ): if fnmatch.fnmatchcase(fn, include):
match = 1 match = 1
break break
if match == 1 and not excludes is None: if match == 1 and not excludes is None:
for exclude in excludes: for exclude in excludes:
if fnmatch.fnmatchcase( fn, exclude ): if fnmatch.fnmatchcase(fn, exclude):
match = 0 match = 0
break break
return match return match
if includes is None: if includes is None:
includes = ('*',) includes = ('*',)
elif type(includes) in ( type(''), type(u'') ): elif type(includes) in (type(''), type(u'')):
includes = (includes,) includes = (includes,)
if type(excludes) in ( type(''), type(u'') ): if type(excludes) in (type(''), type(u'')):
excludes = (excludes,) excludes = (excludes,)
dir = env.Dir(dir).abspath dir = env.Dir(dir).abspath
paths = os.listdir( dir ) paths = os.listdir(dir)
def makeAbsFileNode( path ): def makeAbsFileNode(path):
return env.File( os.path.join( dir, path ) ) return env.File(os.path.join(dir, path))
nodes = filter( filterFilename, paths ) nodes = filter(filterFilename, paths)
return map( makeAbsFileNode, nodes ) return map(makeAbsFileNode, nodes)
from SCons.Script import Environment from SCons.Script import Environment
Environment.Glob = Glob Environment.Glob = Glob
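The tool above monkey-patches SCons' Environment with a Glob helper, so an SConscript can collect sources by pattern; a hedged sketch mirroring the docstring example (patterns and directory are hypothetical):

    # Returns File nodes for *.cpp/*.h under #src, excluding ~*.cpp.
    sources = env.Glob(includes=('*.cpp', '*.h'), excludes='~*.cpp', dir='#src')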

View File

@@ -47,7 +47,7 @@ import targz
## elif token == "=": ## elif token == "=":
## data[key] = list() ## data[key] = list()
## else: ## else:
## append_data( data, key, new_data, token ) ## append_data(data, key, new_data, token)
## new_data = True ## new_data = True
## ##
## last_token = token ## last_token = token
@@ -55,7 +55,7 @@ import targz
## ##
## if last_token == '\\' and token != '\n': ## if last_token == '\\' and token != '\n':
## new_data = False ## new_data = False
## append_data( data, key, new_data, '\\' ) ## append_data(data, key, new_data, '\\')
## ##
## # compress lists of len 1 into single strings ## # compress lists of len 1 into single strings
## for (k, v) in data.items(): ## for (k, v) in data.items():
@@ -116,7 +116,7 @@ import targz
## else: ## else:
## for pattern in file_patterns: ## for pattern in file_patterns:
## sources.extend(glob.glob("/".join([node, pattern]))) ## sources.extend(glob.glob("/".join([node, pattern])))
## sources = map( lambda path: env.File(path), sources ) ## sources = map(lambda path: env.File(path), sources)
## return sources ## return sources
## ##
## ##
@@ -143,7 +143,7 @@ def srcDistEmitter(source, target, env):
## # add our output locations ## # add our output locations
## for (k, v) in output_formats.items(): ## for (k, v) in output_formats.items():
## if data.get("GENERATE_" + k, v[0]) == "YES": ## if data.get("GENERATE_" + k, v[0]) == "YES":
## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) ) ## targets.append(env.Dir(os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))))
## ##
## # don't clobber targets ## # don't clobber targets
## for node in targets: ## for node in targets:
@@ -161,14 +161,13 @@ def generate(env):
Add builders and construction variables for the Add builders and construction variables for the
SrcDist tool. SrcDist tool.
""" """
## doxyfile_scanner = env.Scanner( ## doxyfile_scanner = env.Scanner(## DoxySourceScan,
## DoxySourceScan,
## "DoxySourceScan", ## "DoxySourceScan",
## scan_check = DoxySourceScanCheck, ## scan_check = DoxySourceScanCheck,
## ) ##)
if targz.exists(env): if targz.exists(env):
srcdist_builder = targz.makeBuilder( srcDistEmitter ) srcdist_builder = targz.makeBuilder(srcDistEmitter)
env['BUILDERS']['SrcDist'] = srcdist_builder env['BUILDERS']['SrcDist'] = srcdist_builder

View File

@@ -70,7 +70,7 @@ def generate(env):
return target, source return target, source
## env.Append(TOOLS = 'substinfile') # this should be automatically done by Scons ?!? ## env.Append(TOOLS = 'substinfile') # this should be automatically done by Scons ?!?
subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string ) subst_action = SCons.Action.Action(subst_in_file, subst_in_file_string)
env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter)
def exists(env): def exists(env):

View File

@@ -27,9 +27,9 @@ TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
if internal_targz: if internal_targz:
def targz(target, source, env): def targz(target, source, env):
def archive_name( path ): def archive_name(path):
path = os.path.normpath( os.path.abspath( path ) ) path = os.path.normpath(os.path.abspath(path))
common_path = os.path.commonprefix( (base_dir, path) ) common_path = os.path.commonprefix((base_dir, path))
archive_name = path[len(common_path):] archive_name = path[len(common_path):]
return archive_name return archive_name
@@ -37,23 +37,23 @@ if internal_targz:
for name in names: for name in names:
path = os.path.join(dirname, name) path = os.path.join(dirname, name)
if os.path.isfile(path): if os.path.isfile(path):
tar.add(path, archive_name(path) ) tar.add(path, archive_name(path))
compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath ) base_dir = os.path.normpath(env.get('TARGZ_BASEDIR', env.Dir('.')).abspath)
target_path = str(target[0]) target_path = str(target[0])
fileobj = gzip.GzipFile( target_path, 'wb', compression ) fileobj = gzip.GzipFile(target_path, 'wb', compression)
tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
for source in source: for source in source:
source_path = str(source) source_path = str(source)
if source.isdir(): if source.isdir():
os.path.walk(source_path, visit, tar) os.path.walk(source_path, visit, tar)
else: else:
tar.add(source_path, archive_name(source_path) ) # filename, arcname tar.add(source_path, archive_name(source_path)) # filename, arcname
tar.close() tar.close()
targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])
def makeBuilder( emitter = None ): def makeBuilder(emitter = None):
return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
source_factory = SCons.Node.FS.Entry, source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner, source_scanner = SCons.Defaults.DirScanner,

View File

@@ -4,7 +4,7 @@ import os
paths = [] paths = []
for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]:
paths += glob.glob( 'data/' + pattern ) paths += glob.glob('data/' + pattern)
for path in paths: for path in paths:
os.unlink( path ) os.unlink(path)

View File

@@ -1,10 +1,10 @@
from __future__ import print_function from __future__ import print_function
import glob import glob
import os.path import os.path
for path in glob.glob( '*.json' ): for path in glob.glob('*.json'):
text = file(path,'rt').read() text = file(path,'rt').read()
target = os.path.splitext(path)[0] + '.expected' target = os.path.splitext(path)[0] + '.expected'
if os.path.exists( target ): if os.path.exists(target):
print('skipping:', target) print('skipping:', target)
else: else:
print('creating:', target) print('creating:', target)

View File

@@ -15,50 +15,50 @@ actual_path = base_path + '.actual'
rewrite_path = base_path + '.rewrite' rewrite_path = base_path + '.rewrite'
rewrite_actual_path = base_path + '.actual-rewrite' rewrite_actual_path = base_path + '.actual-rewrite'
def valueTreeToString( fout, value, path = '.' ): def valueTreeToString(fout, value, path = '.'):
ty = type(value) ty = type(value)
if ty is types.DictType: if ty is types.DictType:
fout.write( '%s={}\n' % path ) fout.write('%s={}\n' % path)
suffix = path[-1] != '.' and '.' or '' suffix = path[-1] != '.' and '.' or ''
names = value.keys() names = value.keys()
names.sort() names.sort()
for name in names: for name in names:
valueTreeToString( fout, value[name], path + suffix + name ) valueTreeToString(fout, value[name], path + suffix + name)
elif ty is types.ListType: elif ty is types.ListType:
fout.write( '%s=[]\n' % path ) fout.write('%s=[]\n' % path)
for index, childValue in zip( xrange(0,len(value)), value ): for index, childValue in zip(xrange(0,len(value)), value):
valueTreeToString( fout, childValue, path + '[%d]' % index ) valueTreeToString(fout, childValue, path + '[%d]' % index)
elif ty is types.StringType: elif ty is types.StringType:
fout.write( '%s="%s"\n' % (path,value) ) fout.write('%s="%s"\n' % (path,value))
elif ty is types.IntType: elif ty is types.IntType:
fout.write( '%s=%d\n' % (path,value) ) fout.write('%s=%d\n' % (path,value))
elif ty is types.FloatType: elif ty is types.FloatType:
fout.write( '%s=%.16g\n' % (path,value) ) fout.write('%s=%.16g\n' % (path,value))
elif value is True: elif value is True:
fout.write( '%s=true\n' % path ) fout.write('%s=true\n' % path)
elif value is False: elif value is False:
fout.write( '%s=false\n' % path ) fout.write('%s=false\n' % path)
elif value is None: elif value is None:
fout.write( '%s=null\n' % path ) fout.write('%s=null\n' % path)
else: else:
assert False and "Unexpected value type" assert False and "Unexpected value type"
def parseAndSaveValueTree( input, actual_path ): def parseAndSaveValueTree(input, actual_path):
root = json.loads( input ) root = json.loads(input)
fout = file( actual_path, 'wt' ) fout = file(actual_path, 'wt')
valueTreeToString( fout, root ) valueTreeToString(fout, root)
fout.close() fout.close()
return root return root
def rewriteValueTree( value, rewrite_path ): def rewriteValueTree(value, rewrite_path):
rewrite = json.dumps( value ) rewrite = json.dumps(value)
#rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ?
file( rewrite_path, 'wt').write( rewrite + '\n' ) file(rewrite_path, 'wt').write(rewrite + '\n')
return rewrite return rewrite
input = file( input_path, 'rt' ).read() input = file(input_path, 'rt').read()
root = parseAndSaveValueTree( input, actual_path ) root = parseAndSaveValueTree(input, actual_path)
rewrite = rewriteValueTree( json.write( root ), rewrite_path ) rewrite = rewriteValueTree(json.write(root), rewrite_path)
rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path ) rewrite_root = parseAndSaveValueTree(rewrite, rewrite_actual_path)
sys.exit( 0 ) sys.exit(0)
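For reference, valueTreeToString flattens the parsed document into one path=value line per node. For a hypothetical input {"a": [1, true]} the .actual file would contain (illustration only, not part of this commit):

    .={}
    .a=[]
    .a[0]=1
    .a[1]=true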

View File

@@ -26,11 +26,11 @@ def getStatusOutput(cmd):
pass # python3 pass # python3
status = pipe.close() status = pipe.close()
return status, process_output return status, process_output
def compareOutputs( expected, actual, message ): def compareOutputs(expected, actual, message):
expected = expected.strip().replace('\r','').split('\n') expected = expected.strip().replace('\r','').split('\n')
actual = actual.strip().replace('\r','').split('\n') actual = actual.strip().replace('\r','').split('\n')
diff_line = 0 diff_line = 0
max_line_to_compare = min( len(expected), len(actual) ) max_line_to_compare = min(len(expected), len(actual))
for index in range(0,max_line_to_compare): for index in range(0,max_line_to_compare):
if expected[index].strip() != actual[index].strip(): if expected[index].strip() != actual[index].strip():
diff_line = index + 1 diff_line = index + 1
@@ -39,7 +39,7 @@ def compareOutputs( expected, actual, message ):
diff_line = max_line_to_compare+1 diff_line = max_line_to_compare+1
if diff_line == 0: if diff_line == 0:
return None return None
def safeGetLine( lines, index ): def safeGetLine(lines, index):
index += -1 index += -1
if index >= len(lines): if index >= len(lines):
return '' return ''
@@ -49,66 +49,65 @@ def compareOutputs( expected, actual, message ):
Actual: '%s' Actual: '%s'
""" % (message, diff_line, """ % (message, diff_line,
safeGetLine(expected,diff_line), safeGetLine(expected,diff_line),
safeGetLine(actual,diff_line) ) safeGetLine(actual,diff_line))
def safeReadFile( path ): def safeReadFile(path):
try: try:
return open( path, 'rt', encoding = 'utf-8' ).read() return open(path, 'rt', encoding = 'utf-8').read()
except IOError as e: except IOError as e:
return '<File "%s" is missing: %s>' % (path,e) return '<File "%s" is missing: %s>' % (path,e)
def runAllTests( jsontest_executable_path, input_dir = None, def runAllTests(jsontest_executable_path, input_dir = None,
use_valgrind=False, with_json_checker=False, use_valgrind=False, with_json_checker=False,
writerClass='StyledWriter'): writerClass='StyledWriter'):
if not input_dir: if not input_dir:
input_dir = os.path.join( os.getcwd(), 'data' ) input_dir = os.path.join(os.getcwd(), 'data')
tests = glob( os.path.join( input_dir, '*.json' ) ) tests = glob(os.path.join(input_dir, '*.json'))
if with_json_checker: if with_json_checker:
test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) ) test_jsonchecker = glob(os.path.join(input_dir, '../jsonchecker', '*.json'))
else: else:
test_jsonchecker = [] test_jsonchecker = []
failed_tests = [] failed_tests = []
valgrind_path = use_valgrind and VALGRIND_CMD or '' valgrind_path = use_valgrind and VALGRIND_CMD or ''
for input_path in tests + test_jsonchecker: for input_path in tests + test_jsonchecker:
expect_failure = os.path.basename( input_path ).startswith( 'fail' ) expect_failure = os.path.basename(input_path).startswith('fail')
is_json_checker_test = (input_path in test_jsonchecker) or expect_failure is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
print('TESTING:', input_path, end=' ') print('TESTING:', input_path, end=' ')
options = is_json_checker_test and '--json-checker' or '' options = is_json_checker_test and '--json-checker' or ''
options += ' --json-writer %s'%writerClass options += ' --json-writer %s'%writerClass
cmd = '%s%s %s "%s"' % ( cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options,
valgrind_path, jsontest_executable_path, options,
input_path) input_path)
status, process_output = getStatusOutput(cmd) status, process_output = getStatusOutput(cmd)
if is_json_checker_test: if is_json_checker_test:
if expect_failure: if expect_failure:
if not status: if not status:
print('FAILED') print('FAILED')
failed_tests.append( (input_path, 'Parsing should have failed:\n%s' % failed_tests.append((input_path, 'Parsing should have failed:\n%s' %
safeReadFile(input_path)) ) safeReadFile(input_path)))
else: else:
print('OK') print('OK')
else: else:
if status: if status:
print('FAILED') print('FAILED')
failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) failed_tests.append((input_path, 'Parsing failed:\n' + process_output))
else: else:
print('OK') print('OK')
else: else:
base_path = os.path.splitext(input_path)[0] base_path = os.path.splitext(input_path)[0]
actual_output = safeReadFile( base_path + '.actual' ) actual_output = safeReadFile(base_path + '.actual')
actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' ) actual_rewrite_output = safeReadFile(base_path + '.actual-rewrite')
open(base_path + '.process-output', 'wt', encoding = 'utf-8').write( process_output ) open(base_path + '.process-output', 'wt', encoding = 'utf-8').write(process_output)
if status: if status:
print('parsing failed') print('parsing failed')
failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) ) failed_tests.append((input_path, 'Parsing failed:\n' + process_output))
else: else:
expected_output_path = os.path.splitext(input_path)[0] + '.expected' expected_output_path = os.path.splitext(input_path)[0] + '.expected'
expected_output = open( expected_output_path, 'rt', encoding = 'utf-8' ).read() expected_output = open(expected_output_path, 'rt', encoding = 'utf-8').read()
detail = ( compareOutputs( expected_output, actual_output, 'input' ) detail = (compareOutputs(expected_output, actual_output, 'input')
or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) ) or compareOutputs(expected_output, actual_rewrite_output, 'rewrite'))
if detail: if detail:
print('FAILED') print('FAILED')
failed_tests.append( (input_path, detail) ) failed_tests.append((input_path, detail))
else: else:
print('OK') print('OK')
@@ -120,7 +119,7 @@ def runAllTests( jsontest_executable_path, input_dir = None,
print(failed_test[1]) print(failed_test[1])
print() print()
print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests), print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
len(failed_tests) )) len(failed_tests)))
return 1 return 1
else: else:
print('All %d tests passed.' % len(tests)) print('All %d tests passed.' % len(tests))
@@ -128,7 +127,7 @@ def runAllTests( jsontest_executable_path, input_dir = None,
def main(): def main():
from optparse import OptionParser from optparse import OptionParser
parser = OptionParser( usage="%prog [options] <path to jsontestrunner.exe> [test case directory]" ) parser = OptionParser(usage="%prog [options] <path to jsontestrunner.exe> [test case directory]")
parser.add_option("--valgrind", parser.add_option("--valgrind",
action="store_true", dest="valgrind", default=False, action="store_true", dest="valgrind", default=False,
help="run all the tests using valgrind to detect memory leaks") help="run all the tests using valgrind to detect memory leaks")
@@ -139,25 +138,25 @@ def main():
options, args = parser.parse_args() options, args = parser.parse_args()
if len(args) < 1 or len(args) > 2: if len(args) < 1 or len(args) > 2:
parser.error( 'Must provide at least the path to the jsontestrunner executable.' ) parser.error('Must provide at least the path to the jsontestrunner executable.')
sys.exit( 1 ) sys.exit(1)
jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) ) jsontest_executable_path = os.path.normpath(os.path.abspath(args[0]))
if len(args) > 1: if len(args) > 1:
input_path = os.path.normpath( os.path.abspath( args[1] ) ) input_path = os.path.normpath(os.path.abspath(args[1]))
else: else:
input_path = None input_path = None
status = runAllTests( jsontest_executable_path, input_path, status = runAllTests(jsontest_executable_path, input_path,
use_valgrind=options.valgrind, use_valgrind=options.valgrind,
with_json_checker=options.with_json_checker, with_json_checker=options.with_json_checker,
writerClass='StyledWriter') writerClass='StyledWriter')
if status: if status:
sys.exit( status ) sys.exit(status)
status = runAllTests( jsontest_executable_path, input_path, status = runAllTests(jsontest_executable_path, input_path,
use_valgrind=options.valgrind, use_valgrind=options.valgrind,
with_json_checker=options.with_json_checker, with_json_checker=options.with_json_checker,
writerClass='StyledStreamWriter') writerClass='StyledStreamWriter')
sys.exit( status ) sys.exit(status)
if __name__ == '__main__': if __name__ == '__main__':
main() main()
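The driver above runs every data/*.json case twice, once with StyledWriter and once with StyledStreamWriter, and exits non-zero on the first failing pass. A hypothetical invocation (script name and paths are assumptions, not shown in this diff):

    python runjsontests.py ../build/debug/jsontestrunner_exe data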

View File

@@ -11,18 +11,18 @@ import optparse
VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
class TestProxy(object): class TestProxy(object):
def __init__( self, test_exe_path, use_valgrind=False ): def __init__(self, test_exe_path, use_valgrind=False):
self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) ) self.test_exe_path = os.path.normpath(os.path.abspath(test_exe_path))
self.use_valgrind = use_valgrind self.use_valgrind = use_valgrind
def run( self, options ): def run(self, options):
if self.use_valgrind: if self.use_valgrind:
cmd = VALGRIND_CMD.split() cmd = VALGRIND_CMD.split()
else: else:
cmd = [] cmd = []
cmd.extend( [self.test_exe_path, '--test-auto'] + options ) cmd.extend([self.test_exe_path, '--test-auto'] + options)
try: try:
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
except: except:
print(cmd) print(cmd)
raise raise
@@ -31,9 +31,9 @@ class TestProxy(object):
return False, stdout return False, stdout
return True, stdout return True, stdout
def runAllTests( exe_path, use_valgrind=False ): def runAllTests(exe_path, use_valgrind=False):
test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind ) test_proxy = TestProxy(exe_path, use_valgrind=use_valgrind)
status, test_names = test_proxy.run( ['--list-tests'] ) status, test_names = test_proxy.run(['--list-tests'])
if not status: if not status:
print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr) print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr)
return 1 return 1
@@ -41,11 +41,11 @@ def runAllTests( exe_path, use_valgrind=False ):
failures = [] failures = []
for name in test_names: for name in test_names:
print('TESTING %s:' % name, end=' ') print('TESTING %s:' % name, end=' ')
succeed, result = test_proxy.run( ['--test', name] ) succeed, result = test_proxy.run(['--test', name])
if succeed: if succeed:
print('OK') print('OK')
else: else:
failures.append( (name, result) ) failures.append((name, result))
print('FAILED') print('FAILED')
failed_count = len(failures) failed_count = len(failures)
pass_count = len(test_names) - failed_count pass_count = len(test_names) - failed_count
@@ -53,8 +53,7 @@ def runAllTests( exe_path, use_valgrind=False ):
print() print()
for name, result in failures: for name, result in failures:
print(result) print(result)
print('%d/%d tests passed (%d failure(s))' % ( print('%d/%d tests passed (%d failure(s))' % ( pass_count, len(test_names), failed_count))
pass_count, len(test_names), failed_count))
return 1 return 1
else: else:
print('All %d tests passed' % len(test_names)) print('All %d tests passed' % len(test_names))
@@ -62,7 +61,7 @@ def runAllTests( exe_path, use_valgrind=False ):
def main(): def main():
from optparse import OptionParser from optparse import OptionParser
parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" ) parser = OptionParser(usage="%prog [options] <path to test_lib_json.exe>")
parser.add_option("--valgrind", parser.add_option("--valgrind",
action="store_true", dest="valgrind", default=False, action="store_true", dest="valgrind", default=False,
help="run all the tests using valgrind to detect memory leaks") help="run all the tests using valgrind to detect memory leaks")
@@ -70,11 +69,11 @@ def main():
options, args = parser.parse_args() options, args = parser.parse_args()
if len(args) != 1: if len(args) != 1:
parser.error( 'Must provide at least the path to the test_lib_json executable.' ) parser.error('Must provide at least the path to the test_lib_json executable.')
sys.exit( 1 ) sys.exit(1)
exit_code = runAllTests( args[0], use_valgrind=options.valgrind ) exit_code = runAllTests(args[0], use_valgrind=options.valgrind)
sys.exit( exit_code ) sys.exit(exit_code)
if __name__ == '__main__': if __name__ == '__main__':
main() main()