Switched CRLF to LF in repo, and added svn:eol-style native. I might have missed a few files though. Just committing what I have so far.

This commit is contained in:
Christopher Dunn 2011-06-21 21:18:49 +00:00
parent 139da63aef
commit dc0f736f59
8 changed files with 575 additions and 575 deletions

View File

@ -1,147 +1,147 @@
"""Amalgate json-cpp library sources into a single source and header file. """Amalgate json-cpp library sources into a single source and header file.
Requires Python 2.6 Requires Python 2.6
Example of invocation (must be invoked from json-cpp top directory): Example of invocation (must be invoked from json-cpp top directory):
python amalgate.py python amalgate.py
""" """
import os import os
import os.path import os.path
import sys import sys
class AmalgamationFile:
    """Accumulates text blocks and source-file contents into a single
    amalgamated output (used to build the one-file jsoncpp distribution).
    """

    def __init__( self, top_dir ):
        # top_dir: directory against which relative input paths are resolved.
        self.top_dir = top_dir
        # Ordered list of text chunks; joined by get_value().
        self.blocks = []

    def add_text( self, text ):
        """Append a text block, guaranteeing it ends with a newline."""
        if not text.endswith( '\n' ):
            text += '\n'
        self.blocks.append( text )

    def add_file( self, relative_input_path, wrap_in_comment=False ):
        """Append the content of a source file, framed by marker comments.

        Parameters:
            relative_input_path: input path relative to top_dir.
            wrap_in_comment: if True, wrap the content in /* ... */
                (used for the LICENSE file).
        """
        def add_marker( prefix ):
            self.add_text( '' )
            self.add_text( '// ' + '/'*70 )
            self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) )
            self.add_text( '// ' + '/'*70 )
            self.add_text( '' )
        add_marker( 'Beginning' )
        # "with" closes the handle even if read() raises (the original
        # leaked the file object on error).
        with open( os.path.join( self.top_dir, relative_input_path ), 'rt' ) as f:
            content = f.read()
        if wrap_in_comment:
            content = '/*\n' + content + '\n*/'
        self.add_text( content )
        add_marker( 'End' )
        self.add_text( '\n\n\n\n' )

    def get_value( self ):
        """Return the amalgamated text with all EOLs normalized to LF."""
        return ''.join( self.blocks ).replace('\r\n','\n')

    def write_to( self, output_path ):
        """Write the amalgamated text to output_path, creating parent dirs.

        Binary mode preserves the LF line endings produced by get_value()
        on every platform; the explicit encode makes the binary write work
        on Python 3 as well (a no-op for ASCII text on Python 2).
        """
        output_dir = os.path.dirname( output_path )
        if output_dir and not os.path.isdir( output_dir ):
            os.makedirs( output_dir )
        with open( output_path, 'wb' ) as f:
            f.write( self.get_value().encode('utf-8') )
def amalgamate_source( source_top_dir=None,
                       target_source_path=None,
                       header_include_path=None ):
    """Produces amalgamated source.

    Parameters:
        source_top_dir: top-directory
        target_source_path: output .cpp path
        header_include_path: generated header path relative to target_source_path.
    Returns None on success; the caller treats any returned string as an
    error message.
    """
    print( 'Amalgating header...' )
    header = AmalgamationFile( source_top_dir )
    header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' )
    header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
    header.add_file( 'LICENSE', wrap_in_comment=True )
    header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' )
    header.add_text( '# define JSON_AMALGATED_H_INCLUDED' )
    header.add_text( '/// If defined, indicates that the source file is amalgated' )
    header.add_text( '/// to prevent private header inclusion.' )
    header.add_text( '#define JSON_IS_AMALGATED' )
    header.add_file( 'include/json/config.h' )
    header.add_file( 'include/json/forwards.h' )
    header.add_file( 'include/json/features.h' )
    header.add_file( 'include/json/value.h' )
    header.add_file( 'include/json/reader.h' )
    header.add_file( 'include/json/writer.h' )
    header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' )
    target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
    print( 'Writing amalgated header to %r' % target_header_path )
    header.write_to( target_header_path )

    # Forward-declaration header gets a '-forwards' suffix before the ext.
    base, ext = os.path.splitext( header_include_path )
    forward_header_include_path = base + '-forwards' + ext
    print( 'Amalgating forward header...' )
    header = AmalgamationFile( source_top_dir )
    header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' )
    header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path )
    header.add_text( '/// This header provides forward declaration for all JsonCpp types.' )
    header.add_file( 'LICENSE', wrap_in_comment=True )
    header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
    header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' )
    header.add_text( '/// If defined, indicates that the source file is amalgated' )
    header.add_text( '/// to prevent private header inclusion.' )
    header.add_text( '#define JSON_IS_AMALGATED' )
    header.add_file( 'include/json/config.h' )
    header.add_file( 'include/json/forwards.h' )
    header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
    target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                               forward_header_include_path )
    print( 'Writing amalgated forward header to %r' % target_forward_header_path )
    header.write_to( target_forward_header_path )

    print( 'Amalgating source...' )
    source = AmalgamationFile( source_top_dir )
    source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' )
    source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
    source.add_file( 'LICENSE', wrap_in_comment=True )
    source.add_text( '' )
    source.add_text( '#include <%s>' % header_include_path )
    source.add_text( '' )
    # Bug fix: the original used backslash separators ('src/lib_json\\...'),
    # which are not valid path separators on POSIX systems.
    source.add_file( 'src/lib_json/json_tool.h' )
    source.add_file( 'src/lib_json/json_reader.cpp' )
    source.add_file( 'src/lib_json/json_batchallocator.h' )
    source.add_file( 'src/lib_json/json_valueiterator.inl' )
    source.add_file( 'src/lib_json/json_value.cpp' )
    source.add_file( 'src/lib_json/json_writer.cpp' )
    print( 'Writing amalgated source to %r' % target_source_path )
    source.write_to( target_source_path )
def main():
    """Command-line entry point: parse options and run the amalgamation."""
    usage = """%prog [options]
Generate a single amalgated source and header file from the sources.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp',
        help="""Output .cpp source path. [Default: %default]""")
    parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h',
        help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
    parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(),
        help="""Source top-directory. [Default: %default]""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    # amalgamate_source() returns None on success; a string is an error.
    msg = amalgamate_source( source_top_dir=options.top_dir,
                             target_source_path=options.target_source_path,
                             header_include_path=options.header_include_path )
    if msg:
        sys.stderr.write( msg + '\n' )
        sys.exit( 1 )
    else:
        print( 'Source succesfully amalagated' )

if __name__ == '__main__':
    main()

View File

@ -55,20 +55,20 @@ ALL = DIR | FILE | LINKS
# Tokenizer for ant-style patterns: matches '/**/', '**/', '/**', '*', '/',
# or a literal fragment (a run of characters containing no '*' or '/').
_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )
def ant_pattern_to_re( ant_pattern ): def ant_pattern_to_re( ant_pattern ):
"""Generates a regular expression from the ant pattern. """Generates a regular expression from the ant pattern.
Matching convention: Matching convention:
**/a: match 'a', 'dir/a', 'dir1/dir2/a' **/a: match 'a', 'dir/a', 'dir1/dir2/a'
a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b' a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
*.py: match 'script.py' but not 'a/script.py' *.py: match 'script.py' but not 'a/script.py'
""" """
rex = ['^'] rex = ['^']
next_pos = 0 next_pos = 0
sep_rex = r'(?:/|%s)' % re.escape( os.path.sep ) sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
## print 'Converting', ant_pattern ## print 'Converting', ant_pattern
for match in _ANT_RE.finditer( ant_pattern ): for match in _ANT_RE.finditer( ant_pattern ):
## print 'Matched', match.group() ## print 'Matched', match.group()
## print match.start(0), next_pos ## print match.start(0), next_pos
if match.start(0) != next_pos: if match.start(0) != next_pos:
raise ValueError( "Invalid ant pattern" ) raise ValueError( "Invalid ant pattern" )
if match.group(1): # /**/ if match.group(1): # /**/
@ -83,14 +83,14 @@ def ant_pattern_to_re( ant_pattern ):
rex.append( sep_rex ) rex.append( sep_rex )
else: # somepath else: # somepath
rex.append( re.escape(match.group(6)) ) rex.append( re.escape(match.group(6)) )
next_pos = match.end() next_pos = match.end()
rex.append('$') rex.append('$')
return re.compile( ''.join( rex ) ) return re.compile( ''.join( rex ) )
def _as_list( l ): def _as_list( l ):
if isinstance(l, basestring): if isinstance(l, basestring):
return l.split() return l.split()
return l return l
def glob(dir_path, def glob(dir_path,
includes = '**/*', includes = '**/*',
@ -99,8 +99,8 @@ def glob(dir_path,
prune_dirs = prune_dirs, prune_dirs = prune_dirs,
max_depth = 25): max_depth = 25):
include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)] include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)] exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)] prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
dir_path = dir_path.replace('/',os.path.sep) dir_path = dir_path.replace('/',os.path.sep)
entry_type_filter = entry_type entry_type_filter = entry_type
@ -117,37 +117,37 @@ def glob(dir_path,
return True return True
return False return False
def glob_impl( root_dir_path ): def glob_impl( root_dir_path ):
child_dirs = [root_dir_path] child_dirs = [root_dir_path]
while child_dirs: while child_dirs:
dir_path = child_dirs.pop() dir_path = child_dirs.pop()
for entry in listdir( dir_path ): for entry in listdir( dir_path ):
full_path = os.path.join( dir_path, entry ) full_path = os.path.join( dir_path, entry )
## print 'Testing:', full_path, ## print 'Testing:', full_path,
is_dir = os.path.isdir( full_path ) is_dir = os.path.isdir( full_path )
if is_dir and not is_pruned_dir( entry ): # explore child directory ? if is_dir and not is_pruned_dir( entry ): # explore child directory ?
## print '===> marked for recursion', ## print '===> marked for recursion',
child_dirs.append( full_path ) child_dirs.append( full_path )
included = apply_filter( full_path, include_filter ) included = apply_filter( full_path, include_filter )
rejected = apply_filter( full_path, exclude_filter ) rejected = apply_filter( full_path, exclude_filter )
if not included or rejected: # do not include entry ? if not included or rejected: # do not include entry ?
## print '=> not included or rejected' ## print '=> not included or rejected'
continue continue
link = os.path.islink( full_path ) link = os.path.islink( full_path )
is_file = os.path.isfile( full_path ) is_file = os.path.isfile( full_path )
if not is_file and not is_dir: if not is_file and not is_dir:
## print '=> unknown entry type' ## print '=> unknown entry type'
continue continue
if link: if link:
entry_type = is_file and FILE_LINK or DIR_LINK entry_type = is_file and FILE_LINK or DIR_LINK
else: else:
entry_type = is_file and FILE or DIR entry_type = is_file and FILE or DIR
## print '=> type: %d' % entry_type, ## print '=> type: %d' % entry_type,
if (entry_type & entry_type_filter) != 0: if (entry_type & entry_type_filter) != 0:
## print ' => KEEP' ## print ' => KEEP'
yield os.path.join( dir_path, entry ) yield os.path.join( dir_path, entry )
## else: ## else:
## print ' => TYPE REJECTED' ## print ' => TYPE REJECTED'
return list( glob_impl( dir_path ) ) return list( glob_impl( dir_path ) )
@ -155,47 +155,47 @@ if __name__ == "__main__":
import unittest

class AntPatternToRETest(unittest.TestCase):
    """Checks ant_pattern_to_re() against accepted/rejected path samples."""
##    def test_conversion( self ):
##        self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )

    def test_matching( self ):
        # Each entry: (pattern, paths that must match, paths that must not).
        test_cases = [ ( 'path',
                         ['path'],
                         ['somepath', 'pathsuffix', '/path', '/path'] ),
                       ( '*.py',
                         ['source.py', 'source.ext.py', '.py'],
                         ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
                       ( '**/path',
                         ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
                         ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
                       ( 'path/**',
                         ['path/a', 'path/path/a', 'path//'],
                         ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
                       ( '/**/path',
                         ['/path', '/a/path', '/a/b/path/path', '/path/path'],
                         ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
                       ( 'a/b',
                         ['a/b'],
                         ['somea/b', 'a/bsuffix', 'a/b/c'] ),
                       ( '**/*.py',
                         ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
                         ['script.pyc', 'script.pyo', 'a.py/b'] ),
                       ( 'src/**/*.py',
                         ['src/a.py', 'src/dir/a.py'],
                         ['a/src/a.py', '/src/a.py'] ),
                     ]
        # Duplicate every case with the platform-native path separator so
        # the patterns are validated for both '/' and os.path.sep inputs.
        for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
            def local_path( paths ):
                return [ p.replace('/',os.path.sep) for p in paths ]
            test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
        for ant_pattern, accepted_matches, rejected_matches in test_cases:
            rex = ant_pattern_to_re( ant_pattern )
            print( 'ant_pattern: %s => %s' % (ant_pattern, rex.pattern) )
            for accepted_match in accepted_matches:
                print( 'Accepted?: %s' % accepted_match )
                # assertTrue replaces the deprecated assert_ alias.
                self.assertTrue( rex.match( accepted_match ) is not None )
            for rejected_match in rejected_matches:
                print( 'Rejected?: %s' % rejected_match )
                self.assertTrue( rex.match( rejected_match ) is None )

unittest.main()

View File

@ -1,63 +1,63 @@
import os.path import os.path
def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix).

    Parameters:
        path: path of the file to check/fix; must be an existing file.
        is_dry_run: if True, only report, do not rewrite the file.
        verbose: if True, print ' NEED FIX' / ' FIXED' after the path.
        eol: EOL sequence every line is normalized to.
    Returns False on I/O error opening the file, True otherwise.
    """
    import sys  # the enclosing module forgets to import sys at top level
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError as msg:  # "except E as e" works on Python 2.6+ and 3
        # Bug fix: the original formatted the *builtin* "file" into the
        # message instead of the "path" argument.
        sys.stderr.write( "%s: I/O Error: %s\n" % (path, str(msg)) )
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    # Work on bytes throughout so the rewrite is exact on Python 2 and 3.
    fixed_lines = [line.rstrip(b'\r\n') + eol.encode('ascii') for line in raw_lines]
    if raw_lines != fixed_lines:
        sys.stdout.write( '%s =>' % path )  # no newline; status text follows
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print( is_dry_run and ' NEED FIX' or ' FIXED' )
    return True
## ##
## ##
## ##
##def _do_fix( is_dry_run = True ): ##def _do_fix( is_dry_run = True ):
## from waftools import antglob ## from waftools import antglob
## python_sources = antglob.glob( '.', ## python_sources = antglob.glob( '.',
## includes = '**/*.py **/wscript **/wscript_build', ## includes = '**/*.py **/wscript **/wscript_build',
## excludes = antglob.default_excludes + './waf.py', ## excludes = antglob.default_excludes + './waf.py',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) ## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in python_sources: ## for path in python_sources:
## _fix_python_source( path, is_dry_run ) ## _fix_python_source( path, is_dry_run )
## ##
## cpp_sources = antglob.glob( '.', ## cpp_sources = antglob.glob( '.',
## includes = '**/*.cpp **/*.h **/*.inl', ## includes = '**/*.cpp **/*.h **/*.inl',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) ## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in cpp_sources: ## for path in cpp_sources:
## _fix_source_eol( path, is_dry_run ) ## _fix_source_eol( path, is_dry_run )
## ##
## ##
##def dry_fix(context): ##def dry_fix(context):
## _do_fix( is_dry_run = True ) ## _do_fix( is_dry_run = True )
## ##
##def fix(context): ##def fix(context):
## _do_fix( is_dry_run = False ) ## _do_fix( is_dry_run = False )
## ##
##def shutdown(): ##def shutdown():
## pass ## pass
## ##
##def check(context): ##def check(context):
## # Unit tests are run when "check" target is used ## # Unit tests are run when "check" target is used
## ut = UnitTest.unit_test() ## ut = UnitTest.unit_test()
## ut.change_to_testfile_dir = True ## ut.change_to_testfile_dir = True
## ut.want_to_see_test_output = True ## ut.want_to_see_test_output = True
## ut.want_to_see_test_error = True ## ut.want_to_see_test_error = True
## ut.run() ## ut.run()
## ut.print_results() ## ut.print_results()

View File

@ -1,93 +1,93 @@
"""Updates the license text in source file. """Updates the license text in source file.
""" """
# An existing license is found if the file starts with the string below, # An existing license is found if the file starts with the string below,
# and ends with the first blank line. # and ends with the first blank line.
LICENSE_BEGIN = "// Copyright " LICENSE_BEGIN = "// Copyright "
BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and // Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction. // recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
""".replace('\r\n','\n') """.replace('\r\n','\n')
def update_license( path, dry_run, show_diff ): def update_license( path, dry_run, show_diff ):
"""Update the license statement in the specified file. """Update the license statement in the specified file.
Parameters: Parameters:
path: path of the C++ source file to update. path: path of the C++ source file to update.
dry_run: if True, just print the path of the file that would be updated, dry_run: if True, just print the path of the file that would be updated,
but don't change it. but don't change it.
show_diff: if True, print the path of the file that would be modified, show_diff: if True, print the path of the file that would be modified,
as well as the change made to the file. as well as the change made to the file.
""" """
with open( path, 'rt' ) as fin: with open( path, 'rt' ) as fin:
original_text = fin.read().replace('\r\n','\n') original_text = fin.read().replace('\r\n','\n')
newline = fin.newlines and fin.newlines[0] or '\n' newline = fin.newlines and fin.newlines[0] or '\n'
if not original_text.startswith( LICENSE_BEGIN ): if not original_text.startswith( LICENSE_BEGIN ):
# No existing license found => prepend it # No existing license found => prepend it
new_text = BRIEF_LICENSE + original_text new_text = BRIEF_LICENSE + original_text
else: else:
license_end_index = original_text.index( '\n\n' ) # search first blank line license_end_index = original_text.index( '\n\n' ) # search first blank line
new_text = BRIEF_LICENSE + original_text[license_end_index+2:] new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
if original_text != new_text: if original_text != new_text:
if not dry_run: if not dry_run:
with open( path, 'wb' ) as fout: with open( path, 'wb' ) as fout:
fout.write( new_text.replace('\n', newline ) ) fout.write( new_text.replace('\n', newline ) )
print 'Updated', path print 'Updated', path
if show_diff: if show_diff:
import difflib import difflib
print '\n'.join( difflib.unified_diff( original_text.split('\n'), print '\n'.join( difflib.unified_diff( original_text.split('\n'),
new_text.split('\n') ) ) new_text.split('\n') ) )
return True return True
return False return False
def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
    """Updates license text in C++ source files found in directory source_dirs.

    Parameters:
        source_dirs: list of directory to scan for C++ sources. Directories are
            scanned recursively.
        dry_run: if True, just print the path of the file that would be updated,
            but don't change it.
        show_diff: if True, print the path of the file that would be modified,
            as well as the change made to the file.
    """
    from devtools import antglob
    # Directories that never contain project sources to update.
    skip_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    for directory in source_dirs:
        matches = antglob.glob( directory,
                                includes = '''**/*.h **/*.cpp **/*.inl''',
                                prune_dirs = skip_dirs )
        for source_path in matches:
            update_license( source_path, dry_run, show_diff )
def main():
    """Command-line entry point: update license headers in the given dirs."""
    usage = """%prog DIR [DIR2...]
Updates license text in sources of the project in source files found
in the directory specified on the command-line.
Example of call:
python devtools\licenseupdater.py include src -n --diff
=> Show change that would be made to the sources.
python devtools\licenseupdater.py include src
=> Update license statement on all sources in directories include/ and src/.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
        help="""Only show what files are updated, do not update the files""")
    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
        help="""On update, show change made to the file.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    update_license_in_source_directories( args, options.dry_run, options.show_diff )
    print( 'Done' )

if __name__ == '__main__':
    import sys
    import os.path
    # Make the parent directory importable so "from devtools import antglob"
    # resolves when the script is run directly.
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    main()

View File

@ -1,53 +1,53 @@
import os.path import os.path
import gzip import gzip
import tarfile import tarfile
# Maximum gzip compression for release tarballs.
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9

def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
    """Parameters:
    tarball_path: output path of the .tar.gz file
    sources: list of sources to include in the tarball, relative to the current directory
    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
        from path in the tarball.
    prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
        to make them child of root.
    """
    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
    def archive_name( path ):
        """Makes path relative to base_dir."""
        path = os.path.normpath( os.path.abspath( path ) )
        common_path = os.path.commonprefix( (base_dir, path) )
        # Renamed local (was "archive_name") so it no longer shadows this
        # helper function.
        relative_path = path[len(common_path):]
        if os.path.isabs( relative_path ):
            relative_path = relative_path[1:]
        return os.path.join( prefix_dir, relative_path )
    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
    # tarfile.open() is the documented API (TarFile.gzopen is internal).
    tar = tarfile.open( tarball_path, 'w:gz', compresslevel=compression )
    try:
        for source in sources:
            source_path = source
            if os.path.isdir( source ):
                # os.walk replaces os.path.walk, which was removed in
                # Python 3; the per-file behavior is unchanged.
                for dirpath, dirnames, filenames in os.walk( source_path ):
                    for name in filenames:
                        file_path = os.path.join( dirpath, name )
                        if os.path.isfile( file_path ):
                            tar.add( file_path, archive_name(file_path) )
            else:
                path_in_tar = archive_name(source_path)
                tar.add(source_path, path_in_tar ) # filename, arcname
    finally:
        tar.close()
def decompress(tarball_path, base_dir):
    """Decompress the gzipped tarball tarball_path into directory base_dir.

    Uses the documented tarfile.open() API with mode 'r:gz' instead of the
    undocumented TarFile.gzopen() class method.

    NOTE(review): extractall() on an untrusted archive can write outside
    base_dir (path traversal); presumably this script only extracts archives
    it produced itself -- confirm before reusing on external input.
    """
    tar = tarfile.open(tarball_path, 'r:gz')
    try:
        tar.extractall(base_dir)
    finally:
        tar.close()

View File

@ -1,53 +1,53 @@
import fnmatch import fnmatch
import os import os
def generate( env ):
    """SCons tool entry point: install a Glob() helper on Environment."""
    def Glob( env, includes = None, excludes = None, dir = '.' ):
        """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.')
        helper function to environment.

        Globs file-system files in *dir*.
        includes: file name patterns; a file is kept when one matches.
        excludes: file name patterns; a matching file is dropped again.
        Example:
        sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
        """
        def keep( entry ):
            # Only plain files are candidates; sub-directories are skipped.
            full_path = os.path.join( dir, entry )
            if not os.path.isfile( full_path ):
                return 0
            base = os.path.basename( entry )
            selected = 0
            for pattern in includes:
                if fnmatch.fnmatchcase( base, pattern ):
                    selected = 1
                    break
            if selected == 1 and excludes is not None:
                for pattern in excludes:
                    if fnmatch.fnmatchcase( base, pattern ):
                        selected = 0
                        break
            return selected
        # Normalize pattern arguments: None means "everything", a bare
        # string becomes a one-element tuple.
        if includes is None:
            includes = ('*',)
        elif type(includes) in ( type(''), type(u'') ):
            includes = (includes,)
        if type(excludes) in ( type(''), type(u'') ):
            excludes = (excludes,)
        # Rebinding 'dir' here is seen by the keep() closure, which is only
        # invoked afterwards, so it always works on the absolute path.
        dir = env.Dir(dir).abspath
        entries = os.listdir( dir )
        def as_node( entry ):
            return env.File( os.path.join( dir, entry ) )
        return map( as_node, filter( keep, entries ) )
    from SCons.Script import Environment
    Environment.Glob = Glob
def exists(env):
    """Report that this tool is always available, whatever the environment."""
    return True

View File

@ -1,3 +1,3 @@
Test suite from http://json.org/JSON_checker/.
If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files.

View File

@ -1,73 +1,73 @@
import sys import sys
import os import os
import os.path import os.path
import subprocess import subprocess
from glob import glob from glob import glob
import optparse import optparse
VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes' VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'
class TestProxy(object):
    """Wraps a test executable, optionally running it under valgrind."""

    def __init__( self, test_exe_path, use_valgrind=False ):
        # Store an absolute, normalized path so the proxy keeps working if
        # the current directory changes later.
        self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
        self.use_valgrind = use_valgrind

    def run( self, options ):
        """Execute the test program with '--test-auto' plus *options*.

        Returns a (succeeded, captured_output) pair, where succeeded is true
        exactly when the process exited with status 0; stdout and stderr are
        captured together.
        """
        prefix = VALGRIND_CMD.split() if self.use_valgrind else []
        command = prefix + [self.test_exe_path, '--test-auto'] + options
        process = subprocess.Popen( command, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT )
        output = process.communicate()[0]
        return process.returncode == 0, output
def runAllTests( exe_path, use_valgrind=False ):
    """Run every test exposed by the test executable at exe_path.

    exe_path: path of a test executable supporting the --list-tests and
        --test <name> options (always invoked with --test-auto by TestProxy).
    use_valgrind: when True, each test process runs under valgrind/memcheck.
    Returns 0 when every test passes, 1 otherwise (including when the test
    list itself cannot be obtained).
    """
    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
    # The executable prints one test name per line when given --list-tests.
    status, test_names = test_proxy.run( ['--list-tests'] )
    if not status:
        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
        return 1
    test_names = [name.strip() for name in test_names.strip().split('\n')]
    failures = []
    for name in test_names:
        # Trailing comma keeps the OK/FAILED verdict on the same line.
        print 'TESTING %s:' % name,
        succeed, result = test_proxy.run( ['--test', name] )
        if succeed:
            print 'OK'
        else:
            failures.append( (name, result) )
            print 'FAILED'
    failed_count = len(failures)
    pass_count = len(test_names) - failed_count
    if failed_count:
        print
        # Dump the captured output of each failed test before the summary.
        for name, result in failures:
            print result
        print '%d/%d tests passed (%d failure(s))' % (
            pass_count, len(test_names), failed_count)
        return 1
    else:
        print 'All %d tests passed' % len(test_names)
        return 0
def main():
    """Command-line entry point: run all tests of the given test executable.

    Exits with the status returned by runAllTests() (0 = all passed), or
    with status 2 if no executable path was supplied.
    """
    from optparse import OptionParser
    parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
    parser.add_option("--valgrind",
        action="store_true", dest="valgrind", default=False,
        help="run all the tests using valgrind to detect memory leaks")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) != 1:
        # OptionParser.error() prints the message and exits with status 2,
        # so nothing after it is reachable (the old 'sys.exit( 1 )' was dead
        # code).  Message grammar also fixed ("Must provides" -> "provide").
        parser.error( 'Must provide the path to the test_lib_json executable.' )
    exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
    sys.exit( exit_code )
# Allow use both as a script and as an importable module (runAllTests is
# reused by other test drivers).
if __name__ == '__main__':
    main()