From e94d2f483b7efaf9d8d1a405906b069c8f476448 Mon Sep 17 00:00:00 2001
From: Baptiste Lepilleur
Date: Tue, 23 Feb 2010 21:00:30 +0000
Subject: [PATCH] - added the following steps to make_release: fix EOL in distribution source, generate source tarball.

- devtools/ was made into a Python module and common utilities are being moved into this module
---
 devtools/__init__.py |   1 +
 devtools/fixeol.py   |  63 ++++++++++++
 devtools/tarball.py  |  41 ++++++++
 devtools/wscript     | 225 -------------------------------------------
 doxybuild.py         |  44 +--------
 makerelease.py       |  50 +++++++---
 version              |   2 +-
 7 files changed, 146 insertions(+), 280 deletions(-)
 create mode 100644 devtools/__init__.py
 create mode 100644 devtools/fixeol.py
 create mode 100644 devtools/tarball.py
 delete mode 100644 devtools/wscript

diff --git a/devtools/__init__.py b/devtools/__init__.py
new file mode 100644
index 0000000..c944e7c
--- /dev/null
+++ b/devtools/__init__.py
@@ -0,0 +1 @@
+# module
\ No newline at end of file
diff --git a/devtools/fixeol.py b/devtools/fixeol.py
new file mode 100644
index 0000000..5d8372d
--- /dev/null
+++ b/devtools/fixeol.py
@@ -0,0 +1,63 @@
+import os.path, sys
+
+def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
+    """Makes sure that all sources have the specified eol sequence (default: unix)."""
+    if not os.path.isfile( path ):
+        raise ValueError( 'Path "%s" is not a file' % path )
+    try:
+        f = open(path, 'rb')
+    except IOError, msg:
+        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
+        return False
+    try:
+        raw_lines = f.readlines()
+    finally:
+        f.close()
+    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
+    if raw_lines != fixed_lines:
+        print '%s =>' % path,
+        if not is_dry_run:
+            f = open(path, "wb")
+            try:
+                f.writelines(fixed_lines)
+            finally:
+                f.close()
+        if verbose:
+            print is_dry_run and ' NEED FIX' or ' FIXED'
+    return True
+##
+##
+##
+##def _do_fix( is_dry_run = True ):
+##    from waftools import antglob
+##    python_sources = antglob.glob( '.',
+##        includes = '**/*.py **/wscript **/wscript_build',
+##        excludes = antglob.default_excludes + './waf.py',
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
+##    for path in python_sources:
+##        _fix_python_source( path, is_dry_run )
+##
+##    cpp_sources = antglob.glob( '.',
+##        includes = '**/*.cpp **/*.h **/*.inl',
+##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
+##    for path in cpp_sources:
+##        _fix_source_eol( path, is_dry_run )
+##
+##
+##def dry_fix(context):
+##    _do_fix( is_dry_run = True )
+##
+##def fix(context):
+##    _do_fix( is_dry_run = False )
+##
+##def shutdown():
+##    pass
+##
+##def check(context):
+##    # Unit tests are run when "check" target is used
+##    ut = UnitTest.unit_test()
+##    ut.change_to_testfile_dir = True
+##    ut.want_to_see_test_output = True
+##    ut.want_to_see_test_error = True
+##    ut.run()
+##    ut.print_results()
diff --git a/devtools/tarball.py b/devtools/tarball.py
new file mode 100644
index 0000000..2ce261a
--- /dev/null
+++ b/devtools/tarball.py
@@ -0,0 +1,41 @@
+import os.path
+import gzip
+import tarfile
+
+TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
+
+def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
+    """Parameters:
+    tarball_path: output path of the .tar.gz file
+    sources: list of sources to include in the tarball, relative to the current directory
+    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
+        from path in the tarball.
+    prefix_dir: all files stored in the tarball will be placed under sub-directory prefix_dir.
Set to '' + to make them child of root. + """ + base_dir = os.path.normpath( os.path.abspath( base_dir ) ) + def archive_name( path ): + """Makes path relative to base_dir.""" + path = os.path.normpath( os.path.abspath( path ) ) + common_path = os.path.commonprefix( (base_dir, path) ) + archive_name = path[len(common_path):] + if os.path.isabs( archive_name ): + archive_name = archive_name[1:] + return os.path.join( prefix_dir, archive_name ) + def visit(tar, dirname, names): + for name in names: + path = os.path.join(dirname, name) + if os.path.isfile(path): + path_in_tar = archive_name(path) + tar.add(path, path_in_tar ) + compression = TARGZ_DEFAULT_COMPRESSION_LEVEL + fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) + tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) + for source in sources: + source_path = source + if os.path.isdir( source ): + os.path.walk(source_path, visit, tar) + else: + path_in_tar = archive_name(source_path) + tar.add(source_path, path_in_tar ) # filename, arcname + tar.close() diff --git a/devtools/wscript b/devtools/wscript deleted file mode 100644 index 61b5183..0000000 --- a/devtools/wscript +++ /dev/null @@ -1,225 +0,0 @@ -VERSION='0.1.0' -APPNAME='CppUnit2' -srcdir = '.' -blddir = 'build' - -import Options -import Logs -import UnitTest -import Utils -import os.path -import sys -import glob - -CPPUT_EXAMPLES = ''' - checking_assertions - ignore_failure_demo - input_test - light_fixture - log_demo - parametrized_test - stringize_demo - test_function - '''.split() - -BROKEN_CPPUT_EXAMPLES = ''' - input_based_test - opentest_demo - table_fixture - '''.split() - -def _get_example_dirs(): - return [ os.path.join( 'examples', d ) - for d in CPPUT_EXAMPLES ] - -def _get_main_script_dir(): - """Gets the path of the directory containing this script.""" - # The main script path is only valid once the it has been executed, hence this can not be a global var. - assert Utils.g_module is not None - return os.path.split( Utils.g_module.root_path )[0] - -def _fix_import_path(): - """Adds the main script directory to be able to import waftools modules.""" - import_dir = _get_main_script_dir() - if import_dir not in sys.path: - sys.path.append( import_dir ) - -def _get_tool_dir(): - return os.path.join( main_script_dir, 'waftools' ) - -def set_options(opt): - """Always called first during the build.""" - _fix_import_path() - import waftools.log_output - waftools.log_output.set_options( opt ) - - # Adds command-line options for compiler - opt.tool_options('compiler_cxx') - - # from compiler_cxx tools, set_options - import Tools.ccroot as ccroot - opt.add_option('-d', '--debug-level', - action = 'store', - default = ccroot.DEBUG_LEVELS.RELEASE, - help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s'] " % "', '".join(ccroot.DEBUG_LEVELS.ALL) + - "[default: %default]", - choices = ccroot.DEBUG_LEVELS.ALL, - dest = 'debug_level') - -def init(): - """Called set_options() once the command-line has been parsed. - Command-line options value are accessed through Options.options. - """ - import waftools.log_output - waftools.log_output.init() - - -def configure(conf): - # There is a link issue with msvc 9! 
- conf.env['MSVC_VERSIONS'] = ['msvc 8.0'] - - # CXX=g++-3.0 ./waf.py configure will use g++-3.0 instead of 'g++' - conf.check_tool('compiler_cxx') - - # Select debug/optimize flags - debug_level = Options.options.debug_level.upper() - conf.env.append_unique('CXXFLAGS', conf.env['CXXFLAGS_' + debug_level]) - - compiler = conf.env['COMPILER_CXX'] - if compiler == 'msvc': # Microsoft Visual Studio specifics - # Select run-time library variant - if 'DEBUG' in debug_level: - crt_variant = 'MULTITHREADED_DLL_DBG' - else: - crt_variant = 'MULTITHREADED_DLL' - # MULTITHREADED, MULTITHREADED_DLL, MULTITHREADED_DBG, MULTITHREADED_DLL_DBG - conf.env.append_unique('CPPFLAGS', conf.env['CPPFLAGS_CRT_' + crt_variant]) - conf.env.append_unique('CPPDEFINES', conf.env['CPPDEFINES_CRT_' + crt_variant]) - - ## batched builds can be enabled by including the module optim_cc - # conf.check_tool('batched_cc') - - -# WAF command: - -def build(bld): - # process subfolders from here - bld.add_subdirs('''src/cpptl - src/cpput - src/cpputtest''') - - bld.add_subdirs( _get_example_dirs() ) - -def gen_examples_wscript(ctx): - for example_dir in _get_example_dirs(): - wscript_path = os.path.join( example_dir, 'wscript_build' ) - sources = glob.glob( os.path.join( example_dir, '*.cpp' ) ) - Logs.info( 'Generating "%s"' % wscript_path ) - open( wscript_path, 'wb' ).write( """\ -#! /usr/bin/env python -# encoding: utf-8 -# Baptiste Lepilleur, 2009 - -bld.new_task_gen( - features = 'cxx cprogram', - source = '''%(sources)s''', - includes = '../.. ../../include', # for examples/common - uselib_local = 'cpptl cpput', - name = 'example_%(name)s', - target = 'example_%(name)s' ) -""" % { - 'sources': ' '.join( [os.path.basename(s) for s in sources] ), - 'name': os.path.basename( example_dir ) - } ) - -def _fix_python_source( path, is_dry_run = True, verbose = True ): - """Makes sure that all sources have unix EOL and replace tabs with 4 spaces.""" - from waftools import reindent - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - - if verbose: - print '%s =>' % path, - try: - r = reindent.Reindenter(f) - finally: - f.close() - if r.run(): # File need to be fixed ? 
- if not is_dry_run: - f = open(path, "wb") - try: - r.write(f) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - elif verbose: - print ' OK' - return True - -def _fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ): - """Makes sure that all sources have the specified eol sequence (default: unix).""" - if not os.path.isfile( path ): - raise ValueError( 'Path "%s" is not a file' % path ) - try: - f = open(path, 'rb') - except IOError, msg: - print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg)) - return False - try: - raw_lines = f.readlines() - finally: - f.close() - fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] - if raw_lines != fixed_lines: - print '%s =>' % path, - if not is_dry_run: - f = open(path, "wb") - try: - f.writelines(fixed_lines) - finally: - f.close() - if verbose: - print is_dry_run and ' NEED FIX' or ' FIXED' - return True - - - -def _do_fix( is_dry_run = True ): - from waftools import antglob - python_sources = antglob.glob( '.', - includes = '**/*.py **/wscript **/wscript_build', - excludes = antglob.default_excludes + './waf.py', - prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) - for path in python_sources: - _fix_python_source( path, is_dry_run ) - - cpp_sources = antglob.glob( '.', - includes = '**/*.cpp **/*.h **/*.inl', - prune_dirs = antglob.prune_dirs + 'waf-* ./build' ) - for path in cpp_sources: - _fix_source_eol( path, is_dry_run ) - - -def dry_fix(context): - _do_fix( is_dry_run = True ) - -def fix(context): - _do_fix( is_dry_run = False ) - -def shutdown(): - pass - -def check(context): - # Unit tests are run when "check" target is used - ut = UnitTest.unit_test() - ut.change_to_testfile_dir = True - ut.want_to_see_test_output = True - ut.want_to_see_test_error = True - ut.run() - ut.print_results() diff --git a/doxybuild.py b/doxybuild.py index 82bdea6..792bff7 100644 --- a/doxybuild.py +++ b/doxybuild.py @@ -6,47 +6,7 @@ import os import os.path import sys import shutil -import gzip -import tarfile - -TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 - -def make_tarball(tarball_path, sources, base_dir, prefix_dir=''): - """Parameters: - tarball_path: output path of the .tar.gz file - sources: list of sources to include in the tarball, relative to the current directory - base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped - from path in the tarball. - prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' - to make them child of root. 
- """ - base_dir = os.path.normpath( os.path.abspath( base_dir ) ) - def archive_name( path ): - """Makes path relative to base_dir.""" - path = os.path.normpath( os.path.abspath( path ) ) - common_path = os.path.commonprefix( (base_dir, path) ) - archive_name = path[len(common_path):] - if os.path.isabs( archive_name ): - archive_name = archive_name[1:] - return os.path.join( prefix_dir, archive_name ) - def visit(tar, dirname, names): - for name in names: - path = os.path.join(dirname, name) - if os.path.isfile(path): - path_in_tar = archive_name(path) - tar.add(path, path_in_tar ) - compression = TARGZ_DEFAULT_COMPRESSION_LEVEL - fileobj = gzip.GzipFile( tarball_path, 'wb', compression ) - tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj) - for source in sources: - source_path = source - if os.path.isdir( source ): - os.path.walk(source_path, visit, tar) - else: - path_in_tar = archive_name(source_path) - tar.add(source_path, path_in_tar ) # filename, arcname - tar.close() - +from devtools import tarball def find_program(filename): """find a program in folders path_lst, and sets env[var] @@ -171,7 +131,7 @@ def build_doc( options, make_release=False ): 'version' ] tarball_basedir = os.path.join( full_output_dir, html_output_dirname ) - make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) + tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname ) def main(): usage = """%prog diff --git a/makerelease.py b/makerelease.py index c00062a..b928c62 100644 --- a/makerelease.py +++ b/makerelease.py @@ -15,6 +15,7 @@ import doxybuild import subprocess import xml.etree.ElementTree as ElementTree import shutil +from devtools import antglob, fixeol, tarball SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/' SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp' @@ -92,6 +93,24 @@ def svn_export( tag_url, export_dir ): shutil.rmtree( export_dir ) svn_command( 'export', tag_url, export_dir ) +def fix_sources_eol( dist_dir ): + """Set file EOL for tarball distribution. + """ + print 'Preparing exported source file EOL for distribution...' + prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' + win_sources = antglob.glob( dist_dir, + includes = '**/*.sln **/*.vcproj', + prune_dirs = prune_dirs ) + unix_sources = antglob.glob( dist_dir, + includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in + sconscript *.json *.expected AUTHORS LICENSE''', + excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', + prune_dirs = prune_dirs ) + for path in win_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' ) + for path in unix_sources: + fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' ) + def main(): usage = """%prog release_version next_dev_version Update 'version' file to release_version and commit. @@ -129,19 +148,26 @@ Must be started in the project top directory. print 'Setting version to', release_version set_version( release_version ) tag_url = svn_join_url( SVN_TAG_ROOT, release_version ) -## if svn_check_if_tag_exist( tag_url ): -## if options.retag_release: -## svn_remove_tag( tag_url, 'Overwriting previous tag' ) -## else: -## print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url -## sys.exit( 1 ) -## svn_tag_sandbox( tag_url, 'Release ' + release_version ) -## print 'Generated doxygen document...' 
-##    doxybuild.build_doc( options, make_release=True )
-    svn_export( tag_url, 'dist/distcheck' )
+    if svn_check_if_tag_exist( tag_url ):
+        if options.retag_release:
+            svn_remove_tag( tag_url, 'Overwriting previous tag' )
+        else:
+            print 'Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url
+            sys.exit( 1 )
+    svn_tag_sandbox( tag_url, 'Release ' + release_version )
+
+    print 'Generating doxygen documentation...'
+    doxybuild.build_doc( options, make_release=True )
+
+    export_dir = 'dist/export'
+    svn_export( tag_url, export_dir )
+    fix_sources_eol( export_dir )
+
+    source_dir = 'jsoncpp-src-' + release_version
+    source_tarball_path = 'dist/%s.tar.gz' % source_dir
+    print 'Generating source tarball to', source_tarball_path
+    tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
     #@todo:
-    # fix-eol
-    # source tarball
     # decompress source tarball
     # ?compile & run & check
     # ?upload documentation
diff --git a/version b/version
index ab67193..79a2734 100644
--- a/version
+++ b/version
@@ -1 +1 @@
-0.5.0-rc
\ No newline at end of file
+0.5.0
\ No newline at end of file
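
Usage sketch, not part of the patch (the file names below are hypothetical): devtools.fixeol.fix_source_eol rewrites a file's line endings in place, and with is_dry_run=True it only reports files that would change instead of rewriting them. makerelease.py drives it with eol='\r\n' for Visual Studio files and eol='\n' for everything else:

    from devtools import fixeol

    # Dry run: report whether the file would need fixing, without touching it.
    fixeol.fix_source_eol( 'src/lib_json/json_reader.cpp', is_dry_run=True )
    # Rewrite a Visual Studio solution file with Windows line endings.
    fixeol.fix_source_eol( 'jsoncpp.sln', is_dry_run=False, verbose=True, eol='\r\n' )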
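
Usage sketch, not part of the patch (the output path and version are hypothetical): devtools.tarball.make_tarball packs the listed sources into a .tar.gz, strips base_dir from the stored paths and re-roots everything under prefix_dir, which is how makerelease.py now builds the source tarball from the exported tree:

    from devtools import tarball

    # Archive dist/export so that it unpacks into a jsoncpp-src-0.5.0/ directory.
    tarball.make_tarball( 'dist/jsoncpp-src-0.5.0.tar.gz',  # output .tar.gz path
                          ['dist/export'],                   # sources to include
                          'dist/export',                     # base_dir stripped from stored paths
                          prefix_dir='jsoncpp-src-0.5.0' )   # top-level directory inside the archive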