mirror of
https://github.com/open-source-parsers/jsoncpp.git
synced 2025-04-02 01:41:02 +02:00
- added the following steps to make_release: fix EOL in the distribution sources, generate the source tarball.
- devtools/ was made into a Python module; common utilities are being moved into this module.
This commit is contained in:
parent 7c171ee726
commit e94d2f483b
devtools/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
# module
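The one-line __init__.py is what makes devtools/ importable as a package. As a minimal sketch (assuming a script run from the repository root, next to devtools/; file names are illustrative), the build scripts can now share these utilities with a plain import:

# Hypothetical caller, run from the repository root (assumes a dist/ directory exists).
from devtools import fixeol, tarball

# Normalize one file to unix EOL, then archive it under a prefix directory.
fixeol.fix_source_eol( 'README.txt', is_dry_run = False, eol = '\n' )
tarball.make_tarball( 'dist/demo.tar.gz', ['README.txt'], '.', prefix_dir = 'demo' )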
devtools/fixeol.py (new file, 63 lines)
@@ -0,0 +1,63 @@
import os.path
import sys   # needed by the error report below; missing from the original hunk


def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        # report 'path'; the original mistakenly formatted the 'file' builtin
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True


##
##
##
##def _do_fix( is_dry_run = True ):
##    from waftools import antglob
##    python_sources = antglob.glob( '.',
##        includes = '**/*.py **/wscript **/wscript_build',
##        excludes = antglob.default_excludes + './waf.py',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in python_sources:
##        _fix_python_source( path, is_dry_run )
##
##    cpp_sources = antglob.glob( '.',
##        includes = '**/*.cpp **/*.h **/*.inl',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in cpp_sources:
##        _fix_source_eol( path, is_dry_run )
##
##
##def dry_fix(context):
##    _do_fix( is_dry_run = True )
##
##def fix(context):
##    _do_fix( is_dry_run = False )
##
##def shutdown():
##    pass
##
##def check(context):
##    # Unit tests are run when "check" target is used
##    ut = UnitTest.unit_test()
##    ut.change_to_testfile_dir = True
##    ut.want_to_see_test_output = True
##    ut.want_to_see_test_error = True
##    ut.run()
##    ut.print_results()
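A short driver for fix_source_eol, illustrative rather than part of the commit, showing the dry-run/fix split (directory name and extensions are assumptions):

# Illustrative only: report (dry run) which sources under src/ would be rewritten.
import os
from devtools import fixeol

for dirpath, dirnames, filenames in os.walk( 'src' ):
    for filename in filenames:
        if filename.endswith( ('.cpp', '.h', '.inl', '.py') ):
            path = os.path.join( dirpath, filename )
            # is_dry_run=True only prints ' NEED FIX'; False rewrites the file in place.
            fixeol.fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' )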
devtools/tarball.py (new file, 41 lines)
@@ -0,0 +1,41 @@
import os.path
import gzip
import tarfile

TARGZ_DEFAULT_COMPRESSION_LEVEL = 9

def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
    """Parameters:
    tarball_path: output path of the .tar.gz file.
    sources: list of sources to include in the tarball, relative to the current directory.
    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
        from its path in the tarball.
    prefix_dir: all files stored in the tarball are placed under prefix_dir. Set to ''
        to make them children of the root.
    """
    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
    def archive_name( path ):
        """Makes path relative to base_dir."""
        path = os.path.normpath( os.path.abspath( path ) )
        common_path = os.path.commonprefix( (base_dir, path) )
        archive_name = path[len(common_path):]
        if os.path.isabs( archive_name ):
            archive_name = archive_name[1:]
        return os.path.join( prefix_dir, archive_name )
    def visit(tar, dirname, names):
        for name in names:
            path = os.path.join(dirname, name)
            if os.path.isfile(path):
                path_in_tar = archive_name(path)
                tar.add(path, path_in_tar)
    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
    fileobj = gzip.GzipFile( tarball_path, 'wb', compression )
    tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj)
    for source in sources:
        source_path = source
        if os.path.isdir( source ):
            os.path.walk(source_path, visit, tar)
        else:
            path_in_tar = archive_name(source_path)
            tar.add(source_path, path_in_tar)  # filename, arcname
    tar.close()
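A usage sketch for make_tarball (paths and version number are invented for illustration): members under base_dir are stripped of that prefix and re-rooted under prefix_dir, so the archive unpacks into a single top-level directory. Writing through gzip.GzipFile instead of tarfile.open(..., 'w:gz') presumably keeps the compression level under the module's explicit control.

from devtools import tarball

# 'dist/export/include/json/json.h' is stored as
# 'jsoncpp-src-0.5.0/include/json/json.h' in the archive.
tarball.make_tarball( 'dist/jsoncpp-src-0.5.0.tar.gz',
                      sources = ['dist/export'],
                      base_dir = 'dist/export',
                      prefix_dir = 'jsoncpp-src-0.5.0' )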
devtools/wscript (deleted, 225 lines)
@@ -1,225 +0,0 @@
VERSION='0.1.0'
APPNAME='CppUnit2'
srcdir = '.'
blddir = 'build'

import Options
import Logs
import UnitTest
import Utils
import os.path
import sys
import glob

CPPUT_EXAMPLES = '''
checking_assertions
ignore_failure_demo
input_test
light_fixture
log_demo
parametrized_test
stringize_demo
test_function
'''.split()

BROKEN_CPPUT_EXAMPLES = '''
input_based_test
opentest_demo
table_fixture
'''.split()


def _get_example_dirs():
    return [ os.path.join( 'examples', d )
             for d in CPPUT_EXAMPLES ]

def _get_main_script_dir():
    """Gets the path of the directory containing this script."""
    # The main script path is only valid once it has been executed, hence this cannot be a global var.
    assert Utils.g_module is not None
    return os.path.split( Utils.g_module.root_path )[0]

def _fix_import_path():
    """Adds the main script directory to be able to import waftools modules."""
    import_dir = _get_main_script_dir()
    if import_dir not in sys.path:
        sys.path.append( import_dir )

def _get_tool_dir():
    return os.path.join( main_script_dir, 'waftools' )

def set_options(opt):
    """Always called first during the build."""
    _fix_import_path()
    import waftools.log_output
    waftools.log_output.set_options( opt )

    # Adds command-line options for the compiler
    opt.tool_options('compiler_cxx')

    # from compiler_cxx tools, set_options
    import Tools.ccroot as ccroot
    opt.add_option('-d', '--debug-level',
                   action = 'store',
                   default = ccroot.DEBUG_LEVELS.RELEASE,
                   help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s'] " % "', '".join(ccroot.DEBUG_LEVELS.ALL) +
                          "[default: %default]",
                   choices = ccroot.DEBUG_LEVELS.ALL,
                   dest = 'debug_level')

def init():
    """Called after set_options() once the command-line has been parsed.
    Command-line option values are accessed through Options.options.
    """
    import waftools.log_output
    waftools.log_output.init()


def configure(conf):
    # There is a link issue with msvc 9!
    conf.env['MSVC_VERSIONS'] = ['msvc 8.0']

    # CXX=g++-3.0 ./waf.py configure will use g++-3.0 instead of 'g++'
    conf.check_tool('compiler_cxx')

    # Select debug/optimize flags
    debug_level = Options.options.debug_level.upper()
    conf.env.append_unique('CXXFLAGS', conf.env['CXXFLAGS_' + debug_level])

    compiler = conf.env['COMPILER_CXX']
    if compiler == 'msvc':  # Microsoft Visual Studio specifics
        # Select run-time library variant
        if 'DEBUG' in debug_level:
            crt_variant = 'MULTITHREADED_DLL_DBG'
        else:
            crt_variant = 'MULTITHREADED_DLL'
        # MULTITHREADED, MULTITHREADED_DLL, MULTITHREADED_DBG, MULTITHREADED_DLL_DBG
        conf.env.append_unique('CPPFLAGS', conf.env['CPPFLAGS_CRT_' + crt_variant])
        conf.env.append_unique('CPPDEFINES', conf.env['CPPDEFINES_CRT_' + crt_variant])

    ## batched builds can be enabled by including the module optim_cc
    # conf.check_tool('batched_cc')


# WAF command:

def build(bld):
    # process subfolders from here
    bld.add_subdirs('''src/cpptl
                       src/cpput
                       src/cpputtest''')

    bld.add_subdirs( _get_example_dirs() )

def gen_examples_wscript(ctx):
    for example_dir in _get_example_dirs():
        wscript_path = os.path.join( example_dir, 'wscript_build' )
        sources = glob.glob( os.path.join( example_dir, '*.cpp' ) )
        Logs.info( 'Generating "%s"' % wscript_path )
        open( wscript_path, 'wb' ).write( """\
#! /usr/bin/env python
# encoding: utf-8
# Baptiste Lepilleur, 2009

bld.new_task_gen(
    features = 'cxx cprogram',
    source = '''%(sources)s''',
    includes = '../.. ../../include', # for examples/common
    uselib_local = 'cpptl cpput',
    name = 'example_%(name)s',
    target = 'example_%(name)s' )
""" % {
            'sources': ' '.join( [os.path.basename(s) for s in sources] ),
            'name': os.path.basename( example_dir )
            } )

def _fix_python_source( path, is_dry_run = True, verbose = True ):
    """Makes sure that all sources have unix EOL and replaces tabs with 4 spaces."""
    from waftools import reindent
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False

    if verbose:
        print '%s =>' % path,
    try:
        r = reindent.Reindenter(f)
    finally:
        f.close()
    if r.run(): # Does the file need to be fixed?
        if not is_dry_run:
            f = open(path, "wb")
            try:
                r.write(f)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    elif verbose:
        print ' OK'
    return True

def _fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True



def _do_fix( is_dry_run = True ):
    from waftools import antglob
    python_sources = antglob.glob( '.',
        includes = '**/*.py **/wscript **/wscript_build',
        excludes = antglob.default_excludes + './waf.py',
        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
    for path in python_sources:
        _fix_python_source( path, is_dry_run )

    cpp_sources = antglob.glob( '.',
        includes = '**/*.cpp **/*.h **/*.inl',
        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
    for path in cpp_sources:
        _fix_source_eol( path, is_dry_run )


def dry_fix(context):
    _do_fix( is_dry_run = True )

def fix(context):
    _do_fix( is_dry_run = False )

def shutdown():
    pass

def check(context):
    # Unit tests are run when the "check" target is used
    ut = UnitTest.unit_test()
    ut.change_to_testfile_dir = True
    ut.want_to_see_test_output = True
    ut.want_to_see_test_error = True
    ut.run()
    ut.print_results()
doxybuild.py (44 lines changed)
@@ -6,47 +6,7 @@ import os
 import os.path
 import sys
 import shutil
-import gzip
-import tarfile
-
-TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
-
-def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
    [... remaining deleted lines are the make_tarball body, moved verbatim to devtools/tarball.py above ...]
+from devtools import tarball

 def find_program(filename):
     """find a program in folders path_lst, and sets env[var]
@@ -171,7 +131,7 @@ def build_doc( options, make_release=False ):
         'version'
         ]
     tarball_basedir = os.path.join( full_output_dir, html_output_dirname )
-    make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )
+    tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )

 def main():
     usage = """%prog
makerelease.py
@@ -15,6 +15,7 @@ import doxybuild
 import subprocess
 import xml.etree.ElementTree as ElementTree
 import shutil
+from devtools import antglob, fixeol, tarball

 SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
 SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
@@ -92,6 +93,24 @@ def svn_export( tag_url, export_dir ):
         shutil.rmtree( export_dir )
     svn_command( 'export', tag_url, export_dir )

+def fix_sources_eol( dist_dir ):
+    """Set file EOL for tarball distribution.
+    """
+    print 'Preparing exported source file EOL for distribution...'
+    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
+    win_sources = antglob.glob( dist_dir,
+        includes = '**/*.sln **/*.vcproj',
+        prune_dirs = prune_dirs )
+    unix_sources = antglob.glob( dist_dir,
+        includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
+                      sconscript *.json *.expected AUTHORS LICENSE''',
+        excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
+        prune_dirs = prune_dirs )
+    for path in win_sources:
+        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' )
+    for path in unix_sources:
+        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )
+
 def main():
     usage = """%prog release_version next_dev_version
 Update 'version' file to release_version and commit.
@@ -129,19 +148,26 @@ Must be started in the project top directory.
     print 'Setting version to', release_version
     set_version( release_version )
     tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
-##    if svn_check_if_tag_exist( tag_url ):
-##        if options.retag_release:
-##            svn_remove_tag( tag_url, 'Overwriting previous tag' )
-##        else:
-##            print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
-##            sys.exit( 1 )
-##    svn_tag_sandbox( tag_url, 'Release ' + release_version )
-##    print 'Generated doxygen document...'
-##    doxybuild.build_doc( options, make_release=True )
-    svn_export( tag_url, 'dist/distcheck' )
+    if svn_check_if_tag_exist( tag_url ):
+        if options.retag_release:
+            svn_remove_tag( tag_url, 'Overwriting previous tag' )
+        else:
+            print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
+            sys.exit( 1 )
+    svn_tag_sandbox( tag_url, 'Release ' + release_version )
+
+    print 'Generated doxygen document...'
+    doxybuild.build_doc( options, make_release=True )
+
+    export_dir = 'dist/export'
+    svn_export( tag_url, export_dir )
+    fix_sources_eol( export_dir )
+
+    source_dir = 'jsoncpp-src-' + release_version
+    source_tarball_path = 'dist/%s.tar.gz' % source_dir
+    print 'Generating source tarball to', source_tarball_path
+    tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
     #@todo:
-    # fix-eol
-    # source tarball
     # decompress source tarball
     # ?compile & run & check
     # ?upload documentation
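Taken together, the new steps in main() export the tagged tree, normalize EOLs (CRLF for .sln/.vcproj, LF for the other matched sources), and pack the export under a versioned prefix. A quick way to sanity-check the result, as an illustrative sketch (the version number is made up, not from the commit):

# Illustrative check, not part of the commit: list the release tarball contents.
import tarfile

tar = tarfile.open( 'dist/jsoncpp-src-0.6.0.tar.gz', 'r:gz' )
for member_name in tar.getnames():
    print member_name   # every entry should be prefixed with 'jsoncpp-src-0.6.0/'
tar.close()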