mirror of https://github.com/open-source-parsers/jsoncpp.git
synced 2025-04-03 01:54:44 +02:00

- added svn export
- prepared tool for eol conversion

parent fcf145ecd4
commit 7c171ee726

devtools/antglob.py (new file, 201 lines)
@@ -0,0 +1,201 @@
#!/usr/bin/env python
# encoding: utf-8
# Baptiste Lepilleur, 2009

from dircache import listdir
import re
import fnmatch
import os.path


# These fnmatch expressions are used by default to prune the directory tree
# while doing the recursive traversal in the glob_impl method of glob function.
prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS '

# These fnmatch expressions are used by default to exclude files and dirs
# while doing the recursive traversal in the glob_impl method of glob function.
##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()

# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree
# while doing the recursive traversal in the glob_impl method of glob function.
default_excludes = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store '''

DIR = 1
FILE = 2
DIR_LINK = 4
FILE_LINK = 8
LINKS = DIR_LINK | FILE_LINK
ALL_NO_LINK = DIR | FILE
ALL = DIR | FILE | LINKS

_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )

def ant_pattern_to_re( ant_pattern ):
    """Generates a regular expression from the ant pattern.
    Matching convention:
    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
    *.py: match 'script.py' but not 'a/script.py'
    """
    rex = ['^']
    next_pos = 0
    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
##    print 'Converting', ant_pattern
    for match in _ANT_RE.finditer( ant_pattern ):
##        print 'Matched', match.group()
##        print match.start(0), next_pos
        if match.start(0) != next_pos:
            raise ValueError( "Invalid ant pattern" )
        if match.group(1): # /**/
            rex.append( sep_rex + '(?:.*%s)?' % sep_rex )
        elif match.group(2): # **/
            rex.append( '(?:.*%s)?' % sep_rex )
        elif match.group(3): # /**
            rex.append( sep_rex + '.*' )
        elif match.group(4): # *
            rex.append( '[^/%s]*' % re.escape(os.path.sep) )
        elif match.group(5): # /
            rex.append( sep_rex )
        else: # somepath
            rex.append( re.escape(match.group(6)) )
        next_pos = match.end()
    rex.append('$')
    return re.compile( ''.join( rex ) )
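
## A quick sanity check of the conversion (a sketch; these mirror the unit
## tests at the bottom of this file):
##   assert ant_pattern_to_re( '*.py' ).match( 'script.py' )
##   assert ant_pattern_to_re( '*.py' ).match( 'a/script.py' ) is None
##   assert ant_pattern_to_re( '**/*.py' ).match( 'a/b/script.py' )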

def _as_list( l ):
    if isinstance(l, basestring):
        return l.split()
    return l

def glob(dir_path,
         includes = '**/*',
         excludes = default_excludes,
         entry_type = FILE,
         prune_dirs = prune_dirs,
         max_depth = 25):
    include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
    dir_path = dir_path.replace('/',os.path.sep)
    entry_type_filter = entry_type

    def is_pruned_dir( dir_name ):
        for pattern in prune_dirs:
            if fnmatch.fnmatch( dir_name, pattern ):
                return True
        return False

    def apply_filter( full_path, filter_rexs ):
        """Return True if at least one of the filter regular expressions matches full_path."""
        for rex in filter_rexs:
            if rex.match( full_path ):
                return True
        return False

    def glob_impl( root_dir_path ):
        child_dirs = [root_dir_path]
        while child_dirs:
            dir_path = child_dirs.pop()
            for entry in listdir( dir_path ):
                full_path = os.path.join( dir_path, entry )
##                print 'Testing:', full_path,
                is_dir = os.path.isdir( full_path )
                if is_dir and not is_pruned_dir( entry ): # explore child directory?
##                    print '===> marked for recursion',
                    child_dirs.append( full_path )
                included = apply_filter( full_path, include_filter )
                rejected = apply_filter( full_path, exclude_filter )
                if not included or rejected: # do not include entry?
##                    print '=> not included or rejected'
                    continue
                link = os.path.islink( full_path )
                is_file = os.path.isfile( full_path )
                if not is_file and not is_dir:
##                    print '=> unknown entry type'
                    continue
                if link:
                    entry_type = is_file and FILE_LINK or DIR_LINK
                else:
                    entry_type = is_file and FILE or DIR
##                print '=> type: %d' % entry_type,
                if (entry_type & entry_type_filter) != 0:
##                    print ' => KEEP'
                    yield os.path.join( dir_path, entry )
##                else:
##                    print ' => TYPE REJECTED'
    return list( glob_impl( dir_path ) )
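
## Example usage (a sketch, assuming the current directory holds sources):
##   scripts = glob( '.', includes = '**/*.py' )                  # files only (default)
##   dirs    = glob( '.', includes = '**/*', entry_type = DIR )   # directories only
## Returned paths are rooted at dir_path and use os.path.sep as separator.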


if __name__ == "__main__":
    import unittest

    class AntPatternToRETest(unittest.TestCase):
##        def test_conversion( self ):
##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )

        def test_matching( self ):
            test_cases = [ ( 'path',
                             ['path'],
                             ['somepath', 'pathsuffix', '/path', '/path'] ),
                           ( '*.py',
                             ['source.py', 'source.ext.py', '.py'],
                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
                           ( '**/path',
                             ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
                           ( 'path/**',
                             ['path/a', 'path/path/a', 'path//'],
                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
                           ( '/**/path',
                             ['/path', '/a/path', '/a/b/path/path', '/path/path'],
                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
                           ( 'a/b',
                             ['a/b'],
                             ['somea/b', 'a/bsuffix', 'a/b/c'] ),
                           ( '**/*.py',
                             ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
                             ['script.pyc', 'script.pyo', 'a.py/b'] ),
                           ( 'src/**/*.py',
                             ['src/a.py', 'src/dir/a.py'],
                             ['a/src/a.py', '/src/a.py'] ),
                           ]
            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
                def local_path( paths ):
                    return [ p.replace('/',os.path.sep) for p in paths ]
                test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
            for ant_pattern, accepted_matches, rejected_matches in test_cases:
                rex = ant_pattern_to_re( ant_pattern )
                print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
                for accepted_match in accepted_matches:
                    print 'Accepted?:', accepted_match
                    self.assert_( rex.match( accepted_match ) is not None )
                for rejected_match in rejected_matches:
                    print 'Rejected?:', rejected_match
                    self.assert_( rex.match( rejected_match ) is None )

    unittest.main()
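
## Running this module directly executes the test suite above:
##   python devtools/antglob.py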

devtools/wscript (new file, 225 lines)

@@ -0,0 +1,225 @@
VERSION='0.1.0'
APPNAME='CppUnit2'
srcdir = '.'
blddir = 'build'

import Options
import Logs
import UnitTest
import Utils
import os.path
import sys
import glob

CPPUT_EXAMPLES = '''
checking_assertions
ignore_failure_demo
input_test
light_fixture
log_demo
parametrized_test
stringize_demo
test_function
'''.split()

BROKEN_CPPUT_EXAMPLES = '''
input_based_test
opentest_demo
table_fixture
'''.split()

def _get_example_dirs():
    return [ os.path.join( 'examples', d )
             for d in CPPUT_EXAMPLES ]

def _get_main_script_dir():
    """Gets the path of the directory containing this script."""
    # The main script path is only valid once it has been executed, hence this can not be a global var.
    assert Utils.g_module is not None
    return os.path.split( Utils.g_module.root_path )[0]

def _fix_import_path():
    """Adds the main script directory to be able to import waftools modules."""
    import_dir = _get_main_script_dir()
    if import_dir not in sys.path:
        sys.path.append( import_dir )

def _get_tool_dir():
    return os.path.join( _get_main_script_dir(), 'waftools' )

def set_options(opt):
    """Always called first during the build."""
    _fix_import_path()
    import waftools.log_output
    waftools.log_output.set_options( opt )

    # Adds command-line options for the compiler
    opt.tool_options('compiler_cxx')

    # from the compiler_cxx tool's set_options
    import Tools.ccroot as ccroot
    opt.add_option('-d', '--debug-level',
                   action = 'store',
                   default = ccroot.DEBUG_LEVELS.RELEASE,
                   help = "Specify the debug level, does nothing if CXXFLAGS is set in the environment. [Allowed Values: '%s'] " % "', '".join(ccroot.DEBUG_LEVELS.ALL) +
                          "[default: %default]",
                   choices = ccroot.DEBUG_LEVELS.ALL,
                   dest = 'debug_level')

def init():
    """Called after set_options() once the command-line has been parsed.
    Command-line option values are accessed through Options.options.
    """
    import waftools.log_output
    waftools.log_output.init()


def configure(conf):
    # There is a link issue with msvc 9!
    conf.env['MSVC_VERSIONS'] = ['msvc 8.0']

    # CXX=g++-3.0 ./waf.py configure will use g++-3.0 instead of 'g++'
    conf.check_tool('compiler_cxx')

    # Select debug/optimize flags
    debug_level = Options.options.debug_level.upper()
    conf.env.append_unique('CXXFLAGS', conf.env['CXXFLAGS_' + debug_level])

    compiler = conf.env['COMPILER_CXX']
    if compiler == 'msvc': # Microsoft Visual Studio specifics
        # Select run-time library variant
        if 'DEBUG' in debug_level:
            crt_variant = 'MULTITHREADED_DLL_DBG'
        else:
            crt_variant = 'MULTITHREADED_DLL'
        # MULTITHREADED, MULTITHREADED_DLL, MULTITHREADED_DBG, MULTITHREADED_DLL_DBG
        conf.env.append_unique('CPPFLAGS', conf.env['CPPFLAGS_CRT_' + crt_variant])
        conf.env.append_unique('CPPDEFINES', conf.env['CPPDEFINES_CRT_' + crt_variant])

    ## batched builds can be enabled by including the module optim_cc
    # conf.check_tool('batched_cc')


# WAF command:

def build(bld):
    # process subfolders from here
    bld.add_subdirs('''src/cpptl
                       src/cpput
                       src/cpputtest''')

    bld.add_subdirs( _get_example_dirs() )

def gen_examples_wscript(ctx):
    for example_dir in _get_example_dirs():
        wscript_path = os.path.join( example_dir, 'wscript_build' )
        sources = glob.glob( os.path.join( example_dir, '*.cpp' ) )
        Logs.info( 'Generating "%s"' % wscript_path )
        open( wscript_path, 'wb' ).write( """\
#! /usr/bin/env python
# encoding: utf-8
# Baptiste Lepilleur, 2009

bld.new_task_gen(
    features = 'cxx cprogram',
    source = '''%(sources)s''',
    includes = '../.. ../../include', # for examples/common
    uselib_local = 'cpptl cpput',
    name = 'example_%(name)s',
    target = 'example_%(name)s' )
""" % {
            'sources': ' '.join( [os.path.basename(s) for s in sources] ),
            'name': os.path.basename( example_dir )
            } )
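
## For illustration (an assumption about directory contents): if
## examples/log_demo contained only log_demo.cpp, the generated wscript_build
## would read:
##   bld.new_task_gen(
##       features = 'cxx cprogram',
##       source = '''log_demo.cpp''',
##       includes = '../.. ../../include', # for examples/common
##       uselib_local = 'cpptl cpput',
##       name = 'example_log_demo',
##       target = 'example_log_demo' )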

def _fix_python_source( path, is_dry_run = True, verbose = True ):
    """Makes sure that all sources have unix EOL and replaces tabs with 4 spaces."""
    from waftools import reindent
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False

    if verbose:
        print '%s =>' % path,
    try:
        r = reindent.Reindenter(f)
    finally:
        f.close()
    if r.run(): # File needs to be fixed?
        if not is_dry_run:
            f = open(path, "wb")
            try:
                r.write(f)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    elif verbose:
        print ' OK'
    return True

def _fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True
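
## A minimal sketch of the normalization rule used above:
##   raw_lines   = ['a\r\n', 'b\n', 'c']
##   fixed_lines = [line.rstrip('\r\n') + '\n' for line in raw_lines]
##   # fixed_lines == ['a\n', 'b\n', 'c\n']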


def _do_fix( is_dry_run = True ):
    from waftools import antglob
    python_sources = antglob.glob( '.',
                                   includes = '**/*.py **/wscript **/wscript_build',
                                   excludes = antglob.default_excludes + './waf.py',
                                   prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
    for path in python_sources:
        _fix_python_source( path, is_dry_run )

    cpp_sources = antglob.glob( '.',
                                includes = '**/*.cpp **/*.h **/*.inl',
                                prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
    for path in cpp_sources:
        _fix_source_eol( path, is_dry_run )


def dry_fix(context):
    _do_fix( is_dry_run = True )

def fix(context):
    _do_fix( is_dry_run = False )

def shutdown():
    pass

def check(context):
    # Unit tests are run when "check" target is used
    ut = UnitTest.unit_test()
    ut.change_to_testfile_dir = True
    ut.want_to_see_test_output = True
    ut.want_to_see_test_error = True
    ut.run()
    ut.print_results()
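
## Typical invocations (a sketch; assumes the waf script sits next to this
## wscript):
##   python waf.py configure
##   python waf.py build check
##   python waf.py dry_fix    # report files whose EOL/indentation would change
##   python waf.py fix        # rewrite the offending files in place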

@@ -14,6 +14,7 @@ import sys
 import doxybuild
 import subprocess
 import xml.etree.ElementTree as ElementTree
+import shutil

 SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
 SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
@@ -82,6 +83,15 @@ def svn_remove_tag( tag_url, message ):
     """
     svn_command( 'delete', '-m', message, tag_url )

+def svn_export( tag_url, export_dir ):
+    """Exports the tag_url revision to export_dir.
+       The target directory, including its parent, is created if it does not exist.
+       If the directory export_dir exists, it is deleted before the export proceeds.
+    """
+    if os.path.isdir( export_dir ):
+        shutil.rmtree( export_dir )
+    svn_command( 'export', tag_url, export_dir )
+
 def main():
     usage = """%prog release_version next_dev_version
 Update 'version' file to release_version and commit.
@@ -119,17 +129,18 @@ Must be started in the project top directory.
     print 'Setting version to', release_version
     set_version( release_version )
     tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
-    if svn_check_if_tag_exist( tag_url ):
-        if options.retag_release:
-            svn_remove_tag( tag_url, 'Overwriting previous tag' )
-        else:
-            print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
-            sys.exit( 1 )
-    svn_tag_sandbox( tag_url, 'Release ' + release_version )
-    print 'Generated doxygen document...'
-    doxybuild.build_doc( options, make_release=True )
+##    if svn_check_if_tag_exist( tag_url ):
+##        if options.retag_release:
+##            svn_remove_tag( tag_url, 'Overwriting previous tag' )
+##        else:
+##            print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
+##            sys.exit( 1 )
+##    svn_tag_sandbox( tag_url, 'Release ' + release_version )
+##    print 'Generated doxygen document...'
+##    doxybuild.build_doc( options, make_release=True )
+    svn_export( tag_url, 'dist/distcheck' )
     #@todo:
-    # svn export
+    # fix-eol
     # source tarball
     # decompress source tarball
     # ?compile & run & check