Release 0.5.0

Baptiste Lepilleur 2010-02-23 20:41:23 +00:00
commit 64cc86bf5f
8 changed files with 1166 additions and 764 deletions

devtools/antglob.py Normal file

@@ -0,0 +1,201 @@
#!/usr/bin/env python
# encoding: utf-8
# Baptiste Lepilleur, 2009
from dircache import listdir
import re
import fnmatch
import os.path
# These fnmatch expressions are used by default to prune the directory tree
# while doing the recursive traversal in the glob_impl method of glob function.
prune_dirs = '.git .bzr .hg .svn _MTN _darcs CVS SCCS '
# These fnmatch expressions are used by default to exclude files and dirs
# while doing the recursive traversal in the glob_impl method of glob function.
##exclude_pats = prune_pats + '*~ #*# .#* %*% ._* .gitignore .cvsignore vssver.scc .DS_Store'.split()
# These ant_glob expressions are used by default to exclude files and dirs and also prune the directory tree
# while doing the recursive traversal in the glob_impl method of glob function.
default_excludes = '''
**/*~
**/#*#
**/.#*
**/%*%
**/._*
**/CVS
**/CVS/**
**/.cvsignore
**/SCCS
**/SCCS/**
**/vssver.scc
**/.svn
**/.svn/**
**/.git
**/.git/**
**/.gitignore
**/.bzr
**/.bzr/**
**/.hg
**/.hg/**
**/_MTN
**/_MTN/**
**/_darcs
**/_darcs/**
**/.DS_Store '''
DIR = 1
FILE = 2
DIR_LINK = 4
FILE_LINK = 8
LINKS = DIR_LINK | FILE_LINK
ALL_NO_LINK = DIR | FILE
ALL = DIR | FILE | LINKS
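
# Note (descriptive, not in the original file): the constants above form a
# bitmask for the entry_type parameter of glob() below. They can be OR-ed
# together, e.g. entry_type = DIR | FILE keeps plain directories and files,
# while ALL also keeps symbolic links.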
_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )

def ant_pattern_to_re( ant_pattern ):
    """Generates a regular expression from the ant pattern.
    Matching convention:
    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
    *.py: match 'script.py' but not 'a/script.py'
    """
    rex = ['^']
    next_pos = 0
    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
##    print 'Converting', ant_pattern
    for match in _ANT_RE.finditer( ant_pattern ):
##        print 'Matched', match.group()
##        print match.start(0), next_pos
        if match.start(0) != next_pos:
            raise ValueError( "Invalid ant pattern" )
        if match.group(1): # /**/
            rex.append( sep_rex + '(?:.*%s)?' % sep_rex )
        elif match.group(2): # **/
            rex.append( '(?:.*%s)?' % sep_rex )
        elif match.group(3): # /**
            rex.append( sep_rex + '.*' )
        elif match.group(4): # *
            rex.append( '[^/%s]*' % re.escape(os.path.sep) )
        elif match.group(5): # /
            rex.append( sep_rex )
        else: # somepath
            rex.append( re.escape(match.group(6)) )
        next_pos = match.end()
    rex.append('$')
    return re.compile( ''.join( rex ) )
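
# Usage sketch (illustrative, not part of the original file): the generated
# regex is anchored at both ends, so a pattern must match the whole path.
#   rex = ant_pattern_to_re( 'src/**/*.py' )
#   rex.match( 'src/tool/run.py' )    # matches
#   rex.match( 'tool/src/run.py' )    # no match: path must start with 'src'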

def _as_list( l ):
    if isinstance(l, basestring):
        return l.split()
    return l

def glob(dir_path,
         includes = '**/*',
         excludes = default_excludes,
         entry_type = FILE,
         prune_dirs = prune_dirs,
         max_depth = 25):
    include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
    dir_path = dir_path.replace('/',os.path.sep)
    entry_type_filter = entry_type
    def is_pruned_dir( dir_name ):
        for pattern in prune_dirs:
            if fnmatch.fnmatch( dir_name, pattern ):
                return True
        return False
    def apply_filter( full_path, filter_rexs ):
        """Returns True if at least one of the filter regular expressions matches full_path."""
        for rex in filter_rexs:
            if rex.match( full_path ):
                return True
        return False
    def glob_impl( root_dir_path ):
        child_dirs = [root_dir_path]
        while child_dirs:
            dir_path = child_dirs.pop()
            for entry in listdir( dir_path ):
                full_path = os.path.join( dir_path, entry )
##                print 'Testing:', full_path,
                is_dir = os.path.isdir( full_path )
                if is_dir and not is_pruned_dir( entry ): # explore child directory ?
##                    print '===> marked for recursion',
                    child_dirs.append( full_path )
                included = apply_filter( full_path, include_filter )
                rejected = apply_filter( full_path, exclude_filter )
                if not included or rejected: # do not include entry ?
##                    print '=> not included or rejected'
                    continue
                link = os.path.islink( full_path )
                is_file = os.path.isfile( full_path )
                if not is_file and not is_dir:
##                    print '=> unknown entry type'
                    continue
                if link:
                    entry_type = is_file and FILE_LINK or DIR_LINK
                else:
                    entry_type = is_file and FILE or DIR
##                print '=> type: %d' % entry_type,
                if (entry_type & entry_type_filter) != 0:
##                    print '  => KEEP'
                    yield os.path.join( dir_path, entry )
##                else:
##                    print '  => TYPE REJECTED'
    return list( glob_impl( dir_path ) )
if __name__ == "__main__":
import unittest
class AntPatternToRETest(unittest.TestCase):
## def test_conversion( self ):
## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )
def test_matching( self ):
test_cases = [ ( 'path',
['path'],
['somepath', 'pathsuffix', '/path', '/path'] ),
( '*.py',
['source.py', 'source.ext.py', '.py'],
['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
( '**/path',
['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
( 'path/**',
['path/a', 'path/path/a', 'path//'],
['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
( '/**/path',
['/path', '/a/path', '/a/b/path/path', '/path/path'],
['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
( 'a/b',
['a/b'],
['somea/b', 'a/bsuffix', 'a/b/c'] ),
( '**/*.py',
['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
['script.pyc', 'script.pyo', 'a.py/b'] ),
( 'src/**/*.py',
['src/a.py', 'src/dir/a.py'],
['a/src/a.py', '/src/a.py'] ),
]
for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
def local_path( paths ):
return [ p.replace('/',os.path.sep) for p in paths ]
test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
for ant_pattern, accepted_matches, rejected_matches in test_cases:
rex = ant_pattern_to_re( ant_pattern )
print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
for accepted_match in accepted_matches:
print 'Accepted?:', accepted_match
self.assert_( rex.match( accepted_match ) is not None )
for rejected_match in rejected_matches:
print 'Rejected?:', rejected_match
self.assert_( rex.match( rejected_match ) is None )
unittest.main()
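
For context, a minimal glob() call might look like the following sketch
(hypothetical paths; assumes the module is importable as devtools.antglob,
as makerelease.py below does):

from devtools import antglob

# Find all C++ sources below 'src'; VCS directories and editor backup
# files are pruned and excluded by the defaults above.
cpp_sources = antglob.glob( 'src', includes = '**/*.cpp **/*.h **/*.inl' )
for path in cpp_sources:
    print path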

devtools/fixeol.py Normal file

@@ -0,0 +1,63 @@
import os.path
import sys

def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True
##
##
##
##def _do_fix( is_dry_run = True ):
##    from waftools import antglob
##    python_sources = antglob.glob( '.',
##        includes = '**/*.py **/wscript **/wscript_build',
##        excludes = antglob.default_excludes + './waf.py',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in python_sources:
##        _fix_python_source( path, is_dry_run )
##
##    cpp_sources = antglob.glob( '.',
##        includes = '**/*.cpp **/*.h **/*.inl',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in cpp_sources:
##        _fix_source_eol( path, is_dry_run )
##
##
##def dry_fix(context):
##    _do_fix( is_dry_run = True )
##
##def fix(context):
##    _do_fix( is_dry_run = False )
##
##def shutdown():
##    pass
##
##def check(context):
##    # Unit tests are run when "check" target is used
##    ut = UnitTest.unit_test()
##    ut.change_to_testfile_dir = True
##    ut.want_to_see_test_output = True
##    ut.want_to_see_test_error = True
##    ut.run()
##    ut.print_results()
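
A minimal usage sketch for fix_source_eol (hypothetical file name; dry-run
first to report, then rewrite in place):

from devtools import fixeol

# Report whether the file needs fixing, without touching it...
fixeol.fix_source_eol( 'README.txt', is_dry_run = True, eol = '\n' )
# ...then actually rewrite it with unix EOLs.
fixeol.fix_source_eol( 'README.txt', is_dry_run = False, eol = '\n' )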

doxybuild.py

@@ -6,47 +6,7 @@ import os
 import os.path
 import sys
 import shutil
-import gzip
-import tarfile
+from devtools import tarball
-
-TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
-
-def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
-    """Parameters:
-    tarball_path: output path of the .tar.gz file
-    sources: list of sources to include in the tarball, relative to the current directory
-    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
-        from path in the tarball.
-    prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
-        to make them child of root.
-    """
-    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
-    def archive_name( path ):
-        """Makes path relative to base_dir."""
-        path = os.path.normpath( os.path.abspath( path ) )
-        common_path = os.path.commonprefix( (base_dir, path) )
-        archive_name = path[len(common_path):]
-        if os.path.isabs( archive_name ):
-            archive_name = archive_name[1:]
-        return os.path.join( prefix_dir, archive_name )
-    def visit(tar, dirname, names):
-        for name in names:
-            path = os.path.join(dirname, name)
-            if os.path.isfile(path):
-                path_in_tar = archive_name(path)
-                tar.add(path, path_in_tar )
-    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
-    fileobj = gzip.GzipFile( tarball_path, 'wb', compression )
-    tar = tarfile.TarFile(os.path.splitext(tarball_path)[0], 'w', fileobj)
-    for source in sources:
-        source_path = source
-        if os.path.isdir( source ):
-            os.path.walk(source_path, visit, tar)
-        else:
-            path_in_tar = archive_name(source_path)
-            tar.add(source_path, path_in_tar ) # filename, arcname
-    tar.close()
 
 def find_program(filename):
     """find a program in folders path_lst, and sets env[var]

@@ -171,7 +131,7 @@ def build_doc( options, make_release=False ):
         'version'
         ]
     tarball_basedir = os.path.join( full_output_dir, html_output_dirname )
-    make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )
+    tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )
 
 def main():
     usage = """%prog

makerelease.py Normal file

@@ -0,0 +1,178 @@
"""Tag the sandbox for release, make source and doc tarballs.
Requires Python 2.6
Example of invocation (use to test the script):
python makerelease.py --force --retag 0.5.0 0.6.0-dev
Example of invocation when doing a release:
python makerelease.py 0.5.0 0.6.0-dev
"""
import os.path
import subprocess
import sys
import doxybuild
import xml.etree.ElementTree as ElementTree
import shutil
from devtools import antglob, fixeol, tarball
SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'

def set_version( version ):
    with open('version','wb') as f:
        f.write( version.strip() )

class SVNError(Exception):
    pass

def svn_command( command, *args ):
    cmd = ['svn', '--non-interactive', command] + list(args)
    print 'Running:', ' '.join( cmd )
    process = subprocess.Popen( cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT )
    stdout = process.communicate()[0]
    if process.returncode:
        error = SVNError( 'SVN command failed:\n' + stdout )
        error.returncode = process.returncode
        raise error
    return stdout

def check_no_pending_commit():
    """Checks that there is no pending commit in the sandbox."""
    stdout = svn_command( 'status', '--xml' )
    etree = ElementTree.fromstring( stdout )
    msg = []
    for entry in etree.getiterator( 'entry' ):
        path = entry.get('path')
        status = entry.find('wc-status').get('item')
        if status != 'unversioned':
            msg.append( 'File "%s" has a pending change (status="%s")' % (path, status) )
    if msg:
        msg.insert(0, 'Pending changes to commit found in the sandbox. Commit them first!' )
    return '\n'.join( msg )
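
# For reference (illustrative output, not from this commit), `svn status --xml`
# wraps entries like the following, which is what the loop above walks:
#   <status><target path=".">
#     <entry path="src/lib_json/json_reader.cpp">
#       <wc-status item="modified"/>
#     </entry>
#   </target></status>
# Any wc-status item other than "unversioned" is reported as pending.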

def svn_join_url( base_url, suffix ):
    if not base_url.endswith('/'):
        base_url += '/'
    if suffix.startswith('/'):
        suffix = suffix[1:]
    return base_url + suffix

def svn_check_if_tag_exist( tag_url ):
    """Checks if a tag exists.
    Returns: True if the tag exists, False otherwise.
    """
    try:
        list_stdout = svn_command( 'list', tag_url )
    except SVNError, e:
        if e.returncode != 1 or tag_url not in str(e):
            raise e
        # otherwise ignore error, meaning tag does not exist
        return False
    return True

def svn_tag_sandbox( tag_url, message ):
    """Makes a tag based on the sandbox revisions."""
    svn_command( 'copy', '-m', message, '.', tag_url )

def svn_remove_tag( tag_url, message ):
    """Removes an existing tag."""
    svn_command( 'delete', '-m', message, tag_url )

def svn_export( tag_url, export_dir ):
    """Exports the tag_url revision to export_dir.
    The target directory, including its parents, is created if it does not exist.
    If the directory export_dir exists, it is deleted before the export proceeds.
    """
    if os.path.isdir( export_dir ):
        shutil.rmtree( export_dir )
    svn_command( 'export', tag_url, export_dir )

def fix_sources_eol( dist_dir ):
    """Set file EOL for tarball distribution."""
    print 'Preparing exported source file EOL for distribution...'
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    win_sources = antglob.glob( dist_dir,
        includes = '**/*.sln **/*.vcproj',
        prune_dirs = prune_dirs )
    unix_sources = antglob.glob( dist_dir,
        includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
        sconscript *.json *.expected AUTHORS LICENSE''',
        excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
        prune_dirs = prune_dirs )
    for path in win_sources:
        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' )
    for path in unix_sources:
        fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )

def main():
    usage = """%prog release_version next_dev_version
Update 'version' file to release_version and commit.
Generates the documentation tarball.
Tags the sandbox revision with release_version.
Update 'version' file to next_dev_version and commit.
Performs an svn export of the tagged release, and builds a source tarball.
Must be started in the project top directory.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'),
        help="""Path to GraphViz dot tool. Must be a fully qualified path. [Default: %default]""")
    parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'),
        help="""Path to Doxygen tool. [Default: %default]""")
    parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False,
        help="""Ignore pending commit. [Default: %default]""")
    parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
        help="""Overwrite the existing release tag if it exists. [Default: %default]""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) < 1:
        parser.error( 'release_version missing on command-line.' )
    release_version = args[0]

    if options.ignore_pending_commit:
        msg = ''
    else:
        msg = check_no_pending_commit()
    if not msg:
        print 'Setting version to', release_version
        set_version( release_version )
        tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
        if svn_check_if_tag_exist( tag_url ):
            if options.retag_release:
                svn_remove_tag( tag_url, 'Overwriting previous tag' )
            else:
                print 'Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url
                sys.exit( 1 )
        svn_tag_sandbox( tag_url, 'Release ' + release_version )
        print 'Generating doxygen documentation...'
        doxybuild.build_doc( options, make_release=True )
        export_dir = 'dist/export'
        svn_export( tag_url, export_dir )
        fix_sources_eol( export_dir )
        source_dir = 'jsoncpp-src-' + release_version
        source_tarball_path = 'dist/%s.tar.gz' % source_dir
        print 'Generating source tarball to', source_tarball_path
        tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
        #@todo:
        #  decompress source tarball
        #  ?compile & run & check
        #  ?upload documentation
    else:
        sys.stderr.write( msg + '\n' )

if __name__ == '__main__':
    main()