Mirror of https://github.com/open-source-parsers/jsoncpp.git (synced 2025-10-15 07:14:45 +02:00)

Comparing commits: svn-releas ... svn-import (96 commits)
Commit SHA1s (author avatars and dates are not preserved in this mirror view):
6764059395
5d32295a6e
68db655347
642befc836
77cd83890d
09439b7bc7
ea0797351f
94d17e9fdf
a3f19c23a0
d2618806ba
36400ac0c1
32ffb931e7
bb53cd0899
4c531bb584
42d918b7aa
700b38020e
7b62ceacee
cb5ae30f6e
58b6541478
1ccfdfcb9b
71860de813
c515b8ec30
5fff185aa4
10712e85d6
53c08ad916
79e90fba0b
ce277aa6e4
eafd702a17
a8afdd40af
f92ace5e82
3f124172ce
f8715856f3
42321f24a6
aff1171153
ae3c7a7aab
f572e8e42e
2b853c4067
7c507d7eba
c3763f55da
3b3540d9ef
9d317c3794
03288e8eb6
c9f91dd929
2ba3bc3252
ac5df77bbc
468564b3fe
dc0f736f59
139da63aef
d496e044b1
f587e6a420
f0b24e705f
e807a7640e
d3cd9a7fc5
a2fb7fb918
4b819c2309
c649badb95
a9eb1eccc0
6ffff91c54
acdefb0869
c025697ea5
b0ec41c3e3
2a2b5cf3ad
b6620e2801
ccde848fd1
e082248001
7b5edd9859
e91a68cb9e
1b138e8544
4f081b50e6
3c9fdeb859
4b79fd1a00
e12d84ebaa
078e0d7c37
fee49b1a37
22eede44c1
d9ec234fc2
3e5b347f75
96408a30e1
1d648f089a
f40c880585
39ba2dbea9
a761530f14
ae9ffb5443
e656c5fa2d
f1053e7acb
e3d0eca9f4
a77a803c85
785ba2675d
3b556ec633
5fb0f09cbb
73911f2e33
d21c256fae
72c406b550
eadc478e50
1837a1c508
e3cc0f004b
.gitignore (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
*.pyc
*.swp

*.actual
*.actual-rewrite
*.process-output
*.rewrite
bin/
buildscons/
libs/
.travis.yml (new file, 23 lines)
@@ -0,0 +1,23 @@
# Build matrix / environment variables are explained on:
# http://about.travis-ci.org/docs/user/build-configuration/
# This file can be validated on:
# http://lint.travis-ci.org/
before_install: sudo apt-get install cmake
language: cpp
compiler:
  - gcc
  - clang
script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make
env:
  global:
    - JSONCPP_CONTINUOUS_INTEGRATION=1
  matrix:
    - SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false
    - SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false
    - SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE_MAKE=true
notifications:
  recipients:
    - baptiste.lepilleur@gmail.com
  email:
    on_success: change
    on_failure: always
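As a quick illustration of how this configuration fans out, Travis crosses the two compilers with the three env matrix rows, giving six build jobs. A small sketch (mine, not part of the repository; names mirror the YAML above):

# Sketch: how the Travis matrix above expands into individual build jobs.
import itertools

compilers = ['gcc', 'clang']
matrix = [
    {'SHARED_LIBRARY': 'ON',  'BUILD_TYPE': 'release', 'VERBOSE_MAKE': 'false'},
    {'SHARED_LIBRARY': 'OFF', 'BUILD_TYPE': 'release', 'VERBOSE_MAKE': 'false'},
    {'SHARED_LIBRARY': 'OFF', 'BUILD_TYPE': 'debug',   'VERBOSE_MAKE': 'true'},
]
for compiler, env in itertools.product(compilers, matrix):
    print('%s: %s' % (compiler, env))   # 2 x 3 = 6 build jobs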
CMakeLists.txt (new file, 90 lines)
@@ -0,0 +1,90 @@
CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
PROJECT(jsoncpp)
ENABLE_TESTING()

OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON)
OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)

# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
IF(NOT WIN32)
    IF(NOT CMAKE_BUILD_TYPE)
        SET(CMAKE_BUILD_TYPE Release CACHE STRING
            "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage."
            FORCE)
    ENDIF(NOT CMAKE_BUILD_TYPE)
ENDIF(NOT WIN32)

# This ensures shared DLLs are in the same directory as the executables on Windows.
# Put all executables / libraries in a project-global directory.
SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
    CACHE PATH "Single directory for all static libraries.")
SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
    CACHE PATH "Single directory for all dynamic libraries on Unix.")
SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin
    CACHE PATH "Single directory for all executables and dynamic libraries on Windows.")
MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY )

# Set variable named ${VAR_NAME} to value ${VALUE}
FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
    SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
ENDFUNCTION(set_using_dynamic_name)

# Extract major, minor, patch and qualifier from version text.
# Parses a version string "X.Y.Z[-qualifier]" and writes the version parts
# to ${OUPUT_PREFIX}_MAJOR, _MINOR, _PATCH, _QUALIFIER.
# If parsing succeeds, then ${OUPUT_PREFIX}_FOUND is TRUE.
MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
    SET(VERSION_REGEX "[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9_]+)?")
    IF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
        STRING(REGEX MATCHALL "[0-9]+|-([A-Za-z0-9_]+)" VERSION_PARTS ${VERSION_TEXT})
        list(APPEND VERSION_PARTS "") # empty qualifier to handle the no-qualifier case
        LIST(GET VERSION_PARTS 0 ${OUPUT_PREFIX}_MAJOR)
        LIST(GET VERSION_PARTS 1 ${OUPUT_PREFIX}_MINOR)
        LIST(GET VERSION_PARTS 2 ${OUPUT_PREFIX}_PATCH)
        LIST(GET VERSION_PARTS 3 ${OUPUT_PREFIX}_QUALIFIER)
        set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE )
    ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
        set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE )
    ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
ENDMACRO(jsoncpp_parse_version)

# Read out version from "version" file
FILE(STRINGS "version" JSONCPP_VERSION)

jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
IF(NOT JSONCPP_VERSION_FOUND)
    MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z[-qualifier]")
ENDIF(NOT JSONCPP_VERSION_FOUND)

MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}${JSONCPP_VERSION_QUALIFIER}")
# File version.h is only regenerated on the CMake configure step
CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
                "${PROJECT_SOURCE_DIR}/include/json/version.h" )

macro(UseCompilationWarningAsError)
    if ( MSVC )
        # Only enabled in debug because some old versions of the VS STL generate
        # warnings when compiled in release configuration.
        set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ")
    endif( MSVC )
endmacro()

# Include our configuration header
INCLUDE_DIRECTORIES( ${CMAKE_SOURCE_DIR}/include )

if ( MSVC )
    # Only enabled in debug because some old versions of the VS STL generate
    # unreachable code warnings when compiled in release configuration.
    set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
endif( MSVC )

IF(JSONCPP_WITH_WARNING_AS_ERROR)
    UseCompilationWarningAsError()
ENDIF(JSONCPP_WITH_WARNING_AS_ERROR)

# Build the different applications
ADD_SUBDIRECTORY( src )

# Install the includes
ADD_SUBDIRECTORY( include )
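The jsoncpp_parse_version macro above is essentially a regular-expression split. A minimal Python sketch of the same parsing (the pattern mirrors the macro's VERSION_REGEX; the function name is mine, for illustration only):

# Sketch only: mirrors jsoncpp_parse_version's regex logic in Python.
import re

def parse_version(text):
    """Parse 'X.Y.Z[-qualifier]'; returns (major, minor, patch, qualifier) or None."""
    m = re.match(r'^([0-9]+)\.([0-9]+)\.([0-9]+)(-[A-Za-z0-9_]+)?$', text)
    if not m:
        return None          # corresponds to ${OUPUT_PREFIX}_FOUND = FALSE
    major, minor, patch, qualifier = m.groups()
    return int(major), int(minor), int(patch), qualifier or ''

print(parse_version('0.6.0-rc2'))   # (0, 6, 0, '-rc2')
print(parse_version('1.2.3'))       # (1, 2, 3, '')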
NEWS.txt (83 lines changed)
@@ -1,3 +1,69 @@
+New in SVN
+----------
+
+* Updated the type system's behavior, in order to better support backwards
+  compatibility with code that was written before 64-bit integer support was
+  introduced. Here's how it works now:
+
+  * isInt, isInt64, isUInt, and isUInt64 return true if and only if the
+    value can be exactly represented as that type. In particular, a value
+    constructed with a double like 17.0 will now return true for all of
+    these methods.
+
+  * isDouble and isFloat now return true for all numeric values, since all
+    numeric values can be converted to a double or float without
+    truncation. Note however that the conversion may not be exact -- for
+    example, doubles cannot exactly represent all integers above 2^53 + 1.
+
+  * isBool, isNull, isString, isArray, and isObject now return true if and
+    only if the value is of that type.
+
+  * isConvertibleTo(fooValue) indicates that it is safe to call asFoo.
+    (For each type foo, isFoo always implies isConvertibleTo(fooValue).)
+    asFoo returns an approximate or exact representation as appropriate.
+    For example, a double value may be truncated when asInt is called.
+
+  * For backwards compatibility with old code, isConvertibleTo(intValue)
+    may return false even if type() == intValue. This is because the value
+    may have been constructed with a 64-bit integer larger than maxInt,
+    and calling asInt() would cause an exception. If you're writing new
+    code, use isInt64 to find out whether the value is exactly
+    representable using an Int64, or asDouble() combined with minInt64 and
+    maxInt64 to figure out whether it is approximately representable.
+
+* Value
+  - Patch #10: BOOST_FOREACH compatibility. Made Json::iterator more
+    standard compliant, added missing iterator_category and value_type
+    typedefs (contributed by Robert A. Iannucci).
+
+* Compilation
+
+  - New CMake based build system. Based in part on contributions from
+    Igor Okulist and Damien Buhl (Patch #14).
+
+  - New header json/version.h now contains version number macros
+    (JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH
+    and JSONCPP_VERSION_HEXA).
+
+  - Patch #11: added missing JSON_API on some classes causing link issues
+    when building as a dynamic library on Windows
+    (contributed by Francis Bolduc).
+
+  - Visual Studio DLL: suppressed warning "C4251: <data member>: <type>
+    needs to have dll-interface to be used by..." via pragma push/pop
+    in json-cpp headers.
+
+  - Added Travis CI integration: https://travis-ci.org/blep/jsoncpp-mirror
+
+* Bug fixes
+  - Patch #15: Copy constructor does not initialize allocated_ for stringValue
+    (contributed by rmongia).
+
+  - Patch #16: Missing field copy in Json::Value::iterator causing an infinite
+    loop when using the experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP)
+    (contributed by Ming-Lin Kao).
+
+
 New in JsonCpp 0.6.0:
 ---------------------
 
@@ -13,8 +79,8 @@
   Notes: you need to setup the environment by running vcvars32.bat
   (e.g. MSVC 2008 command prompt in start menu) before running scons.
 
-- Added support for amalgated source and header generation (a la sqlite).
-  Refer to README.txt section "Generating amalgated source and header"
+- Added support for amalgamated source and header generation (a la sqlite).
+  Refer to README.txt section "Generating amalgamated source and header"
   for detail.
 
 * Value
 
@@ -88,7 +154,18 @@
   length of 32 characters.
 
 - Fixed Value::operator <= implementation (had the semantic of operator >=).
-  Found when addigin unit tests for comparison operators.
+  Found when adding unit tests for comparison operators.
 
+- Value::compare() is now const and has an actual implementation with
+  unit tests.
+
+- Bug #2407932: strpbrk() can fail for NULL pointer.
+
+- Bug #3306345: Fixed minor typo in Path::resolve().
+
+- Bug #3314841/#3306896: errors in amalgamate.py
+
+- Fixed some Coverity warnings and line-endings.
 
 * License
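The "exactly representable" rule in the type-system notes above is mechanical. A small Python sketch (mine, not JsonCpp code) models the described semantics for isInt and isInt64, assuming the usual 32- and 64-bit two's-complement limits for minInt/maxInt and minInt64/maxInt64:

# Models the documented predicate semantics; illustrative only, not JsonCpp code.
MIN_INT,   MAX_INT   = -2**31, 2**31 - 1    # assumed 32-bit Int bounds (minInt/maxInt)
MIN_INT64, MAX_INT64 = -2**63, 2**63 - 1    # assumed 64-bit Int64 bounds

def is_exactly_representable(value, lo, hi):
    # True iff the numeric value has no fractional part and fits the range.
    return value == int(value) and lo <= value <= hi

print(is_exactly_representable(17.0,  MIN_INT, MAX_INT))      # True: isInt would be true
print(is_exactly_representable(2.5,   MIN_INT, MAX_INT))      # False: fractional part
print(is_exactly_representable(2**40, MIN_INT, MAX_INT))      # False: larger than maxInt...
print(is_exactly_representable(2**40, MIN_INT64, MAX_INT64))  # ...but isInt64 would be true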
README.txt (88 lines changed)
@@ -13,9 +13,66 @@ making it a convenient format to store user input files.
 
 Unserialization parsing is user friendly and provides precise error reports.
 
-* Building/Testing:
-  =================
+* Using json-cpp in your project:
+  ===============================
+
+The recommended approach to integrate json-cpp in your project is to
+build the amalgamated source (a single .cpp file) with your own build
+system. This ensures consistent compilation flags and ABI compatibility.
+
+See the section "Generating amalgamated source and header" to generate them
+from the source distribution.
+
+Directory include/ should be added to your compiler include path.
+json-cpp headers should be included as follows:
+
+  #include <json/json.h>
+
+If json-cpp was built as a dynamic library on Windows, then your project
+needs to define the macro "JSON_DLL" so that JSON_API imports the
+exported symbols.
+
+* Building/Testing with the new CMake build system:
+  =================================================
+
+CMake is a C++ Makefiles/Solution generator that can be downloaded from:
+  http://www.cmake.org
+
+It is usually available on most Linux systems as a package. On Ubuntu:
+  sudo apt-get install cmake
+
+Note that Python is also required to run the JSON reader/writer tests. If
+missing, the build will skip running those tests.
+
+When running CMake, a few parameters are required:
+- a build directory where the makefiles/solution are generated. It is
+  also used to store object files, libraries and executables.
+- the generator to use: makefiles or Visual Studio solution? Which version
+  of Visual Studio, 32- or 64-bit solution?
+
+Generating the solution/makefiles using cmake-gui:
+- Make "source code" point to the source directory.
+- Make "where to build the binary" point to the directory to use for
+  the build.
+- Click the "Grouped" check box.
+- Review the JsonCpp build options (tick JSONCPP_LIB_BUILD_SHARED to build
+  as a dynamic library).
+- Click the configure button at the bottom, then the generate button.
+- The generated solution/makefiles can be found in the binary directory.
+
+Alternatively, from the command-line on Unix in the source directory:
+
+  mkdir -p ../build/debug
+  cd ../build/debug
+  cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../../jsoncpp-src
+  make
+
+Running "cmake -h" will display the list of available generators (passed via the -G option).
+
+By default CMake hides the compilation command lines. This can be changed by
+specifying -DCMAKE_VERBOSE_MAKEFILE=true when generating the makefiles.
+
+* Building/Testing with the legacy build system based on SCons:
+  =============================================================
 
 JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires
 python to be installed (http://www.python.org).
 
@@ -47,7 +104,6 @@ to do so.
 
 and TARGET may be:
   check: build library and run unit tests.
-
 
 * Running the test manually:
   ==========================
 
@@ -90,39 +146,30 @@ Notes that the documentation is also available for download as a tarball.
 The documentation of the latest release is available online at:
   http://jsoncpp.sourceforge.net/
 
-* Generating amalgated source and header
-  ======================================
+* Generating amalgamated source and header
+  ========================================
 
 JsonCpp is provided with a script to generate a single header and a single
 source file to ease inclusion in an existing project.
 
-The amalgated source can be generated at any time by running the following
+The amalgamated source can be generated at any time by running the following
 command from the top-directory (requires python 2.6):
 
-  python amalgate.py
+  python amalgamate.py
 
 It is possible to specify the header name. See the -h option for details. By
 default, the following files are generated:
 - dist/jsoncpp.cpp: source file that needs to be added to your project
 - dist/json/json.h: header file to use in your project. It is
-  equivalent to including json/json.h in non-amalgated source. This header
+  equivalent to including json/json.h in non-amalgamated source. This header
   only depends on standard headers.
 - dist/json/json-forwards.h: header that provides forward declarations
   of all JsonCpp types. This is typically what should be included in headers
   to speed up compilation.
 
-The amalgated sources are generated by concatenating JsonCpp source in the
-correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of
-other headers.
-
-* Using json-cpp in your project:
-  ===============================
-
-include/ should be added to your compiler include path. jsoncpp headers
-should be included as follow:
-
-  #include <json/json.h>
+The amalgamated sources are generated by concatenating JsonCpp source in the
+correct order and defining the macro JSON_IS_AMALGAMATION to prevent inclusion
+of other headers.
 
 * Adding a reader/writer test:
   ============================
 
@@ -170,3 +217,4 @@ test_complex_01.process-output: jsontest.exe output, typically useful to
 
 See file LICENSE for details. Basically JsonCpp is licensed under
 MIT license, or public domain if desired and recognized in your jurisdiction.
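For completeness, a hedged sketch of driving the amalgamation step from Python instead of the command line. Importing the script as a module named amalgamate is my assumption, not a documented interface; the keyword arguments simply mirror the function signature in the amalgamate.py diff below:

# Hypothetical programmatic use of the amalgamation script (assumes it is
# importable from the json-cpp top directory as module 'amalgamate').
import amalgamate

amalgamate.amalgamate_source( source_top_dir='.',
                              target_source_path='dist/jsoncpp.cpp',
                              header_include_path='json/json.h' )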
amalgamate.py
@@ -1,147 +1,150 @@
 """Amalgate json-cpp library sources into a single source and header file.
 
 Requires Python 2.6
 
 Example of invocation (must be invoked from json-cpp top directory):
 python amalgate.py
 """
 import os
 import os.path
 import sys
 
-class AmalagatedFile:
+class AmalgamationFile:
     def __init__( self, top_dir ):
         self.top_dir = top_dir
         self.blocks = []
 
     def add_text( self, text ):
         if not text.endswith( '\n' ):
             text += '\n'
         self.blocks.append( text )
 
     def add_file( self, relative_input_path, wrap_in_comment=False ):
         def add_marker( prefix ):
             self.add_text( '' )
             self.add_text( '// ' + '/'*70 )
             self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) )
             self.add_text( '// ' + '/'*70 )
             self.add_text( '' )
         add_marker( 'Beginning' )
         f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' )
         content = f.read()
         if wrap_in_comment:
             content = '/*\n' + content + '\n*/'
         self.add_text( content )
         f.close()
         add_marker( 'End' )
         self.add_text( '\n\n\n\n' )
 
     def get_value( self ):
         return ''.join( self.blocks ).replace('\r\n','\n')
 
     def write_to( self, output_path ):
         output_dir = os.path.dirname( output_path )
         if output_dir and not os.path.isdir( output_dir ):
             os.makedirs( output_dir )
         f = open( output_path, 'wb' )
         f.write( self.get_value() )
         f.close()
 
-def amalgate_source( source_top_dir=None,
-                     target_source_path=None,
-                     header_include_path=None ):
+def amalgamate_source( source_top_dir=None,
+                       target_source_path=None,
+                       header_include_path=None ):
     """Produces amalgated source.
     Parameters:
         source_top_dir: top-directory
         target_source_path: output .cpp path
         header_include_path: generated header path relative to target_source_path.
     """
     print 'Amalgating header...'
-    header = AmalagatedFile( source_top_dir )
+    header = AmalgamationFile( source_top_dir )
     header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' )
     header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
     header.add_file( 'LICENSE', wrap_in_comment=True )
     header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' )
     header.add_text( '# define JSON_AMALGATED_H_INCLUDED' )
     header.add_text( '/// If defined, indicates that the source file is amalgated' )
     header.add_text( '/// to prevent private header inclusion.' )
-    header.add_text( '#define JSON_IS_AMALGATED' )
+    header.add_text( '#define JSON_IS_AMALGAMATION' )
+    header.add_file( 'include/json/version.h' )
     header.add_file( 'include/json/config.h' )
     header.add_file( 'include/json/forwards.h' )
     header.add_file( 'include/json/features.h' )
     header.add_file( 'include/json/value.h' )
     header.add_file( 'include/json/reader.h' )
     header.add_file( 'include/json/writer.h' )
+    header.add_file( 'include/json/assertions.h' )
     header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' )
 
     target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
     print 'Writing amalgated header to %r' % target_header_path
     header.write_to( target_header_path )
 
     base, ext = os.path.splitext( header_include_path )
     forward_header_include_path = base + '-forwards' + ext
     print 'Amalgating forward header...'
-    header = AmalagatedFile( source_top_dir )
+    header = AmalgamationFile( source_top_dir )
     header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' )
     header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path )
     header.add_text( '/// This header provides forward declaration for all JsonCpp types.' )
     header.add_file( 'LICENSE', wrap_in_comment=True )
     header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
     header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' )
     header.add_text( '/// If defined, indicates that the source file is amalgated' )
     header.add_text( '/// to prevent private header inclusion.' )
-    header.add_text( '#define JSON_IS_AMALGATED' )
+    header.add_text( '#define JSON_IS_AMALGAMATION' )
     header.add_file( 'include/json/config.h' )
     header.add_file( 'include/json/forwards.h' )
     header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
 
     target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                                forward_header_include_path )
     print 'Writing amalgated forward header to %r' % target_forward_header_path
     header.write_to( target_forward_header_path )
 
     print 'Amalgating source...'
-    source = AmalagatedFile( source_top_dir )
+    source = AmalgamationFile( source_top_dir )
     source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' )
     source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
     source.add_file( 'LICENSE', wrap_in_comment=True )
     source.add_text( '' )
     source.add_text( '#include <%s>' % header_include_path )
     source.add_text( '' )
-    source.add_file( 'src/lib_json\json_tool.h' )
-    source.add_file( 'src/lib_json\json_reader.cpp' )
-    source.add_file( 'src/lib_json\json_batchallocator.h' )
-    source.add_file( 'src/lib_json\json_valueiterator.inl' )
-    source.add_file( 'src/lib_json\json_value.cpp' )
-    source.add_file( 'src/lib_json\json_writer.cpp' )
+    lib_json = 'src/lib_json'
+    source.add_file( os.path.join(lib_json, 'json_tool.h') )
+    source.add_file( os.path.join(lib_json, 'json_reader.cpp') )
+    source.add_file( os.path.join(lib_json, 'json_batchallocator.h') )
+    source.add_file( os.path.join(lib_json, 'json_valueiterator.inl') )
+    source.add_file( os.path.join(lib_json, 'json_value.cpp') )
+    source.add_file( os.path.join(lib_json, 'json_writer.cpp') )
 
     print 'Writing amalgated source to %r' % target_source_path
     source.write_to( target_source_path )
 
 def main():
     usage = """%prog [options]
 Generate a single amalgated source and header file from the sources.
 """
     from optparse import OptionParser
     parser = OptionParser(usage=usage)
     parser.allow_interspersed_args = False
     parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp',
                       help="""Output .cpp source path. [Default: %default]""")
     parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h',
                       help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
     parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(),
                       help="""Source top-directory. [Default: %default]""")
     parser.enable_interspersed_args()
     options, args = parser.parse_args()
 
-    msg = amalgate_source( source_top_dir=options.top_dir,
-                           target_source_path=options.target_source_path,
-                           header_include_path=options.header_include_path )
+    msg = amalgamate_source( source_top_dir=options.top_dir,
+                             target_source_path=options.target_source_path,
+                             header_include_path=options.header_include_path )
     if msg:
         sys.stderr.write( msg + '\n' )
         sys.exit( 1 )
     else:
         print 'Source succesfully amalagated'
 
 if __name__ == '__main__':
     main()
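A quick hedged usage sketch of the AmalgamationFile class above; it assumes the interpreter is started from the json-cpp top directory, and the output path is made up for illustration:

# Sketch: composing a file with AmalgamationFile as defined above.
f = AmalgamationFile( '.' )                       # top_dir = current directory
f.add_text( '// generated example' )              # raw text block
f.add_file( 'LICENSE', wrap_in_comment=True )     # file content wrapped in /* ... */
f.write_to( 'dist/example.h' )                    # creates dist/ if needed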
devtools/agent_vmw7.json (new file, 33 lines)
@@ -0,0 +1,33 @@
{
    "cmake_variants" : [
        {"name": "generator",
         "generators": [
            {"generator": [
                "Visual Studio 7 .NET 2003",
                "Visual Studio 9 2008",
                "Visual Studio 9 2008 Win64",
                "Visual Studio 10",
                "Visual Studio 10 Win64",
                "Visual Studio 11",
                "Visual Studio 11 Win64"
                ]
            },
            {"generator": ["MinGW Makefiles"],
             "env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
            }
            ]
        },
        {"name": "shared_dll",
         "variables": [
            ["JSONCPP_LIB_BUILD_SHARED=true"],
            ["JSONCPP_LIB_BUILD_SHARED=false"]
            ]
        },
        {"name": "build_type",
         "build_types": [
            "debug",
            "release"
            ]
        }
    ]
}
devtools/agent_vmxp.json (new file, 26 lines)
@@ -0,0 +1,26 @@
{
    "cmake_variants" : [
        {"name": "generator",
         "generators": [
            {"generator": [
                "Visual Studio 6",
                "Visual Studio 7",
                "Visual Studio 8 2005"
                ]
            }
            ]
        },
        {"name": "shared_dll",
         "variables": [
            ["JSONCPP_LIB_BUILD_SHARED=true"],
            ["JSONCPP_LIB_BUILD_SHARED=false"]
            ]
        },
        {"name": "build_type",
         "build_types": [
            "debug",
            "release"
            ]
        }
    ]
}
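These agent configs are axis definitions that batchbuild.py (below) expands into a cross product: agent_vmw7.json yields (7 + 1) generators x 2 shared_dll settings x 2 build types = 32 variants, and agent_vmxp.json yields 3 x 2 x 2 = 12. A quick check of the arithmetic:

# Sketch: counting the variants an agent config expands to (mirrors the
# cross-product behavior of generate_build_variants in batchbuild.py below).
vmw7_generators = 7 + 1   # seven Visual Studio entries plus MinGW Makefiles
shared_dll      = 2       # JSONCPP_LIB_BUILD_SHARED=true / false
build_types     = 2       # debug / release
print(vmw7_generators * shared_dll * build_types)   # 32
print(3 * shared_dll * build_types)                 # 12 for agent_vmxp.json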
devtools/antglob.py
@@ -55,20 +55,20 @@ ALL = DIR | FILE | LINKS

_ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )

def ant_pattern_to_re( ant_pattern ):
    """Generates a regular expression from the ant pattern.
    Matching convention:
    **/a: match 'a', 'dir/a', 'dir1/dir2/a'
    a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
    *.py: match 'script.py' but not 'a/script.py'
    """
    rex = ['^']
    next_pos = 0
    sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
    ## print 'Converting', ant_pattern
    for match in _ANT_RE.finditer( ant_pattern ):
        ## print 'Matched', match.group()
        ## print match.start(0), next_pos
        if match.start(0) != next_pos:
            raise ValueError( "Invalid ant pattern" )
        if match.group(1): # /**/

@@ -83,14 +83,14 @@ def ant_pattern_to_re( ant_pattern ):
            rex.append( sep_rex )
        else: # somepath
            rex.append( re.escape(match.group(6)) )
        next_pos = match.end()
    rex.append('$')
    return re.compile( ''.join( rex ) )

def _as_list( l ):
    if isinstance(l, basestring):
        return l.split()
    return l

def glob(dir_path,
         includes = '**/*',
@@ -99,8 +99,8 @@
         prune_dirs = prune_dirs,
         max_depth = 25):
    include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
    exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
    prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
    dir_path = dir_path.replace('/',os.path.sep)
    entry_type_filter = entry_type

@@ -117,37 +117,37 @@ def glob(dir_path,
            return True
        return False

    def glob_impl( root_dir_path ):
        child_dirs = [root_dir_path]
        while child_dirs:
            dir_path = child_dirs.pop()
            for entry in listdir( dir_path ):
                full_path = os.path.join( dir_path, entry )
                ## print 'Testing:', full_path,
                is_dir = os.path.isdir( full_path )
                if is_dir and not is_pruned_dir( entry ): # explore child directory ?
                    ## print '===> marked for recursion',
                    child_dirs.append( full_path )
                included = apply_filter( full_path, include_filter )
                rejected = apply_filter( full_path, exclude_filter )
                if not included or rejected: # do not include entry ?
                    ## print '=> not included or rejected'
                    continue
                link = os.path.islink( full_path )
                is_file = os.path.isfile( full_path )
                if not is_file and not is_dir:
                    ## print '=> unknown entry type'
                    continue
                if link:
                    entry_type = is_file and FILE_LINK or DIR_LINK
                else:
                    entry_type = is_file and FILE or DIR
                ## print '=> type: %d' % entry_type,
                if (entry_type & entry_type_filter) != 0:
                    ## print ' => KEEP'
                    yield os.path.join( dir_path, entry )
                ## else:
                ##     print ' => TYPE REJECTED'
    return list( glob_impl( dir_path ) )


@@ -155,47 +155,47 @@ if __name__ == "__main__":
    import unittest

    class AntPatternToRETest(unittest.TestCase):
##        def test_conversion( self ):
##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )

        def test_matching( self ):
            test_cases = [ ( 'path',
                             ['path'],
                             ['somepath', 'pathsuffix', '/path', '/path'] ),
                           ( '*.py',
                             ['source.py', 'source.ext.py', '.py'],
                             ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
                           ( '**/path',
                             ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
                             ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
                           ( 'path/**',
                             ['path/a', 'path/path/a', 'path//'],
                             ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
                           ( '/**/path',
                             ['/path', '/a/path', '/a/b/path/path', '/path/path'],
                             ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
                           ( 'a/b',
                             ['a/b'],
                             ['somea/b', 'a/bsuffix', 'a/b/c'] ),
                           ( '**/*.py',
                             ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
                             ['script.pyc', 'script.pyo', 'a.py/b'] ),
                           ( 'src/**/*.py',
                             ['src/a.py', 'src/dir/a.py'],
                             ['a/src/a.py', '/src/a.py'] ),
                           ]
            for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
                def local_path( paths ):
                    return [ p.replace('/',os.path.sep) for p in paths ]
                test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
            for ant_pattern, accepted_matches, rejected_matches in test_cases:
                rex = ant_pattern_to_re( ant_pattern )
                print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
                for accepted_match in accepted_matches:
                    print 'Accepted?:', accepted_match
                    self.assert_( rex.match( accepted_match ) is not None )
                for rejected_match in rejected_matches:
                    print 'Rejected?:', rejected_match
                    self.assert_( rex.match( rejected_match ) is None )

    unittest.main()
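A minimal usage sketch of the pattern converter above; it assumes antglob.py is importable (e.g., run from the devtools/ directory), and the pattern and paths are taken from the module's own docstring and test cases:

# Sketch: exercising ant_pattern_to_re from the module above.
from antglob import ant_pattern_to_re   # assumes devtools/ is on sys.path

rex = ant_pattern_to_re( '**/*.py' )
print(rex.match( 'src/script.py' ) is not None)   # True
print(rex.match( 'script.pyc' ) is not None)      # False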
devtools/batchbuild.py (new file, 280 lines)
@@ -0,0 +1,280 @@
import collections
import itertools
import json
import os
import os.path
import re
import shutil
import string
import subprocess
import sys
import cgi

class BuildDesc:
    def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None):
        self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ]
        self.variables = variables or []
        self.build_type = build_type
        self.generator = generator

    def merged_with( self, build_desc ):
        """Returns a new BuildDesc by merging field content.
        Prefer build_desc fields to self fields for single valued field.
        """
        return BuildDesc( self.prepend_envs + build_desc.prepend_envs,
                          self.variables + build_desc.variables,
                          build_desc.build_type or self.build_type,
                          build_desc.generator or self.generator )

    def env( self ):
        environ = os.environ.copy()
        for values_by_name in self.prepend_envs:
            for var, value in values_by_name.items():
                var = var.upper()
                if type(value) is unicode:
                    value = value.encode( sys.getdefaultencoding() )
                if var in environ:
                    environ[var] = value + os.pathsep + environ[var]
                else:
                    environ[var] = value
        return environ

    def cmake_args( self ):
        args = ["-D%s" % var for var in self.variables]
        # skip build type for Visual Studio solution as it cause warning
        if self.build_type and 'Visual' not in self.generator:
            args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type )
        if self.generator:
            args.extend( ['-G', self.generator] )
        return args

    def __repr__( self ):
        return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type)

class BuildData:
    def __init__( self, desc, work_dir, source_dir ):
        self.desc = desc
        self.work_dir = work_dir
        self.source_dir = source_dir
        self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' )
        self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' )
        self.cmake_succeeded = False
        self.build_succeeded = False

    def execute_build(self):
        print 'Build %s' % self.desc
        self._make_new_work_dir( )
        self.cmake_succeeded = self._generate_makefiles( )
        if self.cmake_succeeded:
            self.build_succeeded = self._build_using_makefiles( )
        return self.build_succeeded

    def _generate_makefiles(self):
        print ' Generating makefiles: ',
        cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
        print 'done' if succeeded else 'FAILED'
        return succeeded

    def _build_using_makefiles(self):
        print ' Building:',
        cmd = ['cmake', '--build', self.work_dir]
        if self.desc.build_type:
            cmd += ['--config', self.desc.build_type]
        succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
        print 'done' if succeeded else 'FAILED'
        return succeeded

    def _execute_build_subprocess(self, cmd, env, log_path):
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
                                    env=env )
        stdout, _ = process.communicate( )
        succeeded = (process.returncode == 0)
        with open( log_path, 'wb' ) as flog:
            log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
            flog.write( fix_eol( log ) )
        return succeeded

    def _make_new_work_dir(self):
        if os.path.isdir( self.work_dir ):
            print ' Removing work directory', self.work_dir
            shutil.rmtree( self.work_dir, ignore_errors=True )
        if not os.path.isdir( self.work_dir ):
            os.makedirs( self.work_dir )

def fix_eol( stdout ):
    """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
    """
    return re.sub( '\r*\n', os.linesep, stdout )

def load_build_variants_from_config( config_path ):
    with open( config_path, 'rb' ) as fconfig:
        data = json.load( fconfig )
    variants = data[ 'cmake_variants' ]
    build_descs_by_axis = collections.defaultdict( list )
    for axis in variants:
        axis_name = axis["name"]
        build_descs = []
        if "generators" in axis:
            for generator_data in axis["generators"]:
                for generator in generator_data["generator"]:
                    build_desc = BuildDesc( generator=generator,
                                            prepend_envs=generator_data.get("env_prepend") )
                    build_descs.append( build_desc )
        elif "variables" in axis:
            for variables in axis["variables"]:
                build_desc = BuildDesc( variables=variables )
                build_descs.append( build_desc )
        elif "build_types" in axis:
            for build_type in axis["build_types"]:
                build_desc = BuildDesc( build_type=build_type )
                build_descs.append( build_desc )
        build_descs_by_axis[axis_name].extend( build_descs )
    return build_descs_by_axis

def generate_build_variants( build_descs_by_axis ):
    """Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
    axis_names = build_descs_by_axis.keys()
    build_descs = []
    for axis_name, axis_build_descs in build_descs_by_axis.items():
        if len(build_descs):
            # for each existing build_desc and each axis build desc, create a new build_desc
            new_build_descs = []
            for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs):
                new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) )
            build_descs = new_build_descs
        else:
            build_descs = axis_build_descs
    return build_descs

HTML_TEMPLATE = string.Template('''<html>
<head>
    <title>$title</title>
    <style type="text/css">
    td.failed {background-color:#f08080;}
    td.ok {background-color:#c0eec0;}
    </style>
</head>
<body>
<table border="1">
    <thead>
        <tr>
            <th>Variables</th>
            $th_vars
        </tr>
        <tr>
            <th>Build type</th>
            $th_build_types
        </tr>
    </thead>
    <tbody>
        $tr_builds
    </tbody>
</table>
</body></html>''')

def generate_html_report( html_report_path, builds ):
    report_dir = os.path.dirname( html_report_path )
    # Vertical axis: generator
    # Horizontal: variables, then build_type
    builds_by_generator = collections.defaultdict( list )
    variables = set()
    build_types_by_variable = collections.defaultdict( set )
    build_by_pos_key = {} # { (generator, var_key, build_type): build }
    for build in builds:
        builds_by_generator[build.desc.generator].append( build )
        var_key = tuple(sorted(build.desc.variables))
        variables.add( var_key )
        build_types_by_variable[var_key].add( build.desc.build_type )
        pos_key = (build.desc.generator, var_key, build.desc.build_type)
        build_by_pos_key[pos_key] = build
    variables = sorted( variables )
    th_vars = []
    th_build_types = []
    for variable in variables:
        build_types = sorted( build_types_by_variable[variable] )
        nb_build_type = len(build_types_by_variable[variable])
        th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) )
        for build_type in build_types:
            th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) )
    tr_builds = []
    for generator in sorted( builds_by_generator ):
        tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ]
        for variable in variables:
            build_types = sorted( build_types_by_variable[variable] )
            for build_type in build_types:
                pos_key = (generator, variable, build_type)
                build = build_by_pos_key.get(pos_key)
                if build:
                    cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
                    build_status = 'ok' if build.build_succeeded else 'FAILED'
                    cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
                    build_log_url = os.path.relpath( build.build_log_path, report_dir )
                    td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
                        build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
                    if build.cmake_succeeded:
                        td += '<br><a href="%s" class="%s">Build: %s</a>' % (
                            build_log_url, build_status.lower(), build_status)
                    td += '</td>'
                else:
                    td = '<td></td>'
                tds.append( td )
        tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
    html = HTML_TEMPLATE.substitute(
        title='Batch build report',
        th_vars=' '.join(th_vars),
        th_build_types=' '.join( th_build_types),
        tr_builds='\n'.join( tr_builds ) )
    with open( html_report_path, 'wt' ) as fhtml:
        fhtml.write( html )
    print 'HTML report generated in:', html_report_path

def main():
    usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
Build a given CMake based project located in SOURCE_DIR with multiple generators/options
as described in CONFIG_JSON_PATH, building in WORK_DIR.

Example of call:
python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = True
#    parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
#        help="""Be verbose.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) < 3:
        parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
    work_dir = args[0]
    source_dir = args[1].rstrip('/\\')
    config_paths = args[2:]
    for config_path in config_paths:
        if not os.path.isfile( config_path ):
            parser.error( "Can not read: %r" % config_path )

    # generate build variants
    build_descs = []
    for config_path in config_paths:
        build_descs_by_axis = load_build_variants_from_config( config_path )
        build_descs.extend( generate_build_variants( build_descs_by_axis ) )
    print 'Build variants (%d):' % len(build_descs)
    # assign build directory for each variant
    if not os.path.isdir( work_dir ):
        os.makedirs( work_dir )
    builds = []
    with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
        for index, build_desc in enumerate( build_descs ):
            build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
            builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
            fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
    for build in builds:
        build.execute_build()
    html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
    generate_html_report( html_report_path, builds )
    print 'Done'


if __name__ == '__main__':
    main()
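A small sketch of the cross-product step above, using toy axis values rather than a real agent config (it reuses BuildDesc and generate_build_variants as defined above, e.g., in an interpreter after importing the module):

# Sketch: generate_build_variants merges one partial BuildDesc per axis
# into every combination (toy data, reusing the definitions above).
axes = {
    'generator':  [BuildDesc(generator='Unix Makefiles')],
    'shared_dll': [BuildDesc(variables=['JSONCPP_LIB_BUILD_SHARED=true']),
                   BuildDesc(variables=['JSONCPP_LIB_BUILD_SHARED=false'])],
    'build_type': [BuildDesc(build_type='debug'), BuildDesc(build_type='release')],
}
for desc in generate_build_variants(axes):
    print(desc)   # 1 x 2 x 2 = 4 merged BuildDesc variants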
@@ -1,63 +1,63 @@
import os.path
import sys


def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True
##
##
##
##def _do_fix( is_dry_run = True ):
##    from waftools import antglob
##    python_sources = antglob.glob( '.',
##        includes = '**/*.py **/wscript **/wscript_build',
##        excludes = antglob.default_excludes + './waf.py',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in python_sources:
##        _fix_python_source( path, is_dry_run )
##
##    cpp_sources = antglob.glob( '.',
##        includes = '**/*.cpp **/*.h **/*.inl',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in cpp_sources:
##        _fix_source_eol( path, is_dry_run )
##
##
##def dry_fix(context):
##    _do_fix( is_dry_run = True )
##
##def fix(context):
##    _do_fix( is_dry_run = False )
##
##def shutdown():
##    pass
##
##def check(context):
##    # Unit tests are run when "check" target is used
##    ut = UnitTest.unit_test()
##    ut.change_to_testfile_dir = True
##    ut.want_to_see_test_output = True
##    ut.want_to_see_test_error = True
##    ut.run()
##    ut.print_results()

@@ -1,93 +1,93 @@
"""Updates the license text in source file.
"""

# An existing license is found if the file starts with the string below,
# and ends with the first blank line.
LICENSE_BEGIN = "// Copyright "

BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

""".replace('\r\n','\n')

def update_license( path, dry_run, show_diff ):
    """Update the license statement in the specified file.
    Parameters:
      path: path of the C++ source file to update.
      dry_run: if True, just print the path of the file that would be updated,
               but don't change it.
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file.
    """
    with open( path, 'rt' ) as fin:
        original_text = fin.read().replace('\r\n','\n')
        newline = fin.newlines and fin.newlines[0] or '\n'
    if not original_text.startswith( LICENSE_BEGIN ):
        # No existing license found => prepend it
        new_text = BRIEF_LICENSE + original_text
    else:
        license_end_index = original_text.index( '\n\n' ) # search first blank line
        new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
    if original_text != new_text:
        if not dry_run:
            with open( path, 'wb' ) as fout:
                fout.write( new_text.replace('\n', newline ) )
        print 'Updated', path
        if show_diff:
            import difflib
            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
                                                   new_text.split('\n') ) )
        return True
    return False

def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
    """Updates license text in C++ source files found in directory source_dirs.
    Parameters:
      source_dirs: list of directories to scan for C++ sources. Directories are
                   scanned recursively.
      dry_run: if True, just print the path of the file that would be updated,
               but don't change it.
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file.
    """
    from devtools import antglob
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    for source_dir in source_dirs:
        cpp_sources = antglob.glob( source_dir,
            includes = '''**/*.h **/*.cpp **/*.inl''',
            prune_dirs = prune_dirs )
        for source in cpp_sources:
            update_license( source, dry_run, show_diff )

def main():
    usage = """%prog DIR [DIR2...]
Updates license text in sources of the project in source files found
in the directory specified on the command-line.

Example of call:
python devtools\licenseupdater.py include src -n --diff
=> Show change that would be made to the sources.

python devtools\licenseupdater.py include src
=> Update license statement on all sources in directories include/ and src/.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
        help="""Only show what files are updated, do not update the files""")
    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
        help="""On update, show change made to the file.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    update_license_in_source_directories( args, options.dry_run, options.show_diff )
    print 'Done'

if __name__ == '__main__':
    import sys
    import os.path
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    main()

"""Updates the license text in source file.
|
||||
"""
|
||||
|
||||
# An existing license is found if the file starts with the string below,
|
||||
# and ends with the first blank line.
|
||||
LICENSE_BEGIN = "// Copyright "
|
||||
|
||||
BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
""".replace('\r\n','\n')
|
||||
|
||||
def update_license( path, dry_run, show_diff ):
|
||||
"""Update the license statement in the specified file.
|
||||
Parameters:
|
||||
path: path of the C++ source file to update.
|
||||
dry_run: if True, just print the path of the file that would be updated,
|
||||
but don't change it.
|
||||
show_diff: if True, print the path of the file that would be modified,
|
||||
as well as the change made to the file.
|
||||
"""
|
||||
with open( path, 'rt' ) as fin:
|
||||
original_text = fin.read().replace('\r\n','\n')
|
||||
newline = fin.newlines and fin.newlines[0] or '\n'
|
||||
if not original_text.startswith( LICENSE_BEGIN ):
|
||||
# No existing license found => prepend it
|
||||
new_text = BRIEF_LICENSE + original_text
|
||||
else:
|
||||
license_end_index = original_text.index( '\n\n' ) # search first blank line
|
||||
new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
|
||||
if original_text != new_text:
|
||||
if not dry_run:
|
||||
with open( path, 'wb' ) as fout:
|
||||
fout.write( new_text.replace('\n', newline ) )
|
||||
print 'Updated', path
|
||||
if show_diff:
|
||||
import difflib
|
||||
print '\n'.join( difflib.unified_diff( original_text.split('\n'),
|
||||
new_text.split('\n') ) )
|
||||
return True
|
||||
return False
|
||||
|
||||
def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
|
||||
"""Updates license text in C++ source files found in directory source_dirs.
|
||||
Parameters:
|
||||
source_dirs: list of directory to scan for C++ sources. Directories are
|
||||
scanned recursively.
|
||||
dry_run: if True, just print the path of the file that would be updated,
|
||||
but don't change it.
|
||||
show_diff: if True, print the path of the file that would be modified,
|
||||
as well as the change made to the file.
|
||||
"""
|
||||
from devtools import antglob
|
||||
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
|
||||
for source_dir in source_dirs:
|
||||
cpp_sources = antglob.glob( source_dir,
|
||||
includes = '''**/*.h **/*.cpp **/*.inl''',
|
||||
prune_dirs = prune_dirs )
|
||||
for source in cpp_sources:
|
||||
update_license( source, dry_run, show_diff )
|
||||
|
||||
def main():
|
||||
usage = """%prog DIR [DIR2...]
|
||||
Updates license text in sources of the project in source files found
|
||||
in the directory specified on the command-line.
|
||||
|
||||
Example of call:
|
||||
python devtools\licenseupdater.py include src -n --diff
|
||||
=> Show change that would be made to the sources.
|
||||
|
||||
python devtools\licenseupdater.py include src
|
||||
=> Update license statement on all sources in directories include/ and src/.
|
||||
"""
|
||||
from optparse import OptionParser
|
||||
parser = OptionParser(usage=usage)
|
||||
parser.allow_interspersed_args = False
|
||||
parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
|
||||
help="""Only show what files are updated, do not update the files""")
|
||||
parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
|
||||
help="""On update, show change made to the file.""")
|
||||
parser.enable_interspersed_args()
|
||||
options, args = parser.parse_args()
|
||||
update_license_in_source_directories( args, options.dry_run, options.show_diff )
|
||||
print 'Done'
|
||||
|
||||
if __name__ == '__main__':
|
||||
import sys
|
||||
import os.path
|
||||
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
||||
main()
|
||||
|
||||
|
@@ -1,53 +1,53 @@
import os.path
import gzip
import tarfile

TARGZ_DEFAULT_COMPRESSION_LEVEL = 9

def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
    """Parameters:
    tarball_path: output path of the .tar.gz file
    sources: list of sources to include in the tarball, relative to the current directory
    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
        from the path in the tarball.
    prefix_dir: all files stored in the tarball will be placed in a sub-directory of
        prefix_dir. Set to '' to make them children of the root.
    """
    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
    def archive_name( path ):
        """Makes path relative to base_dir."""
        path = os.path.normpath( os.path.abspath( path ) )
        common_path = os.path.commonprefix( (base_dir, path) )
        archive_name = path[len(common_path):]
        if os.path.isabs( archive_name ):
            archive_name = archive_name[1:]
        return os.path.join( prefix_dir, archive_name )
    def visit(tar, dirname, names):
        for name in names:
            path = os.path.join(dirname, name)
            if os.path.isfile(path):
                path_in_tar = archive_name(path)
                tar.add(path, path_in_tar )
    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
    tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
    try:
        for source in sources:
            source_path = source
            if os.path.isdir( source ):
                os.path.walk(source_path, visit, tar)
            else:
                path_in_tar = archive_name(source_path)
                tar.add(source_path, path_in_tar ) # filename, arcname
    finally:
        tar.close()

def decompress( tarball_path, base_dir ):
    """Decompress the gzipped tarball into directory base_dir.
    """
    # !!! This class method is not documented in the online doc
    # nor is bz2open!
    tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
    try:
        tar.extractall( base_dir )
    finally:
        tar.close()
@@ -2,6 +2,7 @@
\section ms_release Makes JsonCpp ready for release
- Build system clean-up:
  - Fix build on Windows (shared-library build is broken)
  - Compile and run tests using shared library on Windows to ensure no JSON_API macro is missing.
  - Add enable/disable flag for static and shared library build
  - Enhance help
- Platform portability check: (Notes: was ok on last check)
2	include/CMakeLists.txt	Normal file
@@ -0,0 +1,2 @@
FILE(GLOB INCLUDE_FILES "json/*.h")
INSTALL(FILES ${INCLUDE_FILES} DESTINATION include/json)
32	include/json/assertions.h	Normal file
@@ -0,0 +1,32 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED
# define CPPTL_JSON_ASSERTIONS_H_INCLUDED

#include <stdlib.h>

#if !defined(JSON_IS_AMALGAMATION)
# include <json/config.h>
#endif // if !defined(JSON_IS_AMALGAMATION)

#if JSON_USE_EXCEPTION
# include <stdexcept>
#define JSON_ASSERT( condition ) assert( condition );  // @todo <= change this into an exception throw
#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message );
#else // JSON_USE_EXCEPTION
#define JSON_ASSERT( condition ) assert( condition );

// The call to assert() will show the failure message in debug builds. In
// release builds we write to invalid memory in order to crash hard, so that a
// debugger or crash reporter gets the chance to take over. We still call exit()
// afterward in order to tell the compiler that this macro doesn't return.
#define JSON_FAIL_MESSAGE( message ) { assert(false && message); strcpy(reinterpret_cast<char*>(666), message); exit(123); }

#endif

#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) { JSON_FAIL_MESSAGE( message ) }

#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
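The JSON_ASSERT_MESSAGE macro added above is the single hook the library uses to guard invalid type manipulation. A minimal sketch of how a failed check behaves (the requirePositive helper is our own illustration, not part of the header):

#include <json/assertions.h>

// Illustration only: with JSON_USE_EXCEPTION=1 a failed condition throws
// std::runtime_error; with JSON_USE_EXCEPTION=0 it asserts and then crashes
// hard on purpose so a debugger or crash reporter can take over.
static int requirePositive( int value )
{
   JSON_ASSERT_MESSAGE( value > 0, "value must be positive" );
   return value;
}
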
@@ -24,14 +24,16 @@
/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1

/// If defined, indicates that Json use exception to report invalid type manipulation
/// instead of C assert macro.
// If non-zero, the library uses exceptions to report bad input instead of C
// assertion macros. The default is to use exceptions.
# ifndef JSON_USE_EXCEPTION
# define JSON_USE_EXCEPTION 1
# endif

/// If defined, indicates that the source file is amalgated
/// to prevent private header inclusion.
/// Remarks: it is automatically defined in the generated amalgated header.
// #define JSON_IS_AMALGATED
// #define JSON_IS_AMALGAMATION


# ifdef JSON_IN_CPPTL
@@ -44,10 +46,17 @@
# ifdef JSON_IN_CPPTL
# define JSON_API CPPTL_API
# elif defined(JSON_DLL_BUILD)
# define JSON_API __declspec(dllexport)
# if defined(_MSC_VER)
# define JSON_API __declspec(dllexport)
# define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
# endif // if defined(_MSC_VER)
# elif defined(JSON_DLL)
# define JSON_API __declspec(dllimport)
# else
# if defined(_MSC_VER)
# define JSON_API __declspec(dllimport)
# define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
# endif // if defined(_MSC_VER)
# endif // ifdef JSON_IN_CPPTL
# if !defined(JSON_API)
# define JSON_API
# endif

@@ -59,6 +68,9 @@
// Microsoft Visual Studio 6 only support conversion from __int64 to double
// (no conversion from unsigned __int64).
#define JSON_USE_INT64_DOUBLE_CONVERSION 1
// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255' characters in the debug information)
// All projects I've ever seen with VS6 were using this globally (not bothering with pragma push/pop).
#pragma warning(disable : 4786)
#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6

#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
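In practice the export logic above splits three ways: the library itself is built with JSON_DLL_BUILD defined, Windows clients of the shared library define JSON_DLL, and everyone else gets an empty JSON_API. A hedged sketch of a client translation unit (assumed MSVC, linking against the DLL):

// Defining JSON_DLL before any jsoncpp header makes JSON_API expand to
// __declspec(dllimport) under MSVC; static-library users define neither macro.
#define JSON_DLL
#include <json/value.h>

int main()
{
   Json::Value v( "imported across the DLL boundary" );
   return v.asString().empty() ? 1 : 0;
}
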
@@ -6,9 +6,9 @@
#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
# define CPPTL_JSON_FEATURES_H_INCLUDED

#if !defined(JSON_IS_AMALGATED)
#if !defined(JSON_IS_AMALGAMATION)
# include "forwards.h"
#endif // if !defined(JSON_IS_AMALGATED)
#endif // if !defined(JSON_IS_AMALGAMATION)

namespace Json {

@@ -42,6 +42,12 @@ namespace Json {

      /// \c true if root must be either an array or an object value. Default: \c false.
      bool strictRoot_;

      /// \c true if dropped null placeholders are allowed. Default: \c false.
      bool allowDroppedNullPlaceholders_;

      /// \c true if numeric object keys are allowed. Default: \c false.
      bool allowNumericKeys_;
   };

} // namespace Json

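Both new flags are plain public members, so they can be toggled on top of a preset. A small sketch of one combination (our own, not from the patch): start from strictMode() and opt back in to numeric object keys.

#include <json/reader.h>

Json::Reader makeStrictReaderWithNumericKeys()
{
   Json::Features features = Json::Features::strictMode();
   features.allowNumericKeys_ = true;   // accept e.g. {1:"one"} style keys
   return Json::Reader( features );
}
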
@@ -6,9 +6,9 @@
#ifndef JSON_FORWARDS_H_INCLUDED
# define JSON_FORWARDS_H_INCLUDED

#if !defined(JSON_IS_AMALGATED)
#if !defined(JSON_IS_AMALGAMATION)
# include "config.h"
#endif // if !defined(JSON_IS_AMALGATED)
#endif // if !defined(JSON_IS_AMALGAMATION)

namespace Json {

@@ -6,14 +6,21 @@
#ifndef CPPTL_JSON_READER_H_INCLUDED
# define CPPTL_JSON_READER_H_INCLUDED

#if !defined(JSON_IS_AMALGATED)
#if !defined(JSON_IS_AMALGAMATION)
# include "features.h"
# include "value.h"
#endif // if !defined(JSON_IS_AMALGATED)
#endif // if !defined(JSON_IS_AMALGAMATION)
# include <deque>
# include <iosfwd>
# include <stack>
# include <string>
# include <iostream>

// Disable warning C4251: <data member>: <type> needs to have dll-interface to be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
# pragma warning(push)
# pragma warning(disable:4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

namespace Json {

@@ -26,6 +33,19 @@ namespace Json {
      typedef char Char;
      typedef const Char *Location;

      /** \brief An error tagged with where in the JSON text it was encountered.
       *
       * The offsets give the [start, limit) range of bytes within the text. Note
       * that this is bytes, not codepoints.
       */
      struct StructuredError
      {
         size_t offset_start;
         size_t offset_limit;
         std::string message;
      };

      /** \brief Constructs a Reader allowing all features
       * for parsing.
       */
@@ -88,6 +108,14 @@ namespace Json {
       */
      std::string getFormattedErrorMessages() const;

      /** \brief Returns a vector of structured errors encountered while parsing.
       * \return A (possibly empty) vector of StructuredError objects. Currently
       * only one error can be returned, but the caller should tolerate multiple
       * errors. This can occur if the parser recovers from a non-fatal
       * parse error and then encounters additional errors.
       */
      std::vector<StructuredError> getStructuredErrors() const;

   private:
      enum TokenType
      {
@@ -139,9 +167,11 @@ namespace Json {
      bool readObject( Token &token );
      bool readArray( Token &token );
      bool decodeNumber( Token &token );
      bool decodeNumber( Token &token, Value &decoded );
      bool decodeString( Token &token );
      bool decodeString( Token &token, std::string &decoded );
      bool decodeDouble( Token &token );
      bool decodeDouble( Token &token, Value &decoded );
      bool decodeUnicodeCodePoint( Token &token,
                                   Location &current,
                                   Location end,
@@ -197,18 +227,23 @@ namespace Json {
    Result:
    \verbatim
    {
    "dir": {
        "file": {
        // The input stream JSON would be nested here.
        }
    }
    "dir": {
        "file": {
        // The input stream JSON would be nested here.
        }
    }
    }
    \endverbatim
    \throw std::exception on parse error.
    \see Json::operator<<()
   */
   std::istream& operator>>( std::istream&, Value& );
   JSON_API std::istream& operator>>( std::istream&, Value& );

} // namespace Json

#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
# pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

#endif // CPPTL_JSON_READER_H_INCLUDED

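Together with the [start, limit) offsets stored on each Value, getStructuredErrors() lets a caller point at the exact bytes that failed to parse. A minimal sketch of the intended use (the malformed JSON literal is our own example; the calls are the ones declared above):

#include <json/reader.h>
#include <cstdio>

int main()
{
   Json::Reader reader;
   Json::Value root;
   const std::string doc = "{ \"answer\": 42, }";   // trailing comma: parse error
   if ( !reader.parse( doc, root ) )
   {
      // Each error carries [offset_start, offset_limit) byte offsets into doc.
      std::vector<Json::Reader::StructuredError> errors = reader.getStructuredErrors();
      for ( size_t i = 0; i < errors.size(); ++i )
         printf( "error at bytes [%u, %u): %s\n",
                 unsigned( errors[i].offset_start ),
                 unsigned( errors[i].offset_limit ),
                 errors[i].message.c_str() );
   }
   return 0;
}
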
@@ -6,9 +6,9 @@
#ifndef CPPTL_JSON_H_INCLUDED
# define CPPTL_JSON_H_INCLUDED

#if !defined(JSON_IS_AMALGATED)
#if !defined(JSON_IS_AMALGAMATION)
# include "forwards.h"
#endif // if !defined(JSON_IS_AMALGATED)
#endif // if !defined(JSON_IS_AMALGAMATION)
# include <string>
# include <vector>

@@ -21,6 +21,13 @@
# include <cpptl/forwards.h>
# endif

// Disable warning C4251: <data member>: <type> needs to have dll-interface to be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
# pragma warning(push)
# pragma warning(disable:4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

/** \brief JSON (JavaScript Object Notation).
 */
namespace Json {

@@ -132,31 +139,33 @@ namespace Json {
      typedef Json::UInt64 UInt64;
      typedef Json::Int64 Int64;
#endif // defined(JSON_HAS_INT64)
      typedef Json::LargestInt LargestInt;
      typedef Json::LargestUInt LargestUInt;
      typedef Json::LargestInt LargestInt;
      typedef Json::LargestUInt LargestUInt;
      typedef Json::ArrayIndex ArrayIndex;

      static const Value null;
      /// Minimum signed integer value that can be stored in a Json::Value.
      static const LargestInt minLargestInt;
      /// Maximum signed integer value that can be stored in a Json::Value.
      /// Minimum signed integer value that can be stored in a Json::Value.
      static const LargestInt minLargestInt;
      /// Maximum signed integer value that can be stored in a Json::Value.
      static const LargestInt maxLargestInt;
      /// Maximum unsigned integer value that can be stored in a Json::Value.
      /// Maximum unsigned integer value that can be stored in a Json::Value.
      static const LargestUInt maxLargestUInt;

      /// Minimum signed int value that can be stored in a Json::Value.
      static const Int minInt;
      /// Maximum signed int value that can be stored in a Json::Value.
      /// Minimum signed int value that can be stored in a Json::Value.
      static const Int minInt;
      /// Maximum signed int value that can be stored in a Json::Value.
      static const Int maxInt;
      /// Maximum unsigned int value that can be stored in a Json::Value.
      /// Maximum unsigned int value that can be stored in a Json::Value.
      static const UInt maxUInt;

      /// Minimum signed 64 bits int value that can be stored in a Json::Value.
      static const Int64 minInt64;
      /// Maximum signed 64 bits int value that can be stored in a Json::Value.
# if defined(JSON_HAS_INT64)
      /// Minimum signed 64 bits int value that can be stored in a Json::Value.
      static const Int64 minInt64;
      /// Maximum signed 64 bits int value that can be stored in a Json::Value.
      static const Int64 maxInt64;
      /// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
      /// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
      static const UInt64 maxUInt64;
#endif // defined(JSON_HAS_INT64)

   private:
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
@@ -202,14 +211,14 @@ namespace Json {
        To create an empty array, pass arrayValue.
        To create an empty object, pass objectValue.
        Another Value can then be set to this one by assignment.
        This is useful since clear() and resize() will not alter types.
        This is useful since clear() and resize() will not alter types.

        Examples:
        \code
        Json::Value null_value; // null
        Json::Value arr_value(Json::arrayValue); // []
        Json::Value obj_value(Json::objectValue); // {}
        \endcode
        \code
        Json::Value null_value; // null
        Json::Value arr_value(Json::arrayValue); // []
        Json::Value obj_value(Json::objectValue); // {}
        \endcode
      */
      Value( ValueType type = nullValue );
      Value( Int value );
@@ -256,7 +265,7 @@ namespace Json {
      bool operator ==( const Value &other ) const;
      bool operator !=( const Value &other ) const;

      int compare( const Value &other );
      int compare( const Value &other ) const;

      const char *asCString() const;
      std::string asString() const;
@@ -265,8 +274,10 @@ namespace Json {
# endif
      Int asInt() const;
      UInt asUInt() const;
#if defined(JSON_HAS_INT64)
      Int64 asInt64() const;
      UInt64 asUInt64() const;
#endif // if defined(JSON_HAS_INT64)
      LargestInt asLargestInt() const;
      LargestUInt asLargestUInt() const;
      float asFloat() const;
@@ -276,7 +287,9 @@ namespace Json {
      bool isNull() const;
      bool isBool() const;
      bool isInt() const;
      bool isInt64() const;
      bool isUInt() const;
      bool isUInt64() const;
      bool isIntegral() const;
      bool isDouble() const;
      bool isNumeric() const;
@@ -315,24 +328,24 @@ namespace Json {
      /// this from the operator[] which takes a string.)
      Value &operator[]( ArrayIndex index );

      /// Access an array element (zero based index).
      /// Access an array element (zero based index).
      /// If the array contains fewer than index+1 elements, null values are inserted
      /// in the array so that its size is index+1.
      /// (You may need to say 'value[0u]' to get your compiler to distinguish
      /// this from the operator[] which takes a string.)
      Value &operator[]( int index );

      /// Access an array element (zero based index)
      /// Access an array element (zero based index)
      /// (You may need to say 'value[0u]' to get your compiler to distinguish
      /// this from the operator[] which takes a string.)
      const Value &operator[]( ArrayIndex index ) const;

      /// Access an array element (zero based index)
      /// Access an array element (zero based index)
      /// (You may need to say 'value[0u]' to get your compiler to distinguish
      /// this from the operator[] which takes a string.)
      const Value &operator[]( int index ) const;

      /// If the array contains at least index+1 elements, returns the element value,
      /// If the array contains at least index+1 elements, returns the element value,
      /// otherwise returns defaultValue.
      Value get( ArrayIndex index,
                 const Value &defaultValue ) const;
@@ -429,6 +442,13 @@ namespace Json {
      iterator begin();
      iterator end();

      // Accessors for the [start, limit) range of bytes within the JSON text from
      // which this value was parsed, if any.
      void setOffsetStart( size_t start );
      void setOffsetLimit( size_t limit );
      size_t getOffsetStart() const;
      size_t getOffsetLimit() const;

   private:
      Value &resolveReference( const char *key,
                               bool isStatic );
@@ -496,12 +516,17 @@ namespace Json {
      int memberNameIsStatic_ : 1;       // used by the ValueInternalMap container.
# endif
      CommentInfo *comments_;

      // [start, limit) byte offsets in the source JSON text from which this Value
      // was extracted.
      size_t start_;
      size_t limit_;
   };


   /** \brief Experimental and untested: represents an element of the "path" to access a node.
    */
   class PathArgument
   class JSON_API PathArgument
   {
   public:
      friend class Path;
@@ -534,7 +559,7 @@ namespace Json {
    * - ".%" => member name is provided as parameter
    * - ".[%]" => index is provided as parameter
    */
   class Path
   class JSON_API Path
   {
   public:
      Path( const std::string &path,
@@ -910,9 +935,10 @@ public: // overridden from ValueArrayAllocator
   /** \brief base class for Value iterators.
    *
    */
   class ValueIteratorBase
   class JSON_API ValueIteratorBase
   {
   public:
      typedef std::bidirectional_iterator_tag iterator_category;
      typedef unsigned int size_t;
      typedef int difference_type;
      typedef ValueIteratorBase SelfType;
@@ -980,10 +1006,11 @@ public: // overridden from ValueArrayAllocator
   /** \brief const iterator for object and array value.
    *
    */
   class ValueConstIterator : public ValueIteratorBase
   class JSON_API ValueConstIterator : public ValueIteratorBase
   {
      friend class Value;
   public:
      typedef const Value value_type;
      typedef unsigned int size_t;
      typedef int difference_type;
      typedef const Value &reference;
@@ -1038,10 +1065,11 @@ public: // overridden from ValueArrayAllocator

   /** \brief Iterator for object and array value.
    */
   class ValueIterator : public ValueIteratorBase
   class JSON_API ValueIterator : public ValueIteratorBase
   {
      friend class Value;
   public:
      typedef Value value_type;
      typedef unsigned int size_t;
      typedef int difference_type;
      typedef Value &reference;
@@ -1100,4 +1128,9 @@ public: // overridden from ValueArrayAllocator
} // namespace Json


#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
# pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

#endif // CPPTL_JSON_H_INCLUDED

14	include/json/version.h	Normal file
@@ -0,0 +1,14 @@
// DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED

# define JSONCPP_VERSION_STRING "0.6.0-dev"
# define JSONCPP_VERSION_MAJOR 0
# define JSONCPP_VERSION_MINOR 6
# define JSONCPP_VERSION_PATCH 0
# define JSONCPP_VERSION_QUALIFIER -dev
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))

#endif // JSON_VERSION_H_INCLUDED
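JSONCPP_VERSION_HEXA packs major/minor/patch into one integer, so a version gate is a single comparison; for 0.6.0 the arithmetic is (0 << 24) | (6 << 16) | (0 << 8) = 0x00060000. A small sketch of client code using it (our own usage, not shipped with the header):

#include <json/version.h>

// Compile-time gate on the jsoncpp version: 0.6.0 packs to 0x00060000.
#if JSONCPP_VERSION_HEXA < ((0 << 24) | (6 << 16) | (0 << 8))
# error "jsoncpp 0.6.0 or newer is required"
#endif
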
@@ -6,12 +6,18 @@
#ifndef JSON_WRITER_H_INCLUDED
# define JSON_WRITER_H_INCLUDED

#if !defined(JSON_IS_AMALGATED)
#if !defined(JSON_IS_AMALGAMATION)
# include "value.h"
#endif // if !defined(JSON_IS_AMALGATED)
#endif // if !defined(JSON_IS_AMALGAMATION)
# include <vector>
# include <string>
# include <iostream>

// Disable warning C4251: <data member>: <type> needs to have dll-interface to be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
# pragma warning(push)
# pragma warning(disable:4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

namespace Json {

@@ -41,6 +47,13 @@ namespace Json {

      void enableYAMLCompatibility();

      /** \brief Drop the "null" string from the writer's output for nullValues.
       * Strictly speaking, this is not valid JSON. But when the output is being
       * fed to a browser's JavaScript, it makes for smaller output and the
       * browser can handle the output just fine.
       */
      void dropNullPlaceholders();

   public: // overridden from Writer
      virtual std::string write( const Value &root );

@@ -49,6 +62,7 @@ namespace Json {

      std::string document_;
      bool yamlCompatiblityEnabled_;
      bool dropNullPlaceholders_;
   };

   /** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way.
@@ -176,10 +190,14 @@ namespace Json {

   /// \brief Output using the StyledStreamWriter.
   /// \see Json::operator>>()
   std::ostream& operator<<( std::ostream&, const Value &root );
   JSON_API std::ostream& operator<<( std::ostream&, const Value &root );

} // namespace Json


#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
# pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

#endif // JSON_WRITER_H_INCLUDED

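A minimal sketch of the new dropNullPlaceholders() option (the sample array is our own): with the flag set, [1,null,2] is written as [1,,2], which is smaller but, as the doc comment warns, no longer strictly valid JSON.

#include <json/writer.h>
#include <iostream>

int main()
{
   Json::Value arr( Json::arrayValue );
   arr.append( 1 );
   arr.append( Json::Value() );        // null element
   arr.append( 2 );

   Json::FastWriter writer;
   writer.dropNullPlaceholders();      // omit the "null" tokens on output
   std::cout << writer.write( arr );   // [1,,2] instead of [1,null,2]
   return 0;
}
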
@@ -23,7 +23,7 @@ import tempfile
import os
import time
from devtools import antglob, fixeol, tarball
import amalgate
import amalgamate

SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
@@ -323,13 +323,13 @@ Warning: --force should only be used when developing/testing the release script
    print 'Generating source tarball to', source_tarball_path
    tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )

    amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir
    print 'Generating amalgated source tarball to', amalgated_tarball_path
    amalgated_dir = 'dist/amalgated'
    amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' )
    amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version
    tarball.make_tarball( amalgated_tarball_path, [amalgated_dir],
                          amalgated_dir, prefix_dir=amalgated_source_dir )
    amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
    print 'Generating amalgamation source tarball to', amalgamation_tarball_path
    amalgamation_dir = 'dist/amalgamation'
    amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
    amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
    tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir],
                          amalgamation_dir, prefix_dir=amalgamation_source_dir )

    # Decompress source tarball, download and install scons-local
    distcheck_dir = 'dist/distcheck'

@@ -1,53 +1,53 @@
import fnmatch
import os

def generate( env ):
    def Glob( env, includes = None, excludes = None, dir = '.' ):
        """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.')
        helper function to environment.

        Globs the file-system files.

        includes: list of file name patterns included in the return list when matched.
        excludes: list of file name patterns excluded from the return list.

        Example:
        sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
        """
        def filterFilename(path):
            abs_path = os.path.join( dir, path )
            if not os.path.isfile(abs_path):
                return 0
            fn = os.path.basename(path)
            match = 0
            for include in includes:
                if fnmatch.fnmatchcase( fn, include ):
                    match = 1
                    break
            if match == 1 and not excludes is None:
                for exclude in excludes:
                    if fnmatch.fnmatchcase( fn, exclude ):
                        match = 0
                        break
            return match
        if includes is None:
            includes = ('*',)
        elif type(includes) in ( type(''), type(u'') ):
            includes = (includes,)
        if type(excludes) in ( type(''), type(u'') ):
            excludes = (excludes,)
        dir = env.Dir(dir).abspath
        paths = os.listdir( dir )
        def makeAbsFileNode( path ):
            return env.File( os.path.join( dir, path ) )
        nodes = filter( filterFilename, paths )
        return map( makeAbsFileNode, nodes )

    from SCons.Script import Environment
    Environment.Glob = Glob

def exists(env):
    """
    Tool always exists.
    """
    return True

5	src/CMakeLists.txt	Normal file
@@ -0,0 +1,5 @@
ADD_SUBDIRECTORY(lib_json)
IF(JSONCPP_WITH_TESTS)
    ADD_SUBDIRECTORY(jsontestrunner)
    ADD_SUBDIRECTORY(test_lib_json)
ENDIF(JSONCPP_WITH_TESTS)
23	src/jsontestrunner/CMakeLists.txt	Normal file
@@ -0,0 +1,23 @@
FIND_PACKAGE(PythonInterp 2.6 REQUIRED)

IF(JSONCPP_LIB_BUILD_SHARED)
    ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)

ADD_EXECUTABLE(jsontestrunner_exe
               main.cpp
               )
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)

IF(PYTHONINTERP_FOUND)
    # Run end to end parser/writer tests
    GET_PROPERTY(JSONTESTRUNNER_EXE_PATH TARGET jsontestrunner_exe PROPERTY LOCATION)
    SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test)
    SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py)
    ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ALL
                      "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" "${JSONTESTRUNNER_EXE_PATH}" "${TEST_DIR}/data"
                      DEPENDS jsontestrunner_exe jsoncpp_test
                      )
    ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)
ENDIF(PYTHONINTERP_FOUND)
@@ -15,6 +15,35 @@
# pragma warning( disable: 4996 )   // disable fopen deprecation warning
#endif

static std::string
normalizeFloatingPointStr( double value )
{
    char buffer[32];
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
    sprintf_s( buffer, sizeof(buffer), "%.16g", value );
#else
    snprintf( buffer, sizeof(buffer), "%.16g", value );
#endif
    buffer[sizeof(buffer)-1] = 0;
    std::string s( buffer );
    std::string::size_type index = s.find_last_of( "eE" );
    if ( index != std::string::npos )
    {
        std::string::size_type hasSign = (s[index+1] == '+' || s[index+1] == '-') ? 1 : 0;
        std::string::size_type exponentStartIndex = index + 1 + hasSign;
        std::string normalized = s.substr( 0, exponentStartIndex );
        std::string::size_type indexDigit = s.find_first_not_of( '0', exponentStartIndex );
        std::string exponent = "0";
        if ( indexDigit != std::string::npos ) // There is an exponent different from 0
        {
            exponent = s.substr( indexDigit );
        }
        return normalized + exponent;
    }
    return s;
}


static std::string
readInputTestFile( const char *path )
{
@@ -34,7 +63,6 @@ readInputTestFile( const char *path )
    return text;
}


static void
printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
{
@@ -50,7 +78,7 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
        fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() );
        break;
    case Json::realValue:
        fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() );
        fprintf( fout, "%s=%s\n", path.c_str(), normalizeFloatingPointStr(value.asDouble()).c_str() );
        break;
    case Json::stringValue:
        fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() );
@@ -65,7 +93,11 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
        for ( int index = 0; index < size; ++index )
        {
            static char buffer[16];
            sprintf( buffer, "[%d]", index );
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
            sprintf_s( buffer, sizeof(buffer), "[%d]", index );
#else
            snprintf( buffer, sizeof(buffer), "[%d]", index );
#endif
            printValueTree( fout, value[index], path + buffer );
        }
    }
@@ -266,4 +298,4 @@ int main( int argc, const char *argv[] )

    return exitCode;
}

// vim: et ts=4 sts=4 sw=4 tw=0
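normalizeFloatingPointStr() exists because printf implementations disagree on exponent width: MSVC historically pads the exponent to three digits where glibc uses two, which would make the generated test baselines platform-dependent. Stripping the exponent's leading zeros gives both a common form; illustrative mappings (our own sample values, not from the test suite):

// 1.5e17 prints as "1.5e+017" (MSVC) or "1.5e+17" (glibc); both normalize to "1.5e+17".
// 1e-5   prints as "1e-005"   (MSVC) or "1e-05"  (glibc); both normalize to "1e-5".
// 1.0    prints as "1": no exponent marker, so the string is returned unchanged.
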
43	src/lib_json/CMakeLists.txt	Normal file
@@ -0,0 +1,43 @@
OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF)
IF(JSONCPP_LIB_BUILD_SHARED)
    SET(JSONCPP_LIB_TYPE SHARED)
    ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ELSE(JSONCPP_LIB_BUILD_SHARED)
    SET(JSONCPP_LIB_TYPE STATIC)
ENDIF(JSONCPP_LIB_BUILD_SHARED)


SET( JSONCPP_INCLUDE_DIR ../../include )

SET( PUBLIC_HEADERS
    ${JSONCPP_INCLUDE_DIR}/json/config.h
    ${JSONCPP_INCLUDE_DIR}/json/forwards.h
    ${JSONCPP_INCLUDE_DIR}/json/features.h
    ${JSONCPP_INCLUDE_DIR}/json/value.h
    ${JSONCPP_INCLUDE_DIR}/json/reader.h
    ${JSONCPP_INCLUDE_DIR}/json/writer.h
    ${JSONCPP_INCLUDE_DIR}/json/assertions.h
    ${JSONCPP_INCLUDE_DIR}/json/version.h
    )

SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} )

ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE}
    ${PUBLIC_HEADERS}
    json_tool.h
    json_reader.cpp
    json_batchallocator.h
    json_valueiterator.inl
    json_value.cpp
    json_writer.cpp
    version.h.in
    )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSON_CPP_VERSION} SOVERSION ${JSON_CPP_VERSION} )

# Install instructions for this target
INSTALL( TARGETS jsoncpp_lib
    RUNTIME DESTINATION bin
    LIBRARY DESTINATION lib
    ARCHIVE DESTINATION lib
    )
@@ -30,8 +30,6 @@ template<typename AllocatedType
class BatchAllocator
{
public:
   typedef AllocatedType Type;

   BatchAllocator( unsigned int objectsPerPage = 255 )
      : freeHead_( 0 )
      , objectsPerPage_( objectsPerPage )
@@ -127,4 +125,4 @@ private:
# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION

#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED

// vim: et ts=3 sts=3 sw=3 tw=0

@@ -53,8 +53,7 @@ public: // overridden from ValueArrayAllocator
      if ( minNewIndexCount > newIndexCount )
         newIndexCount = minNewIndexCount;
      void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
      if ( !newIndexes )
         throw std::bad_alloc();
      JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
      indexCount = newIndexCount;
      indexes = static_cast<Value **>( newIndexes );
   }
@@ -117,8 +116,7 @@ public: // overridden from ValueArrayAllocator
      if ( minNewIndexCount > newIndexCount )
         newIndexCount = minNewIndexCount;
      void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
      if ( !newIndexes )
         throw std::bad_alloc();
      JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
      indexCount = newIndexCount;
      indexes = static_cast<Value **>( newIndexes );
   }
@@ -258,8 +256,8 @@ ValueInternalArray::ValueInternalArray()

ValueInternalArray::ValueInternalArray( const ValueInternalArray &other )
   : pages_( 0 )
   , pageCount_( 0 )
   , size_( other.size_ )
   , pageCount_( 0 )
{
   PageIndex minNewPages = other.size_ / itemsPerPage;
   arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages );
@@ -454,3 +452,4 @@ ValueInternalArray::compare( const ValueInternalArray &other ) const
}

} // namespace Json
// vim: et ts=3 sts=3 sw=3 tw=0
@@ -613,3 +613,4 @@ ValueInternalMap::distance( const IteratorState &x, const IteratorState &y )
}

} // namespace Json
// vim: et ts=3 sts=3 sw=3 tw=0

@@ -1,21 +1,21 @@
|
||||
// Copyright 2007-2010 Baptiste Lepilleur
|
||||
// Copyright 2007-2011 Baptiste Lepilleur
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#if !defined(JSON_IS_AMALGATED)
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
# include <json/assertions.h>
|
||||
# include <json/reader.h>
|
||||
# include <json/value.h>
|
||||
# include "json_tool.h"
|
||||
#endif // if !defined(JSON_IS_AMALGATED)
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <utility>
|
||||
#include <cstdio>
|
||||
#include <cassert>
|
||||
#include <cstring>
|
||||
#include <iostream>
|
||||
#include <stdexcept>
|
||||
#include <istream>
|
||||
|
||||
#if _MSC_VER >= 1400 // VC++ 8.0
|
||||
#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
|
||||
#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
|
||||
#endif
|
||||
|
||||
@@ -27,6 +27,8 @@ namespace Json {
|
||||
Features::Features()
|
||||
: allowComments_( true )
|
||||
, strictRoot_( false )
|
||||
, allowDroppedNullPlaceholders_ ( false )
|
||||
, allowNumericKeys_ ( false )
|
||||
{
|
||||
}
|
||||
|
||||
@@ -44,6 +46,8 @@ Features::strictMode()
|
||||
Features features;
|
||||
features.allowComments_ = false;
|
||||
features.strictRoot_ = true;
|
||||
features.allowDroppedNullPlaceholders_ = false;
|
||||
features.allowNumericKeys_ = false;
|
||||
return features;
|
||||
}
|
||||
|
||||
@@ -79,13 +83,31 @@ containsNewLine( Reader::Location begin,
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
Reader::Reader()
|
||||
: features_( Features::all() )
|
||||
: errors_(),
|
||||
document_(),
|
||||
begin_(),
|
||||
end_(),
|
||||
current_(),
|
||||
lastValueEnd_(),
|
||||
lastValue_(),
|
||||
commentsBefore_(),
|
||||
features_( Features::all() ),
|
||||
collectComments_()
|
||||
{
|
||||
}
|
||||
|
||||
|
||||
Reader::Reader( const Features &features )
|
||||
: features_( features )
|
||||
: errors_(),
|
||||
document_(),
|
||||
begin_(),
|
||||
end_(),
|
||||
current_(),
|
||||
lastValueEnd_(),
|
||||
lastValue_(),
|
||||
commentsBefore_(),
|
||||
features_( features ),
|
||||
collectComments_()
|
||||
{
|
||||
}
|
||||
|
||||
@@ -172,6 +194,17 @@ Reader::readValue()
|
||||
|
||||
if ( collectComments_ && !commentsBefore_.empty() )
|
||||
{
|
||||
// Remove newline characters at the end of the comments
|
||||
size_t lastNonNewline = commentsBefore_.find_last_not_of("\r\n");
|
||||
if (lastNonNewline != std::string::npos)
|
||||
{
|
||||
commentsBefore_.erase(lastNonNewline+1);
|
||||
}
|
||||
else
|
||||
{
|
||||
commentsBefore_.clear();
|
||||
}
|
||||
|
||||
currentValue().setComment( commentsBefore_, commentBefore );
|
||||
commentsBefore_ = "";
|
||||
}
|
||||
@@ -181,9 +214,11 @@ Reader::readValue()
|
||||
{
|
||||
case tokenObjectBegin:
|
||||
successful = readObject( token );
|
||||
currentValue().setOffsetLimit(current_ - begin_);
|
||||
break;
|
||||
case tokenArrayBegin:
|
||||
successful = readArray( token );
|
||||
currentValue().setOffsetLimit(current_ - begin_);
|
||||
break;
|
||||
case tokenNumber:
|
||||
successful = decodeNumber( token );
|
||||
@@ -193,14 +228,34 @@ Reader::readValue()
|
||||
break;
|
||||
case tokenTrue:
|
||||
currentValue() = true;
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
break;
|
||||
case tokenFalse:
|
||||
currentValue() = false;
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
break;
|
||||
case tokenNull:
|
||||
currentValue() = Value();
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
break;
|
||||
case tokenArraySeparator:
|
||||
if ( features_.allowDroppedNullPlaceholders_ )
|
||||
{
|
||||
// "Un-read" the current token and mark the current value as a null
|
||||
// token.
|
||||
current_--;
|
||||
currentValue() = Value();
|
||||
currentValue().setOffsetStart(current_ - begin_ - 1);
|
||||
currentValue().setOffsetLimit(current_ - begin_);
|
||||
break;
|
||||
}
|
||||
// Else, fall through...
|
||||
default:
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
return addError( "Syntax error: value, object or array expected.", token );
|
||||
}
|
||||
|
||||
@@ -449,11 +504,12 @@ Reader::readString()
|
||||
|
||||
|
||||
bool
|
||||
Reader::readObject( Token &/*tokenStart*/ )
|
||||
Reader::readObject( Token &tokenStart )
|
||||
{
|
||||
Token tokenName;
|
||||
std::string name;
|
||||
currentValue() = Value( objectValue );
|
||||
currentValue().setOffsetStart(tokenStart.start_ - begin_);
|
||||
while ( readToken( tokenName ) )
|
||||
{
|
||||
bool initialTokenOk = true;
|
||||
@@ -463,12 +519,24 @@ Reader::readObject( Token &/*tokenStart*/ )
|
||||
break;
|
||||
if ( tokenName.type_ == tokenObjectEnd && name.empty() ) // empty object
|
||||
return true;
|
||||
if ( tokenName.type_ != tokenString )
|
||||
break;
|
||||
|
||||
name = "";
|
||||
if ( !decodeString( tokenName, name ) )
|
||||
return recoverFromError( tokenObjectEnd );
|
||||
if ( tokenName.type_ == tokenString )
|
||||
{
|
||||
if ( !decodeString( tokenName, name ) )
|
||||
return recoverFromError( tokenObjectEnd );
|
||||
}
|
||||
else if ( tokenName.type_ == tokenNumber &&
|
||||
features_.allowNumericKeys_ )
|
||||
{
|
||||
Value numberName;
|
||||
if ( !decodeNumber( tokenName, numberName ) )
|
||||
return recoverFromError( tokenObjectEnd );
|
||||
name = numberName.asString();
|
||||
}
|
||||
else
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
Token colon;
|
||||
if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator )
|
||||
@@ -488,7 +556,7 @@ Reader::readObject( Token &/*tokenStart*/ )
|
||||
if ( !readToken( comma )
|
||||
|| ( comma.type_ != tokenObjectEnd &&
|
||||
comma.type_ != tokenArraySeparator &&
|
||||
comma.type_ != tokenComment ) )
|
||||
comma.type_ != tokenComment ) )
|
||||
{
|
||||
return addErrorAndRecover( "Missing ',' or '}' in object declaration",
|
||||
comma,
|
||||
@@ -508,9 +576,10 @@ Reader::readObject( Token &/*tokenStart*/ )
|
||||
|
||||
|
||||
bool
|
||||
Reader::readArray( Token &/*tokenStart*/ )
|
||||
Reader::readArray( Token &tokenStart )
|
||||
{
|
||||
currentValue() = Value( arrayValue );
|
||||
currentValue().setOffsetStart(tokenStart.start_ - begin_);
|
||||
skipSpaces();
|
||||
if ( *current_ == ']' ) // empty array
|
||||
{
|
||||
@@ -552,6 +621,19 @@ Reader::readArray( Token &/*tokenStart*/ )
|
||||
|
||||
bool
|
||||
Reader::decodeNumber( Token &token )
|
||||
{
|
||||
Value decoded;
|
||||
if ( !decodeNumber( token, decoded ) )
|
||||
return false;
|
||||
currentValue() = decoded;
|
||||
currentValue().setOffsetStart(token.start_ - begin_);
|
||||
currentValue().setOffsetLimit(token.end_ - begin_);
|
||||
return true;
|
||||
}

bool
Reader::decodeNumber( Token &token, Value &decoded )
{
   bool isDouble = false;
   for ( Location inspect = token.start_; inspect != token.end_; ++inspect )
@@ -561,7 +643,7 @@ Reader::decodeNumber( Token &token )
               || ( *inspect == '-' && inspect != token.start_ );
   }
   if ( isDouble )
      return decodeDouble( token );
      return decodeDouble( token, decoded );
   // Attempts to parse the number as an integer. If the number is
   // larger than the maximum supported value of an integer then
   // we decode the number as a double.
@@ -572,8 +654,6 @@ Reader::decodeNumber( Token &token )
   Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt)
                                                   : Value::maxLargestUInt;
   Value::LargestUInt threshold = maxIntegerValue / 10;
   Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 );
   assert( lastDigitThreshold >= 0 && lastDigitThreshold <= 9 );
   Value::LargestUInt value = 0;
   while ( current < token.end_ )
   {
@@ -583,49 +663,78 @@ Reader::decodeNumber( Token &token )
      Value::UInt digit(c - '0');
      if ( value >= threshold )
      {
         // If the current digit is not the last one, or if it is
         // greater than the last digit of the maximum integer value,
         // then parse the number as a double.
         if ( current != token.end_ || digit > lastDigitThreshold )
         // We've hit or exceeded the max value divided by 10 (rounded down). If
         // a) we've only just touched the limit, b) this is the last digit, and
         // c) it's small enough to fit in that rounding delta, we're okay.
         // Otherwise treat this number as a double to avoid overflow.
         if (value > threshold ||
             current != token.end_ ||
             digit > maxIntegerValue % 10)
         {
            return decodeDouble( token );
            return decodeDouble( token, decoded );
         }
      }
      value = value * 10 + digit;
   }
   if ( isNegative )
      currentValue() = -Value::LargestInt( value );
      decoded = -Value::LargestInt( value );
   else if ( value <= Value::LargestUInt(Value::maxInt) )
      currentValue() = Value::LargestInt( value );
      decoded = Value::LargestInt( value );
   else
      currentValue() = value;
      decoded = value;
   return true;
}
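
The rewritten digit loop above guards against overflow: once the accumulated value reaches maxIntegerValue / 10, one more digit is only safe if it is the final digit and no larger than maxIntegerValue % 10; anything else falls back to decodeDouble. A standalone sketch of the same guard (illustrative names, not jsoncpp API):

    #include <cstdint>
    #include <string>

    // Returns false when `s` would overflow uint64_t, signalling the
    // caller to re-parse the token as a double instead.
    bool accumulateDigits( const std::string &s, std::uint64_t &out ) {
        const std::uint64_t maxValue  = UINT64_MAX;
        const std::uint64_t threshold = maxValue / 10;
        std::uint64_t value = 0;
        for ( std::size_t i = 0; i < s.size(); ++i ) {
            if ( s[i] < '0' || s[i] > '9' )
                return false;                       // not a digit
            std::uint64_t digit = std::uint64_t( s[i] - '0' );
            // Same test as the reader: at the threshold, only a final
            // digit no larger than maxValue % 10 still fits.
            if ( value > threshold ||
                 ( value == threshold &&
                   ( i + 1 < s.size() || digit > maxValue % 10 ) ) )
                return false;                       // would overflow
            value = value * 10 + digit;
        }
        out = value;
        return true;
    }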


bool
Reader::decodeDouble( Token &token )
{
   Value decoded;
   if ( !decodeDouble( token, decoded ) )
      return false;
   currentValue() = decoded;
   currentValue().setOffsetStart(token.start_ - begin_);
   currentValue().setOffsetLimit(token.end_ - begin_);
   return true;
}


bool
Reader::decodeDouble( Token &token, Value &decoded )
{
   double value = 0;
   const int bufferSize = 32;
   int count;
   int length = int(token.end_ - token.start_);

   // Sanity check to avoid buffer overflow exploits.
   if (length < 0) {
      return addError( "Unable to parse token length", token );
   }

   // Avoid using a string constant for the format control string given to
   // sscanf, as this can cause hard to debug crashes on OS X. See here for more
   // info:
   //
   //     http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html
   char format[] = "%lf";

   if ( length <= bufferSize )
   {
      Char buffer[bufferSize+1];
      memcpy( buffer, token.start_, length );
      buffer[length] = 0;
      count = sscanf( buffer, "%lf", &value );
      count = sscanf( buffer, format, &value );
   }
   else
   {
      std::string buffer( token.start_, token.end_ );
      count = sscanf( buffer.c_str(), "%lf", &value );
      count = sscanf( buffer.c_str(), format, &value );
   }

   if ( count != 1 )
      return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token );
   currentValue() = value;
   decoded = value;
   return true;
}
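
A self-contained sketch of the same bounded-buffer strategy: parse a character range as a double without assuming NUL termination, using a mutable format array rather than a string literal (per the OS X note above). Names here are illustrative, not library API:

    #include <cstdio>
    #include <cstring>
    #include <string>

    bool parseDouble( const char *begin, const char *end, double &value ) {
        const int bufferSize = 32;
        const long length = long( end - begin );
        if ( length < 0 )
            return false;                       // malformed token range
        char format[] = "%lf";                  // mutable array, not a literal
        int count;
        if ( length <= bufferSize ) {
            char buffer[bufferSize + 1];        // common case: short tokens
            std::memcpy( buffer, begin, size_t( length ) );
            buffer[length] = '\0';
            count = std::sscanf( buffer, format, &value );
        } else {
            std::string buffer( begin, end );   // rare: very long tokens
            count = std::sscanf( buffer.c_str(), format, &value );
        }
        return count == 1;
    }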

@@ -637,6 +746,8 @@ Reader::decodeString( Token &token )
   if ( !decodeString( token, decoded ) )
      return false;
   currentValue() = decoded;
   currentValue().setOffsetStart(token.start_ - begin_);
   currentValue().setOffsetLimit(token.end_ - begin_);
   return true;
}

@@ -836,7 +947,11 @@ Reader::getLocationLineAndColumn( Location location ) const
   int line, column;
   getLocationLineAndColumn( location, line, column );
   char buffer[18+16+16+1];
   sprintf( buffer, "Line %d, Column %d", line, column );
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
   sprintf_s(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
#else
   snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
#endif
   return buffer;
}

@@ -867,14 +982,40 @@ Reader::getFormattedErrorMessages() const
}


std::vector<Reader::StructuredError>
Reader::getStructuredErrors() const
{
   std::vector<Reader::StructuredError> allErrors;
   for ( Errors::const_iterator itError = errors_.begin();
         itError != errors_.end();
         ++itError )
   {
      const ErrorInfo &error = *itError;
      Reader::StructuredError structured;
      structured.offset_start = error.token_.start_ - begin_;
      structured.offset_limit = error.token_.end_ - begin_;
      structured.message = error.message_;
      allErrors.push_back(structured);
   }
   return allErrors;
}
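
getStructuredErrors() exposes each parse error with byte offsets into the original document, which a tool can map back to the exact source span. A usage sketch, assuming a build that includes the StructuredError type added above:

    #include <json/json.h>
    #include <cstdio>
    #include <string>
    #include <vector>

    int main() {
        const std::string doc = "{ \"a\" : 1, \"b\" }";   // missing ':' after "b"
        Json::Reader reader;
        Json::Value root;
        if ( !reader.parse( doc, root ) ) {
            std::vector<Json::Reader::StructuredError> errors =
                reader.getStructuredErrors();
            for ( size_t i = 0; i < errors.size(); ++i )
                std::printf( "bytes %d..%d: %s\n",
                             int( errors[i].offset_start ),
                             int( errors[i].offset_limit ),
                             errors[i].message.c_str() );
        }
        return 0;
    }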

std::istream& operator>>( std::istream &sin, Value &root )
{
   Json::Reader reader;
   bool ok = reader.parse(sin, root, true);
   //JSON_ASSERT( ok );
   if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages());
   if (!ok) {
      fprintf(
         stderr,
         "Error from reader: %s",
         reader.getFormattedErrorMessages().c_str());

      JSON_FAIL_MESSAGE("reader error");
   }
   return sin;
}
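
Stream extraction in use; note that after this change a failed parse reports through JSON_FAIL_MESSAGE (an assertion in exception-free builds) instead of throwing std::runtime_error directly. A minimal sketch:

    #include <json/json.h>
    #include <iostream>
    #include <sstream>

    int main() {
        std::istringstream sin( "{ \"name\": \"jsoncpp\", \"ok\": true }" );
        Json::Value root;
        sin >> root;                                         // Json::operator>>
        std::cout << root["name"].asString() << std::endl;   // prints: jsoncpp
        return 0;
    }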


} // namespace Json
// vim: et ts=3 sts=3 sw=3 tw=0

File diff suppressed because it is too large

@@ -149,6 +149,7 @@ ValueIteratorBase::copy( const SelfType &other )
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
   current_ = other.current_;
   isNull_ = other.isNull_;
#else
   if ( isArray_ )
      iterator_.array_ = other.iterator_.array_;
@@ -297,3 +298,4 @@ ValueIterator::operator =( const SelfType &other )
}

} // namespace Json
// vim: et ts=3 sts=3 sw=3 tw=0

@@ -1,21 +1,20 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Copyright 2011 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

#if !defined(JSON_IS_AMALGATED)
#if !defined(JSON_IS_AMALGAMATION)
# include <json/writer.h>
# include "json_tool.h"
#endif // if !defined(JSON_IS_AMALGATED)
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <utility>
#include <assert.h>
#include <stdio.h>
#include <string.h>
#include <iostream>
#include <sstream>
#include <iomanip>

#if _MSC_VER >= 1400 // VC++ 8.0
#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
#pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
#endif

@@ -74,40 +73,19 @@ std::string valueToString( UInt value )

std::string valueToString( double value )
{
   // Allocate a buffer that is more than large enough to store the 16 digits of
   // precision requested below.
   char buffer[32];

   // Print into the buffer. We need not request the alternative representation
   // that always has a decimal point because JSON doesn't distinguish the
   // concepts of reals and integers.
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning.
   sprintf_s(buffer, sizeof(buffer), "%#.16g", value);
#else
   sprintf(buffer, "%#.16g", value);
   sprintf_s(buffer, sizeof(buffer), "%.16g", value);
#else
   snprintf(buffer, sizeof(buffer), "%.16g", value);
#endif
   char* ch = buffer + strlen(buffer) - 1;
   if (*ch != '0') return buffer; // nothing to truncate, so save time
   while(ch > buffer && *ch == '0'){
      --ch;
   }
   char* last_nonzero = ch;
   while(ch >= buffer){
      switch(*ch){
      case '0':
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
         --ch;
         continue;
      case '.':
         // Truncate zeroes to save bytes in output, but keep one.
         *(last_nonzero+2) = '\0';
         return buffer;
      default:
         return buffer;
      }
   }

   return buffer;
}
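
The truncation loop above only fires when the "%.16g" output ends in '0' and the zeros follow a decimal point; it keeps exactly one zero. A standalone sketch of the same trim rule (illustrative, not the library function; assumes a non-empty NUL-terminated buffer):

    #include <cstring>

    // "2.000" -> "2.0", "1.2300" -> "1.230"; "1230" and "1e+100" untouched.
    void trimTrailingZeros( char *buffer ) {
        char *ch = buffer + std::strlen( buffer ) - 1;
        if ( *ch != '0' ) return;                  // nothing to truncate
        while ( ch > buffer && *ch == '0' ) --ch;  // back over the zero run
        char *last_nonzero = ch;
        for ( ; ch >= buffer; --ch ) {
            if ( *ch == '.' ) {                    // zeros follow a decimal point
                *(last_nonzero + 2) = '\0';        // keep exactly one zero
                return;
            }
            if ( *ch < '0' || *ch > '9' )
                return;                            // exponent etc.: leave as-is
        }
    }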

@@ -119,6 +97,8 @@ std::string valueToString( bool value )

std::string valueToQuotedString( const char *value )
{
   if (value == NULL)
      return "";
   // Not sure how to handle unicode...
   if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value ))
      return std::string("\"") + value + "\"";
@@ -191,7 +171,8 @@ Writer::~Writer()
// //////////////////////////////////////////////////////////////////

FastWriter::FastWriter()
   : yamlCompatiblityEnabled_( false )
   : yamlCompatiblityEnabled_( false ),
     dropNullPlaceholders_( false )
{
}

@@ -203,6 +184,13 @@ FastWriter::enableYAMLCompatibility()
}


void
FastWriter::dropNullPlaceholders()
{
   dropNullPlaceholders_ = true;
}
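
dropNullPlaceholders() makes FastWriter skip the "null" token entirely, as the writeValue() hunk below shows; the result is handy for JavaScript-flavoured output but is no longer strictly valid JSON for null members. A usage sketch (the emitted text is indicative):

    #include <json/json.h>
    #include <iostream>

    int main() {
        Json::Value root;
        root["a"] = 1;
        root["b"] = Json::Value();          // null member
        Json::FastWriter writer;
        std::cout << writer.write( root );  // {"a":1,"b":null}
        writer.dropNullPlaceholders();
        std::cout << writer.write( root );  // {"a":1,"b":}
        return 0;
    }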

std::string
FastWriter::write( const Value &root )
{
@@ -219,7 +207,7 @@ FastWriter::writeValue( const Value &value )
   switch ( value.type() )
   {
   case nullValue:
      document_ += "null";
      if (!dropNullPlaceholders_) document_ += "null";
      break;
   case intValue:
      document_ += valueToString( value.asLargestInt() );
@@ -278,6 +266,7 @@ FastWriter::writeValue( const Value &value )
StyledWriter::StyledWriter()
   : rightMargin_( 74 )
   , indentSize_( 3 )
   , addChildValues_()
{
}

@@ -494,7 +483,20 @@ StyledWriter::writeCommentBeforeValue( const Value &root )
{
   if ( !root.hasComment( commentBefore ) )
      return;
   document_ += normalizeEOL( root.getComment( commentBefore ) );

   document_ += "\n";
   writeIndent();
   std::string normalizedComment = normalizeEOL( root.getComment( commentBefore ) );
   std::string::const_iterator iter = normalizedComment.begin();
   while ( iter != normalizedComment.end() )
   {
      document_ += *iter;
      if ( *iter == '\n' && *(iter+1) == '/' )
         writeIndent();
      ++iter;
   }

   // Comments are stripped of newlines, so add one here
   document_ += "\n";
}

@@ -554,6 +556,7 @@ StyledStreamWriter::StyledStreamWriter( std::string indentation )
   : document_(NULL)
   , rightMargin_( 74 )
   , indentation_( indentation )
   , addChildValues_()
{
}

@@ -656,7 +659,7 @@ StyledStreamWriter::writeArrayValue( const Value &value )
         writeWithIndent( childValues_[index] );
      else
      {
         writeIndent();
         writeIndent();
         writeValue( childValue );
      }
      if ( ++index == size )
@@ -836,3 +839,4 @@ std::ostream& operator<<( std::ostream &sout, const Value &root )


} // namespace Json
// vim: et ts=3 sts=3 sw=3 tw=0

14
src/lib_json/version.h.in
Normal file
@@ -0,0 +1,14 @@
// DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED

# define JSONCPP_VERSION_STRING "@JSONCPP_VERSION@"
# define JSONCPP_VERSION_MAJOR @JSONCPP_VERSION_MAJOR@
# define JSONCPP_VERSION_MINOR @JSONCPP_VERSION_MINOR@
# define JSONCPP_VERSION_PATCH @JSONCPP_VERSION_PATCH@
# define JSONCPP_VERSION_QUALIFIER @JSONCPP_VERSION_QUALIFIER@
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))

#endif // JSON_VERSION_H_INCLUDED
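
The generated header packs the version into JSONCPP_VERSION_HEXA as (major << 24) | (minor << 16) | (patch << 8), which allows compile-time version gates. A sketch (the installed header path is an assumption):

    #include <json/version.h>   // generated from version.h.in; path may vary

    // Fail the build if the jsoncpp headers are older than 0.6.0.
    #if JSONCPP_VERSION_HEXA < ((0 << 24) | (6 << 16) | (0 << 8))
    #  error "jsoncpp 0.6.0 or newer is required"
    #endif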

22
src/test_lib_json/CMakeLists.txt
Normal file
@@ -0,0 +1,22 @@

IF(JSONCPP_LIB_BUILD_SHARED)
    ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)

ADD_EXECUTABLE( jsoncpp_test
                jsontest.cpp
                jsontest.h
                main.cpp
                )

TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)

# Run unit tests in post-build
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
    ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
                        POST_BUILD
                        COMMAND jsoncpp_test)
ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST)

SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)

@@ -249,57 +249,24 @@ TestResult::addToLastFailure( const std::string &message )
    return *this;
}


TestResult &
TestResult::operator << ( bool value )
{
    return addToLastFailure( value ? "true" : "false" );
TestResult::operator << ( Json::Int64 value ) {
    return addToLastFailure( Json::valueToString(value) );
}


TestResult &
TestResult::operator << ( int value )
{
    char buffer[32];
    sprintf( buffer, "%d", value );
    return addToLastFailure( buffer );
TestResult::operator << ( Json::UInt64 value ) {
    return addToLastFailure( Json::valueToString(value) );
}


TestResult &
TestResult::operator << ( unsigned int value )
{
    char buffer[32];
    sprintf( buffer, "%u", value );
    return addToLastFailure( buffer );
TestResult::operator << ( bool value ) {
    return addToLastFailure(value ? "true" : "false");
}


TestResult &
TestResult::operator << ( double value )
{
    char buffer[32];
    sprintf( buffer, "%16g", value );
    return addToLastFailure( buffer );
}


TestResult &
TestResult::operator << ( const char *value )
{
    return addToLastFailure( value ? value
                                   : "<NULL>" );
}


TestResult &
TestResult::operator << ( const std::string &value )
{
    return addToLastFailure( value );
}



// class TestCase
// //////////////////////////////////////////////////////////////////

@@ -373,7 +340,7 @@ Runner::runTestAt( unsigned int index, TestResult &result ) const
    catch ( const std::exception &e )
    {
        result.addFailure( __FILE__, __LINE__,
                           "Unexpected exception caugth:" ) << e.what();
                           "Unexpected exception caught:" ) << e.what();
    }
#endif // if JSON_USE_EXCEPTION
    delete test;
@@ -513,10 +480,10 @@ Runner::runCommandLine( int argc, const char *argv[] ) const
}


#if defined(_MSC_VER)
#if defined(_MSC_VER) && defined(_DEBUG)
// Hook MSVCRT assertions to prevent dialog from appearing
static int
msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
msvcrtSilentReportHook( int reportType, char *message, int * /*returnValue*/ )
{
    // The default CRT handling of error and assertion is to display
    // an error dialog to the user.
@@ -550,9 +517,11 @@ msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
void
Runner::preventDialogOnCrash()
{
#if defined(_MSC_VER)
#if defined(_MSC_VER) && defined(_DEBUG)
    // Install a hook to prevent MSVCRT error and assertion from
    // popping a dialog.
    // popping a dialog
    // This function is a NO-OP in release configuration
    // (which causes a warning since msvcrtSilentReportHook is not referenced)
    _CrtSetReportHook( &msvcrtSilentReportHook );
#endif // if defined(_MSC_VER)

@@ -606,3 +575,4 @@ checkStringEqual( TestResult &result,


} // namespace JsonTest
// vim: et ts=4 sts=4 sw=4 tw=0

@@ -7,8 +7,11 @@
# define JSONTEST_H_INCLUDED

# include <json/config.h>
# include <json/value.h>
# include <json/writer.h>
# include <stdio.h>
# include <deque>
# include <sstream>
# include <string>

// //////////////////////////////////////////////////////////////////
@@ -84,12 +87,21 @@ namespace JsonTest {

    void printFailure( bool printTestName ) const;

    // Generic operator that will work with anything ostream can deal with.
    template <typename T>
    TestResult &operator << ( const T& value ) {
        std::ostringstream oss;
        oss.precision( 16 );
        oss.setf( std::ios_base::floatfield );
        oss << value;
        return addToLastFailure(oss.str());
    }

    // Specialized versions.
    TestResult &operator << ( bool value );
    TestResult &operator << ( int value );
    TestResult &operator << ( unsigned int value );
    TestResult &operator << ( double value );
    TestResult &operator << ( const char *value );
    TestResult &operator << ( const std::string &value );
    // std::ostream does not support 64-bit integers on all STL implementations
    TestResult &operator << ( Json::Int64 value );
    TestResult &operator << ( Json::UInt64 value );

private:
    TestResult &addToLastFailure( const std::string &message );
@@ -173,20 +185,21 @@ namespace JsonTest {
    Factories tests_;
};

template<typename T>
template<typename T, typename U>
TestResult &
checkEqual( TestResult &result, const T &expected, const T &actual,
checkEqual( TestResult &result, const T &expected, const U &actual,
            const char *file, unsigned int line, const char *expr )
{
    if ( expected != actual )
    if ( static_cast< U >( expected ) != actual )
    {
        result.addFailure( file, line, expr );
        result << "Expected: " << expected << "\n";
        result << "Expected: " << static_cast< U >( expected ) << "\n";
        result << "Actual : " << actual;
    }
    return result;
}
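
Relaxing checkEqual to two type parameters lets tests compare mixed-width values; the expected value is cast to the actual's type before the comparison. A sketch of a call site (TestResult::failed() is assumed from the class's interface, which this excerpt does not show in full):

    #include <json/value.h>
    #include "jsontest.h"

    int main() {
        JsonTest::TestResult result;
        Json::Value v = 42;
        // expected is an int literal; actual is Json::UInt -- the template
        // now bridges the two via static_cast instead of failing to deduce T.
        JsonTest::checkEqual( result, 42, v.asUInt(),
                              __FILE__, __LINE__, "v.asUInt() == 42" );
        return result.failed() ? 1 : 0;   // failed() assumed, see lead-in
    }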

TestResult &
checkStringEqual( TestResult &result,
                  const std::string &expected, const std::string &actual,
@@ -216,8 +229,7 @@ namespace JsonTest {
        result_->predicateStackTail_ = &_minitest_Context; \
        (expr); \
        result_->popPredicateContext(); \
    } \
    *result_
}

/// \brief Asserts that two values are equal.
#define JSONTEST_ASSERT_EQUAL( expected, actual ) \
@@ -229,6 +241,7 @@ namespace JsonTest {
#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
    JsonTest::checkStringEqual( *result_, \
                                std::string(expected), std::string(actual), \
                                __FILE__, __LINE__, \
                                #expected " == " #actual )

/// \brief Begin a fixture test case.
@@ -257,3 +270,4 @@ namespace JsonTest {
    (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )

#endif // ifndef JSONTEST_H_INCLUDED
// vim: et ts=4 sts=4 sw=4 tw=0

File diff suppressed because it is too large

@@ -1 +1 @@
[ 1 2 3]

2122
test/data/test_array_07.expected
Normal file
File diff suppressed because it is too large

2
test/data/test_array_07.json
Normal file
File diff suppressed because one or more lines are too long

@@ -1,8 +1,8 @@
.={}
.test=[]
.test[0]={}
.test[0].a="aaa"
.test[1]={}
.test[1].b="bbb"
.test[2]={}
.test[2].c="ccc"

@@ -1,8 +1,8 @@
{
   "test":
   [
      { "a" : "aaa" }, // Comment for a
      { "b" : "bbb" }, // Comment for b
      { "c" : "ccc" } // Comment for c
   ]
}

7
test/data/test_comment_02.expected
Normal file
@@ -0,0 +1,7 @@
.={}
.c-test={}
.c-test.a=1
.c-test.b=2
.cpp-test={}
.cpp-test.c=3
.cpp-test.d=4

16
test/data/test_comment_02.json
Normal file
@@ -0,0 +1,16 @@
{
   /* C-style comment

      C-style-2 comment */
   "c-test" : {
      "a" : 1,
      /* Internal comment c-style */
      "b" : 2
   },
   // C++-style comment
   "cpp-test" : {
      // Internal comment cpp-style
      "c" : 3,
      "d" : 4
   }
}
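
test_comment_02 exercises both comment styles. Json::Reader accepts them with its default Features (allowComments_ is on); a parsing sketch:

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main() {
        const std::string doc =
            "{\n"
            "   // C++-style comment\n"
            "   \"c\" : 3,\n"
            "   /* C-style comment */\n"
            "   \"d\" : 4\n"
            "}";
        Json::Reader reader;   // default Features: comments allowed
        Json::Value root;
        if ( reader.parse( doc, root ) )
            std::cout << root["c"].asInt() + root["d"].asInt() << std::endl; // 7
        return 0;
    }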

@@ -1 +1 @@
.=9223372036854775808

@@ -1,2 +1,2 @@
9223372036854775808

@@ -1 +1 @@
.=-9223372036854775808

@@ -1,2 +1,2 @@
-9223372036854775808

@@ -1 +1 @@
.=18446744073709551615

@@ -1,2 +1,2 @@
18446744073709551615

1
test/data/test_real_08.expected
Normal file
@@ -0,0 +1 @@
.=4300000001

4
test/data/test_real_08.json
Normal file
@@ -0,0 +1,4 @@
// Out of 32-bit integer range, switch to double in 32-bit mode. Length the
// same as UINT_MAX in base 10 and digit less than UINT_MAX's last digit in
// order to catch a bug in the parsing code.
4300000001

1
test/data/test_real_09.expected
Normal file
@@ -0,0 +1 @@
.=1.9e+19

4
test/data/test_real_09.json
Normal file
@@ -0,0 +1,4 @@
// Out of 64-bit integer range, switch to double in all modes. Length the same
// as ULONG_MAX in base 10 and digit less than ULONG_MAX's last digit in order
// to catch a bug in the parsing code.
19000000000000000001

1
test/data/test_real_10.expected
Normal file
@@ -0,0 +1 @@
.=-2200000001

4
test/data/test_real_10.json
Normal file
@@ -0,0 +1,4 @@
// Out of 32-bit signed integer range, switch to double in all modes. Length
// the same as INT_MIN in base 10 and digit less than INT_MIN's last digit in
// order to catch a bug in the parsing code.
-2200000001

1
test/data/test_real_11.expected
Normal file
@@ -0,0 +1 @@
.=-9.3e+18

4
test/data/test_real_11.json
Normal file
@@ -0,0 +1,4 @@
// Out of 64-bit signed integer range, switch to double in all modes. Length
// the same as LONG_MIN in base 10 and digit less than LONG_MIN's last digit in
// order to catch a bug in the parsing code.
-9300000000000000001

1
test/data/test_real_12.expected
Normal file
@@ -0,0 +1 @@
.=1.844674407370955e+19

2
test/data/test_real_12.json
Normal file
@@ -0,0 +1,2 @@
// 2^64 -> switch to double.
18446744073709551616
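
These data files pin down the integer/double switchover at parse time: values that fit the widest integer types stay integral, and one past the limit comes back as a double. A quick check mirroring test_real_12, assuming a build with 64-bit integer support (a sketch):

    #include <json/json.h>
    #include <iostream>

    int main() {
        Json::Reader reader;
        Json::Value v;
        reader.parse( "18446744073709551615", v );    // UINT64_MAX exactly
        std::cout << v.isIntegral() << std::endl;     // 1: still an integer
        reader.parse( "18446744073709551616", v );    // 2^64, one past the max
        std::cout << v.isDouble() << std::endl;       // 1: decoded as double
        return 0;
    }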

@@ -1 +1 @@
"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"

@@ -1 +1 @@
"!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"

@@ -1 +1 @@
"http:\/\/jsoncpp.sourceforge.net\/"

2
test/data/test_string_04.expected
Normal file
@@ -0,0 +1,2 @@
.=""abc\def""

2
test/data/test_string_04.json
Normal file
@@ -0,0 +1,2 @@
"\"abc\\def\""

2
test/data/test_string_05.expected
Normal file
@@ -0,0 +1 @@
.="\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"

2
test/data/test_string_05.json
Normal file
@@ -0,0 +1,2 @@
"\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"

@@ -1 +1 @@
.="a"

@@ -1 +1 @@
.="¢"

@@ -1 +1 @@
.="€"

@@ -1 +1 @@
.="𝄞"

@@ -1,2 +1,2 @@
.="Zażółć gęślą jaźń"

@@ -1,3 +1,3 @@
Test suite from http://json.org/JSON_checker/.

If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files.

@@ -1,73 +1,73 @@
import sys
import os
import os.path
import subprocess
from glob import glob
import optparse

VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'

class TestProxy(object):
    def __init__( self, test_exe_path, use_valgrind=False ):
        self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
        self.use_valgrind = use_valgrind

    def run( self, options ):
        if self.use_valgrind:
            cmd = VALGRIND_CMD.split()
        else:
            cmd = []
        cmd.extend( [self.test_exe_path, '--test-auto'] + options )
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        stdout = process.communicate()[0]
        if process.returncode:
            return False, stdout
        return True, stdout

def runAllTests( exe_path, use_valgrind=False ):
    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
    status, test_names = test_proxy.run( ['--list-tests'] )
    if not status:
        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
        return 1
    test_names = [name.strip() for name in test_names.strip().split('\n')]
    failures = []
    for name in test_names:
        print 'TESTING %s:' % name,
        succeed, result = test_proxy.run( ['--test', name] )
        if succeed:
            print 'OK'
        else:
            failures.append( (name, result) )
            print 'FAILED'
    failed_count = len(failures)
    pass_count = len(test_names) - failed_count
    if failed_count:
        print
        for name, result in failures:
            print result
        print '%d/%d tests passed (%d failure(s))' % (
            pass_count, len(test_names), failed_count)
        return 1
    else:
        print 'All %d tests passed' % len(test_names)
        return 0

def main():
    from optparse import OptionParser
    parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
    parser.add_option("--valgrind",
                      action="store_true", dest="valgrind", default=False,
                      help="run all the tests using valgrind to detect memory leaks")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()

    if len(args) != 1:
        parser.error( 'Must provide the path to the test_lib_json executable.' )
        sys.exit( 1 )

    exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
    sys.exit( exit_code )

if __name__ == '__main__':
    main()