Compare commits

3 Commits
1.7.6 ... 0.7.0

Author            SHA1        Message                                                 Date
Christopher Dunn  15949af098  0.7.0                                                   2014-11-20 08:48:22 -06:00
Christopher Dunn  8dc52b3cca  0.7.0                                                   2014-11-20 00:18:47 -06:00
Christopher Dunn  add941c1a9  Revert "Switch to copy-and-swap idiom for operator=."
This reverts commit 45cd9490cd.

Ignored ValueInternal* changes, since those did not produce symbols for
Debian build. (They must not have used the INTERNAL stuff.)

  https://github.com/open-source-parsers/jsoncpp/issues/78

Conflicts:
	include/json/value.h
	src/lib_json/json_internalarray.inl
	src/lib_json/json_internalmap.inl
	src/lib_json/json_value.cpp
2014-11-17 00:16:39 -06:00
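For context, the reverted change had made operator= use the copy-and-swap idiom. A minimal sketch of that idiom on a hypothetical resource-owning class (not JsonCpp's actual Value): the assignment operator takes its parameter by value, so the copy constructor performs the copy and a swap publishes it, which also yields the strong exception guarantee.

    #include <algorithm> // std::swap
    #include <cstring>

    // Hypothetical owning class, shown only to illustrate the idiom.
    class Buffer {
    public:
        explicit Buffer(const char* s) {
            data_ = new char[std::strlen(s) + 1];
            std::strcpy(data_, s);
        }
        Buffer(const Buffer& other) {
            data_ = new char[std::strlen(other.data_) + 1];
            std::strcpy(data_, other.data_);
        }
        ~Buffer() { delete[] data_; }

        // Copy-and-swap: `other` is a by-value copy; swapping transfers the
        // new payload in, and the old payload is released when `other`
        // goes out of scope.
        Buffer& operator=(Buffer other) {
            swap(other);
            return *this;
        }
        void swap(Buffer& other) { std::swap(data_, other.data_); }

    private:
        char* data_;
    };

    int main() {
        Buffer a("old"), b("new");
        a = b; // copy happens in the argument, then a cheap swap
        return 0;
    }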
72 changed files with 4112 additions and 8825 deletions

.gitattributes — 11 lines changed

@@ -1,11 +0,0 @@
* text=auto
*.h text
*.cpp text
*.json text
*.in text
*.sh eol=lf
*.bat eol=crlf
*.vcproj eol=crlf
*.vcxproj eol=crlf
*.sln eol=crlf
devtools/agent_vm* eol=crlf

.gitignore — 41 lines changed

@@ -10,43 +10,4 @@
/libs/ /libs/
/doc/doxyfile /doc/doxyfile
/dist/ /dist/
#/version /include/json/version.h
#/include/json/version.h
# MSVC project files:
*.sln
*.vcxproj
*.filters
*.user
*.sdf
*.opensdf
*.suo
# MSVC build files:
*.lib
*.obj
*.tlog/
*.pdb
# CMake-generated files:
CMakeFiles/
CTestTestFile.cmake
cmake_install.cmake
pkg-config/jsoncpp.pc
jsoncpp_lib_static.dir/
# In case someone runs cmake in the root-dir:
/CMakeCache.txt
/Makefile
/include/Makefile
/src/Makefile
/src/jsontestrunner/Makefile
/src/jsontestrunner/jsontestrunner_exe
/src/lib_json/Makefile
/src/test_lib_json/Makefile
/src/test_lib_json/jsoncpp_test
# eclipse project files
.project
.cproject
/.settings/

.travis.yml

@@ -2,42 +2,17 @@
# http://about.travis-ci.org/docs/user/build-configuration/ # http://about.travis-ci.org/docs/user/build-configuration/
# This file can be validated on: # This file can be validated on:
# http://lint.travis-ci.org/ # http://lint.travis-ci.org/
# See also before_install: sudo apt-get install cmake
# http://stackoverflow.com/questions/22111549/travis-ci-with-clang-3-4-and-c11/30925448#30925448
# to allow C++11, though we are not yet building with -std=c++11
install:
# /usr/bin/gcc is 4.6 always, but gcc-X.Y is available.
- if [ "$CXX" = "g++" ]; then export CXX="g++-4.9" CC="gcc-4.9"; fi
# /usr/bin/clang is our version already, and clang-X.Y does not exist.
#- if [ "$CXX" = "clang++" ]; then export CXX="clang++-3.7" CC="clang-3.7"; fi
- echo ${PATH}
- ls /usr/local
- ls /usr/local/bin
- export PATH=/usr/local/bin:/usr/bin:${PATH}
- echo ${CXX}
- ${CXX} --version
- which valgrind
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.9
- g++-4.9
- clang
- valgrind
os:
- linux
language: cpp language: cpp
compiler: compiler:
- gcc - gcc
- clang - clang
script: ./travis.sh script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make
env: env:
matrix: matrix:
- SHARED_LIB=ON STATIC_LIB=ON CMAKE_PKG=ON BUILD_TYPE=release VERBOSE_MAKE=false - SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false
- SHARED_LIB=OFF STATIC_LIB=ON CMAKE_PKG=OFF BUILD_TYPE=debug VERBOSE_MAKE=true VERBOSE - SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false
- SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE VERBOSE_MAKE=true
notifications: notifications:
email: false email:
sudo: false - aaronjjacobs@gmail.com

CMakeLists.txt

@@ -1,17 +1,12 @@
# vim: et ts=4 sts=4 sw=4 tw=0
CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5) CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)
PROJECT(jsoncpp) PROJECT(jsoncpp)
ENABLE_TESTING() ENABLE_TESTING()
OPTION(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON) OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON)
OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON) OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF) OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)
OPTION(JSONCPP_WITH_STRICT_ISO "Issue all the warnings demanded by strict ISO C and ISO C++" ON)
OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON) OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON)
OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF) OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF)
OPTION(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF)
OPTION(BUILD_STATIC_LIBS "Build jsoncpp_lib static library." ON)
# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix # Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
IF(NOT WIN32) IF(NOT WIN32)
@@ -19,33 +14,35 @@ IF(NOT WIN32)
SET(CMAKE_BUILD_TYPE Release CACHE STRING SET(CMAKE_BUILD_TYPE Release CACHE STRING
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage." "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage."
FORCE) FORCE)
ENDIF() ENDIF(NOT CMAKE_BUILD_TYPE)
ENDIF() ENDIF(NOT WIN32)
# Enable runtime search path support for dynamic libraries on OSX
IF(APPLE)
SET(CMAKE_MACOSX_RPATH 1)
ENDIF()
SET(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build")
SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory")
SET(RUNTIME_INSTALL_DIR bin SET(RUNTIME_INSTALL_DIR bin
CACHE PATH "Install dir for executables and dlls") CACHE PATH "Install dir for executables and dlls")
SET(ARCHIVE_INSTALL_DIR lib${LIB_SUFFIX} SET(ARCHIVE_INSTALL_DIR lib
CACHE PATH "Install dir for static libraries") CACHE PATH "Install dir for static libraries")
SET(LIBRARY_INSTALL_DIR lib${LIB_SUFFIX} SET(LIBRARY_INSTALL_DIR lib
CACHE PATH "Install dir for shared libraries") CACHE PATH "Install dir for shared libraries")
SET(INCLUDE_INSTALL_DIR include SET(INCLUDE_INSTALL_DIR include
CACHE PATH "Install dir for headers") CACHE PATH "Install dir for headers")
SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake SET(PACKAGE_INSTALL_DIR lib/cmake
CACHE PATH "Install dir for cmake package config files") CACHE PATH "Install dir for cmake package config files")
MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR ) MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PACKAGE_INSTALL_DIR )
# This ensures shared DLL are in the same dir as executable on Windows.
# Put all executables / libraries are in a project global directory.
SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
CACHE PATH "Single directory for all static libraries.")
SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
CACHE PATH "Single directory for all dynamic libraries on Unix.")
SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin
CACHE PATH "Single directory for all executable and dynamic libraries on Windows.")
MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY )
# Set variable named ${VAR_NAME} to value ${VALUE} # Set variable named ${VAR_NAME} to value ${VALUE}
FUNCTION(set_using_dynamic_name VAR_NAME VALUE) FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE) SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
ENDFUNCTION() ENDFUNCTION(set_using_dynamic_name)
# Extract major, minor, patch from version text # Extract major, minor, patch from version text
# Parse a version string "X.Y.Z" and outputs # Parse a version string "X.Y.Z" and outputs
@@ -61,41 +58,28 @@ MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE ) set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE )
ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} ) ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE ) set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE )
ENDIF() ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
ENDMACRO() ENDMACRO(jsoncpp_parse_version)
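In plain code, what jsoncpp_parse_version extracts from an "X.Y.Z" string amounts to the following (a C++ sketch of the macro's regex logic, for illustration only):

    #include <cstdio>

    int main() {
        int major = 0, minor = 0, patch = 0;
        const char* version = "1.7.6"; // the JSONCPP_VERSION value set below
        if (std::sscanf(version, "%d.%d.%d", &major, &minor, &patch) != 3) {
            // mirrors the FATAL_ERROR branch in the CMake code
            std::fprintf(stderr, "Failed to parse version string properly. Expect X.Y.Z\n");
            return 1;
        }
        std::printf("JsonCpp Version: %d.%d.%d\n", major, minor, patch);
        return 0;
    }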
# Read out version from "version" file # Read out version from "version" file
#FILE(STRINGS "version" JSONCPP_VERSION) FILE(STRINGS "version" JSONCPP_VERSION)
#SET( JSONCPP_VERSION_MAJOR X )
#SET( JSONCPP_VERSION_MINOR Y )
#SET( JSONCPP_VERSION_PATCH Z )
SET( JSONCPP_VERSION 1.7.6 )
jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION ) jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
#IF(NOT JSONCPP_VERSION_FOUND) IF(NOT JSONCPP_VERSION_FOUND)
# MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z") MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
#ENDIF(NOT JSONCPP_VERSION_FOUND) ENDIF(NOT JSONCPP_VERSION_FOUND)
SET( JSONCPP_USE_SECURE_MEMORY "0" CACHE STRING "-D...=1 to use memory-wiping allocator for STL" )
MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}") MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}")
# File version.h is only regenerated on CMake configure step # File version.h is only regenerated on CMake configure step
CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in" CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
"${PROJECT_SOURCE_DIR}/include/json/version.h" "${PROJECT_SOURCE_DIR}/include/json/version.h" )
NEWLINE_STYLE UNIX )
CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/version.in"
"${PROJECT_SOURCE_DIR}/version"
NEWLINE_STYLE UNIX )
macro(UseCompilationWarningAsError) macro(UseCompilationWarningAsError)
if ( MSVC ) if ( MSVC )
# Only enabled in debug because some old versions of VS STL generate # Only enabled in debug because some old versions of VS STL generate
# warnings when compiled in release configuration. # warnings when compiled in release configuration.
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ") set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") endif( MSVC )
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
if (JSONCPP_WITH_STRICT_ISO)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic-errors")
endif ()
endif()
endmacro() endmacro()
# Include our configuration header # Include our configuration header
@@ -105,52 +89,26 @@ if ( MSVC )
# Only enabled in debug because some old versions of VS STL generate # Only enabled in debug because some old versions of VS STL generate
# unreachable code warning when compiled in release configuration. # unreachable code warning when compiled in release configuration.
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ") set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
endif() endif( MSVC )
if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# using regular Clang or AppleClang
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wconversion -Wshadow -Werror=conversion -Werror=sign-compare")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
# using GCC
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wconversion -Wshadow -Wextra")
# not yet ready for -Wsign-conversion
if (JSONCPP_WITH_STRICT_ISO AND NOT JSONCPP_WITH_WARNING_AS_ERROR)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=conversion -pedantic")
endif ()
elseif (CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
# using Intel compiler
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Wconversion -Wshadow -Wextra -Werror=conversion")
if (JSONCPP_WITH_STRICT_ISO AND NOT JSONCPP_WITH_WARNING_AS_ERROR)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic")
endif ()
endif()
find_program(CCACHE_FOUND ccache)
if(CCACHE_FOUND)
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
endif(CCACHE_FOUND)
IF(JSONCPP_WITH_WARNING_AS_ERROR) IF(JSONCPP_WITH_WARNING_AS_ERROR)
UseCompilationWarningAsError() UseCompilationWarningAsError()
ENDIF() ENDIF(JSONCPP_WITH_WARNING_AS_ERROR)
IF(JSONCPP_WITH_PKGCONFIG_SUPPORT) IF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
CONFIGURE_FILE( CONFIGURE_FILE(
"pkg-config/jsoncpp.pc.in" "pkg-config/jsoncpp.pc.in"
"pkg-config/jsoncpp.pc" "pkg-config/jsoncpp.pc"
@ONLY) @ONLY)
INSTALL(FILES "${CMAKE_CURRENT_BINARY_DIR}/pkg-config/jsoncpp.pc" INSTALL(FILES "${CMAKE_BINARY_DIR}/pkg-config/jsoncpp.pc"
DESTINATION "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/pkgconfig") DESTINATION "${CMAKE_INSTALL_PREFIX}/lib/pkgconfig")
ENDIF() ENDIF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
IF(JSONCPP_WITH_CMAKE_PACKAGE) IF(JSONCPP_WITH_CMAKE_PACKAGE)
INSTALL(EXPORT jsoncpp INSTALL(EXPORT jsoncpp
DESTINATION ${PACKAGE_INSTALL_DIR}/jsoncpp DESTINATION ${PACKAGE_INSTALL_DIR}/jsoncpp
FILE jsoncppConfig.cmake) FILE jsoncppConfig.cmake)
ENDIF() ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
# Build the different applications # Build the different applications
ADD_SUBDIRECTORY( src ) ADD_SUBDIRECTORY( src )

NEWS.txt

@@ -80,7 +80,7 @@ New in SVN
(e.g. MSVC 2008 command prompt in start menu) before running scons. (e.g. MSVC 2008 command prompt in start menu) before running scons.
- Added support for amalgamated source and header generation (a la sqlite). - Added support for amalgamated source and header generation (a la sqlite).
Refer to README.md section "Generating amalgamated source and header" Refer to README.txt section "Generating amalgamated source and header"
for detail. for detail.
* Value * Value

README.md — 104 lines changed

@@ -7,62 +7,34 @@ pairs.
[json-org]: http://json.org/ [json-org]: http://json.org/
[JsonCpp][] is a C++ library that allows manipulating JSON values, including JsonCpp is a C++ library that allows manipulating JSON values, including
serialization and deserialization to and from strings. It can also preserve serialization and deserialization to and from strings. It can also preserve
existing comment in unserialization/serialization steps, making it a convenient existing comment in unserialization/serialization steps, making it a convenient
format to store user input files. format to store user input files.
[JsonCpp]: http://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html
## A note on backward-compatibility ## A note on backward-compatibility
* `1.y.z` is built with C++11. Very soon, we are switching to C++11 only. For older compilers, try the `pre-C++11` branch.
* `0.y.z` can be used with older compilers.
* Major versions maintain binary-compatibility.
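To check which line a project is building against, the generated version header can be queried (a minimal sketch; it assumes the build has generated include/json/version.h, which defines JSONCPP_VERSION_STRING):

    #include <json/version.h>
    #include <iostream>

    int main() {
        // JSONCPP_VERSION_STRING comes from the generated version.h.
        std::cout << "JsonCpp " << JSONCPP_VERSION_STRING << std::endl;
        return 0;
    }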
# Using JsonCpp in your project Using JsonCpp in your project
----------------------------- -----------------------------
The recommended approach to integrating JsonCpp in your project is to include
the [amalgamated source](#generating-amalgamated-source-and-header) (a single The recommended approach to integrating JsonCpp in your project is to build
`.cpp` file and two `.h` files) in your project, and compile and build as you the amalgamated source (a single `.cpp` file) with your own build system. This
would any other source file. This ensures consistency of compilation flags and ensures consistency of compilation flags and ABI compatibility. See the section
ABI compatibility, issues which arise when building shared or static "Generating amalgamated source and header" for instructions.
libraries. See the next section for instructions.
The `include/` should be added to your compiler include path. Jsoncpp headers The `include/` should be added to your compiler include path. Jsoncpp headers
should be included as follow: should be included as follow:
#include <json/json.h> #include <json/json.h>
If JsonCpp was built as a dynamic library on Windows, then your project needs to If JsonCpp was build as a dynamic library on Windows, then your project needs to
define the macro `JSON_DLL`. define the macro `JSON_DLL`.
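A minimal sketch of the include convention just described (classic 0.y.z-era API; compiler flags are illustrative):

    // main.cpp — build with e.g.: g++ main.cpp -Iinclude -ljsoncpp
    // (define JSON_DLL first when consuming a Windows DLL build).
    #include <json/json.h>
    #include <iostream>

    int main() {
        Json::Value root;
        Json::Reader reader; // the parser available throughout the 0.y.z line
        if (!reader.parse("{ \"name\": \"jsoncpp\" }", root))
            return 1;
        std::cout << root["name"].asString() << std::endl;
        return 0;
    }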
Generating amalgamated source and header
----------------------------------------
JsonCpp is provided with a script to generate a single header and a single
source file to ease inclusion into an existing project. The amalgamated source
can be generated at any time by running the following command from the
top-directory (this requires Python 2.6):
python amalgamate.py Building and testing with new CMake
-----------------------------------
It is possible to specify header name. See the `-h` option for detail.
By default, the following files are generated:
* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
* `dist/json/json.h`: corresponding header file for use in your project. It is
equivalent to including `json/json.h` in non-amalgamated source. This header
only depends on standard headers.
* `dist/json/json-forwards.h`: header that provides forward declaration of all
JsonCpp types.
The amalgamated sources are generated by concatenating JsonCpp source in the
correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
of other headers.
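A hedged sketch of consuming the generated files (file names are the defaults listed above; the compile command is illustrative):

    // myapp.cpp — compile the amalgamated source with your own code:
    //   g++ -Idist myapp.cpp dist/jsoncpp.cpp -o myapp
    #include "json/json.h" // resolves to dist/json/json.h via -Idist
    #include <iostream>

    int main() {
        Json::Value root(Json::objectValue);
        root["amalgamated"] = true;
        Json::FastWriter writer;
        std::cout << writer.write(root); // prints {"amalgamated":true}
        return 0;
    }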
# Contributing to JsonCpp
Building and testing with CMake
-------------------------------
[CMake][] is a C++ Makefiles/Solution generator. It is usually available on most [CMake][] is a C++ Makefiles/Solution generator. It is usually available on most
Linux system as package. On Ubuntu: Linux system as package. On Ubuntu:
@@ -85,7 +57,7 @@ Steps for generating solution/makefiles using `cmake-gui`:
* Make "source code" point to the source directory. * Make "source code" point to the source directory.
* Make "where to build the binary" point to the directory to use for the build. * Make "where to build the binary" point to the directory to use for the build.
* Click on the "Grouped" check box. * Click on the "Grouped" check box.
* Review JsonCpp build options (tick `BUILD_SHARED_LIBS` to build as a * Review JsonCpp build options (tick `JSONCPP_LIB_BUILD_SHARED` to build as a
dynamic library). dynamic library).
* Click the configure button at the bottom, then the generate button. * Click the configure button at the bottom, then the generate button.
* The generated solution/makefiles can be found in the binary directory. * The generated solution/makefiles can be found in the binary directory.
@@ -94,17 +66,19 @@ Alternatively, from the command-line on Unix in the source directory:
mkdir -p build/debug mkdir -p build/debug
cd build/debug cd build/debug
cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_STATIC_LIBS=ON -DBUILD_SHARED_LIBS=OFF -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" ../.. cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../..
make make
Running `cmake -h` will display the list of available generators (passed using Running `cmake -`" will display the list of available generators (passed using
the `-G` option). the `-G` option).
By default CMake hides compilation commands. This can be modified by specifying By default CMake hides compilation commands. This can be modified by specifying
`-DCMAKE_VERBOSE_MAKEFILE=true` when generating makefiles. `-DCMAKE_VERBOSE_MAKEFILE=true` when generating makefiles.
Building and testing with SCons Building and testing with SCons
------------------------------- -------------------------------
**Note:** The SCons-based build system is deprecated. Please use CMake; see the **Note:** The SCons-based build system is deprecated. Please use CMake; see the
section above. section above.
@@ -133,7 +107,14 @@ If you are building with Microsoft Visual Studio 2008, you need to set up the
environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before
running SCons. running SCons.
## Running the tests manually
Running the tests manually
--------------------------
Note that test can be run using SCons using the `check` target:
scons platform=$PLATFORM check
You need to run tests manually only if you are troubleshooting an issue. You need to run tests manually only if you are troubleshooting an issue.
In the instructions below, replace `path/to/jsontest` with the path of the In the instructions below, replace `path/to/jsontest` with the path of the
@@ -156,21 +137,45 @@ In the instructions below, replace `path/to/jsontest` with the path of the
# You can run the tests using valgrind: # You can run the tests using valgrind:
python rununittests.py --valgrind path/to/test_lib_json python rununittests.py --valgrind path/to/test_lib_json
## Running the tests using scons
Note that tests can be run using SCons using the `check` target:
scons platform=$PLATFORM check
Building the documentation Building the documentation
-------------------------- --------------------------
Run the Python script `doxybuild.py` from the top directory: Run the Python script `doxybuild.py` from the top directory:
python doxybuild.py --doxygen=$(which doxygen) --open --with-dot python doxybuild.py --doxygen=$(which doxygen) --open --with-dot
See `doxybuild.py --help` for options. See `doxybuild.py --help` for options.
Generating amalgamated source and header
----------------------------------------
JsonCpp is provided with a script to generate a single header and a single
source file to ease inclusion into an existing project. The amalgamated source
can be generated at any time by running the following command from the
top-directory (this requires Python 2.6):
python amalgamate.py
It is possible to specify header name. See the `-h` option for detail.
By default, the following files are generated:
* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
* `dist/json/json.h`: corresponding header file for use in your project. It is
equivalent to including `json/json.h` in non-amalgamated source. This header
only depends on standard headers.
* `dist/json/json-forwards.h`: header that provides forward declaration of all
JsonCpp types.
The amalgamated sources are generated by concatenating JsonCpp source in the
correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
of other headers.
Adding a reader/writer test Adding a reader/writer test
--------------------------- ---------------------------
To add a test, you need to create two files in test/data: To add a test, you need to create two files in test/data:
* a `TESTNAME.json` file, that contains the input document in JSON format. * a `TESTNAME.json` file, that contains the input document in JSON format.
@@ -190,8 +195,10 @@ The `TESTNAME.expected` file format is as follows:
See the examples `test_complex_01.json` and `test_complex_01.expected` to better See the examples `test_complex_01.json` and `test_complex_01.expected` to better
understand element paths. understand element paths.
Understanding reader/writer test output Understanding reader/writer test output
--------------------------------------- ---------------------------------------
When a test is run, output files are generated beside the input test files. When a test is run, output files are generated beside the input test files.
Below is a short description of the content of each file: Below is a short description of the content of each file:
@@ -208,7 +215,10 @@ Below is a short description of the content of each file:
* `test_complex_01.process-output`: `jsontest` output, typically useful for * `test_complex_01.process-output`: `jsontest` output, typically useful for
understanding parsing errors. understanding parsing errors.
License License
------- -------
See the `LICENSE` file for details. In summary, JsonCpp is licensed under the See the `LICENSE` file for details. In summary, JsonCpp is licensed under the
MIT license, or public domain if desired and recognized in your jurisdiction. MIT license, or public domain if desired and recognized in your jurisdiction.

SConstruct

@@ -237,7 +237,7 @@ RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string )
env.Alias( 'check' ) env.Alias( 'check' )
srcdist_cmd = env['SRCDIST_ADD']( source = """ srcdist_cmd = env['SRCDIST_ADD']( source = """
AUTHORS README.md SConstruct AUTHORS README.txt SConstruct
""".split() ) """.split() )
env.Alias( 'src-dist', srcdist_cmd ) env.Alias( 'src-dist', srcdist_cmd )

amalgamate.py

@@ -1,6 +1,6 @@
"""Amalgate json-cpp library sources into a single source and header file. """Amalgate json-cpp library sources into a single source and header file.
Works with python2.6+ and python3.4+. Requires Python 2.6
Example of invocation (must be invoked from json-cpp top directory): Example of invocation (must be invoked from json-cpp top directory):
python amalgate.py python amalgate.py
@@ -10,121 +10,116 @@ import os.path
import sys import sys
class AmalgamationFile: class AmalgamationFile:
def __init__(self, top_dir): def __init__( self, top_dir ):
self.top_dir = top_dir self.top_dir = top_dir
self.blocks = [] self.blocks = []
def add_text(self, text): def add_text( self, text ):
if not text.endswith("\n"): if not text.endswith( "\n" ):
text += "\n" text += "\n"
self.blocks.append(text) self.blocks.append( text )
def add_file(self, relative_input_path, wrap_in_comment=False): def add_file( self, relative_input_path, wrap_in_comment=False ):
def add_marker(prefix): def add_marker( prefix ):
self.add_text("") self.add_text( "" )
self.add_text("// " + "/"*70) self.add_text( "// " + "/"*70 )
self.add_text("// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/"))) self.add_text( "// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")) )
self.add_text("// " + "/"*70) self.add_text( "// " + "/"*70 )
self.add_text("") self.add_text( "" )
add_marker("Beginning") add_marker( "Beginning" )
f = open(os.path.join(self.top_dir, relative_input_path), "rt") f = open( os.path.join( self.top_dir, relative_input_path ), "rt" )
content = f.read() content = f.read()
if wrap_in_comment: if wrap_in_comment:
content = "/*\n" + content + "\n*/" content = "/*\n" + content + "\n*/"
self.add_text(content) self.add_text( content )
f.close() f.close()
add_marker("End") add_marker( "End" )
self.add_text("\n\n\n\n") self.add_text( "\n\n\n\n" )
def get_value(self): def get_value( self ):
return "".join(self.blocks).replace("\r\n","\n") return "".join( self.blocks ).replace("\r\n","\n")
def write_to(self, output_path): def write_to( self, output_path ):
output_dir = os.path.dirname(output_path) output_dir = os.path.dirname( output_path )
if output_dir and not os.path.isdir(output_dir): if output_dir and not os.path.isdir( output_dir ):
os.makedirs(output_dir) os.makedirs( output_dir )
f = open(output_path, "wb") f = open( output_path, "wb" )
f.write(str.encode(self.get_value(), 'UTF-8')) f.write( str.encode(self.get_value(), 'UTF-8') )
f.close() f.close()
def amalgamate_source(source_top_dir=None, def amalgamate_source( source_top_dir=None,
target_source_path=None, target_source_path=None,
header_include_path=None): header_include_path=None ):
"""Produces amalgated source. """Produces amalgated source.
Parameters: Parameters:
source_top_dir: top-directory source_top_dir: top-directory
target_source_path: output .cpp path target_source_path: output .cpp path
header_include_path: generated header path relative to target_source_path. header_include_path: generated header path relative to target_source_path.
""" """
print("Amalgating header...") print ("Amalgating header...")
header = AmalgamationFile(source_top_dir) header = AmalgamationFile( source_top_dir )
header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).") header.add_text( "/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/)." )
header.add_text('/// It is intended to be used with #include "%s"' % header_include_path) header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
header.add_file("LICENSE", wrap_in_comment=True) header.add_file( "LICENSE", wrap_in_comment=True )
header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED") header.add_text( "#ifndef JSON_AMALGATED_H_INCLUDED" )
header.add_text("# define JSON_AMALGATED_H_INCLUDED") header.add_text( "# define JSON_AMALGATED_H_INCLUDED" )
header.add_text("/// If defined, indicates that the source file is amalgated") header.add_text( "/// If defined, indicates that the source file is amalgated" )
header.add_text("/// to prevent private header inclusion.") header.add_text( "/// to prevent private header inclusion." )
header.add_text("#define JSON_IS_AMALGAMATION") header.add_text( "#define JSON_IS_AMALGAMATION" )
header.add_file("include/json/version.h") header.add_file( "include/json/version.h" )
#header.add_file("include/json/allocator.h") # Not available here. header.add_file( "include/json/config.h" )
header.add_file("include/json/config.h") header.add_file( "include/json/forwards.h" )
header.add_file("include/json/forwards.h") header.add_file( "include/json/features.h" )
header.add_file("include/json/features.h") header.add_file( "include/json/value.h" )
header.add_file("include/json/value.h") header.add_file( "include/json/reader.h" )
header.add_file("include/json/reader.h") header.add_file( "include/json/writer.h" )
header.add_file("include/json/writer.h") header.add_file( "include/json/assertions.h" )
header.add_file("include/json/assertions.h") header.add_text( "#endif //ifndef JSON_AMALGATED_H_INCLUDED" )
header.add_text("#endif //ifndef JSON_AMALGATED_H_INCLUDED")
target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path) target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
print("Writing amalgated header to %r" % target_header_path) print ("Writing amalgated header to %r" % target_header_path)
header.write_to(target_header_path) header.write_to( target_header_path )
base, ext = os.path.splitext(header_include_path) base, ext = os.path.splitext( header_include_path )
forward_header_include_path = base + "-forwards" + ext forward_header_include_path = base + "-forwards" + ext
print("Amalgating forward header...") print ("Amalgating forward header...")
header = AmalgamationFile(source_top_dir) header = AmalgamationFile( source_top_dir )
header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).") header.add_text( "/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/)." )
header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path) header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path )
header.add_text("/// This header provides forward declaration for all JsonCpp types.") header.add_text( "/// This header provides forward declaration for all JsonCpp types." )
header.add_file("LICENSE", wrap_in_comment=True) header.add_file( "LICENSE", wrap_in_comment=True )
header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED") header.add_text( "#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" )
header.add_text("# define JSON_FORWARD_AMALGATED_H_INCLUDED") header.add_text( "# define JSON_FORWARD_AMALGATED_H_INCLUDED" )
header.add_text("/// If defined, indicates that the source file is amalgated") header.add_text( "/// If defined, indicates that the source file is amalgated" )
header.add_text("/// to prevent private header inclusion.") header.add_text( "/// to prevent private header inclusion." )
header.add_text("#define JSON_IS_AMALGAMATION") header.add_text( "#define JSON_IS_AMALGAMATION" )
header.add_file("include/json/config.h") header.add_file( "include/json/config.h" )
header.add_file("include/json/forwards.h") header.add_file( "include/json/forwards.h" )
header.add_text("#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED") header.add_text( "#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED" )
target_forward_header_path = os.path.join(os.path.dirname(target_source_path), target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
forward_header_include_path) forward_header_include_path )
print("Writing amalgated forward header to %r" % target_forward_header_path) print ("Writing amalgated forward header to %r" % target_forward_header_path)
header.write_to(target_forward_header_path) header.write_to( target_forward_header_path )
print("Amalgating source...") print ("Amalgating source...")
source = AmalgamationFile(source_top_dir) source = AmalgamationFile( source_top_dir )
source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).") source.add_text( "/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/)." )
source.add_text('/// It is intended to be used with #include "%s"' % header_include_path) source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
source.add_file("LICENSE", wrap_in_comment=True) source.add_file( "LICENSE", wrap_in_comment=True )
source.add_text("") source.add_text( "" )
source.add_text('#include "%s"' % header_include_path) source.add_text( "#include <%s>" % header_include_path )
source.add_text(""" source.add_text( "" )
#ifndef JSON_IS_AMALGAMATION
#error "Compile with -I PATH_TO_JSON_DIRECTORY"
#endif
""")
source.add_text("")
lib_json = "src/lib_json" lib_json = "src/lib_json"
source.add_file(os.path.join(lib_json, "json_tool.h")) source.add_file( os.path.join(lib_json, "json_tool.h") )
source.add_file(os.path.join(lib_json, "json_reader.cpp")) source.add_file( os.path.join(lib_json, "json_reader.cpp") )
source.add_file(os.path.join(lib_json, "json_valueiterator.inl")) source.add_file( os.path.join(lib_json, "json_batchallocator.h") )
source.add_file(os.path.join(lib_json, "json_value.cpp")) source.add_file( os.path.join(lib_json, "json_valueiterator.inl") )
source.add_file(os.path.join(lib_json, "json_writer.cpp")) source.add_file( os.path.join(lib_json, "json_value.cpp") )
source.add_file( os.path.join(lib_json, "json_writer.cpp") )
print("Writing amalgated source to %r" % target_source_path) print ("Writing amalgated source to %r" % target_source_path)
source.write_to(target_source_path) source.write_to( target_source_path )
def main(): def main():
usage = """%prog [options] usage = """%prog [options]
@@ -142,14 +137,14 @@ Generate a single amalgated source and header file from the sources.
parser.enable_interspersed_args() parser.enable_interspersed_args()
options, args = parser.parse_args() options, args = parser.parse_args()
msg = amalgamate_source(source_top_dir=options.top_dir, msg = amalgamate_source( source_top_dir=options.top_dir,
target_source_path=options.target_source_path, target_source_path=options.target_source_path,
header_include_path=options.header_include_path) header_include_path=options.header_include_path )
if msg: if msg:
sys.stderr.write(msg + "\n") sys.stderr.write( msg + "\n" )
sys.exit(1) sys.exit( 1 )
else: else:
print("Source succesfully amalagated") print ("Source succesfully amalagated")
if __name__ == "__main__": if __name__ == "__main__":
main() main()

appveyor.yml

@@ -1,35 +0,0 @@
# This is a comment.
version: build.{build}
os: Windows Server 2012 R2
clone_folder: c:\projects\jsoncpp
platform:
- Win32
- x64
configuration:
- Debug
- Release
# scripts to run before build
before_build:
- echo "Running cmake..."
- cd c:\projects\jsoncpp
- cmake --version
- set PATH=C:\Program Files (x86)\MSBuild\14.0\Bin;%PATH%
- if %PLATFORM% == Win32 cmake .
- if %PLATFORM% == x64 cmake -G "Visual Studio 12 2013 Win64" .
build:
project: jsoncpp.sln # path to Visual Studio solution or project
deploy:
provider: GitHub
auth_token:
secure: K2Tp1q8pIZ7rs0Ot24ZMWuwr12Ev6Tc6QkhMjGQxoQG3ng1pXtgPasiJ45IDXGdg
on:
branch: master
appveyor_repo_tag: true


@@ -1,35 +1,14 @@
# This is only for jsoncpp developers/contributors. all: build test-amalgamate
# We use this to sign releases, generate documentation, etc.
VER?=$(shell cat version)
default:
@echo "VER=${VER}"
sign: jsoncpp-${VER}.tar.gz
gpg --armor --detach-sign $<
gpg --verify $<.asc
# Then upload .asc to the release.
jsoncpp-%.tar.gz:
curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@
dox:
python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in
rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/
# Then 'git add -A' and 'git push' in jsoncpp-docs.
build: build:
mkdir -p build/debug mkdir -p build/debug
cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_SHARED_LIBS=ON -G "Unix Makefiles" ../.. cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=ON -G "Unix Makefiles" ../..
make -C build/debug make -C build/debug
# Currently, this depends on include/json/version.h generated # Currently, this depends on include/json/version.h generated
# by cmake. # by cmake.
test-amalgamate: test-amalgamate: build
python2.7 amalgamate.py python2.7 amalgamate.py
python3.4 amalgamate.py python3.4 amalgamate.py
cd dist; gcc -I. -c jsoncpp.cpp
valgrind:
valgrind --error-exitcode=42 --leak-check=full ./build/debug/src/test_lib_json/jsoncpp_test
clean:
\rm -rf *.gz *.asc dist/
.PHONY: build .PHONY: build


@@ -1,6 +1 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
# module # module


@@ -19,8 +19,8 @@
}, },
{"name": "shared_dll", {"name": "shared_dll",
"variables": [ "variables": [
["BUILD_SHARED_LIBS=true"], ["JSONCPP_LIB_BUILD_SHARED=true"],
["BUILD_SHARED_LIBS=false"] ["JSONCPP_LIB_BUILD_SHARED=false"]
] ]
}, },
{"name": "build_type", {"name": "build_type",


@@ -12,8 +12,8 @@
}, },
{"name": "shared_dll", {"name": "shared_dll",
"variables": [ "variables": [
["BUILD_SHARED_LIBS=true"], ["JSONCPP_LIB_BUILD_SHARED=true"],
["BUILD_SHARED_LIBS=false"] ["JSONCPP_LIB_BUILD_SHARED=false"]
] ]
}, },
{"name": "build_type", {"name": "build_type",

devtools/antglob.py

@@ -1,11 +1,7 @@
#!/usr/bin/env python #!/usr/bin/env python
# encoding: utf-8 # encoding: utf-8
# Copyright 2009 Baptiste Lepilleur # Baptiste Lepilleur, 2009
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from __future__ import print_function
from dircache import listdir from dircache import listdir
import re import re
import fnmatch import fnmatch
@@ -57,9 +53,9 @@ LINKS = DIR_LINK | FILE_LINK
ALL_NO_LINK = DIR | FILE ALL_NO_LINK = DIR | FILE
ALL = DIR | FILE | LINKS ALL = DIR | FILE | LINKS
_ANT_RE = re.compile(r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)') _ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )
def ant_pattern_to_re(ant_pattern): def ant_pattern_to_re( ant_pattern ):
"""Generates a regular expression from the ant pattern. """Generates a regular expression from the ant pattern.
Matching convention: Matching convention:
**/a: match 'a', 'dir/a', 'dir1/dir2/a' **/a: match 'a', 'dir/a', 'dir1/dir2/a'
@@ -68,30 +64,30 @@ def ant_pattern_to_re(ant_pattern):
""" """
rex = ['^'] rex = ['^']
next_pos = 0 next_pos = 0
sep_rex = r'(?:/|%s)' % re.escape(os.path.sep) sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
## print 'Converting', ant_pattern ## print 'Converting', ant_pattern
for match in _ANT_RE.finditer(ant_pattern): for match in _ANT_RE.finditer( ant_pattern ):
## print 'Matched', match.group() ## print 'Matched', match.group()
## print match.start(0), next_pos ## print match.start(0), next_pos
if match.start(0) != next_pos: if match.start(0) != next_pos:
raise ValueError("Invalid ant pattern") raise ValueError( "Invalid ant pattern" )
if match.group(1): # /**/ if match.group(1): # /**/
rex.append(sep_rex + '(?:.*%s)?' % sep_rex) rex.append( sep_rex + '(?:.*%s)?' % sep_rex )
elif match.group(2): # **/ elif match.group(2): # **/
rex.append('(?:.*%s)?' % sep_rex) rex.append( '(?:.*%s)?' % sep_rex )
elif match.group(3): # /** elif match.group(3): # /**
rex.append(sep_rex + '.*') rex.append( sep_rex + '.*' )
elif match.group(4): # * elif match.group(4): # *
rex.append('[^/%s]*' % re.escape(os.path.sep)) rex.append( '[^/%s]*' % re.escape(os.path.sep) )
elif match.group(5): # / elif match.group(5): # /
rex.append(sep_rex) rex.append( sep_rex )
else: # somepath else: # somepath
rex.append(re.escape(match.group(6))) rex.append( re.escape(match.group(6)) )
next_pos = match.end() next_pos = match.end()
rex.append('$') rex.append('$')
return re.compile(''.join(rex)) return re.compile( ''.join( rex ) )
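To make the matching convention concrete, here is a rough C++ analogue of ant_pattern_to_re (a deliberate simplification: it handles only "**/" and "*", and only '/' separators, unlike the Python version above):

    #include <cstddef>
    #include <iostream>
    #include <regex>
    #include <string>

    std::string antPatternToRegex(const std::string& pattern) {
        std::string rex = "^";
        for (std::size_t i = 0; i < pattern.size();) {
            if (pattern.compare(i, 3, "**/") == 0) {
                rex += "(?:.*/)?"; // any directory prefix, or none
                i += 3;
            } else if (pattern[i] == '*') {
                rex += "[^/]*"; // any run of non-separator characters
                ++i;
            } else {
                if (std::string("\\^$.|?+()[]{}").find(pattern[i]) != std::string::npos)
                    rex += '\\'; // escape regex metacharacters
                rex += pattern[i];
                ++i;
            }
        }
        return rex + "$";
    }

    int main() {
        std::regex re(antPatternToRegex("**/*.py"));
        std::cout << std::regex_match("src/script.py", re) << "\n"; // 1
        std::cout << std::regex_match("script.pyc", re) << "\n";    // 0
        return 0;
    }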
def _as_list(l): def _as_list( l ):
if isinstance(l, basestring): if isinstance(l, basestring):
return l.split() return l.split()
return l return l
@@ -108,37 +104,37 @@ def glob(dir_path,
dir_path = dir_path.replace('/',os.path.sep) dir_path = dir_path.replace('/',os.path.sep)
entry_type_filter = entry_type entry_type_filter = entry_type
def is_pruned_dir(dir_name): def is_pruned_dir( dir_name ):
for pattern in prune_dirs: for pattern in prune_dirs:
if fnmatch.fnmatch(dir_name, pattern): if fnmatch.fnmatch( dir_name, pattern ):
return True return True
return False return False
def apply_filter(full_path, filter_rexs): def apply_filter( full_path, filter_rexs ):
"""Return True if at least one of the filter regular expression match full_path.""" """Return True if at least one of the filter regular expression match full_path."""
for rex in filter_rexs: for rex in filter_rexs:
if rex.match(full_path): if rex.match( full_path ):
return True return True
return False return False
def glob_impl(root_dir_path): def glob_impl( root_dir_path ):
child_dirs = [root_dir_path] child_dirs = [root_dir_path]
while child_dirs: while child_dirs:
dir_path = child_dirs.pop() dir_path = child_dirs.pop()
for entry in listdir(dir_path): for entry in listdir( dir_path ):
full_path = os.path.join(dir_path, entry) full_path = os.path.join( dir_path, entry )
## print 'Testing:', full_path, ## print 'Testing:', full_path,
is_dir = os.path.isdir(full_path) is_dir = os.path.isdir( full_path )
if is_dir and not is_pruned_dir(entry): # explore child directory ? if is_dir and not is_pruned_dir( entry ): # explore child directory ?
## print '===> marked for recursion', ## print '===> marked for recursion',
child_dirs.append(full_path) child_dirs.append( full_path )
included = apply_filter(full_path, include_filter) included = apply_filter( full_path, include_filter )
rejected = apply_filter(full_path, exclude_filter) rejected = apply_filter( full_path, exclude_filter )
if not included or rejected: # do not include entry ? if not included or rejected: # do not include entry ?
## print '=> not included or rejected' ## print '=> not included or rejected'
continue continue
link = os.path.islink(full_path) link = os.path.islink( full_path )
is_file = os.path.isfile(full_path) is_file = os.path.isfile( full_path )
if not is_file and not is_dir: if not is_file and not is_dir:
## print '=> unknown entry type' ## print '=> unknown entry type'
continue continue
@@ -149,57 +145,57 @@ def glob(dir_path,
## print '=> type: %d' % entry_type, ## print '=> type: %d' % entry_type,
if (entry_type & entry_type_filter) != 0: if (entry_type & entry_type_filter) != 0:
## print ' => KEEP' ## print ' => KEEP'
yield os.path.join(dir_path, entry) yield os.path.join( dir_path, entry )
## else: ## else:
## print ' => TYPE REJECTED' ## print ' => TYPE REJECTED'
return list(glob_impl(dir_path)) return list( glob_impl( dir_path ) )
if __name__ == "__main__": if __name__ == "__main__":
import unittest import unittest
class AntPatternToRETest(unittest.TestCase): class AntPatternToRETest(unittest.TestCase):
## def test_conversion(self): ## def test_conversion( self ):
## self.assertEqual('^somepath$', ant_pattern_to_re('somepath').pattern) ## self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )
def test_matching(self): def test_matching( self ):
test_cases = [ ('path', test_cases = [ ( 'path',
['path'], ['path'],
['somepath', 'pathsuffix', '/path', '/path']), ['somepath', 'pathsuffix', '/path', '/path'] ),
('*.py', ( '*.py',
['source.py', 'source.ext.py', '.py'], ['source.py', 'source.ext.py', '.py'],
['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c']), ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
('**/path', ( '**/path',
['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'], ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath']), ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
('path/**', ( 'path/**',
['path/a', 'path/path/a', 'path//'], ['path/a', 'path/path/a', 'path//'],
['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a']), ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
('/**/path', ( '/**/path',
['/path', '/a/path', '/a/b/path/path', '/path/path'], ['/path', '/a/path', '/a/b/path/path', '/path/path'],
['path', 'path/', 'a/path', '/pathsuffix', '/somepath']), ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
('a/b', ( 'a/b',
['a/b'], ['a/b'],
['somea/b', 'a/bsuffix', 'a/b/c']), ['somea/b', 'a/bsuffix', 'a/b/c'] ),
('**/*.py', ( '**/*.py',
['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'], ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
['script.pyc', 'script.pyo', 'a.py/b']), ['script.pyc', 'script.pyo', 'a.py/b'] ),
('src/**/*.py', ( 'src/**/*.py',
['src/a.py', 'src/dir/a.py'], ['src/a.py', 'src/dir/a.py'],
['a/src/a.py', '/src/a.py']), ['a/src/a.py', '/src/a.py'] ),
] ]
for ant_pattern, accepted_matches, rejected_matches in list(test_cases): for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
def local_path(paths): def local_path( paths ):
return [ p.replace('/',os.path.sep) for p in paths ] return [ p.replace('/',os.path.sep) for p in paths ]
test_cases.append((ant_pattern, local_path(accepted_matches), local_path(rejected_matches))) test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
for ant_pattern, accepted_matches, rejected_matches in test_cases: for ant_pattern, accepted_matches, rejected_matches in test_cases:
rex = ant_pattern_to_re(ant_pattern) rex = ant_pattern_to_re( ant_pattern )
print('ant_pattern:', ant_pattern, ' => ', rex.pattern) print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
for accepted_match in accepted_matches: for accepted_match in accepted_matches:
print('Accepted?:', accepted_match) print 'Accepted?:', accepted_match
self.assertTrue(rex.match(accepted_match) is not None) self.assert_( rex.match( accepted_match ) is not None )
for rejected_match in rejected_matches: for rejected_match in rejected_matches:
print('Rejected?:', rejected_match) print 'Rejected?:', rejected_match
self.assertTrue(rex.match(rejected_match) is None) self.assert_( rex.match( rejected_match ) is None )
unittest.main() unittest.main()

devtools/batchbuild.py

@@ -1,4 +1,3 @@
from __future__ import print_function
import collections import collections
import itertools import itertools
import json import json
@@ -18,131 +17,131 @@ class BuildDesc:
self.build_type = build_type self.build_type = build_type
self.generator = generator self.generator = generator
def merged_with(self, build_desc): def merged_with( self, build_desc ):
"""Returns a new BuildDesc by merging field content. """Returns a new BuildDesc by merging field content.
Prefer build_desc fields to self fields for single valued field. Prefer build_desc fields to self fields for single valued field.
""" """
return BuildDesc(self.prepend_envs + build_desc.prepend_envs, return BuildDesc( self.prepend_envs + build_desc.prepend_envs,
self.variables + build_desc.variables, self.variables + build_desc.variables,
build_desc.build_type or self.build_type, build_desc.build_type or self.build_type,
build_desc.generator or self.generator) build_desc.generator or self.generator )
def env(self): def env( self ):
environ = os.environ.copy() environ = os.environ.copy()
for values_by_name in self.prepend_envs: for values_by_name in self.prepend_envs:
for var, value in list(values_by_name.items()): for var, value in values_by_name.items():
var = var.upper() var = var.upper()
if type(value) is unicode: if type(value) is unicode:
value = value.encode(sys.getdefaultencoding()) value = value.encode( sys.getdefaultencoding() )
if var in environ: if var in environ:
environ[var] = value + os.pathsep + environ[var] environ[var] = value + os.pathsep + environ[var]
else: else:
environ[var] = value environ[var] = value
return environ return environ
def cmake_args(self): def cmake_args( self ):
args = ["-D%s" % var for var in self.variables] args = ["-D%s" % var for var in self.variables]
# skip build type for Visual Studio solution as it cause warning # skip build type for Visual Studio solution as it cause warning
if self.build_type and 'Visual' not in self.generator: if self.build_type and 'Visual' not in self.generator:
args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type) args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type )
if self.generator: if self.generator:
args.extend(['-G', self.generator]) args.extend( ['-G', self.generator] )
return args return args
def __repr__(self): def __repr__( self ):
return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type) return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type)
class BuildData: class BuildData:
def __init__(self, desc, work_dir, source_dir): def __init__( self, desc, work_dir, source_dir ):
self.desc = desc self.desc = desc
self.work_dir = work_dir self.work_dir = work_dir
self.source_dir = source_dir self.source_dir = source_dir
self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log') self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' )
self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log') self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' )
self.cmake_succeeded = False self.cmake_succeeded = False
self.build_succeeded = False self.build_succeeded = False
def execute_build(self): def execute_build(self):
print('Build %s' % self.desc) print 'Build %s' % self.desc
self._make_new_work_dir() self._make_new_work_dir( )
self.cmake_succeeded = self._generate_makefiles() self.cmake_succeeded = self._generate_makefiles( )
if self.cmake_succeeded: if self.cmake_succeeded:
self.build_succeeded = self._build_using_makefiles() self.build_succeeded = self._build_using_makefiles( )
return self.build_succeeded return self.build_succeeded
def _generate_makefiles(self): def _generate_makefiles(self):
print(' Generating makefiles: ', end=' ') print ' Generating makefiles: ',
cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)] cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path) succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
print('done' if succeeded else 'FAILED') print 'done' if succeeded else 'FAILED'
return succeeded return succeeded
def _build_using_makefiles(self): def _build_using_makefiles(self):
print(' Building:', end=' ') print ' Building:',
cmd = ['cmake', '--build', self.work_dir] cmd = ['cmake', '--build', self.work_dir]
if self.desc.build_type: if self.desc.build_type:
cmd += ['--config', self.desc.build_type] cmd += ['--config', self.desc.build_type]
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path) succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
print('done' if succeeded else 'FAILED') print 'done' if succeeded else 'FAILED'
return succeeded return succeeded
def _execute_build_subprocess(self, cmd, env, log_path): def _execute_build_subprocess(self, cmd, env, log_path):
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir, process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
env=env) env=env )
stdout, _ = process.communicate() stdout, _ = process.communicate( )
succeeded = (process.returncode == 0) succeeded = (process.returncode == 0)
with open(log_path, 'wb') as flog: with open( log_path, 'wb' ) as flog:
log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
flog.write(fix_eol(log)) flog.write( fix_eol( log ) )
return succeeded return succeeded
def _make_new_work_dir(self): def _make_new_work_dir(self):
if os.path.isdir(self.work_dir): if os.path.isdir( self.work_dir ):
print(' Removing work directory', self.work_dir) print ' Removing work directory', self.work_dir
shutil.rmtree(self.work_dir, ignore_errors=True) shutil.rmtree( self.work_dir, ignore_errors=True )
if not os.path.isdir(self.work_dir): if not os.path.isdir( self.work_dir ):
os.makedirs(self.work_dir) os.makedirs( self.work_dir )
def fix_eol(stdout): def fix_eol( stdout ):
"""Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n). """Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
""" """
return re.sub('\r*\n', os.linesep, stdout) return re.sub( '\r*\n', os.linesep, stdout )
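A C++ counterpart of fix_eol, for illustration (same regex, but normalizing to "\n" rather than os.linesep):

    #include <iostream>
    #include <regex>
    #include <string>

    int main() {
        // "\r\r\n" is the broken EOL that `cmake --build` can emit on Windows.
        std::string out = "line one\r\r\nline two\r\n";
        std::string fixed = std::regex_replace(out, std::regex("\r*\n"), "\n");
        std::cout << fixed; // "line one\nline two\n"
        return 0;
    }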
def load_build_variants_from_config(config_path): def load_build_variants_from_config( config_path ):
with open(config_path, 'rb') as fconfig: with open( config_path, 'rb' ) as fconfig:
data = json.load(fconfig) data = json.load( fconfig )
variants = data[ 'cmake_variants' ] variants = data[ 'cmake_variants' ]
build_descs_by_axis = collections.defaultdict(list) build_descs_by_axis = collections.defaultdict( list )
for axis in variants: for axis in variants:
axis_name = axis["name"] axis_name = axis["name"]
build_descs = [] build_descs = []
if "generators" in axis: if "generators" in axis:
for generator_data in axis["generators"]: for generator_data in axis["generators"]:
for generator in generator_data["generator"]: for generator in generator_data["generator"]:
build_desc = BuildDesc(generator=generator, build_desc = BuildDesc( generator=generator,
prepend_envs=generator_data.get("env_prepend")) prepend_envs=generator_data.get("env_prepend") )
build_descs.append(build_desc) build_descs.append( build_desc )
elif "variables" in axis: elif "variables" in axis:
for variables in axis["variables"]: for variables in axis["variables"]:
build_desc = BuildDesc(variables=variables) build_desc = BuildDesc( variables=variables )
build_descs.append(build_desc) build_descs.append( build_desc )
elif "build_types" in axis: elif "build_types" in axis:
for build_type in axis["build_types"]: for build_type in axis["build_types"]:
build_desc = BuildDesc(build_type=build_type) build_desc = BuildDesc( build_type=build_type )
build_descs.append(build_desc) build_descs.append( build_desc )
build_descs_by_axis[axis_name].extend(build_descs) build_descs_by_axis[axis_name].extend( build_descs )
return build_descs_by_axis return build_descs_by_axis
def generate_build_variants(build_descs_by_axis): def generate_build_variants( build_descs_by_axis ):
"""Returns a list of BuildDesc generated for the partial BuildDesc for each axis.""" """Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
axis_names = list(build_descs_by_axis.keys()) axis_names = build_descs_by_axis.keys()
build_descs = [] build_descs = []
for axis_name, axis_build_descs in list(build_descs_by_axis.items()): for axis_name, axis_build_descs in build_descs_by_axis.items():
if len(build_descs): if len(build_descs):
# for each existing build_desc and each axis build desc, create a new build_desc # for each existing build_desc and each axis build desc, create a new build_desc
new_build_descs = [] new_build_descs = []
for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs): for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs):
new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc)) new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) )
build_descs = new_build_descs build_descs = new_build_descs
else: else:
build_descs = axis_build_descs build_descs = axis_build_descs
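The axis merging above is a cartesian product: each axis multiplies the set of variants built so far. A rough C++ analogue of generate_build_variants (hypothetical names, nested loops in place of itertools.product):

    #include <iostream>
    #include <string>
    #include <vector>

    struct BuildDesc { std::vector<std::string> variables; };

    // Merge two partial descriptions (reduced here to concatenating variables).
    BuildDesc merged(const BuildDesc& a, const BuildDesc& b) {
        BuildDesc m = a;
        m.variables.insert(m.variables.end(), b.variables.begin(), b.variables.end());
        return m;
    }

    int main() {
        std::vector<std::vector<BuildDesc>> axes = {
            { {{"SHARED=ON"}}, {{"SHARED=OFF"}} },   // one axis of variants
            { {{"TYPE=debug"}}, {{"TYPE=release"}} } // another axis
        };
        std::vector<BuildDesc> variants;
        for (const auto& axis : axes) {
            if (variants.empty()) { variants = axis; continue; }
            std::vector<BuildDesc> next;
            for (const auto& v : variants)
                for (const auto& a : axis)
                    next.push_back(merged(v, a)); // the itertools.product step
            variants = next;
        }
        for (const auto& v : variants) { // prints the 2 x 2 = 4 combinations
            for (const auto& s : v.variables) std::cout << s << ' ';
            std::cout << '\n';
        }
        return 0;
    }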
@@ -174,58 +173,61 @@ $tr_builds
</table> </table>
</body></html>''') </body></html>''')
def generate_html_report(html_report_path, builds): def generate_html_report( html_report_path, builds ):
report_dir = os.path.dirname(html_report_path) report_dir = os.path.dirname( html_report_path )
# Vertical axis: generator # Vertical axis: generator
# Horizontal: variables, then build_type # Horizontal: variables, then build_type
builds_by_generator = collections.defaultdict(list) builds_by_generator = collections.defaultdict( list )
variables = set() variables = set()
build_types_by_variable = collections.defaultdict(set) build_types_by_variable = collections.defaultdict( set )
build_by_pos_key = {} # { (generator, var_key, build_type): build } build_by_pos_key = {} # { (generator, var_key, build_type): build }
for build in builds: for build in builds:
builds_by_generator[build.desc.generator].append(build) builds_by_generator[build.desc.generator].append( build )
var_key = tuple(sorted(build.desc.variables)) var_key = tuple(sorted(build.desc.variables))
variables.add(var_key) variables.add( var_key )
build_types_by_variable[var_key].add(build.desc.build_type) build_types_by_variable[var_key].add( build.desc.build_type )
pos_key = (build.desc.generator, var_key, build.desc.build_type) pos_key = (build.desc.generator, var_key, build.desc.build_type)
build_by_pos_key[pos_key] = build build_by_pos_key[pos_key] = build
variables = sorted(variables) variables = sorted( variables )
th_vars = [] th_vars = []
th_build_types = [] th_build_types = []
for variable in variables: for variable in variables:
build_types = sorted(build_types_by_variable[variable]) build_types = sorted( build_types_by_variable[variable] )
nb_build_type = len(build_types_by_variable[variable]) nb_build_type = len(build_types_by_variable[variable])
th_vars.append('<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape(' '.join(variable)))) th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) )
for build_type in build_types: for build_type in build_types:
th_build_types.append('<th>%s</th>' % cgi.escape(build_type)) th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) )
tr_builds = [] tr_builds = []
for generator in sorted(builds_by_generator): for generator in sorted( builds_by_generator ):
tds = [ '<td>%s</td>\n' % cgi.escape(generator) ] tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ]
for variable in variables: for variable in variables:
build_types = sorted(build_types_by_variable[variable]) build_types = sorted( build_types_by_variable[variable] )
for build_type in build_types: for build_type in build_types:
pos_key = (generator, variable, build_type) pos_key = (generator, variable, build_type)
build = build_by_pos_key.get(pos_key) build = build_by_pos_key.get(pos_key)
if build: if build:
cmake_status = 'ok' if build.cmake_succeeded else 'FAILED' cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
build_status = 'ok' if build.build_succeeded else 'FAILED' build_status = 'ok' if build.build_succeeded else 'FAILED'
cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir) cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
build_log_url = os.path.relpath(build.build_log_path, report_dir) build_log_url = os.path.relpath( build.build_log_path, report_dir )
td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status) td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
if build.cmake_succeeded: if build.cmake_succeeded:
td += '<br><a href="%s" class="%s">Build: %s</a>' % ( build_log_url, build_status.lower(), build_status) td += '<br><a href="%s" class="%s">Build: %s</a>' % (
build_log_url, build_status.lower(), build_status)
td += '</td>' td += '</td>'
else: else:
td = '<td></td>' td = '<td></td>'
tds.append(td) tds.append( td )
tr_builds.append('<tr>%s</tr>' % '\n'.join(tds)) tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
html = HTML_TEMPLATE.substitute( title='Batch build report', html = HTML_TEMPLATE.substitute(
title='Batch build report',
th_vars=' '.join(th_vars), th_vars=' '.join(th_vars),
th_build_types=' '.join(th_build_types), th_build_types=' '.join( th_build_types),
tr_builds='\n'.join(tr_builds)) tr_builds='\n'.join( tr_builds ) )
with open(html_report_path, 'wt') as fhtml: with open( html_report_path, 'wt' ) as fhtml:
fhtml.write(html) fhtml.write( html )
print('HTML report generated in:', html_report_path) print 'HTML report generated in:', html_report_path
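One portability note: `cgi.escape`, used throughout this report generator, was deprecated in Python 3.2 and removed in 3.8; on newer interpreters the equivalent is `html.escape`. A drop-in sketch, not part of this patch:

    # Stand-in for cgi.escape on Python >= 3.8 (not part of this
    # patch). html.escape escapes quotes by default; cgi.escape
    # did not, hence quote=False.
    import html

    def escape(s, quote=False):
        return html.escape(s, quote=quote)

    print(escape('<th>a & b</th>'))  # &lt;th&gt;a &amp; b&lt;/th&gt;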
def main(): def main():
usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...] usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
@@ -243,34 +245,34 @@ python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.j
parser.enable_interspersed_args() parser.enable_interspersed_args()
options, args = parser.parse_args() options, args = parser.parse_args()
if len(args) < 3: if len(args) < 3:
parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.") parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
work_dir = args[0] work_dir = args[0]
source_dir = args[1].rstrip('/\\') source_dir = args[1].rstrip('/\\')
config_paths = args[2:] config_paths = args[2:]
for config_path in config_paths: for config_path in config_paths:
if not os.path.isfile(config_path): if not os.path.isfile( config_path ):
parser.error("Can not read: %r" % config_path) parser.error( "Can not read: %r" % config_path )
# generate build variants # generate build variants
build_descs = [] build_descs = []
for config_path in config_paths: for config_path in config_paths:
build_descs_by_axis = load_build_variants_from_config(config_path) build_descs_by_axis = load_build_variants_from_config( config_path )
build_descs.extend(generate_build_variants(build_descs_by_axis)) build_descs.extend( generate_build_variants( build_descs_by_axis ) )
print('Build variants (%d):' % len(build_descs)) print 'Build variants (%d):' % len(build_descs)
# assign build directory for each variant # assign build directory for each variant
if not os.path.isdir(work_dir): if not os.path.isdir( work_dir ):
os.makedirs(work_dir) os.makedirs( work_dir )
builds = [] builds = []
with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap: with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
for index, build_desc in enumerate(build_descs): for index, build_desc in enumerate( build_descs ):
build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1)) build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
builds.append(BuildData(build_desc, build_desc_work_dir, source_dir)) builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc)) fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
for build in builds: for build in builds:
build.execute_build() build.execute_build()
html_report_path = os.path.join(work_dir, 'batchbuild-report.html') html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
generate_html_report(html_report_path, builds) generate_html_report( html_report_path, builds )
print('Done') print 'Done'
if __name__ == '__main__': if __name__ == '__main__':


@@ -1,20 +1,13 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from __future__ import print_function
import os.path import os.path
import sys
def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'): def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
"""Makes sure that all sources have the specified eol sequence (default: unix).""" """Makes sure that all sources have the specified eol sequence (default: unix)."""
if not os.path.isfile(path): if not os.path.isfile( path ):
raise ValueError('Path "%s" is not a file' % path) raise ValueError( 'Path "%s" is not a file' % path )
try: try:
f = open(path, 'rb') f = open(path, 'rb')
except IOError as msg: except IOError, msg:
print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr) print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
return False return False
try: try:
raw_lines = f.readlines() raw_lines = f.readlines()
@@ -22,7 +15,7 @@ def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
f.close() f.close()
fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines] fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
if raw_lines != fixed_lines: if raw_lines != fixed_lines:
print('%s =>' % path, end=' ') print '%s =>' % path,
if not is_dry_run: if not is_dry_run:
f = open(path, "wb") f = open(path, "wb")
try: try:
@@ -30,32 +23,32 @@ def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
finally: finally:
f.close() f.close()
if verbose: if verbose:
print(is_dry_run and ' NEED FIX' or ' FIXED') print is_dry_run and ' NEED FIX' or ' FIXED'
return True return True
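A minimal dry-run call of the helper above (the path is illustrative only):

    # Hypothetical dry-run: reports whether the file's line endings
    # would be rewritten, without touching it.
    if fix_source_eol('src/lib_json/json_value.cpp', is_dry_run=True):
        print('needs EOL fix')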
## ##
## ##
## ##
##def _do_fix(is_dry_run = True): ##def _do_fix( is_dry_run = True ):
## from waftools import antglob ## from waftools import antglob
## python_sources = antglob.glob('.', ## python_sources = antglob.glob( '.',
## includes = '**/*.py **/wscript **/wscript_build', ## includes = '**/*.py **/wscript **/wscript_build',
## excludes = antglob.default_excludes + './waf.py', ## excludes = antglob.default_excludes + './waf.py',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build') ## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in python_sources: ## for path in python_sources:
## _fix_python_source(path, is_dry_run) ## _fix_python_source( path, is_dry_run )
## ##
## cpp_sources = antglob.glob('.', ## cpp_sources = antglob.glob( '.',
## includes = '**/*.cpp **/*.h **/*.inl', ## includes = '**/*.cpp **/*.h **/*.inl',
## prune_dirs = antglob.prune_dirs + 'waf-* ./build') ## prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
## for path in cpp_sources: ## for path in cpp_sources:
## _fix_source_eol(path, is_dry_run) ## _fix_source_eol( path, is_dry_run )
## ##
## ##
##def dry_fix(context): ##def dry_fix(context):
## _do_fix(is_dry_run = True) ## _do_fix( is_dry_run = True )
## ##
##def fix(context): ##def fix(context):
## _do_fix(is_dry_run = False) ## _do_fix( is_dry_run = False )
## ##
##def shutdown(): ##def shutdown():
## pass ## pass


@@ -1,6 +1,5 @@
"""Updates the license text in source file. """Updates the license text in source file.
""" """
from __future__ import print_function
# An existing license is found if the file starts with the string below, # An existing license is found if the file starts with the string below,
# and ends with the first blank line. # and ends with the first blank line.
@@ -13,7 +12,7 @@ BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
""".replace('\r\n','\n') """.replace('\r\n','\n')
def update_license(path, dry_run, show_diff): def update_license( path, dry_run, show_diff ):
"""Update the license statement in the specified file. """Update the license statement in the specified file.
Parameters: Parameters:
path: path of the C++ source file to update. path: path of the C++ source file to update.
@@ -22,28 +21,28 @@ def update_license(path, dry_run, show_diff):
show_diff: if True, print the path of the file that would be modified, show_diff: if True, print the path of the file that would be modified,
as well as the change made to the file. as well as the change made to the file.
""" """
with open(path, 'rt') as fin: with open( path, 'rt' ) as fin:
original_text = fin.read().replace('\r\n','\n') original_text = fin.read().replace('\r\n','\n')
newline = fin.newlines and fin.newlines[0] or '\n' newline = fin.newlines and fin.newlines[0] or '\n'
if not original_text.startswith(LICENSE_BEGIN): if not original_text.startswith( LICENSE_BEGIN ):
# No existing license found => prepend it # No existing license found => prepend it
new_text = BRIEF_LICENSE + original_text new_text = BRIEF_LICENSE + original_text
else: else:
license_end_index = original_text.index('\n\n') # search first blank line license_end_index = original_text.index( '\n\n' ) # search first blank line
new_text = BRIEF_LICENSE + original_text[license_end_index+2:] new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
if original_text != new_text: if original_text != new_text:
if not dry_run: if not dry_run:
with open(path, 'wb') as fout: with open( path, 'wb' ) as fout:
fout.write(new_text.replace('\n', newline)) fout.write( new_text.replace('\n', newline ) )
print('Updated', path) print 'Updated', path
if show_diff: if show_diff:
import difflib import difflib
print('\n'.join(difflib.unified_diff(original_text.split('\n'), print '\n'.join( difflib.unified_diff( original_text.split('\n'),
new_text.split('\n')))) new_text.split('\n') ) )
return True return True
return False return False
def update_license_in_source_directories(source_dirs, dry_run, show_diff): def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
"""Updates license text in C++ source files found in directory source_dirs. """Updates license text in C++ source files found in directory source_dirs.
Parameters: Parameters:
source_dirs: list of directory to scan for C++ sources. Directories are source_dirs: list of directory to scan for C++ sources. Directories are
@@ -56,11 +55,11 @@ def update_license_in_source_directories(source_dirs, dry_run, show_diff):
from devtools import antglob from devtools import antglob
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
for source_dir in source_dirs: for source_dir in source_dirs:
cpp_sources = antglob.glob(source_dir, cpp_sources = antglob.glob( source_dir,
includes = '''**/*.h **/*.cpp **/*.inl''', includes = '''**/*.h **/*.cpp **/*.inl''',
prune_dirs = prune_dirs) prune_dirs = prune_dirs )
for source in cpp_sources: for source in cpp_sources:
update_license(source, dry_run, show_diff) update_license( source, dry_run, show_diff )
def main(): def main():
usage = """%prog DIR [DIR2...] usage = """%prog DIR [DIR2...]
@@ -83,8 +82,8 @@ python devtools\licenseupdater.py include src
help="""On update, show change made to the file.""") help="""On update, show change made to the file.""")
parser.enable_interspersed_args() parser.enable_interspersed_args()
options, args = parser.parse_args() options, args = parser.parse_args()
update_license_in_source_directories(args, options.dry_run, options.show_diff) update_license_in_source_directories( args, options.dry_run, options.show_diff )
print('Done') print 'Done'
if __name__ == '__main__': if __name__ == '__main__':
import sys import sys
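And a typical dry-run invocation of the updater defined above, using the directories from the usage string:

    # Dry-run over the usual source trees: prints each file whose
    # license header would change, with a unified diff.
    update_license_in_source_directories(['include', 'src'],
                                         dry_run=True, show_diff=True)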


@@ -1,10 +1,5 @@
# Copyright 2010 Baptiste Lepilleur import os.path
# Distributed under MIT license, or public domain if desired and import gzip
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from contextlib import closing
import os
import tarfile import tarfile
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9 TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
@@ -18,35 +13,41 @@ def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to '' prefix_dir: all files stored in the tarball be sub-directory of prefix_dir. Set to ''
to make them child of root. to make them child of root.
""" """
base_dir = os.path.normpath(os.path.abspath(base_dir)) base_dir = os.path.normpath( os.path.abspath( base_dir ) )
def archive_name(path): def archive_name( path ):
"""Makes path relative to base_dir.""" """Makes path relative to base_dir."""
path = os.path.normpath(os.path.abspath(path)) path = os.path.normpath( os.path.abspath( path ) )
common_path = os.path.commonprefix((base_dir, path)) common_path = os.path.commonprefix( (base_dir, path) )
archive_name = path[len(common_path):] archive_name = path[len(common_path):]
if os.path.isabs(archive_name): if os.path.isabs( archive_name ):
archive_name = archive_name[1:] archive_name = archive_name[1:]
return os.path.join(prefix_dir, archive_name) return os.path.join( prefix_dir, archive_name )
def visit(tar, dirname, names): def visit(tar, dirname, names):
for name in names: for name in names:
path = os.path.join(dirname, name) path = os.path.join(dirname, name)
if os.path.isfile(path): if os.path.isfile(path):
path_in_tar = archive_name(path) path_in_tar = archive_name(path)
tar.add(path, path_in_tar) tar.add(path, path_in_tar )
compression = TARGZ_DEFAULT_COMPRESSION_LEVEL compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
with closing(tarfile.TarFile.open(tarball_path, 'w:gz', tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
compresslevel=compression)) as tar: try:
for source in sources: for source in sources:
source_path = source source_path = source
if os.path.isdir(source): if os.path.isdir( source ):
for dirpath, dirnames, filenames in os.walk(source_path): os.path.walk(source_path, visit, tar)
visit(tar, dirpath, filenames)
else: else:
path_in_tar = archive_name(source_path) path_in_tar = archive_name(source_path)
tar.add(source_path, path_in_tar) # filename, arcname tar.add(source_path, path_in_tar ) # filename, arcname
finally:
tar.close()
def decompress(tarball_path, base_dir): def decompress( tarball_path, base_dir ):
"""Decompress the gzipped tarball into directory base_dir. """Decompress the gzipped tarball into directory base_dir.
""" """
with closing(tarfile.TarFile.open(tarball_path)) as tar: # !!! This class method is not documented in the online doc
tar.extractall(base_dir) # nor is bz2open!
tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
try:
tar.extractall( base_dir )
finally:
tar.close()
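The rewrite above leans on `tarfile.TarFile.open` (the documented classmethod, unlike `gzopen`) wrapped in `contextlib.closing`. The same pattern in isolation, with hypothetical paths:

    # Standalone sketch of the pattern used above; paths are
    # hypothetical. closing() finalizes the archive even if an
    # exception is raised mid-write.
    from contextlib import closing
    import tarfile

    with closing(tarfile.TarFile.open('demo.tar.gz', 'w:gz',
                                      compresslevel=9)) as tar:
        tar.add('LICENSE', 'jsoncpp/LICENSE')   # filename, arcname

    with closing(tarfile.TarFile.open('demo.tar.gz')) as tar:
        print(tar.getnames())                   # ['jsoncpp/LICENSE']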


@@ -819,7 +819,7 @@ EXCLUDE_SYMBOLS =
# that contain example code fragments that are included (see the \include # that contain example code fragments that are included (see the \include
# command). # command).
EXAMPLE_PATH = .. EXAMPLE_PATH =
# If the value of the EXAMPLE_PATH tag contains directories, you can use the # If the value of the EXAMPLE_PATH tag contains directories, you can use the
# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -1946,7 +1946,8 @@ INCLUDE_FILE_PATTERNS = *.h
PREDEFINED = "_MSC_VER=1400" \ PREDEFINED = "_MSC_VER=1400" \
_CPPRTTI \ _CPPRTTI \
_WIN32 \ _WIN32 \
JSONCPP_DOC_EXCLUDE_IMPLEMENTATION JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \
JSON_VALUE_USE_INTERNAL_MAP
# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
# tag can be used to specify a list of macro names that should be expanded. The # tag can be used to specify a list of macro names that should be expanded. The


@@ -16,7 +16,7 @@ JsonCpp - JSON data format manipulation library
</a> </a>
</td> </td>
<td width="40%" align="right" valign="center"> <td width="40%" align="right" valign="center">
<a href="http://open-source-parsers.github.io/jsoncpp-docs/doxygen/">JsonCpp home page</a> <a href="https://github.com/open-source-parsers/jsoncpp">JsonCpp home page</a>
</td> </td>
</tr> </tr>
</table> </table>


@@ -4,21 +4,11 @@
<a HREF="http://www.json.org/">JSON (JavaScript Object Notation)</a> <a HREF="http://www.json.org/">JSON (JavaScript Object Notation)</a>
is a lightweight data-interchange format. is a lightweight data-interchange format.
It can represent integer, real number, string, an ordered sequence of values, and
a collection of name/value pairs.
Here is an example of JSON data: Here is an example of JSON data:
\verbatim \verbatim
{
"encoding" : "UTF-8",
"plug-ins" : [
"python",
"c++",
"ruby"
],
"indent" : { "length" : 3, "use_space": true }
}
\endverbatim
<b>JsonCpp</b> supports comments as <i>meta-data</i>:
\code
// Configuration options // Configuration options
{ {
// Default encoding for text // Default encoding for text
@@ -27,22 +17,22 @@ Here is an example of JSON data:
// Plug-ins loaded at start-up // Plug-ins loaded at start-up
"plug-ins" : [ "plug-ins" : [
"python", "python",
"c++", // trailing comment "c++",
"ruby" "ruby"
], ],
// Tab indent size // Tab indent size
// (multi-line comment) "indent" : { "length" : 3, "use_space": true }
"indent" : { /*embedded comment*/ "length" : 3, "use_space": true }
} }
\endcode \endverbatim
<code>jsoncpp</code> supports comments as <i>meta-data</i>.
\section _features Features \section _features Features
- read and write JSON document - read and write JSON document
- attach C++ style comments to element during parsing - attach C++ style comments to element during parsing
- rewrite JSON document preserving original comments - rewrite JSON document preserving original comments
Notes: Comments used to be supported in JSON but were removed for Notes: Comments used to be supported in JSON but where removed for
portability (C like comments are not supported in Python). Since portability (C like comments are not supported in Python). Since
comments are useful in configuration/input file, this feature was comments are useful in configuration/input file, this feature was
preserved. preserved.
@@ -50,77 +40,47 @@ preserved.
\section _example Code example \section _example Code example
\code \code
Json::Value root; // 'root' will contain the root value after parsing. Json::Value root; // will contains the root value after parsing.
std::cin >> root; Json::Reader reader;
bool parsingSuccessful = reader.parse( config_doc, root );
if ( !parsingSuccessful )
{
// report to the user the failure and their locations in the document.
std::cout << "Failed to parse configuration\n"
<< reader.getFormattedErrorMessages();
return;
}
// You can also read into a particular sub-value. // Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
std::cin >> root["subtree"]; // such member.
// Get the value of the member of root named 'encoding',
// and return 'UTF-8' if there is no such member.
std::string encoding = root.get("encoding", "UTF-8" ).asString(); std::string encoding = root.get("encoding", "UTF-8" ).asString();
// Get the value of the member of root named 'encoding', return a 'null' value if
// Get the value of the member of root named 'plug-ins'; return a 'null' value if
// there is no such member. // there is no such member.
const Json::Value plugins = root["plug-ins"]; const Json::Value plugins = root["plug-ins"];
for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements.
// Iterate over the sequence elements.
for ( int index = 0; index < plugins.size(); ++index )
loadPlugIn( plugins[index].asString() ); loadPlugIn( plugins[index].asString() );
// Try other datatypes. Some are auto-convertible to others. setIndentLength( root["indent"].get("length", 3).asInt() );
foo::setIndentLength( root["indent"].get("length", 3).asInt() ); setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
foo::setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
// Since Json::Value has an implicit constructor for all value types, it is not
// necessary to explicitly construct the Json::Value object.
root["encoding"] = foo::getCurrentEncoding();
root["indent"]["length"] = foo::getCurrentIndentLength();
root["indent"]["use_space"] = foo::getCurrentIndentUseSpace();
// If you like the defaults, you can insert directly into a stream.
std::cout << root;
// Of course, you can write to `std::ostringstream` if you prefer.
// If desired, remember to add a linefeed and flush.
std::cout << std::endl;
\endcode
\section _advanced Advanced usage
Configure *builders* to create *readers* and *writers*. For
configuration, we use our own `Json::Value` (rather than
standard setters/getters) so that we can add
features without losing binary-compatibility.
\code
// For convenience, use `writeString()` with a specialized builder.
Json::StreamWriterBuilder wbuilder;
wbuilder["indentation"] = "\t";
std::string document = Json::writeString(wbuilder, root);
// Here, using a specialized Builder, we discard comments and
// record errors as we parse.
Json::CharReaderBuilder rbuilder;
rbuilder["collectComments"] = false;
std::string errs;
bool ok = Json::parseFromStream(rbuilder, std::cin, &root, &errs);
\endcode
Yes, compile-time configuration-checking would be helpful,
but `Json::Value` lets you
write and read the builder configuration, which is better! In other words,
you can configure your JSON parser using JSON.
CharReaders and StreamWriters are not thread-safe, but they are re-usable.
\code
Json::CharReaderBuilder rbuilder;
cfg >> rbuilder.settings_;
std::unique_ptr<Json::CharReader> const reader(rbuilder.newCharReader());
reader->parse(start, stop, &value1, &errs);
// ... // ...
reader->parse(start, stop, &value2, &errs); // At application shutdown to make the new configuration document:
// etc. // Since Json::Value has implicit constructor for all value types, it is not
// necessary to explicitly construct the Json::Value object:
root["encoding"] = getCurrentEncoding();
root["indent"]["length"] = getCurrentIndentLength();
root["indent"]["use_space"] = getCurrentIndentUseSpace();
Json::StyledWriter writer;
// Make a new JSON document for the configuration. Preserve original comments.
std::string outputConfig = writer.write( root );
// You can also use streams. This will put the contents of any JSON
// stream at a particular sub-value, if you'd like.
std::cin >> root["subtree"];
// And you can write to a stream, using the StyledWriter automatically.
std::cout << root;
\endcode \endcode
\section _pbuild Build instructions \section _pbuild Build instructions
@@ -156,9 +116,4 @@ Basically JsonCpp is licensed under MIT license, or public domain if desired
and recognized in your jurisdiction. and recognized in your jurisdiction.
\author Baptiste Lepilleur <blep@users.sourceforge.net> (originator) \author Baptiste Lepilleur <blep@users.sourceforge.net> (originator)
\author Christopher Dunn <cdunn2001@gmail.com> (primary maintainer)
\version \include version
We make strong guarantees about binary-compatibility, consistent with
<a href="http://apr.apache.org/versioning.html">the Apache versioning scheme</a>.
\sa version.h
*/ */

File diff suppressed because it is too large


@@ -1,27 +1,12 @@
"""Script to generate doxygen documentation. """Script to generate doxygen documentation.
""" """
from __future__ import print_function
from __future__ import unicode_literals
from devtools import tarball
from contextlib import contextmanager
import subprocess
import traceback
import re import re
import os import os
import os.path
import sys import sys
import shutil import shutil
from devtools import tarball
@contextmanager
def cd(newdir):
"""
http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
"""
prevdir = os.getcwd()
os.chdir(newdir)
try:
yield
finally:
os.chdir(prevdir)
def find_program(*filenames): def find_program(*filenames):
"""find a program in folders path_lst, and sets env[var] """find a program in folders path_lst, and sets env[var]
@@ -29,9 +14,9 @@ def find_program(*filenames):
@return: the full path of the filename if found, or '' if filename could not be found @return: the full path of the filename if found, or '' if filename could not be found
""" """
paths = os.environ.get('PATH', '').split(os.pathsep) paths = os.environ.get('PATH', '').split(os.pathsep)
suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or '' suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
for filename in filenames: for filename in filenames:
for name in [filename+ext for ext in suffixes.split(' ')]: for name in [filename+ext for ext in suffixes.split()]:
for directory in paths: for directory in paths:
full_path = os.path.join(directory, name) full_path = os.path.join(directory, name)
if os.path.isfile(full_path): if os.path.isfile(full_path):
@@ -43,56 +28,53 @@ def do_subst_in_file(targetfile, sourcefile, dict):
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'}, For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
then all instances of %VERSION% in the file will be replaced with 1.2345 etc. then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
""" """
with open(sourcefile, 'r') as f: try:
f = open(sourcefile, 'rb')
contents = f.read() contents = f.read()
for (k,v) in list(dict.items()): f.close()
except:
print "Can't read source file %s"%sourcefile
raise
for (k,v) in dict.items():
v = v.replace('\\','\\\\') v = v.replace('\\','\\\\')
contents = re.sub(k, v, contents) contents = re.sub(k, v, contents)
with open(targetfile, 'w') as f:
f.write(contents)
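In isolation, the substitution performed by `do_subst_in_file` amounts to the following (hypothetical key/value pair):

    # Hypothetical standalone run of the substitution loop above:
    # each dict key is treated as a regex; backslashes in values are
    # escaped so re.sub does not reinterpret them.
    import re

    contents = 'version = %JSONCPP_VERSION%'
    for k, v in {'%JSONCPP_VERSION%': '0.7.0'}.items():
        contents = re.sub(k, v.replace('\\', '\\\\'), contents)
    print(contents)  # version = 0.7.0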
def getstatusoutput(cmd):
"""cmd is a list.
"""
try: try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) f = open(targetfile, 'wb')
output, _ = process.communicate() f.write(contents)
status = process.returncode f.close()
except: except:
status = -1 print "Can't write target file %s"%targetfile
output = traceback.format_exc() raise
return status, output
def run_cmd(cmd, silent=False):
"""Raise exception on failure.
"""
info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
print(info)
sys.stdout.flush()
if silent:
status, output = getstatusoutput(cmd)
else:
status, output = subprocess.call(cmd), ''
if status:
msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
raise Exception(msg)
def assert_is_exe(path):
if not path:
raise Exception('path is empty.')
if not os.path.isfile(path):
raise Exception('%r is not a file.' %path)
if not os.access(path, os.X_OK):
raise Exception('%r is not executable by this user.' %path)
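Hypothetical usage of the two helpers just added (the doxygen path is an assumption):

    # Verify the binary, then run it quietly; run_cmd() raises with
    # the captured output if the command exits non-zero.
    assert_is_exe('/usr/bin/doxygen')               # assumed path
    run_cmd(['/usr/bin/doxygen', 'doc/doxyfile'], silent=True)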
def run_doxygen(doxygen_path, config_file, working_dir, is_silent): def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
assert_is_exe(doxygen_path) config_file = os.path.abspath( config_file )
config_file = os.path.abspath(config_file) doxygen_path = doxygen_path
with cd(working_dir): old_cwd = os.getcwd()
try:
os.chdir( working_dir )
cmd = [doxygen_path, config_file] cmd = [doxygen_path, config_file]
run_cmd(cmd, is_silent) print 'Running:', ' '.join( cmd )
try:
import subprocess
except:
if os.system( ' '.join( cmd ) ) != 0:
print 'Documentation generation failed'
return False
else:
if is_silent:
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
else:
process = subprocess.Popen( cmd )
stdout, _ = process.communicate()
if process.returncode:
print 'Documentation generation failed:'
print stdout
return False
return True
finally:
os.chdir( old_cwd )
def build_doc(options, make_release=False): def build_doc( options, make_release=False ):
if make_release: if make_release:
options.make_tarball = True options.make_tarball = True
options.with_dot = True options.with_dot = True
@@ -101,56 +83,56 @@ def build_doc(options, make_release=False):
options.open = False options.open = False
options.silent = True options.silent = True
version = open('version', 'rt').read().strip() version = open('version','rt').read().strip()
output_dir = 'dist/doxygen' # relative to doc/doxyfile location. output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
if not os.path.isdir(output_dir): if not os.path.isdir( output_dir ):
os.makedirs(output_dir) os.makedirs( output_dir )
top_dir = os.path.abspath('.') top_dir = os.path.abspath( '.' )
html_output_dirname = 'jsoncpp-api-html-' + version html_output_dirname = 'jsoncpp-api-html-' + version
tarball_path = os.path.join('dist', html_output_dirname + '.tar.gz') tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' )
warning_log_path = os.path.join(output_dir, '../jsoncpp-doxygen-warning.log') warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' )
html_output_path = os.path.join(output_dir, html_output_dirname) html_output_path = os.path.join( output_dir, html_output_dirname )
def yesno(bool): def yesno( bool ):
return bool and 'YES' or 'NO' return bool and 'YES' or 'NO'
subst_keys = { subst_keys = {
'%JSONCPP_VERSION%': version, '%JSONCPP_VERSION%': version,
'%DOC_TOPDIR%': '', '%DOC_TOPDIR%': '',
'%TOPDIR%': top_dir, '%TOPDIR%': top_dir,
'%HTML_OUTPUT%': os.path.join('..', output_dir, html_output_dirname), '%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ),
'%HAVE_DOT%': yesno(options.with_dot), '%HAVE_DOT%': yesno(options.with_dot),
'%DOT_PATH%': os.path.split(options.dot_path)[0], '%DOT_PATH%': os.path.split(options.dot_path)[0],
'%HTML_HELP%': yesno(options.with_html_help), '%HTML_HELP%': yesno(options.with_html_help),
'%UML_LOOK%': yesno(options.with_uml_look), '%UML_LOOK%': yesno(options.with_uml_look),
'%WARNING_LOG_PATH%': os.path.join('..', warning_log_path) '%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path )
} }
if os.path.isdir(output_dir): if os.path.isdir( output_dir ):
print('Deleting directory:', output_dir) print 'Deleting directory:', output_dir
shutil.rmtree(output_dir) shutil.rmtree( output_dir )
if not os.path.isdir(output_dir): if not os.path.isdir( output_dir ):
os.makedirs(output_dir) os.makedirs( output_dir )
do_subst_in_file('doc/doxyfile', options.doxyfile_input_path, subst_keys) do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent) ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
if not options.silent: if not options.silent:
print(open(warning_log_path, 'r').read()) print open(warning_log_path, 'rb').read()
index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html')) index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
print('Generated documentation can be found in:') print 'Generated documentation can be found in:'
print(index_path) print index_path
if options.open: if options.open:
import webbrowser import webbrowser
webbrowser.open('file://' + index_path) webbrowser.open( 'file://' + index_path )
if options.make_tarball: if options.make_tarball:
print('Generating doc tarball to', tarball_path) print 'Generating doc tarball to', tarball_path
tarball_sources = [ tarball_sources = [
output_dir, output_dir,
'README.md', 'README.txt',
'LICENSE', 'LICENSE',
'NEWS.txt', 'NEWS.txt',
'version' 'version'
] ]
tarball_basedir = os.path.join(output_dir, html_output_dirname) tarball_basedir = os.path.join( output_dir, html_output_dirname )
tarball.make_tarball(tarball_path, tarball_sources, tarball_basedir, html_output_dirname) tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )
return tarball_path, html_output_dirname return tarball_path, html_output_dirname
def main(): def main():
@@ -169,8 +151,6 @@ def main():
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""") help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'), parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
help="""Path to Doxygen tool. [Default: %default]""") help="""Path to Doxygen tool. [Default: %default]""")
parser.add_option('--in', dest="doxyfile_input_path", action='store', default='doc/doxyfile.in',
help="""Path to doxygen inputs. [Default: %default]""")
parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False, parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
help="""Enable generation of Microsoft HTML HELP""") help="""Enable generation of Microsoft HTML HELP""")
parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True, parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,
@@ -183,7 +163,7 @@ def main():
help="""Hides doxygen output""") help="""Hides doxygen output""")
parser.enable_interspersed_args() parser.enable_interspersed_args()
options, args = parser.parse_args() options, args = parser.parse_args()
build_doc(options) build_doc( options )
if __name__ == '__main__': if __name__ == '__main__':
main() main()


@@ -1,94 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef CPPTL_JSON_ALLOCATOR_H_INCLUDED
#define CPPTL_JSON_ALLOCATOR_H_INCLUDED
#include <cstring>
#include <memory>
namespace Json {
template<typename T>
class SecureAllocator {
public:
// Type definitions
using value_type = T;
using pointer = T*;
using const_pointer = const T*;
using reference = T&;
using const_reference = const T&;
using size_type = std::size_t;
using difference_type = std::ptrdiff_t;
/**
* Allocate memory for N items using the standard allocator.
*/
pointer allocate(size_type n) {
// allocate using "global operator new"
return static_cast<pointer>(::operator new(n * sizeof(T)));
}
/**
* Release memory which was allocated for N items at pointer P.
*
* The memory block is filled with zeroes before being released.
* The pointer argument is tagged as "volatile" to prevent the
* compiler optimizing out this critical step.
*/
void deallocate(volatile pointer p, size_type n) {
std::memset(p, 0, n * sizeof(T));
// free using "global operator delete"
::operator delete(p);
}
/**
* Construct an item in-place at pointer P.
*/
template<typename... Args>
void construct(pointer p, Args&&... args) {
// construct using "placement new" and "perfect forwarding"
::new (static_cast<void*>(p)) T(std::forward<Args>(args)...);
}
size_type max_size() const {
return size_t(-1) / sizeof(T);
}
pointer address( reference x ) const {
return std::addressof(x);
}
const_pointer address( const_reference x ) const {
return std::addressof(x);
}
/**
* Destroy an item in-place at pointer P.
*/
void destroy(pointer p) {
// destroy using "explicit destructor"
p->~T();
}
// Boilerplate
SecureAllocator() {}
template<typename U> SecureAllocator(const SecureAllocator<U>&) {}
template<typename U> struct rebind { using other = SecureAllocator<U>; };
};
template<typename T, typename U>
bool operator==(const SecureAllocator<T>&, const SecureAllocator<U>&) {
return true;
}
template<typename T, typename U>
bool operator!=(const SecureAllocator<T>&, const SecureAllocator<U>&) {
return false;
}
} //namespace Json
#endif // CPPTL_JSON_ALLOCATOR_H_INCLUDED


@@ -7,48 +7,35 @@
#define CPPTL_JSON_ASSERTIONS_H_INCLUDED #define CPPTL_JSON_ASSERTIONS_H_INCLUDED
#include <stdlib.h> #include <stdlib.h>
#include <sstream>
#if !defined(JSON_IS_AMALGAMATION) #if !defined(JSON_IS_AMALGAMATION)
#include "config.h" #include "config.h"
#endif // if !defined(JSON_IS_AMALGAMATION) #endif // if !defined(JSON_IS_AMALGAMATION)
/** It should not be possible for a maliciously designed file to
* cause an abort() or seg-fault, so these macros are used only
* for pre-condition violations and internal logic errors.
*/
#if JSON_USE_EXCEPTION #if JSON_USE_EXCEPTION
#include <stdexcept>
// @todo <= add detail about condition in exception #define JSON_ASSERT(condition) \
# define JSON_ASSERT(condition) \ assert(condition); // @todo <= change this into an exception throw
{if (!(condition)) {Json::throwLogicError( "assert json failed" );}} #define JSON_FAIL_MESSAGE(message) throw std::runtime_error(message);
# define JSON_FAIL_MESSAGE(message) \
{ \
JSONCPP_OSTRINGSTREAM oss; oss << message; \
Json::throwLogicError(oss.str()); \
abort(); \
}
#else // JSON_USE_EXCEPTION #else // JSON_USE_EXCEPTION
#define JSON_ASSERT(condition) assert(condition);
# define JSON_ASSERT(condition) assert(condition)
// The call to assert() will show the failure message in debug builds. In // The call to assert() will show the failure message in debug builds. In
// release builds we abort, for a core-dump or debugger. // release bugs we write to invalid memory in order to crash hard, so that a
# define JSON_FAIL_MESSAGE(message) \ // debugger or crash reporter gets the chance to take over. We still call exit()
// afterward in order to tell the compiler that this macro doesn't return.
#define JSON_FAIL_MESSAGE(message) \
{ \ { \
JSONCPP_OSTRINGSTREAM oss; oss << message; \ assert(false&& message); \
assert(false && oss.str().c_str()); \ strcpy(reinterpret_cast<char*>(666), message); \
abort(); \ exit(123); \
} }
#endif #endif
#define JSON_ASSERT_MESSAGE(condition, message) \ #define JSON_ASSERT_MESSAGE(condition, message) \
if (!(condition)) { \ if (!(condition)) { \
JSON_FAIL_MESSAGE(message); \ JSON_FAIL_MESSAGE(message) \
} }
#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED #endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED


@@ -5,8 +5,6 @@
#ifndef JSON_CONFIG_H_INCLUDED #ifndef JSON_CONFIG_H_INCLUDED
#define JSON_CONFIG_H_INCLUDED #define JSON_CONFIG_H_INCLUDED
#include <stddef.h>
#include <string> //typdef String
/// If defined, indicates that json library is embedded in CppTL library. /// If defined, indicates that json library is embedded in CppTL library.
//# define JSON_IN_CPPTL 1 //# define JSON_IN_CPPTL 1
@@ -17,6 +15,17 @@
/// std::map /// std::map
/// as Value container. /// as Value container.
//# define JSON_USE_CPPTL_SMALLMAP 1 //# define JSON_USE_CPPTL_SMALLMAP 1
/// If defined, indicates that Json specific container should be used
/// (hash table & simple deque container with customizable allocator).
/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332
//# define JSON_VALUE_USE_INTERNAL_MAP 1
/// Force usage of standard new/malloc based allocator instead of memory pool
/// based allocator.
/// The memory pools allocator used optimization (initializing Value and
/// ValueInternalLink
/// as if it was a POD) that may cause some validation tool to report errors.
/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1
// If non-zero, the library uses exceptions to report bad input instead of C // If non-zero, the library uses exceptions to report bad input instead of C
// assertion macros. The default is to use exceptions. // assertion macros. The default is to use exceptions.
@@ -39,12 +48,12 @@
#ifdef JSON_IN_CPPTL #ifdef JSON_IN_CPPTL
#define JSON_API CPPTL_API #define JSON_API CPPTL_API
#elif defined(JSON_DLL_BUILD) #elif defined(JSON_DLL_BUILD)
#if defined(_MSC_VER) || defined(__MINGW32__) #if defined(_MSC_VER)
#define JSON_API __declspec(dllexport) #define JSON_API __declspec(dllexport)
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING #define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
#endif // if defined(_MSC_VER) #endif // if defined(_MSC_VER)
#elif defined(JSON_DLL) #elif defined(JSON_DLL)
#if defined(_MSC_VER) || defined(__MINGW32__) #if defined(_MSC_VER)
#define JSON_API __declspec(dllimport) #define JSON_API __declspec(dllimport)
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING #define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
#endif // if defined(_MSC_VER) #endif // if defined(_MSC_VER)
@@ -58,87 +67,26 @@
// Storages, and 64 bits integer support is disabled. // Storages, and 64 bits integer support is disabled.
// #define JSON_NO_INT64 1 // #define JSON_NO_INT64 1
#if defined(_MSC_VER) // MSVC #if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6
# if _MSC_VER <= 1200 // MSVC 6 // Microsoft Visual Studio 6 only support conversion from __int64 to double
// Microsoft Visual Studio 6 only support conversion from __int64 to double // (no conversion from unsigned __int64).
// (no conversion from unsigned __int64). #define JSON_USE_INT64_DOUBLE_CONVERSION 1
# define JSON_USE_INT64_DOUBLE_CONVERSION 1 // Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255' // characters in the debug information)
// characters in the debug information) // All projects I've ever seen with VS6 were using this globally (not bothering
// All projects I've ever seen with VS6 were using this globally (not bothering // with pragma push/pop).
// with pragma push/pop). #pragma warning(disable : 4786)
# pragma warning(disable : 4786) #endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6
# endif // MSVC 6
# if _MSC_VER >= 1500 // MSVC 2008 #if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
/// Indicates that the following function is deprecated. /// Indicates that the following function is deprecated.
# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message)) #define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
# endif
#endif // defined(_MSC_VER)
// In c++11 the override keyword allows you to explicity define that a function
// is intended to override the base-class version. This makes the code more
// managable and fixes a set of common hard-to-find bugs.
#if __cplusplus >= 201103L
# define JSONCPP_OVERRIDE override
#elif defined(_MSC_VER) && _MSC_VER > 1600
# define JSONCPP_OVERRIDE override
#else
# define JSONCPP_OVERRIDE
#endif #endif
#ifndef JSON_HAS_RVALUE_REFERENCES
#if defined(_MSC_VER) && _MSC_VER >= 1600 // MSVC >= 2010
#define JSON_HAS_RVALUE_REFERENCES 1
#endif // MSVC >= 2010
#ifdef __clang__
#if __has_feature(cxx_rvalue_references)
#define JSON_HAS_RVALUE_REFERENCES 1
#endif // has_feature
#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
#if defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L)
#define JSON_HAS_RVALUE_REFERENCES 1
#endif // GXX_EXPERIMENTAL
#endif // __clang__ || __GNUC__
#endif // not defined JSON_HAS_RVALUE_REFERENCES
#ifndef JSON_HAS_RVALUE_REFERENCES
#define JSON_HAS_RVALUE_REFERENCES 0
#endif
#ifdef __clang__
#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
# define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
# elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
# define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
# endif // GNUC version
#endif // __clang__ || __GNUC__
#if !defined(JSONCPP_DEPRECATED) #if !defined(JSONCPP_DEPRECATED)
#define JSONCPP_DEPRECATED(message) #define JSONCPP_DEPRECATED(message)
#endif // if !defined(JSONCPP_DEPRECATED) #endif // if !defined(JSONCPP_DEPRECATED)
#if __GNUC__ >= 6
# define JSON_USE_INT64_DOUBLE_CONVERSION 1
#endif
#if !defined(JSON_IS_AMALGAMATION)
# include "version.h"
# if JSONCPP_USING_SECURE_MEMORY
# include "allocator.h" //typedef Allocator
# endif
#endif // if !defined(JSON_IS_AMALGAMATION)
namespace Json { namespace Json {
typedef int Int; typedef int Int;
typedef unsigned int UInt; typedef unsigned int UInt;
@@ -152,26 +100,13 @@ typedef unsigned int LargestUInt;
typedef __int64 Int64; typedef __int64 Int64;
typedef unsigned __int64 UInt64; typedef unsigned __int64 UInt64;
#else // if defined(_MSC_VER) // Other platforms, use long long #else // if defined(_MSC_VER) // Other platforms, use long long
typedef int64_t Int64; typedef long long int Int64;
typedef uint64_t UInt64; typedef unsigned long long int UInt64;
#endif // if defined(_MSC_VER) #endif // if defined(_MSC_VER)
typedef Int64 LargestInt; typedef Int64 LargestInt;
typedef UInt64 LargestUInt; typedef UInt64 LargestUInt;
#define JSON_HAS_INT64 #define JSON_HAS_INT64
#endif // if defined(JSON_NO_INT64) #endif // if defined(JSON_NO_INT64)
#if JSONCPP_USING_SECURE_MEMORY
#define JSONCPP_STRING std::basic_string<char, std::char_traits<char>, Json::SecureAllocator<char> >
#define JSONCPP_OSTRINGSTREAM std::basic_ostringstream<char, std::char_traits<char>, Json::SecureAllocator<char> >
#define JSONCPP_OSTREAM std::basic_ostream<char, std::char_traits<char>>
#define JSONCPP_ISTRINGSTREAM std::basic_istringstream<char, std::char_traits<char>, Json::SecureAllocator<char> >
#define JSONCPP_ISTREAM std::istream
#else
#define JSONCPP_STRING std::string
#define JSONCPP_OSTRINGSTREAM std::ostringstream
#define JSONCPP_OSTREAM std::ostream
#define JSONCPP_ISTRINGSTREAM std::istringstream
#define JSONCPP_ISTREAM std::istream
#endif // if JSONCPP_USING_SECURE_MEMORY
} // end namespace Json } // end namespace Json
#endif // JSON_CONFIG_H_INCLUDED #endif // JSON_CONFIG_H_INCLUDED


@@ -31,6 +31,12 @@ class Value;
class ValueIteratorBase; class ValueIteratorBase;
class ValueIterator; class ValueIterator;
class ValueConstIterator; class ValueConstIterator;
#ifdef JSON_VALUE_USE_INTERNAL_MAP
class ValueMapAllocator;
class ValueInternalLink;
class ValueInternalArray;
class ValueInternalMap;
#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
} // namespace Json } // namespace Json


@@ -14,7 +14,6 @@
#include <iosfwd> #include <iosfwd>
#include <stack> #include <stack>
#include <string> #include <string>
#include <istream>
// Disable warning C4251: <data member>: <type> needs to have dll-interface to // Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by... // be used by...
@@ -28,7 +27,6 @@ namespace Json {
/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a /** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
*Value. *Value.
* *
* \deprecated Use CharReader and CharReaderBuilder.
*/ */
class JSON_API Reader { class JSON_API Reader {
public: public:
@@ -42,9 +40,9 @@ public:
* *
*/ */
struct StructuredError { struct StructuredError {
ptrdiff_t offset_start; size_t offset_start;
ptrdiff_t offset_limit; size_t offset_limit;
JSONCPP_STRING message; std::string message;
}; };
/** \brief Constructs a Reader allowing all features /** \brief Constructs a Reader allowing all features
@@ -80,7 +78,7 @@ public:
document to read. document to read.
* \param endDoc Pointer on the end of the UTF-8 encoded string of the * \param endDoc Pointer on the end of the UTF-8 encoded string of the
document to read. document to read.
* Must be >= beginDoc. \ Must be >= beginDoc.
* \param root [out] Contains the root value of the document if it was * \param root [out] Contains the root value of the document if it was
* successfully parsed. * successfully parsed.
* \param collectComments \c true to collect comment and allow writing them * \param collectComments \c true to collect comment and allow writing them
@@ -99,7 +97,7 @@ public:
/// \brief Parse from input stream. /// \brief Parse from input stream.
/// \see Json::operator>>(std::istream&, Json::Value&). /// \see Json::operator>>(std::istream&, Json::Value&).
bool parse(JSONCPP_ISTREAM& is, Value& root, bool collectComments = true); bool parse(std::istream& is, Value& root, bool collectComments = true);
/** \brief Returns a user friendly string that list errors in the parsed /** \brief Returns a user friendly string that list errors in the parsed
* document. * document.
@@ -110,8 +108,8 @@ public:
* during parsing. * during parsing.
* \deprecated Use getFormattedErrorMessages() instead (typo fix). * \deprecated Use getFormattedErrorMessages() instead (typo fix).
*/ */
JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.") JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead")
JSONCPP_STRING getFormatedErrorMessages() const; std::string getFormatedErrorMessages() const;
/** \brief Returns a user friendly string that list errors in the parsed /** \brief Returns a user friendly string that list errors in the parsed
* document. * document.
@@ -121,7 +119,7 @@ public:
* occurred * occurred
* during parsing. * during parsing.
*/ */
JSONCPP_STRING getFormattedErrorMessages() const; std::string getFormattedErrorMessages() const;
/** \brief Returns a vector of structured errors encountered while parsing. * \return A (possibly empty) vector of StructuredError objects. Currently
* \return A (possibly empty) vector of StructuredError objects. Currently * \return A (possibly empty) vector of StructuredError objects. Currently
@@ -138,7 +136,7 @@ public:
* \return \c true if the error was successfully added, \c false if the * \return \c true if the error was successfully added, \c false if the
* Value offset exceeds the document size. * Value offset exceeds the document size.
*/ */
bool pushError(const Value& value, const JSONCPP_STRING& message); bool pushError(const Value& value, const std::string& message);
/** \brief Add a semantic error message with extra context. /** \brief Add a semantic error message with extra context.
* \param value JSON Value location associated with the error * \param value JSON Value location associated with the error
@@ -147,7 +145,7 @@ public:
* \return \c true if the error was successfully added, \c false if either * \return \c true if the error was successfully added, \c false if either
* Value offset exceeds the document size. * Value offset exceeds the document size.
*/ */
bool pushError(const Value& value, const JSONCPP_STRING& message, const Value& extra); bool pushError(const Value& value, const std::string& message, const Value& extra);
/** \brief Return whether there are any errors. /** \brief Return whether there are any errors.
* \return \c true if there are no errors to report \c false if * \return \c true if there are no errors to report \c false if
@@ -183,12 +181,13 @@ private:
class ErrorInfo { class ErrorInfo {
public: public:
Token token_; Token token_;
JSONCPP_STRING message_; std::string message_;
Location extra_; Location extra_;
}; };
typedef std::deque<ErrorInfo> Errors; typedef std::deque<ErrorInfo> Errors;
bool expectToken(TokenType type, Token& token, const char* message);
bool readToken(Token& token); bool readToken(Token& token);
void skipSpaces(); void skipSpaces();
bool match(Location pattern, int patternLength); bool match(Location pattern, int patternLength);
@@ -203,7 +202,7 @@ private:
bool decodeNumber(Token& token); bool decodeNumber(Token& token);
bool decodeNumber(Token& token, Value& decoded); bool decodeNumber(Token& token, Value& decoded);
bool decodeString(Token& token); bool decodeString(Token& token);
bool decodeString(Token& token, JSONCPP_STRING& decoded); bool decodeString(Token& token, std::string& decoded);
bool decodeDouble(Token& token); bool decodeDouble(Token& token);
bool decodeDouble(Token& token, Value& decoded); bool decodeDouble(Token& token, Value& decoded);
bool decodeUnicodeCodePoint(Token& token, bool decodeUnicodeCodePoint(Token& token,
@@ -214,9 +213,9 @@ private:
Location& current, Location& current,
Location end, Location end,
unsigned int& unicode); unsigned int& unicode);
bool addError(const JSONCPP_STRING& message, Token& token, Location extra = 0); bool addError(const std::string& message, Token& token, Location extra = 0);
bool recoverFromError(TokenType skipUntilToken); bool recoverFromError(TokenType skipUntilToken);
bool addErrorAndRecover(const JSONCPP_STRING& message, bool addErrorAndRecover(const std::string& message,
Token& token, Token& token,
TokenType skipUntilToken); TokenType skipUntilToken);
void skipUntilSpace(); void skipUntilSpace();
@@ -224,151 +223,24 @@ private:
Char getNextChar(); Char getNextChar();
void void
getLocationLineAndColumn(Location location, int& line, int& column) const; getLocationLineAndColumn(Location location, int& line, int& column) const;
JSONCPP_STRING getLocationLineAndColumn(Location location) const; std::string getLocationLineAndColumn(Location location) const;
void addComment(Location begin, Location end, CommentPlacement placement); void addComment(Location begin, Location end, CommentPlacement placement);
void skipCommentTokens(Token& token); void skipCommentTokens(Token& token);
typedef std::stack<Value*> Nodes; typedef std::stack<Value*> Nodes;
Nodes nodes_; Nodes nodes_;
Errors errors_; Errors errors_;
JSONCPP_STRING document_; std::string document_;
Location begin_; Location begin_;
Location end_; Location end_;
Location current_; Location current_;
Location lastValueEnd_; Location lastValueEnd_;
Value* lastValue_; Value* lastValue_;
JSONCPP_STRING commentsBefore_; std::string commentsBefore_;
Features features_; Features features_;
bool collectComments_; bool collectComments_;
}; // Reader
/** Interface for reading JSON from a char array.
*/
class JSON_API CharReader {
public:
virtual ~CharReader() {}
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
document.
* The document must be a UTF-8 encoded string containing the document to read.
*
* \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the
document to read.
* \param endDoc Pointer on the end of the UTF-8 encoded string of the
document to read.
* Must be >= beginDoc.
* \param root [out] Contains the root value of the document if it was
* successfully parsed.
* \param errs [out] Formatted error messages (if not NULL)
* a user friendly string that lists errors in the parsed
* document.
* \return \c true if the document was successfully parsed, \c false if an
error occurred.
*/
virtual bool parse(
char const* beginDoc, char const* endDoc,
Value* root, JSONCPP_STRING* errs) = 0;
class JSON_API Factory {
public:
virtual ~Factory() {}
/** \brief Allocate a CharReader via operator new().
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual CharReader* newCharReader() const = 0;
}; // Factory
}; // CharReader
/** \brief Build a CharReader implementation.
Usage:
\code
using namespace Json;
CharReaderBuilder builder;
builder["collectComments"] = false;
Value value;
JSONCPP_STRING errs;
bool ok = parseFromStream(builder, std::cin, &value, &errs);
\endcode
*/
class JSON_API CharReaderBuilder : public CharReader::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
These are case-sensitive.
Available settings (case-sensitive):
- `"collectComments": false or true`
- true to collect comment and allow writing them
back during serialization, false to discard comments.
This parameter is ignored if allowComments is false.
- `"allowComments": false or true`
- true if comments are allowed.
- `"strictRoot": false or true`
- true if root must be either an array or an object value
- `"allowDroppedNullPlaceholders": false or true`
- true if dropped null placeholders are allowed. (See StreamWriterBuilder.)
- `"allowNumericKeys": false or true`
- true if numeric object keys are allowed.
- `"allowSingleQuotes": false or true`
- true if '' are allowed for strings (both keys and values)
- `"stackLimit": integer`
- Exceeding stackLimit (recursive depth of `readValue()`) will
cause an exception.
- This is a security issue (seg-faults caused by deeply nested JSON),
so the default is low.
- `"failIfExtra": false or true`
- If true, `parse()` returns false when extra non-whitespace trails
the JSON value in the input string.
- `"rejectDupKeys": false or true`
- If true, `parse()` returns false when a key is duplicated within an object.
- `"allowSpecialFloats": false or true`
- If true, special float values (NaNs and infinities) are allowed
and their values are losslessly restorable.
You can examine `settings_` yourself
to see the defaults. You can also write and read them just like any
JSON Value.
\sa setDefaults()
*/
Json::Value settings_;
CharReaderBuilder();
~CharReaderBuilder() JSONCPP_OVERRIDE;
CharReader* newCharReader() const JSONCPP_OVERRIDE;
/** \return true if 'settings' are legal and consistent;
* otherwise, indicate bad settings via 'invalid'.
*/
bool validate(Json::Value* invalid) const;
/** A simple way to update a specific setting.
*/
Value& operator[](JSONCPP_STRING key);
/** Called by ctor, but you can use this to reset settings_.
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
*/
static void setDefaults(Json::Value* settings);
/** Same as old Features::strictMode().
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
*/
static void strictMode(Json::Value* settings);
}; };
/** Consume entire stream and use its begin/end.
* Someday we might have a real StreamReader, but for now this
* is convenient.
*/
bool JSON_API parseFromStream(
CharReader::Factory const&,
JSONCPP_ISTREAM&,
Value* root, std::string* errs);
/** \brief Read from 'sin' into 'root'.
Always keep comments from the input JSON.
@@ -393,7 +265,7 @@ bool JSON_API parseFromStream(
\throw std::exception on parse error.
\see Json::operator<<()
*/
- JSON_API JSONCPP_ISTREAM& operator>>(JSONCPP_ISTREAM&, Value&);
+ JSON_API std::istream& operator>>(std::istream&, Value&);
} // namespace Json
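Both sides of the compare keep this extraction operator; a minimal usage sketch (assuming the amalgamated `json/json.h` header), which throws std::exception on malformed input:

#include <json/json.h>
#include <iostream>
#include <sstream>

int main() {
  std::istringstream sin("{ \"name\": \"jsoncpp\", \"major\": 0 }");
  Json::Value root;
  sin >> root;                                   // parses the whole stream into root
  std::cout << root["name"].asString() << '\n';  // prints "jsoncpp"
}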

View File

@@ -11,7 +11,6 @@
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <string>
#include <vector>
#include <exception>
#ifndef JSON_USE_CPPTL_SMALLMAP
#include <map>
@@ -22,19 +21,6 @@
#include <cpptl/forwards.h>
#endif
//Conditional NORETURN attribute on the throw functions would:
// a) suppress false positives from static code analysis
// b) possibly improve optimization opportunities.
#if !defined(JSONCPP_NORETURN)
# if defined(_MSC_VER)
# define JSONCPP_NORETURN __declspec(noreturn)
# elif defined(__GNUC__)
# define JSONCPP_NORETURN __attribute__ ((__noreturn__))
# else
# define JSONCPP_NORETURN
# endif
#endif
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
@@ -46,46 +32,6 @@
*/
namespace Json {
/** Base class for all exceptions we throw.
*
* We use nothing but these internally. Of course, STL can throw others.
*/
class JSON_API Exception : public std::exception {
public:
Exception(JSONCPP_STRING const& msg);
~Exception() throw() JSONCPP_OVERRIDE;
char const* what() const throw() JSONCPP_OVERRIDE;
protected:
JSONCPP_STRING msg_;
};
/** Exceptions which the user cannot easily avoid.
*
* E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
*
* \remark derived from Json::Exception
*/
class JSON_API RuntimeError : public Exception {
public:
RuntimeError(JSONCPP_STRING const& msg);
};
/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
*
* These are precondition-violations (user bugs) and internal errors (our bugs).
*
* \remark derived from Json::Exception
*/
class JSON_API LogicError : public Exception {
public:
LogicError(JSONCPP_STRING const& msg);
};
/// used internally
JSONCPP_NORETURN void throwRuntimeError(JSONCPP_STRING const& msg);
/// used internally
JSONCPP_NORETURN void throwLogicError(JSONCPP_STRING const& msg);
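These exception types exist only on the 1.7.6 side of the compare. A sketch of how callers typically use them (assuming the 1.x headers; the exact throwing call is illustrative):

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value v(Json::arrayValue);
  try {
    v.asInt();  // type mismatch: jsoncpp 1.x reports this via Json::LogicError
  } catch (const Json::Exception& e) {  // common base of LogicError and RuntimeError
    std::cerr << e.what() << '\n';
  }
}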
/** \brief Type of the value held by a Value object.
*/
enum ValueType {
@@ -128,14 +74,14 @@ enum CommentPlacement {
*/
class JSON_API StaticString {
public:
- explicit StaticString(const char* czstring) : c_str_(czstring) {}
+ explicit StaticString(const char* czstring) : str_(czstring) {}
- operator const char*() const { return c_str_; }
+ operator const char*() const { return str_; }
- const char* c_str() const { return c_str_; }
+ const char* c_str() const { return str_; }
private:
- const char* c_str_;
+ const char* str_;
};
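Both sides declare StaticString; only the private member name changes. A quick sketch of what it is for, namely zero-copy string values (the referenced characters must outlive the Value):

#include <json/json.h>

int main() {
  // StaticString tells Value not to copy the characters into internal storage.
  static const char text[] = "some text";
  Json::Value v(Json::StaticString(text));
  return v.asString() == "some text" ? 0 : 1;
}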
/** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
@@ -153,29 +99,28 @@ private:
* The type of the held value is represented by a #ValueType and
* can be obtained using type().
*
* Values of an #objectValue or #arrayValue can be accessed using operator[]()
* methods.
* Non-const methods will automatically create a #nullValue element
* if it does not exist.
* The sequence of an #arrayValue will be automatically resized and initialized
* with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
*
* The get() methods can be used to obtain a default value in the case the
* required element does not exist.
*
* It is possible to iterate over the list of a #objectValue values using
* the getMemberNames() method.
*
* \note #Value string-lengths fit in size_t, but keys must be < 2^30.
* (The reason is an implementation detail.) A #CharReader will raise an
* exception if a bound is exceeded to avoid security holes in your app,
* but the Value API does *not* check bounds. That is the responsibility
* of the caller.
*/
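The behaviors described in the comment above, condensed into a sketch (assuming a C++11 compiler; `auto` sidesteps the JSONCPP_STRING/std::string difference between the two sides):

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value root(Json::objectValue);
  root["name"] = "jsoncpp";          // operator[] creates missing members
  root["tags"][2] = "json";          // array auto-resized; [0] and [1] become null
  int port = root.get("port", 8080).asInt();  // default when the member is absent
  for (const auto& key : root.getMemberNames())
    std::cout << key << '\n';
  std::cout << port << '\n';
}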
class JSON_API Value {
friend class ValueIteratorBase;
+ #ifdef JSON_VALUE_USE_INTERNAL_MAP
+ friend class ValueInternalLink;
+ friend class ValueInternalMap;
+ #endif
public:
- typedef std::vector<JSONCPP_STRING> Members;
+ typedef std::vector<std::string> Members;
typedef ValueIterator iterator;
typedef ValueConstIterator const_iterator;
typedef Json::UInt UInt;
@@ -188,10 +133,7 @@ public:
typedef Json::LargestUInt LargestUInt;
typedef Json::ArrayIndex ArrayIndex;
- static const Value& null; ///< We regret this reference to a global instance; prefer the simpler Value().
- static const Value& nullRef; ///< just a kludge for binary-compatibility; same as null
- static Value const& nullSingleton(); ///< Prefer this to null or nullRef.
+ static const Value& null;
/// Minimum signed integer value that can be stored in a Json::Value.
static const LargestInt minLargestInt;
/// Maximum signed integer value that can be stored in a Json::Value.
@@ -217,6 +159,7 @@ public:
private:
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+ #ifndef JSON_VALUE_USE_INTERNAL_MAP
class CZString {
public:
enum DuplicationPolicy {
@@ -225,34 +168,20 @@ private:
duplicateOnCopy
};
CZString(ArrayIndex index);
- CZString(char const* str, unsigned length, DuplicationPolicy allocate);
- CZString(CZString const& other);
- #if JSON_HAS_RVALUE_REFERENCES
- CZString(CZString&& other);
- #endif
+ CZString(const char* cstr, DuplicationPolicy allocate);
+ CZString(const CZString& other);
~CZString();
- CZString& operator=(CZString other);
+ CZString &operator=(const CZString &other);
- bool operator<(CZString const& other) const;
+ bool operator<(const CZString& other) const;
- bool operator==(CZString const& other) const;
+ bool operator==(const CZString& other) const;
ArrayIndex index() const;
- //const char* c_str() const; ///< \deprecated
- char const* data() const;
- unsigned length() const;
+ const char* c_str() const;
bool isStaticString() const;
private:
void swap(CZString& other);
- struct StringStorage {
-   unsigned policy_: 2;
-   unsigned length_: 30; // 1GB max
- };
- char const* cstr_; // actually, a prefixed string, unless policy is noDup
- union {
-   ArrayIndex index_;
-   StringStorage storage_;
- };
+ const char* cstr_;
+ ArrayIndex index_;
};
public:
@@ -261,6 +190,7 @@ public:
#else
typedef CppTL::SmallMap<CZString, Value> ObjectValues;
#endif // ifndef JSON_USE_CPPTL_SMALLMAP
+ #endif // ifndef JSON_VALUE_USE_INTERNAL_MAP
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
public:
@@ -287,66 +217,47 @@ Json::Value obj_value(Json::objectValue); // {}
Value(UInt64 value);
#endif // if defined(JSON_HAS_INT64)
Value(double value);
- Value(const char* value); ///< Copy til first 0. (NULL causes a seg-fault.)
- Value(const char* begin, const char* end); ///< Copy all, incl. zeroes.
+ Value(const char* value);
+ Value(const char* beginValue, const char* endValue);
/** \brief Constructs a value from a static string.
* Like the other string constructors, but does not duplicate the string for
* internal storage. The given string must remain alive after the call to this
* constructor.
- * \note This works only for null-terminated strings. (We cannot change the
- * size of this class, so we have nowhere to store the length,
- * which might be computed later for various operations.)
- *
* Example of usage:
* \code
- *   static StaticString foo("some text");
- *   Json::Value aValue(foo);
+ *   Json::Value aValue( StaticString("some text") );
* \endcode
*/
Value(const StaticString& value);
- Value(const JSONCPP_STRING& value); ///< Copy data() til size(). Embedded zeroes too.
+ Value(const std::string& value);
#ifdef JSON_USE_CPPTL
Value(const CppTL::ConstString& value);
#endif
Value(bool value);
- /// Deep copy.
Value(const Value& other);
- #if JSON_HAS_RVALUE_REFERENCES
- /// Move constructor
- Value(Value&& other);
- #endif
~Value();
- /// Deep copy, then swap(other).
- /// \note Over-write existing comments. To preserve comments, use #swapPayload().
- Value& operator=(Value other);
- /// Swap everything.
+ Value &operator=(const Value &other);
+ /// Swap values.
+ /// \note Currently, comments are intentionally not swapped, for
+ /// both logic and efficiency.
void swap(Value& other);
- /// Swap values but leave comments and source offsets in place.
- void swapPayload(Value& other);
ValueType type() const;
- /// Compare payload only, not comments etc.
bool operator<(const Value& other) const;
bool operator<=(const Value& other) const;
bool operator>=(const Value& other) const;
bool operator>(const Value& other) const;
bool operator==(const Value& other) const;
bool operator!=(const Value& other) const;
int compare(const Value& other) const;
- const char* asCString() const; ///< Embedded zeroes could cause you trouble!
- #if JSONCPP_USING_SECURE_MEMORY
- unsigned getCStringLength() const; // Allows you to understand the length of the CString
- #endif
- JSONCPP_STRING asString() const; ///< Embedded zeroes are possible.
- /** Get raw char* of string-value.
-  * \return false if !string. (Seg-fault if str or end are NULL.)
-  */
- bool getString(
-     char const** begin, char const** end) const;
+ const char* asCString() const;
+ std::string asString() const;
#ifdef JSON_USE_CPPTL
CppTL::ConstString asConstString() const;
#endif
@@ -437,23 +348,19 @@ Json::Value obj_value(Json::objectValue); // {}
Value& append(const Value& value);
/// Access an object value by name, create a null member if it does not exist.
- /// \note Because of our implementation, keys are limited to 2^30 - 1 chars.
- /// Exceeding that will cause an exception.
Value& operator[](const char* key);
/// Access an object value by name, returns null if there is no member with
/// that name.
const Value& operator[](const char* key) const;
/// Access an object value by name, create a null member if it does not exist.
- /// \param key may contain embedded nulls.
- Value& operator[](const JSONCPP_STRING& key);
+ Value& operator[](const std::string& key);
/// Access an object value by name, returns null if there is no member with
/// that name.
- /// \param key may contain embedded nulls.
- const Value& operator[](const JSONCPP_STRING& key) const;
+ const Value& operator[](const std::string& key) const;
/** \brief Access an object value by name, create a null member if it does not
exist.
* If the object has no entry for that name, then the member name used to store
* the new entry is not duplicated.
* Example of use:
* \code
@@ -471,69 +378,27 @@ Json::Value obj_value(Json::objectValue); // {}
const Value& operator[](const CppTL::ConstString& key) const;
#endif
/// Return the member named key if it exists, defaultValue otherwise.
- /// \note deep copy
Value get(const char* key, const Value& defaultValue) const;
/// Return the member named key if it exists, defaultValue otherwise.
- /// \note deep copy
- /// \note key may contain embedded nulls.
- Value get(const char* begin, const char* end, const Value& defaultValue) const;
- /// Return the member named key if it exists, defaultValue otherwise.
- /// \note deep copy
- /// \param key may contain embedded nulls.
- Value get(const JSONCPP_STRING& key, const Value& defaultValue) const;
+ Value get(const std::string& key, const Value& defaultValue) const;
#ifdef JSON_USE_CPPTL
/// Return the member named key if it exists, defaultValue otherwise.
- /// \note deep copy
Value get(const CppTL::ConstString& key, const Value& defaultValue) const;
#endif
/// Most general and efficient version of isMember()const, get()const,
/// and operator[]const
/// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
Value const* find(char const* begin, char const* end) const;
/// Most general and efficient version of object-mutators.
/// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
/// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
Value const* demand(char const* begin, char const* end);
/// \brief Remove and return the named member.
///
/// Do nothing if it did not exist.
/// \return the removed Value, or null.
/// \pre type() is objectValue or nullValue
/// \post type() is unchanged
- /// \deprecated
Value removeMember(const char* key);
/// Same as removeMember(const char*)
- /// \param key may contain embedded nulls.
- /// \deprecated
- Value removeMember(const JSONCPP_STRING& key);
+ Value removeMember(const std::string& key);
/// Same as removeMember(const char* begin, const char* end, Value* removed),
/// but 'key' is null-terminated.
bool removeMember(const char* key, Value* removed);
/** \brief Remove the named map member.
Update 'removed' iff removed.
\param key may contain embedded nulls.
\return true iff removed (no exceptions)
*/
bool removeMember(JSONCPP_STRING const& key, Value* removed);
/// Same as removeMember(JSONCPP_STRING const& key, Value* removed)
bool removeMember(const char* begin, const char* end, Value* removed);
/** \brief Remove the indexed array element.
O(n) expensive operations.
Update 'removed' iff removed.
\return true iff removed (no exceptions)
*/
bool removeIndex(ArrayIndex i, Value* removed);
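The bool-returning overloads above exist only on the 1.7.6 side; a sketch of how they report the removed value, using the signatures declared above:

#include <json/json.h>
#include <cassert>

int main() {
  Json::Value obj(Json::objectValue);
  obj["k"] = 1;
  Json::Value removed;
  bool gone = obj.removeMember("k", &removed);  // true; 'removed' now holds 1
  assert(gone && removed.asInt() == 1);

  Json::Value arr(Json::arrayValue);
  arr.append(10);
  arr.append(20);
  Json::Value out;
  arr.removeIndex(0, &out);  // O(n): remaining elements shift down
  assert(out.asInt() == 10 && arr.size() == 1);
}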
/// Return true if the object has a member named key.
- /// \note 'key' must be null-terminated.
bool isMember(const char* key) const;
/// Return true if the object has a member named key.
- /// \param key may contain embedded nulls.
- bool isMember(const JSONCPP_STRING& key) const;
+ bool isMember(const std::string& key) const;
- /// Same as isMember(JSONCPP_STRING const& key)const
- bool isMember(const char* begin, const char* end) const;
#ifdef JSON_USE_CPPTL
/// Return true if the object has a member named key.
bool isMember(const CppTL::ConstString& key) const;
@@ -551,18 +416,15 @@ Json::Value obj_value(Json::objectValue); // {}
// EnumValues enumValues() const;
//# endif
- /// \deprecated Always pass len.
- JSONCPP_DEPRECATED("Use setComment(JSONCPP_STRING const&) instead.")
+ /// Comments must be //... or /* ... */
void setComment(const char* comment, CommentPlacement placement);
/// Comments must be //... or /* ... */
- void setComment(const char* comment, size_t len, CommentPlacement placement);
- /// Comments must be //... or /* ... */
- void setComment(const JSONCPP_STRING& comment, CommentPlacement placement);
+ void setComment(const std::string& comment, CommentPlacement placement);
bool hasComment(CommentPlacement placement) const;
/// Include delimiters and embedded newlines.
- JSONCPP_STRING getComment(CommentPlacement placement) const;
+ std::string getComment(CommentPlacement placement) const;
- JSONCPP_STRING toStyledString() const;
+ std::string toStyledString() const;
const_iterator begin() const;
const_iterator end() const;
@@ -572,22 +434,32 @@ Json::Value obj_value(Json::objectValue); // {}
// Accessors for the [start, limit) range of bytes within the JSON text from
// which this value was parsed, if any.
- void setOffsetStart(ptrdiff_t start);
- void setOffsetLimit(ptrdiff_t limit);
- ptrdiff_t getOffsetStart() const;
- ptrdiff_t getOffsetLimit() const;
+ void setOffsetStart(size_t start);
+ void setOffsetLimit(size_t limit);
+ size_t getOffsetStart() const;
+ size_t getOffsetLimit() const;
private:
- void initBasic(ValueType type, bool allocated = false);
- Value& resolveReference(const char* key);
- Value& resolveReference(const char* key, const char* end);
+ Value& resolveReference(const char* key, bool isStatic);
+ #ifdef JSON_VALUE_USE_INTERNAL_MAP
+ inline bool isItemAvailable() const { return itemIsUsed_ == 0; }
+ inline void setItemUsed(bool isUsed = true) { itemIsUsed_ = isUsed ? 1 : 0; }
+ inline bool isMemberNameStatic() const { return memberNameIsStatic_ == 0; }
+ inline void setMemberNameIsStatic(bool isStatic) {
+   memberNameIsStatic_ = isStatic ? 1 : 0;
+ }
+ #endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP
+ private:
struct CommentInfo {
CommentInfo();
~CommentInfo();
- void setComment(const char* text, size_t len);
+ void setComment(const char* text);
char* comment_;
};
@@ -606,18 +478,26 @@ private:
LargestUInt uint_;
double real_;
bool bool_;
- char* string_; // actually ptr to unsigned, followed by str, unless !allocated_
+ char* string_;
+ #ifdef JSON_VALUE_USE_INTERNAL_MAP
+ ValueInternalArray* array_;
+ ValueInternalMap* map_;
+ #else
ObjectValues* map_;
+ #endif
} value_;
ValueType type_ : 8;
- unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
- // If not allocated_, string_ must be null-terminated.
+ int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
+ #ifdef JSON_VALUE_USE_INTERNAL_MAP
+ unsigned int itemIsUsed_ : 1;  // used by the ValueInternalMap container.
+ int memberNameIsStatic_ : 1;   // used by the ValueInternalMap container.
+ #endif
CommentInfo* comments_;
// [start, limit) byte offsets in the source JSON text from which this Value
// was extracted.
- ptrdiff_t start_;
- ptrdiff_t limit_;
+ size_t start_;
+ size_t limit_;
};
/** \brief Experimental and untested: represents an element of the "path" to
@@ -630,7 +510,7 @@ public:
PathArgument();
PathArgument(ArrayIndex index);
PathArgument(const char* key);
- PathArgument(const JSONCPP_STRING& key);
+ PathArgument(const std::string& key);
private:
enum Kind {
@@ -638,7 +518,7 @@ private:
kindIndex,
kindKey
};
- JSONCPP_STRING key_;
+ std::string key_;
ArrayIndex index_;
Kind kind_;
};
@@ -656,7 +536,7 @@ private:
*/
class JSON_API Path {
public:
- Path(const JSONCPP_STRING& path,
+ Path(const std::string& path,
const PathArgument& a1 = PathArgument(),
const PathArgument& a2 = PathArgument(),
const PathArgument& a3 = PathArgument(),
@@ -673,16 +553,355 @@ private:
typedef std::vector<const PathArgument*> InArgs;
typedef std::vector<PathArgument> Args;
- void makePath(const JSONCPP_STRING& path, const InArgs& in);
- void addPathInArg(const JSONCPP_STRING& path,
+ void makePath(const std::string& path, const InArgs& in);
+ void addPathInArg(const std::string& path,
const InArgs& in,
InArgs::const_iterator& itInArg,
PathArgument::Kind kind);
- void invalidPath(const JSONCPP_STRING& path, int location);
+ void invalidPath(const std::string& path, int location);
Args args_;
};
#ifdef JSON_VALUE_USE_INTERNAL_MAP
/** \brief Allocator to customize Value internal map.
* Below is an example of a simple implementation (the default implementation
* actually uses a memory pool for speed).
* \code
* \code
class DefaultValueMapAllocator : public ValueMapAllocator
{
public: // overridden from ValueMapAllocator
virtual ValueInternalMap *newMap()
{
return new ValueInternalMap();
}
virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other )
{
return new ValueInternalMap( other );
}
virtual void destructMap( ValueInternalMap *map )
{
delete map;
}
virtual ValueInternalLink *allocateMapBuckets( unsigned int size )
{
return new ValueInternalLink[size];
}
virtual void releaseMapBuckets( ValueInternalLink *links )
{
delete [] links;
}
virtual ValueInternalLink *allocateMapLink()
{
return new ValueInternalLink();
}
virtual void releaseMapLink( ValueInternalLink *link )
{
delete link;
}
};
* \endcode
*/
class JSON_API ValueMapAllocator {
public:
virtual ~ValueMapAllocator();
virtual ValueInternalMap* newMap() = 0;
virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) = 0;
virtual void destructMap(ValueInternalMap* map) = 0;
virtual ValueInternalLink* allocateMapBuckets(unsigned int size) = 0;
virtual void releaseMapBuckets(ValueInternalLink* links) = 0;
virtual ValueInternalLink* allocateMapLink() = 0;
virtual void releaseMapLink(ValueInternalLink* link) = 0;
};
/** \brief ValueInternalMap hash-map bucket chain link (for internal use only).
* \internal previous_ & next_ allows for bidirectional traversal.
*/
class JSON_API ValueInternalLink {
public:
enum {
itemPerLink = 6
}; // sizeof(ValueInternalLink) = 128 on 32 bits architecture.
enum InternalFlags {
flagAvailable = 0,
flagUsed = 1
};
ValueInternalLink();
~ValueInternalLink();
Value items_[itemPerLink];
char* keys_[itemPerLink];
ValueInternalLink* previous_;
ValueInternalLink* next_;
};
/** \brief A linked page based hash-table implementation used internally by
* Value.
* \internal ValueInternalMap is a traditional bucket-based hash-table, with a
* linked list in each bucket to handle collisions. There is an additional twist:
* each node of the collision linked list is a page containing a fixed number of
* values. This provides a better compromise between memory usage and speed.
*
* Each bucket is made up of a chained list of ValueInternalLink. The last
* link of a given bucket can be found in the 'previous_' field of the following
* bucket. The last link of the last bucket is stored in tailLink_ as it has no
* following bucket.
* Only the last link of a bucket may contain 'available' items. The last link
* always contains at least one element, unless it is the bucket's very first link.
*/
class JSON_API ValueInternalMap {
friend class ValueIteratorBase;
friend class Value;
public:
typedef unsigned int HashKey;
typedef unsigned int BucketIndex;
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
struct IteratorState {
IteratorState() : map_(0), link_(0), itemIndex_(0), bucketIndex_(0) {}
ValueInternalMap* map_;
ValueInternalLink* link_;
BucketIndex itemIndex_;
BucketIndex bucketIndex_;
};
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
ValueInternalMap();
ValueInternalMap(const ValueInternalMap& other);
ValueInternalMap& operator=(ValueInternalMap other);
~ValueInternalMap();
void swap(ValueInternalMap& other);
BucketIndex size() const;
void clear();
bool reserveDelta(BucketIndex growth);
bool reserve(BucketIndex newItemCount);
const Value* find(const char* key) const;
Value* find(const char* key);
Value& resolveReference(const char* key, bool isStatic);
void remove(const char* key);
void doActualRemove(ValueInternalLink* link,
BucketIndex index,
BucketIndex bucketIndex);
ValueInternalLink*& getLastLinkInBucket(BucketIndex bucketIndex);
Value& setNewItem(const char* key,
bool isStatic,
ValueInternalLink* link,
BucketIndex index);
Value& unsafeAdd(const char* key, bool isStatic, HashKey hashedKey);
HashKey hash(const char* key) const;
int compare(const ValueInternalMap& other) const;
private:
void makeBeginIterator(IteratorState& it) const;
void makeEndIterator(IteratorState& it) const;
static bool equals(const IteratorState& x, const IteratorState& other);
static void increment(IteratorState& iterator);
static void incrementBucket(IteratorState& iterator);
static void decrement(IteratorState& iterator);
static const char* key(const IteratorState& iterator);
static const char* key(const IteratorState& iterator, bool& isStatic);
static Value& value(const IteratorState& iterator);
static int distance(const IteratorState& x, const IteratorState& y);
private:
ValueInternalLink* buckets_;
ValueInternalLink* tailLink_;
BucketIndex bucketsSize_;
BucketIndex itemCount_;
};
/** \brief A simplified deque implementation used internally by Value.
* \internal
* It is based on a list of fixed-size "pages"; each page contains a fixed
* number of items.
* Instead of using a linked list, an array of pointers is used for fast item
* look-up.
* Look-up of an element works as follows:
* - compute the page index: pageIndex = itemIndex / itemsPerPage
* - look up the item in the page: pages_[pageIndex][itemIndex % itemsPerPage]
*
* Insertion is amortized constant time (only the array containing the page
* pointers needs to be reallocated when items are appended).
*/
class JSON_API ValueInternalArray {
friend class Value;
friend class ValueIteratorBase;
public:
enum {
itemsPerPage = 8
}; // should be a power of 2 for fast divide and modulo.
typedef Value::ArrayIndex ArrayIndex;
typedef unsigned int PageIndex;
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
struct IteratorState // Must be a POD
{
IteratorState() : array_(0), currentPageIndex_(0), currentItemIndex_(0) {}
ValueInternalArray* array_;
Value** currentPageIndex_;
unsigned int currentItemIndex_;
};
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
ValueInternalArray();
ValueInternalArray(const ValueInternalArray& other);
ValueInternalArray& operator=(ValueInternalArray other);
~ValueInternalArray();
void swap(ValueInternalArray& other);
void clear();
void resize(ArrayIndex newSize);
Value& resolveReference(ArrayIndex index);
Value* find(ArrayIndex index) const;
ArrayIndex size() const;
int compare(const ValueInternalArray& other) const;
private:
static bool equals(const IteratorState& x, const IteratorState& other);
static void increment(IteratorState& iterator);
static void decrement(IteratorState& iterator);
static Value& dereference(const IteratorState& iterator);
static Value& unsafeDereference(const IteratorState& iterator);
static int distance(const IteratorState& x, const IteratorState& y);
static ArrayIndex indexOf(const IteratorState& iterator);
void makeBeginIterator(IteratorState& it) const;
void makeEndIterator(IteratorState& it) const;
void makeIterator(IteratorState& it, ArrayIndex index) const;
void makeIndexValid(ArrayIndex index);
Value** pages_;
ArrayIndex size_;
PageIndex pageCount_;
};
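The page arithmetic described above, spelled out as a stand-alone illustration (itemsPerPage is 8 in ValueInternalArray, a power of two for fast divide and modulo):

#include <cassert>

int main() {
  const unsigned itemsPerPage = 8;
  unsigned itemIndex = 27;
  unsigned pageIndex = itemIndex / itemsPerPage;  // 27 / 8 == 3
  unsigned offset = itemIndex % itemsPerPage;     // 27 % 8 == 3, i.e. pages_[3][3]
  assert(pageIndex == 3 && offset == 3);
  return 0;
}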
/** \brief Experimental: do not use. Allocator to customize Value internal
* array.
* Below is an example of a simple implementation (the actual implementation
* uses a memory pool).
\code
class DefaultValueArrayAllocator : public ValueArrayAllocator
{
public: // overridden from ValueArrayAllocator
virtual ~DefaultValueArrayAllocator()
{
}
virtual ValueInternalArray *newArray()
{
return new ValueInternalArray();
}
virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other )
{
return new ValueInternalArray( other );
}
virtual void destruct( ValueInternalArray *array )
{
delete array;
}
virtual void reallocateArrayPageIndex( Value **&indexes,
ValueInternalArray::PageIndex
&indexCount,
ValueInternalArray::PageIndex
minNewIndexCount )
{
ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1;
if ( minNewIndexCount > newIndexCount )
newIndexCount = minNewIndexCount;
void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
if ( !newIndexes )
throw std::bad_alloc();
indexCount = newIndexCount;
indexes = static_cast<Value **>( newIndexes );
}
virtual void releaseArrayPageIndex( Value **indexes,
ValueInternalArray::PageIndex indexCount )
{
if ( indexes )
free( indexes );
}
virtual Value *allocateArrayPage()
{
return static_cast<Value *>( malloc( sizeof(Value) *
ValueInternalArray::itemsPerPage ) );
}
virtual void releaseArrayPage( Value *value )
{
if ( value )
free( value );
}
};
\endcode
*/
class JSON_API ValueArrayAllocator {
public:
virtual ~ValueArrayAllocator();
virtual ValueInternalArray* newArray() = 0;
virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) = 0;
virtual void destructArray(ValueInternalArray* array) = 0;
/** \brief Reallocate array page index.
* Reallocates the array of pointers to each page.
* \param indexes [input] pointer to the current index. May be \c NULL.
*                [output] pointer to the new index, of at least
*                         \a minNewIndexCount pages.
* \param indexCount [input] current number of pages in the index.
*                   [output] number of pages the reallocated index can handle.
*                            \b MUST be >= \a minNewIndexCount.
* \param minNewIndexCount Minimum number of pages the new index must be able
*                         to handle.
*/
virtual void
reallocateArrayPageIndex(Value**& indexes,
ValueInternalArray::PageIndex& indexCount,
ValueInternalArray::PageIndex minNewIndexCount) = 0;
virtual void
releaseArrayPageIndex(Value** indexes,
ValueInternalArray::PageIndex indexCount) = 0;
virtual Value* allocateArrayPage() = 0;
virtual void releaseArrayPage(Value* value) = 0;
};
#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
/** \brief base class for Value iterators.
*
*/
@@ -693,35 +912,32 @@ public:
typedef int difference_type;
typedef ValueIteratorBase SelfType;
+ ValueIteratorBase();
+ #ifndef JSON_VALUE_USE_INTERNAL_MAP
+ explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
+ #else
+ ValueIteratorBase(const ValueInternalArray::IteratorState& state);
+ ValueIteratorBase(const ValueInternalMap::IteratorState& state);
+ #endif
bool operator==(const SelfType& other) const { return isEqual(other); }
bool operator!=(const SelfType& other) const { return !isEqual(other); }
difference_type operator-(const SelfType& other) const {
- return other.computeDistance(*this);
+ return computeDistance(other);
}
/// Return either the index or the member name of the referenced value as a
/// Value.
Value key() const;
/// Return the index of the referenced Value, or -1 if it is not an arrayValue.
UInt index() const;
- /// Return the member name of the referenced Value, or "" if it is not an
- /// objectValue.
- /// \note Avoid `c_str()` on result, as embedded zeroes are possible.
- JSONCPP_STRING name() const;
/// Return the member name of the referenced Value. "" if it is not an
/// objectValue.
- /// \deprecated This cannot be used for UTF-8 strings, since there can be embedded nulls.
- JSONCPP_DEPRECATED("Use `key = name();` instead.")
- char const* memberName() const;
- /// Return the member name of the referenced Value, or NULL if it is not an
- /// objectValue.
- /// \note Better version than memberName(). Allows embedded nulls.
- char const* memberName(char const** end) const;
+ const char* memberName() const;
protected:
Value& deref() const;
@@ -737,15 +953,17 @@ protected:
void copy(const SelfType& other);
private:
+ #ifndef JSON_VALUE_USE_INTERNAL_MAP
Value::ObjectValues::iterator current_;
// Indicates that iterator is for a null value.
bool isNull_;
- public:
- // For some reason, BORLAND needs these at the end, rather
- // than earlier. No idea why.
- ValueIteratorBase();
- explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
+ #else
+ union {
+   ValueInternalArray::IteratorState array_;
+   ValueInternalMap::IteratorState map_;
+ } iterator_;
+ bool isArray_;
+ #endif
};
/** \brief const iterator for object and array value.
@@ -756,19 +974,23 @@ class JSON_API ValueConstIterator : public ValueIteratorBase {
public:
typedef const Value value_type;
- //typedef unsigned int size_t;
- //typedef int difference_type;
+ typedef unsigned int size_t;
+ typedef int difference_type;
typedef const Value& reference;
typedef const Value* pointer;
typedef ValueConstIterator SelfType;
ValueConstIterator();
- ValueConstIterator(ValueIterator const& other);
private:
/*! \internal Use by Value to create an iterator.
*/
+ #ifndef JSON_VALUE_USE_INTERNAL_MAP
explicit ValueConstIterator(const Value::ObjectValues::iterator& current);
+ #else
+ ValueConstIterator(const ValueInternalArray::IteratorState& state);
+ ValueConstIterator(const ValueInternalMap::IteratorState& state);
+ #endif
public:
SelfType& operator=(const ValueIteratorBase& other);
@@ -813,13 +1035,18 @@ public:
typedef ValueIterator SelfType;
ValueIterator();
- explicit ValueIterator(const ValueConstIterator& other);
+ ValueIterator(const ValueConstIterator& other);
ValueIterator(const ValueIterator& other);
private:
/*! \internal Use by Value to create an iterator.
*/
+ #ifndef JSON_VALUE_USE_INTERNAL_MAP
explicit ValueIterator(const Value::ObjectValues::iterator& current);
+ #else
+ ValueIterator(const ValueInternalArray::IteratorState& state);
+ ValueIterator(const ValueInternalMap::IteratorState& state);
+ #endif
public:
SelfType& operator=(const SelfType& other);
@@ -852,14 +1079,6 @@ public:
} // namespace Json
- namespace std {
- /// Specialize std::swap() for Json::Value.
- template<>
- inline void swap(Json::Value& a, Json::Value& b) { a.swap(b); }
- }
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

View File

@@ -1,20 +1,14 @@
- // DO NOT EDIT. This file (and "version") is generated by CMake.
+ // DO NOT EDIT. This file is generated by CMake from "version"
+ // and "version.h.in" files.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED
- # define JSONCPP_VERSION_STRING "1.7.6"
+ # define JSONCPP_VERSION_STRING "0.7.0"
- # define JSONCPP_VERSION_MAJOR 1
+ # define JSONCPP_VERSION_MAJOR 0
# define JSONCPP_VERSION_MINOR 7
- # define JSONCPP_VERSION_PATCH 6
+ # define JSONCPP_VERSION_PATCH 0
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
#ifdef JSONCPP_USING_SECURE_MEMORY
#undef JSONCPP_USING_SECURE_MEMORY
#endif
#define JSONCPP_USING_SECURE_MEMORY 0
// If non-zero, the library zeroes any memory that it has allocated before
// it frees its memory.
#endif // JSON_VERSION_H_INCLUDED
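JSONCPP_VERSION_HEXA packs major/minor/patch into one word; for 0.7.0 that is (0 << 24) | (7 << 16) | (0 << 8) = 0x00070000. A quick check:

#include <cstdio>

int main() {
  const unsigned hexa = (0u << 24) | (7u << 16) | (0u << 8);  // hexa for 0.7.0
  std::printf("%#010x\n", hexa);  // prints 0x00070000
  return 0;
}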

View File

@@ -11,7 +11,6 @@
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <vector>
#include <string>
#include <ostream>
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
@@ -24,125 +23,13 @@ namespace Json {
class Value;
/**
Usage:
\code
using namespace Json;
void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
std::unique_ptr<StreamWriter> const writer(
factory.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
}
\endcode
*/
class JSON_API StreamWriter {
protected:
JSONCPP_OSTREAM* sout_; // not owned; will not delete
public:
StreamWriter();
virtual ~StreamWriter();
/** Write Value into document as configured in sub-class.
Do not take ownership of sout, but maintain a reference during function.
\pre sout != NULL
\return zero on success (For now, we always return zero, so check the stream instead.)
\throw std::exception possibly, depending on configuration
*/
virtual int write(Value const& root, JSONCPP_OSTREAM* sout) = 0;
/** \brief A simple abstract factory.
*/
class JSON_API Factory {
public:
virtual ~Factory();
/** \brief Allocate a StreamWriter via operator new().
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual StreamWriter* newStreamWriter() const = 0;
}; // Factory
}; // StreamWriter
/** \brief Write into stringstream, then return string, for convenience.
* A StreamWriter will be created from the factory, used, and then deleted.
*/
JSONCPP_STRING JSON_API writeString(StreamWriter::Factory const& factory, Value const& root);
/** \brief Build a StreamWriter implementation.
Usage:
\code
using namespace Json;
Value value = ...;
StreamWriterBuilder builder;
builder["commentStyle"] = "None";
builder["indentation"] = " "; // or whatever you like
std::unique_ptr<Json::StreamWriter> writer(
builder.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
\endcode
*/
class JSON_API StreamWriterBuilder : public StreamWriter::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
Available settings (case-sensitive):
- "commentStyle": "None" or "All"
- "indentation": "<anything>"
- "enableYAMLCompatibility": false or true
- slightly change the whitespace around colons
- "dropNullPlaceholders": false or true
- Drop the "null" string from the writer's output for nullValues.
Strictly speaking, this is not valid JSON. But when the output is being
fed to a browser's Javascript, it makes for smaller output and the
browser can handle the output just fine.
- "useSpecialFloats": false or true
- If true, outputs non-finite floating point values in the following way:
NaN values as "NaN", positive infinity as "Infinity", and negative infinity
as "-Infinity".
You can examine `settings_` yourself
to see the defaults. You can also write and read them just like any
JSON Value.
\sa setDefaults()
*/
Json::Value settings_;
StreamWriterBuilder();
~StreamWriterBuilder() JSONCPP_OVERRIDE;
/**
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
StreamWriter* newStreamWriter() const JSONCPP_OVERRIDE;
/** \return true if 'settings' are legal and consistent;
* otherwise, indicate bad settings via 'invalid'.
*/
bool validate(Json::Value* invalid) const;
/** A simple way to update a specific setting.
*/
Value& operator[](JSONCPP_STRING key);
/** Called by ctor, but you can use this to reset settings_.
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults
*/
static void setDefaults(Json::Value* settings);
};
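Tying the 1.7.6-side pieces together: StreamWriterBuilder and writeString are both declared above, and "commentStyle"/"indentation" are among the documented settings. A minimal sketch:

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value value(Json::objectValue);
  value["compact"] = true;
  Json::StreamWriterBuilder builder;
  builder["commentStyle"] = "None";
  builder["indentation"] = "";  // empty indentation gives the most compact form
  std::cout << Json::writeString(builder, value) << '\n';  // {"compact":true}
}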
/** \brief Abstract class for writers.
- * \deprecated Use StreamWriter. (And really, this is an implementation detail.)
*/
class JSON_API Writer {
public:
virtual ~Writer();
- virtual JSONCPP_STRING write(const Value& root) = 0;
+ virtual std::string write(const Value& root) = 0;
};
/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format
@@ -152,13 +39,11 @@ public:
*consumption,
* but may be useful to support features such as RPC where bandwidth is limited.
* \sa Reader, Value
- * \deprecated Use StreamWriterBuilder.
*/
class JSON_API FastWriter : public Writer {
public:
FastWriter();
- ~FastWriter() JSONCPP_OVERRIDE {}
+ virtual ~FastWriter() {}
void enableYAMLCompatibility();
@@ -172,12 +57,12 @@ public:
void omitEndingLineFeed();
public: // overridden from Writer
- JSONCPP_STRING write(const Value& root) JSONCPP_OVERRIDE;
+ virtual std::string write(const Value& root);
private:
void writeValue(const Value& value);
- JSONCPP_STRING document_;
+ std::string document_;
bool yamlCompatiblityEnabled_;
bool dropNullPlaceholders_;
bool omitEndingLineFeed_;
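FastWriter emits the document on a single line on both sides of the compare; a minimal sketch using the members declared above:

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value root(Json::objectValue);
  root["ok"] = true;
  Json::FastWriter writer;
  writer.omitEndingLineFeed();              // drop the trailing '\n'
  std::cout << writer.write(root) << '\n';  // {"ok":true}
}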
@@ -205,41 +90,40 @@ private:
*#CommentPlacement.
*
* \sa Reader, Value, Value::setComment()
- * \deprecated Use StreamWriterBuilder.
*/
class JSON_API StyledWriter : public Writer {
public:
StyledWriter();
- ~StyledWriter() JSONCPP_OVERRIDE {}
+ virtual ~StyledWriter() {}
public: // overridden from Writer
/** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
* \param root Value to serialize.
* \return String containing the JSON document that represents the root value.
*/
- JSONCPP_STRING write(const Value& root) JSONCPP_OVERRIDE;
+ virtual std::string write(const Value& root);
private:
void writeValue(const Value& value);
void writeArrayValue(const Value& value);
bool isMultineArray(const Value& value);
- void pushValue(const JSONCPP_STRING& value);
+ void pushValue(const std::string& value);
void writeIndent();
- void writeWithIndent(const JSONCPP_STRING& value);
+ void writeWithIndent(const std::string& value);
void indent();
void unindent();
void writeCommentBeforeValue(const Value& root);
void writeCommentAfterValueOnSameLine(const Value& root);
bool hasCommentForValue(const Value& value);
- static JSONCPP_STRING normalizeEOL(const JSONCPP_STRING& text);
+ static std::string normalizeEOL(const std::string& text);
- typedef std::vector<JSONCPP_STRING> ChildValues;
+ typedef std::vector<std::string> ChildValues;
ChildValues childValues_;
- JSONCPP_STRING document_;
+ std::string document_;
- JSONCPP_STRING indentString_;
+ std::string indentString_;
- unsigned int rightMargin_;
+ int rightMargin_;
- unsigned int indentSize_;
+ int indentSize_;
bool addChildValues_;
};
@@ -267,11 +151,10 @@ private:
*
* \param indentation Each level will be indented by this amount extra.
* \sa Reader, Value, Value::setComment()
- * \deprecated Use StreamWriterBuilder.
*/
class JSON_API StyledStreamWriter {
public:
- StyledStreamWriter(JSONCPP_STRING indentation = "\t");
+ StyledStreamWriter(std::string indentation = "\t");
~StyledStreamWriter() {}
public:
@@ -281,46 +164,45 @@ public:
* \note There is no point in deriving from Writer, since write() should not
* return a value.
*/
- void write(JSONCPP_OSTREAM& out, const Value& root);
+ void write(std::ostream& out, const Value& root);
private:
void writeValue(const Value& value);
void writeArrayValue(const Value& value);
bool isMultineArray(const Value& value);
- void pushValue(const JSONCPP_STRING& value);
+ void pushValue(const std::string& value);
void writeIndent();
- void writeWithIndent(const JSONCPP_STRING& value);
+ void writeWithIndent(const std::string& value);
void indent();
void unindent();
void writeCommentBeforeValue(const Value& root);
void writeCommentAfterValueOnSameLine(const Value& root);
bool hasCommentForValue(const Value& value);
- static JSONCPP_STRING normalizeEOL(const JSONCPP_STRING& text);
+ static std::string normalizeEOL(const std::string& text);
- typedef std::vector<JSONCPP_STRING> ChildValues;
+ typedef std::vector<std::string> ChildValues;
ChildValues childValues_;
- JSONCPP_OSTREAM* document_;
+ std::ostream* document_;
- JSONCPP_STRING indentString_;
+ std::string indentString_;
- unsigned int rightMargin_;
+ int rightMargin_;
- JSONCPP_STRING indentation_;
+ std::string indentation_;
- bool addChildValues_ : 1;
- bool indented_ : 1;
+ bool addChildValues_;
};
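And the stream-based pretty printer, as declared above (the constructor takes the per-level indentation string):

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value root(Json::objectValue);
  root["name"] = "jsoncpp";
  Json::StyledStreamWriter writer("  ");  // two-space indent per level
  writer.write(std::cout, root);          // pretty-prints to the stream
}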
#if defined(JSON_HAS_INT64)
- JSONCPP_STRING JSON_API valueToString(Int value);
- JSONCPP_STRING JSON_API valueToString(UInt value);
+ std::string JSON_API valueToString(Int value);
+ std::string JSON_API valueToString(UInt value);
#endif // if defined(JSON_HAS_INT64)
- JSONCPP_STRING JSON_API valueToString(LargestInt value);
- JSONCPP_STRING JSON_API valueToString(LargestUInt value);
- JSONCPP_STRING JSON_API valueToString(double value);
- JSONCPP_STRING JSON_API valueToString(bool value);
- JSONCPP_STRING JSON_API valueToQuotedString(const char* value);
+ std::string JSON_API valueToString(LargestInt value);
+ std::string JSON_API valueToString(LargestUInt value);
+ std::string JSON_API valueToString(double value);
+ std::string JSON_API valueToString(bool value);
+ std::string JSON_API valueToQuotedString(const char* value);
/// \brief Output using the StyledStreamWriter.
/// \see Json::operator>>()
- JSON_API JSONCPP_OSTREAM& operator<<(JSONCPP_OSTREAM&, const Value& root);
+ JSON_API std::ostream& operator<<(std::ostream&, const Value& root);
} // namespace Json

View File

@@ -178,6 +178,15 @@
<File
RelativePath="..\..\include\json\json.h">
</File>
<File
RelativePath="..\..\src\lib_json\json_batchallocator.h">
</File>
<File
RelativePath="..\..\src\lib_json\json_internalarray.inl">
</File>
<File
RelativePath="..\..\src\lib_json\json_internalmap.inl">
</File>
<File
RelativePath="..\..\src\lib_json\json_reader.cpp">
</File>

View File

@@ -1,8 +1,3 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""Tag the sandbox for release, make source and doc tarballs. """Tag the sandbox for release, make source and doc tarballs.
Requires Python 2.6 Requires Python 2.6
@@ -19,8 +14,6 @@ python makerelease.py 0.5.0 0.6.0-dev
Note: This was for Subversion. Now that we are in GitHub, we do not Note: This was for Subversion. Now that we are in GitHub, we do not
need to build versioned tarballs anymore, so makerelease.py is defunct. need to build versioned tarballs anymore, so makerelease.py is defunct.
""" """
from __future__ import print_function
import os.path import os.path
import subprocess import subprocess
import sys import sys
@@ -40,140 +33,140 @@ SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download' SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
SOURCEFORGE_PROJECT = 'jsoncpp' SOURCEFORGE_PROJECT = 'jsoncpp'
def set_version(version): def set_version( version ):
with open('version','wb') as f: with open('version','wb') as f:
f.write(version.strip()) f.write( version.strip() )
def rmdir_if_exist(dir_path): def rmdir_if_exist( dir_path ):
if os.path.isdir(dir_path): if os.path.isdir( dir_path ):
shutil.rmtree(dir_path) shutil.rmtree( dir_path )
class SVNError(Exception): class SVNError(Exception):
pass pass
def svn_command(command, *args): def svn_command( command, *args ):
cmd = ['svn', '--non-interactive', command] + list(args) cmd = ['svn', '--non-interactive', command] + list(args)
print('Running:', ' '.join(cmd)) print 'Running:', ' '.join( cmd )
process = subprocess.Popen(cmd, process = subprocess.Popen( cmd,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT) stderr=subprocess.STDOUT )
stdout = process.communicate()[0] stdout = process.communicate()[0]
if process.returncode: if process.returncode:
error = SVNError('SVN command failed:\n' + stdout) error = SVNError( 'SVN command failed:\n' + stdout )
error.returncode = process.returncode error.returncode = process.returncode
raise error raise error
return stdout return stdout
def check_no_pending_commit(): def check_no_pending_commit():
"""Checks that there is no pending commit in the sandbox.""" """Checks that there is no pending commit in the sandbox."""
stdout = svn_command('status', '--xml') stdout = svn_command( 'status', '--xml' )
etree = ElementTree.fromstring(stdout) etree = ElementTree.fromstring( stdout )
msg = [] msg = []
for entry in etree.getiterator('entry'): for entry in etree.getiterator( 'entry' ):
path = entry.get('path') path = entry.get('path')
status = entry.find('wc-status').get('item') status = entry.find('wc-status').get('item')
if status != 'unversioned' and path != 'version': if status != 'unversioned' and path != 'version':
msg.append('File "%s" has pending change (status="%s")' % (path, status)) msg.append( 'File "%s" has pending change (status="%s")' % (path, status) )
if msg: if msg:
msg.insert(0, 'Pending changes to commit found in sandbox. Commit them first!') msg.insert(0, 'Pending changes to commit found in sandbox. Commit them first!' )
return '\n'.join(msg) return '\n'.join( msg )
def svn_join_url(base_url, suffix): def svn_join_url( base_url, suffix ):
if not base_url.endswith('/'): if not base_url.endswith('/'):
base_url += '/' base_url += '/'
if suffix.startswith('/'): if suffix.startswith('/'):
suffix = suffix[1:] suffix = suffix[1:]
return base_url + suffix return base_url + suffix
def svn_check_if_tag_exist(tag_url): def svn_check_if_tag_exist( tag_url ):
"""Checks if a tag exist. """Checks if a tag exist.
Returns: True if the tag exist, False otherwise. Returns: True if the tag exist, False otherwise.
""" """
try: try:
list_stdout = svn_command('list', tag_url) list_stdout = svn_command( 'list', tag_url )
except SVNError as e: except SVNError, e:
if e.returncode != 1 or not str(e).find('tag_url'): if e.returncode != 1 or not str(e).find('tag_url'):
raise e raise e
# otherwise ignore error, meaning tag does not exist # otherwise ignore error, meaning tag does not exist
return False return False
return True return True
def svn_commit(message): def svn_commit( message ):
"""Commit the sandbox, providing the specified comment. """Commit the sandbox, providing the specified comment.
""" """
svn_command('ci', '-m', message) svn_command( 'ci', '-m', message )
def svn_tag_sandbox(tag_url, message): def svn_tag_sandbox( tag_url, message ):
"""Makes a tag based on the sandbox revisions. """Makes a tag based on the sandbox revisions.
""" """
svn_command('copy', '-m', message, '.', tag_url) svn_command( 'copy', '-m', message, '.', tag_url )
def svn_remove_tag(tag_url, message): def svn_remove_tag( tag_url, message ):
"""Removes an existing tag. """Removes an existing tag.
""" """
svn_command('delete', '-m', message, tag_url) svn_command( 'delete', '-m', message, tag_url )
def svn_export(tag_url, export_dir): def svn_export( tag_url, export_dir ):
"""Exports the tag_url revision to export_dir. """Exports the tag_url revision to export_dir.
Target directory, including its parent, is created if it does not exist. Target directory, including its parent, is created if it does not exist.
If the directory export_dir exists, it is deleted before the export proceeds. If the directory export_dir exists, it is deleted before the export proceeds.
""" """
rmdir_if_exist(export_dir) rmdir_if_exist( export_dir )
svn_command('export', tag_url, export_dir) svn_command( 'export', tag_url, export_dir )
def fix_sources_eol(dist_dir): def fix_sources_eol( dist_dir ):
"""Set file EOL for tarball distribution. """Set file EOL for tarball distribution.
""" """
print('Preparing exported source file EOL for distribution...') print 'Preparing exported source file EOL for distribution...'
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist' prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
win_sources = antglob.glob(dist_dir, win_sources = antglob.glob( dist_dir,
includes = '**/*.sln **/*.vcproj', includes = '**/*.sln **/*.vcproj',
prune_dirs = prune_dirs) prune_dirs = prune_dirs )
unix_sources = antglob.glob(dist_dir, unix_sources = antglob.glob( dist_dir,
includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
sconscript *.json *.expected AUTHORS LICENSE''', sconscript *.json *.expected AUTHORS LICENSE''',
excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*', excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
prune_dirs = prune_dirs) prune_dirs = prune_dirs )
for path in win_sources: for path in win_sources:
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n') fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' )
for path in unix_sources: for path in unix_sources:
fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n') fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )
def download(url, target_path): def download( url, target_path ):
"""Download file represented by url to target_path. """Download file represented by url to target_path.
""" """
f = urllib2.urlopen(url) f = urllib2.urlopen( url )
try: try:
data = f.read() data = f.read()
finally: finally:
f.close() f.close()
fout = open(target_path, 'wb') fout = open( target_path, 'wb' )
try: try:
fout.write(data) fout.write( data )
finally: finally:
fout.close() fout.close()
def check_compile(distcheck_top_dir, platform): def check_compile( distcheck_top_dir, platform ):
cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check'] cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
print('Running:', ' '.join(cmd)) print 'Running:', ' '.join( cmd )
log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform) log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
flog = open(log_path, 'wb') flog = open( log_path, 'wb' )
try: try:
process = subprocess.Popen(cmd, process = subprocess.Popen( cmd,
stdout=flog, stdout=flog,
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
cwd=distcheck_top_dir) cwd=distcheck_top_dir )
stdout = process.communicate()[0] stdout = process.communicate()[0]
status = (process.returncode == 0) status = (process.returncode == 0)
finally: finally:
flog.close() flog.close()
return (status, log_path) return (status, log_path)
def write_tempfile(content, **kwargs): def write_tempfile( content, **kwargs ):
fd, path = tempfile.mkstemp(**kwargs) fd, path = tempfile.mkstemp( **kwargs )
f = os.fdopen(fd, 'wt') f = os.fdopen( fd, 'wt' )
try: try:
f.write(content) f.write( content )
finally: finally:
f.close() f.close()
return path return path
@@ -181,34 +174,34 @@ def write_tempfile(content, **kwargs):
class SFTPError(Exception): class SFTPError(Exception):
pass pass
def run_sftp_batch(userhost, sftp, batch, retry=0): def run_sftp_batch( userhost, sftp, batch, retry=0 ):
path = write_tempfile(batch, suffix='.sftp', text=True) path = write_tempfile( batch, suffix='.sftp', text=True )
# psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost] cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
error = None error = None
for retry_index in range(0, max(1,retry)): for retry_index in xrange(0, max(1,retry)):
heading = retry_index == 0 and 'Running:' or 'Retrying:' heading = retry_index == 0 and 'Running:' or 'Retrying:'
print(heading, ' '.join(cmd)) print heading, ' '.join( cmd )
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
stdout = process.communicate()[0] stdout = process.communicate()[0]
if process.returncode != 0: if process.returncode != 0:
error = SFTPError('SFTP batch failed:\n' + stdout) error = SFTPError( 'SFTP batch failed:\n' + stdout )
else: else:
break break
if error: if error:
raise error raise error
return stdout return stdout
def sourceforge_web_synchro(sourceforge_project, doc_dir, def sourceforge_web_synchro( sourceforge_project, doc_dir,
user=None, sftp='sftp'): user=None, sftp='sftp' ):
"""Notes: does not synchronize sub-directory of doc-dir. """Notes: does not synchronize sub-directory of doc-dir.
""" """
userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project) userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
stdout = run_sftp_batch(userhost, sftp, """ stdout = run_sftp_batch( userhost, sftp, """
cd htdocs cd htdocs
dir dir
exit exit
""") """ )
existing_paths = set() existing_paths = set()
collect = 0 collect = 0
for line in stdout.split('\n'): for line in stdout.split('\n'):
@@ -222,36 +215,36 @@ exit
elif collect == 2: elif collect == 2:
path = line.strip().split()[-1:] path = line.strip().split()[-1:]
if path and path[0] not in ('.', '..'): if path and path[0] not in ('.', '..'):
existing_paths.add(path[0]) existing_paths.add( path[0] )
upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)]) upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
paths_to_remove = existing_paths - upload_paths paths_to_remove = existing_paths - upload_paths
if paths_to_remove: if paths_to_remove:
print('Removing the following file from web:') print 'Removing the following file from web:'
print('\n'.join(paths_to_remove)) print '\n'.join( paths_to_remove )
stdout = run_sftp_batch(userhost, sftp, """cd htdocs stdout = run_sftp_batch( userhost, sftp, """cd htdocs
rm %s rm %s
exit""" % ' '.join(paths_to_remove)) exit""" % ' '.join(paths_to_remove) )
print('Uploading %d files:' % len(upload_paths)) print 'Uploading %d files:' % len(upload_paths)
batch_size = 10 batch_size = 10
upload_paths = list(upload_paths) upload_paths = list(upload_paths)
start_time = time.time() start_time = time.time()
for index in range(0,len(upload_paths),batch_size): for index in xrange(0,len(upload_paths),batch_size):
paths = upload_paths[index:index+batch_size] paths = upload_paths[index:index+batch_size]
file_per_sec = (time.time() - start_time) / (index+1) file_per_sec = (time.time() - start_time) / (index+1)
remaining_files = len(upload_paths) - index remaining_files = len(upload_paths) - index
remaining_sec = file_per_sec * remaining_files remaining_sec = file_per_sec * remaining_files
print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)) print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
run_sftp_batch(userhost, sftp, """cd htdocs run_sftp_batch( userhost, sftp, """cd htdocs
lcd %s lcd %s
mput %s mput %s
exit""" % (doc_dir, ' '.join(paths)), retry=3) exit""" % (doc_dir, ' '.join(paths) ), retry=3 )
def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'): def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ):
userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project) userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
run_sftp_batch(userhost, sftp, """ run_sftp_batch( userhost, sftp, """
mput %s mput %s
exit exit
""" % (' '.join(paths),)) """ % (' '.join(paths),) )
def main(): def main():
@@ -292,99 +285,99 @@ Warning: --force should only be used when developing/testing the release script
options, args = parser.parse_args() options, args = parser.parse_args()
if len(args) != 2: if len(args) != 2:
parser.error('release_version missing on command-line.') parser.error( 'release_version missing on command-line.' )
release_version = args[0] release_version = args[0]
next_version = args[1] next_version = args[1]
if not options.platforms and not options.no_test: if not options.platforms and not options.no_test:
parser.error('You must specify either --platform or --no-test option.') parser.error( 'You must specify either --platform or --no-test option.' )
if options.ignore_pending_commit: if options.ignore_pending_commit:
msg = '' msg = ''
else: else:
msg = check_no_pending_commit() msg = check_no_pending_commit()
if not msg: if not msg:
print('Setting version to', release_version) print 'Setting version to', release_version
set_version(release_version) set_version( release_version )
svn_commit('Release ' + release_version) svn_commit( 'Release ' + release_version )
tag_url = svn_join_url(SVN_TAG_ROOT, release_version) tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
if svn_check_if_tag_exist(tag_url): if svn_check_if_tag_exist( tag_url ):
if options.retag_release: if options.retag_release:
svn_remove_tag(tag_url, 'Overwriting previous tag') svn_remove_tag( tag_url, 'Overwriting previous tag' )
else: else:
print('Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url) print 'Aborting, tag %s already exists. Use --retag to overwrite it!' % tag_url
sys.exit(1) sys.exit( 1 )
svn_tag_sandbox(tag_url, 'Release ' + release_version) svn_tag_sandbox( tag_url, 'Release ' + release_version )
print('Generated doxygen document...') print 'Generated doxygen document...'
## doc_dirname = r'jsoncpp-api-html-0.5.0' ## doc_dirname = r'jsoncpp-api-html-0.5.0'
## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz' ## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True) doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
doc_distcheck_dir = 'dist/doccheck' doc_distcheck_dir = 'dist/doccheck'
tarball.decompress(doc_tarball_path, doc_distcheck_dir) tarball.decompress( doc_tarball_path, doc_distcheck_dir )
doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname) doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname )
export_dir = 'dist/export' export_dir = 'dist/export'
svn_export(tag_url, export_dir) svn_export( tag_url, export_dir )
fix_sources_eol(export_dir) fix_sources_eol( export_dir )
source_dir = 'jsoncpp-src-' + release_version source_dir = 'jsoncpp-src-' + release_version
source_tarball_path = 'dist/%s.tar.gz' % source_dir source_tarball_path = 'dist/%s.tar.gz' % source_dir
print('Generating source tarball to', source_tarball_path) print 'Generating source tarball to', source_tarball_path
tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir) tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
print('Generating amalgamation source tarball to', amalgamation_tarball_path) print 'Generating amalgamation source tarball to', amalgamation_tarball_path
amalgamation_dir = 'dist/amalgamation' amalgamation_dir = 'dist/amalgamation'
amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h') amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir], tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir],
amalgamation_dir, prefix_dir=amalgamation_source_dir) amalgamation_dir, prefix_dir=amalgamation_source_dir )
# Decompress source tarball, download and install scons-local # Decompress source tarball, download and install scons-local
distcheck_dir = 'dist/distcheck' distcheck_dir = 'dist/distcheck'
distcheck_top_dir = distcheck_dir + '/' + source_dir distcheck_top_dir = distcheck_dir + '/' + source_dir
print('Decompressing source tarball to', distcheck_dir) print 'Decompressing source tarball to', distcheck_dir
rmdir_if_exist(distcheck_dir) rmdir_if_exist( distcheck_dir )
tarball.decompress(source_tarball_path, distcheck_dir) tarball.decompress( source_tarball_path, distcheck_dir )
scons_local_path = 'dist/scons-local.tar.gz' scons_local_path = 'dist/scons-local.tar.gz'
print('Downloading scons-local to', scons_local_path) print 'Downloading scons-local to', scons_local_path
download(SCONS_LOCAL_URL, scons_local_path) download( SCONS_LOCAL_URL, scons_local_path )
print('Decompressing scons-local to', distcheck_top_dir) print 'Decompressing scons-local to', distcheck_top_dir
tarball.decompress(scons_local_path, distcheck_top_dir) tarball.decompress( scons_local_path, distcheck_top_dir )
# Run compilation # Run compilation
print('Compiling decompressed tarball') print 'Compiling decompressed tarball'
all_build_status = True all_build_status = True
for platform in options.platforms.split(','): for platform in options.platforms.split(','):
print('Testing platform:', platform) print 'Testing platform:', platform
build_status, log_path = check_compile(distcheck_top_dir, platform) build_status, log_path = check_compile( distcheck_top_dir, platform )
print('see build log:', log_path) print 'see build log:', log_path
print(build_status and '=> ok' or '=> FAILED') print build_status and '=> ok' or '=> FAILED'
all_build_status = all_build_status and build_status all_build_status = all_build_status and build_status
if not build_status: if not build_status:
print('Testing failed on at least one platform, aborting...') print 'Testing failed on at least one platform, aborting...'
svn_remove_tag(tag_url, 'Removing tag due to failed testing') svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
sys.exit(1) sys.exit(1)
if options.user: if options.user:
if not options.no_web: if not options.no_web:
print('Uploading documentation using user', options.user) print 'Uploading documentation using user', options.user
sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp) sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
print('Completed documentation upload') print 'Completed documentation upload'
print('Uploading source and documentation tarballs for release using user', options.user) print 'Uploading source and documentation tarballs for release using user', options.user
sourceforge_release_tarball(SOURCEFORGE_PROJECT, sourceforge_release_tarball( SOURCEFORGE_PROJECT,
[source_tarball_path, doc_tarball_path], [source_tarball_path, doc_tarball_path],
user=options.user, sftp=options.sftp) user=options.user, sftp=options.sftp )
print('Source and doc release tarballs uploaded') print 'Source and doc release tarballs uploaded'
else: else:
print('No upload user specified. Web site and download tarball were not uploaded.') print 'No upload user specified. Web site and download tarball were not uploaded.'
print('Tarball can be found at:', doc_tarball_path) print 'Tarball can be found at:', doc_tarball_path
# Set next version number and commit # Set next version number and commit
set_version(next_version) set_version( next_version )
svn_commit('Released ' + release_version) svn_commit( 'Released ' + release_version )
else: else:
sys.stderr.write(msg + '\n') sys.stderr.write( msg + '\n' )
if __name__ == '__main__': if __name__ == '__main__':
main() main()


@@ -1,7 +1,7 @@
prefix=@CMAKE_INSTALL_PREFIX@ prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=${prefix} exec_prefix=${prefix}
libdir=${exec_prefix}/@LIBRARY_INSTALL_DIR@ libdir=${exec_prefix}/lib
includedir=${prefix}/@INCLUDE_INSTALL_DIR@ includedir=${prefix}/include
Name: jsoncpp Name: jsoncpp
Description: A C++ library for interacting with JSON Description: A C++ library for interacting with JSON


@@ -1,14 +1,9 @@
# Copyright 2009 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
import fnmatch import fnmatch
import os import os
def generate(env): def generate( env ):
def Glob(env, includes = None, excludes = None, dir = '.'): def Glob( env, includes = None, excludes = None, dir = '.' ):
"""Adds Glob(includes = Split('*'), excludes = None, dir = '.') """Adds Glob( includes = Split( '*' ), excludes = None, dir = '.')
helper function to environment. helper function to environment.
Glob both the file-system files. Glob both the file-system files.
@@ -17,36 +12,36 @@ def generate(env):
excludes: list of file name patterns excluded from the return list. excludes: list of file name patterns excluded from the return list.
Example: Example:
sources = env.Glob(("*.cpp", '*.h'), "~*.cpp", "#src") sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
""" """
def filterFilename(path): def filterFilename(path):
abs_path = os.path.join(dir, path) abs_path = os.path.join( dir, path )
if not os.path.isfile(abs_path): if not os.path.isfile(abs_path):
return 0 return 0
fn = os.path.basename(path) fn = os.path.basename(path)
match = 0 match = 0
for include in includes: for include in includes:
if fnmatch.fnmatchcase(fn, include): if fnmatch.fnmatchcase( fn, include ):
match = 1 match = 1
break break
if match == 1 and not excludes is None: if match == 1 and not excludes is None:
for exclude in excludes: for exclude in excludes:
if fnmatch.fnmatchcase(fn, exclude): if fnmatch.fnmatchcase( fn, exclude ):
match = 0 match = 0
break break
return match return match
if includes is None: if includes is None:
includes = ('*',) includes = ('*',)
elif type(includes) in (type(''), type(u'')): elif type(includes) in ( type(''), type(u'') ):
includes = (includes,) includes = (includes,)
if type(excludes) in (type(''), type(u'')): if type(excludes) in ( type(''), type(u'') ):
excludes = (excludes,) excludes = (excludes,)
dir = env.Dir(dir).abspath dir = env.Dir(dir).abspath
paths = os.listdir(dir) paths = os.listdir( dir )
def makeAbsFileNode(path): def makeAbsFileNode( path ):
return env.File(os.path.join(dir, path)) return env.File( os.path.join( dir, path ) )
nodes = filter(filterFilename, paths) nodes = filter( filterFilename, paths )
return map(makeAbsFileNode, nodes) return map( makeAbsFileNode, nodes )
from SCons.Script import Environment from SCons.Script import Environment
Environment.Glob = Glob Environment.Glob = Glob
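The Glob helper reduces to include/exclude pattern filtering over file names. The same filter sketched in C++ with POSIX fnmatch() (matches() and the example patterns are illustrative, not part of jsoncpp):

    #include <fnmatch.h>
    #include <iostream>
    #include <string>
    #include <vector>

    // A name is kept if it matches any include pattern and no exclude pattern.
    static bool matches(const std::string& name,
                        const std::vector<std::string>& includes,
                        const std::vector<std::string>& excludes) {
      bool match = false;
      for (const std::string& p : includes)
        if (fnmatch(p.c_str(), name.c_str(), 0) == 0) { match = true; break; }
      if (match)
        for (const std::string& p : excludes)
          if (fnmatch(p.c_str(), name.c_str(), 0) == 0) { match = false; break; }
      return match;
    }

    int main() {
      // json_value.cpp matches "*.cpp" and no exclude, so this prints 1.
      std::cout << matches("json_value.cpp", {"*.cpp", "*.h"}, {"~*.cpp"}) << "\n";
      return 0;
    }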


@@ -1,8 +1,3 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
import os import os
import os.path import os.path
from fnmatch import fnmatch from fnmatch import fnmatch
@@ -52,7 +47,7 @@ import targz
## elif token == "=": ## elif token == "=":
## data[key] = list() ## data[key] = list()
## else: ## else:
## append_data(data, key, new_data, token) ## append_data( data, key, new_data, token )
## new_data = True ## new_data = True
## ##
## last_token = token ## last_token = token
@@ -60,7 +55,7 @@ import targz
## ##
## if last_token == '\\' and token != '\n': ## if last_token == '\\' and token != '\n':
## new_data = False ## new_data = False
## append_data(data, key, new_data, '\\') ## append_data( data, key, new_data, '\\' )
## ##
## # compress lists of len 1 into single strings ## # compress lists of len 1 into single strings
## for (k, v) in data.items(): ## for (k, v) in data.items():
@@ -121,7 +116,7 @@ import targz
## else: ## else:
## for pattern in file_patterns: ## for pattern in file_patterns:
## sources.extend(glob.glob("/".join([node, pattern]))) ## sources.extend(glob.glob("/".join([node, pattern])))
## sources = map(lambda path: env.File(path), sources) ## sources = map( lambda path: env.File(path), sources )
## return sources ## return sources
## ##
## ##
@@ -148,7 +143,7 @@ def srcDistEmitter(source, target, env):
## # add our output locations ## # add our output locations
## for (k, v) in output_formats.items(): ## for (k, v) in output_formats.items():
## if data.get("GENERATE_" + k, v[0]) == "YES": ## if data.get("GENERATE_" + k, v[0]) == "YES":
## targets.append(env.Dir(os.path.join(out_dir, data.get(k + "_OUTPUT", v[1])))) ## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) )
## ##
## # don't clobber targets ## # don't clobber targets
## for node in targets: ## for node in targets:
@@ -166,13 +161,14 @@ def generate(env):
Add builders and construction variables for the Add builders and construction variables for the
SrcDist tool. SrcDist tool.
""" """
## doxyfile_scanner = env.Scanner(## DoxySourceScan, ## doxyfile_scanner = env.Scanner(
## DoxySourceScan,
## "DoxySourceScan", ## "DoxySourceScan",
## scan_check = DoxySourceScanCheck, ## scan_check = DoxySourceScanCheck,
##) ## )
if targz.exists(env): if targz.exists(env):
srcdist_builder = targz.makeBuilder(srcDistEmitter) srcdist_builder = targz.makeBuilder( srcDistEmitter )
env['BUILDERS']['SrcDist'] = srcdist_builder env['BUILDERS']['SrcDist'] = srcdist_builder


@@ -1,11 +1,5 @@
# Copyright 2010 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
import re import re
from SCons.Script import * # the usual scons stuff you get in a SConscript from SCons.Script import * # the usual scons stuff you get in a SConscript
import collections
def generate(env): def generate(env):
""" """
@@ -31,28 +25,28 @@ def generate(env):
contents = f.read() contents = f.read()
f.close() f.close()
except: except:
raise SCons.Errors.UserError("Can't read source file %s"%sourcefile) raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile
for (k,v) in list(dict.items()): for (k,v) in dict.items():
contents = re.sub(k, v, contents) contents = re.sub(k, v, contents)
try: try:
f = open(targetfile, 'wb') f = open(targetfile, 'wb')
f.write(contents) f.write(contents)
f.close() f.close()
except: except:
raise SCons.Errors.UserError("Can't write target file %s"%targetfile) raise SCons.Errors.UserError, "Can't write target file %s"%targetfile
return 0 # success return 0 # success
def subst_in_file(target, source, env): def subst_in_file(target, source, env):
if 'SUBST_DICT' not in env: if not env.has_key('SUBST_DICT'):
raise SCons.Errors.UserError("SubstInFile requires SUBST_DICT to be set.") raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set."
d = dict(env['SUBST_DICT']) # copy it d = dict(env['SUBST_DICT']) # copy it
for (k,v) in list(d.items()): for (k,v) in d.items():
if isinstance(v, collections.Callable): if callable(v):
d[k] = env.subst(v()).replace('\\','\\\\') d[k] = env.subst(v()).replace('\\','\\\\')
elif SCons.Util.is_String(v): elif SCons.Util.is_String(v):
d[k] = env.subst(v).replace('\\','\\\\') d[k] = env.subst(v).replace('\\','\\\\')
else: else:
raise SCons.Errors.UserError("SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))) raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))
for (t,s) in zip(target, source): for (t,s) in zip(target, source):
return do_subst_in_file(str(t), str(s), d) return do_subst_in_file(str(t), str(s), d)
@@ -66,8 +60,8 @@ def generate(env):
Returns original target, source tuple unchanged. Returns original target, source tuple unchanged.
""" """
d = env['SUBST_DICT'].copy() # copy it d = env['SUBST_DICT'].copy() # copy it
for (k,v) in list(d.items()): for (k,v) in d.items():
if isinstance(v, collections.Callable): if callable(v):
d[k] = env.subst(v()) d[k] = env.subst(v())
elif SCons.Util.is_String(v): elif SCons.Util.is_String(v):
d[k]=env.subst(v) d[k]=env.subst(v)
@@ -75,7 +69,7 @@ def generate(env):
return target, source return target, source
## env.Append(TOOLS = 'substinfile') # this should be automatically done by Scons ?!? ## env.Append(TOOLS = 'substinfile') # this should be automatically done by Scons ?!?
subst_action = SCons.Action.Action(subst_in_file, subst_in_file_string) subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string )
env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter) env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter)
def exists(env): def exists(env):


@@ -1,8 +1,3 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""tarball """tarball
Tool-specific initialization for tarball. Tool-specific initialization for tarball.
@@ -32,9 +27,9 @@ TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
if internal_targz: if internal_targz:
def targz(target, source, env): def targz(target, source, env):
def archive_name(path): def archive_name( path ):
path = os.path.normpath(os.path.abspath(path)) path = os.path.normpath( os.path.abspath( path ) )
common_path = os.path.commonprefix((base_dir, path)) common_path = os.path.commonprefix( (base_dir, path) )
archive_name = path[len(common_path):] archive_name = path[len(common_path):]
return archive_name return archive_name
@@ -42,23 +37,23 @@ if internal_targz:
for name in names: for name in names:
path = os.path.join(dirname, name) path = os.path.join(dirname, name)
if os.path.isfile(path): if os.path.isfile(path):
tar.add(path, archive_name(path)) tar.add(path, archive_name(path) )
compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL) compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
base_dir = os.path.normpath(env.get('TARGZ_BASEDIR', env.Dir('.')).abspath) base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath )
target_path = str(target[0]) target_path = str(target[0])
fileobj = gzip.GzipFile(target_path, 'wb', compression) fileobj = gzip.GzipFile( target_path, 'wb', compression )
tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj) tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
for source in source: for source in source:
source_path = str(source) source_path = str(source)
if source.isdir(): if source.isdir():
os.path.walk(source_path, visit, tar) os.path.walk(source_path, visit, tar)
else: else:
tar.add(source_path, archive_name(source_path)) # filename, arcname tar.add(source_path, archive_name(source_path) ) # filename, arcname
tar.close() tar.close()
targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR']) targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])
def makeBuilder(emitter = None): def makeBuilder( emitter = None ):
return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'), return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
source_factory = SCons.Node.FS.Entry, source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner, source_scanner = SCons.Defaults.DirScanner,


@@ -2,4 +2,4 @@ ADD_SUBDIRECTORY(lib_json)
IF(JSONCPP_WITH_TESTS) IF(JSONCPP_WITH_TESTS)
ADD_SUBDIRECTORY(jsontestrunner) ADD_SUBDIRECTORY(jsontestrunner)
ADD_SUBDIRECTORY(test_lib_json) ADD_SUBDIRECTORY(test_lib_json)
ENDIF() ENDIF(JSONCPP_WITH_TESTS)


@@ -1,25 +1,22 @@
FIND_PACKAGE(PythonInterp 2.6) FIND_PACKAGE(PythonInterp 2.6 REQUIRED)
IF(JSONCPP_LIB_BUILD_SHARED)
ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)
ADD_EXECUTABLE(jsontestrunner_exe ADD_EXECUTABLE(jsontestrunner_exe
main.cpp main.cpp
) )
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL )
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib_static)
ENDIF()
SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe) SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)
IF(PYTHONINTERP_FOUND) IF(PYTHONINTERP_FOUND)
# Run end to end parser/writer tests # Run end to end parser/writer tests
SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test) SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test)
SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py) SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py)
ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ALL
"${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data" "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
DEPENDS jsontestrunner_exe jsoncpp_test DEPENDS jsontestrunner_exe jsoncpp_test
) )
ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests) ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)
ENDIF() ENDIF(PYTHONINTERP_FOUND)


@@ -8,23 +8,13 @@
#include <json/json.h> #include <json/json.h>
#include <algorithm> // sort #include <algorithm> // sort
#include <sstream>
#include <stdio.h> #include <stdio.h>
#if defined(_MSC_VER) && _MSC_VER >= 1310 #if defined(_MSC_VER) && _MSC_VER >= 1310
#pragma warning(disable : 4996) // disable fopen deprecation warning #pragma warning(disable : 4996) // disable fopen deprecation warning
#endif #endif
struct Options static std::string normalizeFloatingPointStr(double value) {
{
JSONCPP_STRING path;
Json::Features features;
bool parseOnly;
typedef JSONCPP_STRING (*writeFuncType)(Json::Value const&);
writeFuncType write;
};
static JSONCPP_STRING normalizeFloatingPointStr(double value) {
char buffer[32]; char buffer[32];
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) #if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
sprintf_s(buffer, sizeof(buffer), "%.16g", value); sprintf_s(buffer, sizeof(buffer), "%.16g", value);
@@ -32,18 +22,18 @@ static JSONCPP_STRING normalizeFloatingPointStr(double value) {
snprintf(buffer, sizeof(buffer), "%.16g", value); snprintf(buffer, sizeof(buffer), "%.16g", value);
#endif #endif
buffer[sizeof(buffer) - 1] = 0; buffer[sizeof(buffer) - 1] = 0;
JSONCPP_STRING s(buffer); std::string s(buffer);
JSONCPP_STRING::size_type index = s.find_last_of("eE"); std::string::size_type index = s.find_last_of("eE");
if (index != JSONCPP_STRING::npos) { if (index != std::string::npos) {
JSONCPP_STRING::size_type hasSign = std::string::size_type hasSign =
(s[index + 1] == '+' || s[index + 1] == '-') ? 1 : 0; (s[index + 1] == '+' || s[index + 1] == '-') ? 1 : 0;
JSONCPP_STRING::size_type exponentStartIndex = index + 1 + hasSign; std::string::size_type exponentStartIndex = index + 1 + hasSign;
JSONCPP_STRING normalized = s.substr(0, exponentStartIndex); std::string normalized = s.substr(0, exponentStartIndex);
JSONCPP_STRING::size_type indexDigit = std::string::size_type indexDigit =
s.find_first_not_of('0', exponentStartIndex); s.find_first_not_of('0', exponentStartIndex);
JSONCPP_STRING exponent = "0"; std::string exponent = "0";
if (indexDigit != if (indexDigit !=
JSONCPP_STRING::npos) // There is an exponent different from 0 std::string::npos) // There is an exponent different from 0
{ {
exponent = s.substr(indexDigit); exponent = s.substr(indexDigit);
} }
@@ -52,18 +42,17 @@ static JSONCPP_STRING normalizeFloatingPointStr(double value) {
return s; return s;
} }
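normalizeFloatingPointStr exists because printf-style %.16g output differs across C runtimes: MSVC historically printed three exponent digits ("1.5e-007") where glibc prints two, so the test runner strips leading zeros from the exponent before comparing. A standalone sketch of the same idea (normalizeExponent is an illustrative reimplementation, not jsoncpp API):

    #include <cstdio>
    #include <iostream>
    #include <string>

    // Strip leading zeros from the exponent so "1.5e+007" (MSVC) and
    // "1.5e+07" (glibc) normalize to the same string.
    static std::string normalizeExponent(std::string s) {
      std::string::size_type e = s.find_last_of("eE");
      if (e == std::string::npos)
        return s; // no exponent part
      std::string::size_type digits = e + 1;
      if (digits < s.size() && (s[digits] == '+' || s[digits] == '-'))
        ++digits; // keep the sign
      std::string::size_type firstNonZero = s.find_first_not_of('0', digits);
      std::string exponent =
          (firstNonZero == std::string::npos) ? "0" : s.substr(firstNonZero);
      return s.substr(0, digits) + exponent;
    }

    int main() {
      char buf[32];
      std::snprintf(buf, sizeof(buf), "%.16g", 1.5e-7);
      std::cout << normalizeExponent(buf) << "\n"; // "1.5e-07" -> "1.5e-7"
      return 0;
    }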
static JSONCPP_STRING readInputTestFile(const char* path) { static std::string readInputTestFile(const char* path) {
FILE* file = fopen(path, "rb"); FILE* file = fopen(path, "rb");
if (!file) if (!file)
return JSONCPP_STRING(""); return std::string("");
fseek(file, 0, SEEK_END); fseek(file, 0, SEEK_END);
long const size = ftell(file); long size = ftell(file);
unsigned long const usize = static_cast<unsigned long>(size);
fseek(file, 0, SEEK_SET); fseek(file, 0, SEEK_SET);
JSONCPP_STRING text; std::string text;
char* buffer = new char[size + 1]; char* buffer = new char[size + 1];
buffer[size] = 0; buffer[size] = 0;
if (fread(buffer, 1, usize, file) == usize) if (fread(buffer, 1, size, file) == (unsigned long)size)
text = buffer; text = buffer;
fclose(file); fclose(file);
delete[] buffer; delete[] buffer;
@@ -71,7 +60,7 @@ static JSONCPP_STRING readInputTestFile(const char* path) {
} }
static void static void
printValueTree(FILE* fout, Json::Value& value, const JSONCPP_STRING& path = ".") { printValueTree(FILE* fout, Json::Value& value, const std::string& path = ".") {
if (value.hasComment(Json::commentBefore)) { if (value.hasComment(Json::commentBefore)) {
fprintf(fout, "%s\n", value.getComment(Json::commentBefore).c_str()); fprintf(fout, "%s\n", value.getComment(Json::commentBefore).c_str());
} }
@@ -105,8 +94,8 @@ printValueTree(FILE* fout, Json::Value& value, const JSONCPP_STRING& path = ".")
break; break;
case Json::arrayValue: { case Json::arrayValue: {
fprintf(fout, "%s=[]\n", path.c_str()); fprintf(fout, "%s=[]\n", path.c_str());
Json::ArrayIndex size = value.size(); int size = value.size();
for (Json::ArrayIndex index = 0; index < size; ++index) { for (int index = 0; index < size; ++index) {
static char buffer[16]; static char buffer[16];
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) #if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
sprintf_s(buffer, sizeof(buffer), "[%d]", index); sprintf_s(buffer, sizeof(buffer), "[%d]", index);
@@ -120,11 +109,11 @@ printValueTree(FILE* fout, Json::Value& value, const JSONCPP_STRING& path = ".")
fprintf(fout, "%s={}\n", path.c_str()); fprintf(fout, "%s={}\n", path.c_str());
Json::Value::Members members(value.getMemberNames()); Json::Value::Members members(value.getMemberNames());
std::sort(members.begin(), members.end()); std::sort(members.begin(), members.end());
JSONCPP_STRING suffix = *(path.end() - 1) == '.' ? "" : "."; std::string suffix = *(path.end() - 1) == '.' ? "" : ".";
for (Json::Value::Members::iterator it = members.begin(); for (Json::Value::Members::iterator it = members.begin();
it != members.end(); it != members.end();
++it) { ++it) {
const JSONCPP_STRING name = *it; const std::string& name = *it;
printValueTree(fout, value[name], path + suffix + name); printValueTree(fout, value[name], path + suffix + name);
} }
} break; } break;
@@ -137,81 +126,57 @@ printValueTree(FILE* fout, Json::Value& value, const JSONCPP_STRING& path = ".")
} }
} }
static int parseAndSaveValueTree(const JSONCPP_STRING& input, static int parseAndSaveValueTree(const std::string& input,
const JSONCPP_STRING& actual, const std::string& actual,
const JSONCPP_STRING& kind, const std::string& kind,
Json::Value& root,
const Json::Features& features, const Json::Features& features,
bool parseOnly, bool parseOnly) {
Json::Value* root)
{
Json::Reader reader(features); Json::Reader reader(features);
bool parsingSuccessful = reader.parse(input.data(), input.data() + input.size(), *root); bool parsingSuccessful = reader.parse(input, root);
if (!parsingSuccessful) { if (!parsingSuccessful) {
printf("Failed to parse %s file: \n%s\n", printf("Failed to parse %s file: \n%s\n",
kind.c_str(), kind.c_str(),
reader.getFormattedErrorMessages().c_str()); reader.getFormattedErrorMessages().c_str());
return 1; return 1;
} }
if (!parseOnly) { if (!parseOnly) {
FILE* factual = fopen(actual.c_str(), "wt"); FILE* factual = fopen(actual.c_str(), "wt");
if (!factual) { if (!factual) {
printf("Failed to create %s actual file.\n", kind.c_str()); printf("Failed to create %s actual file.\n", kind.c_str());
return 2; return 2;
} }
printValueTree(factual, *root); printValueTree(factual, root);
fclose(factual); fclose(factual);
} }
return 0; return 0;
} }
// static JSONCPP_STRING useFastWriter(Json::Value const& root) {
// Json::FastWriter writer; static int rewriteValueTree(const std::string& rewritePath,
// writer.enableYAMLCompatibility(); const Json::Value& root,
// return writer.write(root); std::string& rewrite) {
// } // Json::FastWriter writer;
static JSONCPP_STRING useStyledWriter( // writer.enableYAMLCompatibility();
Json::Value const& root)
{
Json::StyledWriter writer; Json::StyledWriter writer;
return writer.write(root); rewrite = writer.write(root);
}
static JSONCPP_STRING useStyledStreamWriter(
Json::Value const& root)
{
Json::StyledStreamWriter writer;
JSONCPP_OSTRINGSTREAM sout;
writer.write(sout, root);
return sout.str();
}
static JSONCPP_STRING useBuiltStyledStreamWriter(
Json::Value const& root)
{
Json::StreamWriterBuilder builder;
return Json::writeString(builder, root);
}
static int rewriteValueTree(
const JSONCPP_STRING& rewritePath,
const Json::Value& root,
Options::writeFuncType write,
JSONCPP_STRING* rewrite)
{
*rewrite = write(root);
FILE* fout = fopen(rewritePath.c_str(), "wt"); FILE* fout = fopen(rewritePath.c_str(), "wt");
if (!fout) { if (!fout) {
printf("Failed to create rewrite file: %s\n", rewritePath.c_str()); printf("Failed to create rewrite file: %s\n", rewritePath.c_str());
return 2; return 2;
} }
fprintf(fout, "%s\n", rewrite->c_str()); fprintf(fout, "%s\n", rewrite.c_str());
fclose(fout); fclose(fout);
return 0; return 0;
} }
static JSONCPP_STRING removeSuffix(const JSONCPP_STRING& path, static std::string removeSuffix(const std::string& path,
const JSONCPP_STRING& extension) { const std::string& extension) {
if (extension.length() >= path.length()) if (extension.length() >= path.length())
return JSONCPP_STRING(""); return std::string("");
JSONCPP_STRING suffix = path.substr(path.length() - extension.length()); std::string suffix = path.substr(path.length() - extension.length());
if (suffix != extension) if (suffix != extension)
return JSONCPP_STRING(""); return std::string("");
return path.substr(0, path.length() - extension.length()); return path.substr(0, path.length() - extension.length());
} }
@@ -229,98 +194,84 @@ static int printUsage(const char* argv[]) {
return 3; return 3;
} }
static int parseCommandLine( int parseCommandLine(int argc,
int argc, const char* argv[], Options* opts) const char* argv[],
{ Json::Features& features,
opts->parseOnly = false; std::string& path,
opts->write = &useStyledWriter; bool& parseOnly) {
parseOnly = false;
if (argc < 2) { if (argc < 2) {
return printUsage(argv); return printUsage(argv);
} }
int index = 1; int index = 1;
if (JSONCPP_STRING(argv[index]) == "--json-checker") { if (std::string(argv[1]) == "--json-checker") {
opts->features = Json::Features::strictMode(); features = Json::Features::strictMode();
opts->parseOnly = true; parseOnly = true;
++index; ++index;
} }
if (JSONCPP_STRING(argv[index]) == "--json-config") {
if (std::string(argv[1]) == "--json-config") {
printConfig(); printConfig();
return 3; return 3;
} }
if (JSONCPP_STRING(argv[index]) == "--json-writer") {
++index;
JSONCPP_STRING const writerName(argv[index++]);
if (writerName == "StyledWriter") {
opts->write = &useStyledWriter;
} else if (writerName == "StyledStreamWriter") {
opts->write = &useStyledStreamWriter;
} else if (writerName == "BuiltStyledStreamWriter") {
opts->write = &useBuiltStyledStreamWriter;
} else {
printf("Unknown '--json-writer %s'\n", writerName.c_str());
return 4;
}
}
if (index == argc || index + 1 < argc) { if (index == argc || index + 1 < argc) {
return printUsage(argv); return printUsage(argv);
} }
opts->path = argv[index];
path = argv[index];
return 0; return 0;
} }
static int runTest(Options const& opts)
{
int exitCode = 0;
JSONCPP_STRING input = readInputTestFile(opts.path.c_str());
if (input.empty()) {
printf("Failed to read input or empty input: %s\n", opts.path.c_str());
return 3;
}
JSONCPP_STRING basePath = removeSuffix(opts.path, ".json");
if (!opts.parseOnly && basePath.empty()) {
printf("Bad input path. Path does not end with '.expected':\n%s\n",
opts.path.c_str());
return 3;
}
JSONCPP_STRING const actualPath = basePath + ".actual";
JSONCPP_STRING const rewritePath = basePath + ".rewrite";
JSONCPP_STRING const rewriteActualPath = basePath + ".actual-rewrite";
Json::Value root;
exitCode = parseAndSaveValueTree(
input, actualPath, "input",
opts.features, opts.parseOnly, &root);
if (exitCode || opts.parseOnly) {
return exitCode;
}
JSONCPP_STRING rewrite;
exitCode = rewriteValueTree(rewritePath, root, opts.write, &rewrite);
if (exitCode) {
return exitCode;
}
Json::Value rewriteRoot;
exitCode = parseAndSaveValueTree(
rewrite, rewriteActualPath, "rewrite",
opts.features, opts.parseOnly, &rewriteRoot);
if (exitCode) {
return exitCode;
}
return 0;
}
int main(int argc, const char* argv[]) { int main(int argc, const char* argv[]) {
Options opts; std::string path;
try { Json::Features features;
int exitCode = parseCommandLine(argc, argv, &opts); bool parseOnly;
int exitCode = parseCommandLine(argc, argv, features, path, parseOnly);
if (exitCode != 0) { if (exitCode != 0) {
printf("Failed to parse command-line.");
return exitCode; return exitCode;
} }
return runTest(opts);
try {
std::string input = readInputTestFile(path.c_str());
if (input.empty()) {
printf("Failed to read input or empty input: %s\n", path.c_str());
return 3;
}
std::string basePath = removeSuffix(argv[1], ".json");
if (!parseOnly && basePath.empty()) {
printf("Bad input path. Path does not end with '.expected':\n%s\n",
path.c_str());
return 3;
}
std::string actualPath = basePath + ".actual";
std::string rewritePath = basePath + ".rewrite";
std::string rewriteActualPath = basePath + ".actual-rewrite";
Json::Value root;
exitCode = parseAndSaveValueTree(
input, actualPath, "input", root, features, parseOnly);
if (exitCode == 0 && !parseOnly) {
std::string rewrite;
exitCode = rewriteValueTree(rewritePath, root, rewrite);
if (exitCode == 0) {
Json::Value rewriteRoot;
exitCode = parseAndSaveValueTree(rewrite,
rewriteActualPath,
"rewrite",
rewriteRoot,
features,
parseOnly);
}
}
} }
catch (const std::exception& e) { catch (const std::exception& e) {
printf("Unhandled exception:\n%s\n", e.what()); printf("Unhandled exception:\n%s\n", e.what());
return 1; exitCode = 1;
} }
return exitCode;
} }
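The test flow above boils down to a parse / rewrite / reparse round trip. A minimal sketch of that loop against the public jsoncpp API of this era (Json::Reader and Json::StyledWriter):

    #include <json/json.h>
    #include <cstdio>
    #include <string>

    int main() {
      const std::string input = "{ \"a\": [1, 2.5, \"x\"] }";

      // Parse the input document.
      Json::Reader reader;
      Json::Value root;
      if (!reader.parse(input, root)) {
        std::printf("parse failed:\n%s\n",
                    reader.getFormattedErrorMessages().c_str());
        return 1;
      }

      // Rewrite it with a writer, then make sure the rewrite reparses.
      Json::StyledWriter writer;
      const std::string rewrite = writer.write(root);

      Json::Value rewriteRoot;
      if (!reader.parse(rewrite, rewriteRoot)) {
        std::printf("rewrite did not reparse\n");
        return 1;
      }
      std::printf("%s", rewrite.c_str());
      return 0;
    }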


@@ -1,12 +1,14 @@
if( CMAKE_COMPILER_IS_GNUCXX ) OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF)
#Get compiler version. IF(JSONCPP_LIB_BUILD_SHARED)
execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion SET(JSONCPP_LIB_TYPE SHARED)
OUTPUT_VARIABLE GNUCXX_VERSION ) ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ELSE(JSONCPP_LIB_BUILD_SHARED)
SET(JSONCPP_LIB_TYPE STATIC)
ENDIF(JSONCPP_LIB_BUILD_SHARED)
#-Werror=* was introduced -after- GCC 4.1.2
if( GNUCXX_VERSION VERSION_GREATER 4.1.2 ) if( CMAKE_COMPILER_IS_GNUCXX )
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
endif()
endif( CMAKE_COMPILER_IS_GNUCXX ) endif( CMAKE_COMPILER_IS_GNUCXX )
SET( JSONCPP_INCLUDE_DIR ../../include ) SET( JSONCPP_INCLUDE_DIR ../../include )
@@ -24,62 +26,32 @@ SET( PUBLIC_HEADERS
SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} ) SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} )
SET(jsoncpp_sources ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE}
json_tool.h ${PUBLIC_HEADERS}
json_reader.cpp json_tool.h
json_valueiterator.inl json_reader.cpp
json_value.cpp json_batchallocator.h
json_writer.cpp json_valueiterator.inl
version.h.in) json_value.cpp
json_writer.cpp
version.h.in
)
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} )
# Install instructions for this target # Install instructions for this target
IF(JSONCPP_WITH_CMAKE_PACKAGE) IF(JSONCPP_WITH_CMAKE_PACKAGE)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib
PUBLIC $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSON_INCLUDE_DIR}>
)
SET(INSTALL_EXPORT EXPORT jsoncpp) SET(INSTALL_EXPORT EXPORT jsoncpp)
ELSE(JSONCPP_WITH_CMAKE_PACKAGE) ELSE(JSONCPP_WITH_CMAKE_PACKAGE)
SET(INSTALL_EXPORT) SET(INSTALL_EXPORT)
ENDIF() ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
IF(BUILD_SHARED_LIBS) INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp
DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
# Set library's runtime search path on OSX
IF(APPLE)
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES INSTALL_RPATH "@loader_path/." )
ENDIF()
INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR} RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR} LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR}
ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR}) ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR}
)
IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
$<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>)
ENDIF()
ENDIF()
IF(BUILD_STATIC_LIBS)
ADD_LIBRARY(jsoncpp_lib_static STATIC ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp
DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
INSTALL( TARGETS jsoncpp_lib_static ${INSTALL_EXPORT}
RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR}
ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR})
IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib_static PUBLIC
$<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
)
ENDIF()
ENDIF()
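The -DJSON_DLL_BUILD definition added for shared builds feeds the usual Windows export-macro pattern; jsoncpp wires its JSON_API macro this way in json/config.h. A sketch of the idiom (the _MSC_VER gating here is illustrative):

    // The library build defines JSON_DLL_BUILD; library users define JSON_DLL.
    #if defined(_MSC_VER) && defined(JSON_DLL_BUILD)
    #define JSON_API __declspec(dllexport)
    #elif defined(_MSC_VER) && defined(JSON_DLL)
    #define JSON_API __declspec(dllimport)
    #else
    #define JSON_API // static or non-MSVC builds need no annotation
    #endif

    class JSON_API Value; // every exported class/function carries the macro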


@@ -0,0 +1,121 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED
#define JSONCPP_BATCHALLOCATOR_H_INCLUDED
#include <stdlib.h>
#include <assert.h>
#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
namespace Json {
/* Fast memory allocator.
*
* This memory allocator allocates memory for a batch of objects (specified by
* the page size, i.e. the number of objects in each page).
*
* It does not allow the destruction of a single object. All the allocated
* objects can be destroyed at once. The memory can be either released or reused
* for future allocation.
*
* The in-place new operator must be used to construct the object using the
* pointer returned by allocate.
*/
template <typename AllocatedType, const unsigned int objectPerAllocation>
class BatchAllocator {
public:
BatchAllocator(unsigned int objectsPerPage = 255)
: freeHead_(0), objectsPerPage_(objectsPerPage) {
// printf( "Size: %d => %s\n", sizeof(AllocatedType),
// typeid(AllocatedType).name() );
assert(sizeof(AllocatedType) * objectPerAllocation >=
sizeof(AllocatedType*)); // We must be able to store a slist in the
// object free space.
assert(objectsPerPage >= 16);
batches_ = allocateBatch(0); // allocated a dummy page
currentBatch_ = batches_;
}
~BatchAllocator() {
for (BatchInfo* batch = batches_; batch;) {
BatchInfo* nextBatch = batch->next_;
free(batch);
batch = nextBatch;
}
}
/// allocate space for an array of objectPerAllocation objects.
/// @warning it is the responsibility of the caller to call the objects'
/// constructors.
AllocatedType* allocate() {
if (freeHead_) // returns node from free list.
{
AllocatedType* object = freeHead_;
freeHead_ = *(AllocatedType**)object;
return object;
}
if (currentBatch_->used_ == currentBatch_->end_) {
currentBatch_ = currentBatch_->next_;
while (currentBatch_ && currentBatch_->used_ == currentBatch_->end_)
currentBatch_ = currentBatch_->next_;
if (!currentBatch_) // no free batch found, allocate a new one
{
currentBatch_ = allocateBatch(objectsPerPage_);
currentBatch_->next_ = batches_; // insert at the head of the list
batches_ = currentBatch_;
}
}
AllocatedType* allocated = currentBatch_->used_;
currentBatch_->used_ += objectPerAllocation;
return allocated;
}
/// Release the object.
/// @warning it is the responsibility of the caller to actually destruct the
/// object.
void release(AllocatedType* object) {
assert(object != 0);
*(AllocatedType**)object = freeHead_;
freeHead_ = object;
}
private:
struct BatchInfo {
BatchInfo* next_;
AllocatedType* used_;
AllocatedType* end_;
AllocatedType buffer_[objectPerAllocation];
};
// disabled copy constructor and assignement operator.
BatchAllocator(const BatchAllocator&);
void operator=(const BatchAllocator&);
static BatchInfo* allocateBatch(unsigned int objectsPerPage) {
const unsigned int mallocSize =
sizeof(BatchInfo) - sizeof(AllocatedType) * objectPerAllocation +
sizeof(AllocatedType) * objectPerAllocation * objectsPerPage;
BatchInfo* batch = static_cast<BatchInfo*>(malloc(mallocSize));
batch->next_ = 0;
batch->used_ = batch->buffer_;
batch->end_ = batch->buffer_ + objectsPerPage;
return batch;
}
BatchInfo* batches_;
BatchInfo* currentBatch_;
/// Head of a singly linked list within the allocated space of freed objects
AllocatedType* freeHead_;
unsigned int objectsPerPage_;
};
} // namespace Json
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED
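Because allocate() hands back raw, uninitialized storage, construction and destruction are the caller's job, as the warnings above say. A minimal usage sketch (Node is an illustrative payload; the include name assumes the header above):

    #include "json_batchallocator.h"
    #include <new> // placement new

    struct Node { int value; Node* next; };

    int main() {
      Json::BatchAllocator<Node, 1> allocator(/*objectsPerPage=*/256);

      Node* n = allocator.allocate(); // raw, uninitialized storage
      new (n) Node();                 // construct in place

      n->~Node();                     // destroy manually...
      allocator.release(n);           // ...then return the slot to the free list
      return 0;
    }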


@@ -0,0 +1,360 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
// included by json_value.cpp
namespace Json {
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueInternalArray
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
ValueArrayAllocator::~ValueArrayAllocator() {}
// //////////////////////////////////////////////////////////////////
// class DefaultValueArrayAllocator
// //////////////////////////////////////////////////////////////////
#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
class DefaultValueArrayAllocator : public ValueArrayAllocator {
public: // overridden from ValueArrayAllocator
virtual ~DefaultValueArrayAllocator() {}
virtual ValueInternalArray* newArray() { return new ValueInternalArray(); }
virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) {
return new ValueInternalArray(other);
}
virtual void destructArray(ValueInternalArray* array) { delete array; }
virtual void
reallocateArrayPageIndex(Value**& indexes,
ValueInternalArray::PageIndex& indexCount,
ValueInternalArray::PageIndex minNewIndexCount) {
ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1;
if (minNewIndexCount > newIndexCount)
newIndexCount = minNewIndexCount;
void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount);
JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
indexCount = newIndexCount;
indexes = static_cast<Value**>(newIndexes);
}
virtual void releaseArrayPageIndex(Value** indexes,
ValueInternalArray::PageIndex indexCount) {
if (indexes)
free(indexes);
}
virtual Value* allocateArrayPage() {
return static_cast<Value*>(
malloc(sizeof(Value) * ValueInternalArray::itemsPerPage));
}
virtual void releaseArrayPage(Value* value) {
if (value)
free(value);
}
};
#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
/// @todo make this thread-safe (lock when accessing batch allocator)
class DefaultValueArrayAllocator : public ValueArrayAllocator {
public: // overridden from ValueArrayAllocator
virtual ~DefaultValueArrayAllocator() {}
virtual ValueInternalArray* newArray() {
ValueInternalArray* array = arraysAllocator_.allocate();
new (array) ValueInternalArray(); // placement new
return array;
}
virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) {
ValueInternalArray* array = arraysAllocator_.allocate();
new (array) ValueInternalArray(other); // placement new
return array;
}
virtual void destructArray(ValueInternalArray* array) {
if (array) {
array->~ValueInternalArray();
arraysAllocator_.release(array);
}
}
virtual void
reallocateArrayPageIndex(Value**& indexes,
ValueInternalArray::PageIndex& indexCount,
ValueInternalArray::PageIndex minNewIndexCount) {
ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1;
if (minNewIndexCount > newIndexCount)
newIndexCount = minNewIndexCount;
void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount);
JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
indexCount = newIndexCount;
indexes = static_cast<Value**>(newIndexes);
}
virtual void releaseArrayPageIndex(Value** indexes,
ValueInternalArray::PageIndex indexCount) {
if (indexes)
free(indexes);
}
virtual Value* allocateArrayPage() {
return static_cast<Value*>(pagesAllocator_.allocate());
}
virtual void releaseArrayPage(Value* value) {
if (value)
pagesAllocator_.release(value);
}
private:
BatchAllocator<ValueInternalArray, 1> arraysAllocator_;
BatchAllocator<Value, ValueInternalArray::itemsPerPage> pagesAllocator_;
};
#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
static ValueArrayAllocator*& arrayAllocator() {
static DefaultValueArrayAllocator defaultAllocator;
static ValueArrayAllocator* arrayAllocator = &defaultAllocator;
return arrayAllocator;
}
static struct DummyArrayAllocatorInitializer {
DummyArrayAllocatorInitializer() {
arrayAllocator(); // ensure arrayAllocator() statics are initialized before
// main().
}
} dummyArrayAllocatorInitializer;
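The DummyArrayAllocatorInitializer above exists only to call arrayAllocator() during static initialization, forcing its function-local statics to be constructed before main() runs. A tiny sketch of the same idiom, with hypothetical names:

#include <cstdio>

int& counter() {
  static int value = 0; // function-local static, constructed on first use
  return value;
}

// Touching counter() from a namespace-scope constructor forces the local
// static to be initialized during static initialization, before main().
static struct EnsureCounterInit {
  EnsureCounterInit() { counter() = 1; }
} ensureCounterInit;

int main() {
  std::printf("%d\n", counter()); // prints 1: already initialized before main()
  return 0;
}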
// //////////////////////////////////////////////////////////////////
// class ValueInternalArray
// //////////////////////////////////////////////////////////////////
bool ValueInternalArray::equals(const IteratorState& x,
const IteratorState& other) {
return x.array_ == other.array_ &&
x.currentItemIndex_ == other.currentItemIndex_ &&
x.currentPageIndex_ == other.currentPageIndex_;
}
void ValueInternalArray::increment(IteratorState& it) {
JSON_ASSERT_MESSAGE(
it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage +
it.currentItemIndex_ !=
it.array_->size_,
"ValueInternalArray::increment(): moving iterator beyond end");
++(it.currentItemIndex_);
if (it.currentItemIndex_ == itemsPerPage) {
it.currentItemIndex_ = 0;
++(it.currentPageIndex_);
}
}
void ValueInternalArray::decrement(IteratorState& it) {
JSON_ASSERT_MESSAGE(
it.array_ && !(it.currentPageIndex_ == it.array_->pages_ &&
it.currentItemIndex_ == 0),
"ValueInternalArray::decrement(): moving iterator beyond beginning");
if (it.currentItemIndex_ == 0) {
it.currentItemIndex_ = itemsPerPage - 1;
--(it.currentPageIndex_);
} else {
--(it.currentItemIndex_);
}
}
Value& ValueInternalArray::unsafeDereference(const IteratorState& it) {
return (*(it.currentPageIndex_))[it.currentItemIndex_];
}
Value& ValueInternalArray::dereference(const IteratorState& it) {
JSON_ASSERT_MESSAGE(
it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage +
it.currentItemIndex_ <
it.array_->size_,
"ValueInternalArray::dereference(): dereferencing invalid iterator");
return unsafeDereference(it);
}
void ValueInternalArray::makeBeginIterator(IteratorState& it) const {
it.array_ = const_cast<ValueInternalArray*>(this);
it.currentItemIndex_ = 0;
it.currentPageIndex_ = pages_;
}
void ValueInternalArray::makeIterator(IteratorState& it,
ArrayIndex index) const {
it.array_ = const_cast<ValueInternalArray*>(this);
it.currentItemIndex_ = index % itemsPerPage;
it.currentPageIndex_ = pages_ + index / itemsPerPage;
}
void ValueInternalArray::makeEndIterator(IteratorState& it) const {
makeIterator(it, size_);
}
ValueInternalArray::ValueInternalArray() : pages_(0), size_(0), pageCount_(0) {}
ValueInternalArray::ValueInternalArray(const ValueInternalArray& other)
: pages_(0), size_(other.size_), pageCount_(0) {
// Round up so a partially filled last page also gets an index slot.
PageIndex minNewPages = (other.size_ + itemsPerPage - 1) / itemsPerPage;
arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages);
JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages,
"ValueInternalArray::ValueInternalArray(): bad reallocation");
IteratorState itOther;
other.makeBeginIterator(itOther);
Value* value;
for (ArrayIndex index = 0; index < size_; ++index, increment(itOther)) {
if (index % itemsPerPage == 0) {
PageIndex pageIndex = index / itemsPerPage;
value = arrayAllocator()->allocateArrayPage();
pages_[pageIndex] = value;
}
new (value) Value(dereference(itOther));
}
}
ValueInternalArray& ValueInternalArray::operator=(ValueInternalArray other) {
swap(other);
return *this;
}
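The operator= just above is the copy-and-swap idiom: the parameter is taken by value, so the copy is made before any member of *this changes, and swap() then commits it; the temporary carries the old state out and destroys it. A self-contained sketch of the pattern, using a hypothetical Buffer class:

#include <algorithm>
#include <cstdio>

class Buffer {
public:
  explicit Buffer(unsigned n) : size_(n), data_(new int[n]()) {}
  Buffer(const Buffer& other) : size_(other.size_), data_(new int[other.size_]) {
    std::copy(other.data_, other.data_ + size_, data_);
  }
  ~Buffer() { delete[] data_; }
  // Pass by value: a throwing copy leaves *this untouched (strong guarantee).
  Buffer& operator=(Buffer other) {
    swap(other);
    return *this; // 'other' now owns the old storage and frees it on exit
  }
  void swap(Buffer& other) {
    std::swap(size_, other.size_);
    std::swap(data_, other.data_);
  }

private:
  unsigned size_;
  int* data_;
};

int main() {
  Buffer a(3), b(5);
  a = b; // copy b, swap the copy into a, release a's old storage
  std::printf("copy-and-swap done\n");
  return 0;
}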
ValueInternalArray::~ValueInternalArray() {
// destroy all constructed items
IteratorState it;
IteratorState itEnd;
makeBeginIterator(it);
makeEndIterator(itEnd);
for (; !equals(it, itEnd); increment(it)) {
Value* value = &dereference(it);
value->~Value();
}
// release all pages
PageIndex lastPageIndex = (size_ + itemsPerPage - 1) / itemsPerPage; // include the partial last page
for (PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex)
arrayAllocator()->releaseArrayPage(pages_[pageIndex]);
// release pages index
arrayAllocator()->releaseArrayPageIndex(pages_, pageCount_);
}
void ValueInternalArray::swap(ValueInternalArray& other) {
Value** tempPages = pages_;
pages_ = other.pages_;
other.pages_ = tempPages;
ArrayIndex tempSize = size_;
size_ = other.size_;
other.size_ = tempSize;
PageIndex tempPageCount = pageCount_;
pageCount_ = other.pageCount_;
other.pageCount_ = tempPageCount;
}
void ValueInternalArray::clear() {
ValueInternalArray dummy;
swap(dummy);
}
void ValueInternalArray::resize(ArrayIndex newSize) {
if (newSize == 0)
clear();
else if (newSize < size_) {
IteratorState it;
IteratorState itEnd;
makeIterator(it, newSize);
makeIterator(itEnd, size_);
for (; !equals(it, itEnd); increment(it)) {
Value* value = &dereference(it);
value->~Value();
}
PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage;
PageIndex lastPageIndex = (size_ + itemsPerPage - 1) / itemsPerPage; // include the partial last page
for (; pageIndex < lastPageIndex; ++pageIndex)
arrayAllocator()->releaseArrayPage(pages_[pageIndex]);
size_ = newSize;
} else if (newSize > size_)
resolveReference(newSize);
}
void ValueInternalArray::makeIndexValid(ArrayIndex index) {
// Need to enlarge the page index?
if (index >= pageCount_ * itemsPerPage) {
// Round up: covering 'index' requires index / itemsPerPage + 1 pages.
PageIndex minNewPages = index / itemsPerPage + 1;
arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages);
JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages,
"ValueInternalArray::makeIndexValid(): bad reallocation");
}
// Need to allocate new pages?
ArrayIndex nextPageIndex = (size_ % itemsPerPage) != 0
? size_ - (size_ % itemsPerPage) + itemsPerPage
: size_;
if (nextPageIndex <= index) {
PageIndex pageIndex = nextPageIndex / itemsPerPage;
PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1;
for (; pageToAllocate-- > 0; ++pageIndex)
pages_[pageIndex] = arrayAllocator()->allocateArrayPage();
}
// Initialize all new entries
IteratorState it;
IteratorState itEnd;
makeIterator(it, size_);
size_ = index + 1;
makeIterator(itEnd, size_);
for (; !equals(it, itEnd); increment(it)) {
Value* value = &dereference(it);
new (value) Value(); // Construct a default value using placement new
}
}
Value& ValueInternalArray::resolveReference(ArrayIndex index) {
if (index >= size_)
makeIndexValid(index);
return pages_[index / itemsPerPage][index % itemsPerPage];
}
Value* ValueInternalArray::find(ArrayIndex index) const {
if (index >= size_)
return 0;
return &(pages_[index / itemsPerPage][index % itemsPerPage]);
}
ValueInternalArray::ArrayIndex ValueInternalArray::size() const {
return size_;
}
int ValueInternalArray::distance(const IteratorState& x,
const IteratorState& y) {
return indexOf(y) - indexOf(x);
}
ValueInternalArray::ArrayIndex
ValueInternalArray::indexOf(const IteratorState& iterator) {
if (!iterator.array_)
return ArrayIndex(-1);
return ArrayIndex((iterator.currentPageIndex_ - iterator.array_->pages_) *
itemsPerPage +
iterator.currentItemIndex_);
}
int ValueInternalArray::compare(const ValueInternalArray& other) const {
int sizeDiff(size_ - other.size_);
if (sizeDiff != 0)
return sizeDiff;
for (ArrayIndex index = 0; index < size_; ++index) {
int diff = pages_[index / itemsPerPage][index % itemsPerPage].compare(
other.pages_[index / itemsPerPage][index % itemsPerPage]);
if (diff != 0)
return diff;
}
return 0;
}
} // namespace Json
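ValueInternalArray stores its elements in fixed-size pages and addresses element i as pages_[i / itemsPerPage][i % itemsPerPage]. A minimal sketch of that page arithmetic, using plain arrays in place of the allocator machinery (itemsPerPage = 8 is an assumed value here, not jsoncpp's real constant):

#include <cassert>
#include <vector>

int main() {
  const unsigned itemsPerPage = 8; // stand-in for ValueInternalArray::itemsPerPage
  const unsigned index = 19;

  // Pages needed to cover indices 0..19: 19 / 8 + 1 = 3.
  const unsigned pageCount = index / itemsPerPage + 1;
  std::vector<int*> pages;
  for (unsigned p = 0; p < pageCount; ++p)
    pages.push_back(new int[itemsPerPage]());

  // The same two-level lookup resolveReference() and find() perform:
  pages[index / itemsPerPage][index % itemsPerPage] = 123;
  assert(pages[2][3] == 123); // 19 == 2 * 8 + 3

  for (unsigned p = 0; p < pageCount; ++p)
    delete[] pages[p];
  return 0;
}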

View File

@@ -0,0 +1,473 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
// included by json_value.cpp
namespace Json {
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueInternalMap
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
/** \internal MUST be safely initialized using memset( this, 0,
* sizeof(ValueInternalLink) );
* This optimization is used by the fast allocator.
*/
ValueInternalLink::ValueInternalLink() : previous_(0), next_(0) {}
ValueInternalLink::~ValueInternalLink() {
for (int index = 0; index < itemPerLink; ++index) {
if (!items_[index].isItemAvailable()) {
if (!items_[index].isMemberNameStatic())
free(keys_[index]);
} else
break;
}
}
ValueMapAllocator::~ValueMapAllocator() {}
#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
class DefaultValueMapAllocator : public ValueMapAllocator {
public: // overridden from ValueMapAllocator
virtual ValueInternalMap* newMap() { return new ValueInternalMap(); }
virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) {
return new ValueInternalMap(other);
}
virtual void destructMap(ValueInternalMap* map) { delete map; }
virtual ValueInternalLink* allocateMapBuckets(unsigned int size) {
return new ValueInternalLink[size];
}
virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; }
virtual ValueInternalLink* allocateMapLink() {
return new ValueInternalLink();
}
virtual void releaseMapLink(ValueInternalLink* link) { delete link; }
};
#else
/// @todo make this thread-safe (lock when accessing the batch allocator)
class DefaultValueMapAllocator : public ValueMapAllocator {
public: // overridden from ValueMapAllocator
virtual ValueInternalMap* newMap() {
ValueInternalMap* map = mapsAllocator_.allocate();
new (map) ValueInternalMap(); // placement new
return map;
}
virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) {
ValueInternalMap* map = mapsAllocator_.allocate();
new (map) ValueInternalMap(other); // placement new
return map;
}
virtual void destructMap(ValueInternalMap* map) {
if (map) {
map->~ValueInternalMap();
mapsAllocator_.release(map);
}
}
virtual ValueInternalLink* allocateMapBuckets(unsigned int size) {
return new ValueInternalLink[size];
}
virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; }
virtual ValueInternalLink* allocateMapLink() {
ValueInternalLink* link = linksAllocator_.allocate();
memset(link, 0, sizeof(ValueInternalLink));
return link;
}
virtual void releaseMapLink(ValueInternalLink* link) {
link->~ValueInternalLink();
linksAllocator_.release(link);
}
private:
BatchAllocator<ValueInternalMap, 1> mapsAllocator_;
BatchAllocator<ValueInternalLink, 1> linksAllocator_;
};
#endif
static ValueMapAllocator*& mapAllocator() {
static DefaultValueMapAllocator defaultAllocator;
static ValueMapAllocator* mapAllocator = &defaultAllocator;
return mapAllocator;
}
static struct DummyMapAllocatorInitializer {
DummyMapAllocatorInitializer() {
mapAllocator(); // ensure mapAllocator() statics are initialized before
// main().
}
} dummyMapAllocatorInitializer;
// h(K) = value * K >> w ; with w = 32 and K prime w.r.t. 2^32.
/*
Uses a linked-list hash map:
- the buckets array is the container;
- each linked-list element holds 6 key/value pairs (memory = (16+4) * 6 + 4 = 124);
- values carry extra state: valid, available, deleted.
*/
ValueInternalMap::ValueInternalMap()
: buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) {}
ValueInternalMap::ValueInternalMap(const ValueInternalMap& other)
: buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) {
reserve(other.itemCount_);
IteratorState it;
IteratorState itEnd;
other.makeBeginIterator(it);
other.makeEndIterator(itEnd);
for (; !equals(it, itEnd); increment(it)) {
bool isStatic;
const char* memberName = key(it, isStatic);
const Value& aValue = value(it);
resolveReference(memberName, isStatic) = aValue;
}
}
ValueInternalMap& ValueInternalMap::operator=(ValueInternalMap other) {
swap(other);
return *this;
}
ValueInternalMap::~ValueInternalMap() {
if (buckets_) {
for (BucketIndex bucketIndex = 0; bucketIndex < bucketsSize_;
++bucketIndex) {
ValueInternalLink* link = buckets_[bucketIndex].next_;
while (link) {
ValueInternalLink* linkToRelease = link;
link = link->next_;
mapAllocator()->releaseMapLink(linkToRelease);
}
}
mapAllocator()->releaseMapBuckets(buckets_);
}
}
void ValueInternalMap::swap(ValueInternalMap& other) {
ValueInternalLink* tempBuckets = buckets_;
buckets_ = other.buckets_;
other.buckets_ = tempBuckets;
ValueInternalLink* tempTailLink = tailLink_;
tailLink_ = other.tailLink_;
other.tailLink_ = tempTailLink;
BucketIndex tempBucketsSize = bucketsSize_;
bucketsSize_ = other.bucketsSize_;
other.bucketsSize_ = tempBucketsSize;
BucketIndex tempItemCount = itemCount_;
itemCount_ = other.itemCount_;
other.itemCount_ = tempItemCount;
}
void ValueInternalMap::clear() {
ValueInternalMap dummy;
swap(dummy);
}
ValueInternalMap::BucketIndex ValueInternalMap::size() const {
return itemCount_;
}
bool ValueInternalMap::reserveDelta(BucketIndex growth) {
return reserve(itemCount_ + growth);
}
bool ValueInternalMap::reserve(BucketIndex newItemCount) {
if (!buckets_ && newItemCount > 0) {
buckets_ = mapAllocator()->allocateMapBuckets(1);
bucketsSize_ = 1;
tailLink_ = &buckets_[0];
}
// BucketIndex idealBucketCount = (newItemCount +
// ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink;
return true;
}
const Value* ValueInternalMap::find(const char* key) const {
if (!bucketsSize_)
return 0;
HashKey hashedKey = hash(key);
BucketIndex bucketIndex = hashedKey % bucketsSize_;
for (const ValueInternalLink* current = &buckets_[bucketIndex]; current != 0;
current = current->next_) {
for (BucketIndex index = 0; index < ValueInternalLink::itemPerLink;
++index) {
if (current->items_[index].isItemAvailable())
return 0;
if (strcmp(key, current->keys_[index]) == 0)
return &current->items_[index];
}
}
return 0;
}
Value* ValueInternalMap::find(const char* key) {
const ValueInternalMap* constThis = this;
return const_cast<Value*>(constThis->find(key));
}
Value& ValueInternalMap::resolveReference(const char* key, bool isStatic) {
HashKey hashedKey = hash(key);
if (bucketsSize_) {
BucketIndex bucketIndex = hashedKey % bucketsSize_;
ValueInternalLink** previous = 0;
BucketIndex index;
for (ValueInternalLink* current = &buckets_[bucketIndex]; current != 0;
previous = &current->next_, current = current->next_) {
for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
if (current->items_[index].isItemAvailable())
return setNewItem(key, isStatic, current, index);
if (strcmp(key, current->keys_[index]) == 0)
return current->items_[index];
}
}
}
reserveDelta(1);
return unsafeAdd(key, isStatic, hashedKey);
}
void ValueInternalMap::remove(const char* key) {
HashKey hashedKey = hash(key);
if (!bucketsSize_)
return;
BucketIndex bucketIndex = hashedKey % bucketsSize_;
for (ValueInternalLink* link = &buckets_[bucketIndex]; link != 0;
link = link->next_) {
BucketIndex index;
for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
if (link->items_[index].isItemAvailable())
return;
if (strcmp(key, link->keys_[index]) == 0) {
doActualRemove(link, index, bucketIndex);
return;
}
}
}
}
void ValueInternalMap::doActualRemove(ValueInternalLink* link,
BucketIndex index,
BucketIndex bucketIndex) {
// Find the last item of the bucket and swap it with the 'removed' one.
// Set the removed item's flag to 'available'.
// If the last link then contains only 'available' items, deallocate it
// (it is empty).
ValueInternalLink*& lastLink = getLastLinkInBucket(index);
BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1
for (; lastItemIndex < ValueInternalLink::itemPerLink;
++lastItemIndex) // could be optimized with a binary search
{
if (lastLink->items_[lastItemIndex].isItemAvailable())
break;
}
BucketIndex lastUsedIndex = lastItemIndex - 1;
Value* valueToDelete = &link->items_[index];
Value* valueToPreserve = &lastLink->items_[lastUsedIndex];
if (valueToDelete != valueToPreserve)
valueToDelete->swap(*valueToPreserve);
if (lastUsedIndex == 0) // page is now empty
{ // remove it from bucket linked list and delete it.
ValueInternalLink* linkPreviousToLast = lastLink->previous_;
if (linkPreviousToLast != 0) // the embedded bucket link cannot be deleted.
{
mapAllocator()->releaseMapLink(lastLink);
linkPreviousToLast->next_ = 0;
lastLink = linkPreviousToLast;
}
} else {
Value dummy;
valueToPreserve->swap(dummy); // restore deleted to default Value.
valueToPreserve->setItemUsed(false);
}
--itemCount_;
}
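doActualRemove() above avoids shifting entries by swapping the removed item with the last used item in the bucket and then shrinking. The same trick, shown on a std::vector:

#include <algorithm>
#include <cstdio>
#include <vector>

int main() {
  std::vector<int> items;
  items.push_back(10);
  items.push_back(20);
  items.push_back(30);
  items.push_back(40);

  const std::size_t removeAt = 1;           // remove the value 20
  std::swap(items[removeAt], items.back()); // move the last item into the hole
  items.pop_back();                         // shrink; nothing is shifted

  for (std::size_t i = 0; i < items.size(); ++i)
    std::printf("%d ", items[i]);           // prints: 10 40 30 (order changes)
  std::printf("\n");
  return 0;
}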
ValueInternalLink*&
ValueInternalMap::getLastLinkInBucket(BucketIndex bucketIndex) {
if (bucketIndex == bucketsSize_ - 1)
return tailLink_;
ValueInternalLink*& previous = buckets_[bucketIndex + 1].previous_;
if (!previous)
previous = &buckets_[bucketIndex];
return previous;
}
Value& ValueInternalMap::setNewItem(const char* key,
bool isStatic,
ValueInternalLink* link,
BucketIndex index) {
char* duplicatedKey = makeMemberName(key);
++itemCount_;
link->keys_[index] = duplicatedKey;
link->items_[index].setItemUsed();
link->items_[index].setMemberNameIsStatic(isStatic);
return link->items_[index]; // items already default constructed.
}
Value&
ValueInternalMap::unsafeAdd(const char* key, bool isStatic, HashKey hashedKey) {
JSON_ASSERT_MESSAGE(bucketsSize_ > 0,
"ValueInternalMap::unsafeAdd(): internal logic error.");
BucketIndex bucketIndex = hashedKey % bucketsSize_;
ValueInternalLink*& previousLink = getLastLinkInBucket(bucketIndex);
ValueInternalLink* link = previousLink;
BucketIndex index;
for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
if (link->items_[index].isItemAvailable())
break;
}
if (index == ValueInternalLink::itemPerLink) // need to add a new page
{
ValueInternalLink* newLink = mapAllocator()->allocateMapLink();
index = 0;
link->next_ = newLink;
previousLink = newLink;
link = newLink;
}
return setNewItem(key, isStatic, link, index);
}
ValueInternalMap::HashKey ValueInternalMap::hash(const char* key) const {
HashKey hash = 0;
while (*key)
hash += *key++ * 37;
return hash;
}
int ValueInternalMap::compare(const ValueInternalMap& other) const {
int sizeDiff(itemCount_ - other.itemCount_);
if (sizeDiff != 0)
return sizeDiff;
// A strict ordering guarantee is required: compare all keys FIRST, then
// compare values.
IteratorState it;
IteratorState itEnd;
makeBeginIterator(it);
makeEndIterator(itEnd);
for (; !equals(it, itEnd); increment(it)) {
if (!other.find(key(it)))
return 1;
}
// All keys are equal; now compare the values.
makeBeginIterator(it);
for (; !equals(it, itEnd); increment(it)) {
const Value* otherValue = other.find(key(it));
int valueDiff = value(it).compare(*otherValue);
if (valueDiff != 0)
return valueDiff;
}
return 0;
}
void ValueInternalMap::makeBeginIterator(IteratorState& it) const {
it.map_ = const_cast<ValueInternalMap*>(this);
it.bucketIndex_ = 0;
it.itemIndex_ = 0;
it.link_ = buckets_;
}
void ValueInternalMap::makeEndIterator(IteratorState& it) const {
it.map_ = const_cast<ValueInternalMap*>(this);
it.bucketIndex_ = bucketsSize_;
it.itemIndex_ = 0;
it.link_ = 0;
}
bool ValueInternalMap::equals(const IteratorState& x,
const IteratorState& other) {
return x.map_ == other.map_ && x.bucketIndex_ == other.bucketIndex_ &&
x.link_ == other.link_ && x.itemIndex_ == other.itemIndex_;
}
void ValueInternalMap::incrementBucket(IteratorState& iterator) {
++iterator.bucketIndex_;
JSON_ASSERT_MESSAGE(
iterator.bucketIndex_ <= iterator.map_->bucketsSize_,
"ValueInternalMap::increment(): attempting to iterate beyond end.");
if (iterator.bucketIndex_ == iterator.map_->bucketsSize_)
iterator.link_ = 0;
else
iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]);
iterator.itemIndex_ = 0;
}
void ValueInternalMap::increment(IteratorState& iterator) {
JSON_ASSERT_MESSAGE(iterator.map_,
"Attempting to iterator using invalid iterator.");
++iterator.itemIndex_;
if (iterator.itemIndex_ == ValueInternalLink::itemPerLink) {
JSON_ASSERT_MESSAGE(
iterator.link_ != 0,
"ValueInternalMap::increment(): attempting to iterate beyond end.");
iterator.link_ = iterator.link_->next_;
if (iterator.link_ == 0)
incrementBucket(iterator);
} else if (iterator.link_->items_[iterator.itemIndex_].isItemAvailable()) {
incrementBucket(iterator);
}
}
void ValueInternalMap::decrement(IteratorState& iterator) {
if (iterator.itemIndex_ == 0) {
JSON_ASSERT_MESSAGE(iterator.map_,
"Attempting to iterate using invalid iterator.");
if (iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_]) {
JSON_ASSERT_MESSAGE(iterator.bucketIndex_ > 0,
"Attempting to iterate beyond beginning.");
--(iterator.bucketIndex_);
}
iterator.link_ = iterator.link_->previous_;
iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1;
}
}
const char* ValueInternalMap::key(const IteratorState& iterator) {
JSON_ASSERT_MESSAGE(iterator.link_,
"Attempting to iterate using invalid iterator.");
return iterator.link_->keys_[iterator.itemIndex_];
}
const char* ValueInternalMap::key(const IteratorState& iterator,
bool& isStatic) {
JSON_ASSERT_MESSAGE(iterator.link_,
"Attempting to iterate using invalid iterator.");
isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic();
return iterator.link_->keys_[iterator.itemIndex_];
}
Value& ValueInternalMap::value(const IteratorState& iterator) {
JSON_ASSERT_MESSAGE(iterator.link_,
"Attempting to iterate using invalid iterator.");
return iterator.link_->items_[iterator.itemIndex_];
}
int ValueInternalMap::distance(const IteratorState& x, const IteratorState& y) {
  int offset = 0;
  IteratorState it = x;
  while (!equals(it, y)) {
    increment(it);
    ++offset; // count each step; without this the function always returned 0
  }
  return offset;
}
} // namespace Json
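The hash above is a simple multiply-by-37 accumulation over the key's bytes, and the bucket is just hash % bucketsSize_. Note that reserve() as shown only ever allocates a single bucket (the rehashing computation is commented out), so in this code every key ends up chaining through one bucket's links. A small sketch of the hashing step, with illustrative key names:

#include <cstdio>

// Same accumulation as ValueInternalMap::hash() above.
unsigned hashKey(const char* key) {
  unsigned h = 0;
  while (*key)
    h += static_cast<unsigned>(*key++) * 37;
  return h;
}

int main() {
  const unsigned bucketCount = 1; // reserve() above never grows past one bucket
  const char* keys[] = { "name", "age", "address" };
  for (unsigned i = 0; i < 3; ++i)
    std::printf("%-8s -> hash %u -> bucket %u\n",
                keys[i], hashKey(keys[i]), hashKey(keys[i]) % bucketCount);
  return 0;
}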

File diff suppressed because it is too large

View File

@@ -6,10 +6,6 @@
#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED #ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED
#define LIB_JSONCPP_JSON_TOOL_H_INCLUDED #define LIB_JSONCPP_JSON_TOOL_H_INCLUDED
#ifndef NO_LOCALE_SUPPORT
#include <clocale>
#endif
/* This header provides common string manipulation support, such as UTF-8, /* This header provides common string manipulation support, such as UTF-8,
* portable conversion from/to string... * portable conversion from/to string...
* *
@@ -17,18 +13,10 @@
*/ */
namespace Json { namespace Json {
static char getDecimalPoint() {
#ifdef NO_LOCALE_SUPPORT
return '\0';
#else
struct lconv* lc = localeconv();
return lc ? *(lc->decimal_point) : '\0';
#endif
}
/// Converts a unicode code-point to UTF-8. /// Converts a unicode code-point to UTF-8.
static inline JSONCPP_STRING codePointToUTF8(unsigned int cp) { static inline std::string codePointToUTF8(unsigned int cp) {
JSONCPP_STRING result; std::string result;
// based on description from http://en.wikipedia.org/wiki/UTF-8 // based on description from http://en.wikipedia.org/wiki/UTF-8
@@ -42,8 +30,8 @@ static inline JSONCPP_STRING codePointToUTF8(unsigned int cp) {
} else if (cp <= 0xFFFF) { } else if (cp <= 0xFFFF) {
result.resize(3); result.resize(3);
result[2] = static_cast<char>(0x80 | (0x3f & cp)); result[2] = static_cast<char>(0x80 | (0x3f & cp));
result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 6))); result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
result[0] = static_cast<char>(0xE0 | (0xf & (cp >> 12))); result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
} else if (cp <= 0x10FFFF) { } else if (cp <= 0x10FFFF) {
result.resize(4); result.resize(4);
result[3] = static_cast<char>(0x80 | (0x3f & cp)); result[3] = static_cast<char>(0x80 | (0x3f & cp));
@@ -55,7 +43,7 @@ static inline JSONCPP_STRING codePointToUTF8(unsigned int cp) {
return result; return result;
} }
/// Returns true if ch is a control character (in range [1,31]). /// Returns true if ch is a control character (in range [0,32[).
static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; } static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }
enum { enum {
@@ -75,7 +63,7 @@ typedef char UIntToStringBuffer[uintToStringBufferSize];
static inline void uintToString(LargestUInt value, char*& current) { static inline void uintToString(LargestUInt value, char*& current) {
*--current = 0; *--current = 0;
do { do {
*--current = static_cast<char>(value % 10U + static_cast<unsigned>('0')); *--current = char(value % 10) + '0';
value /= 10; value /= 10;
} while (value != 0); } while (value != 0);
} }
@@ -94,18 +82,6 @@ static inline void fixNumericLocale(char* begin, char* end) {
} }
} }
static inline void fixNumericLocaleInput(char* begin, char* end) {
char decimalPoint = getDecimalPoint();
if (decimalPoint != '\0' && decimalPoint != '.') {
while (begin < end) {
if (*begin == '.') {
*begin = decimalPoint;
}
++begin;
}
}
}
} // namespace Json { } // namespace Json {
#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED #endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED
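The codePointToUTF8() diff above encodes a code point into 1 to 4 bytes depending on its range. A standalone sketch with the same branch structure; this is standard UTF-8 encoding, not a jsoncpp-specific API:

#include <cstdio>
#include <string>

std::string codePointToUTF8(unsigned cp) {
  std::string r;
  if (cp <= 0x7F) {               // 1 byte: plain ASCII
    r.resize(1);
    r[0] = static_cast<char>(cp);
  } else if (cp <= 0x7FF) {       // 2 bytes: 110xxxxx 10xxxxxx
    r.resize(2);
    r[1] = static_cast<char>(0x80 | (0x3F & cp));
    r[0] = static_cast<char>(0xC0 | (0x1F & (cp >> 6)));
  } else if (cp <= 0xFFFF) {      // 3 bytes: 1110xxxx 10xxxxxx 10xxxxxx
    r.resize(3);
    r[2] = static_cast<char>(0x80 | (0x3F & cp));
    r[1] = static_cast<char>(0x80 | (0x3F & (cp >> 6)));
    r[0] = static_cast<char>(0xE0 | (0x0F & (cp >> 12)));
  } else if (cp <= 0x10FFFF) {    // 4 bytes: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
    r.resize(4);
    r[3] = static_cast<char>(0x80 | (0x3F & cp));
    r[2] = static_cast<char>(0x80 | (0x3F & (cp >> 6)));
    r[1] = static_cast<char>(0x80 | (0x3F & (cp >> 12)));
    r[0] = static_cast<char>(0xF0 | (0x07 & (cp >> 18)));
  }
  return r;
}

int main() {
  std::string s = codePointToUTF8(0x20AC); // EURO SIGN
  for (std::string::size_type i = 0; i < s.size(); ++i)
    std::printf("%02X ", static_cast<unsigned char>(s[i])); // prints E2 82 AC
  std::printf("\n");
  return 0;
}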

File diff suppressed because it is too large

View File

@@ -16,29 +16,68 @@ namespace Json {
// ////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////
ValueIteratorBase::ValueIteratorBase() ValueIteratorBase::ValueIteratorBase()
#ifndef JSON_VALUE_USE_INTERNAL_MAP
: current_(), isNull_(true) { : current_(), isNull_(true) {
} }
#else
: isArray_(true), isNull_(true) {
iterator_.array_ = ValueInternalArray::IteratorState();
}
#endif
#ifndef JSON_VALUE_USE_INTERNAL_MAP
ValueIteratorBase::ValueIteratorBase( ValueIteratorBase::ValueIteratorBase(
const Value::ObjectValues::iterator& current) const Value::ObjectValues::iterator& current)
: current_(current), isNull_(false) {} : current_(current), isNull_(false) {}
#else
ValueIteratorBase::ValueIteratorBase(
const ValueInternalArray::IteratorState& state)
: isArray_(true) {
iterator_.array_ = state;
}
ValueIteratorBase::ValueIteratorBase(
const ValueInternalMap::IteratorState& state)
: isArray_(false) {
iterator_.map_ = state;
}
#endif
Value& ValueIteratorBase::deref() const { Value& ValueIteratorBase::deref() const {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
return current_->second; return current_->second;
#else
if (isArray_)
return ValueInternalArray::dereference(iterator_.array_);
return ValueInternalMap::value(iterator_.map_);
#endif
} }
void ValueIteratorBase::increment() { void ValueIteratorBase::increment() {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
++current_; ++current_;
#else
if (isArray_)
ValueInternalArray::increment(iterator_.array_);
else
ValueInternalMap::increment(iterator_.map_);
#endif
} }
void ValueIteratorBase::decrement() { void ValueIteratorBase::decrement() {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
--current_; --current_;
#else
if (isArray_)
ValueInternalArray::decrement(iterator_.array_);
else
ValueInternalMap::decrement(iterator_.map_);
#endif
} }
ValueIteratorBase::difference_type ValueIteratorBase::difference_type
ValueIteratorBase::computeDistance(const SelfType& other) const { ValueIteratorBase::computeDistance(const SelfType& other) const {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
#ifdef JSON_USE_CPPTL_SMALLMAP #ifdef JSON_USE_CPPTL_SMALLMAP
return other.current_ - current_; return current_ - other.current_;
#else #else
// Iterator for null value are initialized using the default // Iterator for null value are initialized using the default
// constructor, which initialize current_ to the default // constructor, which initialize current_ to the default
@@ -61,58 +100,80 @@ ValueIteratorBase::computeDistance(const SelfType& other) const {
} }
return myDistance; return myDistance;
#endif #endif
#else
if (isArray_)
return ValueInternalArray::distance(iterator_.array_,
other.iterator_.array_);
return ValueInternalMap::distance(iterator_.map_, other.iterator_.map_);
#endif
} }
bool ValueIteratorBase::isEqual(const SelfType& other) const { bool ValueIteratorBase::isEqual(const SelfType& other) const {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
if (isNull_) { if (isNull_) {
return other.isNull_; return other.isNull_;
} }
return current_ == other.current_; return current_ == other.current_;
#else
if (isArray_)
return ValueInternalArray::equals(iterator_.array_, other.iterator_.array_);
return ValueInternalMap::equals(iterator_.map_, other.iterator_.map_);
#endif
} }
void ValueIteratorBase::copy(const SelfType& other) { void ValueIteratorBase::copy(const SelfType& other) {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
current_ = other.current_; current_ = other.current_;
isNull_ = other.isNull_; isNull_ = other.isNull_;
#else
if (isArray_)
iterator_.array_ = other.iterator_.array_;
else
iterator_.map_ = other.iterator_.map_;
#endif
} }
Value ValueIteratorBase::key() const { Value ValueIteratorBase::key() const {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
const Value::CZString czstring = (*current_).first; const Value::CZString czstring = (*current_).first;
if (czstring.data()) { if (czstring.c_str()) {
if (czstring.isStaticString()) if (czstring.isStaticString())
return Value(StaticString(czstring.data())); return Value(StaticString(czstring.c_str()));
return Value(czstring.data(), czstring.data() + czstring.length()); return Value(czstring.c_str());
} }
return Value(czstring.index()); return Value(czstring.index());
#else
if (isArray_)
return Value(ValueInternalArray::indexOf(iterator_.array_));
bool isStatic;
const char* memberName = ValueInternalMap::key(iterator_.map_, isStatic);
if (isStatic)
return Value(StaticString(memberName));
return Value(memberName);
#endif
} }
UInt ValueIteratorBase::index() const { UInt ValueIteratorBase::index() const {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
const Value::CZString czstring = (*current_).first; const Value::CZString czstring = (*current_).first;
if (!czstring.data()) if (!czstring.c_str())
return czstring.index(); return czstring.index();
return Value::UInt(-1); return Value::UInt(-1);
#else
if (isArray_)
return Value::UInt(ValueInternalArray::indexOf(iterator_.array_));
return Value::UInt(-1);
#endif
} }
JSONCPP_STRING ValueIteratorBase::name() const { const char* ValueIteratorBase::memberName() const {
char const* keey; #ifndef JSON_VALUE_USE_INTERNAL_MAP
char const* end; const char* name = (*current_).first.c_str();
keey = memberName(&end); return name ? name : "";
if (!keey) return JSONCPP_STRING(); #else
return JSONCPP_STRING(keey, end); if (!isArray_)
} return ValueInternalMap::key(iterator_.map_);
return "";
char const* ValueIteratorBase::memberName() const { #endif
const char* cname = (*current_).first.data();
return cname ? cname : "";
}
char const* ValueIteratorBase::memberName(char const** end) const {
const char* cname = (*current_).first.data();
if (!cname) {
*end = NULL;
return NULL;
}
*end = cname + (*current_).first.length();
return cname;
} }
// ////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////
@@ -125,12 +186,19 @@ char const* ValueIteratorBase::memberName(char const** end) const {
ValueConstIterator::ValueConstIterator() {} ValueConstIterator::ValueConstIterator() {}
#ifndef JSON_VALUE_USE_INTERNAL_MAP
ValueConstIterator::ValueConstIterator( ValueConstIterator::ValueConstIterator(
const Value::ObjectValues::iterator& current) const Value::ObjectValues::iterator& current)
: ValueIteratorBase(current) {} : ValueIteratorBase(current) {}
#else
ValueConstIterator::ValueConstIterator(
const ValueInternalArray::IteratorState& state)
: ValueIteratorBase(state) {}
ValueConstIterator::ValueConstIterator(ValueIterator const& other) ValueConstIterator::ValueConstIterator(
: ValueIteratorBase(other) {} const ValueInternalMap::IteratorState& state)
: ValueIteratorBase(state) {}
#endif
ValueConstIterator& ValueConstIterator:: ValueConstIterator& ValueConstIterator::
operator=(const ValueIteratorBase& other) { operator=(const ValueIteratorBase& other) {
@@ -148,13 +216,19 @@ operator=(const ValueIteratorBase& other) {
ValueIterator::ValueIterator() {} ValueIterator::ValueIterator() {}
#ifndef JSON_VALUE_USE_INTERNAL_MAP
ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current) ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current)
: ValueIteratorBase(current) {} : ValueIteratorBase(current) {}
#else
ValueIterator::ValueIterator(const ValueInternalArray::IteratorState& state)
: ValueIteratorBase(state) {}
ValueIterator::ValueIterator(const ValueInternalMap::IteratorState& state)
: ValueIteratorBase(state) {}
#endif
ValueIterator::ValueIterator(const ValueConstIterator& other) ValueIterator::ValueIterator(const ValueConstIterator& other)
: ValueIteratorBase(other) { : ValueIteratorBase(other) {}
throwRuntimeError("ConstIterator to Iterator should never be allowed.");
}
ValueIterator::ValueIterator(const ValueIterator& other) ValueIterator::ValueIterator(const ValueIterator& other)
: ValueIteratorBase(other) {} : ValueIteratorBase(other) {}

File diff suppressed because it is too large

View File

@@ -1,4 +1,5 @@
// DO NOT EDIT. This file (and "version") is generated by CMake. // DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// Run CMake configure step to update it. // Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED #ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED # define JSON_VERSION_H_INCLUDED
@@ -10,11 +11,4 @@
# define JSONCPP_VERSION_QUALIFIER # define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8)) # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
#ifdef JSONCPP_USING_SECURE_MEMORY
#undef JSONCPP_USING_SECURE_MEMORY
#endif
#define JSONCPP_USING_SECURE_MEMORY @JSONCPP_USE_SECURE_MEMORY@
// If non-zero, the library zeroes any memory that it has allocated before
// it frees its memory.
#endif // JSON_VERSION_H_INCLUDED #endif // JSON_VERSION_H_INCLUDED
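JSONCPP_VERSION_HEXA above packs major, minor, and patch into a single integer so versions can be compared numerically. Worked through for 0.7.0, the target of this compare:

#include <cstdio>

int main() {
  const unsigned major = 0, minor = 7, patch = 0; // version 0.7.0
  const unsigned hexa = (major << 24) | (minor << 16) | (patch << 8);
  std::printf("0x%08X\n", hexa); // prints 0x00070000
  return 0;
}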

View File

@@ -1,4 +1,7 @@
# vim: et ts=4 sts=4 sw=4 tw=0
IF(JSONCPP_LIB_BUILD_SHARED)
ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)
ADD_EXECUTABLE( jsoncpp_test ADD_EXECUTABLE( jsoncpp_test
jsontest.cpp jsontest.cpp
@@ -6,33 +9,14 @@ ADD_EXECUTABLE( jsoncpp_test
main.cpp main.cpp
) )
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL )
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib_static)
ENDIF()
# another way to solve issue #90
#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store)
# Run unit tests in post-build # Run unit tests in post-build
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?) # (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
IF(JSONCPP_WITH_POST_BUILD_UNITTEST) IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
IF(BUILD_SHARED_LIBS) ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
# First, copy the shared lib, for Microsoft. POST_BUILD
# Then, run the test executable. COMMAND $<TARGET_FILE:jsoncpp_test>)
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST)
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:jsoncpp_lib> $<TARGET_FILE_DIR:jsoncpp_test>
COMMAND $<TARGET_FILE:jsoncpp_test>)
ELSE(BUILD_SHARED_LIBS)
# Just run the test executable.
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
POST_BUILD
COMMAND $<TARGET_FILE:jsoncpp_test>)
ENDIF()
ENDIF()
SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test) SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)

View File

@@ -81,7 +81,7 @@ TestResult::TestResult()
predicateStackTail_ = &rootPredicateNode_; predicateStackTail_ = &rootPredicateNode_;
} }
void TestResult::setTestName(const JSONCPP_STRING& name) { name_ = name; } void TestResult::setTestName(const std::string& name) { name_ = name; }
TestResult& TestResult&
TestResult::addFailure(const char* file, unsigned int line, const char* expr) { TestResult::addFailure(const char* file, unsigned int line, const char* expr) {
@@ -163,7 +163,7 @@ void TestResult::printFailure(bool printTestName) const {
Failures::const_iterator itEnd = failures_.end(); Failures::const_iterator itEnd = failures_.end();
for (Failures::const_iterator it = failures_.begin(); it != itEnd; ++it) { for (Failures::const_iterator it = failures_.begin(); it != itEnd; ++it) {
const Failure& failure = *it; const Failure& failure = *it;
JSONCPP_STRING indent(failure.nestingLevel_ * 2, ' '); std::string indent(failure.nestingLevel_ * 2, ' ');
if (failure.file_) { if (failure.file_) {
printf("%s%s(%d): ", indent.c_str(), failure.file_, failure.line_); printf("%s%s(%d): ", indent.c_str(), failure.file_, failure.line_);
} }
@@ -173,19 +173,19 @@ void TestResult::printFailure(bool printTestName) const {
printf("\n"); printf("\n");
} }
if (!failure.message_.empty()) { if (!failure.message_.empty()) {
JSONCPP_STRING reindented = indentText(failure.message_, indent + " "); std::string reindented = indentText(failure.message_, indent + " ");
printf("%s\n", reindented.c_str()); printf("%s\n", reindented.c_str());
} }
} }
} }
JSONCPP_STRING TestResult::indentText(const JSONCPP_STRING& text, std::string TestResult::indentText(const std::string& text,
const JSONCPP_STRING& indent) { const std::string& indent) {
JSONCPP_STRING reindented; std::string reindented;
JSONCPP_STRING::size_type lastIndex = 0; std::string::size_type lastIndex = 0;
while (lastIndex < text.size()) { while (lastIndex < text.size()) {
JSONCPP_STRING::size_type nextIndex = text.find('\n', lastIndex); std::string::size_type nextIndex = text.find('\n', lastIndex);
if (nextIndex == JSONCPP_STRING::npos) { if (nextIndex == std::string::npos) {
nextIndex = text.size() - 1; nextIndex = text.size() - 1;
} }
reindented += indent; reindented += indent;
@@ -195,7 +195,7 @@ JSONCPP_STRING TestResult::indentText(const JSONCPP_STRING& text,
return reindented; return reindented;
} }
TestResult& TestResult::addToLastFailure(const JSONCPP_STRING& message) { TestResult& TestResult::addToLastFailure(const std::string& message) {
if (messageTarget_ != 0) { if (messageTarget_ != 0) {
messageTarget_->message_ += message; messageTarget_->message_ += message;
} }
@@ -240,9 +240,9 @@ unsigned int Runner::testCount() const {
return static_cast<unsigned int>(tests_.size()); return static_cast<unsigned int>(tests_.size());
} }
JSONCPP_STRING Runner::testNameAt(unsigned int index) const { std::string Runner::testNameAt(unsigned int index) const {
TestCase* test = tests_[index](); TestCase* test = tests_[index]();
JSONCPP_STRING name = test->testName(); std::string name = test->testName();
delete test; delete test;
return name; return name;
} }
@@ -303,7 +303,7 @@ bool Runner::runAllTest(bool printSummary) const {
} }
} }
bool Runner::testIndex(const JSONCPP_STRING& testName, bool Runner::testIndex(const std::string& testName,
unsigned int& indexOut) const { unsigned int& indexOut) const {
unsigned int count = testCount(); unsigned int count = testCount();
for (unsigned int index = 0; index < count; ++index) { for (unsigned int index = 0; index < count; ++index) {
@@ -323,10 +323,10 @@ void Runner::listTests() const {
} }
int Runner::runCommandLine(int argc, const char* argv[]) const { int Runner::runCommandLine(int argc, const char* argv[]) const {
// typedef std::deque<JSONCPP_STRING> TestNames; typedef std::deque<std::string> TestNames;
Runner subrunner; Runner subrunner;
for (int index = 1; index < argc; ++index) { for (int index = 1; index < argc; ++index) {
JSONCPP_STRING opt = argv[index]; std::string opt = argv[index];
if (opt == "--list-tests") { if (opt == "--list-tests") {
listTests(); listTests();
return 0; return 0;
@@ -426,23 +426,9 @@ void Runner::printUsage(const char* appName) {
// Assertion functions // Assertion functions
// ////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////
JSONCPP_STRING ToJsonString(const char* toConvert) {
return JSONCPP_STRING(toConvert);
}
JSONCPP_STRING ToJsonString(JSONCPP_STRING in) {
return in;
}
#if JSONCPP_USING_SECURE_MEMORY
JSONCPP_STRING ToJsonString(std::string in) {
return JSONCPP_STRING(in.data(), in.data() + in.length());
}
#endif
TestResult& checkStringEqual(TestResult& result, TestResult& checkStringEqual(TestResult& result,
const JSONCPP_STRING& expected, const std::string& expected,
const JSONCPP_STRING& actual, const std::string& actual,
const char* file, const char* file,
unsigned int line, unsigned int line,
const char* expr) { const char* expr) {

View File

@@ -32,8 +32,8 @@ class Failure {
public: public:
const char* file_; const char* file_;
unsigned int line_; unsigned int line_;
JSONCPP_STRING expr_; std::string expr_;
JSONCPP_STRING message_; std::string message_;
unsigned int nestingLevel_; unsigned int nestingLevel_;
}; };
@@ -65,7 +65,7 @@ public:
/// \internal Implementation detail for predicate macros /// \internal Implementation detail for predicate macros
PredicateContext* predicateStackTail_; PredicateContext* predicateStackTail_;
void setTestName(const JSONCPP_STRING& name); void setTestName(const std::string& name);
/// Adds an assertion failure. /// Adds an assertion failure.
TestResult& TestResult&
@@ -82,7 +82,7 @@ public:
// Generic operator that will work with anything ostream can deal with. // Generic operator that will work with anything ostream can deal with.
template <typename T> TestResult& operator<<(const T& value) { template <typename T> TestResult& operator<<(const T& value) {
JSONCPP_OSTRINGSTREAM oss; std::ostringstream oss;
oss.precision(16); oss.precision(16);
oss.setf(std::ios_base::floatfield); oss.setf(std::ios_base::floatfield);
oss << value; oss << value;
@@ -96,19 +96,19 @@ public:
TestResult& operator<<(Json::UInt64 value); TestResult& operator<<(Json::UInt64 value);
private: private:
TestResult& addToLastFailure(const JSONCPP_STRING& message); TestResult& addToLastFailure(const std::string& message);
unsigned int getAssertionNestingLevel() const; unsigned int getAssertionNestingLevel() const;
/// Adds a failure or a predicate context /// Adds a failure or a predicate context
void addFailureInfo(const char* file, void addFailureInfo(const char* file,
unsigned int line, unsigned int line,
const char* expr, const char* expr,
unsigned int nestingLevel); unsigned int nestingLevel);
static JSONCPP_STRING indentText(const JSONCPP_STRING& text, static std::string indentText(const std::string& text,
const JSONCPP_STRING& indent); const std::string& indent);
typedef std::deque<Failure> Failures; typedef std::deque<Failure> Failures;
Failures failures_; Failures failures_;
JSONCPP_STRING name_; std::string name_;
PredicateContext rootPredicateNode_; PredicateContext rootPredicateNode_;
PredicateContext::Id lastUsedPredicateId_; PredicateContext::Id lastUsedPredicateId_;
/// Failure which is the target of the messages added using operator << /// Failure which is the target of the messages added using operator <<
@@ -155,7 +155,7 @@ public:
unsigned int testCount() const; unsigned int testCount() const;
/// Returns the name of the test case at the specified index /// Returns the name of the test case at the specified index
JSONCPP_STRING testNameAt(unsigned int index) const; std::string testNameAt(unsigned int index) const;
/// Runs the test case at the specified index using the specified TestResult /// Runs the test case at the specified index using the specified TestResult
void runTestAt(unsigned int index, TestResult& result) const; void runTestAt(unsigned int index, TestResult& result) const;
@@ -168,7 +168,7 @@ private: // prevents copy construction and assignment
private: private:
void listTests() const; void listTests() const;
bool testIndex(const JSONCPP_STRING& testName, unsigned int& index) const; bool testIndex(const std::string& testName, unsigned int& index) const;
static void preventDialogOnCrash(); static void preventDialogOnCrash();
private: private:
@@ -178,8 +178,8 @@ private:
template <typename T, typename U> template <typename T, typename U>
TestResult& checkEqual(TestResult& result, TestResult& checkEqual(TestResult& result,
T expected, const T& expected,
U actual, const U& actual,
const char* file, const char* file,
unsigned int line, unsigned int line,
const char* expr) { const char* expr) {
@@ -191,15 +191,9 @@ TestResult& checkEqual(TestResult& result,
return result; return result;
} }
JSONCPP_STRING ToJsonString(const char* toConvert);
JSONCPP_STRING ToJsonString(JSONCPP_STRING in);
#if JSONCPP_USING_SECURE_MEMORY
JSONCPP_STRING ToJsonString(std::string in);
#endif
TestResult& checkStringEqual(TestResult& result, TestResult& checkStringEqual(TestResult& result,
const JSONCPP_STRING& expected, const std::string& expected,
const JSONCPP_STRING& actual, const std::string& actual,
const char* file, const char* file,
unsigned int line, unsigned int line,
const char* expr); const char* expr);
@@ -220,7 +214,7 @@ TestResult& checkStringEqual(TestResult& result,
#define JSONTEST_ASSERT_PRED(expr) \ #define JSONTEST_ASSERT_PRED(expr) \
{ \ { \
JsonTest::PredicateContext _minitest_Context = { \ JsonTest::PredicateContext _minitest_Context = { \
result_->predicateId_, __FILE__, __LINE__, #expr, NULL, NULL \ result_->predicateId_, __FILE__, __LINE__, #expr \
}; \ }; \
result_->predicateStackTail_->next_ = &_minitest_Context; \ result_->predicateStackTail_->next_ = &_minitest_Context; \
result_->predicateId_ += 1; \ result_->predicateId_ += 1; \
@@ -241,8 +235,8 @@ TestResult& checkStringEqual(TestResult& result,
/// \brief Asserts that two values are equals. /// \brief Asserts that two values are equals.
#define JSONTEST_ASSERT_STRING_EQUAL(expected, actual) \ #define JSONTEST_ASSERT_STRING_EQUAL(expected, actual) \
JsonTest::checkStringEqual(*result_, \ JsonTest::checkStringEqual(*result_, \
JsonTest::ToJsonString(expected), \ std::string(expected), \
JsonTest::ToJsonString(actual), \ std::string(actual), \
__FILE__, \ __FILE__, \
__LINE__, \ __LINE__, \
#expected " == " #actual) #expected " == " #actual)
@@ -271,8 +265,8 @@ TestResult& checkStringEqual(TestResult& result,
} \ } \
\ \
public: /* overridden from TestCase */ \ public: /* overridden from TestCase */ \
const char* testName() const JSONCPP_OVERRIDE { return #FixtureType "/" #name; } \ virtual const char* testName() const { return #FixtureType "/" #name; } \
void runTestCase() JSONCPP_OVERRIDE; \ virtual void runTestCase(); \
}; \ }; \
\ \
void Test##FixtureType##name::runTestCase() void Test##FixtureType##name::runTestCase()
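TestResult::operator<< in the diff above converts any streamable value to text through an ostringstream with raised precision, so doubles stay distinguishable in failure messages. A reduced sketch of that conversion, using a hypothetical toString helper and omitting the framework's stream-flag fiddling:

#include <cstdio>
#include <sstream>
#include <string>

// Works for anything std::ostream accepts, like TestResult::operator<<.
template <typename T>
std::string toString(const T& value) {
  std::ostringstream oss;
  oss.precision(16); // enough digits to round-trip a double
  oss << value;
  return oss.str();
}

int main() {
  std::printf("%s | %s\n",
              toString(3.14159265358979).c_str(),
              toString("abc").c_str());
  return 0;
}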

File diff suppressed because it is too large

View File

@@ -1,16 +1,10 @@
# Copyright 2007 Baptiste Lepilleur # removes all files created during testing
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""Removes all files created during testing."""
import glob import glob
import os import os
paths = [] paths = []
for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]: for pattern in [ '*.actual', '*.actual-rewrite', '*.rewrite', '*.process-output' ]:
paths += glob.glob('data/' + pattern) paths += glob.glob( 'data/' + pattern )
for path in paths: for path in paths:
os.unlink(path) os.unlink( path )

View File

@@ -1,4 +0,0 @@
// Comment for array
.=[]
// Comment within array
.[0]="one-element"

View File

@@ -1,5 +0,0 @@
// Comment for array
[
// Comment within array
"one-element"
]

View File

@@ -1,7 +1,5 @@
.={} .={}
// Comment for array
.test=[] .test=[]
// Comment within array
.test[0]={} .test[0]={}
.test[0].a="aaa" .test[0].a="aaa"
.test[1]={} .test[1]={}

View File

@@ -1,8 +1,6 @@
{ {
"test": "test":
// Comment for array
[ [
// Comment within array
{ "a" : "aaa" }, // Comment for a { "a" : "aaa" }, // Comment for a
{ "b" : "bbb" }, // Comment for b { "b" : "bbb" }, // Comment for b
{ "c" : "ccc" } // Comment for c { "c" : "ccc" } // Comment for c

View File

@@ -11,13 +11,4 @@
// Multiline comment cpp-style // Multiline comment cpp-style
// Second line // Second line
.cpp-test.c=3 .cpp-test.c=3
// Comment before double .cpp-test.d=4
.cpp-test.d=4.1
// Comment before string
.cpp-test.e="e-string"
// Comment before true
.cpp-test.f=true
// Comment before false
.cpp-test.g=false
// Comment before null
.cpp-test.h=null

View File

@@ -12,15 +12,6 @@
// Multiline comment cpp-style // Multiline comment cpp-style
// Second line // Second line
"c" : 3, "c" : 3,
// Comment before double "d" : 4
"d" : 4.1,
// Comment before string
"e" : "e-string",
// Comment before true
"f" : true,
// Comment before false
"g" : false,
// Comment before null
"h" : null
} }
} }

View File

@@ -1,17 +1,11 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from __future__ import print_function
import glob import glob
import os.path import os.path
for path in glob.glob('*.json'): for path in glob.glob( '*.json' ):
text = file(path,'rt').read() text = file(path,'rt').read()
target = os.path.splitext(path)[0] + '.expected' target = os.path.splitext(path)[0] + '.expected'
if os.path.exists(target): if os.path.exists( target ):
print('skipping:', target) print 'skipping:', target
else: else:
print('creating:', target) print 'creating:', target
file(target,'wt').write(text) file(target,'wt').write(text)

View File

@@ -1,19 +1,12 @@
# Copyright 2007 Baptiste Lepilleur # Simple implementation of a json test runner to run the test against json-py.
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""Simple implementation of a json test runner to run the test against
json-py."""
from __future__ import print_function
import sys import sys
import os.path import os.path
import json import json
import types import types
if len(sys.argv) != 2: if len(sys.argv) != 2:
print("Usage: %s input-json-file", sys.argv[0]) print "Usage: %s input-json-file", sys.argv[0]
sys.exit(3) sys.exit(3)
input_path = sys.argv[1] input_path = sys.argv[1]
@@ -22,50 +15,50 @@ actual_path = base_path + '.actual'
rewrite_path = base_path + '.rewrite' rewrite_path = base_path + '.rewrite'
rewrite_actual_path = base_path + '.actual-rewrite' rewrite_actual_path = base_path + '.actual-rewrite'
def valueTreeToString(fout, value, path = '.'): def valueTreeToString( fout, value, path = '.' ):
ty = type(value) ty = type(value)
if ty is types.DictType: if ty is types.DictType:
fout.write('%s={}\n' % path) fout.write( '%s={}\n' % path )
suffix = path[-1] != '.' and '.' or '' suffix = path[-1] != '.' and '.' or ''
names = value.keys() names = value.keys()
names.sort() names.sort()
for name in names: for name in names:
valueTreeToString(fout, value[name], path + suffix + name) valueTreeToString( fout, value[name], path + suffix + name )
elif ty is types.ListType: elif ty is types.ListType:
fout.write('%s=[]\n' % path) fout.write( '%s=[]\n' % path )
for index, childValue in zip(xrange(0,len(value)), value): for index, childValue in zip( xrange(0,len(value)), value ):
valueTreeToString(fout, childValue, path + '[%d]' % index) valueTreeToString( fout, childValue, path + '[%d]' % index )
elif ty is types.StringType: elif ty is types.StringType:
fout.write('%s="%s"\n' % (path,value)) fout.write( '%s="%s"\n' % (path,value) )
elif ty is types.IntType: elif ty is types.IntType:
fout.write('%s=%d\n' % (path,value)) fout.write( '%s=%d\n' % (path,value) )
elif ty is types.FloatType: elif ty is types.FloatType:
fout.write('%s=%.16g\n' % (path,value)) fout.write( '%s=%.16g\n' % (path,value) )
elif value is True: elif value is True:
fout.write('%s=true\n' % path) fout.write( '%s=true\n' % path )
elif value is False: elif value is False:
fout.write('%s=false\n' % path) fout.write( '%s=false\n' % path )
elif value is None: elif value is None:
fout.write('%s=null\n' % path) fout.write( '%s=null\n' % path )
else: else:
assert False and "Unexpected value type" assert False and "Unexpected value type"
def parseAndSaveValueTree(input, actual_path): def parseAndSaveValueTree( input, actual_path ):
root = json.loads(input) root = json.loads( input )
fout = file(actual_path, 'wt') fout = file( actual_path, 'wt' )
valueTreeToString(fout, root) valueTreeToString( fout, root )
fout.close() fout.close()
return root return root
def rewriteValueTree(value, rewrite_path): def rewriteValueTree( value, rewrite_path ):
rewrite = json.dumps(value) rewrite = json.dumps( value )
#rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ? #rewrite = rewrite[1:-1] # Somehow the string is quoted ! jsonpy bug ?
file(rewrite_path, 'wt').write(rewrite + '\n') file( rewrite_path, 'wt').write( rewrite + '\n' )
return rewrite return rewrite
input = file(input_path, 'rt').read() input = file( input_path, 'rt' ).read()
root = parseAndSaveValueTree(input, actual_path) root = parseAndSaveValueTree( input, actual_path )
rewrite = rewriteValueTree(json.write(root), rewrite_path) rewrite = rewriteValueTree( json.write( root ), rewrite_path )
rewrite_root = parseAndSaveValueTree(rewrite, rewrite_actual_path) rewrite_root = parseAndSaveValueTree( rewrite, rewrite_actual_path )
sys.exit(0) sys.exit( 0 )

View File

@@ -1,42 +1,17 @@
# Copyright 2007 Baptiste Lepilleur
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from __future__ import print_function
from __future__ import unicode_literals
from io import open
from glob import glob
import sys import sys
import os import os
import os.path import os.path
from glob import glob
import optparse import optparse
VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes ' VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes '
def getStatusOutput(cmd): def compareOutputs( expected, actual, message ):
"""
Return int, unicode (for both Python 2 and 3).
Note: os.popen().close() would return None for 0.
"""
print(cmd, file=sys.stderr)
pipe = os.popen(cmd)
process_output = pipe.read()
try:
# We have been using os.popen(). When we read() the result
# we get 'str' (bytes) in py2, and 'str' (unicode) in py3.
# Ugh! There must be a better way to handle this.
process_output = process_output.decode('utf-8')
except AttributeError:
pass # python3
status = pipe.close()
return status, process_output
def compareOutputs(expected, actual, message):
expected = expected.strip().replace('\r','').split('\n') expected = expected.strip().replace('\r','').split('\n')
actual = actual.strip().replace('\r','').split('\n') actual = actual.strip().replace('\r','').split('\n')
diff_line = 0 diff_line = 0
max_line_to_compare = min(len(expected), len(actual)) max_line_to_compare = min( len(expected), len(actual) )
for index in range(0,max_line_to_compare): for index in xrange(0,max_line_to_compare):
if expected[index].strip() != actual[index].strip(): if expected[index].strip() != actual[index].strip():
diff_line = index + 1 diff_line = index + 1
break break
@@ -44,7 +19,7 @@ def compareOutputs(expected, actual, message):
diff_line = max_line_to_compare+1 diff_line = max_line_to_compare+1
if diff_line == 0: if diff_line == 0:
return None return None
def safeGetLine(lines, index): def safeGetLine( lines, index ):
index += -1 index += -1
if index >= len(lines): if index >= len(lines):
return '' return ''
@@ -54,85 +29,85 @@ def compareOutputs(expected, actual, message):
                 Actual:   '%s'
 """ % (message, diff_line,
        safeGetLine(expected,diff_line),
-       safeGetLine(actual,diff_line))
+       safeGetLine(actual,diff_line) )

-def safeReadFile(path):
+def safeReadFile( path ):
     try:
-        return open(path, 'rt', encoding = 'utf-8').read()
-    except IOError as e:
+        return file( path, 'rt' ).read()
+    except IOError, e:
         return '<File "%s" is missing: %s>' % (path,e)

-def runAllTests(jsontest_executable_path, input_dir = None,
-                use_valgrind=False, with_json_checker=False,
-                writerClass='StyledWriter'):
+def runAllTests( jsontest_executable_path, input_dir = None,
+                 use_valgrind=False, with_json_checker=False ):
     if not input_dir:
-        input_dir = os.path.join(os.getcwd(), 'data')
-    tests = glob(os.path.join(input_dir, '*.json'))
+        input_dir = os.path.join( os.getcwd(), 'data' )
+    tests = glob( os.path.join( input_dir, '*.json' ) )
     if with_json_checker:
-        test_jsonchecker = glob(os.path.join(input_dir, '../jsonchecker', '*.json'))
+        test_jsonchecker = glob( os.path.join( input_dir, '../jsonchecker', '*.json' ) )
     else:
         test_jsonchecker = []
     failed_tests = []
     valgrind_path = use_valgrind and VALGRIND_CMD or ''
     for input_path in tests + test_jsonchecker:
-        expect_failure = os.path.basename(input_path).startswith('fail')
+        expect_failure = os.path.basename( input_path ).startswith( 'fail' )
         is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
-        print('TESTING:', input_path, end=' ')
+        print 'TESTING:', input_path,
         options = is_json_checker_test and '--json-checker' or ''
-        options += ' --json-writer %s'%writerClass
-        cmd = '%s%s %s "%s"' % (valgrind_path, jsontest_executable_path, options,
-                                input_path)
-        status, process_output = getStatusOutput(cmd)
+        pipe = os.popen( "%s%s %s %s" % (
+            valgrind_path, jsontest_executable_path, options,
+            input_path) )
+        process_output = pipe.read()
+        status = pipe.close()
         if is_json_checker_test:
             if expect_failure:
-                if not status:
-                    print('FAILED')
-                    failed_tests.append((input_path, 'Parsing should have failed:\n%s' %
-                                         safeReadFile(input_path)))
+                if status is None:
+                    print 'FAILED'
+                    failed_tests.append( (input_path, 'Parsing should have failed:\n%s' %
+                                          safeReadFile(input_path)) )
                 else:
-                    print('OK')
+                    print 'OK'
             else:
-                if status:
-                    print('FAILED')
-                    failed_tests.append((input_path, 'Parsing failed:\n' + process_output))
+                if status is not None:
+                    print 'FAILED'
+                    failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
                 else:
-                    print('OK')
+                    print 'OK'
         else:
             base_path = os.path.splitext(input_path)[0]
-            actual_output = safeReadFile(base_path + '.actual')
-            actual_rewrite_output = safeReadFile(base_path + '.actual-rewrite')
-            open(base_path + '.process-output', 'wt', encoding = 'utf-8').write(process_output)
+            actual_output = safeReadFile( base_path + '.actual' )
+            actual_rewrite_output = safeReadFile( base_path + '.actual-rewrite' )
+            file(base_path + '.process-output','wt').write( process_output )
             if status:
-                print('parsing failed')
-                failed_tests.append((input_path, 'Parsing failed:\n' + process_output))
+                print 'parsing failed'
+                failed_tests.append( (input_path, 'Parsing failed:\n' + process_output) )
             else:
                 expected_output_path = os.path.splitext(input_path)[0] + '.expected'
-                expected_output = open(expected_output_path, 'rt', encoding = 'utf-8').read()
-                detail = (compareOutputs(expected_output, actual_output, 'input')
-                          or compareOutputs(expected_output, actual_rewrite_output, 'rewrite'))
+                expected_output = file( expected_output_path, 'rt' ).read()
+                detail = ( compareOutputs( expected_output, actual_output, 'input' )
+                            or compareOutputs( expected_output, actual_rewrite_output, 'rewrite' ) )
                 if detail:
-                    print('FAILED')
-                    failed_tests.append((input_path, detail))
+                    print 'FAILED'
+                    failed_tests.append( (input_path, detail) )
                 else:
-                    print('OK')
+                    print 'OK'
     if failed_tests:
-        print()
-        print('Failure details:')
+        print
+        print 'Failure details:'
         for failed_test in failed_tests:
-            print('* Test', failed_test[0])
-            print(failed_test[1])
-        print()
-        print('Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
-                                                       len(failed_tests)))
+            print '* Test', failed_test[0]
+            print failed_test[1]
+        print
+        print 'Test results: %d passed, %d failed.' % (len(tests)-len(failed_tests),
+                                                       len(failed_tests) )
         return 1
     else:
-        print('All %d tests passed.' % len(tests))
+        print 'All %d tests passed.' % len(tests)
         return 0

 def main():
     from optparse import OptionParser
-    parser = OptionParser(usage="%prog [options] <path to jsontestrunner.exe> [test case directory]")
+    parser = OptionParser( usage="%prog [options] <path to jsontestrunner.exe> [test case directory]" )
     parser.add_option("--valgrind",
                       action="store_true", dest="valgrind", default=False,
                       help="run all the tests using valgrind to detect memory leaks")
@@ -143,32 +118,17 @@ def main():
     options, args = parser.parse_args()
     if len(args) < 1 or len(args) > 2:
-        parser.error('Must provides at least path to jsontestrunner executable.')
-        sys.exit(1)
+        parser.error( 'Must provides at least path to jsontestrunner executable.' )
+        sys.exit( 1 )
-    jsontest_executable_path = os.path.normpath(os.path.abspath(args[0]))
+    jsontest_executable_path = os.path.normpath( os.path.abspath( args[0] ) )
     if len(args) > 1:
-        input_path = os.path.normpath(os.path.abspath(args[1]))
+        input_path = os.path.normpath( os.path.abspath( args[1] ) )
     else:
         input_path = None
-    status = runAllTests(jsontest_executable_path, input_path,
-                         use_valgrind=options.valgrind,
-                         with_json_checker=options.with_json_checker,
-                         writerClass='StyledWriter')
-    if status:
-        sys.exit(status)
-    status = runAllTests(jsontest_executable_path, input_path,
-                         use_valgrind=options.valgrind,
-                         with_json_checker=options.with_json_checker,
-                         writerClass='StyledStreamWriter')
-    if status:
-        sys.exit(status)
-    status = runAllTests(jsontest_executable_path, input_path,
-                         use_valgrind=options.valgrind,
-                         with_json_checker=options.with_json_checker,
-                         writerClass='BuiltStyledStreamWriter')
-    if status:
-        sys.exit(status)
+    status = runAllTests( jsontest_executable_path, input_path,
+                          use_valgrind=options.valgrind, with_json_checker=options.with_json_checker )
+    sys.exit( status )

 if __name__ == '__main__':
     main()
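
On the 1.7.6 side, main() runs the whole suite three times, varying only the writerClass argument between the three writer implementations. The repetition could be folded into a loop; a sketch of that refactoring (this fragment would sit inside main() and is not a change that exists in either tag):

    for writer_class in ('StyledWriter', 'StyledStreamWriter',
                         'BuiltStyledStreamWriter'):
        status = runAllTests(jsontest_executable_path, input_path,
                             use_valgrind=options.valgrind,
                             with_json_checker=options.with_json_checker,
                             writerClass=writer_class)
        if status:
            # Stop at the first writer whose test run fails.
            sys.exit(status)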


@@ -1,72 +1,61 @@
-# Copyright 2009 Baptiste Lepilleur
-# Distributed under MIT license, or public domain if desired and
-# recognized in your jurisdiction.
-# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-from __future__ import print_function
-from __future__ import unicode_literals
-from io import open
-from glob import glob
 import sys
 import os
 import os.path
 import subprocess
+from glob import glob
 import optparse

 VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'

 class TestProxy(object):
-    def __init__(self, test_exe_path, use_valgrind=False):
-        self.test_exe_path = os.path.normpath(os.path.abspath(test_exe_path))
+    def __init__( self, test_exe_path, use_valgrind=False ):
+        self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
         self.use_valgrind = use_valgrind

-    def run(self, options):
+    def run( self, options ):
         if self.use_valgrind:
             cmd = VALGRIND_CMD.split()
         else:
             cmd = []
-        cmd.extend([self.test_exe_path, '--test-auto'] + options)
-        try:
-            process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        except:
-            print(cmd)
-            raise
+        cmd.extend( [self.test_exe_path, '--test-auto'] + options )
+        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
         stdout = process.communicate()[0]
         if process.returncode:
             return False, stdout
         return True, stdout

-def runAllTests(exe_path, use_valgrind=False):
-    test_proxy = TestProxy(exe_path, use_valgrind=use_valgrind)
-    status, test_names = test_proxy.run(['--list-tests'])
+def runAllTests( exe_path, use_valgrind=False ):
+    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
+    status, test_names = test_proxy.run( ['--list-tests'] )
     if not status:
-        print("Failed to obtain unit tests list:\n" + test_names, file=sys.stderr)
+        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
         return 1
-    test_names = [name.strip() for name in test_names.decode('utf-8').strip().split('\n')]
+    test_names = [name.strip() for name in test_names.strip().split('\n')]
     failures = []
     for name in test_names:
-        print('TESTING %s:' % name, end=' ')
-        succeed, result = test_proxy.run(['--test', name])
+        print 'TESTING %s:' % name,
+        succeed, result = test_proxy.run( ['--test', name] )
         if succeed:
-            print('OK')
+            print 'OK'
         else:
-            failures.append((name, result))
-            print('FAILED')
+            failures.append( (name, result) )
+            print 'FAILED'
     failed_count = len(failures)
     pass_count = len(test_names) - failed_count
     if failed_count:
-        print()
+        print
         for name, result in failures:
-            print(result)
-        print('%d/%d tests passed (%d failure(s))' % ( pass_count, len(test_names), failed_count))
+            print result
+        print '%d/%d tests passed (%d failure(s))' % (
+            pass_count, len(test_names), failed_count)
         return 1
     else:
-        print('All %d tests passed' % len(test_names))
+        print 'All %d tests passed' % len(test_names)
         return 0

 def main():
     from optparse import OptionParser
-    parser = OptionParser(usage="%prog [options] <path to test_lib_json.exe>")
+    parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
     parser.add_option("--valgrind",
                       action="store_true", dest="valgrind", default=False,
                       help="run all the tests using valgrind to detect memory leaks")
@@ -74,11 +63,11 @@ def main():
     options, args = parser.parse_args()
     if len(args) != 1:
-        parser.error('Must provides at least path to test_lib_json executable.')
-        sys.exit(1)
+        parser.error( 'Must provides at least path to test_lib_json executable.' )
+        sys.exit( 1 )
-    exit_code = runAllTests(args[0], use_valgrind=options.valgrind)
-    sys.exit(exit_code)
+    exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
+    sys.exit( exit_code )

 if __name__ == '__main__':
     main()


@@ -1,31 +0,0 @@
#!/usr/bin/env sh
# This is called by `.travis.yml` via Travis CI.
# Travis supplies $TRAVIS_OS_NAME.
# http://docs.travis-ci.com/user/multi-os/
# Our .travis.yml also defines:
# - SHARED_LIB=ON/OFF
# - STATIC_LIB=ON/OFF
# - CMAKE_PKG=ON/OFF
# - BUILD_TYPE=release/debug
# - VERBOSE_MAKE=false/true
# - VERBOSE (set or not)
# -e: fail on error
# -v: show commands
# -x: show expanded commands
set -vex
env | sort
cmake -DJSONCPP_WITH_CMAKE_PACKAGE=$CMAKE_PKG -DBUILD_SHARED_LIBS=$SHARED_LIB -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE .
make
cmake -DJSONCPP_WITH_CMAKE_PACKAGE=$CMAKE_PKG -DBUILD_SHARED_LIBS=$SHARED_LIB -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE -DJSONCPP_USE_SECURE_MEMORY=1 .
make
# Python is not available in Travis for osx.
# https://github.com/travis-ci/travis-ci/issues/2320
if [ "$TRAVIS_OS_NAME" != "osx" ]
then
make jsoncpp_check
valgrind --error-exitcode=42 --leak-check=full ./src/test_lib_json/jsoncpp_test
fi


@@ -1 +1 @@
-1.7.6
+0.7.0


@@ -1 +0,0 @@
@JSONCPP_VERSION@