Mirror of https://github.com/open-source-parsers/jsoncpp.git
Synced 2025-11-07 05:58:42 +01:00

Compare commits: svn-releas...svn-import (108 commits)
108 commits in this range. The mirror captured only the SHA1 of each commit; the Author and Date columns were empty:

6764059395, 5d32295a6e, 68db655347, 642befc836, 77cd83890d, 09439b7bc7,
ea0797351f, 94d17e9fdf, a3f19c23a0, d2618806ba, 36400ac0c1, 32ffb931e7,
bb53cd0899, 4c531bb584, 42d918b7aa, 700b38020e, 7b62ceacee, cb5ae30f6e,
58b6541478, 1ccfdfcb9b, 71860de813, c515b8ec30, 5fff185aa4, 10712e85d6,
53c08ad916, 79e90fba0b, ce277aa6e4, eafd702a17, a8afdd40af, f92ace5e82,
3f124172ce, f8715856f3, 42321f24a6, aff1171153, ae3c7a7aab, f572e8e42e,
2b853c4067, 7c507d7eba, c3763f55da, 3b3540d9ef, 9d317c3794, 03288e8eb6,
c9f91dd929, 2ba3bc3252, ac5df77bbc, 468564b3fe, dc0f736f59, 139da63aef,
d496e044b1, f587e6a420, f0b24e705f, e807a7640e, d3cd9a7fc5, a2fb7fb918,
4b819c2309, c649badb95, a9eb1eccc0, 6ffff91c54, acdefb0869, c025697ea5,
b0ec41c3e3, 2a2b5cf3ad, b6620e2801, ccde848fd1, e082248001, 7b5edd9859,
e91a68cb9e, 1b138e8544, 4f081b50e6, 3c9fdeb859, 4b79fd1a00, e12d84ebaa,
078e0d7c37, fee49b1a37, 22eede44c1, d9ec234fc2, 3e5b347f75, 96408a30e1,
1d648f089a, f40c880585, 39ba2dbea9, a761530f14, ae9ffb5443, e656c5fa2d,
f1053e7acb, e3d0eca9f4, a77a803c85, 785ba2675d, 3b556ec633, 5fb0f09cbb,
73911f2e33, d21c256fae, 72c406b550, eadc478e50, 1837a1c508, e3cc0f004b,
fb17080142, e0e1fd37cd, d0a9f3d98d, 7953a801c1, df4de558c3, 62d7bc75db,
224a1aee72, 40388494bd, bafb43c203, 64e40aafe5, 91923f2cbc, 13698b5835
.gitignore (vendored, new file, +10)

@@ -0,0 +1,10 @@
+*.pyc
+*.swp
+
+*.actual
+*.actual-rewrite
+*.process-output
+*.rewrite
+bin/
+buildscons/
+libs/
.travis.yml (new file, +23)

@@ -0,0 +1,23 @@
+# Build matrix / environment variable are explained on:
+# http://about.travis-ci.org/docs/user/build-configuration/
+# This file can be validated on:
+# http://lint.travis-ci.org/
+before_install: sudo apt-get install cmake
+language: cpp
+compiler:
+  - gcc
+  - clang
+script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make
+env:
+  global:
+    - JSONCPP_CONTINUOUS_INTERATION=1
+  matrix:
+    - SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false
+    - SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false
+    - SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE_MAKE=true
+notifications:
+  recipients:
+    - baptiste.lepilleur@gmail.com
+  email:
+    on_success: change
+    on_failure: always
CMakeLists.txt (new file, +90)

@@ -0,0 +1,90 @@
+CMAKE_MINIMUM_REQUIRED(VERSION 2.6)
+PROJECT(jsoncpp)
+ENABLE_TESTING()
+
+OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON)
+OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
+OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)
+
+# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
+IF(NOT WIN32)
+    IF(NOT CMAKE_BUILD_TYPE)
+        SET(CMAKE_BUILD_TYPE Release CACHE STRING
+            "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage."
+            FORCE)
+    ENDIF(NOT CMAKE_BUILD_TYPE)
+ENDIF(NOT WIN32)
+
+# This ensures shared DLL are in the same dir as executable on Windows.
+# Put all executables / libraries are in a project global directory.
+SET(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
+    CACHE PATH "Single directory for all static libraries.")
+SET(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/lib
+    CACHE PATH "Single directory for all dynamic libraries on Unix.")
+SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${PROJECT_BINARY_DIR}/bin
+    CACHE PATH "Single directory for all executable and dynamic libraries on Windows.")
+MARK_AS_ADVANCED( CMAKE_RUNTIME_OUTPUT_DIRECTORY CMAKE_LIBRARY_OUTPUT_DIRECTORY CMAKE_ARCHIVE_OUTPUT_DIRECTORY )
+
+# Set variable named ${VAR_NAME} to value ${VALUE}
+FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
+    SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
+ENDFUNCTION(set_using_dynamic_name)
+
+# Extract major, minor, patch and qualifier from version text
+# Parse a version string "X.Y.Z[-qualifier]" and outputs
+# version parts in ${OUPUT_PREFIX}_MAJOR, _MINOR, _PATCH, _QUALIFIER.
+# If parse succed then ${OUPUT_PREFIX}_FOUND is TRUE.
+MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
+    SET(VERSION_REGEX "[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9_]+)?")
+    IF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
+        STRING(REGEX MATCHALL "[0-9]+|-([A-Za-z0-9_]+)" VERSION_PARTS ${VERSION_TEXT})
+        list(APPEND VERSION_PARTS "") # empty qualifier to handle no qualifier case
+        LIST(GET VERSION_PARTS 0 ${OUPUT_PREFIX}_MAJOR)
+        LIST(GET VERSION_PARTS 1 ${OUPUT_PREFIX}_MINOR)
+        LIST(GET VERSION_PARTS 2 ${OUPUT_PREFIX}_PATCH)
+        LIST(GET VERSION_PARTS 3 ${OUPUT_PREFIX}_QUALIFIER)
+        set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE )
+    ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
+        set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE )
+    ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
+ENDMACRO(jsoncpp_parse_version)
+
+# Read out version from "version" file
+FILE(STRINGS "version" JSONCPP_VERSION)
+
+jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
+IF(NOT JSONCPP_VERSION_FOUND)
+    MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z[-qualifier]")
+ENDIF(NOT JSONCPP_VERSION_FOUND)
+
+MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}${JSONCPP_VERSION_QUALIFIER}")
+# File version.h is only regenerated on CMake configure step
+CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
+                "${PROJECT_SOURCE_DIR}/include/json/version.h" )
+
+macro(UseCompilationWarningAsError)
+    if ( MSVC )
+        # Only enabled in debug because some old versions of VS STL generate
+        # warnings when compiled in release configuration.
+        set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ")
+    endif( MSVC )
+endmacro()
+
+# Include our configuration header
+INCLUDE_DIRECTORIES( ${CMAKE_SOURCE_DIR}/include )
+
+if ( MSVC )
+    # Only enabled in debug because some old versions of VS STL generate
+    # unreachable code warning when compiled in release configuration.
+    set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
+endif( MSVC )
+
+IF(JSONCPP_WITH_WARNING_AS_ERROR)
+    UseCompilationWarningAsError()
+ENDIF(JSONCPP_WITH_WARNING_AS_ERROR)
+
+# Build the different applications
+ADD_SUBDIRECTORY( src )
+
+#install the includes
+ADD_SUBDIRECTORY( include )
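The CONFIGURE_FILE step above regenerates include/json/version.h from src/lib_json/version.h.in at configure time. As a minimal sketch (not part of this diff), a consumer could read the version macros that the NEWS.txt entry below says this header defines: JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH and JSONCPP_VERSION_HEXA. The exact bit layout of the HEXA macro is not shown in this diff, so the hex print is illustrative only.

```cpp
// Hypothetical consumer of the generated json/version.h (sketch only;
// macro names taken from the NEWS.txt entry in this same commit range).
#include <json/version.h>
#include <cstdio>

int main() {
    std::printf("Built against JsonCpp %d.%d.%d (hexa: 0x%08x)\n",
                JSONCPP_VERSION_MAJOR,
                JSONCPP_VERSION_MINOR,
                JSONCPP_VERSION_PATCH,
                JSONCPP_VERSION_HEXA);
    return 0;
}
```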
NEWS.txt (84 lines changed)

@@ -1,3 +1,69 @@
+New in SVN
+----------
+
+* Updated the type system's behavior, in order to better support backwards
+  compatibility with code that was written before 64-bit integer support was
+  introduced. Here's how it works now:
+
+  * isInt, isInt64, isUInt, and isUInt64 return true if and only if the
+    value can be exactly represented as that type. In particular, a value
+    constructed with a double like 17.0 will now return true for all of
+    these methods.
+
+  * isDouble and isFloat now return true for all numeric values, since all
+    numeric values can be converted to a double or float without
+    truncation. Note however that the conversion may not be exact -- for
+    example, doubles cannot exactly represent all integers above 2^53 + 1.
+
+  * isBool, isNull, isString, isArray, and isObject now return true if and
+    only if the value is of that type.
+
+  * isConvertibleTo(fooValue) indicates that it is safe to call asFoo.
+    (For each type foo, isFoo always implies isConvertibleTo(fooValue).)
+    asFoo returns an approximate or exact representation as appropriate.
+    For example, a double value may be truncated when asInt is called.
+
+  * For backwards compatibility with old code, isConvertibleTo(intValue)
+    may return false even if type() == intValue. This is because the value
+    may have been constructed with a 64-bit integer larger than maxInt,
+    and calling asInt() would cause an exception. If you're writing new
+    code, use isInt64 to find out whether the value is exactly
+    representable using an Int64, or asDouble() combined with minInt64 and
+    maxInt64 to figure out whether it is approximately representable.
+
+* Value
+  - Patch #10: BOOST_FOREACH compatibility. Made Json::iterator more
+    standard compliant, added missing iterator_category and value_type
+    typedefs (contribued by Robert A. Iannucci).
+
+* Compilation
+
+  - New CMake based build system. Based in part on contribution from
+    Igor Okulist and Damien Buhl (Patch #14).
+
+  - New header json/version.h now contains version number macros
+    (JSONCPP_VERSION_MAJOR, JSONCPP_VERSION_MINOR, JSONCPP_VERSION_PATCH
+    and JSONCPP_VERSION_HEXA).
+
+  - Patch #11: added missing JSON_API on some classes causing link issues
+    when building as a dynamic library on Windows
+    (contributed by Francis Bolduc).
+
+  - Visual Studio DLL: suppressed warning "C4251: <data member>: <type>
+    needs to have dll-interface to be used by..." via pragma push/pop
+    in json-cpp headers.
+
+  - Added Travis CI intregration: https://travis-ci.org/blep/jsoncpp-mirror
+
+* Bug fixes
+  - Patch #15: Copy constructor does not initialize allocated_ for stringValue
+    (contributed by rmongia).
+
+  - Patch #16: Missing field copy in Json::Value::iterator causing infinite
+    loop when using experimental internal map (#define JSON_VALUE_USE_INTERNAL_MAP)
+    (contributed by Ming-Lin Kao).
+
+
 New in JsonCpp 0.6.0:
 ---------------------
 
@@ -13,8 +79,8 @@
 Notes: you need to setup the environment by running vcvars32.bat
 (e.g. MSVC 2008 command prompt in start menu) before running scons.
 
-- Added support for amalgated source and header generation (a la sqlite).
-  Refer to README.txt section "Generating amalgated source and header"
+- Added support for amalgamated source and header generation (a la sqlite).
+  Refer to README.txt section "Generating amalgamated source and header"
   for detail.
 
 * Value
@@ -86,6 +152,20 @@
 
 - Bug #3139678: stack buffer overflow when parsing a double with a
   length of 32 characters.
+
+- Fixed Value::operator <= implementation (had the semantic of operator >=).
+  Found when adding unit tests for comparison operators.
+
+- Value::compare() is now const and has an actual implementation with
+  unit tests.
+
+- Bug #2407932: strpbrk() can fail for NULL pointer.
+
+- Bug #3306345: Fixed minor typo in Path::resolve().
+
+- Bug #3314841/#3306896: errors in amalgamate.py
+
+- Fixed some Coverity warnings and line-endings.
+
 * License
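A minimal sketch of the type-system semantics described in the "New in SVN" entry above, using the Json::Value methods it names (isInt, isInt64, isDouble, asInt). The chosen literals are illustrative only:

```cpp
// Sketch only: demonstrates the post-change semantics from NEWS.txt.
#include <json/json.h>
#include <cassert>

int main() {
    Json::Value v(17.0);           // constructed from a double, but integral in value
    assert(v.isDouble());          // every numeric value reports isDouble()/isFloat()
    assert(v.isInt());             // 17.0 is exactly representable as an int
    assert(v.isInt64() && v.isUInt() && v.isUInt64());
    assert(v.asInt() == 17);       // asFoo() gives an exact or approximate representation

    Json::Value big(3500000000.0); // integral, but exceeds the 32-bit signed int range
    assert(!big.isInt());          // not exactly representable as Int
    assert(big.isInt64());         // exactly representable as Int64
    assert(big.isDouble());
    return 0;
}
```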
README.txt (88 lines changed)

@@ -13,9 +13,66 @@ making it a convenient format to store user input files.
 
 Unserialization parsing is user friendly and provides precise error reports.
 
-* Building/Testing:
-  =================
+* Using json-cpp in your project:
+  ===============================
+
+The recommended approach to integrate json-cpp in your project is to
+build the the amalgamated source (a single .cpp) with your own build
+system. This ensures compilation flags consistency and ABI compatibility.
+
+See section "Generating amalgamated source and header" to generate them
+from the source distribution.
+
+Directory include/ should be added to your compiler include path.
+json-cpp headers should be included as follow:
+
+#include <json/json.h>
+
+If json-cpp was build as a dynamic library on Windows, then your project
+need to define macro "JSON_DLL" to JSON_API should import exported symbols.
+
+* Building/Testing with new CMake build system:
+  =============================================
+
+CMake is a C++ Makefiles/Solution generator that can be downloaded from:
+http://www.cmake.org
+
+It is usually available on most Linux system as package. On Ubuntu:
+sudo apt-get install cmake
+
+Notes that python is also required to run JSON reader/writer tests. If
+missing, the build will skip running those tests.
+
+When running CMake, a few parameters are required:
+- a build directory where the makefiles/solution are generated. It is
+  also used to store objects, libraries and executables files.
+- the generator to use: makefiles or Visual Studio solution? What version
+  or Visual Studio, 32 or 64 bits solution?
+
+Generating solution/makefiles using cmake-gui:
+- Makes "source code" points the source directory
+- Makes "where to build the binary" points to the directory to use for
+  the build.
+- Click on the "Grouped" check box
+- Review JsonCpp build option (tick JSONCPP_LIB_BUILD_SHARED to build as
+  a dynamic library)
+- Click configure button at the bottom, then the generate button.
+- The generated solution/makefiles can be found in the binary directory.
+
+Alternatively, from the command-line on Unix in the source directory:
+
+mkdir -p ../build/debug
+cd ../build/debug
+cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../../jsoncpp-src
+make
+
+Running "cmake -h" will display the list of available generators (passed as -G option).
+
+By default CMake hides compilation command-line. This can be modified by specifying:
+-DCMAKE_VERBOSE_MAKEFILE=true when generating makefiles.
+
+* Building/Testing with the legacy build system based on SCons:
+  =============================================================
+
 JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires
 python to be installed (http://www.python.org).
@@ -47,7 +104,6 @@ to do so.
 
 and TARGET may be:
    check: build library and run unit tests.
 
-
 * Running the test manually:
   ==========================
@@ -90,39 +146,30 @@ Notes that the documentation is also available for download as a tarball.
 The documentation of the latest release is available online at:
 http://jsoncpp.sourceforge.net/
 
-* Generating amalgated source and header
-  ======================================
+* Generating amalgamated source and header
+  ========================================
 
 JsonCpp is provided with a script to generate a single header and a single
 source file to ease inclusion in an existing project.
 
-The amalgated source can be generated at any time by running the following
+The amalgamated source can be generated at any time by running the following
 command from the top-directory (requires python 2.6):
 
-python amalgate.py
+python amalgamate.py
 
 It is possible to specify header name. See -h options for detail. By default,
 the following files are generated:
 - dist/jsoncpp.cpp: source file that need to be added to your project
 - dist/json/json.h: header file corresponding to use in your project. It is
-  equivalent to including json/json.h in non-amalgated source. This header
+  equivalent to including json/json.h in non-amalgamated source. This header
   only depends on standard headers.
 - dist/json/json-forwards.h: header the provides forward declaration
   of all JsonCpp types. This typically what should be included in headers to
   speed-up compilation.
 
-The amalgated sources are generated by concatenating JsonCpp source in the
-correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of
-other headers.
+The amalgamated sources are generated by concatenating JsonCpp source in the
+correct order and defining macro JSON_IS_AMALGAMATION to prevent inclusion
+of other headers.
 
-* Using json-cpp in your project:
-  ===============================
-
-include/ should be added to your compiler include path. jsoncpp headers
-should be included as follow:
-
-#include <json/json.h>
-
-
 * Adding a reader/writer test:
   ============================
@@ -170,3 +217,4 @@ test_complex_01.process-output: jsontest.exe output, typically useful to
 
 See file LICENSE for details. Basically JsonCpp is licensed under
 MIT license, or public domain if desired and recognized in your jurisdiction.
+
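Putting the README's integration advice together, here is a minimal consumer sketch: compile dist/jsoncpp.cpp with your project, include the amalgamated header, parse a document, and read values back. Json::Reader and Json::Value are the public API behind the README's #include <json/json.h>; the error-reporting accessor name is assumed from the jsoncpp API of this period.

```cpp
// Sketch only, assuming the amalgamated dist/jsoncpp.cpp is built into the project.
#include <json/json.h>
#include <iostream>
#include <string>

int main() {
    const std::string doc = "{ \"encoding\": \"UTF-8\", \"indent\": { \"length\": 3 } }";
    Json::Value root;
    Json::Reader reader;
    if (!reader.parse(doc, root)) {                      // "precise error reports"
        std::cerr << reader.getFormattedErrorMessages(); // assumed accessor of this era
        return 1;
    }
    std::cout << root["encoding"].asString() << "\n";      // -> UTF-8
    std::cout << root["indent"]["length"].asInt() << "\n"; // -> 3
    return 0;
}
```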
amalgate.py (147 → 150 lines; file name taken from the script's own docstring, as the mirror dropped the diff header)

@@ -1,147 +1,150 @@
 """Amalgate json-cpp library sources into a single source and header file.
 
 Requires Python 2.6
 
 Example of invocation (must be invoked from json-cpp top directory):
 python amalgate.py
 """
 import os
 import os.path
 import sys
 
-class AmalagatedFile:
+class AmalgamationFile:
     def __init__( self, top_dir ):
         self.top_dir = top_dir
         self.blocks = []
 
     def add_text( self, text ):
         if not text.endswith( '\n' ):
             text += '\n'
         self.blocks.append( text )
 
     def add_file( self, relative_input_path, wrap_in_comment=False ):
         def add_marker( prefix ):
             self.add_text( '' )
             self.add_text( '// ' + '/'*70 )
             self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) )
             self.add_text( '// ' + '/'*70 )
             self.add_text( '' )
         add_marker( 'Beginning' )
         f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' )
         content = f.read()
         if wrap_in_comment:
             content = '/*\n' + content + '\n*/'
         self.add_text( content )
         f.close()
         add_marker( 'End' )
         self.add_text( '\n\n\n\n' )
 
     def get_value( self ):
         return ''.join( self.blocks ).replace('\r\n','\n')
 
     def write_to( self, output_path ):
         output_dir = os.path.dirname( output_path )
         if output_dir and not os.path.isdir( output_dir ):
             os.makedirs( output_dir )
         f = open( output_path, 'wb' )
         f.write( self.get_value() )
         f.close()
 
-def amalgate_source( source_top_dir=None,
+def amalgamate_source( source_top_dir=None,
                      target_source_path=None,
                      header_include_path=None ):
     """Produces amalgated source.
     Parameters:
         source_top_dir: top-directory
         target_source_path: output .cpp path
         header_include_path: generated header path relative to target_source_path.
     """
     print 'Amalgating header...'
-    header = AmalagatedFile( source_top_dir )
+    header = AmalgamationFile( source_top_dir )
     header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' )
     header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
     header.add_file( 'LICENSE', wrap_in_comment=True )
     header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' )
     header.add_text( '# define JSON_AMALGATED_H_INCLUDED' )
     header.add_text( '/// If defined, indicates that the source file is amalgated' )
     header.add_text( '/// to prevent private header inclusion.' )
-    header.add_text( '#define JSON_IS_AMALGATED' )
+    header.add_text( '#define JSON_IS_AMALGAMATION' )
+    header.add_file( 'include/json/version.h' )
     header.add_file( 'include/json/config.h' )
     header.add_file( 'include/json/forwards.h' )
     header.add_file( 'include/json/features.h' )
     header.add_file( 'include/json/value.h' )
     header.add_file( 'include/json/reader.h' )
     header.add_file( 'include/json/writer.h' )
+    header.add_file( 'include/json/assertions.h' )
     header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' )
 
     target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
     print 'Writing amalgated header to %r' % target_header_path
     header.write_to( target_header_path )
 
     base, ext = os.path.splitext( header_include_path )
     forward_header_include_path = base + '-forwards' + ext
     print 'Amalgating forward header...'
-    header = AmalagatedFile( source_top_dir )
+    header = AmalgamationFile( source_top_dir )
     header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' )
     header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path )
     header.add_text( '/// This header provides forward declaration for all JsonCpp types.' )
     header.add_file( 'LICENSE', wrap_in_comment=True )
     header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
     header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' )
     header.add_text( '/// If defined, indicates that the source file is amalgated' )
     header.add_text( '/// to prevent private header inclusion.' )
-    header.add_text( '#define JSON_IS_AMALGATED' )
+    header.add_text( '#define JSON_IS_AMALGAMATION' )
     header.add_file( 'include/json/config.h' )
     header.add_file( 'include/json/forwards.h' )
     header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
 
     target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
                                                forward_header_include_path )
     print 'Writing amalgated forward header to %r' % target_forward_header_path
     header.write_to( target_forward_header_path )
 
     print 'Amalgating source...'
-    source = AmalagatedFile( source_top_dir )
+    source = AmalgamationFile( source_top_dir )
     source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' )
     source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
     source.add_file( 'LICENSE', wrap_in_comment=True )
     source.add_text( '' )
     source.add_text( '#include <%s>' % header_include_path )
     source.add_text( '' )
-    source.add_file( 'src/lib_json\json_tool.h' )
-    source.add_file( 'src/lib_json\json_reader.cpp' )
-    source.add_file( 'src/lib_json\json_batchallocator.h' )
-    source.add_file( 'src/lib_json\json_valueiterator.inl' )
-    source.add_file( 'src/lib_json\json_value.cpp' )
-    source.add_file( 'src/lib_json\json_writer.cpp' )
+    lib_json = 'src/lib_json'
+    source.add_file( os.path.join(lib_json, 'json_tool.h') )
+    source.add_file( os.path.join(lib_json, 'json_reader.cpp') )
+    source.add_file( os.path.join(lib_json, 'json_batchallocator.h') )
+    source.add_file( os.path.join(lib_json, 'json_valueiterator.inl') )
+    source.add_file( os.path.join(lib_json, 'json_value.cpp') )
+    source.add_file( os.path.join(lib_json, 'json_writer.cpp') )
 
     print 'Writing amalgated source to %r' % target_source_path
     source.write_to( target_source_path )
 
 def main():
     usage = """%prog [options]
 Generate a single amalgated source and header file from the sources.
 """
     from optparse import OptionParser
     parser = OptionParser(usage=usage)
     parser.allow_interspersed_args = False
     parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp',
                       help="""Output .cpp source path. [Default: %default]""")
     parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h',
                       help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
     parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(),
                       help="""Source top-directory. [Default: %default]""")
     parser.enable_interspersed_args()
     options, args = parser.parse_args()
 
-    msg = amalgate_source( source_top_dir=options.top_dir,
+    msg = amalgamate_source( source_top_dir=options.top_dir,
                            target_source_path=options.target_source_path,
                            header_include_path=options.header_include_path )
     if msg:
         sys.stderr.write( msg + '\n' )
         sys.exit( 1 )
     else:
         print 'Source succesfully amalagated'
 
 if __name__ == '__main__':
     main()
devtools/agent_vmw7.json (new file, +33)

@@ -0,0 +1,33 @@
+{
+    "cmake_variants" : [
+        {"name": "generator",
+         "generators": [
+            {"generator": [
+                "Visual Studio 7 .NET 2003",
+                "Visual Studio 9 2008",
+                "Visual Studio 9 2008 Win64",
+                "Visual Studio 10",
+                "Visual Studio 10 Win64",
+                "Visual Studio 11",
+                "Visual Studio 11 Win64"
+                ]
+            },
+            {"generator": ["MinGW Makefiles"],
+             "env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
+            }
+         ]
+        },
+        {"name": "shared_dll",
+         "variables": [
+            ["JSONCPP_LIB_BUILD_SHARED=true"],
+            ["JSONCPP_LIB_BUILD_SHARED=false"]
+         ]
+        },
+        {"name": "build_type",
+         "build_types": [
+            "debug",
+            "release"
+         ]
+        }
+    ]
+}
devtools/agent_vmxp.json (new file, +26)

@@ -0,0 +1,26 @@
+{
+    "cmake_variants" : [
+        {"name": "generator",
+         "generators": [
+            {"generator": [
+                "Visual Studio 6",
+                "Visual Studio 7",
+                "Visual Studio 8 2005"
+                ]
+            }
+         ]
+        },
+        {"name": "shared_dll",
+         "variables": [
+            ["JSONCPP_LIB_BUILD_SHARED=true"],
+            ["JSONCPP_LIB_BUILD_SHARED=false"]
+         ]
+        },
+        {"name": "build_type",
+         "build_types": [
+            "debug",
+            "release"
+         ]
+        }
+    ]
+}
devtools/antglob.py (file name inferred from the content, as the mirror dropped the diff header; the old and new sides of every captured line are identical, so this was evidently a whitespace or line-ending only change)

@@ -55,20 +55,20 @@ ALL = DIR | FILE | LINKS
 
 _ANT_RE = re.compile( r'(/\*\*/)|(\*\*/)|(/\*\*)|(\*)|(/)|([^\*/]*)' )
 
 def ant_pattern_to_re( ant_pattern ):
     """Generates a regular expression from the ant pattern.
     Matching convention:
     **/a: match 'a', 'dir/a', 'dir1/dir2/a'
     a/**/b: match 'a/b', 'a/c/b', 'a/d/c/b'
     *.py: match 'script.py' but not 'a/script.py'
     """
     rex = ['^']
     next_pos = 0
     sep_rex = r'(?:/|%s)' % re.escape( os.path.sep )
     ## print 'Converting', ant_pattern
     for match in _ANT_RE.finditer( ant_pattern ):
         ## print 'Matched', match.group()
         ## print match.start(0), next_pos
         if match.start(0) != next_pos:
             raise ValueError( "Invalid ant pattern" )
         if match.group(1): # /**/
@@ -83,14 +83,14 @@ def ant_pattern_to_re( ant_pattern ):
             rex.append( sep_rex )
         else: # somepath
             rex.append( re.escape(match.group(6)) )
         next_pos = match.end()
     rex.append('$')
     return re.compile( ''.join( rex ) )
 
 def _as_list( l ):
     if isinstance(l, basestring):
         return l.split()
     return l
 
 def glob(dir_path,
          includes = '**/*',
@@ -99,8 +99,8 @@ def glob(dir_path,
          prune_dirs = prune_dirs,
          max_depth = 25):
     include_filter = [ant_pattern_to_re(p) for p in _as_list(includes)]
     exclude_filter = [ant_pattern_to_re(p) for p in _as_list(excludes)]
     prune_dirs = [p.replace('/',os.path.sep) for p in _as_list(prune_dirs)]
     dir_path = dir_path.replace('/',os.path.sep)
     entry_type_filter = entry_type
 
@@ -117,37 +117,37 @@ def glob(dir_path,
             return True
         return False
 
     def glob_impl( root_dir_path ):
         child_dirs = [root_dir_path]
         while child_dirs:
             dir_path = child_dirs.pop()
             for entry in listdir( dir_path ):
                 full_path = os.path.join( dir_path, entry )
                 ## print 'Testing:', full_path,
                 is_dir = os.path.isdir( full_path )
                 if is_dir and not is_pruned_dir( entry ): # explore child directory ?
                     ## print '===> marked for recursion',
                     child_dirs.append( full_path )
                 included = apply_filter( full_path, include_filter )
                 rejected = apply_filter( full_path, exclude_filter )
                 if not included or rejected: # do not include entry ?
                     ## print '=> not included or rejected'
                     continue
                 link = os.path.islink( full_path )
                 is_file = os.path.isfile( full_path )
                 if not is_file and not is_dir:
                     ## print '=> unknown entry type'
                     continue
                 if link:
                     entry_type = is_file and FILE_LINK or DIR_LINK
                 else:
                     entry_type = is_file and FILE or DIR
                 ## print '=> type: %d' % entry_type,
                 if (entry_type & entry_type_filter) != 0:
                     ## print ' => KEEP'
                     yield os.path.join( dir_path, entry )
                 ## else:
                 ##     print ' => TYPE REJECTED'
     return list( glob_impl( dir_path ) )
 
@@ -155,47 +155,47 @@ if __name__ == "__main__":
     import unittest
 
     class AntPatternToRETest(unittest.TestCase):
 ##        def test_conversion( self ):
 ##            self.assertEqual( '^somepath$', ant_pattern_to_re( 'somepath' ).pattern )
 
         def test_matching( self ):
             test_cases = [ ( 'path',
                              ['path'],
                              ['somepath', 'pathsuffix', '/path', '/path'] ),
                            ( '*.py',
                              ['source.py', 'source.ext.py', '.py'],
                              ['path/source.py', '/.py', 'dir.py/z', 'z.pyc', 'z.c'] ),
                            ( '**/path',
                              ['path', '/path', '/a/path', 'c:/a/path', '/a/b/path', '//a/path', '/a/path/b/path'],
                              ['path/', 'a/path/b', 'dir.py/z', 'somepath', 'pathsuffix', 'a/somepath'] ),
                            ( 'path/**',
                              ['path/a', 'path/path/a', 'path//'],
                              ['path', 'somepath/a', 'a/path', 'a/path/a', 'pathsuffix/a'] ),
                            ( '/**/path',
                              ['/path', '/a/path', '/a/b/path/path', '/path/path'],
                              ['path', 'path/', 'a/path', '/pathsuffix', '/somepath'] ),
                            ( 'a/b',
                              ['a/b'],
                              ['somea/b', 'a/bsuffix', 'a/b/c'] ),
                            ( '**/*.py',
                              ['script.py', 'src/script.py', 'a/b/script.py', '/a/b/script.py'],
                              ['script.pyc', 'script.pyo', 'a.py/b'] ),
                            ( 'src/**/*.py',
                              ['src/a.py', 'src/dir/a.py'],
                              ['a/src/a.py', '/src/a.py'] ),
                            ]
             for ant_pattern, accepted_matches, rejected_matches in list(test_cases):
                 def local_path( paths ):
                     return [ p.replace('/',os.path.sep) for p in paths ]
                 test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
             for ant_pattern, accepted_matches, rejected_matches in test_cases:
                 rex = ant_pattern_to_re( ant_pattern )
                 print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
                 for accepted_match in accepted_matches:
                     print 'Accepted?:', accepted_match
                     self.assert_( rex.match( accepted_match ) is not None )
                 for rejected_match in rejected_matches:
                     print 'Rejected?:', rejected_match
                     self.assert_( rex.match( rejected_match ) is None )
 
     unittest.main()
280
devtools/batchbuild.py
Normal file
280
devtools/batchbuild.py
Normal file
@@ -0,0 +1,280 @@
|
|||||||
|
import collections
|
||||||
|
import itertools
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import os.path
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import string
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import cgi
|
||||||
|
|
||||||
|
class BuildDesc:
|
||||||
|
def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None):
|
||||||
|
self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ]
|
||||||
|
self.variables = variables or []
|
||||||
|
self.build_type = build_type
|
||||||
|
self.generator = generator
|
||||||
|
|
||||||
|
def merged_with( self, build_desc ):
|
||||||
|
"""Returns a new BuildDesc by merging field content.
|
||||||
|
Prefer build_desc fields to self fields for single valued field.
|
||||||
|
"""
|
||||||
|
return BuildDesc( self.prepend_envs + build_desc.prepend_envs,
|
||||||
|
self.variables + build_desc.variables,
|
||||||
|
build_desc.build_type or self.build_type,
|
||||||
|
build_desc.generator or self.generator )
|
||||||
|
|
||||||
|
def env( self ):
|
||||||
|
environ = os.environ.copy()
|
||||||
|
for values_by_name in self.prepend_envs:
|
||||||
|
for var, value in values_by_name.items():
|
||||||
|
var = var.upper()
|
||||||
|
if type(value) is unicode:
|
||||||
|
value = value.encode( sys.getdefaultencoding() )
|
||||||
|
if var in environ:
|
||||||
|
environ[var] = value + os.pathsep + environ[var]
|
||||||
|
else:
|
||||||
|
environ[var] = value
|
||||||
|
return environ
|
||||||
|
|
||||||
|
def cmake_args( self ):
|
||||||
|
args = ["-D%s" % var for var in self.variables]
|
||||||
|
# skip build type for Visual Studio solution as it cause warning
|
||||||
|
if self.build_type and 'Visual' not in self.generator:
|
||||||
|
args.append( "-DCMAKE_BUILD_TYPE=%s" % self.build_type )
|
||||||
|
if self.generator:
|
||||||
|
args.extend( ['-G', self.generator] )
|
||||||
|
return args
|
||||||
|
|
||||||
|
def __repr__( self ):
|
||||||
|
return "BuildDesc( %s, build_type=%s )" % (" ".join( self.cmake_args()), self.build_type)
|
||||||
|
|
||||||
|
class BuildData:
|
||||||
|
def __init__( self, desc, work_dir, source_dir ):
|
||||||
|
self.desc = desc
|
||||||
|
self.work_dir = work_dir
|
||||||
|
self.source_dir = source_dir
|
||||||
|
self.cmake_log_path = os.path.join( work_dir, 'batchbuild_cmake.log' )
|
||||||
|
self.build_log_path = os.path.join( work_dir, 'batchbuild_build.log' )
|
||||||
|
self.cmake_succeeded = False
|
||||||
|
self.build_succeeded = False
|
||||||
|
|
||||||
|
def execute_build(self):
|
||||||
|
print 'Build %s' % self.desc
|
||||||
|
self._make_new_work_dir( )
|
||||||
|
self.cmake_succeeded = self._generate_makefiles( )
|
||||||
|
if self.cmake_succeeded:
|
||||||
|
self.build_succeeded = self._build_using_makefiles( )
|
||||||
|
return self.build_succeeded
|
||||||
|
|
||||||
|
def _generate_makefiles(self):
|
||||||
|
print ' Generating makefiles: ',
|
||||||
|
cmd = ['cmake'] + self.desc.cmake_args( ) + [os.path.abspath( self.source_dir )]
|
||||||
|
succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.cmake_log_path )
|
||||||
|
print 'done' if succeeded else 'FAILED'
|
||||||
|
return succeeded
|
||||||
|
|
||||||
|
def _build_using_makefiles(self):
|
||||||
|
print ' Building:',
|
||||||
|
cmd = ['cmake', '--build', self.work_dir]
|
||||||
|
if self.desc.build_type:
|
||||||
|
cmd += ['--config', self.desc.build_type]
|
||||||
|
succeeded = self._execute_build_subprocess( cmd, self.desc.env(), self.build_log_path )
|
||||||
|
print 'done' if succeeded else 'FAILED'
|
||||||
|
return succeeded
|
||||||
|
|
||||||
|
def _execute_build_subprocess(self, cmd, env, log_path):
|
||||||
|
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
|
||||||
|
env=env )
|
||||||
|
stdout, _ = process.communicate( )
|
||||||
|
succeeded = (process.returncode == 0)
|
||||||
|
with open( log_path, 'wb' ) as flog:
|
||||||
|
log = ' '.join( cmd ) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
|
||||||
|
flog.write( fix_eol( log ) )
|
||||||
|
return succeeded
|
||||||
|
|
||||||
|
def _make_new_work_dir(self):
|
||||||
|
if os.path.isdir( self.work_dir ):
|
||||||
|
print ' Removing work directory', self.work_dir
|
||||||
|
shutil.rmtree( self.work_dir, ignore_errors=True )
|
||||||
|
if not os.path.isdir( self.work_dir ):
|
||||||
|
os.makedirs( self.work_dir )
|
||||||
|
|
||||||
|
def fix_eol( stdout ):
|
||||||
|
"""Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
|
||||||
|
"""
|
||||||
|
return re.sub( '\r*\n', os.linesep, stdout )
|
||||||
|
|
||||||
|
def load_build_variants_from_config( config_path ):
|
||||||
|
with open( config_path, 'rb' ) as fconfig:
|
||||||
|
data = json.load( fconfig )
|
||||||
|
variants = data[ 'cmake_variants' ]
|
||||||
|
build_descs_by_axis = collections.defaultdict( list )
|
||||||
|
for axis in variants:
|
||||||
|
axis_name = axis["name"]
|
||||||
|
build_descs = []
|
||||||
|
if "generators" in axis:
|
||||||
|
for generator_data in axis["generators"]:
|
||||||
|
for generator in generator_data["generator"]:
|
||||||
|
build_desc = BuildDesc( generator=generator,
|
||||||
|
prepend_envs=generator_data.get("env_prepend") )
|
||||||
|
build_descs.append( build_desc )
|
||||||
|
elif "variables" in axis:
|
||||||
|
for variables in axis["variables"]:
|
||||||
|
build_desc = BuildDesc( variables=variables )
|
||||||
|
build_descs.append( build_desc )
|
||||||
|
elif "build_types" in axis:
|
||||||
|
for build_type in axis["build_types"]:
|
||||||
|
build_desc = BuildDesc( build_type=build_type )
|
||||||
|
build_descs.append( build_desc )
|
||||||
|
build_descs_by_axis[axis_name].extend( build_descs )
|
||||||
|
return build_descs_by_axis
|
||||||
|
|
||||||
|
def generate_build_variants( build_descs_by_axis ):
|
||||||
|
"""Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
|
||||||
|
axis_names = build_descs_by_axis.keys()
|
||||||
|
build_descs = []
|
||||||
|
for axis_name, axis_build_descs in build_descs_by_axis.items():
|
||||||
|
if len(build_descs):
|
||||||
|
# for each existing build_desc and each axis build desc, create a new build_desc
|
||||||
|
new_build_descs = []
|
||||||
|
for prototype_build_desc, axis_build_desc in itertools.product( build_descs, axis_build_descs):
|
||||||
|
new_build_descs.append( prototype_build_desc.merged_with( axis_build_desc ) )
|
||||||
|
build_descs = new_build_descs
|
||||||
|
else:
|
||||||
|
build_descs = axis_build_descs
|
||||||
|
return build_descs
|
||||||
|
|
||||||
|
HTML_TEMPLATE = string.Template('''<html>
|
||||||
|
<head>
|
||||||
|
<title>$title</title>
|
||||||
|
<style type="text/css">
|
||||||
|
td.failed {background-color:#f08080;}
|
||||||
|
td.ok {background-color:#c0eec0;}
|
||||||
|
</style>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<table border="1">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<th>Variables</th>
|
||||||
|
$th_vars
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<th>Build type</th>
|
||||||
|
$th_build_types
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tbody>
|
||||||
|
$tr_builds
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
</body></html>''')
|
||||||
|
|
||||||
|
def generate_html_report( html_report_path, builds ):
|
||||||
|
report_dir = os.path.dirname( html_report_path )
|
||||||
|
# Vertical axis: generator
|
||||||
|
# Horizontal: variables, then build_type
|
||||||
|
builds_by_generator = collections.defaultdict( list )
|
||||||
|
variables = set()
|
||||||
|
build_types_by_variable = collections.defaultdict( set )
|
||||||
|
build_by_pos_key = {} # { (generator, var_key, build_type): build }
|
||||||
|
for build in builds:
|
||||||
|
builds_by_generator[build.desc.generator].append( build )
|
||||||
|
var_key = tuple(sorted(build.desc.variables))
|
||||||
|
variables.add( var_key )
|
||||||
|
build_types_by_variable[var_key].add( build.desc.build_type )
|
||||||
|
pos_key = (build.desc.generator, var_key, build.desc.build_type)
|
||||||
|
build_by_pos_key[pos_key] = build
|
||||||
|
variables = sorted( variables )
|
||||||
|
th_vars = []
|
||||||
|
th_build_types = []
|
||||||
|
for variable in variables:
|
||||||
|
build_types = sorted( build_types_by_variable[variable] )
|
||||||
|
nb_build_type = len(build_types_by_variable[variable])
|
||||||
|
th_vars.append( '<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape( ' '.join( variable ) ) ) )
|
||||||
|
for build_type in build_types:
|
||||||
|
th_build_types.append( '<th>%s</th>' % cgi.escape(build_type) )
|
||||||
|
tr_builds = []
|
||||||
|
for generator in sorted( builds_by_generator ):
|
||||||
|
tds = [ '<td>%s</td>\n' % cgi.escape( generator ) ]
|
||||||
|
for variable in variables:
|
||||||
|
build_types = sorted( build_types_by_variable[variable] )
|
||||||
|
for build_type in build_types:
|
||||||
|
pos_key = (generator, variable, build_type)
|
||||||
|
build = build_by_pos_key.get(pos_key)
|
||||||
|
if build:
|
||||||
|
cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
|
||||||
|
build_status = 'ok' if build.build_succeeded else 'FAILED'
|
||||||
|
cmake_log_url = os.path.relpath( build.cmake_log_path, report_dir )
|
||||||
|
build_log_url = os.path.relpath( build.build_log_path, report_dir )
|
||||||
|
td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
|
||||||
|
build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
|
||||||
|
if build.cmake_succeeded:
|
||||||
|
td += '<br><a href="%s" class="%s">Build: %s</a>' % (
|
||||||
|
build_log_url, build_status.lower(), build_status)
|
||||||
|
td += '</td>'
|
||||||
|
else:
|
||||||
|
td = '<td></td>'
|
||||||
|
tds.append( td )
|
||||||
|
tr_builds.append( '<tr>%s</tr>' % '\n'.join( tds ) )
|
||||||
|
html = HTML_TEMPLATE.substitute(
|
||||||
|
title='Batch build report',
|
||||||
|
th_vars=' '.join(th_vars),
|
||||||
|
th_build_types=' '.join( th_build_types),
|
||||||
|
tr_builds='\n'.join( tr_builds ) )
|
||||||
|
with open( html_report_path, 'wt' ) as fhtml:
|
||||||
|
fhtml.write( html )
|
||||||
|
print 'HTML report generated in:', html_report_path
|
||||||
|
|
||||||
|
def main():
    usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
Build a given CMake based project located in SOURCE_DIR with multiple
generators/options, as described in CONFIG_JSON_PATH, building in WORK_DIR.

Example of call:
python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = True
    # parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
    #     help="""Be verbose.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    if len(args) < 3:
        parser.error( "Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH." )
    work_dir = args[0]
    source_dir = args[1].rstrip('/\\')
    config_paths = args[2:]
    for config_path in config_paths:
        if not os.path.isfile( config_path ):
            parser.error( "Can not read: %r" % config_path )

    # generate build variants
    build_descs = []
    for config_path in config_paths:
        build_descs_by_axis = load_build_variants_from_config( config_path )
        build_descs.extend( generate_build_variants( build_descs_by_axis ) )
    print 'Build variants (%d):' % len(build_descs)
    # assign build directory for each variant
    if not os.path.isdir( work_dir ):
        os.makedirs( work_dir )
    builds = []
    with open( os.path.join( work_dir, 'matrix-dir-map.txt' ), 'wt' ) as fmatrixmap:
        for index, build_desc in enumerate( build_descs ):
            build_desc_work_dir = os.path.join( work_dir, '%03d' % (index+1) )
            builds.append( BuildData( build_desc, build_desc_work_dir, source_dir ) )
            fmatrixmap.write( '%s: %s\n' % (build_desc_work_dir, build_desc) )
    for build in builds:
        build.execute_build()
    html_report_path = os.path.join( work_dir, 'batchbuild-report.html' )
    generate_html_report( html_report_path, builds )
    print 'Done'


if __name__ == '__main__':
    main()
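The $-placeholders in the report template above are filled with Python's
string.Template. A minimal standalone sketch of the same substitution pattern
(the cell values below are made up, not real report data):

    from string import Template

    HTML_TEMPLATE = Template('''<table>
    <tr><th>Variables</th>$th_vars</tr>
    <tr><th>Build type</th>$th_build_types</tr>
    $tr_builds
    </table>''')

    html = HTML_TEMPLATE.substitute(
        th_vars='<th colspan="2">SHARED=ON</th>',
        th_build_types='<th>debug</th><th>release</th>',
        tr_builds='<tr><td>make</td><td class="ok">ok</td><td class="ok">ok</td></tr>' )
    print(html)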
@@ -1,63 +1,63 @@
import os.path
import sys


def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
    """Makes sure that all sources have the specified eol sequence (default: unix)."""
    if not os.path.isfile( path ):
        raise ValueError( 'Path "%s" is not a file' % path )
    try:
        f = open(path, 'rb')
    except IOError, msg:
        print >> sys.stderr, "%s: I/O Error: %s" % (path, str(msg))
        return False
    try:
        raw_lines = f.readlines()
    finally:
        f.close()
    fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
    if raw_lines != fixed_lines:
        print '%s =>' % path,
        if not is_dry_run:
            f = open(path, "wb")
            try:
                f.writelines(fixed_lines)
            finally:
                f.close()
        if verbose:
            print is_dry_run and ' NEED FIX' or ' FIXED'
    return True
##
##
##
##def _do_fix( is_dry_run = True ):
##    from waftools import antglob
##    python_sources = antglob.glob( '.',
##        includes = '**/*.py **/wscript **/wscript_build',
##        excludes = antglob.default_excludes + './waf.py',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in python_sources:
##        _fix_python_source( path, is_dry_run )
##
##    cpp_sources = antglob.glob( '.',
##        includes = '**/*.cpp **/*.h **/*.inl',
##        prune_dirs = antglob.prune_dirs + 'waf-* ./build' )
##    for path in cpp_sources:
##        _fix_source_eol( path, is_dry_run )
##
##
##def dry_fix(context):
##    _do_fix( is_dry_run = True )
##
##def fix(context):
##    _do_fix( is_dry_run = False )
##
##def shutdown():
##    pass
##
##def check(context):
##    # Unit tests are run when "check" target is used
##    ut = UnitTest.unit_test()
##    ut.change_to_testfile_dir = True
##    ut.want_to_see_test_output = True
##    ut.want_to_see_test_error = True
##    ut.run()
##    ut.print_results()
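A hypothetical driver for fix_source_eol (the file names below are
illustrative, not part of the script):

    fix_source_eol( 'example.cpp', is_dry_run=True )            # report only: 'example.cpp => NEED FIX'
    fix_source_eol( 'example.cpp', is_dry_run=False )           # rewrite with unix '\n' endings
    fix_source_eol( 'build.bat', is_dry_run=False, eol='\r\n' ) # force DOS endings instead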
@@ -1,93 +1,93 @@
"""Updates the license text in source files.
"""


# An existing license is found if the file starts with the string below,
# and ends with the first blank line.
LICENSE_BEGIN = "// Copyright "

BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

""".replace('\r\n','\n')

def update_license( path, dry_run, show_diff ):
    """Update the license statement in the specified file.
    Parameters:
      path: path of the C++ source file to update.
      dry_run: if True, just print the path of the file that would be updated,
               but don't change it.
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file.
    """
    with open( path, 'rt' ) as fin:
        original_text = fin.read().replace('\r\n','\n')
        newline = fin.newlines and fin.newlines[0] or '\n'
    if not original_text.startswith( LICENSE_BEGIN ):
        # No existing license found => prepend it
        new_text = BRIEF_LICENSE + original_text
    else:
        license_end_index = original_text.index( '\n\n' ) # search first blank line
        new_text = BRIEF_LICENSE + original_text[license_end_index+2:]
    if original_text != new_text:
        if not dry_run:
            with open( path, 'wb' ) as fout:
                fout.write( new_text.replace('\n', newline ) )
        print 'Updated', path
        if show_diff:
            import difflib
            print '\n'.join( difflib.unified_diff( original_text.split('\n'),
                                                   new_text.split('\n') ) )
        return True
    return False

def update_license_in_source_directories( source_dirs, dry_run, show_diff ):
    """Updates license text in C++ source files found in directories source_dirs.
    Parameters:
      source_dirs: list of directories to scan for C++ sources. Directories are
                   scanned recursively.
      dry_run: if True, just print the path of the file that would be updated,
               but don't change it.
      show_diff: if True, print the path of the file that would be modified,
                 as well as the change made to the file.
    """
    from devtools import antglob
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    for source_dir in source_dirs:
        cpp_sources = antglob.glob( source_dir,
            includes = '''**/*.h **/*.cpp **/*.inl''',
            prune_dirs = prune_dirs )
        for source in cpp_sources:
            update_license( source, dry_run, show_diff )

def main():
    usage = """%prog DIR [DIR2...]
Updates license text in sources of the project in source files found
in the directories specified on the command-line.

Example of call:
python devtools\licenseupdater.py include src -n --diff
=> Show changes that would be made to the sources.

python devtools\licenseupdater.py include src
=> Update license statement on all sources in directories include/ and src/.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('-n', '--dry-run', dest="dry_run", action='store_true', default=False,
        help="""Only show what files are updated, do not update the files""")
    parser.add_option('--diff', dest="show_diff", action='store_true', default=False,
        help="""On update, show change made to the file.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()
    update_license_in_source_directories( args, options.dry_run, options.show_diff )
    print 'Done'

if __name__ == '__main__':
    import sys
    import os.path
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    main()
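A standalone sketch of the splice update_license performs: everything from the
LICENSE_BEGIN marker up to the first blank line is replaced wholesale (the
sample text below is made up):

    LICENSE_BEGIN = "// Copyright "
    BRIEF_LICENSE = LICENSE_BEGIN + "2007-2010 Baptiste Lepilleur\n\n"

    original_text = "// Copyright 2003 Someone Else\n// old terms\n\n#include <json/json.h>\n"
    if original_text.startswith( LICENSE_BEGIN ):
        end = original_text.index( '\n\n' )         # first blank line closes the header
        new_text = BRIEF_LICENSE + original_text[end+2:]
    else:
        new_text = BRIEF_LICENSE + original_text    # no header yet => prepend one
    print(new_text)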
@@ -1,53 +1,53 @@
import os.path
import gzip
import tarfile

TARGZ_DEFAULT_COMPRESSION_LEVEL = 9

def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
    """Parameters:
    tarball_path: output path of the .tar.gz file
    sources: list of sources to include in the tarball, relative to the current directory
    base_dir: if a source file is in a sub-directory of base_dir, then base_dir is stripped
        from its path in the tarball.
    prefix_dir: all files stored in the tarball are placed in a sub-directory of prefix_dir.
        Set to '' to make them children of the root.
    """
    base_dir = os.path.normpath( os.path.abspath( base_dir ) )
    def archive_name( path ):
        """Makes path relative to base_dir."""
        path = os.path.normpath( os.path.abspath( path ) )
        common_path = os.path.commonprefix( (base_dir, path) )
        archive_name = path[len(common_path):]
        if os.path.isabs( archive_name ):
            archive_name = archive_name[1:]
        return os.path.join( prefix_dir, archive_name )
    def visit(tar, dirname, names):
        for name in names:
            path = os.path.join(dirname, name)
            if os.path.isfile(path):
                path_in_tar = archive_name(path)
                tar.add(path, path_in_tar )
    compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
    tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
    try:
        for source in sources:
            source_path = source
            if os.path.isdir( source ):
                os.path.walk(source_path, visit, tar)
            else:
                path_in_tar = archive_name(source_path)
                tar.add(source_path, path_in_tar ) # filename, arcname
    finally:
        tar.close()

def decompress( tarball_path, base_dir ):
    """Decompress the gzipped tarball into directory base_dir.
    """
    # !!! This class method is not documented in the online doc
    # nor is bz2open!
    tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
    try:
        tar.extractall( base_dir )
    finally:
        tar.close()
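Hypothetical use of the two helpers above (the archive and directory names are
made up):

    make_tarball( 'dist/jsoncpp-src.tar.gz',
                  sources=['src', 'include', 'README.txt'],
                  base_dir='.',
                  prefix_dir='jsoncpp-src' )   # everything lands under jsoncpp-src/
    decompress( 'dist/jsoncpp-src.tar.gz', 'dist/distcheck' )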
@@ -2,6 +2,7 @@
 \section ms_release Makes JsonCpp ready for release
 - Build system clean-up:
   - Fix build on Windows (shared-library build is broken)
+  - Compile and run tests using shared library on Windows to ensure no JSON_API macro is missing.
   - Add enable/disable flag for static and shared library build
   - Enhance help
 - Platform portability check: (Notes: was ok on last check)
2	include/CMakeLists.txt	Normal file
@@ -0,0 +1,2 @@
FILE(GLOB INCLUDE_FILES "json/*.h")
INSTALL(FILES ${INCLUDE_FILES} DESTINATION include/json)
32	include/json/assertions.h	Normal file
@@ -0,0 +1,32 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED
# define CPPTL_JSON_ASSERTIONS_H_INCLUDED

#include <stdlib.h>

#if !defined(JSON_IS_AMALGAMATION)
# include <json/config.h>
#endif // if !defined(JSON_IS_AMALGAMATION)

#if JSON_USE_EXCEPTION
# include <stdexcept>
#define JSON_ASSERT( condition ) assert( condition );  // @todo <= change this into an exception throw
#define JSON_FAIL_MESSAGE( message ) throw std::runtime_error( message );
#else  // JSON_USE_EXCEPTION
#define JSON_ASSERT( condition ) assert( condition );

// The call to assert() will show the failure message in debug builds. In
// release builds we write to invalid memory in order to crash hard, so that a
// debugger or crash reporter gets the chance to take over. We still call exit()
// afterward in order to tell the compiler that this macro doesn't return.
#define JSON_FAIL_MESSAGE( message ) { assert(false && message); strcpy(reinterpret_cast<char*>(666), message); exit(123); }

#endif

#define JSON_ASSERT_MESSAGE( condition, message ) if (!( condition )) { JSON_FAIL_MESSAGE( message ) }

#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
@@ -24,14 +24,16 @@
 /// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
 //# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1
 
-/// If defined, indicates that Json use exception to report invalid type manipulation
-/// instead of C assert macro.
+// If non-zero, the library uses exceptions to report bad input instead of C
+// assertion macros. The default is to use exceptions.
+# ifndef JSON_USE_EXCEPTION
 # define JSON_USE_EXCEPTION 1
+# endif
 
 /// If defined, indicates that the source file is amalgated
 /// to prevent private header inclusion.
 /// Remarks: it is automatically defined in the generated amalgated header.
-// #define JSON_IS_AMALGATED
+// #define JSON_IS_AMALGAMATION
 
 
 # ifdef JSON_IN_CPPTL
@@ -44,10 +46,17 @@
 # ifdef JSON_IN_CPPTL
 #  define JSON_API CPPTL_API
 # elif defined(JSON_DLL_BUILD)
-#  define JSON_API __declspec(dllexport)
+#  if defined(_MSC_VER)
+#    define JSON_API __declspec(dllexport)
+#    define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#  endif // if defined(_MSC_VER)
 # elif defined(JSON_DLL)
-#  define JSON_API __declspec(dllimport)
-# else
+#  if defined(_MSC_VER)
+#    define JSON_API __declspec(dllimport)
+#    define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#  endif // if defined(_MSC_VER)
+# endif // ifdef JSON_IN_CPPTL
+# if !defined(JSON_API)
 #  define JSON_API
 # endif
@@ -59,6 +68,9 @@
 // Microsoft Visual Studio 6 only support conversion from __int64 to double
 // (no conversion from unsigned __int64).
 #define JSON_USE_INT64_DOUBLE_CONVERSION 1
+// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255' characters in the debug information)
+// All projects I've ever seen with VS6 were using this globally (not bothering with pragma push/pop).
+#pragma warning(disable : 4786)
 #endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6
 
 #if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
@@ -6,9 +6,9 @@
 #ifndef CPPTL_JSON_FEATURES_H_INCLUDED
 # define CPPTL_JSON_FEATURES_H_INCLUDED
 
-#if !defined(JSON_IS_AMALGATED)
+#if !defined(JSON_IS_AMALGAMATION)
 # include "forwards.h"
-#endif // if !defined(JSON_IS_AMALGATED)
+#endif // if !defined(JSON_IS_AMALGAMATION)
 
 namespace Json {
 
@@ -42,6 +42,12 @@ namespace Json {
 
      /// \c true if root must be either an array or an object value. Default: \c false.
      bool strictRoot_;
+
+     /// \c true if dropped null placeholders are allowed. Default: \c false.
+     bool allowDroppedNullPlaceholders_;
+
+     /// \c true if numeric object keys are allowed. Default: \c false.
+     bool allowNumericKeys_;
   };
 
 } // namespace Json
@@ -6,9 +6,9 @@
 #ifndef JSON_FORWARDS_H_INCLUDED
 # define JSON_FORWARDS_H_INCLUDED
 
-#if !defined(JSON_IS_AMALGATED)
+#if !defined(JSON_IS_AMALGAMATION)
 # include "config.h"
-#endif // if !defined(JSON_IS_AMALGATED)
+#endif // if !defined(JSON_IS_AMALGAMATION)
 
 namespace Json {
 
@@ -6,14 +6,21 @@
 #ifndef CPPTL_JSON_READER_H_INCLUDED
 # define CPPTL_JSON_READER_H_INCLUDED
 
-#if !defined(JSON_IS_AMALGATED)
+#if !defined(JSON_IS_AMALGAMATION)
 # include "features.h"
 # include "value.h"
-#endif // if !defined(JSON_IS_AMALGATED)
+#endif // if !defined(JSON_IS_AMALGAMATION)
 # include <deque>
+# include <iosfwd>
 # include <stack>
 # include <string>
-# include <iostream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+# pragma warning(push)
+# pragma warning(disable:4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
 
 namespace Json {
 
@@ -26,6 +33,19 @@ namespace Json {
      typedef char Char;
      typedef const Char *Location;
 
+     /** \brief An error tagged with where in the JSON text it was encountered.
+      *
+      * The offsets give the [start, limit) range of bytes within the text. Note
+      * that this is bytes, not codepoints.
+      */
+     struct StructuredError
+     {
+        size_t offset_start;
+        size_t offset_limit;
+        std::string message;
+     };
+
      /** \brief Constructs a Reader allowing all features
       * for parsing.
       */
@@ -88,6 +108,14 @@ namespace Json {
      */
      std::string getFormattedErrorMessages() const;
 
+     /** \brief Returns a vector of structured errors encountered while parsing.
+      * \return A (possibly empty) vector of StructuredError objects. Currently
+      * only one error can be returned, but the caller should tolerate multiple
+      * errors. This can occur if the parser recovers from a non-fatal
+      * parse error and then encounters additional errors.
+      */
+     std::vector<StructuredError> getStructuredErrors() const;
+
   private:
      enum TokenType
      {
@@ -139,9 +167,11 @@ namespace Json {
      bool readObject( Token &token );
      bool readArray( Token &token );
      bool decodeNumber( Token &token );
+     bool decodeNumber( Token &token, Value &decoded );
      bool decodeString( Token &token );
      bool decodeString( Token &token, std::string &decoded );
      bool decodeDouble( Token &token );
+     bool decodeDouble( Token &token, Value &decoded );
      bool decodeUnicodeCodePoint( Token &token,
                                   Location &current,
                                   Location end,
@@ -197,18 +227,23 @@ namespace Json {
 Result:
 \verbatim
 {
    "dir": {
       "file": {
          // The input stream JSON would be nested here.
       }
    }
 }
 \endverbatim
 \throw std::exception on parse error.
 \see Json::operator<<()
 */
-std::istream& operator>>( std::istream&, Value& );
+JSON_API std::istream& operator>>( std::istream&, Value& );
 
 } // namespace Json
 
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+# pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+
 #endif // CPPTL_JSON_READER_H_INCLUDED
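The [start, limit) offsets carried by StructuredError are byte positions into
the parsed text, so slicing the raw document with them recovers the offending
span. A sketch in Python (the document and offsets below are made up):

    doc = b'{ "count": 00 }'
    offset_start, offset_limit = 11, 13     # hypothetical error range
    print(doc[offset_start:offset_limit])   # b'00' -- the bad token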
@@ -6,9 +6,9 @@
 #ifndef CPPTL_JSON_H_INCLUDED
 # define CPPTL_JSON_H_INCLUDED
 
-#if !defined(JSON_IS_AMALGATED)
+#if !defined(JSON_IS_AMALGAMATION)
 # include "forwards.h"
-#endif // if !defined(JSON_IS_AMALGATED)
+#endif // if !defined(JSON_IS_AMALGAMATION)
 # include <string>
 # include <vector>
 
@@ -21,6 +21,13 @@
 #  include <cpptl/forwards.h>
 # endif
 
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+# pragma warning(push)
+# pragma warning(disable:4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
 
 /** \brief JSON (JavaScript Object Notation).
 */
 namespace Json {
@@ -132,31 +139,33 @@ namespace Json {
      typedef Json::UInt64 UInt64;
      typedef Json::Int64 Int64;
 #endif // defined(JSON_HAS_INT64)
      typedef Json::LargestInt LargestInt;
      typedef Json::LargestUInt LargestUInt;
      typedef Json::ArrayIndex ArrayIndex;
 
      static const Value null;
      /// Minimum signed integer value that can be stored in a Json::Value.
      static const LargestInt minLargestInt;
      /// Maximum signed integer value that can be stored in a Json::Value.
      static const LargestInt maxLargestInt;
      /// Maximum unsigned integer value that can be stored in a Json::Value.
      static const LargestUInt maxLargestUInt;
 
      /// Minimum signed int value that can be stored in a Json::Value.
      static const Int minInt;
      /// Maximum signed int value that can be stored in a Json::Value.
      static const Int maxInt;
      /// Maximum unsigned int value that can be stored in a Json::Value.
      static const UInt maxUInt;
 
+# if defined(JSON_HAS_INT64)
      /// Minimum signed 64 bits int value that can be stored in a Json::Value.
      static const Int64 minInt64;
      /// Maximum signed 64 bits int value that can be stored in a Json::Value.
      static const Int64 maxInt64;
      /// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
      static const UInt64 maxUInt64;
+#endif // defined(JSON_HAS_INT64)
 
   private:
 #ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
@@ -202,14 +211,14 @@ namespace Json {
       To create an empty array, pass arrayValue.
       To create an empty object, pass objectValue.
       Another Value can then be set to this one by assignment.
       This is useful since clear() and resize() will not alter types.
 
       Examples:
 \code
 Json::Value null_value; // null
 Json::Value arr_value(Json::arrayValue); // []
 Json::Value obj_value(Json::objectValue); // {}
 \endcode
      */
      Value( ValueType type = nullValue );
      Value( Int value );
@@ -256,7 +265,7 @@ namespace Json {
      bool operator ==( const Value &other ) const;
      bool operator !=( const Value &other ) const;
 
-     int compare( const Value &other );
+     int compare( const Value &other ) const;
 
      const char *asCString() const;
      std::string asString() const;
@@ -265,8 +274,10 @@ namespace Json {
 # endif
      Int asInt() const;
      UInt asUInt() const;
+#if defined(JSON_HAS_INT64)
      Int64 asInt64() const;
      UInt64 asUInt64() const;
+#endif // if defined(JSON_HAS_INT64)
      LargestInt asLargestInt() const;
      LargestUInt asLargestUInt() const;
      float asFloat() const;
@@ -276,7 +287,9 @@ namespace Json {
      bool isNull() const;
      bool isBool() const;
      bool isInt() const;
+     bool isInt64() const;
      bool isUInt() const;
+     bool isUInt64() const;
      bool isIntegral() const;
      bool isDouble() const;
      bool isNumeric() const;
@@ -315,24 +328,24 @@ namespace Json {
      /// this from the operator[] which takes a string.)
      Value &operator[]( ArrayIndex index );
 
      /// Access an array element (zero based index).
      /// If the array contains fewer than index elements, then null values are inserted
      /// in the array so that its size is index+1.
      /// (You may need to say 'value[0u]' to get your compiler to distinguish
      /// this from the operator[] which takes a string.)
      Value &operator[]( int index );
 
      /// Access an array element (zero based index).
      /// (You may need to say 'value[0u]' to get your compiler to distinguish
      /// this from the operator[] which takes a string.)
      const Value &operator[]( ArrayIndex index ) const;
 
      /// Access an array element (zero based index).
      /// (You may need to say 'value[0u]' to get your compiler to distinguish
      /// this from the operator[] which takes a string.)
      const Value &operator[]( int index ) const;
 
      /// If the array contains at least index+1 elements, returns the element value,
      /// otherwise returns defaultValue.
      Value get( ArrayIndex index,
                 const Value &defaultValue ) const;
@@ -429,6 +442,13 @@ namespace Json {
      iterator begin();
      iterator end();
 
+     // Accessors for the [start, limit) range of bytes within the JSON text from
+     // which this value was parsed, if any.
+     void setOffsetStart( size_t start );
+     void setOffsetLimit( size_t limit );
+     size_t getOffsetStart() const;
+     size_t getOffsetLimit() const;
+
   private:
      Value &resolveReference( const char *key,
                               bool isStatic );
@@ -496,12 +516,17 @@ namespace Json {
      int memberNameIsStatic_ : 1;       // used by the ValueInternalMap container.
 # endif
      CommentInfo *comments_;
+
+     // [start, limit) byte offsets in the source JSON text from which this Value
+     // was extracted.
+     size_t start_;
+     size_t limit_;
   };
 
 
   /** \brief Experimental and untested: represents an element of the "path" to access a node.
   */
-  class PathArgument
+  class JSON_API PathArgument
   {
   public:
      friend class Path;
@@ -534,7 +559,7 @@ namespace Json {
    * - ".%" => member name is provided as parameter
    * - ".[%]" => index is provided as parameter
    */
-  class Path
+  class JSON_API Path
   {
   public:
      Path( const std::string &path,
@@ -910,9 +935,10 @@ public: // overridden from ValueArrayAllocator
   /** \brief base class for Value iterators.
    *
    */
-  class ValueIteratorBase
+  class JSON_API ValueIteratorBase
   {
   public:
+     typedef std::bidirectional_iterator_tag iterator_category;
      typedef unsigned int size_t;
      typedef int difference_type;
      typedef ValueIteratorBase SelfType;
@@ -980,10 +1006,11 @@ public: // overridden from ValueArrayAllocator
   /** \brief const iterator for object and array value.
    *
    */
-  class ValueConstIterator : public ValueIteratorBase
+  class JSON_API ValueConstIterator : public ValueIteratorBase
   {
      friend class Value;
   public:
+     typedef const Value value_type;
      typedef unsigned int size_t;
      typedef int difference_type;
      typedef const Value &reference;
@@ -1038,10 +1065,11 @@ public: // overridden from ValueArrayAllocator
   /** \brief Iterator for object and array value.
   */
-  class ValueIterator : public ValueIteratorBase
+  class JSON_API ValueIterator : public ValueIteratorBase
   {
      friend class Value;
   public:
+     typedef Value value_type;
      typedef unsigned int size_t;
      typedef int difference_type;
      typedef Value &reference;
@@ -1100,4 +1128,9 @@ public: // overridden from ValueArrayAllocator
 } // namespace Json
 
 
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+# pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+
 #endif // CPPTL_JSON_H_INCLUDED
14	include/json/version.h	Normal file
@@ -0,0 +1,14 @@
// DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED

# define JSONCPP_VERSION_STRING "0.6.0-dev"
# define JSONCPP_VERSION_MAJOR 0
# define JSONCPP_VERSION_MINOR 6
# define JSONCPP_VERSION_PATCH 0
# define JSONCPP_VERSION_QUALIFIER -dev
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))

#endif // JSON_VERSION_H_INCLUDED
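JSONCPP_VERSION_HEXA packs one version field per byte, leaving the low byte
free for a qualifier. A quick arithmetic check of the packing (in Python):

    major, minor, patch = 0, 6, 0
    hexa = (major << 24) | (minor << 16) | (patch << 8)
    assert hexa == 0x00060000   # "0.6.0" packs to 0x00060000
    print(hex(hexa))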
@@ -6,12 +6,18 @@
 #ifndef JSON_WRITER_H_INCLUDED
 # define JSON_WRITER_H_INCLUDED
 
-#if !defined(JSON_IS_AMALGATED)
+#if !defined(JSON_IS_AMALGAMATION)
 # include "value.h"
-#endif // if !defined(JSON_IS_AMALGATED)
+#endif // if !defined(JSON_IS_AMALGAMATION)
 # include <vector>
 # include <string>
-# include <iostream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+# pragma warning(push)
+# pragma warning(disable:4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
 
 namespace Json {
 
@@ -41,6 +47,13 @@ namespace Json {
 
      void enableYAMLCompatibility();
 
+     /** \brief Drop the "null" string from the writer's output for nullValues.
+      * Strictly speaking, this is not valid JSON. But when the output is being
+      * fed to a browser's Javascript, it makes for smaller output and the
+      * browser can handle the output just fine.
+      */
+     void dropNullPlaceholders();
+
   public: // overridden from Writer
      virtual std::string write( const Value &root );
 
@@ -49,6 +62,7 @@ namespace Json {
 
      std::string document_;
      bool yamlCompatiblityEnabled_;
+     bool dropNullPlaceholders_;
   };
 
   /** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way.
@@ -176,10 +190,14 @@ namespace Json {
 
   /// \brief Output using the StyledStreamWriter.
   /// \see Json::operator>>()
-  std::ostream& operator<<( std::ostream&, const Value &root );
+  JSON_API std::ostream& operator<<( std::ostream&, const Value &root );
 
 } // namespace Json
 
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+# pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+
 #endif // JSON_WRITER_H_INCLUDED
@@ -23,7 +23,7 @@ import tempfile
 import os
 import time
 from devtools import antglob, fixeol, tarball
-import amalgate
+import amalgamate
 
 SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
 SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
@@ -323,13 +323,13 @@ Warning: --force should only be used when developping/testing the release script
     print 'Generating source tarball to', source_tarball_path
     tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
 
-    amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir
-    print 'Generating amalgated source tarball to', amalgated_tarball_path
-    amalgated_dir = 'dist/amalgated'
-    amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' )
-    amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version
-    tarball.make_tarball( amalgated_tarball_path, [amalgated_dir],
-                          amalgated_dir, prefix_dir=amalgated_source_dir )
+    amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
+    print 'Generating amalgamation source tarball to', amalgamation_tarball_path
+    amalgamation_dir = 'dist/amalgamation'
+    amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
+    amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
+    tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir],
+                          amalgamation_dir, prefix_dir=amalgamation_source_dir )
 
     # Decompress source tarball, download and install scons-local
     distcheck_dir = 'dist/distcheck'
@@ -1,53 +1,53 @@
import fnmatch
import os

def generate( env ):
    def Glob( env, includes = None, excludes = None, dir = '.' ):
        """Adds a Glob( includes = Split( '*' ), excludes = None, dir = '.')
        helper function to the environment.

        Globs the file-system files in dir.

        includes: list of file name patterns included in the return list when matched.
        excludes: list of file name patterns excluded from the return list.

        Example:
        sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
        """
        def filterFilename(path):
            abs_path = os.path.join( dir, path )
            if not os.path.isfile(abs_path):
                return 0
            fn = os.path.basename(path)
            match = 0
            for include in includes:
                if fnmatch.fnmatchcase( fn, include ):
                    match = 1
                    break
            if match == 1 and not excludes is None:
                for exclude in excludes:
                    if fnmatch.fnmatchcase( fn, exclude ):
                        match = 0
                        break
            return match
        if includes is None:
            includes = ('*',)
        elif type(includes) in ( type(''), type(u'') ):
            includes = (includes,)
        if type(excludes) in ( type(''), type(u'') ):
            excludes = (excludes,)
        dir = env.Dir(dir).abspath
        paths = os.listdir( dir )
        def makeAbsFileNode( path ):
            return env.File( os.path.join( dir, path ) )
        nodes = filter( filterFilename, paths )
        return map( makeAbsFileNode, nodes )

    from SCons.Script import Environment
    Environment.Glob = Glob

def exists(env):
    """
    Tool always exists.
    """
    return True
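A standalone sketch of the include/exclude matching the Glob helper performs,
using fnmatch.fnmatchcase as above (the file names below are made up):

    import fnmatch

    def matches( fn, includes=('*',), excludes=None ):
        if not any( fnmatch.fnmatchcase(fn, pat) for pat in includes ):
            return False
        return not ( excludes and any( fnmatch.fnmatchcase(fn, pat) for pat in excludes ) )

    print(matches( 'json_reader.cpp', includes=('*.cpp', '*.h') ))                   # True
    print(matches( '~json_reader.cpp', includes=('*.cpp',), excludes=('~*.cpp',) ))  # False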
5	src/CMakeLists.txt	Normal file
@@ -0,0 +1,5 @@
ADD_SUBDIRECTORY(lib_json)
IF(JSONCPP_WITH_TESTS)
    ADD_SUBDIRECTORY(jsontestrunner)
    ADD_SUBDIRECTORY(test_lib_json)
ENDIF(JSONCPP_WITH_TESTS)
23	src/jsontestrunner/CMakeLists.txt	Normal file
@@ -0,0 +1,23 @@
FIND_PACKAGE(PythonInterp 2.6 REQUIRED)

IF(JSONCPP_LIB_BUILD_SHARED)
    ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)

ADD_EXECUTABLE(jsontestrunner_exe
               main.cpp
               )
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)

IF(PYTHONINTERP_FOUND)
    # Run end to end parser/writer tests
    GET_PROPERTY(JSONTESTRUNNER_EXE_PATH TARGET jsontestrunner_exe PROPERTY LOCATION)
    SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test)
    SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py)
    ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ALL
                      "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" "${JSONTESTRUNNER_EXE_PATH}" "${TEST_DIR}/data"
                      DEPENDS jsontestrunner_exe jsoncpp_test
                      )
    ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)
ENDIF(PYTHONINTERP_FOUND)
@@ -15,6 +15,35 @@
 # pragma warning( disable: 4996 ) // disable fopen deprecation warning
 #endif
 
+static std::string
+normalizeFloatingPointStr( double value )
+{
+    char buffer[32];
+#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
+    sprintf_s( buffer, sizeof(buffer), "%.16g", value );
+#else
+    snprintf( buffer, sizeof(buffer), "%.16g", value );
+#endif
+    buffer[sizeof(buffer)-1] = 0;
+    std::string s( buffer );
+    std::string::size_type index = s.find_last_of( "eE" );
+    if ( index != std::string::npos )
+    {
+        std::string::size_type hasSign = (s[index+1] == '+' || s[index+1] == '-') ? 1 : 0;
+        std::string::size_type exponentStartIndex = index + 1 + hasSign;
+        std::string normalized = s.substr( 0, exponentStartIndex );
+        std::string::size_type indexDigit = s.find_first_not_of( '0', exponentStartIndex );
+        std::string exponent = "0";
+        if ( indexDigit != std::string::npos ) // There is an exponent different from 0
+        {
+            exponent = s.substr( indexDigit );
+        }
+        return normalized + exponent;
+    }
+    return s;
+}
+
+
 static std::string
 readInputTestFile( const char *path )
 {
@@ -34,7 +63,6 @@ readInputTestFile( const char *path )
     return text;
 }
 
-
 static void
 printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
 {
@@ -50,7 +78,7 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
         fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() );
         break;
     case Json::realValue:
-        fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() );
+        fprintf( fout, "%s=%s\n", path.c_str(), normalizeFloatingPointStr(value.asDouble()).c_str() );
         break;
     case Json::stringValue:
         fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() );
@@ -65,7 +93,11 @@ printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
         for ( int index =0; index < size; ++index )
         {
             static char buffer[16];
-            sprintf( buffer, "[%d]", index );
+#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
+            sprintf_s( buffer, sizeof(buffer), "[%d]", index );
+#else
+            snprintf( buffer, sizeof(buffer), "[%d]", index );
+#endif
             printValueTree( fout, value[index], path + buffer );
         }
     }
@@ -266,4 +298,4 @@ int main( int argc, const char *argv[] )
 
     return exitCode;
 }
-
+// vim: et ts=4 sts=4 sw=4 tw=0
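The point of normalizeFloatingPointStr() is that MSVC prints exponents with three digits ("1.9e+019") while glibc prints two ("1.9e+19"); stripping leading exponent zeros makes jsontestrunner's output platform-independent. A minimal sketch of the effect (illustrative values, assuming the function above is in scope):

// Illustrative check of the exponent normalization above.
#include <cassert>
#include <string>

int main()
{
    // "%.16g" renders 1.9e19 as "1.9e+19" (glibc) or "1.9e+019" (MSVC);
    // both normalize to "1.9e+19" (cf. test_real_09.expected below).
    assert( normalizeFloatingPointStr( 1.9e19 ) == "1.9e+19" );
    // Strings without an exponent are returned unchanged.
    assert( normalizeFloatingPointStr( 0.25 ) == "0.25" );
    return 0;
}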
43
src/lib_json/CMakeLists.txt
Normal file
@@ -0,0 +1,43 @@
OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF)
IF(JSONCPP_LIB_BUILD_SHARED)
    SET(JSONCPP_LIB_TYPE SHARED)
    ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ELSE(JSONCPP_LIB_BUILD_SHARED)
    SET(JSONCPP_LIB_TYPE STATIC)
ENDIF(JSONCPP_LIB_BUILD_SHARED)


SET( JSONCPP_INCLUDE_DIR ../../include )

SET( PUBLIC_HEADERS
    ${JSONCPP_INCLUDE_DIR}/json/config.h
    ${JSONCPP_INCLUDE_DIR}/json/forwards.h
    ${JSONCPP_INCLUDE_DIR}/json/features.h
    ${JSONCPP_INCLUDE_DIR}/json/value.h
    ${JSONCPP_INCLUDE_DIR}/json/reader.h
    ${JSONCPP_INCLUDE_DIR}/json/writer.h
    ${JSONCPP_INCLUDE_DIR}/json/assertions.h
    ${JSONCPP_INCLUDE_DIR}/json/version.h
    )

SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} )

ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE}
    ${PUBLIC_HEADERS}
    json_tool.h
    json_reader.cpp
    json_batchallocator.h
    json_valueiterator.inl
    json_value.cpp
    json_writer.cpp
    version.h.in
    )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp )
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSON_CPP_VERSION} SOVERSION ${JSON_CPP_VERSION} )

# Install instructions for this target
INSTALL( TARGETS jsoncpp_lib
    RUNTIME DESTINATION bin
    LIBRARY DESTINATION lib
    ARCHIVE DESTINATION lib
    )
@@ -30,8 +30,6 @@ template<typename AllocatedType
 class BatchAllocator
 {
 public:
-   typedef AllocatedType Type;
-
    BatchAllocator( unsigned int objectsPerPage = 255 )
      : freeHead_( 0 )
      , objectsPerPage_( objectsPerPage )
@@ -127,4 +125,4 @@ private:
 # endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION
 
 #endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED
-
+// vim: et ts=3 sts=3 sw=3 tw=0
@@ -53,8 +53,7 @@ public: // overridden from ValueArrayAllocator
       if ( minNewIndexCount > newIndexCount )
          newIndexCount = minNewIndexCount;
       void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
-      if ( !newIndexes )
-         throw std::bad_alloc();
+      JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
       indexCount = newIndexCount;
       indexes = static_cast<Value **>( newIndexes );
    }
@@ -117,8 +116,7 @@ public: // overridden from ValueArrayAllocator
       if ( minNewIndexCount > newIndexCount )
          newIndexCount = minNewIndexCount;
       void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
-      if ( !newIndexes )
-         throw std::bad_alloc();
+      JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
       indexCount = newIndexCount;
       indexes = static_cast<Value **>( newIndexes );
    }
@@ -258,8 +256,8 @@ ValueInternalArray::ValueInternalArray()
 
 ValueInternalArray::ValueInternalArray( const ValueInternalArray &other )
    : pages_( 0 )
-   , pageCount_( 0 )
    , size_( other.size_ )
+   , pageCount_( 0 )
 {
    PageIndex minNewPages = other.size_ / itemsPerPage;
    arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages );
@@ -454,3 +452,4 @@ ValueInternalArray::compare( const ValueInternalArray &other ) const
 }
 
 } // namespace Json
+// vim: et ts=3 sts=3 sw=3 tw=0
@@ -613,3 +613,4 @@ ValueInternalMap::distance( const IteratorState &x, const IteratorState &y )
 }
 
 } // namespace Json
+// vim: et ts=3 sts=3 sw=3 tw=0
@@ -1,21 +1,21 @@
-// Copyright 2007-2010 Baptiste Lepilleur
+// Copyright 2007-2011 Baptiste Lepilleur
 // Distributed under MIT license, or public domain if desired and
 // recognized in your jurisdiction.
 // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
 
-#if !defined(JSON_IS_AMALGATED)
+#if !defined(JSON_IS_AMALGAMATION)
+# include <json/assertions.h>
 # include <json/reader.h>
 # include <json/value.h>
 # include "json_tool.h"
-#endif // if !defined(JSON_IS_AMALGATED)
+#endif // if !defined(JSON_IS_AMALGAMATION)
 #include <utility>
 #include <cstdio>
 #include <cassert>
 #include <cstring>
-#include <iostream>
-#include <stdexcept>
+#include <istream>
 
-#if _MSC_VER >= 1400 // VC++ 8.0
+#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
 #endif
 
@@ -27,6 +27,8 @@ namespace Json {
 Features::Features()
    : allowComments_( true )
    , strictRoot_( false )
+   , allowDroppedNullPlaceholders_ ( false )
+   , allowNumericKeys_ ( false )
 {
 }
 
@@ -44,6 +46,8 @@ Features::strictMode()
    Features features;
    features.allowComments_ = false;
    features.strictRoot_ = true;
+   features.allowDroppedNullPlaceholders_ = false;
+   features.allowNumericKeys_ = false;
    return features;
 }
 
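The two new feature flags are plain public members, so enabling them is just an assignment before constructing the Reader. A minimal sketch of allowDroppedNullPlaceholders_ in use (hypothetical driver, not part of the change set):

#include <json/reader.h>
#include <json/value.h>
#include <cstdio>

int main()
{
    // With allowDroppedNullPlaceholders_, "[1,,3]" parses: the missing
    // element between the commas becomes a null value.
    Json::Features features = Json::Features::all();
    features.allowDroppedNullPlaceholders_ = true;
    Json::Reader reader( features );
    Json::Value root;
    if ( reader.parse( "[1,,3]", root ) )
        printf( "root[1] is %s\n", root[1].isNull() ? "null" : "not null" );
    return 0;
}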
@@ -79,13 +83,31 @@ containsNewLine( Reader::Location begin,
 // //////////////////////////////////////////////////////////////////
 
 Reader::Reader()
-   : features_( Features::all() )
+   : errors_(),
+     document_(),
+     begin_(),
+     end_(),
+     current_(),
+     lastValueEnd_(),
+     lastValue_(),
+     commentsBefore_(),
+     features_( Features::all() ),
+     collectComments_()
 {
 }
 
 
 Reader::Reader( const Features &features )
-   : features_( features )
+   : errors_(),
+     document_(),
+     begin_(),
+     end_(),
+     current_(),
+     lastValueEnd_(),
+     lastValue_(),
+     commentsBefore_(),
+     features_( features ),
+     collectComments_()
 {
 }
 
@@ -172,6 +194,17 @@ Reader::readValue()
 
    if ( collectComments_ && !commentsBefore_.empty() )
    {
+      // Remove newline characters at the end of the comments
+      size_t lastNonNewline = commentsBefore_.find_last_not_of("\r\n");
+      if (lastNonNewline != std::string::npos)
+      {
+         commentsBefore_.erase(lastNonNewline+1);
+      }
+      else
+      {
+         commentsBefore_.clear();
+      }
+
       currentValue().setComment( commentsBefore_, commentBefore );
       commentsBefore_ = "";
    }
@@ -181,9 +214,11 @@ Reader::readValue()
    {
    case tokenObjectBegin:
       successful = readObject( token );
+      currentValue().setOffsetLimit(current_ - begin_);
       break;
    case tokenArrayBegin:
       successful = readArray( token );
+      currentValue().setOffsetLimit(current_ - begin_);
       break;
    case tokenNumber:
       successful = decodeNumber( token );
@@ -193,14 +228,34 @@ Reader::readValue()
       break;
    case tokenTrue:
       currentValue() = true;
+      currentValue().setOffsetStart(token.start_ - begin_);
+      currentValue().setOffsetLimit(token.end_ - begin_);
       break;
    case tokenFalse:
       currentValue() = false;
+      currentValue().setOffsetStart(token.start_ - begin_);
+      currentValue().setOffsetLimit(token.end_ - begin_);
       break;
    case tokenNull:
       currentValue() = Value();
+      currentValue().setOffsetStart(token.start_ - begin_);
+      currentValue().setOffsetLimit(token.end_ - begin_);
       break;
+   case tokenArraySeparator:
+      if ( features_.allowDroppedNullPlaceholders_ )
+      {
+         // "Un-read" the current token and mark the current value as a null
+         // token.
+         current_--;
+         currentValue() = Value();
+         currentValue().setOffsetStart(current_ - begin_ - 1);
+         currentValue().setOffsetLimit(current_ - begin_);
+         break;
+      }
+      // Else, fall through...
    default:
+      currentValue().setOffsetStart(token.start_ - begin_);
+      currentValue().setOffsetLimit(token.end_ - begin_);
       return addError( "Syntax error: value, object or array expected.", token );
    }
 
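The setOffsetStart()/setOffsetLimit() calls record, for every parsed value, the half-open byte range it occupies in the source document. Assuming the matching getOffsetStart()/getOffsetLimit() accessors on Json::Value (introduced alongside these setters), the raw text of any value can be sliced back out:

#include <json/reader.h>
#include <json/value.h>
#include <cstdio>
#include <string>

int main()
{
    const std::string doc = "{ \"answer\": 42 }";
    Json::Value root;
    Json::Reader reader;
    if ( reader.parse( doc, root ) )
    {
        const Json::Value &answer = root["answer"];
        // [offsetStart, offsetLimit) indexes into doc; this prints "42".
        std::string raw = doc.substr( answer.getOffsetStart(),
                                      answer.getOffsetLimit() - answer.getOffsetStart() );
        printf( "%s\n", raw.c_str() );
    }
    return 0;
}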
@@ -454,6 +509,7 @@ Reader::readObject( Token &tokenStart )
    Token tokenName;
    std::string name;
    currentValue() = Value( objectValue );
+   currentValue().setOffsetStart(tokenStart.start_ - begin_);
    while ( readToken( tokenName ) )
    {
       bool initialTokenOk = true;
@@ -463,12 +519,24 @@ Reader::readObject( Token &tokenStart )
          break;
       if ( tokenName.type_ == tokenObjectEnd && name.empty() )  // empty object
          return true;
-      if ( tokenName.type_ != tokenString )
-         break;
 
       name = "";
-      if ( !decodeString( tokenName, name ) )
-         return recoverFromError( tokenObjectEnd );
+      if ( tokenName.type_ == tokenString )
+      {
+         if ( !decodeString( tokenName, name ) )
+            return recoverFromError( tokenObjectEnd );
+      }
+      else if ( tokenName.type_ == tokenNumber &&
+                features_.allowNumericKeys_ )
+      {
+         Value numberName;
+         if ( !decodeNumber( tokenName, numberName ) )
+            return recoverFromError( tokenObjectEnd );
+         name = numberName.asString();
+      }
+      else
+      {
+         break;
+      }
 
       Token colon;
       if ( !readToken( colon ) || colon.type_ != tokenMemberSeparator )
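With allowNumericKeys_ set, an unquoted number in key position is decoded through decodeNumber() and stored under its string form. A hypothetical driver:

#include <json/reader.h>
#include <json/value.h>
#include <cstdio>

int main()
{
    Json::Features features = Json::Features::all();
    features.allowNumericKeys_ = true;
    Json::Reader reader( features );
    Json::Value root;
    // The numeric key 123 is accepted and stored as the string "123".
    if ( reader.parse( "{ 123: \"value\" }", root ) )
        printf( "%s\n", root["123"].asCString() );
    return 0;
}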
@@ -488,7 +556,7 @@ Reader::readObject( Token &tokenStart )
       if ( !readToken( comma )
            || ( comma.type_ != tokenObjectEnd &&
                 comma.type_ != tokenArraySeparator &&
                 comma.type_ != tokenComment ) )
       {
          return addErrorAndRecover( "Missing ',' or '}' in object declaration",
                                     comma,
@@ -511,6 +579,7 @@ bool
 Reader::readArray( Token &tokenStart )
 {
    currentValue() = Value( arrayValue );
+   currentValue().setOffsetStart(tokenStart.start_ - begin_);
    skipSpaces();
    if ( *current_ == ']' ) // empty array
    {
@@ -519,7 +588,7 @@ Reader::readArray( Token &tokenStart )
       return true;
    }
    int index = 0;
-   while ( true )
+   for (;;)
    {
       Value &value = currentValue()[ index++ ];
       nodes_.push( &value );
@@ -552,6 +621,19 @@ Reader::readArray( Token &tokenStart )
 
 bool
 Reader::decodeNumber( Token &token )
+{
+   Value decoded;
+   if ( !decodeNumber( token, decoded ) )
+      return false;
+   currentValue() = decoded;
+   currentValue().setOffsetStart(token.start_ - begin_);
+   currentValue().setOffsetLimit(token.end_ - begin_);
+   return true;
+}
+
+
+bool
+Reader::decodeNumber( Token &token, Value &decoded )
 {
    bool isDouble = false;
    for ( Location inspect = token.start_; inspect != token.end_; ++inspect )
@@ -561,7 +643,7 @@ Reader::decodeNumber( Token &token )
                  || ( *inspect == '-' && inspect != token.start_ );
    }
    if ( isDouble )
-      return decodeDouble( token );
+      return decodeDouble( token, decoded );
    // Attempts to parse the number as an integer. If the number is
    // larger than the maximum supported value of an integer then
    // we decode the number as a double.
@@ -572,8 +654,6 @@ Reader::decodeNumber( Token &token )
    Value::LargestUInt maxIntegerValue = isNegative ? Value::LargestUInt(-Value::minLargestInt)
                                                    : Value::maxLargestUInt;
    Value::LargestUInt threshold = maxIntegerValue / 10;
-   Value::UInt lastDigitThreshold = Value::UInt( maxIntegerValue % 10 );
-   assert( lastDigitThreshold >=0  &&  lastDigitThreshold <= 9 );
    Value::LargestUInt value = 0;
    while ( current < token.end_ )
    {
@@ -583,49 +663,78 @@ Reader::decodeNumber( Token &token )
       Value::UInt digit(c - '0');
       if ( value >= threshold )
       {
-         // If the current digit is not the last one, or if it is
-         // greater than the last digit of the maximum integer value,
-         // the parse the number as a double.
-         if ( current != token.end_  ||  digit > lastDigitThreshold )
+         // We've hit or exceeded the max value divided by 10 (rounded down). If
+         // a) we've only just touched the limit, b) this is the last digit, and
+         // c) it's small enough to fit in that rounding delta, we're okay.
+         // Otherwise treat this number as a double to avoid overflow.
+         if (value > threshold ||
+             current != token.end_ ||
+             digit > maxIntegerValue % 10)
          {
-            return decodeDouble( token );
+            return decodeDouble( token, decoded );
         }
      }
      value = value * 10 + digit;
   }
   if ( isNegative )
-      currentValue() = -Value::LargestInt( value );
+      decoded = -Value::LargestInt( value );
   else if ( value <= Value::LargestUInt(Value::maxInt) )
-      currentValue() = Value::LargestInt( value );
+      decoded = Value::LargestInt( value );
   else
-      currentValue() = value;
+      decoded = value;
   return true;
 }
 
 
 bool
 Reader::decodeDouble( Token &token )
+{
+   Value decoded;
+   if ( !decodeDouble( token, decoded ) )
+      return false;
+   currentValue() = decoded;
+   currentValue().setOffsetStart(token.start_ - begin_);
+   currentValue().setOffsetLimit(token.end_ - begin_);
+   return true;
+}
+
+
+bool
+Reader::decodeDouble( Token &token, Value &decoded )
 {
    double value = 0;
    const int bufferSize = 32;
    int count;
    int length = int(token.end_ - token.start_);
+
+   // Sanity check to avoid buffer overflow exploits.
+   if (length < 0) {
+      return addError( "Unable to parse token length", token );
+   }
+
+   // Avoid using a string constant for the format control string given to
+   // sscanf, as this can cause hard to debug crashes on OS X. See here for more
+   // info:
+   //
+   //     http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html
+   char format[] = "%lf";
+
    if ( length <= bufferSize )
    {
       Char buffer[bufferSize+1];
       memcpy( buffer, token.start_, length );
       buffer[length] = 0;
-      count = sscanf( buffer, "%lf", &value );
+      count = sscanf( buffer, format, &value );
   }
   else
   {
      std::string buffer( token.start_, token.end_ );
-      count = sscanf( buffer.c_str(), "%lf", &value );
+      count = sscanf( buffer.c_str(), format, &value );
   }
 
   if ( count != 1 )
      return addError( "'" + std::string( token.start_, token.end_ ) + "' is not a number.", token );
-   currentValue() = value;
+   decoded = value;
   return true;
 }
 
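The rewritten guard never computes value * 10 + digit when it could overflow: at value == threshold only a final digit no greater than maxIntegerValue % 10 is allowed through. A standalone sketch of the same arithmetic on plain uint64_t (simplified from the reader code, which additionally tracks the token cursor):

#include <cstdint>
#include <cstdio>

// Returns false when appending `digit` to `value` would exceed `maxValue`.
static bool appendDigit( uint64_t &value, unsigned digit, uint64_t maxValue )
{
    const uint64_t threshold = maxValue / 10;
    if ( value > threshold || ( value == threshold && digit > maxValue % 10 ) )
        return false; // would overflow; the reader falls back to double here
    value = value * 10 + digit;
    return true;
}

int main()
{
    uint64_t v = 0;
    const char *text = "18446744073709551616"; // 2^64, one past UINT64_MAX
    for ( const char *p = text; *p; ++p )
        if ( !appendDigit( v, unsigned(*p - '0'), UINT64_MAX ) )
        {
            puts( "overflow -> decode as double" ); // cf. test_real_12 below
            return 0;
        }
    printf( "%llu\n", (unsigned long long)v );
    return 0;
}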
@@ -637,6 +746,8 @@ Reader::decodeString( Token &token )
    if ( !decodeString( token, decoded ) )
       return false;
    currentValue() = decoded;
+   currentValue().setOffsetStart(token.start_ - begin_);
+   currentValue().setOffsetLimit(token.end_ - begin_);
    return true;
 }
 
@@ -762,7 +873,7 @@ Reader::recoverFromError( TokenType skipUntilToken )
 {
    int errorCount = int(errors_.size());
    Token skip;
-   while ( true )
+   for (;;)
    {
       if ( !readToken(skip) )
          errors_.resize( errorCount ); // discard errors caused by recovery
@@ -836,7 +947,11 @@ Reader::getLocationLineAndColumn( Location location ) const
    int line, column;
    getLocationLineAndColumn( location, line, column );
    char buffer[18+16+16+1];
-   sprintf( buffer, "Line %d, Column %d", line, column );
+#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
+   sprintf_s(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
+#else
+   snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
+#endif
    return buffer;
 }
 
@@ -867,14 +982,40 @@ Reader::getFormattedErrorMessages() const
 }
 
 
+std::vector<Reader::StructuredError>
+Reader::getStructuredErrors() const
+{
+   std::vector<Reader::StructuredError> allErrors;
+   for ( Errors::const_iterator itError = errors_.begin();
+         itError != errors_.end();
+         ++itError )
+   {
+      const ErrorInfo &error = *itError;
+      Reader::StructuredError structured;
+      structured.offset_start = error.token_.start_ - begin_;
+      structured.offset_limit = error.token_.end_ - begin_;
+      structured.message = error.message_;
+      allErrors.push_back(structured);
+   }
+   return allErrors;
+}
+
+
 std::istream& operator>>( std::istream &sin, Value &root )
 {
    Json::Reader reader;
    bool ok = reader.parse(sin, root, true);
-   //JSON_ASSERT( ok );
-   if (!ok) throw std::runtime_error(reader.getFormattedErrorMessages());
+   if (!ok) {
+      fprintf(
+         stderr,
+         "Error from reader: %s",
+         reader.getFormattedErrorMessages().c_str());
+
+      JSON_FAIL_MESSAGE("reader error");
+   }
    return sin;
 }
 
 
 } // namespace Json
+// vim: et ts=3 sts=3 sw=3 tw=0
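getStructuredErrors() exposes the same information as getFormattedErrorMessages(), but with machine-usable byte offsets instead of pre-rendered line/column text. A hypothetical consumer:

#include <json/reader.h>
#include <json/value.h>
#include <cstdio>
#include <vector>

int main()
{
    Json::Value root;
    Json::Reader reader;
    if ( !reader.parse( "{ \"a\": }", root ) ) // malformed: missing value
    {
        std::vector<Json::Reader::StructuredError> errors =
            reader.getStructuredErrors();
        for ( size_t i = 0; i < errors.size(); ++i )
            printf( "[%d..%d) %s\n",
                    int(errors[i].offset_start),
                    int(errors[i].offset_limit),
                    errors[i].message.c_str() );
    }
    return 0;
}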
File diff suppressed because it is too large
@@ -149,6 +149,7 @@ ValueIteratorBase::copy( const SelfType &other )
 {
 #ifndef JSON_VALUE_USE_INTERNAL_MAP
    current_ = other.current_;
+   isNull_ = other.isNull_;
 #else
    if ( isArray_ )
       iterator_.array_ = other.iterator_.array_;
@@ -297,3 +298,4 @@ ValueIterator::operator =( const SelfType &other )
 }
 
 } // namespace Json
+// vim: et ts=3 sts=3 sw=3 tw=0
@@ -1,21 +1,20 @@
-// Copyright 2007-2010 Baptiste Lepilleur
+// Copyright 2011 Baptiste Lepilleur
 // Distributed under MIT license, or public domain if desired and
 // recognized in your jurisdiction.
 // See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
 
-#if !defined(JSON_IS_AMALGATED)
+#if !defined(JSON_IS_AMALGAMATION)
 # include <json/writer.h>
 # include "json_tool.h"
-#endif // if !defined(JSON_IS_AMALGATED)
+#endif // if !defined(JSON_IS_AMALGAMATION)
 #include <utility>
 #include <assert.h>
 #include <stdio.h>
 #include <string.h>
-#include <iostream>
 #include <sstream>
 #include <iomanip>
 
-#if _MSC_VER >= 1400 // VC++ 8.0
+#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
 #pragma warning( disable : 4996 ) // disable warning about strdup being deprecated.
 #endif
 
@@ -74,40 +73,19 @@ std::string valueToString( UInt value )
 
 std::string valueToString( double value )
 {
+   // Allocate a buffer that is more than large enough to store the 16 digits of
+   // precision requested below.
    char buffer[32];
+
+   // Print into the buffer. We need not request the alternative representation
+   // that always has a decimal point because JSON doesn't distingish the
+   // concepts of reals and integers.
 #if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with visual studio 2005 to avoid warning.
-   sprintf_s(buffer, sizeof(buffer), "%#.16g", value);
+   sprintf_s(buffer, sizeof(buffer), "%.16g", value);
 #else
-   sprintf(buffer, "%#.16g", value);
+   snprintf(buffer, sizeof(buffer), "%.16g", value);
 #endif
-   char* ch = buffer + strlen(buffer) - 1;
-   if (*ch != '0') return buffer; // nothing to truncate, so save time
-   while(ch > buffer && *ch == '0'){
-     --ch;
-   }
-   char* last_nonzero = ch;
-   while(ch >= buffer){
-     switch(*ch){
-       case '0':
-       case '1':
-       case '2':
-       case '3':
-       case '4':
-       case '5':
-       case '6':
-       case '7':
-       case '8':
-       case '9':
-         --ch;
-         continue;
-       case '.':
-         // Truncate zeroes to save bytes in output, but keep one.
-         *(last_nonzero+2) = '\0';
-         return buffer;
-       default:
-         return buffer;
-     }
-   }
    return buffer;
 }
 
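Dropping the "#" flag and the hand-rolled zero-truncation pass changes the rendering of integral reals: "%#.16g" followed by truncation produced "1.0", while plain "%.16g" yields "1". A small check (assuming the public double overload declared in json/writer.h):

#include <json/writer.h>
#include <cstdio>

int main()
{
    printf( "%s\n", Json::valueToString( 1.0 ).c_str() );  // "1"
    printf( "%s\n", Json::valueToString( 0.5 ).c_str() );  // "0.5"
    return 0;
}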
@@ -119,6 +97,8 @@ std::string valueToString( bool value )
 
 std::string valueToQuotedString( const char *value )
 {
+   if (value == NULL)
+      return "";
    // Not sure how to handle unicode...
    if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL && !containsControlCharacter( value ))
       return std::string("\"") + value + "\"";
@@ -191,7 +171,8 @@ Writer::~Writer()
 // //////////////////////////////////////////////////////////////////
 
 FastWriter::FastWriter()
-   : yamlCompatiblityEnabled_( false )
+   : yamlCompatiblityEnabled_( false ),
+     dropNullPlaceholders_( false )
 {
 }
 
@@ -203,6 +184,13 @@ FastWriter::enableYAMLCompatibility()
 }
 
 
+void
+FastWriter::dropNullPlaceholders()
+{
+   dropNullPlaceholders_ = true;
+}
+
+
 std::string
 FastWriter::write( const Value &root )
 {
@@ -219,7 +207,7 @@ FastWriter::writeValue( const Value &value )
    switch ( value.type() )
    {
    case nullValue:
-      document_ += "null";
+      if (!dropNullPlaceholders_) document_ += "null";
       break;
    case intValue:
       document_ += valueToString( value.asLargestInt() );
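dropNullPlaceholders() is the writer-side mirror of the reader's allowDroppedNullPlaceholders_: null array members are emitted as empty slots. A hypothetical round-trip driver:

#include <json/value.h>
#include <json/writer.h>
#include <cstdio>

int main()
{
    Json::Value arr( Json::arrayValue );
    arr.append( 1 );
    arr.append( Json::Value() ); // null element
    arr.append( 3 );

    Json::FastWriter writer;
    printf( "%s", writer.write( arr ).c_str() ); // [1,null,3]
    writer.dropNullPlaceholders();
    printf( "%s", writer.write( arr ).c_str() ); // [1,,3]
    return 0;
}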
@@ -278,6 +266,7 @@ FastWriter::writeValue( const Value &value )
 StyledWriter::StyledWriter()
    : rightMargin_( 74 )
    , indentSize_( 3 )
+   , addChildValues_()
 {
 }
 
@@ -332,7 +321,7 @@ StyledWriter::writeValue( const Value &value )
          writeWithIndent( "{" );
          indent();
          Value::Members::iterator it = members.begin();
-         while ( true )
+         for (;;)
          {
             const std::string &name = *it;
             const Value &childValue = value[name];
@@ -372,7 +361,7 @@ StyledWriter::writeArrayValue( const Value &value )
          indent();
          bool hasChildValue = !childValues_.empty();
          unsigned index =0;
-         while ( true )
+         for (;;)
         {
            const Value &childValue = value[index];
            writeCommentBeforeValue( childValue );
@@ -494,7 +483,20 @@ StyledWriter::writeCommentBeforeValue( const Value &root )
 {
    if ( !root.hasComment( commentBefore ) )
       return;
-   document_ += normalizeEOL( root.getComment( commentBefore ) );
+
+   document_ += "\n";
+   writeIndent();
+   std::string normalizedComment = normalizeEOL( root.getComment( commentBefore ) );
+   std::string::const_iterator iter = normalizedComment.begin();
+   while ( iter != normalizedComment.end() )
+   {
+      document_ += *iter;
+      if ( *iter == '\n' && *(iter+1) == '/' )
+         writeIndent();
+      ++iter;
+   }
+
+   // Comments are stripped of newlines, so add one here
    document_ += "\n";
 }
 
@@ -554,6 +556,7 @@ StyledStreamWriter::StyledStreamWriter( std::string indentation )
    : document_(NULL)
    , rightMargin_( 74 )
    , indentation_( indentation )
+   , addChildValues_()
 {
 }
 
@@ -608,7 +611,7 @@ StyledStreamWriter::writeValue( const Value &value )
          writeWithIndent( "{" );
          indent();
          Value::Members::iterator it = members.begin();
-         while ( true )
+         for (;;)
          {
             const std::string &name = *it;
             const Value &childValue = value[name];
@@ -648,7 +651,7 @@ StyledStreamWriter::writeArrayValue( const Value &value )
          indent();
          bool hasChildValue = !childValues_.empty();
          unsigned index =0;
-         while ( true )
+         for (;;)
         {
            const Value &childValue = value[index];
            writeCommentBeforeValue( childValue );
@@ -656,7 +659,7 @@ StyledStreamWriter::writeArrayValue( const Value &value )
               writeWithIndent( childValues_[index] );
            else
            {
               writeIndent();
               writeValue( childValue );
            }
            if ( ++index == size )
@@ -836,3 +839,4 @@ std::ostream& operator<<( std::ostream &sout, const Value &root )
 
 
 } // namespace Json
+// vim: et ts=3 sts=3 sw=3 tw=0
14
src/lib_json/version.h.in
Normal file
@@ -0,0 +1,14 @@
// DO NOT EDIT. This file is generated by CMake from "version"
// and "version.h.in" files.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED

# define JSONCPP_VERSION_STRING "@JSONCPP_VERSION@"
# define JSONCPP_VERSION_MAJOR @JSONCPP_VERSION_MAJOR@
# define JSONCPP_VERSION_MINOR @JSONCPP_VERSION_MINOR@
# define JSONCPP_VERSION_PATCH @JSONCPP_VERSION_PATCH@
# define JSONCPP_VERSION_QUALIFIER @JSONCPP_VERSION_QUALIFIER@
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))

#endif // JSON_VERSION_H_INCLUDED
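JSONCPP_VERSION_HEXA packs major/minor/patch into a single comparable integer, so client code can gate on a minimum version with one preprocessor test. A sketch (the 0.6.0 threshold is illustrative; assumes the generated header is reachable as <json/version.h>):

#include <json/version.h>
#include <cstdio>

int main()
{
#if JSONCPP_VERSION_HEXA >= ((0 << 24) | (6 << 16) | (0 << 8))
    printf( "jsoncpp %s (>= 0.6.0)\n", JSONCPP_VERSION_STRING );
#endif
    return 0;
}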
22
src/test_lib_json/CMakeLists.txt
Normal file
@@ -0,0 +1,22 @@

IF(JSONCPP_LIB_BUILD_SHARED)
    ADD_DEFINITIONS( -DJSON_DLL )
ENDIF(JSONCPP_LIB_BUILD_SHARED)

ADD_EXECUTABLE( jsoncpp_test
    jsontest.cpp
    jsontest.h
    main.cpp
    )

TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)

# Run unit tests in post-build
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
    ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
        POST_BUILD
        COMMAND jsoncpp_test)
ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST)

SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)
@@ -249,57 +249,24 @@ TestResult::addToLastFailure( const std::string &message )
    return *this;
 }
 
 
 TestResult &
-TestResult::operator << ( bool value )
-{
-   return addToLastFailure( value ? "true" : "false" );
+TestResult::operator << ( Json::Int64 value ) {
+   return addToLastFailure( Json::valueToString(value) );
 }
 
 
 TestResult &
-TestResult::operator << ( int value )
-{
-   char buffer[32];
-   sprintf( buffer, "%d", value );
-   return addToLastFailure( buffer );
+TestResult::operator << ( Json::UInt64 value ) {
+   return addToLastFailure( Json::valueToString(value) );
 }
 
 
 TestResult &
-TestResult::operator << ( unsigned int value )
-{
-   char buffer[32];
-   sprintf( buffer, "%u", value );
-   return addToLastFailure( buffer );
+TestResult::operator << ( bool value ) {
+   return addToLastFailure(value ? "true" : "false");
 }
 
 
-TestResult &
-TestResult::operator << ( double value )
-{
-   char buffer[32];
-   sprintf( buffer, "%16g", value );
-   return addToLastFailure( buffer );
-}
-
-
-TestResult &
-TestResult::operator << ( const char *value )
-{
-   return addToLastFailure( value ? value
-                                  : "<NULL>" );
-}
-
-
-TestResult &
-TestResult::operator << ( const std::string &value )
-{
-   return addToLastFailure( value );
-}
-
-
 
 // class TestCase
 // //////////////////////////////////////////////////////////////////
 
@@ -373,7 +340,7 @@ Runner::runTestAt( unsigned int index, TestResult &result ) const
    catch ( const std::exception &e )
    {
       result.addFailure( __FILE__, __LINE__,
-                         "Unexpected exception caugth:" ) << e.what();
+                         "Unexpected exception caught:" ) << e.what();
    }
 #endif // if JSON_USE_EXCEPTION
    delete test;
@@ -513,10 +480,10 @@ Runner::runCommandLine( int argc, const char *argv[] ) const
 }
 
 
-#if defined(_MSC_VER)
+#if defined(_MSC_VER) && defined(_DEBUG)
 // Hook MSVCRT assertions to prevent dialog from appearing
 static int
-msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
+msvcrtSilentReportHook( int reportType, char *message, int * /*returnValue*/ )
 {
    // The default CRT handling of error and assertion is to display
    // an error dialog to the user.
@@ -550,9 +517,11 @@ msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
 void
 Runner::preventDialogOnCrash()
 {
-#if defined(_MSC_VER)
+#if defined(_MSC_VER) && defined(_DEBUG)
    // Install a hook to prevent MSVCRT error and assertion from
-   // popping a dialog.
+   // popping a dialog
+   // This function a NO-OP in release configuration
+   // (which cause warning since msvcrtSilentReportHook is not referenced)
    _CrtSetReportHook( &msvcrtSilentReportHook );
 #endif // if defined(_MSC_VER)
 
@@ -606,3 +575,4 @@ checkStringEqual( TestResult &result,
 
 
 } // namespace JsonTest
+// vim: et ts=4 sts=4 sw=4 tw=0
@@ -7,8 +7,11 @@
 # define JSONTEST_H_INCLUDED
 
 # include <json/config.h>
+# include <json/value.h>
+# include <json/writer.h>
 # include <stdio.h>
 # include <deque>
+# include <sstream>
 # include <string>
 
 // //////////////////////////////////////////////////////////////////
@@ -84,12 +87,21 @@ namespace JsonTest {
 
       void printFailure( bool printTestName ) const;
 
+      // Generic operator that will work with anything ostream can deal with.
+      template <typename T>
+      TestResult &operator << ( const T& value ) {
+         std::ostringstream oss;
+         oss.precision( 16 );
+         oss.setf( std::ios_base::floatfield );
+         oss << value;
+         return addToLastFailure(oss.str());
+      }
+
+      // Specialized versions.
       TestResult &operator << ( bool value );
-      TestResult &operator << ( int value );
-      TestResult &operator << ( unsigned int value );
-      TestResult &operator << ( double value );
-      TestResult &operator << ( const char *value );
-      TestResult &operator << ( const std::string &value );
+      // std:ostream does not support 64bits integers on all STL implementation
+      TestResult &operator << ( Json::Int64 value );
+      TestResult &operator << ( Json::UInt64 value );
 
    private:
       TestResult &addToLastFailure( const std::string &message );
@@ -173,20 +185,21 @@ namespace JsonTest {
       Factories tests_;
    };
 
-   template<typename T>
+   template<typename T, typename U>
    TestResult &
-   checkEqual( TestResult &result, const T &expected, const T &actual,
+   checkEqual( TestResult &result, const T &expected, const U &actual,
               const char *file, unsigned int line, const char *expr )
   {
-      if ( expected != actual )
+      if ( static_cast< U >( expected ) != actual )
      {
        result.addFailure( file, line, expr );
-         result << "Expected: " << expected << "\n";
+         result << "Expected: " << static_cast< U >( expected ) << "\n";
        result << "Actual : " << actual;
     }
     return result;
  }
 
 
  TestResult &
  checkStringEqual( TestResult &result,
                    const std::string &expected, const std::string &actual,
@@ -216,8 +229,7 @@ namespace JsonTest {
       result_->predicateStackTail_ = &_minitest_Context;             \
       (expr);                                                        \
       result_->popPredicateContext();                                \
-   }                                                                 \
-   *result_
+   }
 
 /// \brief Asserts that two values are equals.
 #define JSONTEST_ASSERT_EQUAL( expected, actual ) \
@@ -229,6 +241,7 @@ namespace JsonTest {
 #define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
    JsonTest::checkStringEqual( *result_,                 \
       std::string(expected), std::string(actual),        \
+      __FILE__, __LINE__,                                \
       #expected " == " #actual )
 
 /// \brief Begin a fixture test case.
@@ -257,3 +270,4 @@ namespace JsonTest {
    (runner).add( JSONTEST_FIXTURE_FACTORY( FixtureType, name ) )
 
 #endif // ifndef JSONTEST_H_INCLUDED
+// vim: et ts=4 sts=4 sw=4 tw=0
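Splitting checkEqual's template parameter in two lets JSONTEST_ASSERT_EQUAL compare operands of different types: the expected side is cast to the actual's type before the comparison, where the old single-parameter version could not deduce T at all. A hypothetical fixture sketch (fixture name and macro usage are illustrative):

#include "jsontest.h"
#include <json/value.h>

struct ValueTest : JsonTest::TestCase {};

JSONTEST_FIXTURE( ValueTest, mixedTypeCompare )
{
    Json::Value v( 42 );
    // expected is int, actual is Json::UInt64; checkEqual<int, Json::UInt64>
    // casts the expected side to Json::UInt64 before comparing.
    JSONTEST_ASSERT_EQUAL( 42, v.asUInt64() );
}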
File diff suppressed because it is too large
@@ -1 +1 @@
 [ 1 2 3]
2122
test/data/test_array_07.expected
Normal file
File diff suppressed because it is too large
2
test/data/test_array_07.json
Normal file
File diff suppressed because one or more lines are too long
@@ -1,8 +1,8 @@
 .={}
 .test=[]
 .test[0]={}
 .test[0].a="aaa"
 .test[1]={}
 .test[1].b="bbb"
 .test[2]={}
 .test[2].c="ccc"
@@ -1,8 +1,8 @@
 {
    "test":
       [
          { "a" : "aaa" },  // Comment for a
          { "b" : "bbb" },  // Comment for b
          { "c" : "ccc" }   // Comment for c
       ]
 }
7
test/data/test_comment_02.expected
Normal file
@@ -0,0 +1,7 @@
.={}
.c-test={}
.c-test.a=1
.c-test.b=2
.cpp-test={}
.cpp-test.c=3
.cpp-test.d=4
16
test/data/test_comment_02.json
Normal file
@@ -0,0 +1,16 @@
{
   /* C-style comment

      C-style-2 comment */
   "c-test" : {
      "a" : 1,
      /* Internal comment c-style */
      "b" : 2
   },
   // C++-style comment
   "cpp-test" : {
      // Internal comment cpp-style
      "c" : 3,
      "d" : 4
   }
}
@@ -1 +1 @@
 .=9223372036854775808
@@ -1,2 +1,2 @@
 9223372036854775808
 
@@ -1 +1 @@
 .=-9223372036854775808
@@ -1,2 +1,2 @@
 -9223372036854775808
 
@@ -1 +1 @@
 .=18446744073709551615
@@ -1,2 +1,2 @@
 18446744073709551615
 
1
test/data/test_real_08.expected
Normal file
@@ -0,0 +1 @@
.=4300000001
4
test/data/test_real_08.json
Normal file
@@ -0,0 +1,4 @@
// Out of 32-bit integer range, switch to double in 32-bit mode. Length the
// same as UINT_MAX in base 10 and digit less than UINT_MAX's last digit in
// order to catch a bug in the parsing code.
4300000001
1
test/data/test_real_09.expected
Normal file
@@ -0,0 +1 @@
.=1.9e+19
4
test/data/test_real_09.json
Normal file
@@ -0,0 +1,4 @@
// Out of 64-bit integer range, switch to double in all modes. Length the same
// as ULONG_MAX in base 10 and digit less than ULONG_MAX's last digit in order
// to catch a bug in the parsing code.
19000000000000000001
1
test/data/test_real_10.expected
Normal file
@@ -0,0 +1 @@
.=-2200000001
4
test/data/test_real_10.json
Normal file
@@ -0,0 +1,4 @@
// Out of 32-bit signed integer range, switch to double in all modes. Length
// the same as INT_MIN in base 10 and digit less than INT_MIN's last digit in
// order to catch a bug in the parsing code.
-2200000001
1
test/data/test_real_11.expected
Normal file
@@ -0,0 +1 @@
.=-9.3e+18
4
test/data/test_real_11.json
Normal file
@@ -0,0 +1,4 @@
// Out of 64-bit signed integer range, switch to double in all modes. Length
// the same as LONG_MIN in base 10 and digit less than LONG_MIN's last digit in
// order to catch a bug in the parsing code.
-9300000000000000001
1
test/data/test_real_12.expected
Normal file
@@ -0,0 +1 @@
.=1.844674407370955e+19
2
test/data/test_real_12.json
Normal file
@@ -0,0 +1,2 @@
// 2^64 -> switch to double.
18446744073709551616
@@ -1 +1 @@
 "!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
@@ -1 +1 @@
 "!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~!\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~"
@@ -1 +1 @@
 "http:\/\/jsoncpp.sourceforge.net\/"
2
test/data/test_string_04.expected
Normal file
@@ -0,0 +1,2 @@
.=""abc\def""

2
test/data/test_string_04.json
Normal file
@@ -0,0 +1,2 @@
"\"abc\\def\""

2
test/data/test_string_05.expected
Normal file
@@ -0,0 +1,2 @@
.="\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"

2
test/data/test_string_05.json
Normal file
@@ -0,0 +1,2 @@
"\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\"

@@ -1 +1 @@
 .="a"
@@ -1 +1 @@
 .="¢"
@@ -1 +1 @@
 .="€"
@@ -1 +1 @@
 .="𝄞"
@@ -1,2 +1,2 @@
 .="Zażółć gęślą jaźń"
 
@@ -1,3 +1,3 @@
 Test suite from http://json.org/JSON_checker/.
 
 If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files.
@@ -1,73 +1,73 @@
import sys
import os
import os.path
import subprocess
from glob import glob
import optparse

# Prefix used to wrap the test binary when --valgrind is requested.
VALGRIND_CMD = 'valgrind --tool=memcheck --leak-check=yes --undef-value-errors=yes'

class TestProxy(object):
    """Runs the unit-test binary, optionally under valgrind, capturing its output."""

    def __init__( self, test_exe_path, use_valgrind=False ):
        self.test_exe_path = os.path.normpath( os.path.abspath( test_exe_path ) )
        self.use_valgrind = use_valgrind

    def run( self, options ):
        if self.use_valgrind:
            cmd = VALGRIND_CMD.split()
        else:
            cmd = []
        cmd.extend( [self.test_exe_path, '--test-auto'] + options )
        process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        stdout = process.communicate()[0]
        if process.returncode:
            return False, stdout
        return True, stdout

def runAllTests( exe_path, use_valgrind=False ):
    test_proxy = TestProxy( exe_path, use_valgrind=use_valgrind )
    status, test_names = test_proxy.run( ['--list-tests'] )
    if not status:
        print >> sys.stderr, "Failed to obtain unit tests list:\n" + test_names
        return 1
    test_names = [name.strip() for name in test_names.strip().split('\n')]
    failures = []
    # Run each test in its own process so one crash cannot take down the rest.
    for name in test_names:
        print 'TESTING %s:' % name,
        succeed, result = test_proxy.run( ['--test', name] )
        if succeed:
            print 'OK'
        else:
            failures.append( (name, result) )
            print 'FAILED'
    failed_count = len(failures)
    pass_count = len(test_names) - failed_count
    if failed_count:
        print
        for name, result in failures:
            print result
        print '%d/%d tests passed (%d failure(s))' % (
            pass_count, len(test_names), failed_count)
        return 1
    else:
        print 'All %d tests passed' % len(test_names)
        return 0

def main():
    from optparse import OptionParser
    parser = OptionParser( usage="%prog [options] <path to test_lib_json.exe>" )
    parser.add_option("--valgrind",
              action="store_true", dest="valgrind", default=False,
              help="run all the tests using valgrind to detect memory leaks")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()

    if len(args) != 1:
        parser.error( 'Must provide the path to the test_lib_json executable.' )
        sys.exit( 1 )

    exit_code = runAllTests( args[0], use_valgrind=options.valgrind )
    sys.exit( exit_code )

if __name__ == '__main__':
    main()
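For reference, the runner takes exactly one positional argument, the path to the built test_lib_json binary: it first asks the binary for its test list via --list-tests, then reruns each test individually. A typical invocation, assuming the script is saved as rununittests.py and a scons build output path (both illustrative; actual paths depend on the checkout and build configuration):

python rununittests.py buildscons/linux-gcc/test_lib_json/test_lib_json
python rununittests.py --valgrind buildscons/linux-gcc/test_lib_json/test_lib_json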