Compare commits


3 Commits

Author              SHA1        Message            Date
Baptiste Lepilleur  1e9892addb  Release 0.6.0-rc1  2011-05-02 07:10:38 +00:00
Baptiste Lepilleur  c2eabf0cf8  Release 0.6.0-rc1  2011-05-02 06:31:34 +00:00
Baptiste Lepilleur  3269eefd84  Release 0.6.0-rc1  2011-05-01 22:12:16 +00:00
141 changed files with 10604 additions and 19591 deletions

.clang-format

@@ -1,47 +0,0 @@
---
# BasedOnStyle: LLVM
AccessModifierOffset: -2
ConstructorInitializerIndentWidth: 4
AlignEscapedNewlinesLeft: false
AlignTrailingComments: true
AllowAllParametersOfDeclarationOnNextLine: true
AllowShortIfStatementsOnASingleLine: false
AllowShortLoopsOnASingleLine: false
AlwaysBreakTemplateDeclarations: false
AlwaysBreakBeforeMultilineStrings: false
BreakBeforeBinaryOperators: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializersBeforeComma: false
BinPackParameters: false
ColumnLimit: 80
ConstructorInitializerAllOnOneLineOrOnePerLine: false
DerivePointerBinding: false
ExperimentalAutoDetectBinPacking: false
IndentCaseLabels: false
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ObjCSpaceBeforeProtocolList: true
PenaltyBreakBeforeFirstCallParameter: 19
PenaltyBreakComment: 60
PenaltyBreakString: 1000
PenaltyBreakFirstLessLess: 120
PenaltyExcessCharacter: 1000000
PenaltyReturnTypeOnItsOwnLine: 60
PointerBindsToType: true
SpacesBeforeTrailingComments: 1
Cpp11BracedListStyle: false
Standard: Cpp03
IndentWidth: 2
TabWidth: 8
UseTab: Never
BreakBeforeBraces: Attach
IndentFunctionDeclarationAfterType: false
SpacesInParentheses: false
SpacesInAngles: false
SpaceInEmptyParentheses: false
SpacesInCStyleCastParentheses: false
SpaceAfterControlStatementKeyword: true
SpaceBeforeAssignmentOperators: true
ContinuationIndentWidth: 4
...

.gitattributes

@@ -1,11 +0,0 @@
* text=auto
*.h text
*.cpp text
*.json text
*.in text
*.sh eol=lf
*.bat eol=crlf
*.vcproj eol=crlf
*.vcxproj eol=crlf
*.sln eol=crlf
devtools/agent_vm* eol=crlf

.gitignore

@@ -1,55 +0,0 @@
/build/
/build-*/
*.pyc
*.swp
*.actual
*.actual-rewrite
*.process-output
*.rewrite
/bin/
/libs/
/doc/doxyfile
/dist/
#/version
#/include/json/version.h
# MSVC project files:
*.sln
*.vcxproj
*.filters
*.user
*.sdf
*.opensdf
*.suo
# MSVC build files:
*.lib
*.obj
*.tlog/
*.pdb
# CMake-generated files:
CMakeFiles/
*.cmake
/pkg-config/jsoncpp.pc
jsoncpp_lib_static.dir/
# In case someone runs cmake in the root-dir:
/CMakeCache.txt
/Makefile
/include/Makefile
/src/Makefile
/src/jsontestrunner/Makefile
/src/jsontestrunner/jsontestrunner_exe
/src/lib_json/Makefile
/src/test_lib_json/Makefile
/src/test_lib_json/jsoncpp_test
*.a
# eclipse project files
.project
.cproject
/.settings/
# DS_Store
.DS_Store

.travis.yml

@@ -1,55 +0,0 @@
# Build matrix / environment variable are explained on:
# http://about.travis-ci.org/docs/user/build-configuration/
# This file can be validated on:
# http://lint.travis-ci.org/
# See also
# http://stackoverflow.com/questions/22111549/travis-ci-with-clang-3-4-and-c11/30925448#30925448
# to allow C++11, though we are not yet building with -std=c++11
install:
- if [[ $TRAVIS_OS_NAME == osx ]]; then
brew update;
brew install python3 ninja;
python3 -m venv venv;
source venv/bin/activate;
elif [[ $TRAVIS_OS_NAME == linux ]]; then
wget https://github.com/ninja-build/ninja/releases/download/v1.7.2/ninja-linux.zip;
unzip -q ninja-linux.zip -d build;
fi
- pip3 install meson
# /usr/bin/gcc is 4.6 always, but gcc-X.Y is available.
- if [[ $CXX = g++ ]]; then export CXX="g++-4.9" CC="gcc-4.9"; fi
# /usr/bin/clang has a conflict with gcc, so use clang-X.Y.
- if [[ $CXX = clang++ ]]; then export CXX="clang++-3.5" CC="clang-3.5"; fi
- echo ${PATH}
- ls /usr/local
- ls /usr/local/bin
- export PATH="${PWD}"/build:/usr/local/bin:/usr/bin:${PATH}
- echo ${CXX}
- ${CXX} --version
- which valgrind
addons:
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-precise-3.5
packages:
- gcc-4.9
- g++-4.9
- clang-3.5
- valgrind
os:
- linux
language: cpp
compiler:
- gcc
- clang
script: ./travis.sh
env:
matrix:
- LIB_TYPE=static BUILD_TYPE=release
- LIB_TYPE=shared BUILD_TYPE=debug
notifications:
email: false
dist: trusty
sudo: false

AUTHORS

@@ -1,111 +1 @@
Baptiste Lepilleur <blep@users.sourceforge.net>
Aaron Jacobs <aaronjjacobs@gmail.com>
Aaron Jacobs <jacobsa@google.com>
Adam Boseley <ABoseley@agjunction.com>
Adam Boseley <adam.boseley@gmail.com>
Aleksandr Derbenev <13alexac@gmail.com>
Alexander Gazarov <DrMetallius@users.noreply.github.com>
Alexander V. Brezgin <abrezgin@appliedtech.ru>
Alexandr Brezgin <albrezgin@mail.ru>
Alexey Kruchinin <alexey@mopals.com>
Anton Indrawan <anton.indrawan@gmail.com>
Baptiste Jonglez <git@bitsofnetworks.org>
Baptiste Lepilleur <baptiste.lepilleur@gmail.com>
Baruch Siach <baruch@tkos.co.il>
Ben Boeckel <mathstuf@gmail.com>
Benjamin Knecht <bknecht@logitech.com>
Bernd Kuhls <bernd.kuhls@t-online.de>
Billy Donahue <billydonahue@google.com>
Braden McDorman <bmcdorman@gmail.com>
Brandon Myers <bmyers1788@gmail.com>
Brendan Drew <brendan.drew@daqri.com>
chason <cxchao802@gmail.com>
Chris Gilling <cgilling@iparadigms.com>
Christopher Dawes <christopher.dawes.1981@googlemail.com>
Christopher Dunn <cdunn2001@gmail.com>
Chuck Atkins <chuck.atkins@kitware.com>
Cody P Schafer <dev@codyps.com>
Connor Manning <connor@hobu.co>
Cory Quammen <cory.quammen@kitware.com>
Cristóvão B da Cruz e Silva <CrisXed@gmail.com>
Daniel Krügler <daniel.kruegler@gmail.com>
Dani-Hub <daniel.kruegler@googlemail.com>
Dan Liu <gzliudan>
datadiode <datadiode@users.noreply.github.com>
datadiode <jochen.neubeck@vodafone.de>
David Seifert <soap@gentoo.org>
David West <david-west@idexx.com>
dawesc <chris.dawes@eftlab.co.uk>
Dmitry Marakasov <amdmi3@amdmi3.ru>
dominicpezzuto <dom@dompezzuto.com>
Don Milham <dmilham@gmail.com>
drgler <daniel.kruegler@gmail.com>
ds283 <D.Seery@sussex.ac.uk>
Egor Tensin <Egor.Tensin@gmail.com>
eightnoteight <mr.eightnoteight@gmail.com>
Evince <baneyue@gmail.com>
filipjs <filipjs@users.noreply.github.com>
findblar <ft@finbarr.ca>
Florian Meier <florian.meier@koalo.de>
Gaëtan Lehmann <gaetan.lehmann@gmail.com>
Gaurav <g.gupta@samsung.com>
Gergely Nagy <ngg@ngg.hu>
Gida Pataki <gida.pataki@prezi.com>
I3ck <buckmartin@buckmartin.de>
Iñaki Baz Castillo <ibc@aliax.net>
Jacco <jacco@geul.net>
Jean-Christophe Fillion-Robin <jchris.fillionr@kitware.com>
Jonas Platte <mail@jonasplatte.de>
Jörg Krause <joerg.krause@embedded.rocks>
Keith Lea <keith@whamcitylights.com>
Kevin Grant <kbradleygrant@gmail.com>
Kirill V. Lyadvinsky <jia3ep@gmail.com>
Kirill V. Lyadvinsky <mail@codeatcpp.com>
Kobi Gurkan <kobigurk@gmail.com>
Magnus Bjerke Vik <mbvett@gmail.com>
Malay Shah <malays@users.sourceforge.net>
Mara Kim <hacker.root@gmail.com>
Marek Kotewicz <marek.kotewicz@gmail.com>
Mark Lakata <mark@lakata.org>
Mark Zeren <mzeren@vmware.com>
Martin Buck <buckmartin@buckmartin.de>
Martyn Gigg <martyn.gigg@gmail.com>
Mattes D <github@xoft.cz>
Matthias Loy <matthias.loy@hbm.com>
Merlyn Morgan-Graham <kavika@gmail.com>
Michael Shields <mshields@google.com>
Michał Górny <mgorny@gentoo.org>
Mike Naberezny <mike@naberezny.com>
mloy <matthias.loy@googlemail.com>
Motti <lanzkron@gmail.com>
nnkur <nnkur@mail.ru>
Omkar Wagh <owagh@owaghlinux.ny.tower-research.com>
paulo <paulobrizolara@users.noreply.github.com>
pavel.pimenov <pavel.pimenov@gmail.com>
Paweł Bylica <chfast@gmail.com>
Péricles Lopes Machado <pericles.raskolnikoff@gmail.com>
Peter Spiess-Knafl <psk@autistici.org>
pffang <pffang@vip.qq.com>
Rémi Verschelde <remi@verschelde.fr>
renu555 <renu.tyagi@samsung.com>
Robert Dailey <rcdailey@gmail.com>
Sam Clegg <sbc@chromium.org>
selaselah <selah@outlook.com>
Sergiy80 <sil2004@gmail.com>
sergzub <sergzub@gmail.com>
Stefan Schweter <stefan@schweter.it>
Steffen Kieß <Steffen.Kiess@ipvs.uni-stuttgart.de>
Steven Hahn <hahnse@ornl.gov>
Stuart Eichert <stuart@fivemicro.com>
SuperManitu <supermanitu@gmail.com>
Techwolf <dring@g33kworld.net>
Tengiz Sharafiev <btolfa+github@gmail.com>
Tomasz Maciejewski <tmaciejewsk@gmail.com>
Vicente Olivert Riera <Vincent.Riera@imgtec.com>
xiaoyur347 <xiaoyur347@gmail.com>
ycqiu <429148848@qq.com>
yiqiju <fred_ju@selinc.com>
Yu Xiaolei <dreifachstein@gmail.com>
Google Inc.

CMakeLists.txt

@@ -1,159 +0,0 @@
# vim: et ts=4 sts=4 sw=4 tw=0
CMAKE_MINIMUM_REQUIRED(VERSION 3.1)
PROJECT(jsoncpp)
ENABLE_TESTING()
OPTION(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON)
OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)
OPTION(JSONCPP_WITH_STRICT_ISO "Issue all the warnings demanded by strict ISO C and ISO C++" ON)
OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON)
OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF)
OPTION(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF)
OPTION(BUILD_STATIC_LIBS "Build jsoncpp_lib static library." ON)
# Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
IF(NOT WIN32)
IF(NOT CMAKE_BUILD_TYPE)
SET(CMAKE_BUILD_TYPE Release CACHE STRING
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage."
FORCE)
ENDIF()
ENDIF()
# Enable runtime search path support for dynamic libraries on OSX
IF(APPLE)
SET(CMAKE_MACOSX_RPATH 1)
ENDIF()
# Adhere to GNU filesystem layout conventions
INCLUDE(GNUInstallDirs)
SET(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build")
# Set variable named ${VAR_NAME} to value ${VALUE}
FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
ENDFUNCTION()
# Extract major, minor, patch from version text
# Parse a version string "X.Y.Z" and outputs
# version parts in ${OUPUT_PREFIX}_MAJOR, _MINOR, _PATCH.
# If parse succeeds then ${OUPUT_PREFIX}_FOUND is TRUE.
MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
SET(VERSION_REGEX "[0-9]+\\.[0-9]+\\.[0-9]+(-[a-zA-Z0-9_]+)?")
IF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
STRING(REGEX MATCHALL "[0-9]+|-([A-Za-z0-9_]+)" VERSION_PARTS ${VERSION_TEXT})
LIST(GET VERSION_PARTS 0 ${OUPUT_PREFIX}_MAJOR)
LIST(GET VERSION_PARTS 1 ${OUPUT_PREFIX}_MINOR)
LIST(GET VERSION_PARTS 2 ${OUPUT_PREFIX}_PATCH)
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE )
ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE )
ENDIF()
ENDMACRO()
# Read out version from "version" file
#FILE(STRINGS "version" JSONCPP_VERSION)
#SET( JSONCPP_VERSION_MAJOR X )
#SET( JSONCPP_VERSION_MINOR Y )
#SET( JSONCPP_VERSION_PATCH Z )
SET( JSONCPP_VERSION 1.8.3 )
jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
#IF(NOT JSONCPP_VERSION_FOUND)
# MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
#ENDIF(NOT JSONCPP_VERSION_FOUND)
SET( JSONCPP_SOVERSION 19 )
SET( JSONCPP_USE_SECURE_MEMORY "0" CACHE STRING "-D...=1 to use memory-wiping allocator for STL" )
MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}")
# File version.h is only regenerated on CMake configure step
CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
"${PROJECT_SOURCE_DIR}/include/json/version.h"
NEWLINE_STYLE UNIX )
CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/version.in"
"${PROJECT_SOURCE_DIR}/version"
NEWLINE_STYLE UNIX )
MACRO(UseCompilationWarningAsError)
IF(MSVC)
# Only enabled in debug because some old versions of VS STL generate
# warnings when compiled in release configuration.
SET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ")
ELSEIF(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
IF(JSONCPP_WITH_STRICT_ISO)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic-errors")
ENDIF()
ENDIF()
ENDMACRO()
# Include our configuration header
INCLUDE_DIRECTORIES( ${jsoncpp_SOURCE_DIR}/include )
IF(MSVC)
# Only enabled in debug because some old versions of VS STL generate
# unreachable code warning when compiled in release configuration.
SET(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
ENDIF()
# Require C++11 support, prefer ISO C++ over GNU variants,
# as relying solely on ISO C++ is more portable.
SET(CMAKE_CXX_STANDARD 11)
SET(CMAKE_CXX_STANDARD_REQUIRED ON)
SET(CMAKE_CXX_EXTENSIONS OFF)
IF(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
# using regular Clang or AppleClang
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wconversion -Wshadow -Werror=conversion -Werror=sign-compare")
ELSEIF(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
# using GCC
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wconversion -Wshadow -Wextra")
# not yet ready for -Wsign-conversion
IF(JSONCPP_WITH_STRICT_ISO)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic")
ENDIF()
IF(JSONCPP_WITH_WARNING_AS_ERROR)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=conversion")
ENDIF()
ELSEIF(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
# using Intel compiler
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wconversion -Wshadow -Wextra -Werror=conversion")
IF(JSONCPP_WITH_STRICT_ISO AND NOT JSONCPP_WITH_WARNING_AS_ERROR)
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -pedantic")
ENDIF()
ENDIF()
FIND_PROGRAM(CCACHE_FOUND ccache)
IF(CCACHE_FOUND)
SET_PROPERTY(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
SET_PROPERTY(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
ENDIF(CCACHE_FOUND)
IF(JSONCPP_WITH_WARNING_AS_ERROR)
UseCompilationWarningAsError()
ENDIF()
IF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
CONFIGURE_FILE(
"pkg-config/jsoncpp.pc.in"
"pkg-config/jsoncpp.pc"
@ONLY)
INSTALL(FILES "${CMAKE_CURRENT_BINARY_DIR}/pkg-config/jsoncpp.pc"
DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
ENDIF()
IF(JSONCPP_WITH_CMAKE_PACKAGE)
INSTALL(EXPORT jsoncpp
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/jsoncpp
FILE jsoncppConfig.cmake)
ENDIF()
# Build the different applications
ADD_SUBDIRECTORY( src )
#install the includes
ADD_SUBDIRECTORY( include )

LICENSE

@@ -2,13 +2,13 @@ The JsonCpp library's source code, including accompanying documentation,
tests and demonstration applications, are licensed under the following
conditions...
Baptiste Lepilleur and The JsonCpp Authors explicitly disclaim copyright in all
The author (Baptiste Lepilleur) explicitly disclaims copyright in all
jurisdictions which recognize such a disclaimer. In such jurisdictions,
this software is released into the Public Domain.
In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur and
The JsonCpp Authors, and is released under the terms of the MIT License (see below).
2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
released under the terms of the MIT License (see below).
In jurisdictions which recognize Public Domain property, the user of this
software may choose to accept it either as 1) Public Domain, 2) under the
@@ -23,7 +23,7 @@ described in clear, concise terms at:
The full text of the MIT License follows:
========================================================================
Copyright (c) 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
Copyright (c) 2007-2010 Baptiste Lepilleur
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation

NEWS.txt

@@ -0,0 +1,95 @@
New in JsonCpp 0.6.0:
---------------------
* Compilation
- LD_LIBRARY_PATH and LIBRARY_PATH environment variables are now
propagated to the build environment as this is required for some
compiler installation.
- Added support for Microsoft Visual Studio 2008 (bug #2930462):
The platform "msvc90" has been added.
Notes: you need to setup the environment by running vcvars32.bat
(e.g. MSVC 2008 command prompt in start menu) before running scons.
- Added support for amalgated source and header generation (a la sqlite).
Refer to README.txt section "Generating amalgated source and header"
for detail.
* Value
- Removed experimental ValueAllocator, it caused static
initialization/destruction order issues (bug #2934500).
The DefaultValueAllocator has been inlined in code.
- Added support for 64 bits integer:
Types Json::Int64 and Json::UInt64 have been added. They are aliased
to 64 bits integers on system that support them (based on __int64 on
Microsoft Visual Studio platform, and long long on other platforms).
Types Json::LargestInt and Json::LargestUInt have been added. They are
aliased to the largest integer type supported:
either Json::Int/Json::UInt or Json::Int64/Json::UInt64 respectively.
Json::Value::asInt() and Json::Value::asUInt() still return plain
"int" based types, but assert if an attempt is made to retrieve
a 64 bits value that cannot be represented as the return type.
Json::Value::asInt64() and Json::Value::asUInt64() have been added
to obtain the 64 bits integer value.
Json::Value::asLargestInt() and Json::Value::asLargestUInt() return
the integer as a LargestInt/LargestUInt respectively. Those
functions are typically used when implementing a writer.
The reader attempts to read a number as a 64 bits integer, and falls
back to reading a double if the number is not in the range of a 64
bits integer.
Warning: Json::Value::asInt() and Json::Value::asUInt() now return
long long. This change breaks code that was passing the return value
to a *printf() function.
Support for 64 bits integer can be disabled by defining the macro
JSON_NO_INT64 (uncomment it in json/config.h for example), though
it should have no impact on existing usage.
- The type Json::ArrayIndex is used for indexes of a JSON value array. It
is an unsigned int (typically 32 bits).
- Array index can be passed as int to operator[], allowing use of literal:
Json::Value array;
array.append( 1234 );
int value = array[0].asInt(); // did not compile previously
- Added float Json::Value::asFloat() to obtain a floating point value as a
float (avoids the loss-of-precision warning caused by using asDouble()
to initialize a float).
* Reader
- Renamed Reader::getFormatedErrorMessages() to getFormattedErrorMessages.
Bug #3023708 (Formatted has 2 't'). The old member function is deprecated
but still present for backward compatibility.
* Tests
- Added test to ensure that the escape sequence "\/" is correctly handled
by the parser.
* Bug fixes
- Bug #3139677: JSON [1 2 3] was incorrectly parsed as [1, 3]. Error is now
correctly detected.
- Bug #3139678: stack buffer overflow when parsing a double with a
length of 32 characters.
* License
- See file LICENSE for details. Basically JsonCpp is now licensed under
MIT license, or public domain if desired and recognized in your jurisdiction.
Thanks to Stephan G. Beal [http://wanderinghorse.net/home/stephan/] who
helped figure out the solution to the public domain issue.
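
The API changes listed in this NEWS entry are easiest to see in code. Below is a minimal C++ sketch (an editor illustration, not part of the diff) exercising the 64 bits integer accessors, the int array index, asFloat(), and the renamed getFormattedErrorMessages(); the literal values are illustrative only.

```cpp
#include <json/json.h>
#include <iostream>

int main() {
  // 64 bits integers: stored and retrieved without truncation.
  Json::Value big(Json::Int64(123456789012345LL));   // value is illustrative
  Json::Int64 i64 = big.asInt64();
  Json::LargestInt li = big.asLargestInt();
  // big.asInt() would assert, because this value does not fit in an int.

  // Array index can be passed as a plain int literal.
  Json::Value array;
  array.append(1234);
  int value = array[0].asInt();

  // asFloat() avoids the loss-of-precision warning of asDouble().
  Json::Value pi(3.14);
  float f = pi.asFloat();

  // Renamed error-report accessor.
  Json::Value root;
  Json::Reader reader;
  if (!reader.parse("{ \"answer\": 42 }", root))
    std::cerr << reader.getFormattedErrorMessages();

  std::cout << i64 << ' ' << li << ' ' << value << ' ' << f << '\n';
  return 0;
}
```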

README.md

@@ -1,131 +0,0 @@
# JsonCpp
[![badge](https://img.shields.io/badge/conan.io-jsoncpp%2F1.8.0-green.svg?logo=data:image/png;base64%2CiVBORw0KGgoAAAANSUhEUgAAAA4AAAAOCAMAAAAolt3jAAAA1VBMVEUAAABhlctjlstkl8tlmMtlmMxlmcxmmcxnmsxpnMxpnM1qnc1sn85voM91oM11oc1xotB2oc56pNF6pNJ2ptJ8ptJ8ptN9ptN8p9N5qNJ9p9N9p9R8qtOBqdSAqtOAqtR%2BrNSCrNJ/rdWDrNWCsNWCsNaJs9eLs9iRvNuVvdyVv9yXwd2Zwt6axN6dxt%2Bfx%2BChyeGiyuGjyuCjyuGly%2BGlzOKmzOGozuKoz%2BKqz%2BOq0OOv1OWw1OWw1eWx1eWy1uay1%2Baz1%2Baz1%2Bez2Oe02Oe12ee22ujUGwH3AAAAAXRSTlMAQObYZgAAAAFiS0dEAIgFHUgAAAAJcEhZcwAACxMAAAsTAQCanBgAAAAHdElNRQfgBQkREyOxFIh/AAAAiklEQVQI12NgAAMbOwY4sLZ2NtQ1coVKWNvoc/Eq8XDr2wB5Ig62ekza9vaOqpK2TpoMzOxaFtwqZua2Bm4makIM7OzMAjoaCqYuxooSUqJALjs7o4yVpbowvzSUy87KqSwmxQfnsrPISyFzWeWAXCkpMaBVIC4bmCsOdgiUKwh3JojLgAQ4ZCE0AMm2D29tZwe6AAAAAElFTkSuQmCC)](http://www.conan.io/source/jsoncpp/1.8.0/theirix/ci)
[JSON][json-org] is a lightweight data-interchange format. It can represent
numbers, strings, ordered sequences of values, and collections of name/value
pairs.
[json-org]: http://json.org/
JsonCpp is a C++ library that allows manipulating JSON values, including
serialization and deserialization to and from strings. It can also preserve
existing comment in unserialization/serialization steps, making it a convenient
format to store user input files.
## Documentation
[JsonCpp documentation][JsonCpp-documentation] is generated using [Doxygen][].
[JsonCpp-documentation]: http://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html
[Doxygen]: http://www.doxygen.org
## A note on backward-compatibility
* `1.y.z` is built with C++11.
* `0.y.z` can be used with older compilers.
* Major versions maintain binary-compatibility.
## Contributing to JsonCpp
### Building and testing with Meson/Ninja
Thanks to David Seifert (@SoapGentoo), we (the maintainers) now use [meson](http://mesonbuild.com/) and [ninja](https://ninja-build.org/) to build for debugging, as well as for continuous integration (see [`travis.sh`](travis.sh) ). Other systems may work, but minor things like version strings might break.
First, install both meson (which requires Python3) and ninja.
Then,
cd jsoncpp/
BUILD_TYPE=shared
#BUILD_TYPE=static
LIB_TYPE=debug
#LIB_TYPE=release
meson --buildtype ${BUILD_TYPE} --default-library ${LIB_TYPE} . build-${LIB_TYPE}
ninja -v -C build-${LIB_TYPE} test
### Building and testing with other build systems
See https://github.com/open-source-parsers/jsoncpp/wiki/Building
### Running the tests manually
You need to run tests manually only if you are troubleshooting an issue.
In the instructions below, replace `path/to/jsontest` with the path of the
`jsontest` executable that was compiled on your platform.
cd test
# This will run the Reader/Writer tests
python runjsontests.py path/to/jsontest
# This will run the Reader/Writer tests, using JSONChecker test suite
# (http://www.json.org/JSON_checker/).
# Notes: not all tests pass: JsonCpp is too lenient (for example,
# it allows an integer to start with '0'). The goal is to improve
# strict mode parsing to get all tests to pass.
python runjsontests.py --with-json-checker path/to/jsontest
# This will run the unit tests (mostly Value)
python rununittests.py path/to/test_lib_json
# You can run the tests using valgrind:
python rununittests.py --valgrind path/to/test_lib_json
### Building the documentation
Run the Python script `doxybuild.py` from the top directory:
python doxybuild.py --doxygen=$(which doxygen) --open --with-dot
See `doxybuild.py --help` for options.
### Adding a reader/writer test
To add a test, you need to create two files in test/data:
* a `TESTNAME.json` file, that contains the input document in JSON format.
* a `TESTNAME.expected` file, that contains a flattened representation of the
input document.
The `TESTNAME.expected` file format is as follows:
* Each line represents a JSON element of the element tree represented by the
input document.
* Each line has two parts: the path to access the element separated from the
element value by `=`. Array and object values are always empty (i.e.
represented by either `[]` or `{}`).
* Element path `.` represents the root element, and is used to separate object
members. `[N]` is used to specify the value of an array element at index `N`.
See the examples `test_complex_01.json` and `test_complex_01.expected` to better understand element paths.
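
For illustration (not part of the original README), here is a rough C++ sketch of how such a flattened path=value view can be derived from a parsed `Json::Value`; the authoritative format is whatever the `jsontest` tool emits, and the helper name `printFlattened` is made up.

```cpp
#include <json/json.h>
#include <iostream>
#include <sstream>
#include <string>

// Rough approximation of the flattened "path=value" layout; the reference
// output is produced by jsontest itself.
static void printFlattened(const Json::Value& value, const std::string& path) {
  if (value.isObject()) {
    std::cout << path << "={}\n";
    Json::Value::Members members = value.getMemberNames();
    for (size_t i = 0; i < members.size(); ++i) {
      // "." separates object members; the root path is just ".".
      std::string child = (path == ".") ? path + members[i]
                                        : path + "." + members[i];
      printFlattened(value[members[i]], child);
    }
  } else if (value.isArray()) {
    std::cout << path << "=[]\n";
    for (Json::ArrayIndex i = 0; i < value.size(); ++i) {
      std::ostringstream element;
      element << path << "[" << i << "]";   // [N] selects array element N
      printFlattened(value[i], element.str());
    }
  } else {
    Json::FastWriter writer;                // leaf values printed as JSON
    std::cout << path << "=" << writer.write(value);
  }
}

int main() {
  Json::Value root;
  Json::Reader reader;
  reader.parse("{ \"count\": [10, 20] }", root);
  printFlattened(root, ".");
  return 0;
}
```

For the document in `main()` this prints `.={}`, `.count=[]`, `.count[0]=10` and `.count[1]=20`, one entry per line.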
### Understanding reader/writer test output
When a test is run, output files are generated beside the input test files. Below is a short description of the content of each file:
* `test_complex_01.json`: input JSON document.
* `test_complex_01.expected`: flattened JSON element tree used to check if
parsing was correct.
* `test_complex_01.actual`: flattened JSON element tree produced by `jsontest`
from reading `test_complex_01.json`.
* `test_complex_01.rewrite`: JSON document written by `jsontest` using the
`Json::Value` parsed from `test_complex_01.json` and serialized using
`Json::StyledWriter`.
* `test_complex_01.actual-rewrite`: flattened JSON element tree produced by
`jsontest` from reading `test_complex_01.rewrite`.
* `test_complex_01.process-output`: `jsontest` output, typically useful for
understanding parsing errors.
## Using JsonCpp in your project
### Amalgamated source
https://github.com/open-source-parsers/jsoncpp/wiki/Amalgamated
### Other ways
If you have trouble, see the Wiki, or post a question as an Issue.
## License
See the `LICENSE` file for details. In summary, JsonCpp is licensed under the
MIT license, or public domain if desired and recognized in your jurisdiction.
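
As a companion to the README above (editor's sketch, not repository content): a minimal end-to-end example of the deserialize/modify/serialize workflow it describes, built against the headers in `include/` or the amalgamated `dist/` headers. The document content and key names are illustrative only.

```cpp
#include <json/json.h>
#include <iostream>
#include <string>

int main() {
  // Deserialize from a string.
  const std::string doc =
      "{ \"encoding\": \"UTF-8\", \"indent\": { \"length\": 3 } }";
  Json::Value root;
  Json::Reader reader;
  if (!reader.parse(doc, root)) {
    std::cerr << reader.getFormattedErrorMessages();
    return 1;
  }

  // Read values, with a default when a key is absent, then modify the tree.
  std::string encoding = root.get("encoding", "UTF-8").asString();
  int length = root["indent"]["length"].asInt();
  root["indent"]["use_space"] = true;

  // Serialize back to a human-readable string.
  Json::StyledWriter writer;
  std::cout << writer.write(root);
  std::cout << "encoding=" << encoding << " length=" << length << "\n";
  return 0;
}
```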

README.txt

@@ -0,0 +1,172 @@
* Introduction:
=============
JSON (JavaScript Object Notation) is a lightweight data-interchange format.
It can represent integers, real numbers, strings, ordered sequences of
values, and collections of name/value pairs.
JsonCpp (http://jsoncpp.sourceforge.net/) is a simple API to manipulate
JSON values and handle serialization to and from strings.
It can also preserve existing comments in unserialization/serialization steps,
making it a convenient format to store user input files.
Unserialization parsing is user friendly and provides precise error reports.
* Building/Testing:
=================
JsonCpp uses Scons (http://www.scons.org) as a build system. Scons requires
python to be installed (http://www.python.org).
You can download the scons-local distribution from the following url:
http://sourceforge.net/projects/scons/files/scons-local/1.2.0/
Unzip it in the directory where you found this README file. scons.py should be
at the same level as README.
python scons.py platform=PLTFRM [TARGET]
where PLTFRM may be one of:
suncc Sun C++ (Solaris)
vacpp Visual Age C++ (AIX)
mingw
msvc6 Microsoft Visual Studio 6 service pack 5-6
msvc70 Microsoft Visual Studio 2002
msvc71 Microsoft Visual Studio 2003
msvc80 Microsoft Visual Studio 2005
msvc90 Microsoft Visual Studio 2008
linux-gcc Gnu C++ (linux, also reported to work for Mac OS X)
Notes: if you are building with Microsoft Visual Studio 2008, you need to
setup the environment by running vcvars32.bat (e.g. MSVC 2008 command prompt)
before running scons.
Adding a platform is fairly simple. You need to change the SConstruct file
to do so.
and TARGET may be:
check: build library and run unit tests.
* Running the test manually:
==========================
Note that tests can be run by scons using the 'check' target (see above).
You need to run tests manually only if you are troubleshooting an issue.
In the instructions below, replace "path to jsontest.exe" with the path
of the 'jsontest' executable that was compiled on your platform.
cd test
# This will run the Reader/Writer tests
python runjsontests.py "path to jsontest.exe"
# This will run the Reader/Writer tests, using JSONChecker test suite
# (http://www.json.org/JSON_checker/).
# Notes: not all tests pass: JsonCpp is too lenient (for example,
# it allows an integer to start with '0'). The goal is to improve
# strict mode parsing to get all tests to pass.
python runjsontests.py --with-json-checker "path to jsontest.exe"
# This will run the unit tests (mostly Value)
python rununittests.py "path to test_lib_json.exe"
You can run the tests using valgrind:
python rununittests.py --valgrind "path to test_lib_json.exe"
* Building the documentation:
===========================
Run the python script doxybuild.py from the top directory:
python doxybuild.py --open --with-dot
See doxybuild.py --help for options.
Note that the documentation is also available for download as a tarball.
The documentation of the latest release is available online at:
http://jsoncpp.sourceforge.net/
* Generating amalgated source and header
======================================
JsonCpp is provided with a script to generate a single header and a single
source file to ease inclusion in an existing project.
The amalgated source can be generated at any time by running the following
command from the top-directory (requires python 2.6):
python amalgate.py
It is possible to specify the header name. See the -h option for details. By default,
the following files are generated:
- dist/jsoncpp.cpp: source file that needs to be added to your project
- dist/json/json.h: header file corresponding to its use in your project. It is
equivalent to including json/json.h in non-amalgated source. This header
only depends on standard headers.
- dist/json/json-forwards.h: header that provides forward declarations
of all JsonCpp types. This is typically what should be included in headers to
speed up compilation.
The amalgated sources are generated by concatenating JsonCpp source in the
correct order and defining macro JSON_IS_AMALGATED to prevent inclusion of
other headers.
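
A small sketch (added for illustration, not in the original README) of the intended split between the two amalgamated headers: json-forwards.h in your own headers, json.h in the translation units that use the full API, with dist/jsoncpp.cpp compiled into the project. The file names my_config.h / my_config.cpp are hypothetical.

```cpp
// my_config.h -- headers only need the forward declarations.
#include "json/json-forwards.h"

Json::Value loadConfig(const char* path);   // declaration is enough here

// my_config.cpp -- translation units that use the full API include json.h;
// dist/jsoncpp.cpp must also be compiled into the project.
#include "json/json.h"
#include <fstream>

Json::Value loadConfig(const char* path) {
  Json::Value root;
  Json::Reader reader;
  std::ifstream in(path);
  reader.parse(in, root);                   // Reader can parse from a stream
  return root;
}
```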
* Using json-cpp in your project:
===============================
include/ should be added to your compiler include path. jsoncpp headers
should be included as follows:
#include <json/json.h>
* Adding a reader/writer test:
============================
To add a test, you need to create two files in test/data:
- a TESTNAME.json file, that contains the input document in JSON format.
- a TESTNAME.expected file, that contains a flattened representation of
the input document.
TESTNAME.expected file format:
- each line represents a JSON element of the element tree represented
by the input document.
- each line has two parts: the path to access the element separated from
the element value by '='. Array and object values are always empty
(e.g. represented by either [] or {}).
- element path: '.' represents the root element, and is used to separate
object members. [N] is used to specify the value of an array element
at index N.
See test_complex_01.json and test_complex_01.expected to better understand
element path.
* Understanding reader/writer test output:
========================================
When a test is run, output files are generated beside the input test files.
Below is a short description of the content of each file:
- test_complex_01.json: input JSON document
- test_complex_01.expected: flattened JSON element tree used to check if
parsing was correct.
- test_complex_01.actual: flattened JSON element tree produced by
jsontest.exe from reading test_complex_01.json
- test_complex_01.rewrite: JSON document written by jsontest.exe using the
Json::Value parsed from test_complex_01.json and serialized using
Json::StyledWriter.
- test_complex_01.actual-rewrite: flattened JSON element tree produced by
jsontest.exe from reading test_complex_01.rewrite.
- test_complex_01.process-output: jsontest.exe output, typically useful to
understand parsing errors.
* License
=======
See file LICENSE for details. Basically JsonCpp is licensed under
MIT license, or public domain if desired and recognized in your jurisdiction.

SConstruct

@@ -0,0 +1,248 @@
"""
Notes:
- shared library support is buggy: it assumes that a static and dynamic library can be built from the same object files. This is not true on many platforms. For this reason it is only enabled on linux-gcc at the current time.
To add a platform:
- add its name in options allowed_values below
- add tool initialization for this platform. Search for "if platform == 'suncc'" as an example.
"""
import os
import os.path
import sys
JSONCPP_VERSION = open(File('#version').abspath,'rt').read().strip()
DIST_DIR = '#dist'
options = Variables()
options.Add( EnumVariable('platform',
'Platform (compiler/stl) used to build the project',
'msvc71',
allowed_values='suncc vacpp mingw msvc6 msvc7 msvc71 msvc80 msvc90 linux-gcc'.split(),
ignorecase=2) )
try:
platform = ARGUMENTS['platform']
if platform == 'linux-gcc':
CXX = 'g++' # not quite right, but env is not yet available.
import commands
version = commands.getoutput('%s -dumpversion' %CXX)
platform = 'linux-gcc-%s' %version
print "Using platform '%s'" %platform
LD_LIBRARY_PATH = os.environ.get('LD_LIBRARY_PATH', '')
LD_LIBRARY_PATH = "%s:libs/%s" %(LD_LIBRARY_PATH, platform)
os.environ['LD_LIBRARY_PATH'] = LD_LIBRARY_PATH
print "LD_LIBRARY_PATH =", LD_LIBRARY_PATH
except KeyError:
print 'You must specify a "platform"'
sys.exit(2)
print "Building using PLATFORM =", platform
rootbuild_dir = Dir('#buildscons')
build_dir = os.path.join( '#buildscons', platform )
bin_dir = os.path.join( '#bin', platform )
lib_dir = os.path.join( '#libs', platform )
sconsign_dir_path = Dir(build_dir).abspath
sconsign_path = os.path.join( sconsign_dir_path, '.sconsign.dbm' )
# Ensure build directory exist (SConsignFile fail otherwise!)
if not os.path.exists( sconsign_dir_path ):
os.makedirs( sconsign_dir_path )
# Store all dependencies signature in a database
SConsignFile( sconsign_path )
def make_environ_vars():
"""Returns a dictionnary with environment variable to use when compiling."""
# PATH is required to find the compiler
# TEMP is required for at least mingw
# LD_LIBRARY_PATH & co is required on some system for the compiler
vars = {}
for name in ('PATH', 'TEMP', 'TMP', 'LD_LIBRARY_PATH', 'LIBRARY_PATH'):
if name in os.environ:
vars[name] = os.environ[name]
return vars
env = Environment( ENV = make_environ_vars(),
toolpath = ['scons-tools'],
tools=[] ) #, tools=['default'] )
if platform == 'suncc':
env.Tool( 'sunc++' )
env.Tool( 'sunlink' )
env.Tool( 'sunar' )
env.Append( CCFLAGS = ['-mt'] )
elif platform == 'vacpp':
env.Tool( 'default' )
env.Tool( 'aixcc' )
env['CXX'] = 'xlC_r' #scons does not pick-up the correct one !
# using xlC_r ensure multi-threading is enabled:
# http://publib.boulder.ibm.com/infocenter/pseries/index.jsp?topic=/com.ibm.vacpp7a.doc/compiler/ref/cuselect.htm
env.Append( CCFLAGS = '-qrtti=all',
LINKFLAGS='-bh:5' ) # -bh:5 remove duplicate symbol warning
elif platform == 'msvc6':
env['MSVS_VERSION']='6.0'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -GX /nologo /MT'
elif platform == 'msvc70':
env['MSVS_VERSION']='7.0'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -GX /nologo /MT'
elif platform == 'msvc71':
env['MSVS_VERSION']='7.1'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -GX /nologo /MT'
elif platform == 'msvc80':
env['MSVS_VERSION']='8.0'
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -EHsc /nologo /MT'
elif platform == 'msvc90':
env['MSVS_VERSION']='9.0'
# Scons 1.2 fails to detect the correct location of the platform SDK.
# So we propagate those from the environment. This requires that the
# user run vcvars32.bat before compiling.
if 'INCLUDE' in os.environ:
env['ENV']['INCLUDE'] = os.environ['INCLUDE']
if 'LIB' in os.environ:
env['ENV']['LIB'] = os.environ['LIB']
for tool in ['msvc', 'msvs', 'mslink', 'masm', 'mslib']:
env.Tool( tool )
env['CXXFLAGS']='-GR -EHsc /nologo /MT'
elif platform == 'mingw':
env.Tool( 'mingw' )
env.Append( CPPDEFINES=[ "WIN32", "NDEBUG", "_MT" ] )
elif platform.startswith('linux-gcc'):
env.Tool( 'default' )
env.Append( LIBS = ['pthread'], CCFLAGS = "-Wall" )
env['SHARED_LIB_ENABLED'] = True
else:
print "UNSUPPORTED PLATFORM."
env.Exit(1)
env.Tool('targz')
env.Tool('srcdist')
env.Tool('globtool')
env.Append( CPPPATH = ['#include'],
LIBPATH = lib_dir )
short_platform = platform
if short_platform.startswith('msvc'):
short_platform = short_platform[2:]
# Notes: on Windows you need to rebuild the source for each variant
# Build script does not support that yet so we only build static libraries.
# This also fails on AIX because both dynamic and static library ends with
# extension .a.
env['SHARED_LIB_ENABLED'] = env.get('SHARED_LIB_ENABLED', False)
env['LIB_PLATFORM'] = short_platform
env['LIB_LINK_TYPE'] = 'lib' # static
env['LIB_CRUNTIME'] = 'mt'
env['LIB_NAME_SUFFIX'] = '${LIB_PLATFORM}_${LIB_LINK_TYPE}${LIB_CRUNTIME}' # must match autolink naming convention
env['JSONCPP_VERSION'] = JSONCPP_VERSION
env['BUILD_DIR'] = env.Dir(build_dir)
env['ROOTBUILD_DIR'] = env.Dir(rootbuild_dir)
env['DIST_DIR'] = DIST_DIR
if 'TarGz' in env['BUILDERS']:
class SrcDistAdder:
def __init__( self, env ):
self.env = env
def __call__( self, *args, **kw ):
apply( self.env.SrcDist, (self.env['SRCDIST_TARGET'],) + args, kw )
env['SRCDIST_BUILDER'] = env.TarGz
else: # If tarfile module is missing
class SrcDistAdder:
def __init__( self, env ):
pass
def __call__( self, *args, **kw ):
pass
env['SRCDIST_ADD'] = SrcDistAdder( env )
env['SRCDIST_TARGET'] = os.path.join( DIST_DIR, 'jsoncpp-src-%s.tar.gz' % env['JSONCPP_VERSION'] )
env_testing = env.Clone( )
env_testing.Append( LIBS = ['json_${LIB_NAME_SUFFIX}'] )
def buildJSONExample( env, target_sources, target_name ):
env = env.Clone()
env.Append( CPPPATH = ['#'] )
exe = env.Program( target=target_name,
source=target_sources )
env['SRCDIST_ADD']( source=[target_sources] )
global bin_dir
return env.Install( bin_dir, exe )
def buildJSONTests( env, target_sources, target_name ):
jsontests_node = buildJSONExample( env, target_sources, target_name )
check_alias_target = env.Alias( 'check', jsontests_node, RunJSONTests( jsontests_node, jsontests_node ) )
env.AlwaysBuild( check_alias_target )
def buildUnitTests( env, target_sources, target_name ):
jsontests_node = buildJSONExample( env, target_sources, target_name )
check_alias_target = env.Alias( 'check', jsontests_node,
RunUnitTests( jsontests_node, jsontests_node ) )
env.AlwaysBuild( check_alias_target )
def buildLibrary( env, target_sources, target_name ):
static_lib = env.StaticLibrary( target=target_name + '_${LIB_NAME_SUFFIX}',
source=target_sources )
global lib_dir
env.Install( lib_dir, static_lib )
if env['SHARED_LIB_ENABLED']:
shared_lib = env.SharedLibrary( target=target_name + '_${LIB_NAME_SUFFIX}',
source=target_sources )
env.Install( lib_dir, shared_lib )
env['SRCDIST_ADD']( source=[target_sources] )
Export( 'env env_testing buildJSONExample buildLibrary buildJSONTests buildUnitTests' )
def buildProjectInDirectory( target_directory ):
global build_dir
target_build_dir = os.path.join( build_dir, target_directory )
target = os.path.join( target_directory, 'sconscript' )
SConscript( target, build_dir=target_build_dir, duplicate=0 )
env['SRCDIST_ADD']( source=[target] )
def runJSONTests_action( target, source = None, env = None ):
# Add test scripts to python path
jsontest_path = Dir( '#test' ).abspath
sys.path.insert( 0, jsontest_path )
data_path = os.path.join( jsontest_path, 'data' )
import runjsontests
return runjsontests.runAllTests( os.path.abspath(source[0].path), data_path )
def runJSONTests_string( target, source = None, env = None ):
return 'RunJSONTests("%s")' % source[0]
import SCons.Action
ActionFactory = SCons.Action.ActionFactory
RunJSONTests = ActionFactory(runJSONTests_action, runJSONTests_string )
def runUnitTests_action( target, source = None, env = None ):
# Add test scripts to python path
jsontest_path = Dir( '#test' ).abspath
sys.path.insert( 0, jsontest_path )
import rununittests
return rununittests.runAllTests( os.path.abspath(source[0].path) )
def runUnitTests_string( target, source = None, env = None ):
return 'RunUnitTests("%s")' % source[0]
RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string )
env.Alias( 'check' )
srcdist_cmd = env['SRCDIST_ADD']( source = """
AUTHORS README.txt SConstruct
""".split() )
env.Alias( 'src-dist', srcdist_cmd )
buildProjectInDirectory( 'src/jsontestrunner' )
buildProjectInDirectory( 'src/lib_json' )
buildProjectInDirectory( 'src/test_lib_json' )
#print env.Dump()

amalgamate.py

@@ -1,155 +0,0 @@
"""Amalgate json-cpp library sources into a single source and header file.
Works with python2.6+ and python3.4+.
Example of invocation (must be invoked from json-cpp top directory):
python amalgate.py
"""
import os
import os.path
import sys
class AmalgamationFile:
def __init__(self, top_dir):
self.top_dir = top_dir
self.blocks = []
def add_text(self, text):
if not text.endswith("\n"):
text += "\n"
self.blocks.append(text)
def add_file(self, relative_input_path, wrap_in_comment=False):
def add_marker(prefix):
self.add_text("")
self.add_text("// " + "/"*70)
self.add_text("// %s of content of file: %s" % (prefix, relative_input_path.replace("\\","/")))
self.add_text("// " + "/"*70)
self.add_text("")
add_marker("Beginning")
f = open(os.path.join(self.top_dir, relative_input_path), "rt")
content = f.read()
if wrap_in_comment:
content = "/*\n" + content + "\n*/"
self.add_text(content)
f.close()
add_marker("End")
self.add_text("\n\n\n\n")
def get_value(self):
return "".join(self.blocks).replace("\r\n","\n")
def write_to(self, output_path):
output_dir = os.path.dirname(output_path)
if output_dir and not os.path.isdir(output_dir):
os.makedirs(output_dir)
f = open(output_path, "wb")
f.write(str.encode(self.get_value(), 'UTF-8'))
f.close()
def amalgamate_source(source_top_dir=None,
target_source_path=None,
header_include_path=None):
"""Produces amalgated source.
Parameters:
source_top_dir: top-directory
target_source_path: output .cpp path
header_include_path: generated header path relative to target_source_path.
"""
print("Amalgating header...")
header = AmalgamationFile(source_top_dir)
header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).")
header.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
header.add_file("LICENSE", wrap_in_comment=True)
header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED")
header.add_text("# define JSON_AMALGATED_H_INCLUDED")
header.add_text("/// If defined, indicates that the source file is amalgated")
header.add_text("/// to prevent private header inclusion.")
header.add_text("#define JSON_IS_AMALGAMATION")
header.add_file("include/json/version.h")
#header.add_file("include/json/allocator.h") # Not available here.
header.add_file("include/json/config.h")
header.add_file("include/json/forwards.h")
header.add_file("include/json/features.h")
header.add_file("include/json/value.h")
header.add_file("include/json/reader.h")
header.add_file("include/json/writer.h")
header.add_file("include/json/assertions.h")
header.add_text("#endif //ifndef JSON_AMALGATED_H_INCLUDED")
target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path)
print("Writing amalgated header to %r" % target_header_path)
header.write_to(target_header_path)
base, ext = os.path.splitext(header_include_path)
forward_header_include_path = base + "-forwards" + ext
print("Amalgating forward header...")
header = AmalgamationFile(source_top_dir)
header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).")
header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path)
header.add_text("/// This header provides forward declaration for all JsonCpp types.")
header.add_file("LICENSE", wrap_in_comment=True)
header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
header.add_text("# define JSON_FORWARD_AMALGATED_H_INCLUDED")
header.add_text("/// If defined, indicates that the source file is amalgated")
header.add_text("/// to prevent private header inclusion.")
header.add_text("#define JSON_IS_AMALGAMATION")
header.add_file("include/json/config.h")
header.add_file("include/json/forwards.h")
header.add_text("#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
target_forward_header_path = os.path.join(os.path.dirname(target_source_path),
forward_header_include_path)
print("Writing amalgated forward header to %r" % target_forward_header_path)
header.write_to(target_forward_header_path)
print("Amalgating source...")
source = AmalgamationFile(source_top_dir)
source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).")
source.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
source.add_file("LICENSE", wrap_in_comment=True)
source.add_text("")
source.add_text('#include "%s"' % header_include_path)
source.add_text("""
#ifndef JSON_IS_AMALGAMATION
#error "Compile with -I PATH_TO_JSON_DIRECTORY"
#endif
""")
source.add_text("")
lib_json = "src/lib_json"
source.add_file(os.path.join(lib_json, "json_tool.h"))
source.add_file(os.path.join(lib_json, "json_reader.cpp"))
source.add_file(os.path.join(lib_json, "json_valueiterator.inl"))
source.add_file(os.path.join(lib_json, "json_value.cpp"))
source.add_file(os.path.join(lib_json, "json_writer.cpp"))
print("Writing amalgated source to %r" % target_source_path)
source.write_to(target_source_path)
def main():
usage = """%prog [options]
Generate a single amalgated source and header file from the sources.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option("-s", "--source", dest="target_source_path", action="store", default="dist/jsoncpp.cpp",
help="""Output .cpp source path. [Default: %default]""")
parser.add_option("-i", "--include", dest="header_include_path", action="store", default="json/json.h",
help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
parser.add_option("-t", "--top-dir", dest="top_dir", action="store", default=os.getcwd(),
help="""Source top-directory. [Default: %default]""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
msg = amalgamate_source(source_top_dir=options.top_dir,
target_source_path=options.target_source_path,
header_include_path=options.header_include_path)
if msg:
sys.stderr.write(msg + "\n")
sys.exit(1)
else:
print("Source succesfully amalagated")
if __name__ == "__main__":
main()

amalgate.py

@@ -0,0 +1,147 @@
"""Amalgate json-cpp library sources into a single source and header file.
Requires Python 2.6
Example of invocation (must be invoked from json-cpp top directory):
python amalgate.py
"""
import os
import os.path
import sys
class AmalagatedFile:
def __init__( self, top_dir ):
self.top_dir = top_dir
self.blocks = []
def add_text( self, text ):
if not text.endswith( '\n' ):
text += '\n'
self.blocks.append( text )
def add_file( self, relative_input_path, wrap_in_comment=False ):
def add_marker( prefix ):
self.add_text( '' )
self.add_text( '// ' + '/'*70 )
self.add_text( '// %s of content of file: %s' % (prefix, relative_input_path.replace('\\','/')) )
self.add_text( '// ' + '/'*70 )
self.add_text( '' )
add_marker( 'Beginning' )
f = open( os.path.join( self.top_dir, relative_input_path ), 'rt' )
content = f.read()
if wrap_in_comment:
content = '/*\n' + content + '\n*/'
self.add_text( content )
f.close()
add_marker( 'End' )
self.add_text( '\n\n\n\n' )
def get_value( self ):
return ''.join( self.blocks ).replace('\r\n','\n')
def write_to( self, output_path ):
output_dir = os.path.dirname( output_path )
if output_dir and not os.path.isdir( output_dir ):
os.makedirs( output_dir )
f = open( output_path, 'wb' )
f.write( self.get_value() )
f.close()
def amalgate_source( source_top_dir=None,
target_source_path=None,
header_include_path=None ):
"""Produces amalgated source.
Parameters:
source_top_dir: top-directory
target_source_path: output .cpp path
header_include_path: generated header path relative to target_source_path.
"""
print 'Amalgating header...'
header = AmalagatedFile( source_top_dir )
header.add_text( '/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).' )
header.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
header.add_file( 'LICENSE', wrap_in_comment=True )
header.add_text( '#ifndef JSON_AMALGATED_H_INCLUDED' )
header.add_text( '# define JSON_AMALGATED_H_INCLUDED' )
header.add_text( '/// If defined, indicates that the source file is amalgated' )
header.add_text( '/// to prevent private header inclusion.' )
header.add_text( '#define JSON_IS_AMALGATED' )
header.add_file( 'include/json/config.h' )
header.add_file( 'include/json/forwards.h' )
header.add_file( 'include/json/features.h' )
header.add_file( 'include/json/value.h' )
header.add_file( 'include/json/reader.h' )
header.add_file( 'include/json/writer.h' )
header.add_text( '#endif //ifndef JSON_AMALGATED_H_INCLUDED' )
target_header_path = os.path.join( os.path.dirname(target_source_path), header_include_path )
print 'Writing amalgated header to %r' % target_header_path
header.write_to( target_header_path )
base, ext = os.path.splitext( header_include_path )
forward_header_include_path = base + '-forwards' + ext
print 'Amalgating forward header...'
header = AmalagatedFile( source_top_dir )
header.add_text( '/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).' )
header.add_text( '/// It is intented to be used with #include <%s>' % forward_header_include_path )
header.add_text( '/// This header provides forward declaration for all JsonCpp types.' )
header.add_file( 'LICENSE', wrap_in_comment=True )
header.add_text( '#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
header.add_text( '# define JSON_FORWARD_AMALGATED_H_INCLUDED' )
header.add_text( '/// If defined, indicates that the source file is amalgated' )
header.add_text( '/// to prevent private header inclusion.' )
header.add_text( '#define JSON_IS_AMALGATED' )
header.add_file( 'include/json/config.h' )
header.add_file( 'include/json/forwards.h' )
header.add_text( '#endif //ifndef JSON_FORWARD_AMALGATED_H_INCLUDED' )
target_forward_header_path = os.path.join( os.path.dirname(target_source_path),
forward_header_include_path )
print 'Writing amalgated forward header to %r' % target_forward_header_path
header.write_to( target_forward_header_path )
print 'Amalgating source...'
source = AmalagatedFile( source_top_dir )
source.add_text( '/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).' )
source.add_text( '/// It is intented to be used with #include <%s>' % header_include_path )
source.add_file( 'LICENSE', wrap_in_comment=True )
source.add_text( '' )
source.add_text( '#include <%s>' % header_include_path )
source.add_text( '' )
source.add_file( 'src/lib_json\json_tool.h' )
source.add_file( 'src/lib_json\json_reader.cpp' )
source.add_file( 'src/lib_json\json_batchallocator.h' )
source.add_file( 'src/lib_json\json_valueiterator.inl' )
source.add_file( 'src/lib_json\json_value.cpp' )
source.add_file( 'src/lib_json\json_writer.cpp' )
print 'Writing amalgated source to %r' % target_source_path
source.write_to( target_source_path )
def main():
usage = """%prog [options]
Generate a single amalgated source and header file from the sources.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('-s', '--source', dest="target_source_path", action='store', default='dist/jsoncpp.cpp',
help="""Output .cpp source path. [Default: %default]""")
parser.add_option('-i', '--include', dest="header_include_path", action='store', default='json/json.h',
help="""Header include path. Used to include the header from the amalgated source file. [Default: %default]""")
parser.add_option('-t', '--top-dir', dest="top_dir", action='store', default=os.getcwd(),
help="""Source top-directory. [Default: %default]""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
msg = amalgate_source( source_top_dir=options.top_dir,
target_source_path=options.target_source_path,
header_include_path=options.header_include_path )
if msg:
sys.stderr.write( msg + '\n' )
sys.exit( 1 )
else:
print 'Source succesfully amalagated'
if __name__ == '__main__':
main()

appveyor.yml

@@ -1,22 +0,0 @@
clone_folder: c:\projects\jsoncpp
environment:
matrix:
- CMAKE_GENERATOR: Visual Studio 12 2013
- CMAKE_GENERATOR: Visual Studio 12 2013 Win64
- CMAKE_GENERATOR: Visual Studio 14 2015
- CMAKE_GENERATOR: Visual Studio 14 2015 Win64
build_script:
- cmake --version
- cd c:\projects\jsoncpp
- cmake -G "%CMAKE_GENERATOR%" -DCMAKE_INSTALL_PREFIX=%CD:\=/%/install -DCMAKE_BUILD_TYPE=Release -DBUILD_SHARED_LIBS=ON .
- cmake --build . --config Release --target install
deploy:
provider: GitHub
auth_token:
secure: K2Tp1q8pIZ7rs0Ot24ZMWuwr12Ev6Tc6QkhMjGQxoQG3ng1pXtgPasiJ45IDXGdg
on:
branch: master
appveyor_repo_tag: true


@@ -1,35 +0,0 @@
# This is only for jsoncpp developers/contributors.
# We use this to sign releases, generate documentation, etc.
VER?=$(shell cat version)
default:
@echo "VER=${VER}"
sign: jsoncpp-${VER}.tar.gz
gpg --armor --detach-sign $<
gpg --verify $<.asc
# Then upload .asc to the release.
jsoncpp-%.tar.gz:
curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@
dox:
python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in
rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/
# Then 'git add -A' and 'git push' in jsoncpp-docs.
build:
mkdir -p build/debug
cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_SHARED_LIBS=ON -G "Unix Makefiles" ../..
make -C build/debug
# Currently, this depends on include/json/version.h generated
# by cmake.
test-amalgamate:
python2.7 amalgamate.py
python3.4 amalgamate.py
cd dist; gcc -I. -c jsoncpp.cpp
valgrind:
valgrind --error-exitcode=42 --leak-check=full ./build/debug/src/test_lib_json/jsoncpp_test
clean:
\rm -rf *.gz *.asc dist/
.PHONY: build


@@ -1,6 +1 @@
# Copyright 2010 Baptiste Lepilleur and The JsonCpp Authors
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
# module


@@ -1,33 +0,0 @@
{
"cmake_variants" : [
{"name": "generator",
"generators": [
{"generator": [
"Visual Studio 7 .NET 2003",
"Visual Studio 9 2008",
"Visual Studio 9 2008 Win64",
"Visual Studio 10",
"Visual Studio 10 Win64",
"Visual Studio 11",
"Visual Studio 11 Win64"
]
},
{"generator": ["MinGW Makefiles"],
"env_prepend": [{"path": "c:/wut/prg/MinGW/bin"}]
}
]
},
{"name": "shared_dll",
"variables": [
["BUILD_SHARED_LIBS=true"],
["BUILD_SHARED_LIBS=false"]
]
},
{"name": "build_type",
"build_types": [
"debug",
"release"
]
}
]
}


@@ -1,26 +0,0 @@
{
"cmake_variants" : [
{"name": "generator",
"generators": [
{"generator": [
"Visual Studio 6",
"Visual Studio 7",
"Visual Studio 8 2005"
]
}
]
},
{"name": "shared_dll",
"variables": [
["BUILD_SHARED_LIBS=true"],
["BUILD_SHARED_LIBS=false"]
]
},
{"name": "build_type",
"build_types": [
"debug",
"release"
]
}
]
}

devtools/antglob.py

@@ -1,11 +1,7 @@
#!/usr/bin/env python
# encoding: utf-8
# Copyright 2009 Baptiste Lepilleur and The JsonCpp Authors
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
# Baptiste Lepilleur, 2009
from __future__ import print_function
from dircache import listdir
import re
import fnmatch
@@ -194,12 +190,12 @@ if __name__ == "__main__":
test_cases.append( (ant_pattern, local_path(accepted_matches), local_path( rejected_matches )) )
for ant_pattern, accepted_matches, rejected_matches in test_cases:
rex = ant_pattern_to_re( ant_pattern )
print('ant_pattern:', ant_pattern, ' => ', rex.pattern)
print 'ant_pattern:', ant_pattern, ' => ', rex.pattern
for accepted_match in accepted_matches:
print('Accepted?:', accepted_match)
self.assertTrue(rex.match(accepted_match) is not None)
print 'Accepted?:', accepted_match
self.assert_( rex.match( accepted_match ) is not None )
for rejected_match in rejected_matches:
print('Rejected?:', rejected_match)
self.assertTrue(rex.match(rejected_match) is None)
print 'Rejected?:', rejected_match
self.assert_( rex.match( rejected_match ) is None )
unittest.main()

devtools/batchbuild.py

@@ -1,278 +0,0 @@
from __future__ import print_function
import collections
import itertools
import json
import os
import os.path
import re
import shutil
import string
import subprocess
import sys
import cgi
class BuildDesc:
def __init__(self, prepend_envs=None, variables=None, build_type=None, generator=None):
self.prepend_envs = prepend_envs or [] # [ { "var": "value" } ]
self.variables = variables or []
self.build_type = build_type
self.generator = generator
def merged_with(self, build_desc):
"""Returns a new BuildDesc by merging field content.
Prefer build_desc fields to self fields for single valued field.
"""
return BuildDesc(self.prepend_envs + build_desc.prepend_envs,
self.variables + build_desc.variables,
build_desc.build_type or self.build_type,
build_desc.generator or self.generator)
def env(self):
environ = os.environ.copy()
for values_by_name in self.prepend_envs:
for var, value in list(values_by_name.items()):
var = var.upper()
if type(value) is unicode:
value = value.encode(sys.getdefaultencoding())
if var in environ:
environ[var] = value + os.pathsep + environ[var]
else:
environ[var] = value
return environ
def cmake_args(self):
args = ["-D%s" % var for var in self.variables]
# skip build type for Visual Studio solution as it cause warning
if self.build_type and 'Visual' not in self.generator:
args.append("-DCMAKE_BUILD_TYPE=%s" % self.build_type)
if self.generator:
args.extend(['-G', self.generator])
return args
def __repr__(self):
return "BuildDesc(%s, build_type=%s)" % (" ".join(self.cmake_args()), self.build_type)
class BuildData:
def __init__(self, desc, work_dir, source_dir):
self.desc = desc
self.work_dir = work_dir
self.source_dir = source_dir
self.cmake_log_path = os.path.join(work_dir, 'batchbuild_cmake.log')
self.build_log_path = os.path.join(work_dir, 'batchbuild_build.log')
self.cmake_succeeded = False
self.build_succeeded = False
def execute_build(self):
print('Build %s' % self.desc)
self._make_new_work_dir()
self.cmake_succeeded = self._generate_makefiles()
if self.cmake_succeeded:
self.build_succeeded = self._build_using_makefiles()
return self.build_succeeded
def _generate_makefiles(self):
print(' Generating makefiles: ', end=' ')
cmd = ['cmake'] + self.desc.cmake_args() + [os.path.abspath(self.source_dir)]
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.cmake_log_path)
print('done' if succeeded else 'FAILED')
return succeeded
def _build_using_makefiles(self):
print(' Building:', end=' ')
cmd = ['cmake', '--build', self.work_dir]
if self.desc.build_type:
cmd += ['--config', self.desc.build_type]
succeeded = self._execute_build_subprocess(cmd, self.desc.env(), self.build_log_path)
print('done' if succeeded else 'FAILED')
return succeeded
def _execute_build_subprocess(self, cmd, env, log_path):
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=self.work_dir,
env=env)
stdout, _ = process.communicate()
succeeded = (process.returncode == 0)
with open(log_path, 'wb') as flog:
log = ' '.join(cmd) + '\n' + stdout + '\nExit code: %r\n' % process.returncode
flog.write(fix_eol(log))
return succeeded
def _make_new_work_dir(self):
if os.path.isdir(self.work_dir):
print(' Removing work directory', self.work_dir)
shutil.rmtree(self.work_dir, ignore_errors=True)
if not os.path.isdir(self.work_dir):
os.makedirs(self.work_dir)
def fix_eol(stdout):
"""Fixes wrong EOL produced by cmake --build on Windows (\r\r\n instead of \r\n).
"""
return re.sub('\r*\n', os.linesep, stdout)
def load_build_variants_from_config(config_path):
with open(config_path, 'rb') as fconfig:
data = json.load(fconfig)
variants = data[ 'cmake_variants' ]
build_descs_by_axis = collections.defaultdict(list)
for axis in variants:
axis_name = axis["name"]
build_descs = []
if "generators" in axis:
for generator_data in axis["generators"]:
for generator in generator_data["generator"]:
build_desc = BuildDesc(generator=generator,
prepend_envs=generator_data.get("env_prepend"))
build_descs.append(build_desc)
elif "variables" in axis:
for variables in axis["variables"]:
build_desc = BuildDesc(variables=variables)
build_descs.append(build_desc)
elif "build_types" in axis:
for build_type in axis["build_types"]:
build_desc = BuildDesc(build_type=build_type)
build_descs.append(build_desc)
build_descs_by_axis[axis_name].extend(build_descs)
return build_descs_by_axis
def generate_build_variants(build_descs_by_axis):
"""Returns a list of BuildDesc generated for the partial BuildDesc for each axis."""
axis_names = list(build_descs_by_axis.keys())
build_descs = []
for axis_name, axis_build_descs in list(build_descs_by_axis.items()):
if len(build_descs):
# for each existing build_desc and each axis build desc, create a new build_desc
new_build_descs = []
for prototype_build_desc, axis_build_desc in itertools.product(build_descs, axis_build_descs):
new_build_descs.append(prototype_build_desc.merged_with(axis_build_desc))
build_descs = new_build_descs
else:
build_descs = axis_build_descs
return build_descs
HTML_TEMPLATE = string.Template('''<html>
<head>
<title>$title</title>
<style type="text/css">
td.failed {background-color:#f08080;}
td.ok {background-color:#c0eec0;}
</style>
</head>
<body>
<table border="1">
<thead>
<tr>
<th>Variables</th>
$th_vars
</tr>
<tr>
<th>Build type</th>
$th_build_types
</tr>
</thead>
<tbody>
$tr_builds
</tbody>
</table>
</body></html>''')
def generate_html_report(html_report_path, builds):
report_dir = os.path.dirname(html_report_path)
# Vertical axis: generator
# Horizontal: variables, then build_type
builds_by_generator = collections.defaultdict(list)
variables = set()
build_types_by_variable = collections.defaultdict(set)
build_by_pos_key = {} # { (generator, var_key, build_type): build }
for build in builds:
builds_by_generator[build.desc.generator].append(build)
var_key = tuple(sorted(build.desc.variables))
variables.add(var_key)
build_types_by_variable[var_key].add(build.desc.build_type)
pos_key = (build.desc.generator, var_key, build.desc.build_type)
build_by_pos_key[pos_key] = build
variables = sorted(variables)
th_vars = []
th_build_types = []
for variable in variables:
build_types = sorted(build_types_by_variable[variable])
nb_build_type = len(build_types_by_variable[variable])
th_vars.append('<th colspan="%d">%s</th>' % (nb_build_type, cgi.escape(' '.join(variable))))
for build_type in build_types:
th_build_types.append('<th>%s</th>' % cgi.escape(build_type))
tr_builds = []
for generator in sorted(builds_by_generator):
tds = [ '<td>%s</td>\n' % cgi.escape(generator) ]
for variable in variables:
build_types = sorted(build_types_by_variable[variable])
for build_type in build_types:
pos_key = (generator, variable, build_type)
build = build_by_pos_key.get(pos_key)
if build:
cmake_status = 'ok' if build.cmake_succeeded else 'FAILED'
build_status = 'ok' if build.build_succeeded else 'FAILED'
cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir)
build_log_url = os.path.relpath(build.build_log_path, report_dir)
td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
if build.cmake_succeeded:
td += '<br><a href="%s" class="%s">Build: %s</a>' % ( build_log_url, build_status.lower(), build_status)
td += '</td>'
else:
td = '<td></td>'
tds.append(td)
tr_builds.append('<tr>%s</tr>' % '\n'.join(tds))
html = HTML_TEMPLATE.substitute( title='Batch build report',
th_vars=' '.join(th_vars),
th_build_types=' '.join(th_build_types),
tr_builds='\n'.join(tr_builds))
with open(html_report_path, 'wt') as fhtml:
fhtml.write(html)
print('HTML report generated in:', html_report_path)
def main():
usage = r"""%prog WORK_DIR SOURCE_DIR CONFIG_JSON_PATH [CONFIG2_JSON_PATH...]
Build a given CMake-based project located in SOURCE_DIR with multiple generators/options,
as described in CONFIG_JSON_PATH, building in WORK_DIR.
Example of call:
python devtools\batchbuild.py e:\buildbots\jsoncpp\build . devtools\agent_vmw7.json
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = True
# parser.add_option('-v', '--verbose', dest="verbose", action='store_true',
# help="""Be verbose.""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
if len(args) < 3:
parser.error("Missing one of WORK_DIR SOURCE_DIR CONFIG_JSON_PATH.")
work_dir = args[0]
source_dir = args[1].rstrip('/\\')
config_paths = args[2:]
for config_path in config_paths:
if not os.path.isfile(config_path):
parser.error("Can not read: %r" % config_path)
# generate build variants
build_descs = []
for config_path in config_paths:
build_descs_by_axis = load_build_variants_from_config(config_path)
build_descs.extend(generate_build_variants(build_descs_by_axis))
print('Build variants (%d):' % len(build_descs))
# assign build directory for each variant
if not os.path.isdir(work_dir):
os.makedirs(work_dir)
builds = []
with open(os.path.join(work_dir, 'matrix-dir-map.txt'), 'wt') as fmatrixmap:
for index, build_desc in enumerate(build_descs):
build_desc_work_dir = os.path.join(work_dir, '%03d' % (index+1))
builds.append(BuildData(build_desc, build_desc_work_dir, source_dir))
fmatrixmap.write('%s: %s\n' % (build_desc_work_dir, build_desc))
for build in builds:
build.execute_build()
html_report_path = os.path.join(work_dir, 'batchbuild-report.html')
generate_html_report(html_report_path, builds)
print('Done')
if __name__ == '__main__':
main()

View File

@@ -1,11 +1,4 @@
# Copyright 2010 Baptiste Lepilleur and The JsonCpp Authors
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from __future__ import print_function
import os.path
import sys
def fix_source_eol( path, is_dry_run = True, verbose = True, eol = '\n' ):
"""Makes sure that all sources have the specified eol sequence (default: unix)."""
@@ -13,8 +6,8 @@ def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
raise ValueError( 'Path "%s" is not a file' % path )
try:
f = open(path, 'rb')
except IOError as msg:
print("%s: I/O Error: %s" % (file, str(msg)), file=sys.stderr)
except IOError, msg:
print >> sys.stderr, "%s: I/O Error: %s" % (file, str(msg))
return False
try:
raw_lines = f.readlines()
@@ -22,7 +15,7 @@ def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
f.close()
fixed_lines = [line.rstrip('\r\n') + eol for line in raw_lines]
if raw_lines != fixed_lines:
print('%s =>' % path, end=' ')
print '%s =>' % path,
if not is_dry_run:
f = open(path, "wb")
try:
@@ -30,7 +23,7 @@ def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
finally:
f.close()
if verbose:
print(is_dry_run and ' NEED FIX' or ' FIXED')
print is_dry_run and ' NEED FIX' or ' FIXED'
return True
##
##

View File

@@ -1,12 +1,11 @@
"""Updates the license text in source file.
"""
from __future__ import print_function
# An existing license is found if the file starts with the string below,
# and ends with the first blank line.
LICENSE_BEGIN = "// Copyright "
BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur and The JsonCpp Authors
BRIEF_LICENSE = LICENSE_BEGIN + """2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -35,11 +34,11 @@ def update_license(path, dry_run, show_diff):
if not dry_run:
with open( path, 'wb' ) as fout:
fout.write( new_text.replace('\n', newline ) )
print('Updated', path)
print 'Updated', path
if show_diff:
import difflib
print('\n'.join(difflib.unified_diff(original_text.split('\n'),
new_text.split('\n'))))
print '\n'.join( difflib.unified_diff( original_text.split('\n'),
new_text.split('\n') ) )
return True
return False
@@ -84,7 +83,7 @@ python devtools\licenseupdater.py include src
parser.enable_interspersed_args()
options, args = parser.parse_args()
update_license_in_source_directories( args, options.dry_run, options.show_diff )
print('Done')
print 'Done'
if __name__ == '__main__':
import sys

View File

@@ -1,10 +1,5 @@
# Copyright 2010 Baptiste Lepilleur and The JsonCpp Authors
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
from contextlib import closing
import os
import os.path
import gzip
import tarfile
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
@@ -34,19 +29,25 @@ def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
path_in_tar = archive_name(path)
tar.add(path, path_in_tar )
compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
with closing(tarfile.TarFile.open(tarball_path, 'w:gz',
compresslevel=compression)) as tar:
tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
try:
for source in sources:
source_path = source
if os.path.isdir( source ):
for dirpath, dirnames, filenames in os.walk(source_path):
visit(tar, dirpath, filenames)
os.path.walk(source_path, visit, tar)
else:
path_in_tar = archive_name(source_path)
tar.add(source_path, path_in_tar ) # filename, arcname
finally:
tar.close()
def decompress( tarball_path, base_dir ):
"""Decompress the gzipped tarball into directory base_dir.
"""
with closing(tarfile.TarFile.open(tarball_path)) as tar:
# !!! This class method is not documented in the online doc
# nor is bz2open!
tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
try:
tar.extractall( base_dir )
finally:
tar.close()

File diff suppressed because it is too large

View File

@@ -1,3 +1,23 @@
<hr>
<table width="100%">
<tr>
<td width="10%" align="left" valign="center">
<a href="http://sourceforge.net">
<img
src="http://sourceforge.net/sflogo.php?group_id=144446"
width="88" height="31" border="0" alt="SourceForge Logo"></a>
</td>
<td width="20%" align="left" valign="center">
hosts this site.
</td>
<td>
</td>
<td align="right" valign="center">
Send comments to:<br>
<a href="mailto:jsoncpp-devel@lists.sourceforge.net">Json-cpp Developers</a>
</td>
</tr>
</table>
</body>
</html>

View File

@@ -11,12 +11,12 @@ JsonCpp - JSON data format manipulation library
<table width="100%">
<tr>
<td width="40%" align="left" valign="center">
<a href="https://github.com/open-source-parsers/jsoncpp">
<a href="http://sourceforge.net/projects/jsoncpp/">
JsonCpp project page
</a>
</td>
<td width="40%" align="right" valign="center">
<a href="http://open-source-parsers.github.io/jsoncpp-docs/doxygen/">JsonCpp home page</a>
<a href="http://jsoncpp.sourceforge.net">JsonCpp home page</a>
</td>
</tr>
</table>

View File

@@ -4,21 +4,11 @@
<a HREF="http://www.json.org/">JSON (JavaScript Object Notation)</a>
is a lightweight data-interchange format.
It can represent integers, real numbers, strings, an ordered sequence of values, and
a collection of name/value pairs.
Here is an example of JSON data:
\verbatim
{
"encoding" : "UTF-8",
"plug-ins" : [
"python",
"c++",
"ruby"
],
"indent" : { "length" : 3, "use_space": true }
}
\endverbatim
<b>JsonCpp</b> supports comments as <i>meta-data</i>:
\code
// Configuration options
{
// Default encoding for text
@@ -27,22 +17,21 @@ Here is an example of JSON data:
// Plug-ins loaded at start-up
"plug-ins" : [
"python",
"c++", // trailing comment
"c++",
"ruby"
],
// Tab indent size
// (multi-line comment)
"indent" : { /*embedded comment*/ "length" : 3, "use_space": true }
"indent" : { "length" : 3, "use_space": true }
}
\endcode
\endverbatim
\section _features Features
- read and write JSON document
- attach C++ style comments to element during parsing
- attach C and C++ style comments to element during parsing
- rewrite JSON document preserving original comments
Notes: Comments used to be supported in JSON but were removed for
Notes: Comments used to be supported in JSON but where removed for
portability (C like comments are not supported in Python). Since
comments are useful in configuration/input file, this feature was
preserved.
@@ -50,115 +39,88 @@ preserved.
\section _example Code example
\code
Json::Value root; // 'root' will contain the root value after parsing.
std::cin >> root;
Json::Value root; // will contains the root value after parsing.
Json::Reader reader;
bool parsingSuccessful = reader.parse( config_doc, root );
if ( !parsingSuccessful )
{
// report to the user the failure and their locations in the document.
std::cout << "Failed to parse configuration\n"
<< reader.getFormattedErrorMessages();
return;
}
// You can also read into a particular sub-value.
std::cin >> root["subtree"];
// Get the value of the member of root named 'encoding',
// and return 'UTF-8' if there is no such member.
// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
// such member.
std::string encoding = root.get("encoding", "UTF-8" ).asString();
// Get the value of the member of root named 'plug-ins'; return a 'null' value if
// Get the value of the member of root named 'encoding', return a 'null' value if
// there is no such member.
const Json::Value plugins = root["plug-ins"];
// Iterate over the sequence elements.
for ( int index = 0; index < plugins.size(); ++index )
for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements.
loadPlugIn( plugins[index].asString() );
// Try other datatypes. Some are auto-convertible to others.
foo::setIndentLength( root["indent"].get("length", 3).asInt() );
foo::setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
setIndentLength( root["indent"].get("length", 3).asInt() );
setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
// Since Json::Value has an implicit constructor for all value types, it is not
// necessary to explicitly construct the Json::Value object.
root["encoding"] = foo::getCurrentEncoding();
root["indent"]["length"] = foo::getCurrentIndentLength();
root["indent"]["use_space"] = foo::getCurrentIndentUseSpace();
// If you like the defaults, you can insert directly into a stream.
std::cout << root;
// Of course, you can write to `std::ostringstream` if you prefer.
// If desired, remember to add a linefeed and flush.
std::cout << std::endl;
\endcode
\section _advanced Advanced usage
Configure *builders* to create *readers* and *writers*. For
configuration, we use our own `Json::Value` (rather than
standard setters/getters) so that we can add
features without losing binary-compatibility.
\code
// For convenience, use `writeString()` with a specialized builder.
Json::StreamWriterBuilder wbuilder;
wbuilder["indentation"] = "\t";
std::string document = Json::writeString(wbuilder, root);
// Here, using a specialized Builder, we discard comments and
// record errors as we parse.
Json::CharReaderBuilder rbuilder;
rbuilder["collectComments"] = false;
std::string errs;
bool ok = Json::parseFromStream(rbuilder, std::cin, &root, &errs);
\endcode
Yes, compile-time configuration-checking would be helpful,
but `Json::Value` lets you
write and read the builder configuration, which is better! In other words,
you can configure your JSON parser using JSON.
CharReaders and StreamWriters are not thread-safe, but they are re-usable.
\code
Json::CharReaderBuilder rbuilder;
cfg >> rbuilder.settings_;
std::unique_ptr<Json::CharReader> const reader(rbuilder.newCharReader());
reader->parse(start, stop, &value1, &errs);
// ...
reader->parse(start, stop, &value2, &errs);
// etc.
// At application shutdown to make the new configuration document:
// Since Json::Value has implicit constructor for all value types, it is not
// necessary to explicitly construct the Json::Value object:
root["encoding"] = getCurrentEncoding();
root["indent"]["length"] = getCurrentIndentLength();
root["indent"]["use_space"] = getCurrentIndentUseSpace();
Json::StyledWriter writer;
// Make a new JSON document for the configuration. Preserve original comments.
std::string outputConfig = writer.write( root );
// You can also use streams. This will put the contents of any JSON
// stream at a particular sub-value, if you'd like.
std::cin >> root["subtree"];
// And you can write to a stream, using the StyledWriter automatically.
std::cout << root;
\endcode
\section _pbuild Build instructions
The build instructions are located in the file
<a HREF="https://github.com/open-source-parsers/jsoncpp/blob/master/README.md">README.md</a> in the top-directory of the project.
<a HREF="README.txt">README.txt</a> in the top-directory of the project.
The latest version of the source is available in the project's GitHub repository:
<a HREF="https://github.com/open-source-parsers/jsoncpp/">
jsoncpp</a>
Permanent link to the latest revision of the file in subversion:
<a HREF="http://jsoncpp.svn.sourceforge.net/viewvc/jsoncpp/trunk/jsoncpp/README.txt?view=markup">latest README.txt</a>
\section _pdownload Download
The sources can be downloaded from
<a HREF="http://sourceforge.net/projects/jsoncpp/files/">SourceForge download page</a>.
The latest version of the source is available in the project's subversion repository:
<a HREF="http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/">
http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/</a>
To checkout the source, see the following
<a HREF="http://sourceforge.net/scm/?type=svn&group_id=144446">instructions</a>.
\section _news What's New?
The description of latest changes can be found in
<a HREF="https://github.com/open-source-parsers/jsoncpp/wiki/NEWS">
the NEWS wiki
</a>.
<a HREF="NEWS.txt">NEWS.txt</a> in the top-directory of the project.
Permanent link to the latest revision of the file in subversion:
<a HREF="http://svn.sourceforge.net/viewcvs.cgi/jsoncpp/README.txt?view=markup">latest NEWS.txt</a>
\section _plinks Project links
- <a HREF="http://jsoncpp.sourceforge.net">json-cpp home</a>
- <a HREF="http://www.sourceforge.net/projects/jsoncpp/">json-cpp sourceforge project</a>
\section _rlinks Related links
- <a HREF="http://www.json.org/">JSON</a> Specification and alternate language implementations.
- <a HREF="http://www.yaml.org/">YAML</a> A data format designed for human readability.
- <a HREF="http://www.cl.cam.ac.uk/~mgk25/unicode.html">UTF-8 and Unicode FAQ</a>.
\section _plinks Old project links
- <a href="https://sourceforge.net/projects/jsoncpp/">https://sourceforge.net/projects/jsoncpp/</a>
- <a href="http://jsoncpp.sourceforge.net">http://jsoncpp.sourceforge.net</a>
- <a href="http://sourceforge.net/projects/jsoncpp/files/">http://sourceforge.net/projects/jsoncpp/files/</a>
- <a href="http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/">http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/</a>
- <a href="http://jsoncpp.sourceforge.net/old.html">http://jsoncpp.sourceforge.net/old.html</a>
\section _license License
See file <a href="https://github.com/open-source-parsers/jsoncpp/blob/master/LICENSE"><code>LICENSE</code></a> in the top-directory of the project.
See file <a HREF="LICENSE">LICENSE</a> in the top-directory of the project.
Basically JsonCpp is licensed under MIT license, or public domain if desired
and recognized in your jurisdiction.
\author Baptiste Lepilleur <blep@users.sourceforge.net> (originator)
\author Christopher Dunn <cdunn2001@gmail.com> (primary maintainer)
\version \include version
We make strong guarantees about binary-compatibility, consistent with
<a href="http://apr.apache.org/versioning.html">the Apache versioning scheme</a>.
\sa version.h
\author Baptiste Lepilleur <blep@users.sourceforge.net>
*/
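For completeness, here is a minimal, self-contained sketch (not part of the page above) of guarding the stream operators it demonstrates; as the reader header further below notes, Json::operator>> reports a parse error by throwing a std::exception when exceptions are enabled.

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value root;
  try {
    std::cin >> root;                  // parse the whole input stream
  } catch (const std::exception& e) {  // thrown on malformed input
    std::cerr << "Failed to parse configuration: " << e.what() << "\n";
    return 1;
  }
  std::cout << root << std::endl;      // write it back with default formatting
  return 0;
}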

View File

@@ -1,3 +1,37 @@
/*! \page roadmap JsonCpp roadmap
Moved to: https://github.com/open-source-parsers/jsoncpp/wiki/Roadmap
\section ms_release Makes JsonCpp ready for release
- Build system clean-up:
- Fix build on Windows (shared-library build is broken)
- Add enable/disable flag for static and shared library build
- Enhance help
- Platform portability check: (Notes: was ok on last check)
- linux/gcc,
- solaris/cc,
- windows/msvc678,
- aix/vacpp
- Add JsonCpp version to header as numeric for use in preprocessor test
- Remove buggy experimental hash stuff
\section ms_strict Adds a strict mode to reader/parser
Strict JSON support as specified in RFC 4627 (http://www.ietf.org/rfc/rfc4627.txt?number=4627).
- Enforce only object or array as root element
- Disable comment support
- Get jsonchecker failing tests to pass in strict mode
\section ms_writer Writer control
Provides more control to determine how specific items are serialized when JSON allows a choice:
- Optionally allow escaping of non-ASCII characters using unicode escape sequence "\\u".
- Optionally allow escaping of "/" using "\/".
\section ms_separation Expose JSON reader/writer APIs that do not impose using Json::Value.
Some typical use-cases involve an application-specific structure to/from a JSON document.
- Event-based parser to allow deserializing a JSON document directly into an application data structure instead of
using the intermediate Json::Value.
- Stream-based parser to serialize a JSON document without using Json::Value as input.
- Performance oriented parser/writer:
- Provides an event based parser. Should allow pulling & skipping events for ease of use.
- Provides a JSON document builder: fast only.
\section ms_perfo Performance tuning
- Provides support for static property name definition avoiding allocation
- Static property dictionary can be provided to the JSON reader
- Performance scenario & benchmarking
\section testing Testing
- Adds more tests for unicode parsing (e.g. including surrogate and error detection).
*/

File diff suppressed because it is too large Load Diff

View File

@@ -1,27 +1,12 @@
"""Script to generate doxygen documentation.
"""
from __future__ import print_function
from __future__ import unicode_literals
from devtools import tarball
from contextlib import contextmanager
import subprocess
import traceback
import re
import os
import os.path
import sys
import shutil
@contextmanager
def cd(newdir):
"""
http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
"""
prevdir = os.getcwd()
os.chdir(newdir)
try:
yield
finally:
os.chdir(prevdir)
from devtools import tarball
def find_program(*filenames):
"""find a program in folders path_lst, and sets env[var]
@@ -31,7 +16,7 @@ def find_program(*filenames):
paths = os.environ.get('PATH', '').split(os.pathsep)
suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
for filename in filenames:
for name in [filename+ext for ext in suffixes.split(' ')]:
for name in [filename+ext for ext in suffixes.split()]:
for directory in paths:
full_path = os.path.join(directory, name)
if os.path.isfile(full_path):
@@ -43,54 +28,51 @@ def do_subst_in_file(targetfile, sourcefile, dict):
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
"""
with open(sourcefile, 'r') as f:
try:
f = open(sourcefile, 'rb')
contents = f.read()
for (k,v) in list(dict.items()):
f.close()
except:
print "Can't read source file %s"%sourcefile
raise
for (k,v) in dict.items():
v = v.replace('\\','\\\\')
contents = re.sub(k, v, contents)
with open(targetfile, 'w') as f:
f.write(contents)
def getstatusoutput(cmd):
"""cmd is a list.
"""
try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output, _ = process.communicate()
status = process.returncode
f = open(targetfile, 'wb')
f.write(contents)
f.close()
except:
status = -1
output = traceback.format_exc()
return status, output
def run_cmd(cmd, silent=False):
"""Raise exception on failure.
"""
info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
print(info)
sys.stdout.flush()
if silent:
status, output = getstatusoutput(cmd)
else:
status, output = subprocess.call(cmd), ''
if status:
msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
raise Exception(msg)
def assert_is_exe(path):
if not path:
raise Exception('path is empty.')
if not os.path.isfile(path):
raise Exception('%r is not a file.' %path)
if not os.access(path, os.X_OK):
raise Exception('%r is not executable by this user.' %path)
print "Can't write target file %s"%targetfile
raise
def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
assert_is_exe(doxygen_path)
config_file = os.path.abspath( config_file )
with cd(working_dir):
doxygen_path = doxygen_path
old_cwd = os.getcwd()
try:
os.chdir( working_dir )
cmd = [doxygen_path, config_file]
run_cmd(cmd, is_silent)
print 'Running:', ' '.join( cmd )
try:
import subprocess
except:
if os.system( ' '.join( cmd ) ) != 0:
print 'Documentation generation failed'
return False
else:
if is_silent:
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
else:
process = subprocess.Popen( cmd )
stdout, _ = process.communicate()
if process.returncode:
print 'Documentation generation failed:'
print stdout
return False
return True
finally:
os.chdir( old_cwd )
def build_doc( options, make_release=False ):
if make_release:
@@ -125,26 +107,26 @@ def build_doc(options, make_release=False):
}
if os.path.isdir( output_dir ):
print('Deleting directory:', output_dir)
print 'Deleting directory:', output_dir
shutil.rmtree( output_dir )
if not os.path.isdir( output_dir ):
os.makedirs( output_dir )
do_subst_in_file('doc/doxyfile', options.doxyfile_input_path, subst_keys)
run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent)
do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
if not options.silent:
print(open(warning_log_path, 'r').read())
index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
print('Generated documentation can be found in:')
print(index_path)
print open(warning_log_path, 'rb').read()
index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html'))
print 'Generated documentation can be found in:'
print index_path
if options.open:
import webbrowser
webbrowser.open( 'file://' + index_path )
if options.make_tarball:
print('Generating doc tarball to', tarball_path)
print 'Generating doc tarball to', tarball_path
tarball_sources = [
output_dir,
'README.md',
'README.txt',
'LICENSE',
'NEWS.txt',
'version'
@@ -169,8 +151,6 @@ def main():
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
help="""Path to Doxygen tool. [Default: %default]""")
parser.add_option('--in', dest="doxyfile_input_path", action='store', default='doc/doxyfile.in',
help="""Path to doxygen inputs. [Default: %default]""")
parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
help="""Enable generation of Microsoft HTML HELP""")
parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,

View File

@@ -1,2 +0,0 @@
FILE(GLOB INCLUDE_FILES "json/*.h")
INSTALL(FILES ${INCLUDE_FILES} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/json)

View File

@@ -1,98 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef CPPTL_JSON_ALLOCATOR_H_INCLUDED
#define CPPTL_JSON_ALLOCATOR_H_INCLUDED
#include <cstring>
#include <memory>
#pragma pack(push, 8)
namespace Json {
template<typename T>
class SecureAllocator {
public:
// Type definitions
using value_type = T;
using pointer = T*;
using const_pointer = const T*;
using reference = T&;
using const_reference = const T&;
using size_type = std::size_t;
using difference_type = std::ptrdiff_t;
/**
* Allocate memory for N items using the standard allocator.
*/
pointer allocate(size_type n) {
// allocate using "global operator new"
return static_cast<pointer>(::operator new(n * sizeof(T)));
}
/**
* Release memory which was allocated for N items at pointer P.
*
* The memory block is filled with zeroes before being released.
* The pointer argument is tagged as "volatile" to prevent the
* compiler optimizing out this critical step.
*/
void deallocate(volatile pointer p, size_type n) {
std::memset(p, 0, n * sizeof(T));
// free using "global operator delete"
::operator delete(p);
}
/**
* Construct an item in-place at pointer P.
*/
template<typename... Args>
void construct(pointer p, Args&&... args) {
// construct using "placement new" and "perfect forwarding"
::new (static_cast<void*>(p)) T(std::forward<Args>(args)...);
}
size_type max_size() const {
return size_t(-1) / sizeof(T);
}
pointer address( reference x ) const {
return std::addressof(x);
}
const_pointer address( const_reference x ) const {
return std::addressof(x);
}
/**
* Destroy an item in-place at pointer P.
*/
void destroy(pointer p) {
// destroy using "explicit destructor"
p->~T();
}
// Boilerplate
SecureAllocator() {}
template<typename U> SecureAllocator(const SecureAllocator<U>&) {}
template<typename U> struct rebind { using other = SecureAllocator<U>; };
};
template<typename T, typename U>
bool operator==(const SecureAllocator<T>&, const SecureAllocator<U>&) {
return true;
}
template<typename T, typename U>
bool operator!=(const SecureAllocator<T>&, const SecureAllocator<U>&) {
return false;
}
} //namespace Json
#pragma pack(pop)
#endif // CPPTL_JSON_ALLOCATOR_H_INCLUDED
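A brief usage sketch (illustrative only, not part of the header): plugging SecureAllocator into a standard container so the backing buffer is wiped on release. The SecureString alias mirrors what JSONCPP_STRING expands to when JSONCPP_USING_SECURE_MEMORY is enabled.

#include <string>
#include "json/allocator.h"

// Illustrative alias: a string type whose storage is zeroed in deallocate().
using SecureString =
    std::basic_string<char, std::char_traits<char>, Json::SecureAllocator<char> >;

void handleSecret() {
  SecureString passphrase("correct horse battery staple");
  // ... use the secret ...
}  // buffer is memset to 0 inside SecureAllocator::deallocate()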

View File

@@ -1,54 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED
#define CPPTL_JSON_ASSERTIONS_H_INCLUDED
#include <stdlib.h>
#include <sstream>
#if !defined(JSON_IS_AMALGAMATION)
#include "config.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
/** It should not be possible for a maliciously designed file to
* cause an abort() or seg-fault, so these macros are used only
* for pre-condition violations and internal logic errors.
*/
#if JSON_USE_EXCEPTION
// @todo <= add detail about condition in exception
# define JSON_ASSERT(condition) \
{if (!(condition)) {Json::throwLogicError( "assert json failed" );}}
# define JSON_FAIL_MESSAGE(message) \
{ \
JSONCPP_OSTRINGSTREAM oss; oss << message; \
Json::throwLogicError(oss.str()); \
abort(); \
}
#else // JSON_USE_EXCEPTION
# define JSON_ASSERT(condition) assert(condition)
// The call to assert() will show the failure message in debug builds. In
// release builds we abort, for a core-dump or debugger.
# define JSON_FAIL_MESSAGE(message) \
{ \
JSONCPP_OSTRINGSTREAM oss; oss << message; \
assert(false && oss.str().c_str()); \
abort(); \
}
#endif
#define JSON_ASSERT_MESSAGE(condition, message) \
if (!(condition)) { \
JSON_FAIL_MESSAGE(message); \
}
#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
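An illustrative example (not from the header) of the intended use of these macros for precondition checks; the function and message are made up.

#include "json/assertions.h"
#include "json/value.h"

// Illustrative only. With JSON_USE_EXCEPTION enabled, a violated condition
// is reported through Json::throwLogicError() rather than abort().
int firstElementAsInt(const Json::Value& array) {
  JSON_ASSERT_MESSAGE(array.isArray(),
                      "expected an array, got type " << array.type());
  JSON_ASSERT(array.size() > 0);
  return array[0].asInt();
}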

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -12,8 +12,7 @@
# include <cpptl/cpptl_autolink.h>
# endif
#if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && \
!defined(JSON_IN_CPPTL)
# if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && !defined(JSON_IN_CPPTL)
# define CPPTL_AUTOLINK_NAME "json"
# undef CPPTL_AUTOLINK_DLL
# ifdef JSON_DLL

View File

@@ -1,34 +1,38 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSON_CONFIG_H_INCLUDED
# define JSON_CONFIG_H_INCLUDED
#include <stddef.h>
#include <string> //typedef String
#include <stdint.h> //typedef int64_t, uint64_t
/// If defined, indicates that json library is embedded in CppTL library.
//# define JSON_IN_CPPTL 1
/// If defined, indicates that json may leverage CppTL library
//# define JSON_USE_CPPTL 1
/// If defined, indicates that cpptl vector based map should be used instead of
/// std::map
/// If defined, indicates that cpptl vector based map should be used instead of std::map
/// as Value container.
//# define JSON_USE_CPPTL_SMALLMAP 1
/// If defined, indicates that Json specific container should be used
/// (hash table & simple deque container with customizable allocator).
/// THIS FEATURE IS STILL EXPERIMENTAL! There are known bugs: see #3177332
//# define JSON_VALUE_USE_INTERNAL_MAP 1
/// Force usage of standard new/malloc based allocator instead of memory pool based allocator.
/// The memory pools allocator used optimization (initializing Value and ValueInternalLink
/// as if it was a POD) that may cause some validation tool to report errors.
/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1
// If non-zero, the library uses exceptions to report bad input instead of C
// assertion macros. The default is to use exceptions.
#ifndef JSON_USE_EXCEPTION
/// If defined, indicates that Json use exception to report invalid type manipulation
/// instead of C assert macro.
# define JSON_USE_EXCEPTION 1
#endif
/// If defined, indicates that the source file is amalgated
/// to prevent private header inclusion.
/// Remarks: it is automatically defined in the generated amalgated header.
// #define JSON_IS_AMALGAMATION
// #define JSON_IS_AMALGATED
# ifdef JSON_IN_CPPTL
# include <cpptl/config.h>
@@ -40,115 +44,32 @@
# ifdef JSON_IN_CPPTL
# define JSON_API CPPTL_API
# elif defined(JSON_DLL_BUILD)
#if defined(_MSC_VER) || defined(__MINGW32__)
# define JSON_API __declspec(dllexport)
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
#endif // if defined(_MSC_VER)
# elif defined(JSON_DLL)
#if defined(_MSC_VER) || defined(__MINGW32__)
# define JSON_API __declspec(dllimport)
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
#endif // if defined(_MSC_VER)
#endif // ifdef JSON_IN_CPPTL
#if !defined(JSON_API)
# else
# define JSON_API
# endif
// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for
// integer
// If JSON_NO_INT64 is defined, then Json only support C++ "int" type for integer
// Storages, and 64 bits integer support is disabled.
// #define JSON_NO_INT64 1
#if defined(_MSC_VER) // MSVC
# if _MSC_VER <= 1200 // MSVC 6
#if defined(_MSC_VER) && _MSC_VER <= 1200 // MSVC 6
// Microsoft Visual Studio 6 only support conversion from __int64 to double
// (no conversion from unsigned __int64).
#define JSON_USE_INT64_DOUBLE_CONVERSION 1
// Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
// characters in the debug information)
// All projects I've ever seen with VS6 were using this globally (not bothering
// with pragma push/pop).
# pragma warning(disable : 4786)
# endif // MSVC 6
#endif // if defined(_MSC_VER) && _MSC_VER < 1200 // MSVC 6
# if _MSC_VER >= 1500 // MSVC 2008
#if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
/// Indicates that the following function is deprecated.
# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
#endif
#endif // defined(_MSC_VER)
// In C++11 the override keyword allows you to explicitly state that a function
// is intended to override the base-class version. This makes the code more
// manageable and fixes a set of common hard-to-find bugs.
#if __cplusplus >= 201103L
# define JSONCPP_OVERRIDE override
# define JSONCPP_NOEXCEPT noexcept
#elif defined(_MSC_VER) && _MSC_VER > 1600 && _MSC_VER < 1900
# define JSONCPP_OVERRIDE override
# define JSONCPP_NOEXCEPT throw()
#elif defined(_MSC_VER) && _MSC_VER >= 1900
# define JSONCPP_OVERRIDE override
# define JSONCPP_NOEXCEPT noexcept
#else
# define JSONCPP_OVERRIDE
# define JSONCPP_NOEXCEPT throw()
#endif
#ifndef JSON_HAS_RVALUE_REFERENCES
#if defined(_MSC_VER) && _MSC_VER >= 1600 // MSVC >= 2010
#define JSON_HAS_RVALUE_REFERENCES 1
#endif // MSVC >= 2010
#ifdef __clang__
#if __has_feature(cxx_rvalue_references)
#define JSON_HAS_RVALUE_REFERENCES 1
#endif // has_feature
#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
#if defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L)
#define JSON_HAS_RVALUE_REFERENCES 1
#endif // GXX_EXPERIMENTAL
#endif // __clang__ || __GNUC__
#endif // not defined JSON_HAS_RVALUE_REFERENCES
#ifndef JSON_HAS_RVALUE_REFERENCES
#define JSON_HAS_RVALUE_REFERENCES 0
#endif
#ifdef __clang__
# if __has_extension(attribute_deprecated_with_message)
# define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
# endif
#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
# define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
# elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
# define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
# endif // GNUC version
#endif // __clang__ || __GNUC__
#if !defined(JSONCPP_DEPRECATED)
# define JSONCPP_DEPRECATED(message)
#endif // if !defined(JSONCPP_DEPRECATED)
#if __GNUC__ >= 6
# define JSON_USE_INT64_DOUBLE_CONVERSION 1
#endif
#if !defined(JSON_IS_AMALGAMATION)
# include "version.h"
# if JSONCPP_USING_SECURE_MEMORY
# include "allocator.h" //typedef Allocator
# endif
#endif // if !defined(JSON_IS_AMALGAMATION)
namespace Json {
typedef int Int;
typedef unsigned int UInt;
@@ -162,26 +83,14 @@ typedef unsigned int LargestUInt;
typedef __int64 Int64;
typedef unsigned __int64 UInt64;
# else // if defined(_MSC_VER) // Other platforms, use long long
typedef int64_t Int64;
typedef uint64_t UInt64;
typedef long long int Int64;
typedef unsigned long long int UInt64;
# endif // if defined(_MSC_VER)
typedef Int64 LargestInt;
typedef UInt64 LargestUInt;
# define JSON_HAS_INT64
# endif // if defined(JSON_NO_INT64)
#if JSONCPP_USING_SECURE_MEMORY
#define JSONCPP_STRING std::basic_string<char, std::char_traits<char>, Json::SecureAllocator<char> >
#define JSONCPP_OSTRINGSTREAM std::basic_ostringstream<char, std::char_traits<char>, Json::SecureAllocator<char> >
#define JSONCPP_OSTREAM std::basic_ostream<char, std::char_traits<char>>
#define JSONCPP_ISTRINGSTREAM std::basic_istringstream<char, std::char_traits<char>, Json::SecureAllocator<char> >
#define JSONCPP_ISTREAM std::istream
#else
#define JSONCPP_STRING std::string
#define JSONCPP_OSTRINGSTREAM std::ostringstream
#define JSONCPP_OSTREAM std::ostream
#define JSONCPP_ISTRINGSTREAM std::istringstream
#define JSONCPP_ISTREAM std::istream
#endif // if JSONCPP_USING_SECURE_MEMORY
} // end namespace Json
#endif // JSON_CONFIG_H_INCLUDED
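A short illustrative snippet (not part of the header) exercising the 64-bit integer typedefs this header selects:

#include "json/value.h"   // pulls in json/config.h
#include <iostream>

// Illustrative only: Json::Int64 maps to __int64 on MSVC and to a 64-bit
// integer type elsewhere; it is absent when JSON_NO_INT64 is defined.
int main() {
#if defined(JSON_HAS_INT64)
  Json::Value big(Json::Value::Int64(9223372036854775807LL));
  std::cout << big.asLargestInt() << "\n";   // LargestInt is Int64 here
#else
  Json::Value big(2147483647);               // only 32-bit ints are available
  std::cout << big.asInt() << "\n";
#endif
  return 0;
}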

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -6,11 +6,9 @@
#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
# define CPPTL_JSON_FEATURES_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#if !defined(JSON_IS_AMALGATED)
# include "forwards.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
#pragma pack(push, 8)
#endif // if !defined(JSON_IS_AMALGATED)
namespace Json {
@@ -18,18 +16,17 @@ namespace Json {
* This configuration object can be used to force the Reader or Writer
* to behave in a standard conforming way.
*/
class JSON_API Features {
class JSON_API Features
{
public:
/** \brief A configuration that allows all features and assumes all strings
* are UTF-8.
/** \brief A configuration that allows all features and assumes all strings are UTF-8.
* - C & C++ comments are allowed
* - Root object can be any JSON value
* - Assumes Value strings are encoded in UTF-8
*/
static Features all();
/** \brief A configuration that is strictly compatible with the JSON
* specification.
/** \brief A configuration that is strictly compatible with the JSON specification.
* - Comments are forbidden.
* - Root object must be either an array or an object value.
* - Assumes Value strings are encoded in UTF-8
@@ -43,19 +40,10 @@ public:
/// \c true if comments are allowed. Default: \c true.
bool allowComments_;
/// \c true if root must be either an array or an object value. Default: \c
/// false.
/// \c true if root must be either an array or an object value. Default: \c false.
bool strictRoot_;
/// \c true if dropped null placeholders are allowed. Default: \c false.
bool allowDroppedNullPlaceholders_;
/// \c true if numeric object key are allowed. Default: \c false.
bool allowNumericKeys_;
};
} // namespace Json
#pragma pack(pop)
#endif // CPPTL_JSON_FEATURES_H_INCLUDED
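An illustrative example (not from the header) of picking the strict configuration described above with the original Reader API. Note that getFormattedErrorMessages() is the newer spelling; older releases only expose getFormatedErrorMessages().

#include "json/reader.h"
#include <iostream>
#include <string>

// Illustrative only: strictMode() forbids comments and requires the root to
// be an array or object, per the flags documented above.
bool parseStrict(const std::string& text, Json::Value& out) {
  Json::Reader reader(Json::Features::strictMode());
  if (!reader.parse(text, out)) {
    std::cerr << reader.getFormattedErrorMessages();
    return false;
  }
  return true;
}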

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -6,9 +6,9 @@
#ifndef JSON_FORWARDS_H_INCLUDED
# define JSON_FORWARDS_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#if !defined(JSON_IS_AMALGATED)
# include "config.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
#endif // if !defined(JSON_IS_AMALGATED)
namespace Json {
@@ -31,7 +31,14 @@ class Value;
class ValueIteratorBase;
class ValueIterator;
class ValueConstIterator;
#ifdef JSON_VALUE_USE_INTERNAL_MAP
class ValueMapAllocator;
class ValueInternalLink;
class ValueInternalArray;
class ValueInternalMap;
#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
} // namespace Json
#endif // JSON_FORWARDS_H_INCLUDED
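For context, a hypothetical consumer header (not from the repository) showing the point of forwards.h: interfaces can mention Json types by reference or pointer without including the full value.h.

#include "json/forwards.h"

// Illustrative only: the forward declarations above are sufficient for these
// declarations; the implementation file includes json/value.h.
class ConfigStore {
public:
  void load(const Json::Value& root);
  const Json::Value* find(const char* key) const;
};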

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -6,49 +6,26 @@
#ifndef CPPTL_JSON_READER_H_INCLUDED
# define CPPTL_JSON_READER_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#if !defined(JSON_IS_AMALGATED)
# include "features.h"
# include "value.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
#endif // if !defined(JSON_IS_AMALGATED)
# include <deque>
#include <iosfwd>
# include <stack>
# include <string>
#include <istream>
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(push)
#pragma warning(disable : 4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma pack(push, 8)
# include <iostream>
namespace Json {
/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
*Value.
/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a Value.
*
* \deprecated Use CharReader and CharReaderBuilder.
*/
class JSONCPP_DEPRECATED("Use CharReader and CharReaderBuilder instead") JSON_API Reader {
class JSON_API Reader
{
public:
typedef char Char;
typedef const Char *Location;
/** \brief An error tagged with where in the JSON text it was encountered.
*
* The offsets give the [start, limit) range of bytes within the text. Note
* that this is bytes, not codepoints.
*
*/
struct StructuredError {
ptrdiff_t offset_start;
ptrdiff_t offset_limit;
JSONCPP_STRING message;
};
/** \brief Constructs a Reader allowing all features
* for parsing.
*/
@@ -59,106 +36,61 @@ public:
*/
Reader( const Features &features );
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
* document.
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> document.
* \param document UTF-8 encoded string containing the document to read.
* \param root [out] Contains the root value of the document if it was
* successfully parsed.
* \param collectComments \c true to collect comment and allow writing them
* back during
* \param collectComments \c true to collect comment and allow writing them back during
* serialization, \c false to discard comments.
* This parameter is ignored if
* Features::allowComments_
* This parameter is ignored if Features::allowComments_
* is \c false.
* \return \c true if the document was successfully parsed, \c false if an
* error occurred.
* \return \c true if the document was successfully parsed, \c false if an error occurred.
*/
bool
parse(const std::string& document, Value& root, bool collectComments = true);
bool parse( const std::string &document,
Value &root,
bool collectComments = true );
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
document.
* \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the
document to read.
* \param endDoc Pointer on the end of the UTF-8 encoded string of the
document to read.
* Must be >= beginDoc.
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> document.
* \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the document to read.
* \param endDoc Pointer on the end of the UTF-8 encoded string of the document to read.
\ Must be >= beginDoc.
* \param root [out] Contains the root value of the document if it was
* successfully parsed.
* \param collectComments \c true to collect comment and allow writing them
back during
* \param collectComments \c true to collect comment and allow writing them back during
* serialization, \c false to discard comments.
* This parameter is ignored if
Features::allowComments_
* This parameter is ignored if Features::allowComments_
* is \c false.
* \return \c true if the document was successfully parsed, \c false if an
error occurred.
* \return \c true if the document was successfully parsed, \c false if an error occurred.
*/
bool parse(const char* beginDoc,
const char* endDoc,
bool parse( const char *beginDoc, const char *endDoc,
Value &root,
bool collectComments = true );
/// \brief Parse from input stream.
/// \see Json::operator>>(std::istream&, Json::Value&).
bool parse(JSONCPP_ISTREAM& is, Value& root, bool collectComments = true);
bool parse( std::istream &is,
Value &root,
bool collectComments = true );
/** \brief Returns a user friendly string that list errors in the parsed
* document.
* \return Formatted error message with the list of errors with their location
* in
* the parsed document. An empty string is returned if no error
* occurred
/** \brief Returns a user friendly string that list errors in the parsed document.
* \return Formatted error message with the list of errors with their location in
* the parsed document. An empty string is returned if no error occurred
* during parsing.
* \deprecated Use getFormattedErrorMessages() instead (typo fix).
*/
JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.")
JSONCPP_STRING getFormatedErrorMessages() const;
JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead")
std::string getFormatedErrorMessages() const;
/** \brief Returns a user friendly string that list errors in the parsed
* document.
* \return Formatted error message with the list of errors with their location
* in
* the parsed document. An empty string is returned if no error
* occurred
/** \brief Returns a user friendly string that list errors in the parsed document.
* \return Formatted error message with the list of errors with their location in
* the parsed document. An empty string is returned if no error occurred
* during parsing.
*/
JSONCPP_STRING getFormattedErrorMessages() const;
/** \brief Returns a vector of structured errors encountered while parsing.
* \return A (possibly empty) vector of StructuredError objects. Currently
* only one error can be returned, but the caller should tolerate
* multiple
* errors. This can occur if the parser recovers from a non-fatal
* parse error and then encounters additional errors.
*/
std::vector<StructuredError> getStructuredErrors() const;
/** \brief Add a semantic error message.
* \param value JSON Value location associated with the error
* \param message The error message.
* \return \c true if the error was successfully added, \c false if the
* Value offset exceeds the document size.
*/
bool pushError(const Value& value, const JSONCPP_STRING& message);
/** \brief Add a semantic error message with extra context.
* \param value JSON Value location associated with the error
* \param message The error message.
* \param extra Additional JSON Value location to contextualize the error
* \return \c true if the error was successfully added, \c false if either
* Value offset exceeds the document size.
*/
bool pushError(const Value& value, const JSONCPP_STRING& message, const Value& extra);
/** \brief Return whether there are any errors.
* \return \c true if there are no errors to report \c false if
* errors have occurred.
*/
bool good() const;
std::string getFormattedErrorMessages() const;
private:
enum TokenType {
enum TokenType
{
tokenEndOfStream = 0,
tokenObjectBegin,
tokenObjectEnd,
@@ -175,25 +107,29 @@ private:
tokenError
};
class Token {
class Token
{
public:
TokenType type_;
Location start_;
Location end_;
};
class ErrorInfo {
class ErrorInfo
{
public:
Token token_;
JSONCPP_STRING message_;
std::string message_;
Location extra_;
};
typedef std::deque<ErrorInfo> Errors;
bool expectToken( TokenType type, Token &token, const char *message );
bool readToken( Token &token );
void skipSpaces();
bool match(Location pattern, int patternLength);
bool match( Location pattern,
int patternLength );
bool readComment();
bool readCStyleComment();
bool readCppStyleComment();
@@ -203,11 +139,9 @@ private:
bool readObject( Token &token );
bool readArray( Token &token );
bool decodeNumber( Token &token );
bool decodeNumber(Token& token, Value& decoded);
bool decodeString( Token &token );
bool decodeString(Token& token, JSONCPP_STRING& decoded);
bool decodeString( Token &token, std::string &decoded );
bool decodeDouble( Token &token );
bool decodeDouble(Token& token, Value& decoded);
bool decodeUnicodeCodePoint( Token &token,
Location &current,
Location end,
@@ -216,164 +150,39 @@ private:
Location &current,
Location end,
unsigned int &unicode );
bool addError(const JSONCPP_STRING& message, Token& token, Location extra = 0);
bool addError( const std::string &message,
Token &token,
Location extra = 0 );
bool recoverFromError( TokenType skipUntilToken );
bool addErrorAndRecover(const JSONCPP_STRING& message,
bool addErrorAndRecover( const std::string &message,
Token &token,
TokenType skipUntilToken );
void skipUntilSpace();
Value &currentValue();
Char getNextChar();
void
getLocationLineAndColumn(Location location, int& line, int& column) const;
JSONCPP_STRING getLocationLineAndColumn(Location location) const;
void addComment(Location begin, Location end, CommentPlacement placement);
void getLocationLineAndColumn( Location location,
int &line,
int &column ) const;
std::string getLocationLineAndColumn( Location location ) const;
void addComment( Location begin,
Location end,
CommentPlacement placement );
void skipCommentTokens( Token &token );
static bool containsNewLine(Location begin, Location end);
static JSONCPP_STRING normalizeEOL(Location begin, Location end);
typedef std::stack<Value *> Nodes;
Nodes nodes_;
Errors errors_;
JSONCPP_STRING document_;
std::string document_;
Location begin_;
Location end_;
Location current_;
Location lastValueEnd_;
Value *lastValue_;
JSONCPP_STRING commentsBefore_;
std::string commentsBefore_;
Features features_;
bool collectComments_;
}; // Reader
/** Interface for reading JSON from a char array.
*/
class JSON_API CharReader {
public:
virtual ~CharReader() {}
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
document.
* The document must be a UTF-8 encoded string containing the document to read.
*
* \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the
document to read.
* \param endDoc Pointer on the end of the UTF-8 encoded string of the
document to read.
* Must be >= beginDoc.
* \param root [out] Contains the root value of the document if it was
* successfully parsed.
* \param errs [out] Formatted error messages (if not NULL)
* a user friendly string that lists errors in the parsed
* document.
* \return \c true if the document was successfully parsed, \c false if an
error occurred.
*/
virtual bool parse(
char const* beginDoc, char const* endDoc,
Value* root, JSONCPP_STRING* errs) = 0;
class JSON_API Factory {
public:
virtual ~Factory() {}
/** \brief Allocate a CharReader via operator new().
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual CharReader* newCharReader() const = 0;
}; // Factory
}; // CharReader
/** \brief Build a CharReader implementation.
Usage:
\code
using namespace Json;
CharReaderBuilder builder;
builder["collectComments"] = false;
Value value;
JSONCPP_STRING errs;
bool ok = parseFromStream(builder, std::cin, &value, &errs);
\endcode
*/
class JSON_API CharReaderBuilder : public CharReader::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
These are case-sensitive.
Available settings (case-sensitive):
- `"collectComments": false or true`
- true to collect comment and allow writing them
back during serialization, false to discard comments.
This parameter is ignored if allowComments is false.
- `"allowComments": false or true`
- true if comments are allowed.
- `"strictRoot": false or true`
- true if root must be either an array or an object value
- `"allowDroppedNullPlaceholders": false or true`
- true if dropped null placeholders are allowed. (See StreamWriterBuilder.)
- `"allowNumericKeys": false or true`
- true if numeric object keys are allowed.
- `"allowSingleQuotes": false or true`
- true if '' are allowed for strings (both keys and values)
- `"stackLimit": integer`
- Exceeding stackLimit (recursive depth of `readValue()`) will
cause an exception.
- This is a security issue (seg-faults caused by deeply nested JSON),
so the default is low.
- `"failIfExtra": false or true`
- If true, `parse()` returns false when extra non-whitespace trails
the JSON value in the input string.
- `"rejectDupKeys": false or true`
- If true, `parse()` returns false when a key is duplicated within an object.
- `"allowSpecialFloats": false or true`
- If true, special float values (NaNs and infinities) are allowed
and their values are losslessly restorable.
You can examine `settings_` yourself
to see the defaults. You can also write and read them just like any
JSON Value.
\sa setDefaults()
*/
Json::Value settings_;
CharReaderBuilder();
~CharReaderBuilder() JSONCPP_OVERRIDE;
CharReader* newCharReader() const JSONCPP_OVERRIDE;
/** \return true if 'settings' are legal and consistent;
* otherwise, indicate bad settings via 'invalid'.
*/
bool validate(Json::Value* invalid) const;
/** A simple way to update a specific setting.
*/
Value& operator[](JSONCPP_STRING key);
/** Called by ctor, but you can use this to reset settings_.
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
*/
static void setDefaults(Json::Value* settings);
/** Same as old Features::strictMode().
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
*/
static void strictMode(Json::Value* settings);
};
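A minimal sketch of how the settings above combine in practice (the JSON text and the stackLimit value are invented for the example): strictMode() seeds settings_, individual keys are then overridden through operator[], and the resulting CharReader parses a raw character range.
// Sketch only, assuming the CharReaderBuilder/CharReader API declared above.
#include <json/json.h>
#include <iostream>
#include <memory>
#include <string>
int main() {
  Json::CharReaderBuilder builder;
  Json::CharReaderBuilder::strictMode(&builder.settings_);  // start from strict defaults
  builder["allowComments"] = true;     // relax one strict-mode setting
  builder["stackLimit"] = 100;         // example value, not the library default
  const std::string doc = "{ \"name\": \"demo\", \"size\": 3 }";
  Json::Value root;
  JSONCPP_STRING errs;
  std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  if (!reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs)) {
    std::cerr << "parse failed: " << errs << "\n";
    return 1;
  }
  std::cout << root["name"].asString() << "\n";
  return 0;
}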
/** Consume entire stream and use its begin/end.
* Someday we might have a real StreamReader, but for now this
* is convenient.
*/
bool JSON_API parseFromStream(
CharReader::Factory const&,
JSONCPP_ISTREAM&,
Value* root, std::string* errs);
/** \brief Read from 'sin' into 'root'.
Always keep comments from the input JSON.
@@ -398,14 +207,8 @@ bool JSON_API parseFromStream(
\throw std::exception on parse error.
\see Json::operator<<()
*/
JSON_API JSONCPP_ISTREAM& operator>>(JSONCPP_ISTREAM&, Value&);
std::istream& operator>>( std::istream&, Value& );
} // namespace Json
#pragma pack(pop)
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#endif // CPPTL_JSON_READER_H_INCLUDED
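A short sketch of stream-based use of the two entry points above: parseFromStream() reports errors through errs, while operator>>() throws on a parse error. The file name is hypothetical.
// Sketch only: stream-based parsing with the declarations above.
#include <json/json.h>
#include <fstream>
#include <iostream>
int main() {
  std::ifstream in("config.json");        // hypothetical file
  Json::CharReaderBuilder builder;        // default settings
  Json::Value root;
  JSONCPP_STRING errs;
  if (!Json::parseFromStream(builder, in, &root, &errs)) {
    std::cerr << errs << "\n";
    return 1;
  }
  // The operator>>() declared above does the same with default settings,
  // but signals failure by throwing instead of returning false:
  //   std::ifstream in2("config.json"); in2 >> root;
  return 0;
}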

File diff suppressed because it is too large

View File

@@ -1,20 +0,0 @@
// DO NOT EDIT. This file (and "version") is generated by CMake.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED
# define JSONCPP_VERSION_STRING "1.8.3"
# define JSONCPP_VERSION_MAJOR 1
# define JSONCPP_VERSION_MINOR 8
# define JSONCPP_VERSION_PATCH 3
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
#ifdef JSONCPP_USING_SECURE_MEMORY
#undef JSONCPP_USING_SECURE_MEMORY
#endif
#define JSONCPP_USING_SECURE_MEMORY 0
// If non-zero, the library zeroes any memory that it has allocated before
// it frees its memory.
#endif // JSON_VERSION_H_INCLUDED
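JSONCPP_VERSION_HEXA packs the three version components into a single integer (major in bits 24-31, minor in 16-23, patch in 8-15), which allows compile-time gates like the hypothetical sketch below.
// Sketch only: a compile-time version gate built from the macros above.
#include <json/version.h>
// 1.8.0 encoded the same way JSONCPP_VERSION_HEXA is:
// (major << 24) | (minor << 16) | (patch << 8)
#if JSONCPP_VERSION_HEXA < ((1 << 24) | (8 << 16) | (0 << 8))
#error "this code assumes jsoncpp >= 1.8.0"
#endif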

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -6,332 +6,180 @@
#ifndef JSON_WRITER_H_INCLUDED
# define JSON_WRITER_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#if !defined(JSON_IS_AMALGATED)
# include "value.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
#endif // if !defined(JSON_IS_AMALGATED)
# include <vector>
# include <string>
#include <ostream>
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
// be used by...
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(push)
#pragma warning(disable : 4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma pack(push, 8)
# include <iostream>
namespace Json {
class Value;
/**
Usage:
\code
using namespace Json;
void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
std::unique_ptr<StreamWriter> const writer(
factory.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
}
\endcode
*/
class JSON_API StreamWriter {
protected:
JSONCPP_OSTREAM* sout_; // not owned; will not delete
public:
StreamWriter();
virtual ~StreamWriter();
/** Write Value into document as configured in sub-class.
Do not take ownership of sout, but maintain a reference during function.
\pre sout != NULL
\return zero on success (For now, we always return zero, so check the stream instead.)
\throw std::exception possibly, depending on configuration
*/
virtual int write(Value const& root, JSONCPP_OSTREAM* sout) = 0;
/** \brief A simple abstract factory.
*/
class JSON_API Factory {
public:
virtual ~Factory();
/** \brief Allocate a StreamWriter via operator new().
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
virtual StreamWriter* newStreamWriter() const = 0;
}; // Factory
}; // StreamWriter
/** \brief Write into stringstream, then return string, for convenience.
* A StreamWriter will be created from the factory, used, and then deleted.
*/
JSONCPP_STRING JSON_API writeString(StreamWriter::Factory const& factory, Value const& root);
/** \brief Build a StreamWriter implementation.
Usage:
\code
using namespace Json;
Value value = ...;
StreamWriterBuilder builder;
builder["commentStyle"] = "None";
builder["indentation"] = " "; // or whatever you like
std::unique_ptr<Json::StreamWriter> writer(
builder.newStreamWriter());
writer->write(value, &std::cout);
std::cout << std::endl; // add lf and flush
\endcode
*/
class JSON_API StreamWriterBuilder : public StreamWriter::Factory {
public:
// Note: We use a Json::Value so that we can add data-members to this class
// without a major version bump.
/** Configuration of this builder.
Available settings (case-sensitive):
- "commentStyle": "None" or "All"
- "indentation": "<anything>"
- "enableYAMLCompatibility": false or true
- slightly change the whitespace around colons
- "dropNullPlaceholders": false or true
- Drop the "null" string from the writer's output for nullValues.
Strictly speaking, this is not valid JSON. But when the output is being
fed to a browser's Javascript, it makes for smaller output and the
browser can handle the output just fine.
- "useSpecialFloats": false or true
- If true, outputs non-finite floating point values in the following way:
NaN values as "NaN", positive infinity as "Infinity", and negative infinity
as "-Infinity".
You can examine `settings_` yourself
to see the defaults. You can also write and read them just like any
JSON Value.
\sa setDefaults()
*/
Json::Value settings_;
StreamWriterBuilder();
~StreamWriterBuilder() JSONCPP_OVERRIDE;
/**
* \throw std::exception if something goes wrong (e.g. invalid settings)
*/
StreamWriter* newStreamWriter() const JSONCPP_OVERRIDE;
/** \return true if 'settings' are legal and consistent;
* otherwise, indicate bad settings via 'invalid'.
*/
bool validate(Json::Value* invalid) const;
/** A simple way to update a specific setting.
*/
Value& operator[](JSONCPP_STRING key);
/** Called by ctor, but you can use this to reset settings_.
* \pre 'settings' != NULL (but Json::null is fine)
* \remark Defaults:
* \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults
*/
static void setDefaults(Json::Value* settings);
};
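Combined with the writeString() helper declared earlier, the builder gives the usual one-call serialization to a string; a sketch with invented data and settings:
// Sketch only: serialize a Value to a string via StreamWriterBuilder + writeString().
#include <json/json.h>
#include <iostream>
int main() {
  Json::Value root;
  root["name"] = "demo";               // example data
  root["count"] = 2;
  Json::StreamWriterBuilder builder;
  builder["indentation"] = "  ";       // two-space indent instead of the default
  builder["commentStyle"] = "None";    // drop comments from the output
  JSONCPP_STRING out = Json::writeString(builder, root);
  std::cout << out << std::endl;
  return 0;
}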
/** \brief Abstract class for writers.
* \deprecated Use StreamWriter. (And really, this is an implementation detail.)
*/
class JSONCPP_DEPRECATED("Use StreamWriter instead") JSON_API Writer {
class JSON_API Writer
{
public:
virtual ~Writer();
virtual JSONCPP_STRING write(const Value& root) = 0;
virtual std::string write( const Value &root ) = 0;
};
/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format
*without formatting (not human friendly).
/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format without formatting (not human friendly).
*
* The JSON document is written in a single line. It is not intended for 'human'
*consumption,
* The JSON document is written in a single line. It is not intended for 'human' consumption,
* but may be useful to support features such as RPC where bandwidth is limited.
* \sa Reader, Value
* \deprecated Use StreamWriterBuilder.
*/
class JSONCPP_DEPRECATED("Use StreamWriterBuilder instead") JSON_API FastWriter : public Writer {
class JSON_API FastWriter : public Writer
{
public:
FastWriter();
~FastWriter() JSONCPP_OVERRIDE {}
virtual ~FastWriter(){}
void enableYAMLCompatibility();
/** \brief Drop the "null" string from the writer's output for nullValues.
* Strictly speaking, this is not valid JSON. But when the output is being
* fed to a browser's Javascript, it makes for smaller output and the
* browser can handle the output just fine.
*/
void dropNullPlaceholders();
void omitEndingLineFeed();
public: // overridden from Writer
JSONCPP_STRING write(const Value& root) JSONCPP_OVERRIDE;
virtual std::string write( const Value &root );
private:
void writeValue( const Value &value );
JSONCPP_STRING document_;
std::string document_;
bool yamlCompatiblityEnabled_;
bool dropNullPlaceholders_;
bool omitEndingLineFeed_;
};
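FastWriter is deprecated in favour of StreamWriterBuilder, but for code that still targets it, here is a sketch of the single-line output and the dropNullPlaceholders() behaviour documented above (invented data):
// Sketch only: legacy FastWriter; new code should prefer StreamWriterBuilder.
#include <json/json.h>
#include <iostream>
int main() {
  Json::Value root;
  root["id"] = 1;
  root["missing"] = Json::Value();     // a null member
  Json::FastWriter writer;
  writer.dropNullPlaceholders();       // emit nothing for nulls (not strictly valid JSON)
  writer.omitEndingLineFeed();         // suppress the trailing '\n'
  std::cout << writer.write(root) << "\n";
  return 0;
}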
/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
*human friendly way.
/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way.
*
* The rules for line break and indent are as follows:
* - Object value:
* - if empty then print {} without indent and line break
* - if not empty then print '{', line break & indent, print one value per
*line
* - if not empty then print '{', line break & indent, print one value per line
* and then unindent and line break and print '}'.
* - Array value:
* - if empty then print [] without indent and line break
* - if the array contains no object value, empty array or some other value
*types,
* and all the values fit on one line, then print the array on a single
*line.
* - if the array contains no object value, empty array or some other value types,
* and all the values fit on one line, then print the array on a single line.
* - otherwise, if the values do not fit on one line, or the array contains
* object or non empty array, then print one value per line.
*
* If the Value has comments then they are output according to their
*#CommentPlacement.
* If the Value has comments then they are output according to their #CommentPlacement.
*
* \sa Reader, Value, Value::setComment()
* \deprecated Use StreamWriterBuilder.
*/
class JSONCPP_DEPRECATED("Use StreamWriterBuilder instead") JSON_API StyledWriter : public Writer {
class JSON_API StyledWriter: public Writer
{
public:
StyledWriter();
~StyledWriter() JSONCPP_OVERRIDE {}
virtual ~StyledWriter(){}
public: // overridden from Writer
/** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
* \param root Value to serialize.
* \return String containing the JSON document that represents the root value.
*/
JSONCPP_STRING write(const Value& root) JSONCPP_OVERRIDE;
virtual std::string write( const Value &root );
private:
void writeValue( const Value &value );
void writeArrayValue( const Value &value );
bool isMultineArray( const Value &value );
void pushValue(const JSONCPP_STRING& value);
void pushValue( const std::string &value );
void writeIndent();
void writeWithIndent(const JSONCPP_STRING& value);
void writeWithIndent( const std::string &value );
void indent();
void unindent();
void writeCommentBeforeValue( const Value &root );
void writeCommentAfterValueOnSameLine( const Value &root );
bool hasCommentForValue( const Value &value );
static JSONCPP_STRING normalizeEOL(const JSONCPP_STRING& text);
static std::string normalizeEOL( const std::string &text );
typedef std::vector<JSONCPP_STRING> ChildValues;
typedef std::vector<std::string> ChildValues;
ChildValues childValues_;
JSONCPP_STRING document_;
JSONCPP_STRING indentString_;
unsigned int rightMargin_;
unsigned int indentSize_;
std::string document_;
std::string indentString_;
int rightMargin_;
int indentSize_;
bool addChildValues_;
};
/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
human friendly way,
/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way,
to a stream rather than to a string.
*
* The rules for line break and indent are as follows:
* - Object value:
* - if empty then print {} without indent and line break
* - if not empty then print '{', line break & indent, print one value per
line
* - if not empty then print '{', line break & indent, print one value per line
* and then unindent and line break and print '}'.
* - Array value:
* - if empty then print [] without indent and line break
* - if the array contains no object value, empty array or some other value
types,
* and all the values fit on one line, then print the array on a single
line.
* - if the array contains no object value, empty array or some other value types,
* and all the values fit on one line, then print the array on a single line.
* - otherwise, if the values do not fit on one line, or the array contains
* object or non empty array, then print one value per line.
*
* If the Value has comments then they are output according to their
#CommentPlacement.
* If the Value has comments then they are output according to their #CommentPlacement.
*
* \sa Reader, Value, Value::setComment()
* \deprecated Use StreamWriterBuilder.
*/
class JSONCPP_DEPRECATED("Use StreamWriterBuilder instead") JSON_API StyledStreamWriter {
public:
/**
* \param indentation Each level will be indented by this amount extra.
* \sa Reader, Value, Value::setComment()
*/
StyledStreamWriter(JSONCPP_STRING indentation = "\t");
class JSON_API StyledStreamWriter
{
public:
StyledStreamWriter( std::string indentation="\t" );
~StyledStreamWriter(){}
public:
/** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
* \param out Stream to write to. (Can be ostringstream, e.g.)
* \param root Value to serialize.
* \note There is no point in deriving from Writer, since write() should not
* return a value.
* \note There is no point in deriving from Writer, since write() should not return a value.
*/
void write(JSONCPP_OSTREAM& out, const Value& root);
void write( std::ostream &out, const Value &root );
private:
void writeValue( const Value &value );
void writeArrayValue( const Value &value );
bool isMultineArray( const Value &value );
void pushValue(const JSONCPP_STRING& value);
void pushValue( const std::string &value );
void writeIndent();
void writeWithIndent(const JSONCPP_STRING& value);
void writeWithIndent( const std::string &value );
void indent();
void unindent();
void writeCommentBeforeValue( const Value &root );
void writeCommentAfterValueOnSameLine( const Value &root );
bool hasCommentForValue( const Value &value );
static JSONCPP_STRING normalizeEOL(const JSONCPP_STRING& text);
static std::string normalizeEOL( const std::string &text );
typedef std::vector<JSONCPP_STRING> ChildValues;
typedef std::vector<std::string> ChildValues;
ChildValues childValues_;
JSONCPP_OSTREAM* document_;
JSONCPP_STRING indentString_;
unsigned int rightMargin_;
JSONCPP_STRING indentation_;
bool addChildValues_ : 1;
bool indented_ : 1;
std::ostream* document_;
std::string indentString_;
int rightMargin_;
std::string indentation_;
bool addChildValues_;
};
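A short sketch of StyledStreamWriter writing indented output straight to a stream (invented data; the two-space indentation is just an example):
// Sketch only: legacy StyledStreamWriter; StreamWriterBuilder is the replacement.
#include <json/json.h>
#include <iostream>
int main() {
  Json::Value root;
  root["items"][0] = "a";
  root["items"][1] = "b";
  Json::StyledStreamWriter writer("  ");   // indent each level by two spaces
  writer.write(std::cout, root);           // human-friendly, indented output
  return 0;
}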
# if defined(JSON_HAS_INT64)
JSONCPP_STRING JSON_API valueToString(Int value);
JSONCPP_STRING JSON_API valueToString(UInt value);
std::string JSON_API valueToString( Int value );
std::string JSON_API valueToString( UInt value );
# endif // if defined(JSON_HAS_INT64)
JSONCPP_STRING JSON_API valueToString(LargestInt value);
JSONCPP_STRING JSON_API valueToString(LargestUInt value);
JSONCPP_STRING JSON_API valueToString(double value);
JSONCPP_STRING JSON_API valueToString(bool value);
JSONCPP_STRING JSON_API valueToQuotedString(const char* value);
std::string JSON_API valueToString( LargestInt value );
std::string JSON_API valueToString( LargestUInt value );
std::string JSON_API valueToString( double value );
std::string JSON_API valueToString( bool value );
std::string JSON_API valueToQuotedString( const char *value );
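The valueToString() overloads above render scalars in their JSON text form and valueToQuotedString() adds quoting and escaping; a small sketch (the explicit cast just picks the intended overload):
// Sketch only: the scalar-to-text helpers declared above.
#include <json/json.h>
#include <iostream>
int main() {
  std::cout << Json::valueToString(Json::Int(42)) << "\n";    // integer text, e.g. 42
  std::cout << Json::valueToString(3.25) << "\n";             // double text
  std::cout << Json::valueToString(true) << "\n";             // "true"
  std::cout << Json::valueToQuotedString("a\"b") << "\n";     // quoted and escaped
  return 0;
}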
/// \brief Output using the StyledStreamWriter.
/// \see Json::operator>>()
JSON_API JSONCPP_OSTREAM& operator<<(JSONCPP_OSTREAM&, const Value& root);
std::ostream& operator<<( std::ostream&, const Value &root );
} // namespace Json
#pragma pack(pop)
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#pragma warning(pop)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
#endif // JSON_WRITER_H_INCLUDED

View File

@@ -1,42 +0,0 @@

Microsoft Visual Studio Solution File, Format Version 11.00
# Visual Studio 2010
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "lib_json", "lib_json.vcxproj", "{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "jsontest", "jsontest.vcxproj", "{25AF2DD2-D396-4668-B188-488C33B8E620}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "test_lib_json", "test_lib_json.vcxproj", "{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Win32 = Debug|Win32
Debug|x64 = Debug|x64
Release|Win32 = Release|Win32
Release|x64 = Release|x64
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|Win32.ActiveCfg = Debug|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|Win32.Build.0 = Debug|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|x64.ActiveCfg = Debug|x64
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Debug|x64.Build.0 = Debug|x64
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|Win32.ActiveCfg = Release|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|Win32.Build.0 = Release|Win32
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|x64.ActiveCfg = Release|x64
{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}.Release|x64.Build.0 = Release|x64
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|Win32.ActiveCfg = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|Win32.Build.0 = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Debug|x64.ActiveCfg = Debug|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release|Win32.ActiveCfg = Release|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release|Win32.Build.0 = Release|Win32
{25AF2DD2-D396-4668-B188-488C33B8E620}.Release|x64.ActiveCfg = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|Win32.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|Win32.Build.0 = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Debug|x64.ActiveCfg = Debug|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|Win32.ActiveCfg = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|Win32.Build.0 = Release|Win32
{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}.Release|x64.ActiveCfg = Release|Win32
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
EndGlobal

View File

@@ -1,96 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{25AF2DD2-D396-4668-B188-488C33B8E620}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/jsontest\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/jsontest\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/jsontest\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/jsontest\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<MinimalRebuild>true</MinimalRebuild>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>EditAndContinue</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)jsontest.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<ProgramDatabaseFile>$(OutDir)jsontest.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<TargetMachine>MachineX86</TargetMachine>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)jsontest.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
<OptimizeReferences>true</OptimizeReferences>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<TargetMachine>MachineX86</TargetMachine>
</Link>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\src\jsontestrunner\main.cpp" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="lib_json.vcxproj">
<Project>{1e6c2c1c-6453-4129-ae3f-0ee8e6599c89}</Project>
</ProjectReference>
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@@ -1,13 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Source Files">
<UniqueIdentifier>{903591b3-ade3-4ce4-b1f9-1e175e62b014}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\src\jsontestrunner\main.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
</Project>

View File

@@ -1,143 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Debug|x64">
<Configuration>Debug</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|x64">
<Configuration>Release</Configuration>
<Platform>x64</Platform>
</ProjectConfiguration>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\src\lib_json\json_reader.cpp" />
<ClCompile Include="..\..\src\lib_json\json_value.cpp" />
<ClCompile Include="..\..\src\lib_json\json_writer.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\include\json\reader.h" />
<ClInclude Include="..\..\include\json\value.h" />
<ClInclude Include="..\..\include\json\writer.h" />
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{1E6C2C1C-6453-4129-AE3F-0EE8E6599C89}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>jsoncpp</RootNamespace>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>true</UseDebugLibraries>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
<ConfigurationType>StaticLibrary</ConfigurationType>
<UseDebugLibraries>false</UseDebugLibraries>
<WholeProgramOptimization>true</WholeProgramOptimization>
<CharacterSet>Unicode</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup />
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<ClCompile>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<Optimization>Disabled</Optimization>
<PreprocessorDefinitions>WIN32;_DEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<WarningLevel>Level3</WarningLevel>
<PrecompiledHeader>NotUsing</PrecompiledHeader>
<Optimization>MaxSpeed</Optimization>
<FunctionLevelLinking>true</FunctionLevelLinking>
<IntrinsicFunctions>true</IntrinsicFunctions>
<PreprocessorDefinitions>WIN32;NDEBUG;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<AdditionalIncludeDirectories>../../include</AdditionalIncludeDirectories>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
</Link>
</ItemDefinitionGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@@ -1,33 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Header Files">
<UniqueIdentifier>{c110bc57-c46e-476c-97ea-84d8014f431c}</UniqueIdentifier>
</Filter>
<Filter Include="Source Files">
<UniqueIdentifier>{ed718592-5acf-47b5-8f2b-b8224590da6a}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClCompile Include="..\..\src\lib_json\json_reader.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\..\src\lib_json\json_value.cpp">
<Filter>Source Files</Filter>
</ClCompile>
<ClCompile Include="..\..\src\lib_json\json_writer.cpp">
<Filter>Source Files</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\include\json\reader.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="..\..\include\json\value.h">
<Filter>Header Files</Filter>
</ClInclude>
<ClInclude Include="..\..\include\json\writer.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
</Project>

View File

@@ -1,109 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup Label="ProjectConfigurations">
<ProjectConfiguration Include="Debug|Win32">
<Configuration>Debug</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
<ProjectConfiguration Include="Release|Win32">
<Configuration>Release</Configuration>
<Platform>Win32</Platform>
</ProjectConfiguration>
</ItemGroup>
<PropertyGroup Label="Globals">
<ProjectGuid>{B7A96B78-2782-40D2-8F37-A2DEF2B9C26D}</ProjectGuid>
<RootNamespace>test_lib_json</RootNamespace>
<Keyword>Win32Proj</Keyword>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
<ConfigurationType>Application</ConfigurationType>
<CharacterSet>MultiByte</CharacterSet>
</PropertyGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
<ImportGroup Label="ExtensionSettings">
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
</ImportGroup>
<PropertyGroup Label="UserMacros" />
<PropertyGroup>
<_ProjectFileVersion>10.0.40219.1</_ProjectFileVersion>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/test_lib_json\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">../../build/vs71/debug/test_lib_json\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental>
<OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/test_lib_json\</OutDir>
<IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">../../build/vs71/release/test_lib_json\</IntDir>
<LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
<Optimization>Disabled</Optimization>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<MinimalRebuild>true</MinimalRebuild>
<BasicRuntimeChecks>EnableFastChecks</BasicRuntimeChecks>
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>EditAndContinue</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)test_lib_json.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<ProgramDatabaseFile>$(OutDir)test_lib_json.pdb</ProgramDatabaseFile>
<SubSystem>Console</SubSystem>
<TargetMachine>MachineX86</TargetMachine>
</Link>
<PostBuildEvent>
<Message>Running all unit tests</Message>
<Command>$(TargetPath)</Command>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<ClCompile>
<AdditionalIncludeDirectories>../../include;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
<PrecompiledHeader>
</PrecompiledHeader>
<WarningLevel>Level3</WarningLevel>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
</ClCompile>
<Link>
<OutputFile>$(OutDir)test_lib_json.exe</OutputFile>
<GenerateDebugInformation>true</GenerateDebugInformation>
<SubSystem>Console</SubSystem>
<OptimizeReferences>true</OptimizeReferences>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<TargetMachine>MachineX86</TargetMachine>
</Link>
<PostBuildEvent>
<Message>Running all unit tests</Message>
<Command>$(TargetPath)</Command>
</PostBuildEvent>
</ItemDefinitionGroup>
<ItemGroup>
<ClCompile Include="..\..\src\test_lib_json\jsontest.cpp" />
<ClCompile Include="..\..\src\test_lib_json\main.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\src\test_lib_json\jsontest.h" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="lib_json.vcxproj">
<Project>{1e6c2c1c-6453-4129-ae3f-0ee8e6599c89}</Project>
</ProjectReference>
</ItemGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
</Project>

View File

@@ -1,24 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<ClCompile Include="..\..\src\test_lib_json\jsontest.cpp">
<Filter>Source Filter</Filter>
</ClCompile>
<ClCompile Include="..\..\src\test_lib_json\main.cpp">
<Filter>Source Filter</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<Filter Include="Source Filter">
<UniqueIdentifier>{bf40cbfc-8e98-40b4-b9f3-7e8d579cbae2}</UniqueIdentifier>
</Filter>
<Filter Include="Header Files">
<UniqueIdentifier>{5fd39074-89e6-4939-aa3f-694fefd296b1}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<ClInclude Include="..\..\src\test_lib_json\jsontest.h">
<Filter>Header Files</Filter>
</ClInclude>
</ItemGroup>
</Project>

View File

@@ -178,6 +178,15 @@
<File
RelativePath="..\..\include\json\json.h">
</File>
<File
RelativePath="..\..\src\lib_json\json_batchallocator.h">
</File>
<File
RelativePath="..\..\src\lib_json\json_internalarray.inl">
</File>
<File
RelativePath="..\..\src\lib_json\json_internalmap.inl">
</File>
<File
RelativePath="..\..\src\lib_json\json_reader.cpp">
</File>

View File

@@ -1,8 +1,3 @@
# Copyright 2010 Baptiste Lepilleur and The JsonCpp Authors
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""Tag the sandbox for release, make source and doc tarballs.
Requires Python 2.6
@@ -15,12 +10,7 @@ python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -uble
Example of invocation when doing a release:
python makerelease.py 0.5.0 0.6.0-dev
Note: This was for Subversion. Now that we are in GitHub, we do not
need to build versioned tarballs anymore, so makerelease.py is defunct.
"""
from __future__ import print_function
import os.path
import subprocess
import sys
@@ -33,7 +23,7 @@ import tempfile
import os
import time
from devtools import antglob, fixeol, tarball
import amalgamate
import amalgate
SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
@@ -53,7 +43,7 @@ class SVNError(Exception):
def svn_command( command, *args ):
cmd = ['svn', '--non-interactive', command] + list(args)
print('Running:', ' '.join(cmd))
print 'Running:', ' '.join( cmd )
process = subprocess.Popen( cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT )
@@ -91,7 +81,7 @@ def svn_check_if_tag_exist(tag_url):
"""
try:
list_stdout = svn_command( 'list', tag_url )
except SVNError as e:
except SVNError, e:
if e.returncode != 1 or not str(e).find('tag_url'):
raise e
# otherwise ignore error, meaning tag does not exist
@@ -124,7 +114,7 @@ def svn_export(tag_url, export_dir):
def fix_sources_eol( dist_dir ):
"""Set file EOL for tarball distribution.
"""
print('Preparing exported source file EOL for distribution...')
print 'Preparing exported source file EOL for distribution...'
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
win_sources = antglob.glob( dist_dir,
includes = '**/*.sln **/*.vcproj',
@@ -155,7 +145,7 @@ def download(url, target_path):
def check_compile( distcheck_top_dir, platform ):
cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
print('Running:', ' '.join(cmd))
print 'Running:', ' '.join( cmd )
log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
flog = open( log_path, 'wb' )
try:
@@ -186,9 +176,9 @@ def run_sftp_batch(userhost, sftp, batch, retry=0):
# psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
error = None
for retry_index in range(0, max(1,retry)):
for retry_index in xrange(0, max(1,retry)):
heading = retry_index == 0 and 'Running:' or 'Retrying:'
print(heading, ' '.join(cmd))
print heading, ' '.join( cmd )
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
stdout = process.communicate()[0]
if process.returncode != 0:
@@ -226,21 +216,21 @@ exit
upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
paths_to_remove = existing_paths - upload_paths
if paths_to_remove:
print('Removing the following file from web:')
print('\n'.join(paths_to_remove))
print 'Removing the following file from web:'
print '\n'.join( paths_to_remove )
stdout = run_sftp_batch( userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove) )
print('Uploading %d files:' % len(upload_paths))
print 'Uploading %d files:' % len(upload_paths)
batch_size = 10
upload_paths = list(upload_paths)
start_time = time.time()
for index in range(0,len(upload_paths),batch_size):
for index in xrange(0,len(upload_paths),batch_size):
paths = upload_paths[index:index+batch_size]
file_per_sec = (time.time() - start_time) / (index+1)
remaining_files = len(upload_paths) - index
remaining_sec = file_per_sec * remaining_files
print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
run_sftp_batch( userhost, sftp, """cd htdocs
lcd %s
mput %s
@@ -304,7 +294,7 @@ Warning: --force should only be used when developping/testing the release script
else:
msg = check_no_pending_commit()
if not msg:
print('Setting version to', release_version)
print 'Setting version to', release_version
set_version( release_version )
svn_commit( 'Release ' + release_version )
tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
@@ -312,11 +302,11 @@ Warning: --force should only be used when developping/testing the release script
if options.retag_release:
svn_remove_tag( tag_url, 'Overwriting previous tag' )
else:
print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
sys.exit( 1 )
svn_tag_sandbox( tag_url, 'Release ' + release_version )
print('Generated doxygen document...')
print 'Generated doxygen document...'
## doc_dirname = r'jsoncpp-api-html-0.5.0'
## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
@@ -330,55 +320,55 @@ Warning: --force should only be used when developping/testing the release script
source_dir = 'jsoncpp-src-' + release_version
source_tarball_path = 'dist/%s.tar.gz' % source_dir
print('Generating source tarball to', source_tarball_path)
print 'Generating source tarball to', source_tarball_path
tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
print('Generating amalgamation source tarball to', amalgamation_tarball_path)
amalgamation_dir = 'dist/amalgamation'
amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h')
amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir],
amalgamation_dir, prefix_dir=amalgamation_source_dir)
amalgated_tarball_path = 'dist/%s-amalgated.tar.gz' % source_dir
print 'Generating amalgated source tarball to', amalgated_tarball_path
amalgated_dir = 'dist/amalgated'
amalgate.amalgate_source( export_dir, '%s/jsoncpp.cpp' % amalgated_dir, 'json/json.h' )
amalgated_source_dir = 'jsoncpp-src-amalgated' + release_version
tarball.make_tarball( amalgated_tarball_path, [amalgated_dir],
amalgated_dir, prefix_dir=amalgated_source_dir )
# Decompress source tarball, download and install scons-local
distcheck_dir = 'dist/distcheck'
distcheck_top_dir = distcheck_dir + '/' + source_dir
print('Decompressing source tarball to', distcheck_dir)
print 'Decompressing source tarball to', distcheck_dir
rmdir_if_exist( distcheck_dir )
tarball.decompress( source_tarball_path, distcheck_dir )
scons_local_path = 'dist/scons-local.tar.gz'
print('Downloading scons-local to', scons_local_path)
print 'Downloading scons-local to', scons_local_path
download( SCONS_LOCAL_URL, scons_local_path )
print('Decompressing scons-local to', distcheck_top_dir)
print 'Decompressing scons-local to', distcheck_top_dir
tarball.decompress( scons_local_path, distcheck_top_dir )
# Run compilation
print('Compiling decompressed tarball')
print 'Compiling decompressed tarball'
all_build_status = True
for platform in options.platforms.split(','):
print('Testing platform:', platform)
print 'Testing platform:', platform
build_status, log_path = check_compile( distcheck_top_dir, platform )
print('see build log:', log_path)
print(build_status and '=> ok' or '=> FAILED')
print 'see build log:', log_path
print build_status and '=> ok' or '=> FAILED'
all_build_status = all_build_status and build_status
if not build_status:
print('Testing failed on at least one platform, aborting...')
print 'Testing failed on at least one platform, aborting...'
svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
sys.exit(1)
if options.user:
if not options.no_web:
print('Uploading documentation using user', options.user)
print 'Uploading documentation using user', options.user
sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
print('Completed documentation upload')
print('Uploading source and documentation tarballs for release using user', options.user)
print 'Completed documentation upload'
print 'Uploading source and documentation tarballs for release using user', options.user
sourceforge_release_tarball( SOURCEFORGE_PROJECT,
[source_tarball_path, doc_tarball_path],
user=options.user, sftp=options.sftp )
print('Source and doc release tarballs uploaded')
print 'Source and doc release tarballs uploaded'
else:
print('No upload user specified. Web site and download tarbal were not uploaded.')
print('Tarball can be found at:', doc_tarball_path)
print 'No upload user specified. Web site and download tarbal were not uploaded.'
print 'Tarball can be found at:', doc_tarball_path
# Set next version number and commit
set_version( next_version )

View File

@@ -1,102 +0,0 @@
project(
'jsoncpp',
'cpp',
version : '1.8.3',
default_options : [
'buildtype=release',
'warning_level=1'],
license : 'Public Domain',
meson_version : '>= 0.41.1')
jsoncpp_ver_arr = meson.project_version().split('.')
jsoncpp_major_version = jsoncpp_ver_arr[0]
jsoncpp_minor_version = jsoncpp_ver_arr[1]
jsoncpp_patch_version = jsoncpp_ver_arr[2]
jsoncpp_cdata = configuration_data()
jsoncpp_cdata.set('JSONCPP_VERSION', meson.project_version())
jsoncpp_cdata.set('JSONCPP_VERSION_MAJOR', jsoncpp_major_version)
jsoncpp_cdata.set('JSONCPP_VERSION_MINOR', jsoncpp_minor_version)
jsoncpp_cdata.set('JSONCPP_VERSION_PATCH', jsoncpp_patch_version)
jsoncpp_gen_sources = configure_file(
input : 'src/lib_json/version.h.in',
output : 'version.h',
configuration : jsoncpp_cdata,
install : true,
install_dir : join_paths(get_option('prefix'), get_option('includedir'), 'json')
)
jsoncpp_headers = [
'include/json/allocator.h',
'include/json/assertions.h',
'include/json/autolink.h',
'include/json/config.h',
'include/json/features.h',
'include/json/forwards.h',
'include/json/json.h',
'include/json/reader.h',
'include/json/value.h',
'include/json/writer.h']
jsoncpp_include_directories = include_directories('include')
install_headers(
jsoncpp_headers,
subdir : 'json')
jsoncpp_lib = library(
'jsoncpp',
[ jsoncpp_gen_sources,
jsoncpp_headers,
'src/lib_json/json_tool.h',
'src/lib_json/json_reader.cpp',
'src/lib_json/json_value.cpp',
'src/lib_json/json_writer.cpp'],
soversion : 19,
install : true,
include_directories : jsoncpp_include_directories)
import('pkgconfig').generate(
libraries : jsoncpp_lib,
version : meson.project_version(),
name : 'jsoncpp',
filebase : 'jsoncpp',
description : 'A C++ library for interacting with JSON')
# for libraries bundling jsoncpp
declare_dependency(
include_directories : jsoncpp_include_directories,
link_with : jsoncpp_lib,
version : meson.project_version(),
sources : jsoncpp_gen_sources)
# tests
python = import('python3').find_python()
jsoncpp_test = executable(
'jsoncpp_test',
[ 'src/test_lib_json/jsontest.cpp',
'src/test_lib_json/jsontest.h',
'src/test_lib_json/main.cpp'],
include_directories : jsoncpp_include_directories,
link_with : jsoncpp_lib,
install : false)
test(
'unittest_jsoncpp_test',
jsoncpp_test)
jsontestrunner = executable(
'jsontestrunner',
'src/jsontestrunner/main.cpp',
include_directories : jsoncpp_include_directories,
link_with : jsoncpp_lib,
install : false)
test(
'unittest_jsontestrunner',
python,
args : [
'-B',
join_paths(meson.current_source_dir(), 'test/runjsontests.py'),
jsontestrunner,
join_paths(meson.current_source_dir(), 'test/data')]
)

View File

@@ -1,9 +0,0 @@
libdir=@CMAKE_INSTALL_FULL_LIBDIR@
includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@
Name: jsoncpp
Description: A C++ library for interacting with JSON
Version: @JSONCPP_VERSION@
URL: https://github.com/open-source-parsers/jsoncpp
Libs: -L${libdir} -ljsoncpp
Cflags: -I${includedir}

53
scons-tools/globtool.py Normal file
View File

@@ -0,0 +1,53 @@
import fnmatch
import os
def generate( env ):
def Glob( env, includes = None, excludes = None, dir = '.' ):
"""Adds Glob( includes = Split( '*' ), excludes = None, dir = '.')
helper function to environment.
Globs files on the file-system.
includes: list of file name patterns included in the return list when matched.
excludes: list of file name patterns excluded from the return list.
Example:
sources = env.Glob( ("*.cpp", '*.h'), "~*.cpp", "#src" )
"""
def filterFilename(path):
abs_path = os.path.join( dir, path )
if not os.path.isfile(abs_path):
return 0
fn = os.path.basename(path)
match = 0
for include in includes:
if fnmatch.fnmatchcase( fn, include ):
match = 1
break
if match == 1 and not excludes is None:
for exclude in excludes:
if fnmatch.fnmatchcase( fn, exclude ):
match = 0
break
return match
if includes is None:
includes = ('*',)
elif type(includes) in ( type(''), type(u'') ):
includes = (includes,)
if type(excludes) in ( type(''), type(u'') ):
excludes = (excludes,)
dir = env.Dir(dir).abspath
paths = os.listdir( dir )
def makeAbsFileNode( path ):
return env.File( os.path.join( dir, path ) )
nodes = filter( filterFilename, paths )
return map( makeAbsFileNode, nodes )
from SCons.Script import Environment
Environment.Glob = Glob
def exists(env):
"""
Tool always exists.
"""
return True

179
scons-tools/srcdist.py Normal file
View File

@@ -0,0 +1,179 @@
import os
import os.path
from fnmatch import fnmatch
import targz
##def DoxyfileParse(file_contents):
## """
## Parse a Doxygen source file and return a dictionary of all the values.
## Values will be strings and lists of strings.
## """
## data = {}
##
## import shlex
## lex = shlex.shlex(instream = file_contents, posix = True)
## lex.wordchars += "*+./-:"
## lex.whitespace = lex.whitespace.replace("\n", "")
## lex.escape = ""
##
## lineno = lex.lineno
## last_backslash_lineno = lineno
## token = lex.get_token()
## key = token # the first token should be a key
## last_token = ""
## key_token = False
## next_key = False
## new_data = True
##
## def append_data(data, key, new_data, token):
## if new_data or len(data[key]) == 0:
## data[key].append(token)
## else:
## data[key][-1] += token
##
## while token:
## if token in ['\n']:
## if last_token not in ['\\']:
## key_token = True
## elif token in ['\\']:
## pass
## elif key_token:
## key = token
## key_token = False
## else:
## if token == "+=":
## if not data.has_key(key):
## data[key] = list()
## elif token == "=":
## data[key] = list()
## else:
## append_data( data, key, new_data, token )
## new_data = True
##
## last_token = token
## token = lex.get_token()
##
## if last_token == '\\' and token != '\n':
## new_data = False
## append_data( data, key, new_data, '\\' )
##
## # compress lists of len 1 into single strings
## for (k, v) in data.items():
## if len(v) == 0:
## data.pop(k)
##
## # items in the following list will be kept as lists and not converted to strings
## if k in ["INPUT", "FILE_PATTERNS", "EXCLUDE_PATTERNS"]:
## continue
##
## if len(v) == 1:
## data[k] = v[0]
##
## return data
##
##def DoxySourceScan(node, env, path):
## """
## Doxygen Doxyfile source scanner. This should scan the Doxygen file and add
## any files used to generate docs to the list of source files.
## """
## default_file_patterns = [
## '*.c', '*.cc', '*.cxx', '*.cpp', '*.c++', '*.java', '*.ii', '*.ixx',
## '*.ipp', '*.i++', '*.inl', '*.h', '*.hh ', '*.hxx', '*.hpp', '*.h++',
## '*.idl', '*.odl', '*.cs', '*.php', '*.php3', '*.inc', '*.m', '*.mm',
## '*.py',
## ]
##
## default_exclude_patterns = [
## '*~',
## ]
##
## sources = []
##
## data = DoxyfileParse(node.get_contents())
##
## if data.get("RECURSIVE", "NO") == "YES":
## recursive = True
## else:
## recursive = False
##
## file_patterns = data.get("FILE_PATTERNS", default_file_patterns)
## exclude_patterns = data.get("EXCLUDE_PATTERNS", default_exclude_patterns)
##
## for node in data.get("INPUT", []):
## if os.path.isfile(node):
## sources.add(node)
## elif os.path.isdir(node):
## if recursive:
## for root, dirs, files in os.walk(node):
## for f in files:
## filename = os.path.join(root, f)
##
## pattern_check = reduce(lambda x, y: x or bool(fnmatch(filename, y)), file_patterns, False)
## exclude_check = reduce(lambda x, y: x and fnmatch(filename, y), exclude_patterns, True)
##
## if pattern_check and not exclude_check:
## sources.append(filename)
## else:
## for pattern in file_patterns:
## sources.extend(glob.glob("/".join([node, pattern])))
## sources = map( lambda path: env.File(path), sources )
## return sources
##
##
##def DoxySourceScanCheck(node, env):
## """Check if we should scan this file"""
## return os.path.isfile(node.path)
def srcDistEmitter(source, target, env):
## """Doxygen Doxyfile emitter"""
## # possible output formats and their default values and output locations
## output_formats = {
## "HTML": ("YES", "html"),
## "LATEX": ("YES", "latex"),
## "RTF": ("NO", "rtf"),
## "MAN": ("YES", "man"),
## "XML": ("NO", "xml"),
## }
##
## data = DoxyfileParse(source[0].get_contents())
##
## targets = []
## out_dir = data.get("OUTPUT_DIRECTORY", ".")
##
## # add our output locations
## for (k, v) in output_formats.items():
## if data.get("GENERATE_" + k, v[0]) == "YES":
## targets.append(env.Dir( os.path.join(out_dir, data.get(k + "_OUTPUT", v[1]))) )
##
## # don't clobber targets
## for node in targets:
## env.Precious(node)
##
## # set up cleaning stuff
## for node in targets:
## env.Clean(node, node)
##
## return (targets, source)
return (target,source)
def generate(env):
"""
Add builders and construction variables for the
SrcDist tool.
"""
## doxyfile_scanner = env.Scanner(
## DoxySourceScan,
## "DoxySourceScan",
## scan_check = DoxySourceScanCheck,
## )
if targz.exists(env):
srcdist_builder = targz.makeBuilder( srcDistEmitter )
env['BUILDERS']['SrcDist'] = srcdist_builder
def exists(env):
"""
Make sure srcdist exists.
"""
return targz.exists(env)

View File

@@ -0,0 +1,79 @@
import re
from SCons.Script import * # the usual scons stuff you get in a SConscript
def generate(env):
"""
Add builders and construction variables for the
SubstInFile tool.
Adds SubstInFile builder, which substitutes the keys->values of SUBST_DICT
from the source to the target.
The values of SUBST_DICT first have any construction variables expanded
(its keys are not expanded).
If a value of SUBST_DICT is a python callable function, it is called and
the result is expanded as the value.
If there's more than one source and more than one target, each target gets
substituted from the corresponding source.
"""
def do_subst_in_file(targetfile, sourcefile, dict):
"""Replace all instances of the keys of dict with their values.
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
"""
try:
f = open(sourcefile, 'rb')
contents = f.read()
f.close()
except:
raise SCons.Errors.UserError, "Can't read source file %s"%sourcefile
for (k,v) in dict.items():
contents = re.sub(k, v, contents)
try:
f = open(targetfile, 'wb')
f.write(contents)
f.close()
except:
raise SCons.Errors.UserError, "Can't write target file %s"%targetfile
return 0 # success
def subst_in_file(target, source, env):
if not env.has_key('SUBST_DICT'):
raise SCons.Errors.UserError, "SubstInFile requires SUBST_DICT to be set."
d = dict(env['SUBST_DICT']) # copy it
for (k,v) in d.items():
if callable(v):
d[k] = env.subst(v()).replace('\\','\\\\')
elif SCons.Util.is_String(v):
d[k] = env.subst(v).replace('\\','\\\\')
else:
raise SCons.Errors.UserError, "SubstInFile: key %s: %s must be a string or callable"%(k, repr(v))
for (t,s) in zip(target, source):
return do_subst_in_file(str(t), str(s), d)
def subst_in_file_string(target, source, env):
"""This is what gets printed on the console."""
return '\n'.join(['Substituting vars from %s into %s'%(str(s), str(t))
for (t,s) in zip(target, source)])
def subst_emitter(target, source, env):
"""Add dependency from substituted SUBST_DICT to target.
Returns original target, source tuple unchanged.
"""
d = env['SUBST_DICT'].copy() # copy it
for (k,v) in d.items():
if callable(v):
d[k] = env.subst(v())
elif SCons.Util.is_String(v):
d[k]=env.subst(v)
Depends(target, SCons.Node.Python.Value(d))
return target, source
## env.Append(TOOLS = 'substinfile') # this should be done automatically by SCons, shouldn't it?
subst_action = SCons.Action.Action( subst_in_file, subst_in_file_string )
env['BUILDERS']['SubstInFile'] = Builder(action=subst_action, emitter=subst_emitter)
def exists(env):
"""
Make sure tool exists.
"""
return True

scons-tools/targz.py Normal file
View File

@@ -0,0 +1,82 @@
"""tarball
Tool-specific initialization for tarball.
"""
## Commands to tackle a command based implementation:
##to unpack on the fly...
##gunzip < FILE.tar.gz | tar xvf -
##to pack on the fly...
##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
try:
import gzip
import tarfile
internal_targz = 1
except ImportError:
internal_targz = 0
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
if internal_targz:
def targz(target, source, env):
def archive_name( path ):
path = os.path.normpath( os.path.abspath( path ) )
common_path = os.path.commonprefix( (base_dir, path) )
archive_name = path[len(common_path):]
return archive_name
def visit(tar, dirname, names):
for name in names:
path = os.path.join(dirname, name)
if os.path.isfile(path):
tar.add(path, archive_name(path) )
compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath )
target_path = str(target[0])
fileobj = gzip.GzipFile( target_path, 'wb', compression )
tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
for source in source:
source_path = str(source)
if source.isdir():
os.path.walk(source_path, visit, tar)
else:
tar.add(source_path, archive_name(source_path) ) # filename, arcname
tar.close()
targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])
def makeBuilder( emitter = None ):
return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,
suffix = '$TARGZ_SUFFIX',
multi = 1)
TarGzBuilder = makeBuilder()
def generate(env):
"""Add Builders and construction variables for zip to an Environment.
The following environment variables may be set:
TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level).
TARGZ_BASEDIR: base directory used to determine the archive name (this allows the archive name to be relative
to something other than the top dir).
"""
env['BUILDERS']['TarGz'] = TarGzBuilder
env['TARGZ_COM'] = targzAction
env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9
env['TARGZ_SUFFIX'] = '.tar.gz'
env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory.
else:
def generate(env):
pass
def exists(env):
return internal_targz

View File

@@ -1,5 +0,0 @@
ADD_SUBDIRECTORY(lib_json)
IF(JSONCPP_WITH_TESTS)
ADD_SUBDIRECTORY(jsontestrunner)
ADD_SUBDIRECTORY(test_lib_json)
ENDIF()

View File

@@ -1,25 +0,0 @@
FIND_PACKAGE(PythonInterp 2.6)
ADD_EXECUTABLE(jsontestrunner_exe
main.cpp
)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL )
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib_static)
ENDIF()
SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)
IF(PYTHONINTERP_FOUND)
# Run end to end parser/writer tests
SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test)
SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py)
ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests
"${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
DEPENDS jsontestrunner_exe jsoncpp_test
)
ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)
ENDIF()

View File

@@ -1,104 +1,56 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
/* This executable is used for testing parser/writer using real JSON files.
*/
#include <json/json.h>
#include <algorithm> // sort
#include <sstream>
#include <stdio.h>
#if defined(_MSC_VER) && _MSC_VER >= 1310
# pragma warning( disable: 4996 ) // disable fopen deprecation warning
#endif
struct Options
static std::string
readInputTestFile( const char *path )
{
JSONCPP_STRING path;
Json::Features features;
bool parseOnly;
typedef JSONCPP_STRING (*writeFuncType)(Json::Value const&);
writeFuncType write;
};
static JSONCPP_STRING normalizeFloatingPointStr(double value) {
char buffer[32];
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
sprintf_s(buffer, sizeof(buffer), "%.16g", value);
#else
snprintf(buffer, sizeof(buffer), "%.16g", value);
#endif
buffer[sizeof(buffer) - 1] = 0;
JSONCPP_STRING s(buffer);
JSONCPP_STRING::size_type index = s.find_last_of("eE");
if (index != JSONCPP_STRING::npos) {
JSONCPP_STRING::size_type hasSign =
(s[index + 1] == '+' || s[index + 1] == '-') ? 1 : 0;
JSONCPP_STRING::size_type exponentStartIndex = index + 1 + hasSign;
JSONCPP_STRING normalized = s.substr(0, exponentStartIndex);
JSONCPP_STRING::size_type indexDigit =
s.find_first_not_of('0', exponentStartIndex);
JSONCPP_STRING exponent = "0";
if (indexDigit !=
JSONCPP_STRING::npos) // There is an exponent different from 0
{
exponent = s.substr(indexDigit);
}
return normalized + exponent;
}
return s;
}
static JSONCPP_STRING readInputTestFile(const char* path) {
FILE *file = fopen( path, "rb" );
if ( !file )
return JSONCPP_STRING("");
return std::string("");
fseek( file, 0, SEEK_END );
long const size = ftell(file);
unsigned long const usize = static_cast<unsigned long>(size);
long size = ftell( file );
fseek( file, 0, SEEK_SET );
JSONCPP_STRING text;
std::string text;
char *buffer = new char[size+1];
buffer[size] = 0;
if (fread(buffer, 1, usize, file) == usize)
if ( fread( buffer, 1, size, file ) == (unsigned long)size )
text = buffer;
fclose( file );
delete[] buffer;
return text;
}
static void
printValueTree(FILE* fout, Json::Value& value, const JSONCPP_STRING& path = ".") {
if (value.hasComment(Json::commentBefore)) {
fprintf(fout, "%s\n", value.getComment(Json::commentBefore).c_str());
}
switch (value.type()) {
printValueTree( FILE *fout, Json::Value &value, const std::string &path = "." )
{
switch ( value.type() )
{
case Json::nullValue:
fprintf( fout, "%s=null\n", path.c_str() );
break;
case Json::intValue:
fprintf(fout,
"%s=%s\n",
path.c_str(),
Json::valueToString(value.asLargestInt()).c_str());
fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestInt() ).c_str() );
break;
case Json::uintValue:
fprintf(fout,
"%s=%s\n",
path.c_str(),
Json::valueToString(value.asLargestUInt()).c_str());
fprintf( fout, "%s=%s\n", path.c_str(), Json::valueToString( value.asLargestUInt() ).c_str() );
break;
case Json::realValue:
fprintf(fout,
"%s=%s\n",
path.c_str(),
normalizeFloatingPointStr(value.asDouble()).c_str());
fprintf( fout, "%s=%.16g\n", path.c_str(), value.asDouble() );
break;
case Json::stringValue:
fprintf( fout, "%s=\"%s\"\n", path.c_str(), value.asString().c_str() );
@@ -106,119 +58,109 @@ printValueTree(FILE* fout, Json::Value& value, const JSONCPP_STRING& path = ".")
case Json::booleanValue:
fprintf( fout, "%s=%s\n", path.c_str(), value.asBool() ? "true" : "false" );
break;
case Json::arrayValue: {
case Json::arrayValue:
{
fprintf( fout, "%s=[]\n", path.c_str() );
Json::ArrayIndex size = value.size();
for (Json::ArrayIndex index = 0; index < size; ++index) {
int size = value.size();
for ( int index =0; index < size; ++index )
{
static char buffer[16];
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
sprintf_s(buffer, sizeof(buffer), "[%d]", index);
#else
snprintf(buffer, sizeof(buffer), "[%d]", index);
#endif
sprintf( buffer, "[%d]", index );
printValueTree( fout, value[index], path + buffer );
}
} break;
case Json::objectValue: {
}
break;
case Json::objectValue:
{
fprintf( fout, "%s={}\n", path.c_str() );
Json::Value::Members members( value.getMemberNames() );
std::sort( members.begin(), members.end() );
JSONCPP_STRING suffix = *(path.end() - 1) == '.' ? "" : ".";
std::string suffix = *(path.end()-1) == '.' ? "" : ".";
for ( Json::Value::Members::iterator it = members.begin();
it != members.end();
++it) {
const JSONCPP_STRING name = *it;
++it )
{
const std::string &name = *it;
printValueTree( fout, value[name], path + suffix + name );
}
} break;
}
break;
default:
break;
}
if (value.hasComment(Json::commentAfter)) {
fprintf(fout, "%s\n", value.getComment(Json::commentAfter).c_str());
}
}
static int parseAndSaveValueTree(const JSONCPP_STRING& input,
const JSONCPP_STRING& actual,
const JSONCPP_STRING& kind,
static int
parseAndSaveValueTree( const std::string &input,
const std::string &actual,
const std::string &kind,
Json::Value &root,
const Json::Features &features,
bool parseOnly,
Json::Value* root)
bool parseOnly )
{
Json::Reader reader( features );
bool parsingSuccessful = reader.parse(input.data(), input.data() + input.size(), *root);
if (!parsingSuccessful) {
bool parsingSuccessful = reader.parse( input, root );
if ( !parsingSuccessful )
{
printf( "Failed to parse %s file: \n%s\n",
kind.c_str(),
reader.getFormattedErrorMessages().c_str() );
return 1;
}
if (!parseOnly) {
if ( !parseOnly )
{
FILE *factual = fopen( actual.c_str(), "wt" );
if (!factual) {
if ( !factual )
{
printf( "Failed to create %s actual file.\n", kind.c_str() );
return 2;
}
printValueTree(factual, *root);
printValueTree( factual, root );
fclose( factual );
}
return 0;
}
// static JSONCPP_STRING useFastWriter(Json::Value const& root) {
static int
rewriteValueTree( const std::string &rewritePath,
const Json::Value &root,
std::string &rewrite )
{
//Json::FastWriter writer;
//writer.enableYAMLCompatibility();
// return writer.write(root);
// }
static JSONCPP_STRING useStyledWriter(
Json::Value const& root)
{
Json::StyledWriter writer;
return writer.write(root);
}
static JSONCPP_STRING useStyledStreamWriter(
Json::Value const& root)
{
Json::StyledStreamWriter writer;
JSONCPP_OSTRINGSTREAM sout;
writer.write(sout, root);
return sout.str();
}
static JSONCPP_STRING useBuiltStyledStreamWriter(
Json::Value const& root)
{
Json::StreamWriterBuilder builder;
return Json::writeString(builder, root);
}
static int rewriteValueTree(
const JSONCPP_STRING& rewritePath,
const Json::Value& root,
Options::writeFuncType write,
JSONCPP_STRING* rewrite)
{
*rewrite = write(root);
rewrite = writer.write( root );
FILE *fout = fopen( rewritePath.c_str(), "wt" );
if (!fout) {
if ( !fout )
{
printf( "Failed to create rewrite file: %s\n", rewritePath.c_str() );
return 2;
}
fprintf(fout, "%s\n", rewrite->c_str());
fprintf( fout, "%s\n", rewrite.c_str() );
fclose( fout );
return 0;
}
static JSONCPP_STRING removeSuffix(const JSONCPP_STRING& path,
const JSONCPP_STRING& extension) {
static std::string
removeSuffix( const std::string &path,
const std::string &extension )
{
if ( extension.length() >= path.length() )
return JSONCPP_STRING("");
JSONCPP_STRING suffix = path.substr(path.length() - extension.length());
return std::string("");
std::string suffix = path.substr( path.length() - extension.length() );
if ( suffix != extension )
return JSONCPP_STRING("");
return std::string("");
return path.substr( 0, path.length() - extension.length() );
}
static void printConfig() {
static void
printConfig()
{
// Print the configuration used to compile JsonCpp
#if defined(JSON_NO_INT64)
printf( "JSON_NO_INT64=1\n" );
@@ -227,105 +169,101 @@ static void printConfig() {
#endif
}
static int printUsage(const char* argv[]) {
static int
printUsage( const char *argv[] )
{
printf( "Usage: %s [--strict] input-json-file", argv[0] );
return 3;
}
static int parseCommandLine(
int argc, const char* argv[], Options* opts)
int
parseCommandLine( int argc, const char *argv[],
Json::Features &features, std::string &path,
bool &parseOnly )
{
parseOnly = false;
if ( argc < 2 )
{
opts->parseOnly = false;
opts->write = &useStyledWriter;
if (argc < 2) {
return printUsage( argv );
}
int index = 1;
if (JSONCPP_STRING(argv[index]) == "--json-checker") {
opts->features = Json::Features::strictMode();
opts->parseOnly = true;
if ( std::string(argv[1]) == "--json-checker" )
{
features = Json::Features::strictMode();
parseOnly = true;
++index;
}
if (JSONCPP_STRING(argv[index]) == "--json-config") {
if ( std::string(argv[1]) == "--json-config" )
{
printConfig();
return 3;
}
if (JSONCPP_STRING(argv[index]) == "--json-writer") {
++index;
JSONCPP_STRING const writerName(argv[index++]);
if (writerName == "StyledWriter") {
opts->write = &useStyledWriter;
} else if (writerName == "StyledStreamWriter") {
opts->write = &useStyledStreamWriter;
} else if (writerName == "BuiltStyledStreamWriter") {
opts->write = &useBuiltStyledStreamWriter;
} else {
printf("Unknown '--json-writer %s'\n", writerName.c_str());
return 4;
}
}
if (index == argc || index + 1 < argc) {
if ( index == argc || index + 1 < argc )
{
return printUsage( argv );
}
opts->path = argv[index];
path = argv[index];
return 0;
}
static int runTest(Options const& opts)
int main( int argc, const char *argv[] )
{
int exitCode = 0;
std::string path;
Json::Features features;
bool parseOnly;
int exitCode = parseCommandLine( argc, argv, features, path, parseOnly );
if ( exitCode != 0 )
{
return exitCode;
}
JSONCPP_STRING input = readInputTestFile(opts.path.c_str());
if (input.empty()) {
printf("Failed to read input or empty input: %s\n", opts.path.c_str());
try
{
std::string input = readInputTestFile( path.c_str() );
if ( input.empty() )
{
printf( "Failed to read input or empty input: %s\n", path.c_str() );
return 3;
}
JSONCPP_STRING basePath = removeSuffix(opts.path, ".json");
if (!opts.parseOnly && basePath.empty()) {
printf("Bad input path. Path does not end with '.expected':\n%s\n",
opts.path.c_str());
std::string basePath = removeSuffix( argv[1], ".json" );
if ( !parseOnly && basePath.empty() )
{
printf( "Bad input path. Path does not end with '.expected':\n%s\n", path.c_str() );
return 3;
}
JSONCPP_STRING const actualPath = basePath + ".actual";
JSONCPP_STRING const rewritePath = basePath + ".rewrite";
JSONCPP_STRING const rewriteActualPath = basePath + ".actual-rewrite";
std::string actualPath = basePath + ".actual";
std::string rewritePath = basePath + ".rewrite";
std::string rewriteActualPath = basePath + ".actual-rewrite";
Json::Value root;
exitCode = parseAndSaveValueTree(
input, actualPath, "input",
opts.features, opts.parseOnly, &root);
if (exitCode || opts.parseOnly) {
return exitCode;
}
JSONCPP_STRING rewrite;
exitCode = rewriteValueTree(rewritePath, root, opts.write, &rewrite);
if (exitCode) {
return exitCode;
}
exitCode = parseAndSaveValueTree( input, actualPath, "input", root, features, parseOnly );
if ( exitCode == 0 && !parseOnly )
{
std::string rewrite;
exitCode = rewriteValueTree( rewritePath, root, rewrite );
if ( exitCode == 0 )
{
Json::Value rewriteRoot;
exitCode = parseAndSaveValueTree(
rewrite, rewriteActualPath, "rewrite",
opts.features, opts.parseOnly, &rewriteRoot);
if (exitCode) {
return exitCode;
exitCode = parseAndSaveValueTree( rewrite, rewriteActualPath,
"rewrite", rewriteRoot, features, parseOnly );
}
return 0;
}
int main(int argc, const char* argv[]) {
Options opts;
try {
int exitCode = parseCommandLine(argc, argv, &opts);
if (exitCode != 0) {
printf("Failed to parse command-line.");
return exitCode;
}
return runTest(opts);
}
catch (const std::exception& e) {
catch ( const std::exception &e )
{
printf( "Unhandled exception:\n%s\n", e.what() );
return 1;
}
exitCode = 1;
}
return exitCode;
}
#pragma GCC diagnostic pop

View File

@@ -0,0 +1,9 @@
Import( 'env_testing buildJSONTests' )
buildJSONTests( env_testing, Split( """
main.cpp
""" ),
'jsontestrunner' )
# For 'check' to work, 'libs' must be built first.
env_testing.Depends('jsontestrunner', '#libs')

View File

@@ -1,117 +0,0 @@
IF( CMAKE_COMPILER_IS_GNUCXX )
#Get compiler version.
EXECUTE_PROCESS( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion
OUTPUT_VARIABLE GNUCXX_VERSION )
#-Werror=* was introduced -after- GCC 4.1.2
IF( GNUCXX_VERSION VERSION_GREATER 4.1.2 )
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
ENDIF()
ENDIF( CMAKE_COMPILER_IS_GNUCXX )
INCLUDE(CheckIncludeFileCXX)
INCLUDE(CheckTypeSize)
INCLUDE(CheckStructHasMember)
INCLUDE(CheckCXXSymbolExists)
check_include_file_cxx(clocale HAVE_CLOCALE)
check_cxx_symbol_exists(localeconv clocale HAVE_LOCALECONV)
IF(CMAKE_VERSION VERSION_LESS 3.0.0)
# The "LANGUAGE CXX" parameter is not supported in CMake versions below 3,
# so the C compiler and header has to be used.
check_include_file(locale.h HAVE_LOCALE_H)
SET(CMAKE_EXTRA_INCLUDE_FILES locale.h)
check_type_size("struct lconv" LCONV_SIZE)
UNSET(CMAKE_EXTRA_INCLUDE_FILES)
check_struct_has_member("struct lconv" decimal_point locale.h HAVE_DECIMAL_POINT)
ELSE()
SET(CMAKE_EXTRA_INCLUDE_FILES clocale)
check_type_size(lconv LCONV_SIZE LANGUAGE CXX)
UNSET(CMAKE_EXTRA_INCLUDE_FILES)
check_struct_has_member(lconv decimal_point clocale HAVE_DECIMAL_POINT LANGUAGE CXX)
ENDIF()
IF(NOT (HAVE_CLOCALE AND HAVE_LCONV_SIZE AND HAVE_DECIMAL_POINT AND HAVE_LOCALECONV))
MESSAGE(WARNING "Locale functionality is not supported")
ADD_DEFINITIONS(-DJSONCPP_NO_LOCALE_SUPPORT)
ENDIF()
SET( JSONCPP_INCLUDE_DIR ../../include )
SET( PUBLIC_HEADERS
${JSONCPP_INCLUDE_DIR}/json/config.h
${JSONCPP_INCLUDE_DIR}/json/forwards.h
${JSONCPP_INCLUDE_DIR}/json/features.h
${JSONCPP_INCLUDE_DIR}/json/value.h
${JSONCPP_INCLUDE_DIR}/json/reader.h
${JSONCPP_INCLUDE_DIR}/json/writer.h
${JSONCPP_INCLUDE_DIR}/json/assertions.h
${JSONCPP_INCLUDE_DIR}/json/version.h
)
SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} )
SET(jsoncpp_sources
json_tool.h
json_reader.cpp
json_valueiterator.inl
json_value.cpp
json_writer.cpp
version.h.in)
# Install instructions for this target
IF(JSONCPP_WITH_CMAKE_PACKAGE)
SET(INSTALL_EXPORT EXPORT jsoncpp)
ELSE(JSONCPP_WITH_CMAKE_PACKAGE)
SET(INSTALL_EXPORT)
ENDIF()
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL_BUILD )
ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_SOVERSION})
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp
DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
# Set library's runtime search path on OSX
IF(APPLE)
SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES INSTALL_RPATH "@loader_path/." )
ENDIF()
INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})
IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>)
ENDIF()
ENDIF()
IF(BUILD_STATIC_LIBS)
ADD_LIBRARY(jsoncpp_lib_static STATIC ${PUBLIC_HEADERS} ${jsoncpp_sources})
SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_SOVERSION})
# avoid name clashes on windows as the shared import lib is also named jsoncpp.lib
if (NOT DEFINED STATIC_SUFFIX AND BUILD_SHARED_LIBS)
set (STATIC_SUFFIX "_static")
endif ()
set_target_properties (jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp${STATIC_SUFFIX}
DEBUG_OUTPUT_NAME jsoncpp${STATIC_SUFFIX}${DEBUG_LIBNAME_SUFFIX})
INSTALL( TARGETS jsoncpp_lib_static ${INSTALL_EXPORT}
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})
IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib_static PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
)
ENDIF()
ENDIF()

View File

@@ -0,0 +1,130 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED
# define JSONCPP_BATCHALLOCATOR_H_INCLUDED
# include <stdlib.h>
# include <assert.h>
# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
namespace Json {
/* Fast memory allocator.
*
* This memory allocator allocates memory for a batch of objects (specified by
* the page size, i.e. the number of objects in each page).
*
* It does not allow the destruction of a single object. All the allocated objects
* can be destroyed at once. The memory can be either released or reused for future
* allocation.
*
* The in-place new operator must be used to construct the object using the pointer
* returned by allocate.
*/
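// Illustrative usage sketch (assumed, for documentation only):
//   BatchAllocator<Json::Value, 1> allocator;
//   Json::Value *v = allocator.allocate(); // raw, uninitialized storage
//   new (v) Json::Value();                 // construct with placement new
//   v->~Json::Value();                     // caller destructs explicitly...
//   allocator.release( v );                // ...then returns the slot to the free list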
template<typename AllocatedType
,const unsigned int objectPerAllocation>
class BatchAllocator
{
public:
typedef AllocatedType Type;
BatchAllocator( unsigned int objectsPerPage = 255 )
: freeHead_( 0 )
, objectsPerPage_( objectsPerPage )
{
// printf( "Size: %d => %s\n", sizeof(AllocatedType), typeid(AllocatedType).name() );
assert( sizeof(AllocatedType) * objectPerAllocation >= sizeof(AllocatedType *) ); // We must be able to store a slist in the object free space.
assert( objectsPerPage >= 16 );
batches_ = allocateBatch( 0 ); // allocate a dummy page
currentBatch_ = batches_;
}
~BatchAllocator()
{
for ( BatchInfo *batch = batches_; batch; )
{
BatchInfo *nextBatch = batch->next_;
free( batch );
batch = nextBatch;
}
}
/// allocate space for an array of objectPerAllocation object.
/// @warning it is the responsibility of the caller to call the objects' constructors.
AllocatedType *allocate()
{
if ( freeHead_ ) // returns node from free list.
{
AllocatedType *object = freeHead_;
freeHead_ = *(AllocatedType **)object;
return object;
}
if ( currentBatch_->used_ == currentBatch_->end_ )
{
currentBatch_ = currentBatch_->next_;
while ( currentBatch_ && currentBatch_->used_ == currentBatch_->end_ )
currentBatch_ = currentBatch_->next_;
if ( !currentBatch_ ) // no free batch found, allocate a new one
{
currentBatch_ = allocateBatch( objectsPerPage_ );
currentBatch_->next_ = batches_; // insert at the head of the list
batches_ = currentBatch_;
}
}
AllocatedType *allocated = currentBatch_->used_;
currentBatch_->used_ += objectPerAllocation;
return allocated;
}
/// Release the object.
/// @warning it is the responsibility of the caller to actually destruct the object.
void release( AllocatedType *object )
{
assert( object != 0 );
*(AllocatedType **)object = freeHead_;
freeHead_ = object;
}
private:
struct BatchInfo
{
BatchInfo *next_;
AllocatedType *used_;
AllocatedType *end_;
AllocatedType buffer_[objectPerAllocation];
};
// disabled copy constructor and assignment operator.
BatchAllocator( const BatchAllocator & );
void operator =( const BatchAllocator &);
static BatchInfo *allocateBatch( unsigned int objectsPerPage )
{
const unsigned int mallocSize = sizeof(BatchInfo) - sizeof(AllocatedType)* objectPerAllocation
+ sizeof(AllocatedType) * objectPerAllocation * objectsPerPage;
BatchInfo *batch = static_cast<BatchInfo*>( malloc( mallocSize ) );
batch->next_ = 0;
batch->used_ = batch->buffer_;
batch->end_ = batch->buffer_ + objectsPerPage;
return batch;
}
BatchInfo *batches_;
BatchInfo *currentBatch_;
/// Head of a singly linked list within the allocated space of freed objects
AllocatedType *freeHead_;
unsigned int objectsPerPage_;
};
} // namespace Json
# endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION
#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED

View File

@@ -0,0 +1,456 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
// included by json_value.cpp
namespace Json {
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueInternalArray
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
ValueArrayAllocator::~ValueArrayAllocator()
{
}
// //////////////////////////////////////////////////////////////////
// class DefaultValueArrayAllocator
// //////////////////////////////////////////////////////////////////
#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
class DefaultValueArrayAllocator : public ValueArrayAllocator
{
public: // overridden from ValueArrayAllocator
virtual ~DefaultValueArrayAllocator()
{
}
virtual ValueInternalArray *newArray()
{
return new ValueInternalArray();
}
virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other )
{
return new ValueInternalArray( other );
}
virtual void destructArray( ValueInternalArray *array )
{
delete array;
}
virtual void reallocateArrayPageIndex( Value **&indexes,
ValueInternalArray::PageIndex &indexCount,
ValueInternalArray::PageIndex minNewIndexCount )
{
ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1;
if ( minNewIndexCount > newIndexCount )
newIndexCount = minNewIndexCount;
void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
if ( !newIndexes )
throw std::bad_alloc();
indexCount = newIndexCount;
indexes = static_cast<Value **>( newIndexes );
}
virtual void releaseArrayPageIndex( Value **indexes,
ValueInternalArray::PageIndex indexCount )
{
if ( indexes )
free( indexes );
}
virtual Value *allocateArrayPage()
{
return static_cast<Value *>( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) );
}
virtual void releaseArrayPage( Value *value )
{
if ( value )
free( value );
}
};
#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
/// @todo make this thread-safe (lock when accessing the batch allocator)
class DefaultValueArrayAllocator : public ValueArrayAllocator
{
public: // overridden from ValueArrayAllocator
virtual ~DefaultValueArrayAllocator()
{
}
virtual ValueInternalArray *newArray()
{
ValueInternalArray *array = arraysAllocator_.allocate();
new (array) ValueInternalArray(); // placement new
return array;
}
virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other )
{
ValueInternalArray *array = arraysAllocator_.allocate();
new (array) ValueInternalArray( other ); // placement new
return array;
}
virtual void destructArray( ValueInternalArray *array )
{
if ( array )
{
array->~ValueInternalArray();
arraysAllocator_.release( array );
}
}
virtual void reallocateArrayPageIndex( Value **&indexes,
ValueInternalArray::PageIndex &indexCount,
ValueInternalArray::PageIndex minNewIndexCount )
{
ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1;
if ( minNewIndexCount > newIndexCount )
newIndexCount = minNewIndexCount;
void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
if ( !newIndexes )
throw std::bad_alloc();
indexCount = newIndexCount;
indexes = static_cast<Value **>( newIndexes );
}
virtual void releaseArrayPageIndex( Value **indexes,
ValueInternalArray::PageIndex indexCount )
{
if ( indexes )
free( indexes );
}
virtual Value *allocateArrayPage()
{
return static_cast<Value *>( pagesAllocator_.allocate() );
}
virtual void releaseArrayPage( Value *value )
{
if ( value )
pagesAllocator_.release( value );
}
private:
BatchAllocator<ValueInternalArray,1> arraysAllocator_;
BatchAllocator<Value,ValueInternalArray::itemsPerPage> pagesAllocator_;
};
#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
static ValueArrayAllocator *&arrayAllocator()
{
static DefaultValueArrayAllocator defaultAllocator;
static ValueArrayAllocator *arrayAllocator = &defaultAllocator;
return arrayAllocator;
}
static struct DummyArrayAllocatorInitializer {
DummyArrayAllocatorInitializer()
{
arrayAllocator(); // ensure arrayAllocator() statics are initialized before main().
}
} dummyArrayAllocatorInitializer;
// //////////////////////////////////////////////////////////////////
// class ValueInternalArray
// //////////////////////////////////////////////////////////////////
bool
ValueInternalArray::equals( const IteratorState &x,
const IteratorState &other )
{
return x.array_ == other.array_
&& x.currentItemIndex_ == other.currentItemIndex_
&& x.currentPageIndex_ == other.currentPageIndex_;
}
void
ValueInternalArray::increment( IteratorState &it )
{
JSON_ASSERT_MESSAGE( it.array_ &&
(it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_
!= it.array_->size_,
"ValueInternalArray::increment(): moving iterator beyond end" );
++(it.currentItemIndex_);
if ( it.currentItemIndex_ == itemsPerPage )
{
it.currentItemIndex_ = 0;
++(it.currentPageIndex_);
}
}
void
ValueInternalArray::decrement( IteratorState &it )
{
JSON_ASSERT_MESSAGE( it.array_ && it.currentPageIndex_ == it.array_->pages_
&& it.currentItemIndex_ == 0,
"ValueInternalArray::decrement(): moving iterator beyond end" );
if ( it.currentItemIndex_ == 0 )
{
it.currentItemIndex_ = itemsPerPage-1;
--(it.currentPageIndex_);
}
else
{
--(it.currentItemIndex_);
}
}
Value &
ValueInternalArray::unsafeDereference( const IteratorState &it )
{
return (*(it.currentPageIndex_))[it.currentItemIndex_];
}
Value &
ValueInternalArray::dereference( const IteratorState &it )
{
JSON_ASSERT_MESSAGE( it.array_ &&
(it.currentPageIndex_ - it.array_->pages_)*itemsPerPage + it.currentItemIndex_
< it.array_->size_,
"ValueInternalArray::dereference(): dereferencing invalid iterator" );
return unsafeDereference( it );
}
void
ValueInternalArray::makeBeginIterator( IteratorState &it ) const
{
it.array_ = const_cast<ValueInternalArray *>( this );
it.currentItemIndex_ = 0;
it.currentPageIndex_ = pages_;
}
void
ValueInternalArray::makeIterator( IteratorState &it, ArrayIndex index ) const
{
it.array_ = const_cast<ValueInternalArray *>( this );
it.currentItemIndex_ = index % itemsPerPage;
it.currentPageIndex_ = pages_ + index / itemsPerPage;
}
void
ValueInternalArray::makeEndIterator( IteratorState &it ) const
{
makeIterator( it, size_ );
}
ValueInternalArray::ValueInternalArray()
: pages_( 0 )
, size_( 0 )
, pageCount_( 0 )
{
}
ValueInternalArray::ValueInternalArray( const ValueInternalArray &other )
: pages_( 0 )
, pageCount_( 0 )
, size_( other.size_ )
{
PageIndex minNewPages = other.size_ / itemsPerPage;
arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages );
JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages,
"ValueInternalArray::reserve(): bad reallocation" );
IteratorState itOther;
other.makeBeginIterator( itOther );
Value *value;
for ( ArrayIndex index = 0; index < size_; ++index, increment(itOther) )
{
if ( index % itemsPerPage == 0 )
{
PageIndex pageIndex = index / itemsPerPage;
value = arrayAllocator()->allocateArrayPage();
pages_[pageIndex] = value;
}
new (value) Value( dereference( itOther ) );
}
}
ValueInternalArray &
ValueInternalArray::operator =( const ValueInternalArray &other )
{
ValueInternalArray temp( other );
swap( temp );
return *this;
}
ValueInternalArray::~ValueInternalArray()
{
// destroy all constructed items
IteratorState it;
IteratorState itEnd;
makeBeginIterator( it);
makeEndIterator( itEnd );
for ( ; !equals(it,itEnd); increment(it) )
{
Value *value = &dereference(it);
value->~Value();
}
// release all pages
PageIndex lastPageIndex = size_ / itemsPerPage;
for ( PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex )
arrayAllocator()->releaseArrayPage( pages_[pageIndex] );
// release pages index
arrayAllocator()->releaseArrayPageIndex( pages_, pageCount_ );
}
void
ValueInternalArray::swap( ValueInternalArray &other )
{
Value **tempPages = pages_;
pages_ = other.pages_;
other.pages_ = tempPages;
ArrayIndex tempSize = size_;
size_ = other.size_;
other.size_ = tempSize;
PageIndex tempPageCount = pageCount_;
pageCount_ = other.pageCount_;
other.pageCount_ = tempPageCount;
}
void
ValueInternalArray::clear()
{
ValueInternalArray dummy;
swap( dummy );
}
void
ValueInternalArray::resize( ArrayIndex newSize )
{
if ( newSize == 0 )
clear();
else if ( newSize < size_ )
{
IteratorState it;
IteratorState itEnd;
makeIterator( it, newSize );
makeIterator( itEnd, size_ );
for ( ; !equals(it,itEnd); increment(it) )
{
Value *value = &dereference(it);
value->~Value();
}
PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage;
PageIndex lastPageIndex = size_ / itemsPerPage;
for ( ; pageIndex < lastPageIndex; ++pageIndex )
arrayAllocator()->releaseArrayPage( pages_[pageIndex] );
size_ = newSize;
}
else if ( newSize > size_ )
resolveReference( newSize );
}
void
ValueInternalArray::makeIndexValid( ArrayIndex index )
{
// Need to enlarge page index ?
if ( index >= pageCount_ * itemsPerPage )
{
PageIndex minNewPages = (index + 1) / itemsPerPage;
arrayAllocator()->reallocateArrayPageIndex( pages_, pageCount_, minNewPages );
JSON_ASSERT_MESSAGE( pageCount_ >= minNewPages, "ValueInternalArray::reserve(): bad reallocation" );
}
// Need to allocate new pages ?
ArrayIndex nextPageIndex =
(size_ % itemsPerPage) != 0 ? size_ - (size_%itemsPerPage) + itemsPerPage
: size_;
if ( nextPageIndex <= index )
{
PageIndex pageIndex = nextPageIndex / itemsPerPage;
PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1;
for ( ; pageToAllocate-- > 0; ++pageIndex )
pages_[pageIndex] = arrayAllocator()->allocateArrayPage();
}
// Initialize all new entries
IteratorState it;
IteratorState itEnd;
makeIterator( it, size_ );
size_ = index + 1;
makeIterator( itEnd, size_ );
for ( ; !equals(it,itEnd); increment(it) )
{
Value *value = &dereference(it);
new (value) Value(); // Construct a default value using placement new
}
}
Value &
ValueInternalArray::resolveReference( ArrayIndex index )
{
if ( index >= size_ )
makeIndexValid( index );
return pages_[index/itemsPerPage][index%itemsPerPage];
}
Value *
ValueInternalArray::find( ArrayIndex index ) const
{
if ( index >= size_ )
return 0;
return &(pages_[index/itemsPerPage][index%itemsPerPage]);
}
ValueInternalArray::ArrayIndex
ValueInternalArray::size() const
{
return size_;
}
int
ValueInternalArray::distance( const IteratorState &x, const IteratorState &y )
{
return indexOf(y) - indexOf(x);
}
ValueInternalArray::ArrayIndex
ValueInternalArray::indexOf( const IteratorState &iterator )
{
if ( !iterator.array_ )
return ArrayIndex(-1);
return ArrayIndex(
(iterator.currentPageIndex_ - iterator.array_->pages_) * itemsPerPage
+ iterator.currentItemIndex_ );
}
int
ValueInternalArray::compare( const ValueInternalArray &other ) const
{
int sizeDiff( size_ - other.size_ );
if ( sizeDiff != 0 )
return sizeDiff;
for ( ArrayIndex index =0; index < size_; ++index )
{
int diff = pages_[index/itemsPerPage][index%itemsPerPage].compare(
other.pages_[index/itemsPerPage][index%itemsPerPage] );
if ( diff != 0 )
return diff;
}
return 0;
}
} // namespace Json

View File

@@ -0,0 +1,615 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
// included by json_value.cpp
namespace Json {
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueInternalMap
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
/** \internal MUST be safely initialized using memset( this, 0, sizeof(ValueInternalLink) );
* This optimization is used by the fast allocator.
*/
ValueInternalLink::ValueInternalLink()
: previous_( 0 )
, next_( 0 )
{
}
ValueInternalLink::~ValueInternalLink()
{
for ( int index =0; index < itemPerLink; ++index )
{
if ( !items_[index].isItemAvailable() )
{
if ( !items_[index].isMemberNameStatic() )
free( keys_[index] );
}
else
break;
}
}
ValueMapAllocator::~ValueMapAllocator()
{
}
#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
class DefaultValueMapAllocator : public ValueMapAllocator
{
public: // overridden from ValueMapAllocator
virtual ValueInternalMap *newMap()
{
return new ValueInternalMap();
}
virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other )
{
return new ValueInternalMap( other );
}
virtual void destructMap( ValueInternalMap *map )
{
delete map;
}
virtual ValueInternalLink *allocateMapBuckets( unsigned int size )
{
return new ValueInternalLink[size];
}
virtual void releaseMapBuckets( ValueInternalLink *links )
{
delete [] links;
}
virtual ValueInternalLink *allocateMapLink()
{
return new ValueInternalLink();
}
virtual void releaseMapLink( ValueInternalLink *link )
{
delete link;
}
};
#else
/// @todo make this thread-safe (lock when accessing the batch allocator)
class DefaultValueMapAllocator : public ValueMapAllocator
{
public: // overridden from ValueMapAllocator
virtual ValueInternalMap *newMap()
{
ValueInternalMap *map = mapsAllocator_.allocate();
new (map) ValueInternalMap(); // placement new
return map;
}
virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other )
{
ValueInternalMap *map = mapsAllocator_.allocate();
new (map) ValueInternalMap( other ); // placement new
return map;
}
virtual void destructMap( ValueInternalMap *map )
{
if ( map )
{
map->~ValueInternalMap();
mapsAllocator_.release( map );
}
}
virtual ValueInternalLink *allocateMapBuckets( unsigned int size )
{
return new ValueInternalLink[size];
}
virtual void releaseMapBuckets( ValueInternalLink *links )
{
delete [] links;
}
virtual ValueInternalLink *allocateMapLink()
{
ValueInternalLink *link = linksAllocator_.allocate();
memset( link, 0, sizeof(ValueInternalLink) );
return link;
}
virtual void releaseMapLink( ValueInternalLink *link )
{
link->~ValueInternalLink();
linksAllocator_.release( link );
}
private:
BatchAllocator<ValueInternalMap,1> mapsAllocator_;
BatchAllocator<ValueInternalLink,1> linksAllocator_;
};
#endif
static ValueMapAllocator *&mapAllocator()
{
static DefaultValueMapAllocator defaultAllocator;
static ValueMapAllocator *mapAllocator = &defaultAllocator;
return mapAllocator;
}
static struct DummyMapAllocatorInitializer {
DummyMapAllocatorInitializer()
{
mapAllocator(); // ensure mapAllocator() statics are initialized before main().
}
} dummyMapAllocatorInitializer;
// h(K) = value * K >> w ; with w = 32 and K prime w.r.t. 2^32.
/*
Uses a linked-list hash map:
- the buckets array is the container;
- each linked-list element holds 6 key/value pairs (memory = (16+4) * 6 + 4 = 124);
- each value carries extra state: valid, available, deleted.
*/
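// Illustrative example (assumed): hash() below sums *key * 37 over the key's characters,
// so the single-character key "a" (0x61 == 97) hashes to 97 * 37 == 3589 and lands in
// buckets_[3589 % bucketsSize_].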
ValueInternalMap::ValueInternalMap()
: buckets_( 0 )
, tailLink_( 0 )
, bucketsSize_( 0 )
, itemCount_( 0 )
{
}
ValueInternalMap::ValueInternalMap( const ValueInternalMap &other )
: buckets_( 0 )
, tailLink_( 0 )
, bucketsSize_( 0 )
, itemCount_( 0 )
{
reserve( other.itemCount_ );
IteratorState it;
IteratorState itEnd;
other.makeBeginIterator( it );
other.makeEndIterator( itEnd );
for ( ; !equals(it,itEnd); increment(it) )
{
bool isStatic;
const char *memberName = key( it, isStatic );
const Value &aValue = value( it );
resolveReference(memberName, isStatic) = aValue;
}
}
ValueInternalMap &
ValueInternalMap::operator =( const ValueInternalMap &other )
{
ValueInternalMap dummy( other );
swap( dummy );
return *this;
}
ValueInternalMap::~ValueInternalMap()
{
if ( buckets_ )
{
for ( BucketIndex bucketIndex =0; bucketIndex < bucketsSize_; ++bucketIndex )
{
ValueInternalLink *link = buckets_[bucketIndex].next_;
while ( link )
{
ValueInternalLink *linkToRelease = link;
link = link->next_;
mapAllocator()->releaseMapLink( linkToRelease );
}
}
mapAllocator()->releaseMapBuckets( buckets_ );
}
}
void
ValueInternalMap::swap( ValueInternalMap &other )
{
ValueInternalLink *tempBuckets = buckets_;
buckets_ = other.buckets_;
other.buckets_ = tempBuckets;
ValueInternalLink *tempTailLink = tailLink_;
tailLink_ = other.tailLink_;
other.tailLink_ = tempTailLink;
BucketIndex tempBucketsSize = bucketsSize_;
bucketsSize_ = other.bucketsSize_;
other.bucketsSize_ = tempBucketsSize;
BucketIndex tempItemCount = itemCount_;
itemCount_ = other.itemCount_;
other.itemCount_ = tempItemCount;
}
void
ValueInternalMap::clear()
{
ValueInternalMap dummy;
swap( dummy );
}
ValueInternalMap::BucketIndex
ValueInternalMap::size() const
{
return itemCount_;
}
bool
ValueInternalMap::reserveDelta( BucketIndex growth )
{
return reserve( itemCount_ + growth );
}
bool
ValueInternalMap::reserve( BucketIndex newItemCount )
{
if ( !buckets_ && newItemCount > 0 )
{
buckets_ = mapAllocator()->allocateMapBuckets( 1 );
bucketsSize_ = 1;
tailLink_ = &buckets_[0];
}
// BucketIndex idealBucketCount = (newItemCount + ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink;
return true;
}
const Value *
ValueInternalMap::find( const char *key ) const
{
if ( !bucketsSize_ )
return 0;
HashKey hashedKey = hash( key );
BucketIndex bucketIndex = hashedKey % bucketsSize_;
for ( const ValueInternalLink *current = &buckets_[bucketIndex];
current != 0;
current = current->next_ )
{
for ( BucketIndex index=0; index < ValueInternalLink::itemPerLink; ++index )
{
if ( current->items_[index].isItemAvailable() )
return 0;
if ( strcmp( key, current->keys_[index] ) == 0 )
return &current->items_[index];
}
}
return 0;
}
Value *
ValueInternalMap::find( const char *key )
{
const ValueInternalMap *constThis = this;
return const_cast<Value *>( constThis->find( key ) );
}
Value &
ValueInternalMap::resolveReference( const char *key,
bool isStatic )
{
HashKey hashedKey = hash( key );
if ( bucketsSize_ )
{
BucketIndex bucketIndex = hashedKey % bucketsSize_;
ValueInternalLink **previous = 0;
BucketIndex index;
for ( ValueInternalLink *current = &buckets_[bucketIndex];
current != 0;
previous = &current->next_, current = current->next_ )
{
for ( index=0; index < ValueInternalLink::itemPerLink; ++index )
{
if ( current->items_[index].isItemAvailable() )
return setNewItem( key, isStatic, current, index );
if ( strcmp( key, current->keys_[index] ) == 0 )
return current->items_[index];
}
}
}
reserveDelta( 1 );
return unsafeAdd( key, isStatic, hashedKey );
}
void
ValueInternalMap::remove( const char *key )
{
HashKey hashedKey = hash( key );
if ( !bucketsSize_ )
return;
BucketIndex bucketIndex = hashedKey % bucketsSize_;
for ( ValueInternalLink *link = &buckets_[bucketIndex];
link != 0;
link = link->next_ )
{
BucketIndex index;
for ( index =0; index < ValueInternalLink::itemPerLink; ++index )
{
if ( link->items_[index].isItemAvailable() )
return;
if ( strcmp( key, link->keys_[index] ) == 0 )
{
doActualRemove( link, index, bucketIndex );
return;
}
}
}
}
void
ValueInternalMap::doActualRemove( ValueInternalLink *link,
BucketIndex index,
BucketIndex bucketIndex )
{
// Find the last item of the bucket and swap it with the 'removed' one.
// Set the removed item's flag to 'available'.
// If the last page only contains 'available' items, then deallocate it (it is empty).
ValueInternalLink *&lastLink = getLastLinkInBucket( index );
BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1
for ( ;
lastItemIndex < ValueInternalLink::itemPerLink;
++lastItemIndex ) // may be optimized with a dichotomic (binary) search
{
if ( lastLink->items_[lastItemIndex].isItemAvailable() )
break;
}
BucketIndex lastUsedIndex = lastItemIndex - 1;
Value *valueToDelete = &link->items_[index];
Value *valueToPreserve = &lastLink->items_[lastUsedIndex];
if ( valueToDelete != valueToPreserve )
valueToDelete->swap( *valueToPreserve );
if ( lastUsedIndex == 0 ) // page is now empty
{ // remove it from bucket linked list and delete it.
ValueInternalLink *linkPreviousToLast = lastLink->previous_;
if ( linkPreviousToLast != 0 ) // the bucket link itself can not be deleted.
{
mapAllocator()->releaseMapLink( lastLink );
linkPreviousToLast->next_ = 0;
lastLink = linkPreviousToLast;
}
}
else
{
Value dummy;
valueToPreserve->swap( dummy ); // restore deleted to default Value.
valueToPreserve->setItemUsed( false );
}
--itemCount_;
}
ValueInternalLink *&
ValueInternalMap::getLastLinkInBucket( BucketIndex bucketIndex )
{
if ( bucketIndex == bucketsSize_ - 1 )
return tailLink_;
ValueInternalLink *&previous = buckets_[bucketIndex+1].previous_;
if ( !previous )
previous = &buckets_[bucketIndex];
return previous;
}
Value &
ValueInternalMap::setNewItem( const char *key,
bool isStatic,
ValueInternalLink *link,
BucketIndex index )
{
char *duplicatedKey = makeMemberName( key );
++itemCount_;
link->keys_[index] = duplicatedKey;
link->items_[index].setItemUsed();
link->items_[index].setMemberNameIsStatic( isStatic );
return link->items_[index]; // items already default constructed.
}
Value &
ValueInternalMap::unsafeAdd( const char *key,
bool isStatic,
HashKey hashedKey )
{
JSON_ASSERT_MESSAGE( bucketsSize_ > 0, "ValueInternalMap::unsafeAdd(): internal logic error." );
BucketIndex bucketIndex = hashedKey % bucketsSize_;
ValueInternalLink *&previousLink = getLastLinkInBucket( bucketIndex );
ValueInternalLink *link = previousLink;
BucketIndex index;
for ( index =0; index < ValueInternalLink::itemPerLink; ++index )
{
if ( link->items_[index].isItemAvailable() )
break;
}
if ( index == ValueInternalLink::itemPerLink ) // need to add a new page
{
ValueInternalLink *newLink = mapAllocator()->allocateMapLink();
index = 0;
link->next_ = newLink;
previousLink = newLink;
link = newLink;
}
return setNewItem( key, isStatic, link, index );
}
ValueInternalMap::HashKey
ValueInternalMap::hash( const char *key ) const
{
HashKey hash = 0;
while ( *key )
hash += *key++ * 37;
return hash;
}
int
ValueInternalMap::compare( const ValueInternalMap &other ) const
{
int sizeDiff( itemCount_ - other.itemCount_ );
if ( sizeDiff != 0 )
return sizeDiff;
// A strict ordering guarantee is required. Compare all keys FIRST, then compare values.
IteratorState it;
IteratorState itEnd;
makeBeginIterator( it );
makeEndIterator( itEnd );
for ( ; !equals(it,itEnd); increment(it) )
{
if ( !other.find( key( it ) ) )
return 1;
}
// All keys are equal, let's compare values
makeBeginIterator( it );
for ( ; !equals(it,itEnd); increment(it) )
{
const Value *otherValue = other.find( key( it ) );
int valueDiff = value(it).compare( *otherValue );
if ( valueDiff != 0 )
return valueDiff;
}
return 0;
}
void
ValueInternalMap::makeBeginIterator( IteratorState &it ) const
{
it.map_ = const_cast<ValueInternalMap *>( this );
it.bucketIndex_ = 0;
it.itemIndex_ = 0;
it.link_ = buckets_;
}
void
ValueInternalMap::makeEndIterator( IteratorState &it ) const
{
it.map_ = const_cast<ValueInternalMap *>( this );
it.bucketIndex_ = bucketsSize_;
it.itemIndex_ = 0;
it.link_ = 0;
}
bool
ValueInternalMap::equals( const IteratorState &x, const IteratorState &other )
{
return x.map_ == other.map_
&& x.bucketIndex_ == other.bucketIndex_
&& x.link_ == other.link_
&& x.itemIndex_ == other.itemIndex_;
}
void
ValueInternalMap::incrementBucket( IteratorState &iterator )
{
++iterator.bucketIndex_;
JSON_ASSERT_MESSAGE( iterator.bucketIndex_ <= iterator.map_->bucketsSize_,
"ValueInternalMap::increment(): attempting to iterate beyond end." );
if ( iterator.bucketIndex_ == iterator.map_->bucketsSize_ )
iterator.link_ = 0;
else
iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]);
iterator.itemIndex_ = 0;
}
void
ValueInternalMap::increment( IteratorState &iterator )
{
JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterator using invalid iterator." );
++iterator.itemIndex_;
if ( iterator.itemIndex_ == ValueInternalLink::itemPerLink )
{
JSON_ASSERT_MESSAGE( iterator.link_ != 0,
"ValueInternalMap::increment(): attempting to iterate beyond end." );
iterator.link_ = iterator.link_->next_;
if ( iterator.link_ == 0 )
incrementBucket( iterator );
}
else if ( iterator.link_->items_[iterator.itemIndex_].isItemAvailable() )
{
incrementBucket( iterator );
}
}
void
ValueInternalMap::decrement( IteratorState &iterator )
{
if ( iterator.itemIndex_ == 0 )
{
JSON_ASSERT_MESSAGE( iterator.map_, "Attempting to iterate using invalid iterator." );
if ( iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_] )
{
JSON_ASSERT_MESSAGE( iterator.bucketIndex_ > 0, "Attempting to iterate beyond beginning." );
--(iterator.bucketIndex_);
}
iterator.link_ = iterator.link_->previous_;
iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1;
}
}
const char *
ValueInternalMap::key( const IteratorState &iterator )
{
JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
return iterator.link_->keys_[iterator.itemIndex_];
}
const char *
ValueInternalMap::key( const IteratorState &iterator, bool &isStatic )
{
JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic();
return iterator.link_->keys_[iterator.itemIndex_];
}
Value &
ValueInternalMap::value( const IteratorState &iterator )
{
JSON_ASSERT_MESSAGE( iterator.link_, "Attempting to iterate using invalid iterator." );
return iterator.link_->items_[iterator.itemIndex_];
}
int
ValueInternalMap::distance( const IteratorState &x, const IteratorState &y )
{
int offset = 0;
IteratorState it = x;
while ( !equals( it, y ) )
increment( it );
return offset;
}
} // namespace Json

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -6,16 +6,6 @@
#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED
# define LIB_JSONCPP_JSON_TOOL_H_INCLUDED
// Also support old flag NO_LOCALE_SUPPORT
#ifdef NO_LOCALE_SUPPORT
#define JSONCPP_NO_LOCALE_SUPPORT
#endif
#ifndef JSONCPP_NO_LOCALE_SUPPORT
#include <clocale>
#endif
/* This header provides common string manipulation support, such as UTF-8,
* portable conversion from/to string...
*
@@ -23,34 +13,35 @@
*/
namespace Json {
static char getDecimalPoint() {
#ifdef JSONCPP_NO_LOCALE_SUPPORT
return '\0';
#else
struct lconv* lc = localeconv();
return lc ? *(lc->decimal_point) : '\0';
#endif
}
/// Converts a unicode code-point to UTF-8.
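// Illustrative example (not in the original sources): U+20AC (euro sign) falls in the
// three-byte range, so it encodes to 0xE2 0x82 0xAC, i.e. 0xE0|(cp>>12),
// 0x80|((cp>>6)&0x3F), 0x80|(cp&0x3F).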
static inline JSONCPP_STRING codePointToUTF8(unsigned int cp) {
JSONCPP_STRING result;
static inline std::string
codePointToUTF8(unsigned int cp)
{
std::string result;
// based on description from http://en.wikipedia.org/wiki/UTF-8
if (cp <= 0x7f) {
if (cp <= 0x7f)
{
result.resize(1);
result[0] = static_cast<char>(cp);
} else if (cp <= 0x7FF) {
}
else if (cp <= 0x7FF)
{
result.resize(2);
result[1] = static_cast<char>(0x80 | (0x3f & cp));
result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
} else if (cp <= 0xFFFF) {
}
else if (cp <= 0xFFFF)
{
result.resize(3);
result[2] = static_cast<char>(0x80 | (0x3f & cp));
result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
result[0] = static_cast<char>(0xE0 | (0xf & (cp >> 12)));
} else if (cp <= 0x10FFFF) {
result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
}
else if (cp <= 0x10FFFF)
{
result.resize(4);
result[3] = static_cast<char>(0x80 | (0x3f & cp));
result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
@@ -61,55 +52,40 @@ static inline JSONCPP_STRING codePointToUTF8(unsigned int cp) {
return result;
}
/// Returns true if ch is a control character (in range [1,31]).
static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }
/// Returns true if ch is a control character (in range [0,32[).
static inline bool
isControlCharacter(char ch)
{
return ch > 0 && ch <= 0x1F;
}
enum {
/// Constant that specify the size of the buffer that must be passed to
/// uintToString.
/// Constant that specify the size of the buffer that must be passed to uintToString.
uintToStringBufferSize = 3*sizeof(LargestUInt)+1
};
// Defines a char buffer for use with uintToString().
typedef char UIntToStringBuffer[uintToStringBufferSize];
/** Converts an unsigned integer to string.
* @param value Unsigned integer to convert to string
* @param current Input/Output string buffer.
* Must have at least uintToStringBufferSize chars free.
*/
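// Illustrative usage (assumed): the buffer is filled backwards from its end, e.g.
//   UIntToStringBuffer buffer;
//   char *current = buffer + sizeof(buffer);
//   uintToString( 1234, current ); // current now points at the NUL-terminated "1234"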
static inline void uintToString(LargestUInt value, char*& current) {
static inline void
uintToString( LargestUInt value,
char *&current )
{
*--current = 0;
do {
*--current = static_cast<char>(value % 10U + static_cast<unsigned>('0'));
do
{
*--current = char(value % 10) + '0';
value /= 10;
} while (value != 0);
}
/** Change ',' to '.' everywhere in buffer.
*
* We had a sophisticated way, but it did not work in WinCE.
* @see https://github.com/open-source-parsers/jsoncpp/pull/9
*/
static inline void fixNumericLocale(char* begin, char* end) {
while (begin < end) {
if (*begin == ',') {
*begin = '.';
}
++begin;
}
}
static inline void fixNumericLocaleInput(char* begin, char* end) {
char decimalPoint = getDecimalPoint();
if (decimalPoint != '\0' && decimalPoint != '.') {
while (begin < end) {
if (*begin == '.') {
*begin = decimalPoint;
}
++begin;
}
}
while ( value != 0 );
}
} // namespace Json {

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -16,104 +16,200 @@ namespace Json {
// //////////////////////////////////////////////////////////////////
ValueIteratorBase::ValueIteratorBase()
: current_(), isNull_(true) {
#ifndef JSON_VALUE_USE_INTERNAL_MAP
: current_()
, isNull_( true )
{
}
#else
: isArray_( true )
, isNull_( true )
{
iterator_.array_ = ValueInternalArray::IteratorState();
}
#endif
#ifndef JSON_VALUE_USE_INTERNAL_MAP
ValueIteratorBase::ValueIteratorBase( const Value::ObjectValues::iterator &current )
: current_( current )
, isNull_( false )
{
}
#else
ValueIteratorBase::ValueIteratorBase( const ValueInternalArray::IteratorState &state )
: isArray_( true )
{
iterator_.array_ = state;
}
ValueIteratorBase::ValueIteratorBase(
const Value::ObjectValues::iterator& current)
: current_(current), isNull_(false) {}
Value& ValueIteratorBase::deref() const {
ValueIteratorBase::ValueIteratorBase( const ValueInternalMap::IteratorState &state )
: isArray_( false )
{
iterator_.map_ = state;
}
#endif
Value &
ValueIteratorBase::deref() const
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
return current_->second;
#else
if ( isArray_ )
return ValueInternalArray::dereference( iterator_.array_ );
return ValueInternalMap::value( iterator_.map_ );
#endif
}
void ValueIteratorBase::increment() {
void
ValueIteratorBase::increment()
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
++current_;
#else
if ( isArray_ )
ValueInternalArray::increment( iterator_.array_ );
ValueInternalMap::increment( iterator_.map_ );
#endif
}
void ValueIteratorBase::decrement() {
void
ValueIteratorBase::decrement()
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
--current_;
#else
if ( isArray_ )
ValueInternalArray::decrement( iterator_.array_ );
ValueInternalMap::decrement( iterator_.map_ );
#endif
}
ValueIteratorBase::difference_type
ValueIteratorBase::computeDistance(const SelfType& other) const {
ValueIteratorBase::computeDistance( const SelfType &other ) const
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
# ifdef JSON_USE_CPPTL_SMALLMAP
return other.current_ - current_;
return current_ - other.current_;
# else
// Iterators for null values are initialized using the default
// constructor, which initializes current_ to the default
// std::map::iterator. As begin() and end() are two instances
// of the default std::map::iterator, they cannot be compared.
// To allow this, we handle this comparison specifically.
if (isNull_ && other.isNull_) {
if ( isNull_ && other.isNull_ )
{
return 0;
}
// Usage of std::distance is not portable (does not compile with Sun Studio 12
// RogueWave STL,
// Usage of std::distance is not portable (does not compile with Sun Studio 12 RogueWave STL,
// which is the one used by default).
// Using a portable hand-made version for non random iterator instead:
// return difference_type( std::distance( current_, other.current_ ) );
difference_type myDistance = 0;
for (Value::ObjectValues::iterator it = current_; it != other.current_;
++it) {
for ( Value::ObjectValues::iterator it = current_; it != other.current_; ++it )
{
++myDistance;
}
return myDistance;
# endif
#else
if ( isArray_ )
return ValueInternalArray::distance( iterator_.array_, other.iterator_.array_ );
return ValueInternalMap::distance( iterator_.map_, other.iterator_.map_ );
#endif
}
bool ValueIteratorBase::isEqual(const SelfType& other) const {
if (isNull_) {
bool
ValueIteratorBase::isEqual( const SelfType &other ) const
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
if ( isNull_ )
{
return other.isNull_;
}
return current_ == other.current_;
#else
if ( isArray_ )
return ValueInternalArray::equals( iterator_.array_, other.iterator_.array_ );
return ValueInternalMap::equals( iterator_.map_, other.iterator_.map_ );
#endif
}
void ValueIteratorBase::copy(const SelfType& other) {
void
ValueIteratorBase::copy( const SelfType &other )
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
current_ = other.current_;
isNull_ = other.isNull_;
#else
if ( isArray_ )
iterator_.array_ = other.iterator_.array_;
iterator_.map_ = other.iterator_.map_;
#endif
}
Value ValueIteratorBase::key() const {
Value
ValueIteratorBase::key() const
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
const Value::CZString czstring = (*current_).first;
if (czstring.data()) {
if ( czstring.c_str() )
{
if ( czstring.isStaticString() )
return Value(StaticString(czstring.data()));
return Value(czstring.data(), czstring.data() + czstring.length());
return Value( StaticString( czstring.c_str() ) );
return Value( czstring.c_str() );
}
return Value( czstring.index() );
#else
if ( isArray_ )
return Value( ValueInternalArray::indexOf( iterator_.array_ ) );
bool isStatic;
const char *memberName = ValueInternalMap::key( iterator_.map_, isStatic );
if ( isStatic )
return Value( StaticString( memberName ) );
return Value( memberName );
#endif
}
UInt ValueIteratorBase::index() const {
UInt
ValueIteratorBase::index() const
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
const Value::CZString czstring = (*current_).first;
if (!czstring.data())
if ( !czstring.c_str() )
return czstring.index();
return Value::UInt( -1 );
#else
if ( isArray_ )
return Value::UInt( ValueInternalArray::indexOf( iterator_.array_ ) );
return Value::UInt( -1 );
#endif
}
JSONCPP_STRING ValueIteratorBase::name() const {
char const* keey;
char const* end;
keey = memberName(&end);
if (!keey) return JSONCPP_STRING();
return JSONCPP_STRING(keey, end);
const char *
ValueIteratorBase::memberName() const
{
#ifndef JSON_VALUE_USE_INTERNAL_MAP
const char *name = (*current_).first.c_str();
return name ? name : "";
#else
if ( !isArray_ )
return ValueInternalMap::key( iterator_.map_ );
return "";
#endif
}
char const* ValueIteratorBase::memberName() const {
const char* cname = (*current_).first.data();
return cname ? cname : "";
}
char const* ValueIteratorBase::memberName(char const** end) const {
const char* cname = (*current_).first.data();
if (!cname) {
*end = NULL;
return NULL;
}
*end = cname + (*current_).first.length();
return cname;
}
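These accessors are what drive ordinary traversal of a Json::Value object: key() returns the member key as a Value, while memberName() returns it as a C string. A small usage sketch, assuming only the public jsoncpp API; the local variable names are illustrative.

#include <json/json.h>
#include <cstdio>

int main() {
  Json::Value root;
  root["alpha"] = 1;
  root["beta"] = 2;

  // Walk the object members with ValueIterator, printing key and value.
  for (Json::Value::iterator it = root.begin(); it != root.end(); ++it) {
    std::printf("%s = %d\n", it.key().asCString(), (*it).asInt());
  }
  return 0;
}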
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
@@ -123,21 +219,36 @@ char const* ValueIteratorBase::memberName(char const** end) const {
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
ValueConstIterator::ValueConstIterator() {}
ValueConstIterator::ValueConstIterator()
{
}
ValueConstIterator::ValueConstIterator(
const Value::ObjectValues::iterator& current)
: ValueIteratorBase(current) {}
ValueConstIterator::ValueConstIterator(ValueIterator const& other)
: ValueIteratorBase(other) {}
#ifndef JSON_VALUE_USE_INTERNAL_MAP
ValueConstIterator::ValueConstIterator( const Value::ObjectValues::iterator &current )
: ValueIteratorBase( current )
{
}
#else
ValueConstIterator::ValueConstIterator( const ValueInternalArray::IteratorState &state )
: ValueIteratorBase( state )
{
}
ValueConstIterator& ValueConstIterator::
operator=(const ValueIteratorBase& other) {
ValueConstIterator::ValueConstIterator( const ValueInternalMap::IteratorState &state )
: ValueIteratorBase( state )
{
}
#endif
ValueConstIterator &
ValueConstIterator::operator =( const ValueIteratorBase &other )
{
copy( other );
return *this;
}
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
@@ -146,20 +257,41 @@ operator=(const ValueIteratorBase& other) {
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
ValueIterator::ValueIterator() {}
ValueIterator::ValueIterator()
{
}
#ifndef JSON_VALUE_USE_INTERNAL_MAP
ValueIterator::ValueIterator( const Value::ObjectValues::iterator &current )
: ValueIteratorBase(current) {}
: ValueIteratorBase( current )
{
}
#else
ValueIterator::ValueIterator( const ValueInternalArray::IteratorState &state )
: ValueIteratorBase( state )
{
}
ValueIterator::ValueIterator( const ValueInternalMap::IteratorState &state )
: ValueIteratorBase( state )
{
}
#endif
ValueIterator::ValueIterator( const ValueConstIterator &other )
: ValueIteratorBase(other) {
throwRuntimeError("ConstIterator to Iterator should never be allowed.");
: ValueIteratorBase( other )
{
}
ValueIterator::ValueIterator( const ValueIterator &other )
: ValueIteratorBase(other) {}
: ValueIteratorBase( other )
{
}
ValueIterator& ValueIterator::operator=(const SelfType& other) {
ValueIterator &
ValueIterator::operator =( const SelfType &other )
{
copy( other );
return *this;
}

File diff suppressed because it is too large

src/lib_json/sconscript Normal file
View File

@@ -0,0 +1,8 @@
Import( 'env buildLibrary' )
buildLibrary( env, Split( """
json_reader.cpp
json_value.cpp
json_writer.cpp
""" ),
'json' )

View File

@@ -1,20 +0,0 @@
// DO NOT EDIT. This file (and "version") is generated by CMake.
// Run CMake configure step to update it.
#ifndef JSON_VERSION_H_INCLUDED
# define JSON_VERSION_H_INCLUDED
# define JSONCPP_VERSION_STRING "@JSONCPP_VERSION@"
# define JSONCPP_VERSION_MAJOR @JSONCPP_VERSION_MAJOR@
# define JSONCPP_VERSION_MINOR @JSONCPP_VERSION_MINOR@
# define JSONCPP_VERSION_PATCH @JSONCPP_VERSION_PATCH@
# define JSONCPP_VERSION_QUALIFIER
# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
#ifdef JSONCPP_USING_SECURE_MEMORY
#undef JSONCPP_USING_SECURE_MEMORY
#endif
#define JSONCPP_USING_SECURE_MEMORY @JSONCPP_USE_SECURE_MEMORY@
// If non-zero, the library zeroes any memory that it has allocated before
// it frees its memory.
#endif // JSON_VERSION_H_INCLUDED
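Because JSONCPP_VERSION_HEXA packs major, minor, and patch into a single integer, downstream code can gate on a minimum library version at compile time. A hedged sketch under the assumption that the generated header is installed as json/version.h; the 0.6.0 threshold is only an example.

#include <json/version.h>
#include <cstdio>

int main() {
// Require at least version 0.6.0: (0 << 24) | (6 << 16) | (0 << 8)
#if JSONCPP_VERSION_HEXA >= ((0 << 24) | (6 << 16) | (0 << 8))
  std::printf("jsoncpp %s is new enough\n", JSONCPP_VERSION_STRING);
#else
#error "jsoncpp 0.6.0 or later is required"
#endif
  return 0;
}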

View File

@@ -1,38 +0,0 @@
# vim: et ts=4 sts=4 sw=4 tw=0
ADD_EXECUTABLE( jsoncpp_test
jsontest.cpp
jsontest.h
main.cpp
)
IF(BUILD_SHARED_LIBS)
ADD_DEFINITIONS( -DJSON_DLL )
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)
ELSE(BUILD_SHARED_LIBS)
TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib_static)
ENDIF()
# another way to solve issue #90
#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store)
# Run unit tests in post-build
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
IF(BUILD_SHARED_LIBS)
# First, copy the shared lib, for Microsoft.
# Then, run the test executable.
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:jsoncpp_lib> $<TARGET_FILE_DIR:jsoncpp_test>
COMMAND $<TARGET_FILE:jsoncpp_test>)
ELSE(BUILD_SHARED_LIBS)
# Just run the test executable.
ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
POST_BUILD
COMMAND $<TARGET_FILE:jsoncpp_test>)
ENDIF()
ENDIF()
SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -70,31 +70,42 @@
namespace JsonTest {
// class TestResult
// //////////////////////////////////////////////////////////////////
TestResult::TestResult()
: predicateId_(1), lastUsedPredicateId_(0), messageTarget_(0) {
: predicateId_( 1 )
, lastUsedPredicateId_( 0 )
, messageTarget_( 0 )
{
// The root predicate has id 0
rootPredicateNode_.id_ = 0;
rootPredicateNode_.next_ = 0;
predicateStackTail_ = &rootPredicateNode_;
}
void TestResult::setTestName(const JSONCPP_STRING& name) { name_ = name; }
void
TestResult::setTestName( const std::string &name )
{
name_ = name;
}
TestResult &
TestResult::addFailure(const char* file, unsigned int line, const char* expr) {
/// Walks the PredicateContext stack adding them to failures_ if not already
/// added.
TestResult::addFailure( const char *file, unsigned int line,
const char *expr )
{
/// Walks the PredicateContext stack adding them to failures_ if not already added.
unsigned int nestingLevel = 0;
PredicateContext *lastNode = rootPredicateNode_.next_;
for (; lastNode != 0; lastNode = lastNode->next_) {
for ( ; lastNode != 0; lastNode = lastNode->next_ )
{
if ( lastNode->id_ > lastUsedPredicateId_ ) // new PredicateContext
{
lastUsedPredicateId_ = lastNode->id_;
addFailureInfo(
lastNode->file_, lastNode->line_, lastNode->expr_, nestingLevel);
addFailureInfo( lastNode->file_, lastNode->line_, lastNode->expr_,
nestingLevel );
// Link the PredicateContext to the failure for message target when
// popping the PredicateContext.
lastNode->failure_ = &( failures_.back() );
@@ -108,28 +119,35 @@ TestResult::addFailure(const char* file, unsigned int line, const char* expr) {
return *this;
}
void TestResult::addFailureInfo(const char* file,
unsigned int line,
const char* expr,
unsigned int nestingLevel) {
void
TestResult::addFailureInfo( const char *file, unsigned int line,
const char *expr, unsigned int nestingLevel )
{
Failure failure;
failure.file_ = file;
failure.line_ = line;
if (expr) {
if ( expr )
{
failure.expr_ = expr;
}
failure.nestingLevel_ = nestingLevel;
failures_.push_back( failure );
}
TestResult& TestResult::popPredicateContext() {
TestResult &
TestResult::popPredicateContext()
{
PredicateContext *lastNode = &rootPredicateNode_;
while (lastNode->next_ != 0 && lastNode->next_->next_ != 0) {
while ( lastNode->next_ != 0 && lastNode->next_->next_ != 0 )
{
lastNode = lastNode->next_;
}
// Set message target to popped failure
PredicateContext *tail = lastNode->next_;
if (tail != 0 && tail->failure_ != 0) {
if ( tail != 0 && tail->failure_ != 0 )
{
messageTarget_ = tail->failure_;
}
// Remove tail from list
@@ -138,54 +156,79 @@ TestResult& TestResult::popPredicateContext() {
return *this;
}
bool TestResult::failed() const { return !failures_.empty(); }
unsigned int TestResult::getAssertionNestingLevel() const {
bool
TestResult::failed() const
{
return !failures_.empty();
}
unsigned int
TestResult::getAssertionNestingLevel() const
{
unsigned int level = 0;
const PredicateContext *lastNode = &rootPredicateNode_;
while (lastNode->next_ != 0) {
while ( lastNode->next_ != 0 )
{
lastNode = lastNode->next_;
++level;
}
return level;
}
void TestResult::printFailure(bool printTestName) const {
if (failures_.empty()) {
void
TestResult::printFailure( bool printTestName ) const
{
if ( failures_.empty() )
{
return;
}
if (printTestName) {
if ( printTestName )
{
printf( "* Detail of %s test failure:\n", name_.c_str() );
}
// Print in reverse to display the callstack in the right order
Failures::const_iterator itEnd = failures_.end();
for (Failures::const_iterator it = failures_.begin(); it != itEnd; ++it) {
for ( Failures::const_iterator it = failures_.begin(); it != itEnd; ++it )
{
const Failure &failure = *it;
JSONCPP_STRING indent(failure.nestingLevel_ * 2, ' ');
if (failure.file_) {
std::string indent( failure.nestingLevel_ * 2, ' ' );
if ( failure.file_ )
{
printf( "%s%s(%d): ", indent.c_str(), failure.file_, failure.line_ );
}
if (!failure.expr_.empty()) {
if ( !failure.expr_.empty() )
{
printf( "%s\n", failure.expr_.c_str() );
} else if (failure.file_) {
}
else if ( failure.file_ )
{
printf( "\n" );
}
if (!failure.message_.empty()) {
JSONCPP_STRING reindented = indentText(failure.message_, indent + " ");
if ( !failure.message_.empty() )
{
std::string reindented = indentText( failure.message_, indent + " " );
printf( "%s\n", reindented.c_str() );
}
}
}
JSONCPP_STRING TestResult::indentText(const JSONCPP_STRING& text,
const JSONCPP_STRING& indent) {
JSONCPP_STRING reindented;
JSONCPP_STRING::size_type lastIndex = 0;
while (lastIndex < text.size()) {
JSONCPP_STRING::size_type nextIndex = text.find('\n', lastIndex);
if (nextIndex == JSONCPP_STRING::npos) {
std::string
TestResult::indentText( const std::string &text,
const std::string &indent )
{
std::string reindented;
std::string::size_type lastIndex = 0;
while ( lastIndex < text.size() )
{
std::string::size_type nextIndex = text.find( '\n', lastIndex );
if ( nextIndex == std::string::npos )
{
nextIndex = text.size() - 1;
}
reindented += indent;
@@ -195,119 +238,203 @@ JSONCPP_STRING TestResult::indentText(const JSONCPP_STRING& text,
return reindented;
}
TestResult& TestResult::addToLastFailure(const JSONCPP_STRING& message) {
if (messageTarget_ != 0) {
TestResult &
TestResult::addToLastFailure( const std::string &message )
{
if ( messageTarget_ != 0 )
{
messageTarget_->message_ += message;
}
return *this;
}
TestResult& TestResult::operator<<(Json::Int64 value) {
return addToLastFailure(Json::valueToString(value));
}
TestResult& TestResult::operator<<(Json::UInt64 value) {
return addToLastFailure(Json::valueToString(value));
}
TestResult& TestResult::operator<<(bool value) {
TestResult &
TestResult::operator << ( bool value )
{
return addToLastFailure( value ? "true" : "false" );
}
TestResult &
TestResult::operator << ( int value )
{
char buffer[32];
sprintf( buffer, "%d", value );
return addToLastFailure( buffer );
}
TestResult &
TestResult::operator << ( unsigned int value )
{
char buffer[32];
sprintf( buffer, "%u", value );
return addToLastFailure( buffer );
}
TestResult &
TestResult::operator << ( double value )
{
char buffer[32];
sprintf( buffer, "%16g", value );
return addToLastFailure( buffer );
}
TestResult &
TestResult::operator << ( const char *value )
{
return addToLastFailure( value ? value
: "<NULL>" );
}
TestResult &
TestResult::operator << ( const std::string &value )
{
return addToLastFailure( value );
}
// class TestCase
// //////////////////////////////////////////////////////////////////
TestCase::TestCase() : result_(0) {}
TestCase::TestCase()
: result_( 0 )
{
}
TestCase::~TestCase() {}
void TestCase::run(TestResult& result) {
TestCase::~TestCase()
{
}
void
TestCase::run( TestResult &result )
{
result_ = &result;
runTestCase();
}
// class Runner
// //////////////////////////////////////////////////////////////////
Runner::Runner() {}
Runner::Runner()
{
}
Runner& Runner::add(TestCaseFactory factory) {
Runner &
Runner::add( TestCaseFactory factory )
{
tests_.push_back( factory );
return *this;
}
unsigned int Runner::testCount() const {
unsigned int
Runner::testCount() const
{
return static_cast<unsigned int>( tests_.size() );
}
JSONCPP_STRING Runner::testNameAt(unsigned int index) const {
std::string
Runner::testNameAt( unsigned int index ) const
{
TestCase *test = tests_[index]();
JSONCPP_STRING name = test->testName();
std::string name = test->testName();
delete test;
return name;
}
void Runner::runTestAt(unsigned int index, TestResult& result) const {
void
Runner::runTestAt( unsigned int index, TestResult &result ) const
{
TestCase *test = tests_[index]();
result.setTestName( test->testName() );
printf( "Testing %s: ", test->testName() );
fflush( stdout );
#if JSON_USE_EXCEPTION
try {
try
{
#endif // if JSON_USE_EXCEPTION
test->run( result );
#if JSON_USE_EXCEPTION
}
catch (const std::exception& e) {
result.addFailure(__FILE__, __LINE__, "Unexpected exception caught:")
<< e.what();
catch ( const std::exception &e )
{
result.addFailure( __FILE__, __LINE__,
"Unexpected exception caugth:" ) << e.what();
}
#endif // if JSON_USE_EXCEPTION
delete test;
const char* status = result.failed() ? "FAILED" : "OK";
const char *status = result.failed() ? "FAILED"
: "OK";
printf( "%s\n", status );
fflush( stdout );
}
bool Runner::runAllTest(bool printSummary) const {
bool
Runner::runAllTest( bool printSummary ) const
{
unsigned int count = testCount();
std::deque<TestResult> failures;
for (unsigned int index = 0; index < count; ++index) {
for ( unsigned int index = 0; index < count; ++index )
{
TestResult result;
runTestAt( index, result );
if (result.failed()) {
if ( result.failed() )
{
failures.push_back( result );
}
}
if (failures.empty()) {
if (printSummary) {
if ( failures.empty() )
{
if ( printSummary )
{
printf( "All %d tests passed\n", count );
}
return true;
} else {
for (unsigned int index = 0; index < failures.size(); ++index) {
}
else
{
for ( unsigned int index = 0; index < failures.size(); ++index )
{
TestResult &result = failures[index];
result.printFailure( count > 1 );
}
if (printSummary) {
if ( printSummary )
{
unsigned int failedCount = static_cast<unsigned int>( failures.size() );
unsigned int passedCount = count - failedCount;
printf("%d/%d tests passed (%d failure(s))\n",
passedCount,
count,
failedCount);
printf( "%d/%d tests passed (%d failure(s))\n", passedCount, count, failedCount );
}
return false;
}
}
bool Runner::testIndex(const JSONCPP_STRING& testName,
unsigned int& indexOut) const {
bool
Runner::testIndex( const std::string &testName,
unsigned int &indexOut ) const
{
unsigned int count = testCount();
for (unsigned int index = 0; index < count; ++index) {
if (testNameAt(index) == testName) {
for ( unsigned int index = 0; index < count; ++index )
{
if ( testNameAt(index) == testName )
{
indexOut = index;
return true;
}
@@ -315,67 +442,97 @@ bool Runner::testIndex(const JSONCPP_STRING& testName,
return false;
}
void Runner::listTests() const {
void
Runner::listTests() const
{
unsigned int count = testCount();
for (unsigned int index = 0; index < count; ++index) {
for ( unsigned int index = 0; index < count; ++index )
{
printf( "%s\n", testNameAt( index ).c_str() );
}
}
int Runner::runCommandLine(int argc, const char* argv[]) const {
// typedef std::deque<JSONCPP_STRING> TestNames;
int
Runner::runCommandLine( int argc, const char *argv[] ) const
{
typedef std::deque<std::string> TestNames;
Runner subrunner;
for (int index = 1; index < argc; ++index) {
JSONCPP_STRING opt = argv[index];
if (opt == "--list-tests") {
for ( int index = 1; index < argc; ++index )
{
std::string opt = argv[index];
if ( opt == "--list-tests" )
{
listTests();
return 0;
} else if (opt == "--test-auto") {
}
else if ( opt == "--test-auto" )
{
preventDialogOnCrash();
} else if (opt == "--test") {
}
else if ( opt == "--test" )
{
++index;
if (index < argc) {
if ( index < argc )
{
unsigned int testNameIndex;
if (testIndex(argv[index], testNameIndex)) {
if ( testIndex( argv[index], testNameIndex ) )
{
subrunner.add( tests_[testNameIndex] );
} else {
}
else
{
fprintf( stderr, "Test '%s' does not exist!\n", argv[index] );
return 2;
}
} else {
}
else
{
printUsage( argv[0] );
return 2;
}
} else {
}
else
{
printUsage( argv[0] );
return 2;
}
}
bool succeeded;
if (subrunner.testCount() > 0) {
if ( subrunner.testCount() > 0 )
{
succeeded = subrunner.runAllTest( subrunner.testCount() > 1 );
} else {
}
else
{
succeeded = runAllTest( true );
}
return succeeded ? 0 : 1;
return succeeded ? 0
: 1;
}
#if defined(_MSC_VER) && defined(_DEBUG)
#if defined(_MSC_VER)
// Hook MSVCRT assertions to prevent dialog from appearing
static int
msvcrtSilentReportHook(int reportType, char* message, int* /*returnValue*/) {
msvcrtSilentReportHook( int reportType, char *message, int *returnValue )
{
// The default CRT handling of error and assertion is to display
// an error dialog to the user.
// Instead, when an error or an assertion occurs, we force the
// application to terminate using abort() after display
// the message on stderr.
if (reportType == _CRT_ERROR || reportType == _CRT_ASSERT) {
if ( reportType == _CRT_ERROR ||
reportType == _CRT_ASSERT )
{
// calling abort() causes the ReportHook to be called.
// The following is used to detect this case and lets the
// error handler fall back on its default behaviour
// (display a warning message).
static volatile bool isAborting = false;
if (isAborting) {
if ( isAborting )
{
return TRUE;
}
isAborting = true;
@@ -389,12 +546,13 @@ msvcrtSilentReportHook(int reportType, char* message, int* /*returnValue*/) {
}
#endif // if defined(_MSC_VER)
void Runner::preventDialogOnCrash() {
#if defined(_MSC_VER) && defined(_DEBUG)
void
Runner::preventDialogOnCrash()
{
#if defined(_MSC_VER)
// Install a hook to prevent MSVCRT error and assertion from
// popping a dialog.
// This function is a NO-OP in release configuration
// (which causes a warning since msvcrtSilentReportHook is not referenced).
_CrtSetReportHook( &msvcrtSilentReportHook );
#endif // if defined(_MSC_VER)
@@ -404,13 +562,17 @@ void Runner::preventDialogOnCrash() {
#if defined(_WIN32)
// Prevents the system from popping a dialog for debugging if the
// application fails due to invalid memory access.
SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX |
SEM_NOOPENFILEERRORBOX);
SetErrorMode( SEM_FAILCRITICALERRORS
| SEM_NOGPFAULTERRORBOX
| SEM_NOOPENFILEERRORBOX );
#endif // if defined(_WIN32)
}
void Runner::printUsage(const char* appName) {
printf("Usage: %s [options]\n"
void
Runner::printUsage( const char *appName )
{
printf(
"Usage: %s [options]\n"
"\n"
"If --test is not specified, then all the test cases be run.\n"
"\n"
@@ -419,34 +581,22 @@ void Runner::printUsage(const char* appName) {
" output and exit.\n"
"--test TESTNAME: executes the test case with the specified name.\n"
" May be repeated.\n"
"--test-auto: prevent dialog prompting for debugging on crash.\n",
appName);
"--test-auto: prevent dialog prompting for debugging on crash.\n"
, appName );
}
// Assertion functions
// //////////////////////////////////////////////////////////////////
JSONCPP_STRING ToJsonString(const char* toConvert) {
return JSONCPP_STRING(toConvert);
}
JSONCPP_STRING ToJsonString(JSONCPP_STRING in) {
return in;
}
#if JSONCPP_USING_SECURE_MEMORY
JSONCPP_STRING ToJsonString(std::string in) {
return JSONCPP_STRING(in.data(), in.data() + in.length());
}
#endif
TestResult& checkStringEqual(TestResult& result,
const JSONCPP_STRING& expected,
const JSONCPP_STRING& actual,
const char* file,
unsigned int line,
const char* expr) {
if (expected != actual) {
TestResult &
checkStringEqual( TestResult &result,
const std::string &expected, const std::string &actual,
const char *file, unsigned int line, const char *expr )
{
if ( expected != actual )
{
result.addFailure( file, line, expr );
result << "Expected: '" << expected << "'\n";
result << "Actual : '" << actual << "'";
@@ -454,4 +604,5 @@ TestResult& checkStringEqual(TestResult& result,
return result;
}
} // namespace JsonTest

View File

@@ -1,4 +1,4 @@
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
@@ -7,11 +7,8 @@
# define JSONTEST_H_INCLUDED
# include <json/config.h>
#include <json/value.h>
#include <json/writer.h>
# include <stdio.h>
# include <deque>
#include <sstream>
# include <string>
// //////////////////////////////////////////////////////////////////
@@ -20,6 +17,8 @@
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
/** \brief Unit testing framework.
* \warning: all assertions are non-aborting, test case execution will continue
 * even if an assertion fails.
@@ -28,19 +27,23 @@
*/
namespace JsonTest {
class Failure {
class Failure
{
public:
const char *file_;
unsigned int line_;
JSONCPP_STRING expr_;
JSONCPP_STRING message_;
std::string expr_;
std::string message_;
unsigned int nestingLevel_;
};
/// Context used to create the assertion callstack on failure.
/// Must be a POD to allow inline initialisation without stepping
/// into the debugger.
struct PredicateContext {
struct PredicateContext
{
typedef unsigned int Id;
Id id_;
const char *file_;
@@ -52,7 +55,8 @@ struct PredicateContext {
Failure *failure_;
};
class TestResult {
class TestResult
{
public:
TestResult();
@@ -65,11 +69,11 @@ public:
/// \internal Implementation detail for predicate macros
PredicateContext *predicateStackTail_;
void setTestName(const JSONCPP_STRING& name);
void setTestName( const std::string &name );
/// Adds an assertion failure.
TestResult&
addFailure(const char* file, unsigned int line, const char* expr = 0);
TestResult &addFailure( const char *file, unsigned int line,
const char *expr = 0 );
/// Removes the last PredicateContext added to the predicate stack
/// chained list.
@@ -80,42 +84,34 @@ public:
void printFailure( bool printTestName ) const;
// Generic operator that will work with anything ostream can deal with.
template <typename T> TestResult& operator<<(const T& value) {
JSONCPP_OSTRINGSTREAM oss;
oss.precision(16);
oss.setf(std::ios_base::floatfield);
oss << value;
return addToLastFailure(oss.str());
}
// Specialized versions.
TestResult &operator << ( bool value );
// std::ostream does not support 64-bit integers on all STL implementations
TestResult& operator<<(Json::Int64 value);
TestResult& operator<<(Json::UInt64 value);
TestResult &operator << ( int value );
TestResult &operator << ( unsigned int value );
TestResult &operator << ( double value );
TestResult &operator << ( const char *value );
TestResult &operator << ( const std::string &value );
private:
TestResult& addToLastFailure(const JSONCPP_STRING& message);
TestResult &addToLastFailure( const std::string &message );
unsigned int getAssertionNestingLevel() const;
/// Adds a failure or a predicate context
void addFailureInfo(const char* file,
unsigned int line,
const char* expr,
unsigned int nestingLevel);
static JSONCPP_STRING indentText(const JSONCPP_STRING& text,
const JSONCPP_STRING& indent);
void addFailureInfo( const char *file, unsigned int line,
const char *expr, unsigned int nestingLevel );
static std::string indentText( const std::string &text,
const std::string &indent );
typedef std::deque<Failure> Failures;
Failures failures_;
JSONCPP_STRING name_;
std::string name_;
PredicateContext rootPredicateNode_;
PredicateContext::Id lastUsedPredicateId_;
/// Failure which is the target of the messages added using operator <<
Failure *messageTarget_;
};
class TestCase {
class TestCase
{
public:
TestCase();
@@ -135,7 +131,8 @@ private:
/// Function pointer type for TestCase factory
typedef TestCase *(*TestCaseFactory)();
class Runner {
class Runner
{
public:
Runner();
@@ -155,7 +152,7 @@ public:
unsigned int testCount() const;
/// Returns the name of the test case at the specified index
JSONCPP_STRING testNameAt(unsigned int index) const;
std::string testNameAt( unsigned int index ) const;
/// Runs the test case at the specified index using the specified TestResult
void runTestAt( unsigned int index, TestResult &result ) const;
@@ -168,7 +165,7 @@ private: // prevents copy construction and assignment
private:
void listTests() const;
bool testIndex(const JSONCPP_STRING& testName, unsigned int& index) const;
bool testIndex( const std::string &testName, unsigned int &index ) const;
static void preventDialogOnCrash();
private:
@@ -176,103 +173,79 @@ private:
Factories tests_;
};
template <typename T, typename U>
TestResult& checkEqual(TestResult& result,
T expected,
U actual,
const char* file,
unsigned int line,
const char* expr) {
if (static_cast<U>(expected) != actual) {
template<typename T>
TestResult &
checkEqual( TestResult &result, const T &expected, const T &actual,
const char *file, unsigned int line, const char *expr )
{
if ( expected != actual )
{
result.addFailure( file, line, expr );
result << "Expected: " << static_cast<U>(expected) << "\n";
result << "Expected: " << expected << "\n";
result << "Actual : " << actual;
}
return result;
}
JSONCPP_STRING ToJsonString(const char* toConvert);
JSONCPP_STRING ToJsonString(JSONCPP_STRING in);
#if JSONCPP_USING_SECURE_MEMORY
JSONCPP_STRING ToJsonString(std::string in);
#endif
TestResult& checkStringEqual(TestResult& result,
const JSONCPP_STRING& expected,
const JSONCPP_STRING& actual,
const char* file,
unsigned int line,
const char* expr);
TestResult &
checkStringEqual( TestResult &result,
const std::string &expected, const std::string &actual,
const char *file, unsigned int line, const char *expr );
} // namespace JsonTest
/// \brief Asserts that the given expression is true.
/// JSONTEST_ASSERT( x == y ) << "x=" << x << ", y=" << y;
/// JSONTEST_ASSERT( x == y );
#define JSONTEST_ASSERT( expr ) \
if (expr) { \
} else \
if ( expr ) \
{ \
} \
else \
result_->addFailure( __FILE__, __LINE__, #expr )
/// \brief Asserts that the given predicate is true.
/// The predicate may do other assertions and be a member function of the
/// fixture.
/// The predicate may do other assertions and be a member function of the fixture.
#define JSONTEST_ASSERT_PRED( expr ) \
{ \
JsonTest::PredicateContext _minitest_Context = { \
result_->predicateId_, __FILE__, __LINE__, #expr, NULL, NULL \
}; \
result_->predicateId_, __FILE__, __LINE__, #expr }; \
result_->predicateStackTail_->next_ = &_minitest_Context; \
result_->predicateId_ += 1; \
result_->predicateStackTail_ = &_minitest_Context; \
(expr); \
result_->popPredicateContext(); \
}
} \
*result_
/// \brief Asserts that two values are equals.
#define JSONTEST_ASSERT_EQUAL( expected, actual ) \
JsonTest::checkEqual(*result_, \
expected, \
actual, \
__FILE__, \
__LINE__, \
JsonTest::checkEqual( *result_, expected, actual, \
__FILE__, __LINE__, \
#expected " == " #actual )
/// \brief Asserts that two values are equals.
#define JSONTEST_ASSERT_STRING_EQUAL( expected, actual ) \
JsonTest::checkStringEqual( *result_, \
JsonTest::ToJsonString(expected), \
JsonTest::ToJsonString(actual), \
__FILE__, \
__LINE__, \
std::string(expected), std::string(actual), \
#expected " == " #actual )
/// \brief Asserts that a given expression throws an exception
#define JSONTEST_ASSERT_THROWS(expr) \
{ \
bool _threw = false; \
try { \
expr; \
} \
catch (...) { \
_threw = true; \
} \
if (!_threw) \
result_->addFailure( \
__FILE__, __LINE__, "expected exception thrown: " #expr); \
}
/// \brief Begin a fixture test case.
#define JSONTEST_FIXTURE( FixtureType, name ) \
class Test##FixtureType##name : public FixtureType { \
class Test##FixtureType##name : public FixtureType \
{ \
public: \
static JsonTest::TestCase* factory() { \
static JsonTest::TestCase *factory() \
{ \
return new Test##FixtureType##name(); \
} \
\
public: /* overidden from TestCase */ \
const char* testName() const JSONCPP_OVERRIDE { return #FixtureType "/" #name; } \
void runTestCase() JSONCPP_OVERRIDE; \
virtual const char *testName() const \
{ \
return #FixtureType "/" #name; \
} \
virtual void runTestCase(); \
}; \
\
void Test##FixtureType##name::runTestCase()
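Putting the pieces together, a test file built on this framework declares a fixture derived from JsonTest::TestCase, defines cases with JSONTEST_FIXTURE, registers the generated factories with a Runner, and hands argc/argv to runCommandLine. A minimal sketch under those assumptions; the fixture and test names are made up, and registration is done directly through Runner::add rather than any helper macro.

#include "jsontest.h"

// A trivial fixture: TestCase supplies the result_ member used by the macros.
struct MathTest : JsonTest::TestCase {};

JSONTEST_FIXTURE(MathTest, addition) {
  JSONTEST_ASSERT(1 + 1 == 2);
  JSONTEST_ASSERT_EQUAL(4, 2 + 2) << "unexpected sum";
}

int main(int argc, const char* argv[]) {
  JsonTest::Runner runner;
  // TestMathTestaddition is the class name generated by JSONTEST_FIXTURE.
  runner.add(&TestMathTestaddition::factory);
  return runner.runCommandLine(argc, argv);
}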

File diff suppressed because it is too large

View File

@@ -0,0 +1,10 @@
Import( 'env_testing buildUnitTests' )
buildUnitTests( env_testing, Split( """
main.cpp
jsontest.cpp
""" ),
'test_lib_json' )
# For 'check' to work, 'libs' must be built first.
env_testing.Depends('test_lib_json', '#libs')

View File

@@ -1,10 +1,4 @@
# Copyright 2007 Baptiste Lepilleur and The JsonCpp Authors
# Distributed under MIT license, or public domain if desired and
# recognized in your jurisdiction.
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
"""Removes all files created during testing."""
# removes all files created during testing
import glob
import os

View File

@@ -1 +0,0 @@
[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@@ -1,3 +1,2 @@
// C++ style comment
.=null

View File

@@ -1,4 +1,2 @@
/* C style comment
*/
.=null

View File

@@ -1,4 +0,0 @@
// Comment for array
.=[]
// Comment within array
.[0]="one-element"

View File

@@ -1,5 +0,0 @@
// Comment for array
[
// Comment within array
"one-element"
]

View File

@@ -1,7 +1,5 @@
.={}
// Comment for array
.test=[]
// Comment within array
.test[0]={}
.test[0].a="aaa"
.test[1]={}

View File

@@ -1,8 +1,6 @@
{
"test":
// Comment for array
[
// Comment within array
{ "a" : "aaa" }, // Comment for a
{ "b" : "bbb" }, // Comment for b
{ "c" : "ccc" } // Comment for c

View File

@@ -1,23 +0,0 @@
.={}
/* C-style comment
C-style-2 comment */
.c-test={}
.c-test.a=1
/* Internal comment c-style */
.c-test.b=2
// C++-style comment
.cpp-test={}
// Multiline comment cpp-style
// Second line
.cpp-test.c=3
// Comment before double
.cpp-test.d=4.1
// Comment before string
.cpp-test.e="e-string"
// Comment before true
.cpp-test.f=true
// Comment before false
.cpp-test.g=false
// Comment before null
.cpp-test.h=null

View File

@@ -1,26 +0,0 @@
{
/* C-style comment
C-style-2 comment */
"c-test" : {
"a" : 1,
/* Internal comment c-style */
"b" : 2
},
// C++-style comment
"cpp-test" : {
// Multiline comment cpp-style
// Second line
"c" : 3,
// Comment before double
"d" : 4.1,
// Comment before string
"e" : "e-string",
// Comment before true
"f" : true,
// Comment before false
"g" : false,
// Comment before null
"h" : null
}
}

View File

@@ -1,2 +1 @@
// Max signed integer
.=2147483647

View File

@@ -1,2 +1 @@
// Min signed integer
.=-2147483648

View File

@@ -1,2 +1 @@
// Max unsigned integer
.=4294967295

View File

@@ -1,3 +1,2 @@
// Min unsigned integer
.=0

View File

@@ -1,11 +1,3 @@
/* A comment
at the beginning of the file.
*/
.={}
.first=1
/* Comment before 'second'
*/
.second=2
/* A comment at
the end of the file.
*/

View File

@@ -1,3 +1,2 @@
// 2^33 => out of integer range, switch to double
.=8589934592

View File

@@ -1,3 +1,2 @@
// -2^32 => out of signed integer range, switch to double
.=-4294967295

View File

@@ -1,3 +1,2 @@
// -2^32 => out of signed integer range, switch to double
.=-4294967295

View File

@@ -1,3 +1,2 @@
// 1.2345678
.=1.2345678

View File

@@ -1,4 +1,3 @@
// 1234567.8
.=1234567.8

View File

@@ -1,4 +1,3 @@
// -1.2345678
.=-1.2345678

Some files were not shown because too many files have changed in this diff.