mirror of
https://github.com/open-source-parsers/jsoncpp.git
synced 2025-10-15 07:14:45 +02:00
Compare commits
408 Commits
(Commit table: 408 entries, listed from 6edfae4890 through ec727e2f6b; the avatar, author, and date columns are empty in this mirror view, so only the change summary below is reproduced.)
11  .gitattributes (vendored, new file)
@@ -0,0 +1,11 @@
+* text=auto
+*.h text
+*.cpp text
+*.json text
+*.in text
+*.sh eol=lf
+*.bat eol=crlf
+*.vcproj eol=crlf
+*.vcxproj eol=crlf
+*.sln eol=crlf
+devtools/agent_vm* eol=crlf
23  .gitignore (vendored)
@@ -10,4 +10,27 @@
 /libs/
 /doc/doxyfile
 /dist/
+#/version
 #/include/json/version.h
+
+# MSVC project files:
+*.sln
+*.vcxproj
+*.filters
+*.user
+*.sdf
+*.opensdf
+*.suo
+
+# MSVC build files:
+*.lib
+*.obj
+*.tlog/
+*.pdb
+
+# CMake-generated files:
+CMakeFiles/
+CTestTestFile.cmake
+cmake_install.cmake
+pkg-config/jsoncpp.pc
+jsoncpp_lib_static.dir/
38  .travis.yml
@@ -2,17 +2,41 @@
 # http://about.travis-ci.org/docs/user/build-configuration/
 # This file can be validated on:
 # http://lint.travis-ci.org/
-before_install: sudo apt-get install cmake
+# See also
+# http://stackoverflow.com/questions/22111549/travis-ci-with-clang-3-4-and-c11/30925448#30925448
+# to allow C++11, though we are not yet building with -std=c++11
+
+install:
+# /usr/bin/gcc is 4.6 always, but gcc-X.Y is available.
+#- if [ "$CXX" = "g++" ]; then export CXX="g++-4.8" CC="gcc-4.8"; fi
+- if [ "$CXX" = "g++" ]; then export CXX="g++-4.6" CC="gcc-4.6"; fi
+# /usr/bin/clang is our version already, and clang-X.Y does not exist.
+#- if [ "$CXX" = "clang++" ]; then export CXX="clang++-3.0" CC="clang-3.0"; fi
+- echo ${PATH}
+- ls /usr/local
+- export PATH=/usr/bin:${PATH}
+- echo ${CXX}
+- ${CXX} --version
+addons:
+apt:
+sources:
+- ubuntu-toolchain-r-test
+packages:
+- gcc-4.6
+- g++-4.6
+- clang
+- valgrind
+os:
+- linux
 language: cpp
 compiler:
 - gcc
 - clang
-script: cmake -DJSONCPP_LIB_BUILD_SHARED=$SHARED_LIBRARY -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE . && make
+script: ./travis.sh
 env:
 matrix:
-- SHARED_LIBRARY=ON BUILD_TYPE=release VERBOSE_MAKE=false
-- SHARED_LIBRARY=OFF BUILD_TYPE=release VERBOSE_MAKE=false
-- SHARED_LIBRARY=OFF BUILD_TYPE=debug VERBOSE VERBOSE_MAKE=true
+- SHARED_LIB=ON STATIC_LIB=ON CMAKE_PKG=ON BUILD_TYPE=release VERBOSE_MAKE=false
+- SHARED_LIB=OFF STATIC_LIB=ON CMAKE_PKG=OFF BUILD_TYPE=debug VERBOSE_MAKE=true VERBOSE
 notifications:
-email:
-- aaronjjacobs@gmail.com
+email: false
+sudo: false
@@ -1,12 +1,18 @@
+# vim: et ts=4 sts=4 sw=4 tw=0
+
 CMAKE_MINIMUM_REQUIRED(VERSION 2.8.5)
 PROJECT(jsoncpp)
 ENABLE_TESTING()
 
-OPTION(JSONCPP_WITH_TESTS "Compile and run JsonCpp test executables" ON)
+OPTION(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON)
 OPTION(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
 OPTION(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occurs" OFF)
 OPTION(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON)
 OPTION(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" OFF)
+OPTION(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF)
+OPTION(BUILD_STATIC_LIBS "Build jsoncpp_lib static library." ON)
+
+include(GNUInstallDirs)
 
 # Ensures that CMAKE_BUILD_TYPE is visible in cmake-gui on Unix
 IF(NOT WIN32)
@@ -14,18 +20,19 @@ IF(NOT WIN32)
 SET(CMAKE_BUILD_TYPE Release CACHE STRING
 "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage."
 FORCE)
-ENDIF(NOT CMAKE_BUILD_TYPE)
+ENDIF()
-ENDIF(NOT WIN32)
+ENDIF()
 
+SET(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build")
 SET(LIB_SUFFIX "" CACHE STRING "Optional arch-dependent suffix for the library installation directory")
 
 SET(RUNTIME_INSTALL_DIR bin
 CACHE PATH "Install dir for executables and dlls")
-SET(ARCHIVE_INSTALL_DIR lib${LIB_SUFFIX}
+SET(ARCHIVE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
 CACHE PATH "Install dir for static libraries")
-SET(LIBRARY_INSTALL_DIR lib${LIB_SUFFIX}
+SET(LIBRARY_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}
 CACHE PATH "Install dir for shared libraries")
-SET(INCLUDE_INSTALL_DIR include
+SET(INCLUDE_INSTALL_DIR ${CMAKE_INSTALL_PREFIX}/include/jsoncpp
 CACHE PATH "Install dir for headers")
 SET(PACKAGE_INSTALL_DIR lib${LIB_SUFFIX}/cmake
 CACHE PATH "Install dir for cmake package config files")
@@ -34,7 +41,7 @@ MARK_AS_ADVANCED( RUNTIME_INSTALL_DIR ARCHIVE_INSTALL_DIR INCLUDE_INSTALL_DIR PA
 # Set variable named ${VAR_NAME} to value ${VALUE}
 FUNCTION(set_using_dynamic_name VAR_NAME VALUE)
 SET( "${VAR_NAME}" "${VALUE}" PARENT_SCOPE)
-ENDFUNCTION(set_using_dynamic_name)
+ENDFUNCTION()
 
 # Extract major, minor, patch from version text
 # Parse a version string "X.Y.Z" and outputs
@@ -50,28 +57,37 @@ MACRO(jsoncpp_parse_version VERSION_TEXT OUPUT_PREFIX)
 set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" TRUE )
 ELSE( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
 set_using_dynamic_name( "${OUPUT_PREFIX}_FOUND" FALSE )
-ENDIF( ${VERSION_TEXT} MATCHES ${VERSION_REGEX} )
+ENDIF()
-ENDMACRO(jsoncpp_parse_version)
+ENDMACRO()
 
 # Read out version from "version" file
-FILE(STRINGS "version" JSONCPP_VERSION)
+#FILE(STRINGS "version" JSONCPP_VERSION)
+#SET( JSONCPP_VERSION_MAJOR X )
+#SET( JSONCPP_VERSION_MINOR Y )
+#SET( JSONCPP_VERSION_PATCH Z )
+SET( JSONCPP_VERSION 0.10.7 )
 jsoncpp_parse_version( ${JSONCPP_VERSION} JSONCPP_VERSION )
-IF(NOT JSONCPP_VERSION_FOUND)
+#IF(NOT JSONCPP_VERSION_FOUND)
-MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
+# MESSAGE(FATAL_ERROR "Failed to parse version string properly. Expect X.Y.Z")
-ENDIF(NOT JSONCPP_VERSION_FOUND)
+#ENDIF(NOT JSONCPP_VERSION_FOUND)
 
 MESSAGE(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}")
 # File version.h is only regenerated on CMake configure step
 CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
-"${PROJECT_SOURCE_DIR}/include/json/version.h" )
+"${PROJECT_SOURCE_DIR}/include/json/version.h"
+NEWLINE_STYLE UNIX )
+CONFIGURE_FILE( "${PROJECT_SOURCE_DIR}/version.in"
+"${PROJECT_SOURCE_DIR}/version"
+NEWLINE_STYLE UNIX )
 
 macro(UseCompilationWarningAsError)
 if ( MSVC )
 # Only enabled in debug because some old versions of VS STL generate
 # warnings when compiled in release configuration.
 set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /WX ")
-endif( MSVC )
+elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_XX_COMPILER_ID MATCHES "Clang" )
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror")
+endif()
 endmacro()
 
 # Include our configuration header
@@ -81,11 +97,20 @@ if ( MSVC )
 # Only enabled in debug because some old versions of VS STL generate
 # unreachable code warning when compiled in release configuration.
 set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /W4 ")
-endif( MSVC )
+endif()
 
+if (CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+# using regular Clang or AppleClang
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wshadow -Wshorten-64-to-32")
+elseif (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
+# using GCC
+set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wshadow -Wextra -pedantic -Wno-long-long")
+# not yet ready for -Wsign-conversion
+endif()
 
 IF(JSONCPP_WITH_WARNING_AS_ERROR)
 UseCompilationWarningAsError()
-ENDIF(JSONCPP_WITH_WARNING_AS_ERROR)
+ENDIF()
 
 IF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
 CONFIGURE_FILE(
@@ -93,14 +118,14 @@ IF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
 "pkg-config/jsoncpp.pc"
 @ONLY)
 INSTALL(FILES "${CMAKE_BINARY_DIR}/pkg-config/jsoncpp.pc"
-DESTINATION "${CMAKE_INSTALL_PREFIX}/lib${LIB_SUFFIX}/pkgconfig")
+DESTINATION "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
-ENDIF(JSONCPP_WITH_PKGCONFIG_SUPPORT)
+ENDIF()
 
 IF(JSONCPP_WITH_CMAKE_PACKAGE)
 INSTALL(EXPORT jsoncpp
 DESTINATION ${PACKAGE_INSTALL_DIR}/jsoncpp
 FILE jsoncppConfig.cmake)
-ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
+ENDIF()
 
 # Build the different applications
 ADD_SUBDIRECTORY( src )
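The build-system hunks above pin `JSONCPP_VERSION` to 0.10.7 and regenerate `include/json/version.h` (and the top-level `version` file) at configure time via `CONFIGURE_FILE(... NEWLINE_STYLE UNIX)`. A consuming program can sanity-check the generated header; the sketch below is illustrative only and assumes the usual macro names produced from `version.h.in` (such as `JSONCPP_VERSION_STRING`), which are not shown in this diff.

    // version_probe.cpp -- illustrative sketch; the macro names are assumed
    // from src/lib_json/version.h.in and are not part of the diff above.
    #include <json/version.h>
    #include <iostream>

    int main() {
        std::cout << "JsonCpp " << JSONCPP_VERSION_STRING << "\n";
        std::cout << JSONCPP_VERSION_MAJOR << "." << JSONCPP_VERSION_MINOR
                  << "." << JSONCPP_VERSION_PATCH << "\n";
        return 0;
    }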
2  NEWS.txt
@@ -80,7 +80,7 @@ New in SVN
 (e.g. MSVC 2008 command prompt in start menu) before running scons.
 
 - Added support for amalgamated source and header generation (a la sqlite).
-Refer to README.txt section "Generating amalgamated source and header"
+Refer to README.md section "Generating amalgamated source and header"
 for detail.
 
 * Value
104  README.md
@@ -7,34 +7,62 @@ pairs.
 
 [json-org]: http://json.org/
 
-JsonCpp is a C++ library that allows manipulating JSON values, including
+[JsonCpp][] is a C++ library that allows manipulating JSON values, including
 serialization and deserialization to and from strings. It can also preserve
 existing comment in unserialization/serialization steps, making it a convenient
 format to store user input files.
 
+[JsonCpp]: http://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html
+
 ## A note on backward-compatibility
-Very soon, we are switching to C++11 only. For older compilers, try the `pre-C++11` branch.
+* `1.y.z` is built with C++11.
+* `0.y.z` can be used with older compilers.
+* Major versions maintain binary-compatibility.
 
-Using JsonCpp in your project
+# Using JsonCpp in your project
 -----------------------------
-The recommended approach to integrating JsonCpp in your project is to build
-the amalgamated source (a single `.cpp` file) with your own build system. This
-ensures consistency of compilation flags and ABI compatibility. See the section
-"Generating amalgamated source and header" for instructions.
+The recommended approach to integrating JsonCpp in your project is to include
+the [amalgamated source](#generating-amalgamated-source-and-header) (a single
+`.cpp` file and two `.h` files) in your project, and compile and build as you
+would any other source file. This ensures consistency of compilation flags and
+ABI compatibility, issues which arise when building shared or static
+libraries. See the next section for instructions.
 
 The `include/` should be added to your compiler include path. Jsoncpp headers
 should be included as follow:
 
 #include <json/json.h>
 
-If JsonCpp was build as a dynamic library on Windows, then your project needs to
+If JsonCpp was built as a dynamic library on Windows, then your project needs to
 define the macro `JSON_DLL`.
 
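As a quick illustration of the include rule above (a sketch, not part of the README), a minimal consumer parses a document with the classic `Json::Reader` API that the documentation elsewhere in this diff still shows:

    #include <json/json.h>   // requires include/ (or the install prefix) on the include path
    #include <iostream>
    #include <string>

    int main() {
        const std::string doc = "{ \"encoding\" : \"UTF-8\" }";
        Json::Value root;
        Json::Reader reader;                    // pre-builder API, still available here
        if (!reader.parse(doc, root)) {
            std::cerr << reader.getFormattedErrorMessages();
            return 1;
        }
        std::cout << root.get("encoding", "UTF-8").asString() << "\n";
        return 0;
    }

On Windows, define `JSON_DLL` before the include (or on the compiler command line) when linking against the DLL build, as the README notes above.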
+Generating amalgamated source and header
+----------------------------------------
+JsonCpp is provided with a script to generate a single header and a single
+source file to ease inclusion into an existing project. The amalgamated source
+can be generated at any time by running the following command from the
+top-directory (this requires Python 2.6):
+
-Building and testing with new CMake
------------------------------------
+python amalgamate.py
+
+It is possible to specify header name. See the `-h` option for detail.
+
+By default, the following files are generated:
+* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
+* `dist/json/json.h`: corresponding header file for use in your project. It is
+equivalent to including `json/json.h` in non-amalgamated source. This header
+only depends on standard headers.
+* `dist/json/json-forwards.h`: header that provides forward declaration of all
+JsonCpp types.
+
+The amalgamated sources are generated by concatenating JsonCpp source in the
+correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
+of other headers.
+
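To make the layout above concrete, here is a hedged sketch (not part of the README) of a project that vendors the generated files: `dist/jsoncpp.cpp` is compiled alongside the project's own sources, with `dist/` on the include path. The same shape appears later in this diff as the `cd dist; gcc -I. -c jsoncpp.cpp` smoke test added to `dev.makefile`.

    // main.cpp -- built together with dist/jsoncpp.cpp, e.g.:
    //   c++ -Idist main.cpp dist/jsoncpp.cpp -o demo        (command is illustrative)
    #include "json/json.h"   // resolves to dist/json/json.h; the generated files
                             // take care of the JSON_IS_AMALGAMATION macro themselves
    #include <iostream>

    int main() {
        Json::Value root;
        root["generated-by"] = "amalgamate.py";   // illustrative value
        std::cout << root << std::endl;           // stream insertion writes the document
        return 0;
    }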
+# Contributing to JsonCpp
+
+Building and testing with CMake
+-------------------------------
 [CMake][] is a C++ Makefiles/Solution generator. It is usually available on most
 Linux system as package. On Ubuntu:
 
@@ -57,7 +85,7 @@ Steps for generating solution/makefiles using `cmake-gui`:
 * Make "source code" point to the source directory.
 * Make "where to build the binary" point to the directory to use for the build.
 * Click on the "Grouped" check box.
-* Review JsonCpp build options (tick `JSONCPP_LIB_BUILD_SHARED` to build as a
+* Review JsonCpp build options (tick `BUILD_SHARED_LIBS` to build as a
 dynamic library).
 * Click the configure button at the bottom, then the generate button.
 * The generated solution/makefiles can be found in the binary directory.
@@ -66,19 +94,17 @@ Alternatively, from the command-line on Unix in the source directory:
 
 mkdir -p build/debug
 cd build/debug
-cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=OFF -G "Unix Makefiles" ../..
+cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_STATIC_LIBS=ON -DBUILD_SHARED_LIBS=OFF -DARCHIVE_INSTALL_DIR=. -G "Unix Makefiles" ../..
 make
 
-Running `cmake -`" will display the list of available generators (passed using
+Running `cmake -h` will display the list of available generators (passed using
 the `-G` option).
 
 By default CMake hides compilation commands. This can be modified by specifying
 `-DCMAKE_VERBOSE_MAKEFILE=true` when generating makefiles.
 
 Building and testing with SCons
 -------------------------------
 
 **Note:** The SCons-based build system is deprecated. Please use CMake; see the
 section above.
 
@@ -107,14 +133,7 @@ If you are building with Microsoft Visual Studio 2008, you need to set up the
 environment by running `vcvars32.bat` (e.g. MSVC 2008 command prompt) before
 running SCons.
 
+## Running the tests manually
-Running the tests manually
---------------------------
 
-Note that test can be run using SCons using the `check` target:
-
-scons platform=$PLATFORM check
-
 You need to run tests manually only if you are troubleshooting an issue.
 
 In the instructions below, replace `path/to/jsontest` with the path of the
@@ -137,45 +156,21 @@ In the instructions below, replace `path/to/jsontest` with the path of the
 # You can run the tests using valgrind:
 python rununittests.py --valgrind path/to/test_lib_json
 
+## Running the tests using scons
+Note that tests can be run using SCons using the `check` target:
+
+scons platform=$PLATFORM check
+
 Building the documentation
 --------------------------
 
 Run the Python script `doxybuild.py` from the top directory:
 
 python doxybuild.py --doxygen=$(which doxygen) --open --with-dot
 
 See `doxybuild.py --help` for options.
 
-Generating amalgamated source and header
------------------------------------------
-
-JsonCpp is provided with a script to generate a single header and a single
-source file to ease inclusion into an existing project. The amalgamated source
-can be generated at any time by running the following command from the
-top-directory (this requires Python 2.6):
-
-python amalgamate.py
-
-It is possible to specify header name. See the `-h` option for detail.
-
-By default, the following files are generated:
-* `dist/jsoncpp.cpp`: source file that needs to be added to your project.
-* `dist/json/json.h`: corresponding header file for use in your project. It is
-equivalent to including `json/json.h` in non-amalgamated source. This header
-only depends on standard headers.
-* `dist/json/json-forwards.h`: header that provides forward declaration of all
-JsonCpp types.
-
-The amalgamated sources are generated by concatenating JsonCpp source in the
-correct order and defining the macro `JSON_IS_AMALGAMATION` to prevent inclusion
-of other headers.
-
 Adding a reader/writer test
 ---------------------------
 
 To add a test, you need to create two files in test/data:
 
 * a `TESTNAME.json` file, that contains the input document in JSON format.
@@ -195,10 +190,8 @@ The `TESTNAME.expected` file format is as follows:
 See the examples `test_complex_01.json` and `test_complex_01.expected` to better
 understand element paths.
 
 Understanding reader/writer test output
 ---------------------------------------
 
 When a test is run, output files are generated beside the input test files.
 Below is a short description of the content of each file:
 
@@ -215,10 +208,7 @@ Below is a short description of the content of each file:
 * `test_complex_01.process-output`: `jsontest` output, typically useful for
 understanding parsing errors.
 
 License
 -------
 
 See the `LICENSE` file for details. In summary, JsonCpp is licensed under the
 MIT license, or public domain if desired and recognized in your jurisdiction.
 
@@ -237,7 +237,7 @@ RunUnitTests = ActionFactory(runUnitTests_action, runUnitTests_string )
 env.Alias( 'check' )
 
 srcdist_cmd = env['SRCDIST_ADD']( source = """
-AUTHORS README.txt SConstruct
+AUTHORS README.md SConstruct
 """.split() )
 env.Alias( 'src-dist', srcdist_cmd )
 
@@ -1,6 +1,6 @@
 """Amalgate json-cpp library sources into a single source and header file.
 
-Requires Python 2.6
+Works with python2.6+ and python3.4+.
 
 Example of invocation (must be invoked from json-cpp top directory):
 python amalgate.py
@@ -59,7 +59,7 @@ def amalgamate_source( source_top_dir=None,
 print("Amalgating header...")
 header = AmalgamationFile(source_top_dir)
 header.add_text("/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).")
-header.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
+header.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
 header.add_file("LICENSE", wrap_in_comment=True)
 header.add_text("#ifndef JSON_AMALGATED_H_INCLUDED")
 header.add_text("# define JSON_AMALGATED_H_INCLUDED")
@@ -85,7 +85,7 @@ def amalgamate_source( source_top_dir=None,
 print("Amalgating forward header...")
 header = AmalgamationFile(source_top_dir)
 header.add_text("/// Json-cpp amalgated forward header (http://jsoncpp.sourceforge.net/).")
-header.add_text( "/// It is intented to be used with #include <%s>" % forward_header_include_path )
+header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path)
 header.add_text("/// This header provides forward declaration for all JsonCpp types.")
 header.add_file("LICENSE", wrap_in_comment=True)
 header.add_text("#ifndef JSON_FORWARD_AMALGATED_H_INCLUDED")
@@ -105,15 +105,19 @@ def amalgamate_source( source_top_dir=None,
 print("Amalgating source...")
 source = AmalgamationFile(source_top_dir)
 source.add_text("/// Json-cpp amalgated source (http://jsoncpp.sourceforge.net/).")
-source.add_text( "/// It is intented to be used with #include <%s>" % header_include_path )
+source.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
 source.add_file("LICENSE", wrap_in_comment=True)
 source.add_text("")
-source.add_text( "#include <%s>" % header_include_path )
+source.add_text('#include "%s"' % header_include_path)
+source.add_text("""
+#ifndef JSON_IS_AMALGAMATION
+#error "Compile with -I PATH_TO_JSON_DIRECTORY"
+#endif
+""")
 source.add_text("")
 lib_json = "src/lib_json"
 source.add_file(os.path.join(lib_json, "json_tool.h"))
 source.add_file(os.path.join(lib_json, "json_reader.cpp"))
-source.add_file( os.path.join(lib_json, "json_batchallocator.h") )
 source.add_file(os.path.join(lib_json, "json_valueiterator.inl"))
 source.add_file(os.path.join(lib_json, "json_value.cpp"))
 source.add_file(os.path.join(lib_json, "json_writer.cpp"))
35  appveyor.yml (new file)
@@ -0,0 +1,35 @@
+# This is a comment.
+
+version: build.{build}
+
+os: Windows Server 2012 R2
+
+clone_folder: c:\projects\jsoncpp
+
+platform:
+- Win32
+- x64
+
+configuration:
+- Debug
+- Release
+
+# scripts to run before build
+before_build:
+- echo "Running cmake..."
+- cd c:\projects\jsoncpp
+- cmake --version
+- set PATH=C:\Program Files (x86)\MSBuild\14.0\Bin;%PATH%
+- if %PLATFORM% == Win32 cmake .
+- if %PLATFORM% == x64 cmake -G "Visual Studio 12 2013 Win64" .
+
+build:
+project: jsoncpp.sln # path to Visual Studio solution or project
+
+deploy:
+provider: GitHub
+auth_token:
+secure: K2Tp1q8pIZ7rs0Ot24ZMWuwr12Ev6Tc6QkhMjGQxoQG3ng1pXtgPasiJ45IDXGdg
+on:
+branch: master
+appveyor_repo_tag: true
27  dev.makefile
@@ -1,14 +1,35 @@
-all: build test-amalgamate
+# This is only for jsoncpp developers/contributors.
+# We use this to sign releases, generate documentation, etc.
+VER?=$(shell cat version)
+
+default:
+@echo "VER=${VER}"
+sign: jsoncpp-${VER}.tar.gz
+gpg --armor --detach-sign $<
+gpg --verify $<.asc
+# Then upload .asc to the release.
+jsoncpp-%.tar.gz:
+curl https://github.com/open-source-parsers/jsoncpp/archive/$*.tar.gz -o $@
+dox:
+python doxybuild.py --doxygen=$$(which doxygen) --in doc/web_doxyfile.in
+rsync -va --delete dist/doxygen/jsoncpp-api-html-${VER}/ ../jsoncpp-docs/doxygen/
+# Then 'git add -A' and 'git push' in jsoncpp-docs.
 build:
 mkdir -p build/debug
-cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DJSONCPP_LIB_BUILD_SHARED=ON -G "Unix Makefiles" ../..
+cd build/debug; cmake -DCMAKE_BUILD_TYPE=debug -DBUILD_SHARED_LIBS=ON -G "Unix Makefiles" ../..
 make -C build/debug
 
 # Currently, this depends on include/json/version.h generated
 # by cmake.
-test-amalgamate: build
+test-amalgamate:
 python2.7 amalgamate.py
 python3.4 amalgamate.py
+cd dist; gcc -I. -c jsoncpp.cpp
+
+valgrind:
+valgrind --error-exitcode=42 --leak-check=full ./build/debug/src/test_lib_json/jsoncpp_test
+
+clean:
+\rm -rf *.gz *.asc dist/
 
 .PHONY: build
@@ -1 +1,6 @@
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 # module
@@ -19,8 +19,8 @@
 },
 {"name": "shared_dll",
 "variables": [
-["JSONCPP_LIB_BUILD_SHARED=true"],
+["BUILD_SHARED_LIBS=true"],
-["JSONCPP_LIB_BUILD_SHARED=false"]
+["BUILD_SHARED_LIBS=false"]
 ]
 },
 {"name": "build_type",
@@ -12,8 +12,8 @@
 },
 {"name": "shared_dll",
 "variables": [
-["JSONCPP_LIB_BUILD_SHARED=true"],
+["BUILD_SHARED_LIBS=true"],
-["JSONCPP_LIB_BUILD_SHARED=false"]
+["BUILD_SHARED_LIBS=false"]
 ]
 },
 {"name": "build_type",
@@ -1,6 +1,9 @@
 #!/usr/bin/env python
 # encoding: utf-8
-# Baptiste Lepilleur, 2009
+# Copyright 2009 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
 
 from __future__ import print_function
 from dircache import listdir
@@ -211,18 +211,15 @@ def generate_html_report( html_report_path, builds ):
 build_status = 'ok' if build.build_succeeded else 'FAILED'
 cmake_log_url = os.path.relpath(build.cmake_log_path, report_dir)
 build_log_url = os.path.relpath(build.build_log_path, report_dir)
-td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % (
-build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
+td = '<td class="%s"><a href="%s" class="%s">CMake: %s</a>' % ( build_status.lower(), cmake_log_url, cmake_status.lower(), cmake_status)
 if build.cmake_succeeded:
-td += '<br><a href="%s" class="%s">Build: %s</a>' % (
-build_log_url, build_status.lower(), build_status)
+td += '<br><a href="%s" class="%s">Build: %s</a>' % ( build_log_url, build_status.lower(), build_status)
 td += '</td>'
 else:
 td = '<td></td>'
 tds.append(td)
 tr_builds.append('<tr>%s</tr>' % '\n'.join(tds))
-html = HTML_TEMPLATE.substitute(
-title='Batch build report',
+html = HTML_TEMPLATE.substitute( title='Batch build report',
 th_vars=' '.join(th_vars),
 th_build_types=' '.join(th_build_types),
 tr_builds='\n'.join(tr_builds))
@@ -1,5 +1,11 @@
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 from __future__ import print_function
 import os.path
+import sys
 
 def fix_source_eol(path, is_dry_run = True, verbose = True, eol = '\n'):
 """Makes sure that all sources have the specified eol sequence (default: unix)."""
@@ -1,5 +1,10 @@
-import os.path
-import gzip
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+from contextlib import closing
+import os
 import tarfile
 
 TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
@@ -29,25 +34,19 @@ def make_tarball(tarball_path, sources, base_dir, prefix_dir=''):
 path_in_tar = archive_name(path)
 tar.add(path, path_in_tar)
 compression = TARGZ_DEFAULT_COMPRESSION_LEVEL
-tar = tarfile.TarFile.gzopen( tarball_path, 'w', compresslevel=compression )
-try:
+with closing(tarfile.TarFile.open(tarball_path, 'w:gz',
+compresslevel=compression)) as tar:
 for source in sources:
 source_path = source
 if os.path.isdir(source):
-os.path.walk(source_path, visit, tar)
+for dirpath, dirnames, filenames in os.walk(source_path):
+visit(tar, dirpath, filenames)
 else:
 path_in_tar = archive_name(source_path)
 tar.add(source_path, path_in_tar) # filename, arcname
-finally:
-tar.close()
 
 def decompress(tarball_path, base_dir):
 """Decompress the gzipped tarball into directory base_dir.
 """
-# !!! This class method is not documented in the online doc
-# nor is bz2open!
-tar = tarfile.TarFile.gzopen(tarball_path, mode='r')
-try:
+with closing(tarfile.TarFile.open(tarball_path)) as tar:
 tar.extractall(base_dir)
-finally:
-tar.close()
@@ -819,7 +819,7 @@ EXCLUDE_SYMBOLS =
 # that contain example code fragments that are included (see the \include
 # command).
 
-EXAMPLE_PATH =
+EXAMPLE_PATH = ..
 
 # If the value of the EXAMPLE_PATH tag contains directories, you can use the
 # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
@@ -1946,8 +1946,7 @@ INCLUDE_FILE_PATTERNS = *.h
 PREDEFINED = "_MSC_VER=1400" \
 _CPPRTTI \
 _WIN32 \
-JSONCPP_DOC_EXCLUDE_IMPLEMENTATION \
+JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-JSON_VALUE_USE_INTERNAL_MAP
 
 # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
 # tag can be used to specify a list of macro names that should be expanded. The
@@ -16,7 +16,7 @@ JsonCpp - JSON data format manipulation library
 </a>
 </td>
 <td width="40%" align="right" valign="center">
-<a href="https://github.com/open-source-parsers/jsoncpp">JsonCpp home page</a>
+<a href="http://open-source-parsers.github.io/jsoncpp-docs/doxygen/">JsonCpp home page</a>
 </td>
 </tr>
 </table>
133  doc/jsoncpp.dox
@@ -4,11 +4,21 @@
 <a HREF="http://www.json.org/">JSON (JavaScript Object Notation)</a>
 is a lightweight data-interchange format.
-It can represent integer, real number, string, an ordered sequence of value, and
-a collection of name/value pairs.
 
 Here is an example of JSON data:
 \verbatim
+{
+"encoding" : "UTF-8",
+"plug-ins" : [
+"python",
+"c++",
+"ruby"
+],
+"indent" : { "length" : 3, "use_space": true }
+}
+\endverbatim
+<b>JsonCpp</b> supports comments as <i>meta-data</i>:
+\code
 // Configuration options
 {
 // Default encoding for text
@@ -17,22 +27,22 @@ Here is an example of JSON data:
 // Plug-ins loaded at start-up
 "plug-ins" : [
 "python",
-"c++",
+"c++", // trailing comment
 "ruby"
 ],
 
 // Tab indent size
-"indent" : { "length" : 3, "use_space": true }
+// (multi-line comment)
+"indent" : { /*embedded comment*/ "length" : 3, "use_space": true }
 }
-\endverbatim
+\endcode
-<code>jsoncpp</code> supports comments as <i>meta-data</i>.
 
 \section _features Features
 - read and write JSON document
 - attach C++ style comments to element during parsing
 - rewrite JSON document preserving original comments
 
-Notes: Comments used to be supported in JSON but where removed for
+Notes: Comments used to be supported in JSON but were removed for
 portability (C like comments are not supported in Python). Since
 comments are useful in configuration/input file, this feature was
 preserved.
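The two hunks above turn the plain JSON sample into a commented one and document that comments are kept as meta-data. As a small illustrative sketch (using the classic `Json::Reader`/`Json::StyledWriter` pair that appears in the old code example below), comments survive a parse-and-rewrite round trip when `collectComments` stays enabled:

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main() {
        const std::string doc =
            "// Configuration options\n"
            "{\n"
            "   // Default encoding for text\n"
            "   \"encoding\" : \"UTF-8\"\n"
            "}\n";

        Json::Value root;
        Json::Reader reader;
        if (!reader.parse(doc, root, /*collectComments=*/true)) {
            std::cerr << reader.getFormattedErrorMessages();
            return 1;
        }

        Json::StyledWriter writer;           // styled writers re-emit collected comments
        std::cout << writer.write(root);
        return 0;
    }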
@@ -40,47 +50,77 @@ preserved.
|
|||||||
\section _example Code example
|
\section _example Code example
|
||||||
|
|
||||||
\code
|
\code
|
||||||
Json::Value root; // will contains the root value after parsing.
|
Json::Value root; // 'root' will contain the root value after parsing.
|
||||||
Json::Reader reader;
|
std::cin >> root;
|
||||||
bool parsingSuccessful = reader.parse( config_doc, root );
|
|
||||||
if ( !parsingSuccessful )
|
|
||||||
{
|
|
||||||
// report to the user the failure and their locations in the document.
|
|
||||||
std::cout << "Failed to parse configuration\n"
|
|
||||||
<< reader.getFormattedErrorMessages();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Get the value of the member of root named 'encoding', return 'UTF-8' if there is no
|
// You can also read into a particular sub-value.
|
||||||
// such member.
|
|
||||||
std::string encoding = root.get("encoding", "UTF-8" ).asString();
|
|
||||||
// Get the value of the member of root named 'encoding', return a 'null' value if
|
|
||||||
// there is no such member.
|
|
||||||
const Json::Value plugins = root["plug-ins"];
|
|
||||||
for ( int index = 0; index < plugins.size(); ++index ) // Iterates over the sequence elements.
|
|
||||||
loadPlugIn( plugins[index].asString() );
|
|
||||||
|
|
||||||
setIndentLength( root["indent"].get("length", 3).asInt() );
|
|
||||||
setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
|
|
||||||
|
|
||||||
// ...
|
|
||||||
// At application shutdown to make the new configuration document:
|
|
||||||
// Since Json::Value has implicit constructor for all value types, it is not
|
|
||||||
// necessary to explicitly construct the Json::Value object:
|
|
||||||
root["encoding"] = getCurrentEncoding();
|
|
||||||
root["indent"]["length"] = getCurrentIndentLength();
|
|
||||||
root["indent"]["use_space"] = getCurrentIndentUseSpace();
|
|
||||||
|
|
||||||
Json::StyledWriter writer;
|
|
||||||
// Make a new JSON document for the configuration. Preserve original comments.
|
|
||||||
std::string outputConfig = writer.write( root );
|
|
||||||
|
|
||||||
// You can also use streams. This will put the contents of any JSON
|
|
||||||
// stream at a particular sub-value, if you'd like.
|
|
||||||
std::cin >> root["subtree"];
|
std::cin >> root["subtree"];
|
||||||
|
|
||||||
// And you can write to a stream, using the StyledWriter automatically.
|
// Get the value of the member of root named 'encoding',
|
||||||
|
// and return 'UTF-8' if there is no such member.
|
||||||
|
std::string encoding = root.get("encoding", "UTF-8" ).asString();
|
||||||
|
|
||||||
|
// Get the value of the member of root named 'plug-ins'; return a 'null' value if
|
||||||
|
// there is no such member.
|
||||||
|
const Json::Value plugins = root["plug-ins"];
|
||||||
|
|
||||||
|
// Iterate over the sequence elements.
|
||||||
|
for ( int index = 0; index < plugins.size(); ++index )
|
||||||
|
loadPlugIn( plugins[index].asString() );
|
||||||
|
|
||||||
|
// Try other datatypes. Some are auto-convertible to others.
|
||||||
|
foo::setIndentLength( root["indent"].get("length", 3).asInt() );
|
||||||
|
foo::setIndentUseSpace( root["indent"].get("use_space", true).asBool() );
|
||||||
|
|
||||||
|
// Since Json::Value has an implicit constructor for all value types, it is not
|
||||||
|
// necessary to explicitly construct the Json::Value object.
|
||||||
|
root["encoding"] = foo::getCurrentEncoding();
|
||||||
|
root["indent"]["length"] = foo::getCurrentIndentLength();
|
||||||
|
root["indent"]["use_space"] = foo::getCurrentIndentUseSpace();
|
||||||
|
|
||||||
|
// If you like the defaults, you can insert directly into a stream.
|
||||||
std::cout << root;
|
std::cout << root;
|
||||||
|
// Of course, you can write to `std::ostringstream` if you prefer.
|
||||||
|
|
||||||
|
// If desired, remember to add a linefeed and flush.
|
||||||
|
std::cout << std::endl;
|
||||||
|
\endcode
|
||||||
|
|
||||||
|
\section _advanced Advanced usage
|
||||||
|
|
||||||
|
Configure *builders* to create *readers* and *writers*. For
|
||||||
|
configuration, we use our own `Json::Value` (rather than
|
||||||
|
standard setters/getters) so that we can add
|
||||||
|
features without losing binary-compatibility.
|
||||||
|
|
||||||
|
\code
|
||||||
|
// For convenience, use `writeString()` with a specialized builder.
|
||||||
|
Json::StreamWriterBuilder wbuilder;
|
||||||
|
wbuilder["indentation"] = "\t";
|
||||||
|
std::string document = Json::writeString(wbuilder, root);
|
||||||
|
|
||||||
|
// Here, using a specialized Builder, we discard comments and
|
||||||
|
// record errors as we parse.
|
||||||
|
Json::CharReaderBuilder rbuilder;
|
||||||
|
rbuilder["collectComments"] = false;
|
||||||
|
std::string errs;
|
||||||
|
bool ok = Json::parseFromStream(rbuilder, std::cin, &root, &errs);
|
||||||
|
\endcode
|
||||||
|
|
||||||
|
Yes, compile-time configuration-checking would be helpful,
|
||||||
|
but `Json::Value` lets you
|
||||||
|
write and read the builder configuration, which is better! In other words,
|
||||||
|
you can configure your JSON parser using JSON.
|
||||||
|
|
||||||
|
CharReaders and StreamWriters are not thread-safe, but they are re-usable.
|
||||||
|
\code
|
||||||
|
Json::CharReaderBuilder rbuilder;
|
||||||
|
cfg >> rbuilder.settings_;
|
||||||
|
std::unique_ptr<Json::CharReader> const reader(rbuilder.newCharReader());
|
||||||
|
reader->parse(start, stop, &value1, &errs);
|
||||||
|
// ...
|
||||||
|
reader->parse(start, stop, &value2, &errs);
|
||||||
|
// etc.
|
||||||
\endcode
|
\endcode
|
||||||
|
|
||||||
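(Editor's note, not part of the diff: the same re-use pattern applies on the writing side. A minimal sketch, assuming only the `StreamWriterBuilder`/`StreamWriter` API referenced elsewhere in this changeset; `dump()` is a made-up helper.)

\code
#include <json/json.h>
#include <iostream>
#include <memory>

void dump(const Json::Value& value1, const Json::Value& value2) {
  Json::StreamWriterBuilder wbuilder;
  wbuilder["indentation"] = "  ";
  std::unique_ptr<Json::StreamWriter> const writer(wbuilder.newStreamWriter());
  writer->write(value1, &std::cout);   // the same writer instance
  writer->write(value2, &std::cout);   // serializes a second document
}
\endcode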
 \section _pbuild Build instructions

@@ -116,4 +156,9 @@ Basically JsonCpp is licensed under MIT license, or public domain if desired
 and recognized in your jurisdiction.

 \author Baptiste Lepilleur <blep@users.sourceforge.net> (originator)
+\author Christopher Dunn <cdunn2001@gmail.com> (primary maintainer)
+\version \include version
+We make strong guarantees about binary-compatibility, consistent with
+<a href="http://apr.apache.org/versioning.html">the Apache versioning scheme</a>.
+\sa version.h
 */

2301  doc/web_doxyfile.in  Normal file
File diff suppressed because it is too large.

102  doxybuild.py
@@ -1,13 +1,28 @@
 """Script to generate doxygen documentation.
 """
 from __future__ import print_function
+from __future__ import unicode_literals
 from devtools import tarball
+from contextlib import contextmanager
+import subprocess
+import traceback
 import re
 import os
-import os.path
 import sys
 import shutil

+@contextmanager
+def cd(newdir):
+    """
+    http://stackoverflow.com/questions/431684/how-do-i-cd-in-python
+    """
+    prevdir = os.getcwd()
+    os.chdir(newdir)
+    try:
+        yield
+    finally:
+        os.chdir(prevdir)
+
 def find_program(*filenames):
     """find a program in folders path_lst, and sets env[var]
     @param filenames: a list of possible names of the program to search for
@@ -16,7 +31,7 @@ def find_program(*filenames):
     paths = os.environ.get('PATH', '').split(os.pathsep)
     suffixes = ('win32' in sys.platform) and '.exe .com .bat .cmd' or ''
     for filename in filenames:
-        for name in [filename+ext for ext in suffixes.split()]:
+        for name in [filename+ext for ext in suffixes.split(' ')]:
             for directory in paths:
                 full_path = os.path.join(directory, name)
                 if os.path.isfile(full_path):
@@ -28,51 +43,54 @@ def do_subst_in_file(targetfile, sourcefile, dict):
     For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
     then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
     """
-    try:
-        f = open(sourcefile, 'rb')
+    with open(sourcefile, 'r') as f:
         contents = f.read()
-        f.close()
-    except:
-        print("Can't read source file %s"%sourcefile)
-        raise
     for (k,v) in list(dict.items()):
         v = v.replace('\\','\\\\')
         contents = re.sub(k, v, contents)
-    try:
-        f = open(targetfile, 'wb')
+    with open(targetfile, 'w') as f:
         f.write(contents)
-        f.close()
-    except:
-        print("Can't write target file %s"%targetfile)
-        raise
+
+def getstatusoutput(cmd):
+    """cmd is a list.
+    """
+    try:
+        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+        output, _ = process.communicate()
+        status = process.returncode
+    except:
+        status = -1
+        output = traceback.format_exc()
+    return status, output
+
+def run_cmd(cmd, silent=False):
+    """Raise exception on failure.
+    """
+    info = 'Running: %r in %r' %(' '.join(cmd), os.getcwd())
+    print(info)
+    sys.stdout.flush()
+    if silent:
+        status, output = getstatusoutput(cmd)
+    else:
+        status, output = subprocess.call(cmd), ''
+    if status:
+        msg = 'Error while %s ...\n\terror=%d, output="""%s"""' %(info, status, output)
+        raise Exception(msg)
+
+def assert_is_exe(path):
+    if not path:
+        raise Exception('path is empty.')
+    if not os.path.isfile(path):
+        raise Exception('%r is not a file.' %path)
+    if not os.access(path, os.X_OK):
+        raise Exception('%r is not executable by this user.' %path)
+
 def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
+    assert_is_exe(doxygen_path)
     config_file = os.path.abspath(config_file)
-    doxygen_path = doxygen_path
-    old_cwd = os.getcwd()
-    try:
-        os.chdir( working_dir )
+    with cd(working_dir):
         cmd = [doxygen_path, config_file]
-        print('Running:', ' '.join( cmd ))
-        try:
-            import subprocess
-        except:
-            if os.system( ' '.join( cmd ) ) != 0:
-                print('Documentation generation failed')
-                return False
-        else:
-            if is_silent:
-                process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
-            else:
-                process = subprocess.Popen( cmd )
-            stdout, _ = process.communicate()
-            if process.returncode:
-                print('Documentation generation failed:')
-                print(stdout)
-                return False
-        return True
-    finally:
-        os.chdir( old_cwd )
+        run_cmd(cmd, is_silent)

 def build_doc(options, make_release=False):
     if make_release:
@@ -112,10 +130,10 @@ def build_doc( options, make_release=False ):
     if not os.path.isdir(output_dir):
         os.makedirs(output_dir)

-    do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
-    ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
+    do_subst_in_file('doc/doxyfile', options.doxyfile_input_path, subst_keys)
+    run_doxygen(options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent)
     if not options.silent:
-        print(open(warning_log_path, 'rb').read())
+        print(open(warning_log_path, 'r').read())
     index_path = os.path.abspath(os.path.join('doc', subst_keys['%HTML_OUTPUT%'], 'index.html'))
     print('Generated documentation can be found in:')
     print(index_path)
@@ -126,7 +144,7 @@ def build_doc( options, make_release=False ):
         print('Generating doc tarball to', tarball_path)
         tarball_sources = [
             output_dir,
-            'README.txt',
+            'README.md',
             'LICENSE',
             'NEWS.txt',
             'version'
@@ -151,6 +169,8 @@ def main():
         help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
     parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
         help="""Path to Doxygen tool. [Default: %default]""")
+    parser.add_option('--in', dest="doxyfile_input_path", action='store', default='doc/doxyfile.in',
+        help="""Path to doxygen inputs. [Default: %default]""")
     parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
         help="""Enable generation of Microsoft HTML HELP""")
     parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,
include/json/assertions.h
@@ -7,35 +7,48 @@
 #define CPPTL_JSON_ASSERTIONS_H_INCLUDED

 #include <stdlib.h>
+#include <sstream>

 #if !defined(JSON_IS_AMALGAMATION)
 #include "config.h"
 #endif // if !defined(JSON_IS_AMALGAMATION)

+/** It should not be possible for a maliciously designed file to
+ *  cause an abort() or seg-fault, so these macros are used only
+ *  for pre-condition violations and internal logic errors.
+ */
 #if JSON_USE_EXCEPTION
-#include <stdexcept>
-#define JSON_ASSERT(condition) \
-  assert(condition); // @todo <= change this into an exception throw
-#define JSON_FAIL_MESSAGE(message) throw std::runtime_error(message);
-#else // JSON_USE_EXCEPTION
-#define JSON_ASSERT(condition) assert(condition);

-// The call to assert() will show the failure message in debug builds. In
-// release bugs we write to invalid memory in order to crash hard, so that a
-// debugger or crash reporter gets the chance to take over. We still call exit()
-// afterward in order to tell the compiler that this macro doesn't return.
+// @todo <= add detail about condition in exception
+# define JSON_ASSERT(condition) \
+  {if (!(condition)) {Json::throwLogicError( "assert json failed" );}}
+
 # define JSON_FAIL_MESSAGE(message) \
   { \
-    assert(false&& message); \
-    strcpy(reinterpret_cast<char*>(666), message); \
-    exit(123); \
+    std::ostringstream oss; oss << message; \
+    Json::throwLogicError(oss.str()); \
+    abort(); \
   }

+#else // JSON_USE_EXCEPTION
+
+# define JSON_ASSERT(condition) assert(condition)
+
+// The call to assert() will show the failure message in debug builds. In
+// release builds we abort, for a core-dump or debugger.
+# define JSON_FAIL_MESSAGE(message) \
+  { \
+    std::ostringstream oss; oss << message; \
+    assert(false && oss.str().c_str()); \
+    abort(); \
+  }
+
 #endif

 #define JSON_ASSERT_MESSAGE(condition, message) \
   if (!(condition)) { \
-    JSON_FAIL_MESSAGE(message) \
+    JSON_FAIL_MESSAGE(message); \
   }

 #endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
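(Editor's sketch, not part of the diff: what these macros mean for callers. Assuming the amalgamated `json/json.h` header and `JSON_USE_EXCEPTION` enabled, a precondition violation now surfaces as a `Json::LogicError` instead of an `abort()`.)

\code
#include <json/json.h>
#include <iostream>

int main() {
  Json::Value v(42);               // an integer value
  try {
    v.asCString();                 // precondition violation: not a string value
  } catch (const Json::LogicError& e) {
    std::cerr << "caught Json::LogicError: " << e.what() << '\n';
  }
  return 0;
}
\endcode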
include/json/config.h
@@ -15,17 +15,6 @@
 /// std::map
 /// as Value container.
 //# define JSON_USE_CPPTL_SMALLMAP 1
-/// If defined, indicates that Json specific container should be used
-/// (hash table & simple deque container with customizable allocator).
-/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332
-//# define JSON_VALUE_USE_INTERNAL_MAP 1
-/// Force usage of standard new/malloc based allocator instead of memory pool
-/// based allocator.
-/// The memory pools allocator used optimization (initializing Value and
-/// ValueInternalLink
-/// as if it was a POD) that may cause some validation tool to report errors.
-/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
-//# define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1

 // If non-zero, the library uses exceptions to report bad input instead of C
 // assertion macros. The default is to use exceptions.
@@ -62,6 +51,16 @@
 #define JSON_API
 #endif

+#if !defined(JSON_HAS_UNIQUE_PTR)
+#if __cplusplus >= 201103L
+#define JSON_HAS_UNIQUE_PTR (1)
+#elif _MSC_VER >= 1600
+#define JSON_HAS_UNIQUE_PTR (1)
+#else
+#define JSON_HAS_UNIQUE_PTR (0)
+#endif
+#endif
+
 // If JSON_NO_INT64 is defined, then Json only support C++ "int" type for
 // integer
 // Storages, and 64 bits integer support is disabled.
@@ -81,6 +80,14 @@
 #if defined(_MSC_VER) && _MSC_VER >= 1500 // MSVC 2008
 /// Indicates that the following function is deprecated.
 #define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
+#elif defined(__clang__) && defined(__has_feature)
+#if __has_feature(attribute_deprecated_with_message)
+#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
+#endif
+#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
+#define JSONCPP_DEPRECATED(message) __attribute__ ((deprecated(message)))
+#elif defined(__GNUC__) && (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
+#define JSONCPP_DEPRECATED(message) __attribute__((__deprecated__))
 #endif

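(Editor's sketch, not part of the diff: once JSONCPP_DEPRECATED expands to the compiler-specific attribute, tagging a declaration is enough to make callers see a warning. `legacyParse` below is a hypothetical function used only for illustration.)

\code
// Hypothetical declaration; only the macro-usage pattern matters here.
JSONCPP_DEPRECATED("Use CharReaderBuilder + parseFromStream() instead.")
bool legacyParse(const std::string& document, Json::Value& root);
\endcode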
 #if !defined(JSONCPP_DEPRECATED)
include/json/features.h
@@ -44,12 +44,6 @@ public:
   /// \c true if root must be either an array or an object value. Default: \c
   /// false.
   bool strictRoot_;
-
-  /// \c true if dropped null placeholders are allowed. Default: \c false.
-  bool allowDroppedNullPlaceholders_;
-
-  /// \c true if numeric object key are allowed. Default: \c false.
-  bool allowNumericKeys_;
 };

 } // namespace Json
include/json/forwards.h
@@ -31,12 +31,6 @@ class Value;
 class ValueIteratorBase;
 class ValueIterator;
 class ValueConstIterator;
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-class ValueMapAllocator;
-class ValueInternalLink;
-class ValueInternalArray;
-class ValueInternalMap;
-#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP

 } // namespace Json

include/json/reader.h
@@ -14,6 +14,7 @@
 #include <iosfwd>
 #include <stack>
 #include <string>
+#include <istream>

 // Disable warning C4251: <data member>: <type> needs to have dll-interface to
 // be used by...
@@ -27,24 +28,13 @@ namespace Json {
 /** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
  *Value.
  *
+ * \deprecated Use CharReader and CharReaderBuilder.
  */
 class JSON_API Reader {
 public:
   typedef char Char;
   typedef const Char* Location;

-  /** \brief An error tagged with where in the JSON text it was encountered.
-   *
-   * The offsets give the [start, limit) range of bytes within the text. Note
-   * that this is bytes, not codepoints.
-   *
-   */
-  struct StructuredError {
-    size_t offset_start;
-    size_t offset_limit;
-    std::string message;
-  };
-
   /** \brief Constructs a Reader allowing all features
    * for parsing.
    */
@@ -78,7 +68,7 @@ public:
   document to read.
   * \param endDoc Pointer on the end of the UTF-8 encoded string of the
   document to read.
-  \ Must be >= beginDoc.
+  * Must be >= beginDoc.
   * \param root [out] Contains the root value of the document if it was
   * successfully parsed.
   * \param collectComments \c true to collect comment and allow writing them
@@ -108,7 +98,7 @@ public:
   * during parsing.
   * \deprecated Use getFormattedErrorMessages() instead (typo fix).
   */
-  JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead")
+  JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.")
   std::string getFormatedErrorMessages() const;

   /** \brief Returns a user friendly string that list errors in the parsed
@@ -121,38 +111,6 @@ public:
   */
   std::string getFormattedErrorMessages() const;

-  /** \brief Returns a vector of structured erros encounted while parsing.
-   * \return A (possibly empty) vector of StructuredError objects. Currently
-   * only one error can be returned, but the caller should tolerate
-   * multiple
-   * errors. This can occur if the parser recovers from a non-fatal
-   * parse error and then encounters additional errors.
-   */
-  std::vector<StructuredError> getStructuredErrors() const;
-
-  /** \brief Add a semantic error message.
-   * \param value JSON Value location associated with the error
-   * \param message The error message.
-   * \return \c true if the error was successfully added, \c false if the
-   * Value offset exceeds the document size.
-   */
-  bool pushError(const Value& value, const std::string& message);
-
-  /** \brief Add a semantic error message with extra context.
-   * \param value JSON Value location associated with the error
-   * \param message The error message.
-   * \param extra Additional JSON Value location to contextualize the error
-   * \return \c true if the error was successfully added, \c false if either
-   * Value offset exceeds the document size.
-   */
-  bool pushError(const Value& value, const std::string& message, const Value& extra);
-
-  /** \brief Return whether there are any errors.
-   * \return \c true if there are no errors to report \c false if
-   * errors have occurred.
-   */
-  bool good() const;
-
 private:
   enum TokenType {
     tokenEndOfStream = 0,
@@ -238,8 +196,135 @@ private:
   std::string commentsBefore_;
   Features features_;
   bool collectComments_;
+}; // Reader
+
+/** Interface for reading JSON from a char array.
+ */
+class JSON_API CharReader {
+public:
+  virtual ~CharReader() {}
+  /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
+  document.
+  * The document must be a UTF-8 encoded string containing the document to read.
+  *
+  * \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the
+  document to read.
+  * \param endDoc Pointer on the end of the UTF-8 encoded string of the
+  document to read.
+  * Must be >= beginDoc.
+  * \param root [out] Contains the root value of the document if it was
+  * successfully parsed.
+  * \param errs [out] Formatted error messages (if not NULL)
+  * a user friendly string that lists errors in the parsed
+  * document.
+  * \return \c true if the document was successfully parsed, \c false if an
+  error occurred.
+  */
+  virtual bool parse(
+      char const* beginDoc, char const* endDoc,
+      Value* root, std::string* errs) = 0;
+
+  class Factory {
+  public:
+    virtual ~Factory() {}
+    /** \brief Allocate a CharReader via operator new().
+     * \throw std::exception if something goes wrong (e.g. invalid settings)
+     */
+    virtual CharReader* newCharReader() const = 0;
+  }; // Factory
+}; // CharReader
+
+/** \brief Build a CharReader implementation.
+
+Usage:
+\code
+  using namespace Json;
+  CharReaderBuilder builder;
+  builder["collectComments"] = false;
+  Value value;
+  std::string errs;
+  bool ok = parseFromStream(builder, std::cin, &value, &errs);
+\endcode
+*/
+class JSON_API CharReaderBuilder : public CharReader::Factory {
+public:
+  // Note: We use a Json::Value so that we can add data-members to this class
+  // without a major version bump.
+  /** Configuration of this builder.
+    These are case-sensitive.
+    Available settings (case-sensitive):
+    - `"collectComments": false or true`
+      - true to collect comment and allow writing them
+        back during serialization, false to discard comments.
+        This parameter is ignored if allowComments is false.
+    - `"allowComments": false or true`
+      - true if comments are allowed.
+    - `"strictRoot": false or true`
+      - true if root must be either an array or an object value
+    - `"allowDroppedNullPlaceholders": false or true`
+      - true if dropped null placeholders are allowed. (See StreamWriterBuilder.)
+    - `"allowNumericKeys": false or true`
+      - true if numeric object keys are allowed.
+    - `"allowSingleQuotes": false or true`
+      - true if '' are allowed for strings (both keys and values)
+    - `"stackLimit": integer`
+      - Exceeding stackLimit (recursive depth of `readValue()`) will
+        cause an exception.
+      - This is a security issue (seg-faults caused by deeply nested JSON),
+        so the default is low.
+    - `"failIfExtra": false or true`
+      - If true, `parse()` returns false when extra non-whitespace trails
+        the JSON value in the input string.
+    - `"rejectDupKeys": false or true`
+      - If true, `parse()` returns false when a key is duplicated within an object.
+    - `"allowSpecialFloats": false or true`
+      - If true, special float values (NaNs and infinities) are allowed
+        and their values are lossfree restorable.
+
+    You can examine 'settings_` yourself
+    to see the defaults. You can also write and read them just like any
+    JSON Value.
+    \sa setDefaults()
+    */
+  Json::Value settings_;
+
+  CharReaderBuilder();
+  virtual ~CharReaderBuilder();
+
+  virtual CharReader* newCharReader() const;
+
+  /** \return true if 'settings' are legal and consistent;
+   *   otherwise, indicate bad settings via 'invalid'.
+   */
+  bool validate(Json::Value* invalid) const;
+
+  /** A simple way to update a specific setting.
+   */
+  Value& operator[](std::string key);
+
+  /** Called by ctor, but you can use this to reset settings_.
+   * \pre 'settings' != NULL (but Json::null is fine)
+   * \remark Defaults:
+   * \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
+   */
+  static void setDefaults(Json::Value* settings);
+  /** Same as old Features::strictMode().
+   * \pre 'settings' != NULL (but Json::null is fine)
+   * \remark Defaults:
+   * \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
+   */
+  static void strictMode(Json::Value* settings);
 };

+/** Consume entire stream and use its begin/end.
+ * Someday we might have a real StreamReader, but for now this
+ * is convenient.
+ */
+bool JSON_API parseFromStream(
+    CharReader::Factory const&,
+    std::istream&,
+    Value* root, std::string* errs);
+
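(Editor's sketch, not part of the diff: driving CharReader::parse() directly over an in-memory buffer, using only the declarations above; error reporting is trimmed for brevity.)

\code
#include <json/json.h>
#include <memory>
#include <string>

int main() {
  Json::CharReaderBuilder builder;
  Json::CharReaderBuilder::strictMode(&builder.settings_); // optional: old Features::strictMode()
  std::unique_ptr<Json::CharReader> const reader(builder.newCharReader());

  std::string doc = "{ \"encoding\" : \"UTF-8\" }";
  Json::Value root;
  std::string errs;
  bool ok = reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);
  return ok ? 0 : 1;
}
\endcode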
 /** \brief Read from 'sin' into 'root'.

 Always keep comments from the input JSON.
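(Editor's sketch, not part of the diff: the stream form this paragraph describes, reading a file rather than std::cin; `loadConfig` is a made-up helper.)

\code
#include <json/json.h>
#include <fstream>

Json::Value loadConfig(const char* path) {
  std::ifstream in(path);
  Json::Value root;
  in >> root;          // comments in the input are kept on 'root'
  return root;
}
\endcode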
include/json/value.h
@@ -11,6 +11,7 @@
 #endif // if !defined(JSON_IS_AMALGAMATION)
 #include <string>
 #include <vector>
+#include <exception>

 #ifndef JSON_USE_CPPTL_SMALLMAP
 #include <map>
@@ -28,10 +29,63 @@
 #pragma warning(disable : 4251)
 #endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)

+//Conditional NORETURN attribute on the throw functions would:
+// a) suppress false positives from static code analysis
+// b) possibly improve optimization opportunities.
+#if !defined(JSONCPP_NORETURN)
+# if defined(_MSC_VER)
+# define JSONCPP_NORETURN __declspec(noreturn)
+# elif defined(__GNUC__)
+# define JSONCPP_NORETURN __attribute__ ((__noreturn__))
+# else
+# define JSONCPP_NORETURN
+# endif
+#endif
+
 /** \brief JSON (JavaScript Object Notation).
  */
 namespace Json {

+/** Base class for all exceptions we throw.
+ *
+ * We use nothing but these internally. Of course, STL can throw others.
+ */
+class JSON_API Exception : public std::exception {
+public:
+  Exception(std::string const& msg);
+  virtual ~Exception() throw();
+  virtual char const* what() const throw();
+protected:
+  std::string const msg_;
+};
+
+/** Exceptions which the user cannot easily avoid.
+ *
+ * E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
+ *
+ * \remark derived from Json::Exception
+ */
+class JSON_API RuntimeError : public Exception {
+public:
+  RuntimeError(std::string const& msg);
+};
+
+/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
+ *
+ * These are precondition-violations (user bugs) and internal errors (our bugs).
+ *
+ * \remark derived from Json::Exception
+ */
+class JSON_API LogicError : public Exception {
+public:
+  LogicError(std::string const& msg);
+};
+
+/// used internally
+JSONCPP_NORETURN void throwRuntimeError(std::string const& msg);
+/// used internally
+JSONCPP_NORETURN void throwLogicError(std::string const& msg);
+
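(Editor's sketch, not part of the diff: how this exception hierarchy is meant to be caught. It assumes the amalgamated header with exceptions enabled; exactly which derived type is thrown for a given failure depends on the version and configuration, so both are shown.)

\code
#include <json/json.h>
#include <iostream>

void load(std::istream& in) {
  Json::Value root;
  try {
    in >> root;                           // may throw on malformed input
    std::cout << root.get("name", "unknown").asString() << '\n';
  } catch (const Json::LogicError& e) {   // precondition violations / internal logic errors
    std::cerr << "logic error: " << e.what() << '\n';
  } catch (const Json::RuntimeError& e) { // errors the caller cannot easily avoid
    std::cerr << "runtime error: " << e.what() << '\n';
  }
}
\endcode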
 /** \brief Type of the value held by a Value object.
  */
 enum ValueType {
@@ -74,14 +128,14 @@ enum CommentPlacement {
 */
 class JSON_API StaticString {
 public:
-  explicit StaticString(const char* czstring) : str_(czstring) {}
+  explicit StaticString(const char* czstring) : c_str_(czstring) {}

-  operator const char*() const { return str_; }
+  operator const char*() const { return c_str_; }

-  const char* c_str() const { return str_; }
+  const char* c_str() const { return c_str_; }

 private:
-  const char* str_;
+  const char* c_str_;
 };

 /** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
@@ -99,26 +153,27 @@ private:
 * The type of the held value is represented by a #ValueType and
 * can be obtained using type().
 *
-* values of an #objectValue or #arrayValue can be accessed using operator[]()
+* Values of an #objectValue or #arrayValue can be accessed using operator[]()
 * methods.
-* Non const methods will automatically create the a #nullValue element
+* Non-const methods will automatically create the a #nullValue element
 * if it does not exist.
-* The sequence of an #arrayValue will be automatically resize and initialized
+* The sequence of an #arrayValue will be automatically resized and initialized
 * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
 *
-* The get() methods can be used to obtanis default value in the case the
-*required element
-* does not exist.
+* The get() methods can be used to obtain default value in the case the
+* required element does not exist.
 *
 * It is possible to iterate over the list of a #objectValue values using
 * the getMemberNames() method.
+*
+* \note #Value string-length fit in size_t, but keys must be < 2^30.
+* (The reason is an implementation detail.) A #CharReader will raise an
+* exception if a bound is exceeded to avoid security holes in your app,
+* but the Value API does *not* check bounds. That is the responsibility
+* of the caller.
 */
 class JSON_API Value {
   friend class ValueIteratorBase;
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-  friend class ValueInternalLink;
-  friend class ValueInternalMap;
-#endif
 public:
   typedef std::vector<std::string> Members;
   typedef ValueIterator iterator;
@@ -133,7 +188,11 @@ public:
   typedef Json::LargestUInt LargestUInt;
   typedef Json::ArrayIndex ArrayIndex;

-  static const Value& null;
+  static const Value& nullRef;
+#if !defined(__ARMEL__)
+  /// \deprecated This exists for binary compatibility only. Use nullRef.
+  static const Value null;
+#endif
   /// Minimum signed integer value that can be stored in a Json::Value.
   static const LargestInt minLargestInt;
   /// Maximum signed integer value that can be stored in a Json::Value.
@@ -157,9 +216,13 @@ public:
   static const UInt64 maxUInt64;
 #endif // defined(JSON_HAS_INT64)

+  //MW: workaround for bug in NVIDIAs CUDA 7.5 nvcc compiler
+#ifdef __NVCC__
+public:
+#else
 private:
+#endif //__NVCC__
 #ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   class CZString {
   public:
     enum DuplicationPolicy {
@@ -168,20 +231,31 @@ private:
       duplicateOnCopy
     };
     CZString(ArrayIndex index);
-    CZString(const char* cstr, DuplicationPolicy allocate);
-    CZString(const CZString& other);
+    CZString(char const* str, unsigned length, DuplicationPolicy allocate);
+    CZString(CZString const& other);
     ~CZString();
     CZString& operator=(CZString other);
-    bool operator<(const CZString& other) const;
-    bool operator==(const CZString& other) const;
+    bool operator<(CZString const& other) const;
+    bool operator==(CZString const& other) const;
     ArrayIndex index() const;
-    const char* c_str() const;
+    //const char* c_str() const; ///< \deprecated
+    char const* data() const;
+    unsigned length() const;
     bool isStaticString() const;

   private:
     void swap(CZString& other);
-    const char* cstr_;
+
+    struct StringStorage {
+      unsigned policy_: 2;
+      unsigned length_: 30; // 1GB max
+    };
+
+    char const* cstr_; // actually, a prefixed string, unless policy is noDup
+    union {
       ArrayIndex index_;
+      StringStorage storage_;
+    };
   };

 public:
@@ -190,7 +264,6 @@ public:
 #else
   typedef CppTL::SmallMap<CZString, Value> ObjectValues;
 #endif // ifndef JSON_USE_CPPTL_SMALLMAP
-#endif // ifndef JSON_VALUE_USE_INTERNAL_MAP
 #endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION

 public:
@@ -217,20 +290,25 @@ Json::Value obj_value(Json::objectValue); // {}
   Value(UInt64 value);
 #endif // if defined(JSON_HAS_INT64)
   Value(double value);
-  Value(const char* value);
-  Value(const char* beginValue, const char* endValue);
+  Value(const char* value); ///< Copy til first 0. (NULL causes to seg-fault.)
+  Value(const char* begin, const char* end); ///< Copy all, incl zeroes.
   /** \brief Constructs a value from a static string.

   * Like other value string constructor but do not duplicate the string for
   * internal storage. The given string must remain alive after the call to this
   * constructor.
+  * \note This works only for null-terminated strings. (We cannot change the
+  * size of this class, so we have nowhere to store the length,
+  * which might be computed later for various operations.)
+  *
   * Example of usage:
   * \code
-  * Json::Value aValue( StaticString("some text") );
+  * static StaticString foo("some text");
+  * Json::Value aValue(foo);
   * \endcode
   */
   Value(const StaticString& value);
-  Value(const std::string& value);
+  Value(const std::string& value); ///< Copy data() til size(). Embedded zeroes too.
 #ifdef JSON_USE_CPPTL
   Value(const CppTL::ConstString& value);
 #endif
@@ -239,8 +317,9 @@ Json::Value obj_value(Json::objectValue); // {}
   Value(const Value& other);
   ~Value();

-  // Deep copy, then swap(other).
-  Value& operator=(Value other);
+  /// Deep copy, then swap(other).
+  /// \note Over-write existing comments. To preserve comments, use #swapPayload().
+  Value &operator=(const Value &other);
   /// Swap everything.
   void swap(Value& other);
   /// Swap values but leave comments and source offsets in place.
@@ -257,8 +336,13 @@ Json::Value obj_value(Json::objectValue); // {}
   bool operator!=(const Value& other) const;
   int compare(const Value& other) const;

-  const char* asCString() const;
-  std::string asString() const;
+  const char* asCString() const; ///< Embedded zeroes could cause you trouble!
+  std::string asString() const; ///< Embedded zeroes are possible.
+  /** Get raw char* of string-value.
+   *  \return false if !string. (Seg-fault if str or end are NULL.)
+   */
+  bool getString(
+      char const** begin, char const** end) const;
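(Editor's sketch, not part of the diff: getString() is the accessor to reach for when a string value may contain embedded zero bytes, since asCString() stops at the first NUL; `rawBytes` is a made-up helper.)

\code
#include <json/json.h>
#include <string>

std::string rawBytes(const Json::Value& v) {
  char const* begin = NULL;
  char const* end = NULL;
  if (v.getString(&begin, &end))       // false if v is not a string value
    return std::string(begin, end);    // keeps any embedded zeroes
  return std::string();
}
\endcode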
 #ifdef JSON_USE_CPPTL
   CppTL::ConstString asConstString() const;
 #endif
@@ -349,19 +433,23 @@ Json::Value obj_value(Json::objectValue); // {}
   Value& append(const Value& value);

   /// Access an object value by name, create a null member if it does not exist.
+  /// \note Because of our implementation, keys are limited to 2^30 -1 chars.
+  /// Exceeding that will cause an exception.
   Value& operator[](const char* key);
   /// Access an object value by name, returns null if there is no member with
   /// that name.
   const Value& operator[](const char* key) const;
   /// Access an object value by name, create a null member if it does not exist.
+  /// \param key may contain embedded nulls.
   Value& operator[](const std::string& key);
   /// Access an object value by name, returns null if there is no member with
   /// that name.
+  /// \param key may contain embedded nulls.
   const Value& operator[](const std::string& key) const;
   /** \brief Access an object value by name, create a null member if it does not
    exist.

-   * If the object as no entry for that name, then the member name used to store
+   * If the object has no entry for that name, then the member name used to store
    * the new entry is not duplicated.
    * Example of use:
    * \code
@@ -379,27 +467,69 @@ Json::Value obj_value(Json::objectValue); // {}
   const Value& operator[](const CppTL::ConstString& key) const;
 #endif
   /// Return the member named key if it exist, defaultValue otherwise.
+  /// \note deep copy
   Value get(const char* key, const Value& defaultValue) const;
   /// Return the member named key if it exist, defaultValue otherwise.
+  /// \note deep copy
+  /// \note key may contain embedded nulls.
+  Value get(const char* begin, const char* end, const Value& defaultValue) const;
+  /// Return the member named key if it exist, defaultValue otherwise.
+  /// \note deep copy
+  /// \param key may contain embedded nulls.
   Value get(const std::string& key, const Value& defaultValue) const;
 #ifdef JSON_USE_CPPTL
   /// Return the member named key if it exist, defaultValue otherwise.
+  /// \note deep copy
   Value get(const CppTL::ConstString& key, const Value& defaultValue) const;
 #endif
+  /// Most general and efficient version of isMember()const, get()const,
+  /// and operator[]const
+  /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
+  Value const* find(char const* begin, char const* end) const;
+  /// Most general and efficient version of object-mutators.
+  /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
+  /// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
+  Value const* demand(char const* begin, char const* end);
   /// \brief Remove and return the named member.
   ///
   /// Do nothing if it did not exist.
   /// \return the removed Value, or null.
   /// \pre type() is objectValue or nullValue
   /// \post type() is unchanged
+  /// \deprecated
   Value removeMember(const char* key);
   /// Same as removeMember(const char*)
+  /// \param key may contain embedded nulls.
+  /// \deprecated
   Value removeMember(const std::string& key);
+  /// Same as removeMember(const char* begin, const char* end, Value* removed),
+  /// but 'key' is null-terminated.
+  bool removeMember(const char* key, Value* removed);
+  /** \brief Remove the named map member.
+
+      Update 'removed' iff removed.
+      \param key may contain embedded nulls.
+      \return true iff removed (no exceptions)
+  */
+  bool removeMember(std::string const& key, Value* removed);
+  /// Same as removeMember(std::string const& key, Value* removed)
+  bool removeMember(const char* begin, const char* end, Value* removed);
+  /** \brief Remove the indexed array element.
+
+      O(n) expensive operations.
+      Update 'removed' iff removed.
+      \return true iff removed (no exceptions)
+  */
+  bool removeIndex(ArrayIndex i, Value* removed);
+
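(Editor's sketch, not part of the diff: the new bool-returning removal overloads report success and hand back the removed element, without relying on the deprecated return-by-value forms; `prune` is a made-up helper.)

\code
#include <json/json.h>
#include <iostream>

void prune(Json::Value& config) {
  Json::Value removed;
  if (config.removeMember("obsoleteKey", &removed))
    std::cout << "dropped: " << removed << '\n';

  Json::Value& plugins = config["plug-ins"];
  if (plugins.isArray() && plugins.size() > 0)
    plugins.removeIndex(plugins.size() - 1, &removed);  // O(n) for arrays
}
\endcode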
/// Return true if the object has a member named key.
|
/// Return true if the object has a member named key.
|
||||||
|
/// \note 'key' must be null-terminated.
|
||||||
bool isMember(const char* key) const;
|
bool isMember(const char* key) const;
|
||||||
/// Return true if the object has a member named key.
|
/// Return true if the object has a member named key.
|
||||||
|
/// \param key may contain embedded nulls.
|
||||||
bool isMember(const std::string& key) const;
|
bool isMember(const std::string& key) const;
|
||||||
|
/// Same as isMember(std::string const& key)const
|
||||||
|
bool isMember(const char* begin, const char* end) const;
|
||||||
#ifdef JSON_USE_CPPTL
|
#ifdef JSON_USE_CPPTL
|
||||||
/// Return true if the object has a member named key.
|
/// Return true if the object has a member named key.
|
||||||
bool isMember(const CppTL::ConstString& key) const;
|
bool isMember(const CppTL::ConstString& key) const;
|
||||||
@@ -417,9 +547,12 @@ Json::Value obj_value(Json::objectValue); // {}
|
|||||||
// EnumValues enumValues() const;
|
// EnumValues enumValues() const;
|
||||||
//# endif
|
//# endif
|
||||||
|
|
||||||
/// Comments must be //... or /* ... */
|
/// \deprecated Always pass len.
|
||||||
|
JSONCPP_DEPRECATED("Use setComment(std::string const&) instead.")
|
||||||
void setComment(const char* comment, CommentPlacement placement);
|
void setComment(const char* comment, CommentPlacement placement);
|
||||||
/// Comments must be //... or /* ... */
|
/// Comments must be //... or /* ... */
|
||||||
|
void setComment(const char* comment, size_t len, CommentPlacement placement);
|
||||||
|
/// Comments must be //... or /* ... */
|
||||||
void setComment(const std::string& comment, CommentPlacement placement);
|
void setComment(const std::string& comment, CommentPlacement placement);
|
||||||
bool hasComment(CommentPlacement placement) const;
|
bool hasComment(CommentPlacement placement) const;
|
||||||
/// Include delimiters and embedded newlines.
|
/// Include delimiters and embedded newlines.
|
||||||
@@ -433,36 +566,17 @@ Json::Value obj_value(Json::objectValue); // {}
|
|||||||
iterator begin();
|
iterator begin();
|
||||||
iterator end();
|
iterator end();
|
||||||
|
|
||||||
// Accessors for the [start, limit) range of bytes within the JSON text from
|
|
||||||
// which this value was parsed, if any.
|
|
||||||
void setOffsetStart(size_t start);
|
|
||||||
void setOffsetLimit(size_t limit);
|
|
||||||
size_t getOffsetStart() const;
|
|
||||||
size_t getOffsetLimit() const;
|
|
||||||
|
|
||||||
private:
|
private:
|
||||||
void initBasic(ValueType type, bool allocated = false);
|
void initBasic(ValueType type, bool allocated = false);
|
||||||
|
|
||||||
Value& resolveReference(const char* key, bool isStatic);
|
Value& resolveReference(const char* key);
|
||||||
|
Value& resolveReference(const char* key, const char* end);
|
||||||
|
|
||||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
|
|
||||||
inline bool isItemAvailable() const { return itemIsUsed_ == 0; }
|
|
||||||
|
|
||||||
inline void setItemUsed(bool isUsed = true) { itemIsUsed_ = isUsed ? 1 : 0; }
|
|
||||||
|
|
||||||
inline bool isMemberNameStatic() const { return memberNameIsStatic_ == 0; }
|
|
||||||
|
|
||||||
inline void setMemberNameIsStatic(bool isStatic) {
|
|
||||||
memberNameIsStatic_ = isStatic ? 1 : 0;
|
|
||||||
}
|
|
||||||
#endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP
|
|
||||||
|
|
||||||
private:
|
|
||||||
struct CommentInfo {
|
struct CommentInfo {
|
||||||
CommentInfo();
|
CommentInfo();
|
||||||
~CommentInfo();
|
~CommentInfo();
|
||||||
|
|
||||||
void setComment(const char* text);
|
void setComment(const char* text, size_t len);
|
||||||
|
|
||||||
char* comment_;
|
char* comment_;
|
||||||
};
|
};
|
||||||
@@ -481,26 +595,13 @@ private:
|
|||||||
LargestUInt uint_;
|
LargestUInt uint_;
|
||||||
double real_;
|
double real_;
|
||||||
bool bool_;
|
bool bool_;
|
||||||
char* string_;
|
char* string_; // actually ptr to unsigned, followed by str, unless !allocated_
|
||||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
|
|
||||||
ValueInternalArray* array_;
|
|
||||||
ValueInternalMap* map_;
|
|
||||||
#else
|
|
||||||
ObjectValues* map_;
|
ObjectValues* map_;
|
||||||
#endif
|
|
||||||
} value_;
|
} value_;
|
||||||
ValueType type_ : 8;
|
ValueType type_ : 8;
|
||||||
int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
|
unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
|
||||||
#ifdef JSON_VALUE_USE_INTERNAL_MAP
|
// If not allocated_, string_ must be null-terminated.
|
||||||
unsigned int itemIsUsed_ : 1; // used by the ValueInternalMap container.
|
|
||||||
int memberNameIsStatic_ : 1; // used by the ValueInternalMap container.
|
|
||||||
#endif
|
|
||||||
CommentInfo* comments_;
|
CommentInfo* comments_;
|
||||||
|
|
||||||
// [start, limit) byte offsets in the source JSON text from which this Value
|
|
||||||
// was extracted.
|
|
||||||
size_t start_;
|
|
||||||
size_t limit_;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/** \brief Experimental and untested: represents an element of the "path" to
|
/** \brief Experimental and untested: represents an element of the "path" to
|
||||||
@@ -566,345 +667,6 @@ private:
 Args args_;
 };

-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-/** \brief Allocator to customize Value internal map.
- * Below is an example of a simple implementation (default implementation
-actually
- * use memory pool for speed).
- * \code
-class DefaultValueMapAllocator : public ValueMapAllocator
-{
-public: // overridden from ValueMapAllocator
-virtual ValueInternalMap *newMap()
-{
-return new ValueInternalMap();
-}
-
-virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other )
-{
-return new ValueInternalMap( other );
-}
-
-virtual void destructMap( ValueInternalMap *map )
-{
-delete map;
-}
-
-virtual ValueInternalLink *allocateMapBuckets( unsigned int size )
-{
-return new ValueInternalLink[size];
-}
-
-virtual void releaseMapBuckets( ValueInternalLink *links )
-{
-delete [] links;
-}
-
-virtual ValueInternalLink *allocateMapLink()
-{
-return new ValueInternalLink();
-}
-
-virtual void releaseMapLink( ValueInternalLink *link )
-{
-delete link;
-}
-};
- * \endcode
- */
-class JSON_API ValueMapAllocator {
-public:
-virtual ~ValueMapAllocator();
-virtual ValueInternalMap* newMap() = 0;
-virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) = 0;
-virtual void destructMap(ValueInternalMap* map) = 0;
-virtual ValueInternalLink* allocateMapBuckets(unsigned int size) = 0;
-virtual void releaseMapBuckets(ValueInternalLink* links) = 0;
-virtual ValueInternalLink* allocateMapLink() = 0;
-virtual void releaseMapLink(ValueInternalLink* link) = 0;
-};
-
-/** \brief ValueInternalMap hash-map bucket chain link (for internal use only).
- * \internal previous_ & next_ allows for bidirectional traversal.
- */
-class JSON_API ValueInternalLink {
-public:
-enum {
-itemPerLink = 6
-}; // sizeof(ValueInternalLink) = 128 on 32 bits architecture.
-enum InternalFlags {
-flagAvailable = 0,
-flagUsed = 1
-};
-
-ValueInternalLink();
-
-~ValueInternalLink();
-
-Value items_[itemPerLink];
-char* keys_[itemPerLink];
-ValueInternalLink* previous_;
-ValueInternalLink* next_;
-};
-
-/** \brief A linked page based hash-table implementation used internally by
-*Value.
- * \internal ValueInternalMap is a tradional bucket based hash-table, with a
-*linked
- * list in each bucket to handle collision. There is an addional twist in that
- * each node of the collision linked list is a page containing a fixed amount of
- * value. This provides a better compromise between memory usage and speed.
- *
- * Each bucket is made up of a chained list of ValueInternalLink. The last
- * link of a given bucket can be found in the 'previous_' field of the following
-*bucket.
- * The last link of the last bucket is stored in tailLink_ as it has no
-*following bucket.
- * Only the last link of a bucket may contains 'available' item. The last link
-*always
- * contains at least one element unless is it the bucket one very first link.
- */
-class JSON_API ValueInternalMap {
-friend class ValueIteratorBase;
-friend class Value;
-
-public:
-typedef unsigned int HashKey;
-typedef unsigned int BucketIndex;
-
-#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-struct IteratorState {
-IteratorState() : map_(0), link_(0), itemIndex_(0), bucketIndex_(0) {}
-ValueInternalMap* map_;
-ValueInternalLink* link_;
-BucketIndex itemIndex_;
-BucketIndex bucketIndex_;
-};
-#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-
-ValueInternalMap();
-ValueInternalMap(const ValueInternalMap& other);
-ValueInternalMap& operator=(ValueInternalMap other);
-~ValueInternalMap();
-
-void swap(ValueInternalMap& other);
-
-BucketIndex size() const;
-
-void clear();
-
-bool reserveDelta(BucketIndex growth);
-
-bool reserve(BucketIndex newItemCount);
-
-const Value* find(const char* key) const;
-
-Value* find(const char* key);
-
-Value& resolveReference(const char* key, bool isStatic);
-
-void remove(const char* key);
-
-void doActualRemove(ValueInternalLink* link,
-BucketIndex index,
-BucketIndex bucketIndex);
-
-ValueInternalLink*& getLastLinkInBucket(BucketIndex bucketIndex);
-
-Value& setNewItem(const char* key,
-bool isStatic,
-ValueInternalLink* link,
-BucketIndex index);
-
-Value& unsafeAdd(const char* key, bool isStatic, HashKey hashedKey);
-
-HashKey hash(const char* key) const;
-
-int compare(const ValueInternalMap& other) const;
-
-private:
-void makeBeginIterator(IteratorState& it) const;
-void makeEndIterator(IteratorState& it) const;
-static bool equals(const IteratorState& x, const IteratorState& other);
-static void increment(IteratorState& iterator);
-static void incrementBucket(IteratorState& iterator);
-static void decrement(IteratorState& iterator);
-static const char* key(const IteratorState& iterator);
-static const char* key(const IteratorState& iterator, bool& isStatic);
-static Value& value(const IteratorState& iterator);
-static int distance(const IteratorState& x, const IteratorState& y);
-
-private:
-ValueInternalLink* buckets_;
-ValueInternalLink* tailLink_;
-BucketIndex bucketsSize_;
-BucketIndex itemCount_;
-};
-
-/** \brief A simplified deque implementation used internally by Value.
-* \internal
-* It is based on a list of fixed "page", each page contains a fixed number of
-*items.
-* Instead of using a linked-list, a array of pointer is used for fast item
-*look-up.
-* Look-up for an element is as follow:
-* - compute page index: pageIndex = itemIndex / itemsPerPage
-* - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage]
-*
-* Insertion is amortized constant time (only the array containing the index of
-*pointers
-* need to be reallocated when items are appended).
-*/
-class JSON_API ValueInternalArray {
-friend class Value;
-friend class ValueIteratorBase;
-
-public:
-enum {
-itemsPerPage = 8
-}; // should be a power of 2 for fast divide and modulo.
-typedef Value::ArrayIndex ArrayIndex;
-typedef unsigned int PageIndex;
-
-#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-struct IteratorState // Must be a POD
-{
-IteratorState() : array_(0), currentPageIndex_(0), currentItemIndex_(0) {}
-ValueInternalArray* array_;
-Value** currentPageIndex_;
-unsigned int currentItemIndex_;
-};
-#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-
-ValueInternalArray();
-ValueInternalArray(const ValueInternalArray& other);
-ValueInternalArray& operator=(ValueInternalArray other);
-~ValueInternalArray();
-void swap(ValueInternalArray& other);
-
-void clear();
-void resize(ArrayIndex newSize);
-
-Value& resolveReference(ArrayIndex index);
-
-Value* find(ArrayIndex index) const;
-
-ArrayIndex size() const;
-
-int compare(const ValueInternalArray& other) const;
-
-private:
-static bool equals(const IteratorState& x, const IteratorState& other);
-static void increment(IteratorState& iterator);
-static void decrement(IteratorState& iterator);
-static Value& dereference(const IteratorState& iterator);
-static Value& unsafeDereference(const IteratorState& iterator);
-static int distance(const IteratorState& x, const IteratorState& y);
-static ArrayIndex indexOf(const IteratorState& iterator);
-void makeBeginIterator(IteratorState& it) const;
-void makeEndIterator(IteratorState& it) const;
-void makeIterator(IteratorState& it, ArrayIndex index) const;
-
-void makeIndexValid(ArrayIndex index);
-
-Value** pages_;
-ArrayIndex size_;
-PageIndex pageCount_;
-};
-
-/** \brief Experimental: do not use. Allocator to customize Value internal
-array.
- * Below is an example of a simple implementation (actual implementation use
- * memory pool).
-\code
-class DefaultValueArrayAllocator : public ValueArrayAllocator
-{
-public: // overridden from ValueArrayAllocator
-virtual ~DefaultValueArrayAllocator()
-{
-}
-
-virtual ValueInternalArray *newArray()
-{
-return new ValueInternalArray();
-}
-
-virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other )
-{
-return new ValueInternalArray( other );
-}
-
-virtual void destruct( ValueInternalArray *array )
-{
-delete array;
-}
-
-virtual void reallocateArrayPageIndex( Value **&indexes,
-ValueInternalArray::PageIndex
-&indexCount,
-ValueInternalArray::PageIndex
-minNewIndexCount )
-{
-ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1;
-if ( minNewIndexCount > newIndexCount )
-newIndexCount = minNewIndexCount;
-void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
-if ( !newIndexes )
-throw std::bad_alloc();
-indexCount = newIndexCount;
-indexes = static_cast<Value **>( newIndexes );
-}
-virtual void releaseArrayPageIndex( Value **indexes,
-ValueInternalArray::PageIndex indexCount )
-{
-if ( indexes )
-free( indexes );
-}
-
-virtual Value *allocateArrayPage()
-{
-return static_cast<Value *>( malloc( sizeof(Value) *
-ValueInternalArray::itemsPerPage ) );
-}
-
-virtual void releaseArrayPage( Value *value )
-{
-if ( value )
-free( value );
-}
-};
-\endcode
-*/
-class JSON_API ValueArrayAllocator {
-public:
-virtual ~ValueArrayAllocator();
-virtual ValueInternalArray* newArray() = 0;
-virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) = 0;
-virtual void destructArray(ValueInternalArray* array) = 0;
-/** \brief Reallocate array page index.
-* Reallocates an array of pointer on each page.
-* \param indexes [input] pointer on the current index. May be \c NULL.
-* [output] pointer on the new index of at least
-* \a minNewIndexCount pages.
-* \param indexCount [input] current number of pages in the index.
-* [output] number of page the reallocated index can handle.
-* \b MUST be >= \a minNewIndexCount.
-* \param minNewIndexCount Minimum number of page the new index must be able
-* to
-* handle.
-*/
-virtual void
-reallocateArrayPageIndex(Value**& indexes,
-ValueInternalArray::PageIndex& indexCount,
-ValueInternalArray::PageIndex minNewIndexCount) = 0;
-virtual void
-releaseArrayPageIndex(Value** indexes,
-ValueInternalArray::PageIndex indexCount) = 0;
-virtual Value* allocateArrayPage() = 0;
-virtual void releaseArrayPage(Value* value) = 0;
-};
-#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
-
 /** \brief base class for Value iterators.
  *
  */
@@ -915,32 +677,35 @@ public:
 typedef int difference_type;
 typedef ValueIteratorBase SelfType;

-ValueIteratorBase();
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
-#else
-ValueIteratorBase(const ValueInternalArray::IteratorState& state);
-ValueIteratorBase(const ValueInternalMap::IteratorState& state);
-#endif

 bool operator==(const SelfType& other) const { return isEqual(other); }

 bool operator!=(const SelfType& other) const { return !isEqual(other); }

 difference_type operator-(const SelfType& other) const {
-  return computeDistance(other);
+  return other.computeDistance(*this);
 }

 /// Return either the index or the member name of the referenced value as a
 /// Value.
 Value key() const;

-/// Return the index of the referenced Value. -1 if it is not an arrayValue.
+/// Return the index of the referenced Value, or -1 if it is not an arrayValue.
 UInt index() const;

+/// Return the member name of the referenced Value, or "" if it is not an
+/// objectValue.
+/// \note Avoid `c_str()` on result, as embedded zeroes are possible.
+std::string name() const;

 /// Return the member name of the referenced Value. "" if it is not an
 /// objectValue.
-const char* memberName() const;
+/// \deprecated This cannot be used for UTF-8 strings, since there can be embedded nulls.
+JSONCPP_DEPRECATED("Use `key = name();` instead.")
+char const* memberName() const;
+/// Return the member name of the referenced Value, or NULL if it is not an
+/// objectValue.
+/// \note Better version than memberName(). Allows embedded nulls.
+char const* memberName(char const** end) const;

 protected:
 Value& deref() const;
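The hunk above adds `ValueIteratorBase::name()` and deprecates `memberName()` because member keys may contain embedded zero bytes that a `const char*` cannot convey. A small usage sketch, assuming the jsoncpp headers shown in this compare:

```cpp
#include <json/value.h>
#include <iostream>
#include <string>

// Walk an objectValue and print each member; name() returns std::string,
// so keys with embedded '\0' survive, unlike the deprecated memberName().
void printMembers(const Json::Value& obj) {
  for (Json::Value::const_iterator it = obj.begin(); it != obj.end(); ++it) {
    const std::string key = it.name();
    std::cout << key << " -> " << it->toStyledString();
  }
}
```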
@@ -956,17 +721,15 @@ protected:
 void copy(const SelfType& other);

 private:
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
 Value::ObjectValues::iterator current_;
 // Indicates that iterator is for a null value.
 bool isNull_;
-#else
-union {
-ValueInternalArray::IteratorState array_;
-ValueInternalMap::IteratorState map_;
-} iterator_;
-bool isArray_;
-#endif
+
+public:
+// For some reason, BORLAND needs these at the end, rather
+// than earlier. No idea why.
+ValueIteratorBase();
+explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
 };

 /** \brief const iterator for object and array value.
@@ -977,8 +740,8 @@ class JSON_API ValueConstIterator : public ValueIteratorBase {

 public:
 typedef const Value value_type;
-typedef unsigned int size_t;
-typedef int difference_type;
+//typedef unsigned int size_t;
+//typedef int difference_type;
 typedef const Value& reference;
 typedef const Value* pointer;
 typedef ValueConstIterator SelfType;
@@ -988,12 +751,7 @@ public:
 private:
 /*! \internal Use by Value to create an iterator.
  */
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
 explicit ValueConstIterator(const Value::ObjectValues::iterator& current);
-#else
-ValueConstIterator(const ValueInternalArray::IteratorState& state);
-ValueConstIterator(const ValueInternalMap::IteratorState& state);
-#endif
 public:
 SelfType& operator=(const ValueIteratorBase& other);

@@ -1044,12 +802,7 @@ public:
 private:
 /*! \internal Use by Value to create an iterator.
  */
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
 explicit ValueIterator(const Value::ObjectValues::iterator& current);
-#else
-ValueIterator(const ValueInternalArray::IteratorState& state);
-ValueIterator(const ValueInternalMap::IteratorState& state);
-#endif
 public:
 SelfType& operator=(const SelfType& other);

@@ -1082,6 +835,14 @@ public:

 } // namespace Json


+namespace std {
+/// Specialize std::swap() for Json::Value.
+template<>
+inline void swap(Json::Value& a, Json::Value& b) { a.swap(b); }
+}


 #if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
 #pragma warning(pop)
 #endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
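With the `std::swap` specialization added above, generic code that swaps values picks up `Json::Value::swap()` and exchanges the two payloads without deep-copying either document. A minimal usage sketch:

```cpp
#include <json/value.h>
#include <utility>

void swapExample() {
  Json::Value a("left");
  Json::Value b("right");
  std::swap(a, b); // forwards to Json::Value::swap(); a is now "right", b is "left"
}
```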
@@ -1,13 +1,12 @@
-// DO NOT EDIT. This file is generated by CMake from "version"
-// and "version.h.in" files.
+// DO NOT EDIT. This file (and "version") is generated by CMake.
 // Run CMake configure step to update it.
 #ifndef JSON_VERSION_H_INCLUDED
 # define JSON_VERSION_H_INCLUDED

-# define JSONCPP_VERSION_STRING "1.2.0"
-# define JSONCPP_VERSION_MAJOR 1
-# define JSONCPP_VERSION_MINOR 2
-# define JSONCPP_VERSION_PATCH 0
+# define JSONCPP_VERSION_STRING "0.10.7"
+# define JSONCPP_VERSION_MAJOR 0
+# define JSONCPP_VERSION_MINOR 10
+# define JSONCPP_VERSION_PATCH 7
 # define JSONCPP_VERSION_QUALIFIER
 # define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))

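`JSONCPP_VERSION_HEXA` packs major, minor, and patch into one integer that compares numerically; for 0.10.7 it evaluates to `(0 << 24) | (10 << 16) | (7 << 8)` = 0x000A0700. A sketch of a compile-time check, assuming the usual `json/version.h` include path:

```cpp
#include <json/version.h>
#include <cstdio>

// Require at least jsoncpp 0.10.7 at compile time using the packed version.
#if JSONCPP_VERSION_HEXA < ((0 << 24) | (10 << 16) | (7 << 8))
#error "jsoncpp 0.10.7 or newer is required"
#endif

int main() {
  std::printf("built against jsoncpp %s (0x%08x)\n",
              JSONCPP_VERSION_STRING, (unsigned)JSONCPP_VERSION_HEXA);
  return 0;
}
```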
@@ -11,6 +11,7 @@
 #endif // if !defined(JSON_IS_AMALGAMATION)
 #include <vector>
 #include <string>
+#include <ostream>

 // Disable warning C4251: <data member>: <type> needs to have dll-interface to
 // be used by...
@@ -23,7 +24,119 @@ namespace Json {

 class Value;

+/**
+
+Usage:
+\code
+  using namespace Json;
+  void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
+    std::unique_ptr<StreamWriter> const writer(
+      factory.newStreamWriter());
+    writer->write(value, &std::cout);
+    std::cout << std::endl;  // add lf and flush
+  }
+\endcode
+*/
+class JSON_API StreamWriter {
+protected:
+  std::ostream* sout_;  // not owned; will not delete
+public:
+  StreamWriter();
+  virtual ~StreamWriter();
+  /** Write Value into document as configured in sub-class.
+      Do not take ownership of sout, but maintain a reference during function.
+      \pre sout != NULL
+      \return zero on success (For now, we always return zero, so check the stream instead.)
+      \throw std::exception possibly, depending on configuration
+   */
+  virtual int write(Value const& root, std::ostream* sout) = 0;
+
+  /** \brief A simple abstract factory.
+   */
+  class JSON_API Factory {
+  public:
+    virtual ~Factory();
+    /** \brief Allocate a CharReader via operator new().
+     * \throw std::exception if something goes wrong (e.g. invalid settings)
+     */
+    virtual StreamWriter* newStreamWriter() const = 0;
+  };  // Factory
+};  // StreamWriter
+
+/** \brief Write into stringstream, then return string, for convenience.
+ * A StreamWriter will be created from the factory, used, and then deleted.
+ */
+std::string JSON_API writeString(StreamWriter::Factory const& factory, Value const& root);
+
+
+/** \brief Build a StreamWriter implementation.
+
+Usage:
+\code
+  using namespace Json;
+  Value value = ...;
+  StreamWriterBuilder builder;
+  builder["commentStyle"] = "None";
+  builder["indentation"] = " ";  // or whatever you like
+  std::unique_ptr<Json::StreamWriter> writer(
+    builder.newStreamWriter());
+  writer->write(value, &std::cout);
+  std::cout << std::endl;  // add lf and flush
+\endcode
+*/
+class JSON_API StreamWriterBuilder : public StreamWriter::Factory {
+public:
+  // Note: We use a Json::Value so that we can add data-members to this class
+  // without a major version bump.
+  /** Configuration of this builder.
+      Available settings (case-sensitive):
+      - "commentStyle": "None" or "All"
+      - "indentation": "<anything>"
+      - "enableYAMLCompatibility": false or true
+        - slightly change the whitespace around colons
+      - "dropNullPlaceholders": false or true
+        - Drop the "null" string from the writer's output for nullValues.
+          Strictly speaking, this is not valid JSON. But when the output is being
+          fed to a browser's Javascript, it makes for smaller output and the
+          browser can handle the output just fine.
+      - "useSpecialFloats": false or true
+        - If true, outputs non-finite floating point values in the following way:
+          NaN values as "NaN", positive infinity as "Infinity", and negative infinity
+          as "-Infinity".
+
+      You can examine 'settings_` yourself
+      to see the defaults. You can also write and read them just like any
+      JSON Value.
+      \sa setDefaults()
+   */
+  Json::Value settings_;
+
+  StreamWriterBuilder();
+  virtual ~StreamWriterBuilder();
+
+  /**
+   * \throw std::exception if something goes wrong (e.g. invalid settings)
+   */
+  virtual StreamWriter* newStreamWriter() const;
+
+  /** \return true if 'settings' are legal and consistent;
+   *   otherwise, indicate bad settings via 'invalid'.
+   */
+  bool validate(Json::Value* invalid) const;
+  /** A simple way to update a specific setting.
+   */
+  Value& operator[](std::string key);
+
+  /** Called by ctor, but you can use this to reset settings_.
+   * \pre 'settings' != NULL (but Json::null is fine)
+   * \remark Defaults:
+   * \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults
+   */
+  static void setDefaults(Json::Value* settings);
+};
+
 /** \brief Abstract class for writers.
+ * \deprecated Use StreamWriter. (And really, this is an implementation detail.)
  */
 class JSON_API Writer {
 public:
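Tying the pieces above together: the builder is configured through the `settings_` keys it documents, and `writeString()` is the convenience path when a `std::string` is all you need. A short sketch:

```cpp
#include <json/writer.h>
#include <json/value.h>
#include <string>

// Serialize a Value with the factory API added above. The setting keys are
// the ones documented in StreamWriterBuilder::settings_.
std::string toCompactJson(const Json::Value& root) {
  Json::StreamWriterBuilder builder;
  builder["commentStyle"] = "None";
  builder["indentation"] = "";   // single-line output
  return Json::writeString(builder, root);
}
```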
@@ -39,23 +152,16 @@ public:
 *consumption,
 * but may be usefull to support feature such as RPC where bandwith is limited.
 * \sa Reader, Value
+ * \deprecated Use StreamWriterBuilder.
 */
 class JSON_API FastWriter : public Writer {

 public:
 FastWriter();
 virtual ~FastWriter() {}

 void enableYAMLCompatibility();

-/** \brief Drop the "null" string from the writer's output for nullValues.
- * Strictly speaking, this is not valid JSON. But when the output is being
- * fed to a browser's Javascript, it makes for smaller output and the
- * browser can handle the output just fine.
- */
-void dropNullPlaceholders();
-
-void omitEndingLineFeed();

 public: // overridden from Writer
 virtual std::string write(const Value& root);

@@ -64,8 +170,6 @@ private:

 std::string document_;
 bool yamlCompatiblityEnabled_;
-bool dropNullPlaceholders_;
-bool omitEndingLineFeed_;
 };

 /** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
@@ -90,6 +194,7 @@ private:
 *#CommentPlacement.
 *
 * \sa Reader, Value, Value::setComment()
+ * \deprecated Use StreamWriterBuilder.
 */
 class JSON_API StyledWriter : public Writer {
 public:
@@ -151,6 +256,7 @@ private:
 *
 * \param indentation Each level will be indented by this amount extra.
 * \sa Reader, Value, Value::setComment()
+ * \deprecated Use StreamWriterBuilder.
 */
 class JSON_API StyledStreamWriter {
 public:
@@ -187,7 +293,8 @@ private:
 std::string indentString_;
 int rightMargin_;
 std::string indentation_;
-bool addChildValues_;
+bool addChildValues_ : 1;
+bool indented_ : 1;
 };

 #if defined(JSON_HAS_INT64)
@@ -178,15 +178,6 @@
 <File
 RelativePath="..\..\include\json\json.h">
 </File>
-<File
-RelativePath="..\..\src\lib_json\json_batchallocator.h">
-</File>
-<File
-RelativePath="..\..\src\lib_json\json_internalarray.inl">
-</File>
-<File
-RelativePath="..\..\src\lib_json\json_internalmap.inl">
-</File>
 <File
 RelativePath="..\..\src\lib_json\json_reader.cpp">
 </File>
@@ -1,3 +1,8 @@
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 """Tag the sandbox for release, make source and doc tarballs.

 Requires Python 2.6
@@ -14,6 +19,7 @@ python makerelease.py 0.5.0 0.6.0-dev
 Note: This was for Subversion. Now that we are in GitHub, we do not
 need to build versioned tarballs anymore, so makerelease.py is defunct.
 """

 from __future__ import print_function
 import os.path
 import subprocess
@@ -1,7 +1,7 @@
 prefix=@CMAKE_INSTALL_PREFIX@
 exec_prefix=${prefix}
-libdir=${exec_prefix}/@LIBRARY_INSTALL_DIR@
-includedir=${prefix}/@INCLUDE_INSTALL_DIR@
+libdir=@LIBRARY_INSTALL_DIR@
+includedir=@INCLUDE_INSTALL_DIR@

 Name: jsoncpp
 Description: A C++ library for interacting with JSON
@@ -1,3 +1,8 @@
+# Copyright 2009 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 import fnmatch
 import os

@@ -1,3 +1,8 @@
+# Copyright 2007 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 import os
 import os.path
 from fnmatch import fnmatch
@@ -161,8 +166,7 @@ def generate(env):
 Add builders and construction variables for the
 SrcDist tool.
 """
-## doxyfile_scanner = env.Scanner(
-## DoxySourceScan,
+## doxyfile_scanner = env.Scanner(## DoxySourceScan,
 ## "DoxySourceScan",
 ## scan_check = DoxySourceScanCheck,
 ##)
@@ -1,3 +1,8 @@
+# Copyright 2010 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 import re
 from SCons.Script import * # the usual scons stuff you get in a SConscript
 import collections
@@ -1,3 +1,8 @@
+# Copyright 2007 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 """tarball

 Tool-specific initialization for tarball.
@@ -2,4 +2,4 @@ ADD_SUBDIRECTORY(lib_json)
 IF(JSONCPP_WITH_TESTS)
 ADD_SUBDIRECTORY(jsontestrunner)
 ADD_SUBDIRECTORY(test_lib_json)
-ENDIF(JSONCPP_WITH_TESTS)
+ENDIF()
@@ -1,22 +1,25 @@
-FIND_PACKAGE(PythonInterp 2.6 REQUIRED)
+FIND_PACKAGE(PythonInterp 2.6)

-IF(JSONCPP_LIB_BUILD_SHARED)
-ADD_DEFINITIONS( -DJSON_DLL )
-ENDIF(JSONCPP_LIB_BUILD_SHARED)

 ADD_EXECUTABLE(jsontestrunner_exe
 main.cpp
 )

+IF(BUILD_SHARED_LIBS)
+ADD_DEFINITIONS( -DJSON_DLL )
 TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib)
+ELSE(BUILD_SHARED_LIBS)
+TARGET_LINK_LIBRARIES(jsontestrunner_exe jsoncpp_lib_static)
+ENDIF()

 SET_TARGET_PROPERTIES(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)

 IF(PYTHONINTERP_FOUND)
 # Run end to end parser/writer tests
 SET(TEST_DIR ${CMAKE_CURRENT_SOURCE_DIR}/../../test)
 SET(RUNJSONTESTS_PATH ${TEST_DIR}/runjsontests.py)
-ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests ALL
+ADD_CUSTOM_TARGET(jsoncpp_readerwriter_tests
 "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
 DEPENDS jsontestrunner_exe jsoncpp_test
 )
 ADD_CUSTOM_TARGET(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)
-ENDIF(PYTHONINTERP_FOUND)
+ENDIF()
@@ -8,12 +8,22 @@

 #include <json/json.h>
 #include <algorithm> // sort
+#include <sstream>
 #include <stdio.h>

 #if defined(_MSC_VER) && _MSC_VER >= 1310
 #pragma warning(disable : 4996) // disable fopen deprecation warning
 #endif

+struct Options
+{
+  std::string path;
+  Json::Features features;
+  bool parseOnly;
+  typedef std::string (*writeFuncType)(Json::Value const&);
+  writeFuncType write;
+};
+
 static std::string normalizeFloatingPointStr(double value) {
 char buffer[32];
 #if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__)
@@ -129,43 +139,67 @@ printValueTree(FILE* fout, Json::Value& value, const std::string& path = ".") {
 static int parseAndSaveValueTree(const std::string& input,
                                  const std::string& actual,
                                  const std::string& kind,
-                                 Json::Value& root,
                                  const Json::Features& features,
-                                 bool parseOnly) {
+                                 bool parseOnly,
+                                 Json::Value* root)
+{
 Json::Reader reader(features);
-bool parsingSuccessful = reader.parse(input, root);
+bool parsingSuccessful = reader.parse(input, *root);
 if (!parsingSuccessful) {
 printf("Failed to parse %s file: \n%s\n",
 kind.c_str(),
 reader.getFormattedErrorMessages().c_str());
 return 1;
 }

 if (!parseOnly) {
 FILE* factual = fopen(actual.c_str(), "wt");
 if (!factual) {
 printf("Failed to create %s actual file.\n", kind.c_str());
 return 2;
 }
-printValueTree(factual, root);
+printValueTree(factual, *root);
 fclose(factual);
 }
 return 0;
 }
+// static std::string useFastWriter(Json::Value const& root) {
-static int rewriteValueTree(const std::string& rewritePath,
-                            const Json::Value& root,
-                            std::string& rewrite) {
 // Json::FastWriter writer;
 // writer.enableYAMLCompatibility();
+// return writer.write(root);
+// }
+static std::string useStyledWriter(
+    Json::Value const& root)
+{
 Json::StyledWriter writer;
-rewrite = writer.write(root);
+return writer.write(root);
+}
+static std::string useStyledStreamWriter(
+    Json::Value const& root)
+{
+Json::StyledStreamWriter writer;
+std::ostringstream sout;
+writer.write(sout, root);
+return sout.str();
+}
+static std::string useBuiltStyledStreamWriter(
+    Json::Value const& root)
+{
+Json::StreamWriterBuilder builder;
+return Json::writeString(builder, root);
+}
+static int rewriteValueTree(
+    const std::string& rewritePath,
+    const Json::Value& root,
+    Options::writeFuncType write,
+    std::string* rewrite)
+{
+*rewrite = write(root);
 FILE* fout = fopen(rewritePath.c_str(), "wt");
 if (!fout) {
 printf("Failed to create rewrite file: %s\n", rewritePath.c_str());
 return 2;
 }
-fprintf(fout, "%s\n", rewrite.c_str());
+fprintf(fout, "%s\n", rewrite->c_str());
 fclose(fout);
 return 0;
 }
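The refactor above funnels every serialisation path through `Options::writeFuncType`, so choosing a writer is just storing a function pointer. A standalone sketch of the same dispatch pattern (hypothetical names, not the test runner itself), mirroring the `--json-writer` handling in the next hunk:

```cpp
#include <json/json.h>
#include <iostream>
#include <map>
#include <string>

// Each writer variant is a free function with one common signature,
// selected by name at runtime.
typedef std::string (*WriteFunc)(Json::Value const&);

static std::string withStyledWriter(Json::Value const& root) {
  Json::StyledWriter writer;
  return writer.write(root);
}

static std::string withStreamWriterBuilder(Json::Value const& root) {
  Json::StreamWriterBuilder builder;
  return Json::writeString(builder, root);
}

int main() {
  std::map<std::string, WriteFunc> writers;
  writers["StyledWriter"] = &withStyledWriter;
  writers["BuiltStyledStreamWriter"] = &withStreamWriterBuilder;

  Json::Value root;
  root["hello"] = "world";
  std::cout << writers["StyledWriter"](root);
  return 0;
}
```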
@@ -194,84 +228,98 @@ static int printUsage(const char* argv[]) {
 return 3;
 }

-int parseCommandLine(int argc,
-                     const char* argv[],
-                     Json::Features& features,
-                     std::string& path,
-                     bool& parseOnly) {
-parseOnly = false;
+static int parseCommandLine(
+    int argc, const char* argv[], Options* opts)
+{
+opts->parseOnly = false;
+opts->write = &useStyledWriter;
 if (argc < 2) {
 return printUsage(argv);
 }

 int index = 1;
-if (std::string(argv[1]) == "--json-checker") {
-features = Json::Features::strictMode();
-parseOnly = true;
+if (std::string(argv[index]) == "--json-checker") {
+opts->features = Json::Features::strictMode();
+opts->parseOnly = true;
 ++index;
 }
-if (std::string(argv[1]) == "--json-config") {
+if (std::string(argv[index]) == "--json-config") {
 printConfig();
 return 3;
 }
+if (std::string(argv[index]) == "--json-writer") {
+++index;
+std::string const writerName(argv[index++]);
+if (writerName == "StyledWriter") {
+opts->write = &useStyledWriter;
+} else if (writerName == "StyledStreamWriter") {
+opts->write = &useStyledStreamWriter;
+} else if (writerName == "BuiltStyledStreamWriter") {
+opts->write = &useBuiltStyledStreamWriter;
+} else {
+printf("Unknown '--json-writer %s'\n", writerName.c_str());
+return 4;
+}
+}
 if (index == argc || index + 1 < argc) {
 return printUsage(argv);
 }
-path = argv[index];
+opts->path = argv[index];
 return 0;
 }
+static int runTest(Options const& opts)
+{
+int exitCode = 0;

-int main(int argc, const char* argv[]) {
-std::string path;
-Json::Features features;
-bool parseOnly;
-int exitCode = parseCommandLine(argc, argv, features, path, parseOnly);
-if (exitCode != 0) {
-return exitCode;
-}
-
-try {
-std::string input = readInputTestFile(path.c_str());
+std::string input = readInputTestFile(opts.path.c_str());
 if (input.empty()) {
-printf("Failed to read input or empty input: %s\n", path.c_str());
+printf("Failed to read input or empty input: %s\n", opts.path.c_str());
 return 3;
 }

-std::string basePath = removeSuffix(argv[1], ".json");
-if (!parseOnly && basePath.empty()) {
+std::string basePath = removeSuffix(opts.path, ".json");
+if (!opts.parseOnly && basePath.empty()) {
 printf("Bad input path. Path does not end with '.expected':\n%s\n",
-path.c_str());
+opts.path.c_str());
 return 3;
 }

-std::string actualPath = basePath + ".actual";
-std::string rewritePath = basePath + ".rewrite";
-std::string rewriteActualPath = basePath + ".actual-rewrite";
+std::string const actualPath = basePath + ".actual";
+std::string const rewritePath = basePath + ".rewrite";
+std::string const rewriteActualPath = basePath + ".actual-rewrite";

 Json::Value root;
 exitCode = parseAndSaveValueTree(
-input, actualPath, "input", root, features, parseOnly);
-if (exitCode == 0 && !parseOnly) {
+input, actualPath, "input",
+opts.features, opts.parseOnly, &root);
+if (exitCode || opts.parseOnly) {
+return exitCode;
+}
 std::string rewrite;
-exitCode = rewriteValueTree(rewritePath, root, rewrite);
-if (exitCode == 0) {
+exitCode = rewriteValueTree(rewritePath, root, opts.write, &rewrite);
+if (exitCode) {
+return exitCode;
+}
 Json::Value rewriteRoot;
-exitCode = parseAndSaveValueTree(rewrite,
-                                 rewriteActualPath,
-                                 "rewrite",
-                                 rewriteRoot,
-                                 features,
-                                 parseOnly);
+exitCode = parseAndSaveValueTree(
+rewrite, rewriteActualPath, "rewrite",
+opts.features, opts.parseOnly, &rewriteRoot);
+if (exitCode) {
+return exitCode;
 }
+return 0;
 }
+int main(int argc, const char* argv[]) {
+Options opts;
+int exitCode = parseCommandLine(argc, argv, &opts);
+if (exitCode != 0) {
+printf("Failed to parse command-line.");
+return exitCode;
+}
+try {
+return runTest(opts);
 }
 catch (const std::exception& e) {
 printf("Unhandled exception:\n%s\n", e.what());
-exitCode = 1;
+return 1;
 }

-return exitCode;
 }
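Judging from the option parsing above, an invocation of the rebuilt test runner would look something like `jsontestrunner_exe --json-writer StyledStreamWriter <path>.json`, with `--json-checker` still switching the reader into strict, parse-only mode; the executable name comes from the `OUTPUT_NAME` set in the jsontestrunner CMake hunk elsewhere in this compare.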
@@ -1,15 +1,3 @@
-OPTION(JSONCPP_LIB_BUILD_SHARED "Build jsoncpp_lib as a shared library." OFF)
-IF(BUILD_SHARED_LIBS)
-SET(JSONCPP_LIB_BUILD_SHARED ON)
-ENDIF(BUILD_SHARED_LIBS)
-
-IF(JSONCPP_LIB_BUILD_SHARED)
-SET(JSONCPP_LIB_TYPE SHARED)
-ADD_DEFINITIONS( -DJSON_DLL_BUILD )
-ELSE(JSONCPP_LIB_BUILD_SHARED)
-SET(JSONCPP_LIB_TYPE STATIC)
-ENDIF(JSONCPP_LIB_BUILD_SHARED)
-
 if( CMAKE_COMPILER_IS_GNUCXX )
 #Get compiler version.
 execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion
@@ -36,35 +24,57 @@ SET( PUBLIC_HEADERS

 SOURCE_GROUP( "Public API" FILES ${PUBLIC_HEADERS} )

-ADD_LIBRARY( jsoncpp_lib ${JSONCPP_LIB_TYPE}
-    ${PUBLIC_HEADERS}
+SET(jsoncpp_sources
     json_tool.h
     json_reader.cpp
-    json_batchallocator.h
     json_valueiterator.inl
     json_value.cpp
     json_writer.cpp
-    version.h.in
-    )
-SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp )
-SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR} )
-
-IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
-    TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
-        $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
-        $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
-        )
-ENDIF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
+    version.h.in)

 # Install instructions for this target
 IF(JSONCPP_WITH_CMAKE_PACKAGE)
 SET(INSTALL_EXPORT EXPORT jsoncpp)
 ELSE(JSONCPP_WITH_CMAKE_PACKAGE)
 SET(INSTALL_EXPORT)
-ENDIF(JSONCPP_WITH_CMAKE_PACKAGE)
+ENDIF()

+IF(BUILD_SHARED_LIBS)
+ADD_DEFINITIONS( -DJSON_DLL_BUILD )
+ADD_LIBRARY(jsoncpp_lib SHARED ${PUBLIC_HEADERS} ${jsoncpp_sources})
+SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
+SET_TARGET_PROPERTIES( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp
+    DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
+
 INSTALL( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
 RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
-LIBRARY DESTINATION ${LIBRARY_INSTALL_DIR}
-ARCHIVE DESTINATION ${ARCHIVE_INSTALL_DIR}
+LIBRARY DESTINATION "${LIBRARY_INSTALL_DIR}/${CMAKE_LIBRARY_ARCHITECTURE}"
+ARCHIVE DESTINATION "${ARCHIVE_INSTALL_DIR}/${CMAKE_LIBRARY_ARCHITECTURE}")

+IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
+TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib PUBLIC
+    $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
+    $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>)
+ENDIF()
+
+ENDIF()
+
+IF(BUILD_STATIC_LIBS)
+ADD_LIBRARY(jsoncpp_lib_static STATIC ${PUBLIC_HEADERS} ${jsoncpp_sources})
+SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_VERSION_MAJOR})
+SET_TARGET_PROPERTIES( jsoncpp_lib_static PROPERTIES OUTPUT_NAME jsoncpp
+    DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
+
+INSTALL( TARGETS jsoncpp_lib_static ${INSTALL_EXPORT}
+RUNTIME DESTINATION ${RUNTIME_INSTALL_DIR}
+LIBRARY DESTINATION "${LIBRARY_INSTALL_DIR}/${CMAKE_LIBRARY_ARCHITECTURE}"
+ARCHIVE DESTINATION "${ARCHIVE_INSTALL_DIR}/${CMAKE_LIBRARY_ARCHITECTURE}")
+
+IF(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
+TARGET_INCLUDE_DIRECTORIES( jsoncpp_lib_static PUBLIC
+    $<INSTALL_INTERFACE:${INCLUDE_INSTALL_DIR}>
+    $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
 )
+ENDIF()
+
+ENDIF()
@@ -1,121 +0,0 @@
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef JSONCPP_BATCHALLOCATOR_H_INCLUDED
-#define JSONCPP_BATCHALLOCATOR_H_INCLUDED
-
-#include <stdlib.h>
-#include <assert.h>
-
-#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-
-namespace Json {
-
-/* Fast memory allocator.
- *
- * This memory allocator allocates memory for a batch of object (specified by
- * the page size, the number of object in each page).
- *
- * It does not allow the destruction of a single object. All the allocated
- * objects can be destroyed at once. The memory can be either released or reused
- * for future allocation.
- *
- * The in-place new operator must be used to construct the object using the
- * pointer returned by allocate.
- */
-template <typename AllocatedType, const unsigned int objectPerAllocation>
-class BatchAllocator {
-public:
-BatchAllocator(unsigned int objectsPerPage = 255)
-: freeHead_(0), objectsPerPage_(objectsPerPage) {
-// printf( "Size: %d => %s\n", sizeof(AllocatedType),
-// typeid(AllocatedType).name() );
-assert(sizeof(AllocatedType) * objectPerAllocation >=
-sizeof(AllocatedType*)); // We must be able to store a slist in the
-// object free space.
-assert(objectsPerPage >= 16);
-batches_ = allocateBatch(0); // allocated a dummy page
-currentBatch_ = batches_;
-}
-
-~BatchAllocator() {
-for (BatchInfo* batch = batches_; batch;) {
-BatchInfo* nextBatch = batch->next_;
-free(batch);
-batch = nextBatch;
-}
-}
-
-/// allocate space for an array of objectPerAllocation object.
-/// @warning it is the responsability of the caller to call objects
-/// constructors.
-AllocatedType* allocate() {
-if (freeHead_) // returns node from free list.
-{
-AllocatedType* object = freeHead_;
-freeHead_ = *(AllocatedType**)object;
-return object;
-}
-if (currentBatch_->used_ == currentBatch_->end_) {
-currentBatch_ = currentBatch_->next_;
-while (currentBatch_ && currentBatch_->used_ == currentBatch_->end_)
-currentBatch_ = currentBatch_->next_;
-
-if (!currentBatch_) // no free batch found, allocate a new one
-{
-currentBatch_ = allocateBatch(objectsPerPage_);
-currentBatch_->next_ = batches_; // insert at the head of the list
-batches_ = currentBatch_;
-}
-}
-AllocatedType* allocated = currentBatch_->used_;
-currentBatch_->used_ += objectPerAllocation;
-return allocated;
-}
-
-/// Release the object.
-/// @warning it is the responsability of the caller to actually destruct the
-/// object.
-void release(AllocatedType* object) {
-assert(object != 0);
-*(AllocatedType**)object = freeHead_;
-freeHead_ = object;
-}
-
-private:
-struct BatchInfo {
-BatchInfo* next_;
-AllocatedType* used_;
-AllocatedType* end_;
-AllocatedType buffer_[objectPerAllocation];
-};
-
-// disabled copy constructor and assignement operator.
-BatchAllocator(const BatchAllocator&);
-void operator=(const BatchAllocator&);
-
-static BatchInfo* allocateBatch(unsigned int objectsPerPage) {
-const unsigned int mallocSize =
-sizeof(BatchInfo) - sizeof(AllocatedType) * objectPerAllocation +
-sizeof(AllocatedType) * objectPerAllocation * objectsPerPage;
-BatchInfo* batch = static_cast<BatchInfo*>(malloc(mallocSize));
-batch->next_ = 0;
-batch->used_ = batch->buffer_;
-batch->end_ = batch->buffer_ + objectsPerPage;
-return batch;
-}
-
-BatchInfo* batches_;
-BatchInfo* currentBatch_;
-/// Head of a single linked list within the allocated space of freeed object
-AllocatedType* freeHead_;
-unsigned int objectsPerPage_;
-};
-
-} // namespace Json
-
-#endif // ifndef JSONCPP_DOC_INCLUDE_IMPLEMENTATION
-
-#endif // JSONCPP_BATCHALLOCATOR_H_INCLUDED
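The removed `BatchAllocator` hands back raw, uninitialised storage; as its own comment states, callers construct with placement new and destroy by hand before releasing the slot. A hedged sketch of how the (now deleted) class was meant to be used:

```cpp
#include <new>

// Hypothetical usage of the removed Json::BatchAllocator: allocate() returns
// raw storage for one object, so construction and destruction are manual.
struct Node { int value; Node() : value(0) {} };

void batchAllocatorSketch() {
  Json::BatchAllocator<Node, 1> allocator; // default: 255 objects per page
  Node* raw = allocator.allocate();        // uninitialised slot
  Node* node = new (raw) Node();           // placement new constructs in place
  node->value = 42;
  node->~Node();                           // destroy manually...
  allocator.release(node);                 // ...then return the slot to the free list
}
```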
@@ -1,360 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

// included by json_value.cpp

namespace Json {

// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueInternalArray
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////

ValueArrayAllocator::~ValueArrayAllocator() {}

// //////////////////////////////////////////////////////////////////
// class DefaultValueArrayAllocator
// //////////////////////////////////////////////////////////////////
#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
class DefaultValueArrayAllocator : public ValueArrayAllocator {
public: // overridden from ValueArrayAllocator
  virtual ~DefaultValueArrayAllocator() {}

  virtual ValueInternalArray* newArray() { return new ValueInternalArray(); }

  virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) {
    return new ValueInternalArray(other);
  }

  virtual void destructArray(ValueInternalArray* array) { delete array; }

  virtual void
  reallocateArrayPageIndex(Value**& indexes,
                           ValueInternalArray::PageIndex& indexCount,
                           ValueInternalArray::PageIndex minNewIndexCount) {
    ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1;
    if (minNewIndexCount > newIndexCount)
      newIndexCount = minNewIndexCount;
    void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount);
    JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
    indexCount = newIndexCount;
    indexes = static_cast<Value**>(newIndexes);
  }
  virtual void releaseArrayPageIndex(Value** indexes,
                                     ValueInternalArray::PageIndex indexCount) {
    if (indexes)
      free(indexes);
  }

  virtual Value* allocateArrayPage() {
    return static_cast<Value*>(
        malloc(sizeof(Value) * ValueInternalArray::itemsPerPage));
  }

  virtual void releaseArrayPage(Value* value) {
    if (value)
      free(value);
  }
};

#else // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
/// @todo make this thread-safe (lock when accessing batch allocator)
class DefaultValueArrayAllocator : public ValueArrayAllocator {
public: // overridden from ValueArrayAllocator
  virtual ~DefaultValueArrayAllocator() {}

  virtual ValueInternalArray* newArray() {
    ValueInternalArray* array = arraysAllocator_.allocate();
    new (array) ValueInternalArray(); // placement new
    return array;
  }

  virtual ValueInternalArray* newArrayCopy(const ValueInternalArray& other) {
    ValueInternalArray* array = arraysAllocator_.allocate();
    new (array) ValueInternalArray(other); // placement new
    return array;
  }

  virtual void destructArray(ValueInternalArray* array) {
    if (array) {
      array->~ValueInternalArray();
      arraysAllocator_.release(array);
    }
  }

  virtual void
  reallocateArrayPageIndex(Value**& indexes,
                           ValueInternalArray::PageIndex& indexCount,
                           ValueInternalArray::PageIndex minNewIndexCount) {
    ValueInternalArray::PageIndex newIndexCount = (indexCount * 3) / 2 + 1;
    if (minNewIndexCount > newIndexCount)
      newIndexCount = minNewIndexCount;
    void* newIndexes = realloc(indexes, sizeof(Value*) * newIndexCount);
    JSON_ASSERT_MESSAGE(newIndexes, "Couldn't realloc.");
    indexCount = newIndexCount;
    indexes = static_cast<Value**>(newIndexes);
  }
  virtual void releaseArrayPageIndex(Value** indexes,
                                     ValueInternalArray::PageIndex indexCount) {
    if (indexes)
      free(indexes);
  }

  virtual Value* allocateArrayPage() {
    return static_cast<Value*>(pagesAllocator_.allocate());
  }

  virtual void releaseArrayPage(Value* value) {
    if (value)
      pagesAllocator_.release(value);
  }

private:
  BatchAllocator<ValueInternalArray, 1> arraysAllocator_;
  BatchAllocator<Value, ValueInternalArray::itemsPerPage> pagesAllocator_;
};
#endif // #ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR

static ValueArrayAllocator*& arrayAllocator() {
  static DefaultValueArrayAllocator defaultAllocator;
  static ValueArrayAllocator* arrayAllocator = &defaultAllocator;
  return arrayAllocator;
}

static struct DummyArrayAllocatorInitializer {
  DummyArrayAllocatorInitializer() {
    arrayAllocator(); // ensure arrayAllocator() statics are initialized before
                      // main().
  }
} dummyArrayAllocatorInitializer;

// //////////////////////////////////////////////////////////////////
// class ValueInternalArray
// //////////////////////////////////////////////////////////////////
bool ValueInternalArray::equals(const IteratorState& x,
                                const IteratorState& other) {
  return x.array_ == other.array_ &&
         x.currentItemIndex_ == other.currentItemIndex_ &&
         x.currentPageIndex_ == other.currentPageIndex_;
}

void ValueInternalArray::increment(IteratorState& it) {
  JSON_ASSERT_MESSAGE(
      it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage +
                           it.currentItemIndex_ !=
                       it.array_->size_,
      "ValueInternalArray::increment(): moving iterator beyond end");
  ++(it.currentItemIndex_);
  if (it.currentItemIndex_ == itemsPerPage) {
    it.currentItemIndex_ = 0;
    ++(it.currentPageIndex_);
  }
}

void ValueInternalArray::decrement(IteratorState& it) {
  JSON_ASSERT_MESSAGE(
      it.array_ && it.currentPageIndex_ == it.array_->pages_ &&
          it.currentItemIndex_ == 0,
      "ValueInternalArray::decrement(): moving iterator beyond end");
  if (it.currentItemIndex_ == 0) {
    it.currentItemIndex_ = itemsPerPage - 1;
    --(it.currentPageIndex_);
  } else {
    --(it.currentItemIndex_);
  }
}

Value& ValueInternalArray::unsafeDereference(const IteratorState& it) {
  return (*(it.currentPageIndex_))[it.currentItemIndex_];
}

Value& ValueInternalArray::dereference(const IteratorState& it) {
  JSON_ASSERT_MESSAGE(
      it.array_ && (it.currentPageIndex_ - it.array_->pages_) * itemsPerPage +
                           it.currentItemIndex_ <
                       it.array_->size_,
      "ValueInternalArray::dereference(): dereferencing invalid iterator");
  return unsafeDereference(it);
}

void ValueInternalArray::makeBeginIterator(IteratorState& it) const {
  it.array_ = const_cast<ValueInternalArray*>(this);
  it.currentItemIndex_ = 0;
  it.currentPageIndex_ = pages_;
}

void ValueInternalArray::makeIterator(IteratorState& it,
                                      ArrayIndex index) const {
  it.array_ = const_cast<ValueInternalArray*>(this);
  it.currentItemIndex_ = index % itemsPerPage;
  it.currentPageIndex_ = pages_ + index / itemsPerPage;
}

void ValueInternalArray::makeEndIterator(IteratorState& it) const {
  makeIterator(it, size_);
}

ValueInternalArray::ValueInternalArray() : pages_(0), size_(0), pageCount_(0) {}

ValueInternalArray::ValueInternalArray(const ValueInternalArray& other)
    : pages_(0), size_(other.size_), pageCount_(0) {
  PageIndex minNewPages = other.size_ / itemsPerPage;
  arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages);
  JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages,
                      "ValueInternalArray::reserve(): bad reallocation");
  IteratorState itOther;
  other.makeBeginIterator(itOther);
  Value* value;
  for (ArrayIndex index = 0; index < size_; ++index, increment(itOther)) {
    if (index % itemsPerPage == 0) {
      PageIndex pageIndex = index / itemsPerPage;
      value = arrayAllocator()->allocateArrayPage();
      pages_[pageIndex] = value;
    }
    new (value) Value(dereference(itOther));
  }
}

ValueInternalArray& ValueInternalArray::operator=(ValueInternalArray other) {
  swap(other);
  return *this;
}

ValueInternalArray::~ValueInternalArray() {
  // destroy all constructed items
  IteratorState it;
  IteratorState itEnd;
  makeBeginIterator(it);
  makeEndIterator(itEnd);
  for (; !equals(it, itEnd); increment(it)) {
    Value* value = &dereference(it);
    value->~Value();
  }
  // release all pages
  PageIndex lastPageIndex = size_ / itemsPerPage;
  for (PageIndex pageIndex = 0; pageIndex < lastPageIndex; ++pageIndex)
    arrayAllocator()->releaseArrayPage(pages_[pageIndex]);
  // release pages index
  arrayAllocator()->releaseArrayPageIndex(pages_, pageCount_);
}

void ValueInternalArray::swap(ValueInternalArray& other) {
  Value** tempPages = pages_;
  pages_ = other.pages_;
  other.pages_ = tempPages;
  ArrayIndex tempSize = size_;
  size_ = other.size_;
  other.size_ = tempSize;
  PageIndex tempPageCount = pageCount_;
  pageCount_ = other.pageCount_;
  other.pageCount_ = tempPageCount;
}

void ValueInternalArray::clear() {
  ValueInternalArray dummy;
  swap(dummy);
}

void ValueInternalArray::resize(ArrayIndex newSize) {
  if (newSize == 0)
    clear();
  else if (newSize < size_) {
    IteratorState it;
    IteratorState itEnd;
    makeIterator(it, newSize);
    makeIterator(itEnd, size_);
    for (; !equals(it, itEnd); increment(it)) {
      Value* value = &dereference(it);
      value->~Value();
    }
    PageIndex pageIndex = (newSize + itemsPerPage - 1) / itemsPerPage;
    PageIndex lastPageIndex = size_ / itemsPerPage;
    for (; pageIndex < lastPageIndex; ++pageIndex)
      arrayAllocator()->releaseArrayPage(pages_[pageIndex]);
    size_ = newSize;
  } else if (newSize > size_)
    resolveReference(newSize);
}

void ValueInternalArray::makeIndexValid(ArrayIndex index) {
  // Need to enlarge page index ?
  if (index >= pageCount_ * itemsPerPage) {
    PageIndex minNewPages = (index + 1) / itemsPerPage;
    arrayAllocator()->reallocateArrayPageIndex(pages_, pageCount_, minNewPages);
    JSON_ASSERT_MESSAGE(pageCount_ >= minNewPages,
                        "ValueInternalArray::reserve(): bad reallocation");
  }

  // Need to allocate new pages ?
  ArrayIndex nextPageIndex = (size_ % itemsPerPage) != 0
                                 ? size_ - (size_ % itemsPerPage) + itemsPerPage
                                 : size_;
  if (nextPageIndex <= index) {
    PageIndex pageIndex = nextPageIndex / itemsPerPage;
    PageIndex pageToAllocate = (index - nextPageIndex) / itemsPerPage + 1;
    for (; pageToAllocate-- > 0; ++pageIndex)
      pages_[pageIndex] = arrayAllocator()->allocateArrayPage();
  }

  // Initialize all new entries
  IteratorState it;
  IteratorState itEnd;
  makeIterator(it, size_);
  size_ = index + 1;
  makeIterator(itEnd, size_);
  for (; !equals(it, itEnd); increment(it)) {
    Value* value = &dereference(it);
    new (value) Value(); // Construct a default value using placement new
  }
}

Value& ValueInternalArray::resolveReference(ArrayIndex index) {
  if (index >= size_)
    makeIndexValid(index);
  return pages_[index / itemsPerPage][index % itemsPerPage];
}

Value* ValueInternalArray::find(ArrayIndex index) const {
  if (index >= size_)
    return 0;
  return &(pages_[index / itemsPerPage][index % itemsPerPage]);
}

ValueInternalArray::ArrayIndex ValueInternalArray::size() const {
  return size_;
}

int ValueInternalArray::distance(const IteratorState& x,
                                 const IteratorState& y) {
  return indexOf(y) - indexOf(x);
}

ValueInternalArray::ArrayIndex
ValueInternalArray::indexOf(const IteratorState& iterator) {
  if (!iterator.array_)
    return ArrayIndex(-1);
  return ArrayIndex((iterator.currentPageIndex_ - iterator.array_->pages_) *
                        itemsPerPage +
                    iterator.currentItemIndex_);
}

int ValueInternalArray::compare(const ValueInternalArray& other) const {
  int sizeDiff(size_ - other.size_);
  if (sizeDiff != 0)
    return sizeDiff;

  for (ArrayIndex index = 0; index < size_; ++index) {
    int diff = pages_[index / itemsPerPage][index % itemsPerPage].compare(
        other.pages_[index / itemsPerPage][index % itemsPerPage]);
    if (diff != 0)
      return diff;
  }
  return 0;
}

} // namespace Json
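The removed ValueInternalArray keeps elements in fixed-size pages plus a reallocatable page index, so element i lives at pages_[i / itemsPerPage][i % itemsPerPage], exactly as resolveReference() and find() above compute. A standalone sketch of that addressing scheme, with invented names (PagedVector, kItemsPerPage) and not the library's code:

#include <cstddef>
#include <vector>

const std::size_t kItemsPerPage = 8; // small fixed page size, for illustration

template <typename T>
class PagedVector {
public:
  T& operator[](std::size_t i) {
    const std::size_t page = i / kItemsPerPage; // which page holds the element
    const std::size_t slot = i % kItemsPerPage; // offset inside that page
    while (pages_.size() <= page)               // grow the page index on demand
      pages_.push_back(std::vector<T>(kItemsPerPage)); // allocate a whole page
    if (i >= size_)
      size_ = i + 1;
    return pages_[page][slot];
  }
  std::size_t size() const { return size_; }

private:
  std::vector<std::vector<T> > pages_; // page index: one vector per page
  std::size_t size_ = 0;
};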
@@ -1,473 +0,0 @@
// Copyright 2007-2010 Baptiste Lepilleur
// Distributed under MIT license, or public domain if desired and
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE

// included by json_value.cpp

namespace Json {

// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// class ValueInternalMap
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////
// //////////////////////////////////////////////////////////////////

/** \internal MUST be safely initialized using memset( this, 0,
 * sizeof(ValueInternalLink) );
 * This optimization is used by the fast allocator.
 */
ValueInternalLink::ValueInternalLink() : previous_(0), next_(0) {}

ValueInternalLink::~ValueInternalLink() {
  for (int index = 0; index < itemPerLink; ++index) {
    if (!items_[index].isItemAvailable()) {
      if (!items_[index].isMemberNameStatic())
        free(keys_[index]);
    } else
      break;
  }
}

ValueMapAllocator::~ValueMapAllocator() {}

#ifdef JSON_USE_SIMPLE_INTERNAL_ALLOCATOR
class DefaultValueMapAllocator : public ValueMapAllocator {
public: // overridden from ValueMapAllocator
  virtual ValueInternalMap* newMap() { return new ValueInternalMap(); }

  virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) {
    return new ValueInternalMap(other);
  }

  virtual void destructMap(ValueInternalMap* map) { delete map; }

  virtual ValueInternalLink* allocateMapBuckets(unsigned int size) {
    return new ValueInternalLink[size];
  }

  virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; }

  virtual ValueInternalLink* allocateMapLink() {
    return new ValueInternalLink();
  }

  virtual void releaseMapLink(ValueInternalLink* link) { delete link; }
};
#else
/// @todo make this thread-safe (lock when accessing batch allocator)
class DefaultValueMapAllocator : public ValueMapAllocator {
public: // overridden from ValueMapAllocator
  virtual ValueInternalMap* newMap() {
    ValueInternalMap* map = mapsAllocator_.allocate();
    new (map) ValueInternalMap(); // placement new
    return map;
  }

  virtual ValueInternalMap* newMapCopy(const ValueInternalMap& other) {
    ValueInternalMap* map = mapsAllocator_.allocate();
    new (map) ValueInternalMap(other); // placement new
    return map;
  }

  virtual void destructMap(ValueInternalMap* map) {
    if (map) {
      map->~ValueInternalMap();
      mapsAllocator_.release(map);
    }
  }

  virtual ValueInternalLink* allocateMapBuckets(unsigned int size) {
    return new ValueInternalLink[size];
  }

  virtual void releaseMapBuckets(ValueInternalLink* links) { delete[] links; }

  virtual ValueInternalLink* allocateMapLink() {
    ValueInternalLink* link = linksAllocator_.allocate();
    memset(link, 0, sizeof(ValueInternalLink));
    return link;
  }

  virtual void releaseMapLink(ValueInternalLink* link) {
    link->~ValueInternalLink();
    linksAllocator_.release(link);
  }

private:
  BatchAllocator<ValueInternalMap, 1> mapsAllocator_;
  BatchAllocator<ValueInternalLink, 1> linksAllocator_;
};
#endif

static ValueMapAllocator*& mapAllocator() {
  static DefaultValueMapAllocator defaultAllocator;
  static ValueMapAllocator* mapAllocator = &defaultAllocator;
  return mapAllocator;
}

static struct DummyMapAllocatorInitializer {
  DummyMapAllocatorInitializer() {
    mapAllocator(); // ensure mapAllocator() statics are initialized before
                    // main().
  }
} dummyMapAllocatorInitializer;

// h(K) = value * K >> w ; with w = 32 & K prime w.r.t. 2^32.

/*
use linked list hash map.
buckets array is a container.
linked list element contains 6 key/values. (memory = (16+4) * 6 + 4 = 124)
values have an extra state: valid, available, deleted
*/

ValueInternalMap::ValueInternalMap()
    : buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) {}

ValueInternalMap::ValueInternalMap(const ValueInternalMap& other)
    : buckets_(0), tailLink_(0), bucketsSize_(0), itemCount_(0) {
  reserve(other.itemCount_);
  IteratorState it;
  IteratorState itEnd;
  other.makeBeginIterator(it);
  other.makeEndIterator(itEnd);
  for (; !equals(it, itEnd); increment(it)) {
    bool isStatic;
    const char* memberName = key(it, isStatic);
    const Value& aValue = value(it);
    resolveReference(memberName, isStatic) = aValue;
  }
}

ValueInternalMap& ValueInternalMap::operator=(ValueInternalMap other) {
  swap(other);
  return *this;
}

ValueInternalMap::~ValueInternalMap() {
  if (buckets_) {
    for (BucketIndex bucketIndex = 0; bucketIndex < bucketsSize_;
         ++bucketIndex) {
      ValueInternalLink* link = buckets_[bucketIndex].next_;
      while (link) {
        ValueInternalLink* linkToRelease = link;
        link = link->next_;
        mapAllocator()->releaseMapLink(linkToRelease);
      }
    }
    mapAllocator()->releaseMapBuckets(buckets_);
  }
}

void ValueInternalMap::swap(ValueInternalMap& other) {
  ValueInternalLink* tempBuckets = buckets_;
  buckets_ = other.buckets_;
  other.buckets_ = tempBuckets;
  ValueInternalLink* tempTailLink = tailLink_;
  tailLink_ = other.tailLink_;
  other.tailLink_ = tempTailLink;
  BucketIndex tempBucketsSize = bucketsSize_;
  bucketsSize_ = other.bucketsSize_;
  other.bucketsSize_ = tempBucketsSize;
  BucketIndex tempItemCount = itemCount_;
  itemCount_ = other.itemCount_;
  other.itemCount_ = tempItemCount;
}

void ValueInternalMap::clear() {
  ValueInternalMap dummy;
  swap(dummy);
}

ValueInternalMap::BucketIndex ValueInternalMap::size() const {
  return itemCount_;
}

bool ValueInternalMap::reserveDelta(BucketIndex growth) {
  return reserve(itemCount_ + growth);
}

bool ValueInternalMap::reserve(BucketIndex newItemCount) {
  if (!buckets_ && newItemCount > 0) {
    buckets_ = mapAllocator()->allocateMapBuckets(1);
    bucketsSize_ = 1;
    tailLink_ = &buckets_[0];
  }
  // BucketIndex idealBucketCount = (newItemCount +
  // ValueInternalLink::itemPerLink) / ValueInternalLink::itemPerLink;
  return true;
}

const Value* ValueInternalMap::find(const char* key) const {
  if (!bucketsSize_)
    return 0;
  HashKey hashedKey = hash(key);
  BucketIndex bucketIndex = hashedKey % bucketsSize_;
  for (const ValueInternalLink* current = &buckets_[bucketIndex]; current != 0;
       current = current->next_) {
    for (BucketIndex index = 0; index < ValueInternalLink::itemPerLink;
         ++index) {
      if (current->items_[index].isItemAvailable())
        return 0;
      if (strcmp(key, current->keys_[index]) == 0)
        return &current->items_[index];
    }
  }
  return 0;
}

Value* ValueInternalMap::find(const char* key) {
  const ValueInternalMap* constThis = this;
  return const_cast<Value*>(constThis->find(key));
}

Value& ValueInternalMap::resolveReference(const char* key, bool isStatic) {
  HashKey hashedKey = hash(key);
  if (bucketsSize_) {
    BucketIndex bucketIndex = hashedKey % bucketsSize_;
    ValueInternalLink** previous = 0;
    BucketIndex index;
    for (ValueInternalLink* current = &buckets_[bucketIndex]; current != 0;
         previous = &current->next_, current = current->next_) {
      for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
        if (current->items_[index].isItemAvailable())
          return setNewItem(key, isStatic, current, index);
        if (strcmp(key, current->keys_[index]) == 0)
          return current->items_[index];
      }
    }
  }

  reserveDelta(1);
  return unsafeAdd(key, isStatic, hashedKey);
}

void ValueInternalMap::remove(const char* key) {
  HashKey hashedKey = hash(key);
  if (!bucketsSize_)
    return;
  BucketIndex bucketIndex = hashedKey % bucketsSize_;
  for (ValueInternalLink* link = &buckets_[bucketIndex]; link != 0;
       link = link->next_) {
    BucketIndex index;
    for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
      if (link->items_[index].isItemAvailable())
        return;
      if (strcmp(key, link->keys_[index]) == 0) {
        doActualRemove(link, index, bucketIndex);
        return;
      }
    }
  }
}

void ValueInternalMap::doActualRemove(ValueInternalLink* link,
                                      BucketIndex index,
                                      BucketIndex bucketIndex) {
  // find last item of the bucket and swap it with the 'removed' one.
  // set removed items flags to 'available'.
  // if last page only contains 'available' items, then deallocate it (it's
  // empty)
  ValueInternalLink*& lastLink = getLastLinkInBucket(index);
  BucketIndex lastItemIndex = 1; // a link can never be empty, so start at 1
  for (; lastItemIndex < ValueInternalLink::itemPerLink;
       ++lastItemIndex) // may be optimized with dichotomic search
  {
    if (lastLink->items_[lastItemIndex].isItemAvailable())
      break;
  }

  BucketIndex lastUsedIndex = lastItemIndex - 1;
  Value* valueToDelete = &link->items_[index];
  Value* valueToPreserve = &lastLink->items_[lastUsedIndex];
  if (valueToDelete != valueToPreserve)
    valueToDelete->swap(*valueToPreserve);
  if (lastUsedIndex == 0) // page is now empty
  { // remove it from bucket linked list and delete it.
    ValueInternalLink* linkPreviousToLast = lastLink->previous_;
    if (linkPreviousToLast != 0) // cannot delete the bucket link itself.
    {
      mapAllocator()->releaseMapLink(lastLink);
      linkPreviousToLast->next_ = 0;
      lastLink = linkPreviousToLast;
    }
  } else {
    Value dummy;
    valueToPreserve->swap(dummy); // restore deleted to default Value.
    valueToPreserve->setItemUsed(false);
  }
  --itemCount_;
}

ValueInternalLink*&
ValueInternalMap::getLastLinkInBucket(BucketIndex bucketIndex) {
  if (bucketIndex == bucketsSize_ - 1)
    return tailLink_;
  ValueInternalLink*& previous = buckets_[bucketIndex + 1].previous_;
  if (!previous)
    previous = &buckets_[bucketIndex];
  return previous;
}

Value& ValueInternalMap::setNewItem(const char* key,
                                    bool isStatic,
                                    ValueInternalLink* link,
                                    BucketIndex index) {
  char* duplicatedKey = makeMemberName(key);
  ++itemCount_;
  link->keys_[index] = duplicatedKey;
  link->items_[index].setItemUsed();
  link->items_[index].setMemberNameIsStatic(isStatic);
  return link->items_[index]; // items already default constructed.
}

Value&
ValueInternalMap::unsafeAdd(const char* key, bool isStatic, HashKey hashedKey) {
  JSON_ASSERT_MESSAGE(bucketsSize_ > 0,
                      "ValueInternalMap::unsafeAdd(): internal logic error.");
  BucketIndex bucketIndex = hashedKey % bucketsSize_;
  ValueInternalLink*& previousLink = getLastLinkInBucket(bucketIndex);
  ValueInternalLink* link = previousLink;
  BucketIndex index;
  for (index = 0; index < ValueInternalLink::itemPerLink; ++index) {
    if (link->items_[index].isItemAvailable())
      break;
  }
  if (index == ValueInternalLink::itemPerLink) // need to add a new page
  {
    ValueInternalLink* newLink = mapAllocator()->allocateMapLink();
    index = 0;
    link->next_ = newLink;
    previousLink = newLink;
    link = newLink;
  }
  return setNewItem(key, isStatic, link, index);
}

ValueInternalMap::HashKey ValueInternalMap::hash(const char* key) const {
  HashKey hash = 0;
  while (*key)
    hash += *key++ * 37;
  return hash;
}

int ValueInternalMap::compare(const ValueInternalMap& other) const {
  int sizeDiff(itemCount_ - other.itemCount_);
  if (sizeDiff != 0)
    return sizeDiff;
  // Strict order guarantee is required. Compare all keys FIRST, then compare
  // values.
  IteratorState it;
  IteratorState itEnd;
  makeBeginIterator(it);
  makeEndIterator(itEnd);
  for (; !equals(it, itEnd); increment(it)) {
    if (!other.find(key(it)))
      return 1;
  }

  // All keys are equal, let's compare values
  makeBeginIterator(it);
  for (; !equals(it, itEnd); increment(it)) {
    const Value* otherValue = other.find(key(it));
    int valueDiff = value(it).compare(*otherValue);
    if (valueDiff != 0)
      return valueDiff;
  }
  return 0;
}

void ValueInternalMap::makeBeginIterator(IteratorState& it) const {
  it.map_ = const_cast<ValueInternalMap*>(this);
  it.bucketIndex_ = 0;
  it.itemIndex_ = 0;
  it.link_ = buckets_;
}

void ValueInternalMap::makeEndIterator(IteratorState& it) const {
  it.map_ = const_cast<ValueInternalMap*>(this);
  it.bucketIndex_ = bucketsSize_;
  it.itemIndex_ = 0;
  it.link_ = 0;
}

bool ValueInternalMap::equals(const IteratorState& x,
                              const IteratorState& other) {
  return x.map_ == other.map_ && x.bucketIndex_ == other.bucketIndex_ &&
         x.link_ == other.link_ && x.itemIndex_ == other.itemIndex_;
}

void ValueInternalMap::incrementBucket(IteratorState& iterator) {
  ++iterator.bucketIndex_;
  JSON_ASSERT_MESSAGE(
      iterator.bucketIndex_ <= iterator.map_->bucketsSize_,
      "ValueInternalMap::increment(): attempting to iterate beyond end.");
  if (iterator.bucketIndex_ == iterator.map_->bucketsSize_)
    iterator.link_ = 0;
  else
    iterator.link_ = &(iterator.map_->buckets_[iterator.bucketIndex_]);
  iterator.itemIndex_ = 0;
}

void ValueInternalMap::increment(IteratorState& iterator) {
  JSON_ASSERT_MESSAGE(iterator.map_,
                      "Attempting to iterate using invalid iterator.");
  ++iterator.itemIndex_;
  if (iterator.itemIndex_ == ValueInternalLink::itemPerLink) {
    JSON_ASSERT_MESSAGE(
        iterator.link_ != 0,
        "ValueInternalMap::increment(): attempting to iterate beyond end.");
    iterator.link_ = iterator.link_->next_;
    if (iterator.link_ == 0)
      incrementBucket(iterator);
  } else if (iterator.link_->items_[iterator.itemIndex_].isItemAvailable()) {
    incrementBucket(iterator);
  }
}

void ValueInternalMap::decrement(IteratorState& iterator) {
  if (iterator.itemIndex_ == 0) {
    JSON_ASSERT_MESSAGE(iterator.map_,
                        "Attempting to iterate using invalid iterator.");
    if (iterator.link_ == &iterator.map_->buckets_[iterator.bucketIndex_]) {
      JSON_ASSERT_MESSAGE(iterator.bucketIndex_ > 0,
                          "Attempting to iterate beyond beginning.");
      --(iterator.bucketIndex_);
    }
    iterator.link_ = iterator.link_->previous_;
    iterator.itemIndex_ = ValueInternalLink::itemPerLink - 1;
  }
}

const char* ValueInternalMap::key(const IteratorState& iterator) {
  JSON_ASSERT_MESSAGE(iterator.link_,
                      "Attempting to iterate using invalid iterator.");
  return iterator.link_->keys_[iterator.itemIndex_];
}

const char* ValueInternalMap::key(const IteratorState& iterator,
                                  bool& isStatic) {
  JSON_ASSERT_MESSAGE(iterator.link_,
                      "Attempting to iterate using invalid iterator.");
  isStatic = iterator.link_->items_[iterator.itemIndex_].isMemberNameStatic();
  return iterator.link_->keys_[iterator.itemIndex_];
}

Value& ValueInternalMap::value(const IteratorState& iterator) {
  JSON_ASSERT_MESSAGE(iterator.link_,
                      "Attempting to iterate using invalid iterator.");
  return iterator.link_->items_[iterator.itemIndex_];
}

int ValueInternalMap::distance(const IteratorState& x, const IteratorState& y) {
  int offset = 0;
  IteratorState it = x;
  while (!equals(it, y))
    increment(it);
  return offset;
}

} // namespace Json
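The removed map above chains fixed-capacity links (itemPerLink key/value slots each) off a bucket array and picks the bucket with hash(key) % bucketsSize_, terminating a probe at the first available slot. A small standalone sketch of that lookup path, using essentially the same multiply-by-37 hash; the types (Link, mapContains) are invented for illustration and are not the jsoncpp API:

#include <cstring>

typedef unsigned int HashKey;
enum { kItemsPerLink = 6 }; // "linked list element contains 6 key/values" above

// Same accumulation the deleted ValueInternalMap::hash() performs.
static HashKey hashKey(const char* key) {
  HashKey h = 0;
  while (*key)
    h += static_cast<unsigned char>(*key++) * 37;
  return h;
}

struct Link {                      // one fixed-capacity chunk of a bucket chain
  const char* keys[kItemsPerLink]; // a null key marks an available slot
  Link* next;
};

// Mirrors the find() walk: pick the bucket by hash % bucketCount, then scan
// each link in order; the first available slot ends the search.
static bool mapContains(Link* buckets, unsigned bucketCount, const char* key) {
  if (bucketCount == 0)
    return false;
  for (Link* link = &buckets[hashKey(key) % bucketCount]; link; link = link->next)
    for (int i = 0; i < kItemsPerLink; ++i) {
      if (!link->keys[i])
        return false;              // available slot: key not present
      if (std::strcmp(link->keys[i], key) == 0)
        return true;
    }
  return false;
}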
File diff suppressed because it is too large
@@ -30,8 +30,8 @@ static inline std::string codePointToUTF8(unsigned int cp) {
   } else if (cp <= 0xFFFF) {
     result.resize(3);
     result[2] = static_cast<char>(0x80 | (0x3f & cp));
-    result[1] = 0x80 | static_cast<char>((0x3f & (cp >> 6)));
-    result[0] = 0xE0 | static_cast<char>((0xf & (cp >> 12)));
+    result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
+    result[0] = static_cast<char>(0xE0 | (0xf & (cp >> 12)));
   } else if (cp <= 0x10FFFF) {
     result.resize(4);
     result[3] = static_cast<char>(0x80 | (0x3f & cp));
@@ -43,7 +43,7 @@ static inline std::string codePointToUTF8(unsigned int cp) {
   return result;
 }
 
-/// Returns true if ch is a control character (in range [0,32[).
+/// Returns true if ch is a control character (in range [1,31]).
 static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }
 
 enum {
@@ -63,7 +63,7 @@ typedef char UIntToStringBuffer[uintToStringBufferSize];
 static inline void uintToString(LargestUInt value, char*& current) {
   *--current = 0;
   do {
-    *--current = char(value % 10) + '0';
+    *--current = static_cast<signed char>(value % 10U + static_cast<unsigned>('0'));
    value /= 10;
   } while (value != 0);
 }
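The hunks above only tighten casts in codePointToUTF8 and uintToString; the encoding itself is the standard UTF-8 layout. For reference, a compact sketch of the same code-point-to-bytes mapping, including the 1- and 2-byte ranges that fall outside the hunk (illustrative, not the file's exact code):

#include <string>

static std::string codePointToUTF8Sketch(unsigned int cp) {
  std::string result;
  if (cp <= 0x7F) {            // 1 byte: 0xxxxxxx
    result += static_cast<char>(cp);
  } else if (cp <= 0x7FF) {    // 2 bytes: 110xxxxx 10xxxxxx
    result += static_cast<char>(0xC0 | (0x1F & (cp >> 6)));
    result += static_cast<char>(0x80 | (0x3F & cp));
  } else if (cp <= 0xFFFF) {   // 3 bytes: 1110xxxx 10xxxxxx 10xxxxxx
    result += static_cast<char>(0xE0 | (0x0F & (cp >> 12)));
    result += static_cast<char>(0x80 | (0x3F & (cp >> 6)));
    result += static_cast<char>(0x80 | (0x3F & cp));
  } else if (cp <= 0x10FFFF) { // 4 bytes: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
    result += static_cast<char>(0xF0 | (0x07 & (cp >> 18)));
    result += static_cast<char>(0x80 | (0x3F & (cp >> 12)));
    result += static_cast<char>(0x80 | (0x3F & (cp >> 6)));
    result += static_cast<char>(0x80 | (0x3F & cp));
  }
  return result;
}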
File diff suppressed because it is too large
@@ -16,68 +16,29 @@ namespace Json {
 // //////////////////////////////////////////////////////////////////
 
 ValueIteratorBase::ValueIteratorBase()
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
     : current_(), isNull_(true) {
 }
-#else
-    : isArray_(true), isNull_(true) {
-  iterator_.array_ = ValueInternalArray::IteratorState();
-}
-#endif
 
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
 ValueIteratorBase::ValueIteratorBase(
     const Value::ObjectValues::iterator& current)
     : current_(current), isNull_(false) {}
-#else
-ValueIteratorBase::ValueIteratorBase(
-    const ValueInternalArray::IteratorState& state)
-    : isArray_(true) {
-  iterator_.array_ = state;
-}
-
-ValueIteratorBase::ValueIteratorBase(
-    const ValueInternalMap::IteratorState& state)
-    : isArray_(false) {
-  iterator_.map_ = state;
-}
-#endif
 
 Value& ValueIteratorBase::deref() const {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   return current_->second;
-#else
-  if (isArray_)
-    return ValueInternalArray::dereference(iterator_.array_);
-  return ValueInternalMap::value(iterator_.map_);
-#endif
 }
 
 void ValueIteratorBase::increment() {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   ++current_;
-#else
-  if (isArray_)
-    ValueInternalArray::increment(iterator_.array_);
-  ValueInternalMap::increment(iterator_.map_);
-#endif
 }
 
 void ValueIteratorBase::decrement() {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   --current_;
-#else
-  if (isArray_)
-    ValueInternalArray::decrement(iterator_.array_);
-  ValueInternalMap::decrement(iterator_.map_);
-#endif
 }
 
 ValueIteratorBase::difference_type
 ValueIteratorBase::computeDistance(const SelfType& other) const {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
 #ifdef JSON_USE_CPPTL_SMALLMAP
-  return current_ - other.current_;
+  return other.current_ - current_;
 #else
   // Iterator for null value are initialized using the default
   // constructor, which initialize current_ to the default
@@ -100,80 +61,58 @@ ValueIteratorBase::computeDistance(const SelfType& other) const {
   }
   return myDistance;
 #endif
-#else
-  if (isArray_)
-    return ValueInternalArray::distance(iterator_.array_,
-                                        other.iterator_.array_);
-  return ValueInternalMap::distance(iterator_.map_, other.iterator_.map_);
-#endif
 }
 
 bool ValueIteratorBase::isEqual(const SelfType& other) const {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   if (isNull_) {
     return other.isNull_;
   }
   return current_ == other.current_;
-#else
-  if (isArray_)
-    return ValueInternalArray::equals(iterator_.array_, other.iterator_.array_);
-  return ValueInternalMap::equals(iterator_.map_, other.iterator_.map_);
-#endif
 }
 
 void ValueIteratorBase::copy(const SelfType& other) {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   current_ = other.current_;
   isNull_ = other.isNull_;
-#else
-  if (isArray_)
-    iterator_.array_ = other.iterator_.array_;
-  iterator_.map_ = other.iterator_.map_;
-#endif
 }
 
 Value ValueIteratorBase::key() const {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   const Value::CZString czstring = (*current_).first;
-  if (czstring.c_str()) {
+  if (czstring.data()) {
     if (czstring.isStaticString())
-      return Value(StaticString(czstring.c_str()));
-    return Value(czstring.c_str());
+      return Value(StaticString(czstring.data()));
+    return Value(czstring.data(), czstring.data() + czstring.length());
   }
   return Value(czstring.index());
-#else
-  if (isArray_)
-    return Value(ValueInternalArray::indexOf(iterator_.array_));
-  bool isStatic;
-  const char* memberName = ValueInternalMap::key(iterator_.map_, isStatic);
-  if (isStatic)
-    return Value(StaticString(memberName));
-  return Value(memberName);
-#endif
 }
 
 UInt ValueIteratorBase::index() const {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
   const Value::CZString czstring = (*current_).first;
-  if (!czstring.c_str())
+  if (!czstring.data())
     return czstring.index();
   return Value::UInt(-1);
-#else
-  if (isArray_)
-    return Value::UInt(ValueInternalArray::indexOf(iterator_.array_));
-  return Value::UInt(-1);
-#endif
 }
 
-const char* ValueIteratorBase::memberName() const {
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-  const char* name = (*current_).first.c_str();
-  return name ? name : "";
-#else
-  if (!isArray_)
-    return ValueInternalMap::key(iterator_.map_);
-  return "";
-#endif
+std::string ValueIteratorBase::name() const {
+  char const* keey;
+  char const* end;
+  keey = memberName(&end);
+  if (!keey) return std::string();
+  return std::string(keey, end);
+}
+
+char const* ValueIteratorBase::memberName() const {
+  const char* cname = (*current_).first.data();
+  return cname ? cname : "";
+}
+
+char const* ValueIteratorBase::memberName(char const** end) const {
+  const char* cname = (*current_).first.data();
+  if (!cname) {
+    *end = NULL;
+    return NULL;
+  }
+  *end = cname + (*current_).first.length();
+  return cname;
 }
 
 // //////////////////////////////////////////////////////////////////
@@ -186,19 +125,9 @@ const char* ValueIteratorBase::memberName() const {
 
 ValueConstIterator::ValueConstIterator() {}
 
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
 ValueConstIterator::ValueConstIterator(
     const Value::ObjectValues::iterator& current)
     : ValueIteratorBase(current) {}
-#else
-ValueConstIterator::ValueConstIterator(
-    const ValueInternalArray::IteratorState& state)
-    : ValueIteratorBase(state) {}
-
-ValueConstIterator::ValueConstIterator(
-    const ValueInternalMap::IteratorState& state)
-    : ValueIteratorBase(state) {}
-#endif
 
 ValueConstIterator& ValueConstIterator::
 operator=(const ValueIteratorBase& other) {
@@ -216,16 +145,8 @@ operator=(const ValueIteratorBase& other) {
 
 ValueIterator::ValueIterator() {}
 
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
 ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current)
     : ValueIteratorBase(current) {}
-#else
-ValueIterator::ValueIterator(const ValueInternalArray::IteratorState& state)
-    : ValueIteratorBase(state) {}
-
-ValueIterator::ValueIterator(const ValueInternalMap::IteratorState& state)
-    : ValueIteratorBase(state) {}
-#endif
 
 ValueIterator::ValueIterator(const ValueConstIterator& other)
     : ValueIteratorBase(other) {}
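The iterator diff above adds a memberName(char const** end) overload that returns a begin/end pointer pair, and a name() accessor that wraps it in a std::string, so keys with embedded NUL bytes survive. A hedged usage sketch, assuming the post-change API exposes these on const iterators; firstKey and the obj variable are invented for illustration:

#include <json/value.h>
#include <string>

// Builds a std::string key from the pointer-pair overload, the same way
// ValueIteratorBase::name() does in the hunk above.
std::string firstKey(const Json::Value& obj) {
  Json::Value::const_iterator it = obj.begin();
  if (it == obj.end())
    return std::string();
  char const* end = NULL;
  char const* begin = it.memberName(&end); // key may contain embedded '\0'
  return begin ? std::string(begin, end) : std::string();
}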
@@ -7,15 +7,44 @@
|
|||||||
#include <json/writer.h>
|
#include <json/writer.h>
|
||||||
#include "json_tool.h"
|
#include "json_tool.h"
|
||||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||||
#include <utility>
|
|
||||||
#include <assert.h>
|
|
||||||
#include <stdio.h>
|
|
||||||
#include <string.h>
|
|
||||||
#include <sstream>
|
|
||||||
#include <iomanip>
|
#include <iomanip>
|
||||||
#include <math.h>
|
#include <memory>
|
||||||
|
#include <sstream>
|
||||||
|
#include <utility>
|
||||||
|
#include <set>
|
||||||
|
#include <cassert>
|
||||||
|
#include <cstring>
|
||||||
|
#include <cstdio>
|
||||||
|
|
||||||
#if defined(_MSC_VER) && _MSC_VER < 1500 // VC++ 8.0 and below
|
#if defined(__BORLANDC__)
|
||||||
|
#include <stdio.h>
|
||||||
|
#endif
|
||||||
|
#if defined(_MSC_VER) && _MSC_VER >= 1200 && _MSC_VER < 1800 // Between VC++ 6.0 and VC++ 11.0
|
||||||
|
#include <float.h>
|
||||||
|
#define isfinite _finite
|
||||||
|
#elif defined(__sun) && defined(__SVR4) //Solaris
|
||||||
|
#include <ieeefp.h>
|
||||||
|
#define isfinite finite
|
||||||
|
#else
|
||||||
|
#include <cmath>
|
||||||
|
#define isfinite std::isfinite
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if defined(_MSC_VER)
|
||||||
|
#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
|
||||||
|
#define snprintf sprintf_s
|
||||||
|
#elif _MSC_VER >= 1900 // VC++ 14.0 and above
|
||||||
|
#define snprintf std::snprintf
|
||||||
|
#else
|
||||||
|
#define snprintf _snprintf
|
||||||
|
#endif
|
||||||
|
#elif defined(__ANDROID__)
|
||||||
|
#define snprintf snprintf
|
||||||
|
#elif __cplusplus >= 201103L
|
||||||
|
#define snprintf std::snprintf
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if defined(__BORLANDC__)
|
||||||
#include <float.h>
|
#include <float.h>
|
||||||
#define isfinite _finite
|
#define isfinite _finite
|
||||||
#define snprintf _snprintf
|
#define snprintf _snprintf
|
||||||
@@ -26,13 +55,14 @@
|
|||||||
#pragma warning(disable : 4996)
|
#pragma warning(disable : 4996)
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#if defined(__sun) && defined(__SVR4) //Solaris
|
|
||||||
#include <ieeefp.h>
|
|
||||||
#define isfinite finite
|
|
||||||
#endif
|
|
||||||
|
|
||||||
namespace Json {
|
namespace Json {
|
||||||
|
|
||||||
|
#if JSON_HAS_UNIQUE_PTR
|
||||||
|
typedef std::unique_ptr<StreamWriter> const StreamWriterPtr;
|
||||||
|
#else
|
||||||
|
typedef std::auto_ptr<StreamWriter> StreamWriterPtr;
|
||||||
|
#endif
|
||||||
|
|
||||||
static bool containsControlCharacter(const char* str) {
|
static bool containsControlCharacter(const char* str) {
|
||||||
while (*str) {
|
while (*str) {
|
||||||
if (isControlCharacter(*(str++)))
|
if (isControlCharacter(*(str++)))
|
||||||
@@ -41,15 +71,28 @@ static bool containsControlCharacter(const char* str) {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static bool containsControlCharacter0(const char* str, unsigned len) {
|
||||||
|
char const* end = str + len;
|
||||||
|
while (end != str) {
|
||||||
|
if (isControlCharacter(*str) || 0==*str)
|
||||||
|
return true;
|
||||||
|
++str;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
std::string valueToString(LargestInt value) {
|
std::string valueToString(LargestInt value) {
|
||||||
UIntToStringBuffer buffer;
|
UIntToStringBuffer buffer;
|
||||||
char* current = buffer + sizeof(buffer);
|
char* current = buffer + sizeof(buffer);
|
||||||
bool isNegative = value < 0;
|
if (value == Value::minLargestInt) {
|
||||||
if (isNegative)
|
uintToString(LargestUInt(Value::maxLargestInt) + 1, current);
|
||||||
value = -value;
|
|
||||||
uintToString(LargestUInt(value), current);
|
|
||||||
if (isNegative)
|
|
||||||
*--current = '-';
|
*--current = '-';
|
||||||
|
} else if (value < 0) {
|
||||||
|
uintToString(LargestUInt(-value), current);
|
||||||
|
*--current = '-';
|
||||||
|
} else {
|
||||||
|
uintToString(LargestUInt(value), current);
|
||||||
|
}
|
||||||
assert(current >= buffer);
|
assert(current >= buffer);
|
||||||
return current;
|
return current;
|
||||||
}
|
}
|
||||||
@@ -74,43 +117,38 @@ std::string valueToString(UInt value) {
|
|||||||
|
|
||||||
#endif // # if defined(JSON_HAS_INT64)
|
#endif // # if defined(JSON_HAS_INT64)
|
||||||
|
|
||||||
std::string valueToString(double value) {
|
std::string valueToString(double value, bool useSpecialFloats, unsigned int precision) {
|
||||||
// Allocate a buffer that is more than large enough to store the 16 digits of
|
// Allocate a buffer that is more than large enough to store the 16 digits of
|
||||||
// precision requested below.
|
// precision requested below.
|
||||||
char buffer[32];
|
char buffer[32];
|
||||||
int len = -1;
|
int len = -1;
|
||||||
|
|
||||||
|
char formatString[6];
|
||||||
|
sprintf(formatString, "%%.%dg", precision);
|
||||||
|
|
||||||
// Print into the buffer. We need not request the alternative representation
|
// Print into the buffer. We need not request the alternative representation
|
||||||
// that always has a decimal point because JSON doesn't distingish the
|
// that always has a decimal point because JSON doesn't distingish the
|
||||||
// concepts of reals and integers.
|
// concepts of reals and integers.
|
||||||
#if defined(_MSC_VER) && defined(__STDC_SECURE_LIB__) // Use secure version with
|
|
||||||
// visual studio 2005 to
|
|
||||||
// avoid warning.
|
|
||||||
#if defined(WINCE)
|
|
||||||
len = _snprintf(buffer, sizeof(buffer), "%.17g", value);
|
|
||||||
#else
|
|
||||||
len = sprintf_s(buffer, sizeof(buffer), "%.17g", value);
|
|
||||||
#endif
|
|
||||||
#else
|
|
||||||
if (isfinite(value)) {
|
if (isfinite(value)) {
|
||||||
len = snprintf(buffer, sizeof(buffer), "%.17g", value);
|
len = snprintf(buffer, sizeof(buffer), formatString, value);
|
||||||
} else {
|
} else {
|
||||||
// IEEE standard states that NaN values will not compare to themselves
|
// IEEE standard states that NaN values will not compare to themselves
|
||||||
if (value != value) {
|
if (value != value) {
|
||||||
len = snprintf(buffer, sizeof(buffer), "null");
|
len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "NaN" : "null");
|
||||||
} else if (value < 0) {
|
} else if (value < 0) {
|
||||||
len = snprintf(buffer, sizeof(buffer), "-1e+9999");
|
len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "-Infinity" : "-1e+9999");
|
||||||
} else {
|
} else {
|
||||||
len = snprintf(buffer, sizeof(buffer), "1e+9999");
|
len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "Infinity" : "1e+9999");
|
||||||
}
|
}
|
||||||
// For those, we do not need to call fixNumLoc, but it is fast.
|
// For those, we do not need to call fixNumLoc, but it is fast.
|
||||||
}
|
}
|
||||||
#endif
|
|
||||||
assert(len >= 0);
|
assert(len >= 0);
|
||||||
fixNumericLocale(buffer, buffer + len);
|
fixNumericLocale(buffer, buffer + len);
|
||||||
return buffer;
|
return buffer;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
std::string valueToString(double value) { return valueToString(value, false, 17); }
|
||||||
|
|
||||||
std::string valueToString(bool value) { return value ? "true" : "false"; }
|
std::string valueToString(bool value) { return value ? "true" : "false"; }
|
||||||
|
|
||||||
std::string valueToQuotedString(const char* value) {
|
std::string valueToQuotedString(const char* value) {
|
||||||
@@ -175,6 +213,84 @@ std::string valueToQuotedString(const char* value) {
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// https://github.com/upcaste/upcaste/blob/master/src/upcore/src/cstring/strnpbrk.cpp
|
||||||
|
static char const* strnpbrk(char const* s, char const* accept, size_t n) {
|
||||||
|
assert((s || !n) && accept);
|
||||||
|
|
||||||
|
char const* const end = s + n;
|
||||||
|
for (char const* cur = s; cur < end; ++cur) {
|
||||||
|
int const c = *cur;
|
||||||
|
for (char const* a = accept; *a; ++a) {
|
||||||
|
if (*a == c) {
|
||||||
|
return cur;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
static std::string valueToQuotedStringN(const char* value, unsigned length) {
|
||||||
|
if (value == NULL)
|
||||||
|
return "";
|
||||||
|
// Not sure how to handle unicode...
|
||||||
|
if (strnpbrk(value, "\"\\\b\f\n\r\t", length) == NULL &&
|
||||||
|
!containsControlCharacter0(value, length))
|
||||||
|
return std::string("\"") + value + "\"";
|
||||||
|
// We have to walk value and escape any special characters.
|
||||||
|
// Appending to std::string is not efficient, but this should be rare.
|
||||||
|
// (Note: forward slashes are *not* rare, but I am not escaping them.)
|
||||||
|
std::string::size_type maxsize =
|
||||||
|
length * 2 + 3; // allescaped+quotes+NULL
|
||||||
|
std::string result;
|
||||||
|
result.reserve(maxsize); // to avoid lots of mallocs
|
||||||
|
result += "\"";
|
||||||
|
char const* end = value + length;
|
||||||
|
for (const char* c = value; c != end; ++c) {
|
||||||
|
switch (*c) {
|
||||||
|
case '\"':
|
||||||
|
result += "\\\"";
|
||||||
|
break;
|
||||||
|
case '\\':
|
||||||
|
result += "\\\\";
|
||||||
|
break;
|
||||||
|
case '\b':
|
||||||
|
result += "\\b";
|
||||||
|
break;
|
||||||
|
case '\f':
|
||||||
|
result += "\\f";
|
||||||
|
break;
|
||||||
|
case '\n':
|
||||||
|
result += "\\n";
|
||||||
|
break;
|
||||||
|
case '\r':
|
||||||
|
result += "\\r";
|
||||||
|
break;
|
||||||
|
case '\t':
|
||||||
|
result += "\\t";
|
||||||
|
break;
|
||||||
|
// case '/':
|
||||||
|
// Even though \/ is considered a legal escape in JSON, a bare
|
||||||
|
// slash is also legal, so I see no reason to escape it.
|
||||||
|
// (I hope I am not misunderstanding something.)
|
||||||
|
// blep notes: actually escaping \/ may be useful in javascript to avoid </
|
||||||
|
// sequence.
|
||||||
|
// Should add a flag to allow this compatibility mode and prevent this
|
||||||
|
// sequence from occurring.
|
||||||
|
default:
|
||||||
|
if ((isControlCharacter(*c)) || (*c == 0)) {
|
||||||
|
std::ostringstream oss;
|
||||||
|
oss << "\\u" << std::hex << std::uppercase << std::setfill('0')
|
||||||
|
<< std::setw(4) << static_cast<int>(*c);
|
||||||
|
result += oss.str();
|
||||||
|
} else {
|
||||||
|
result += *c;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result += "\"";
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
// Class Writer
|
// Class Writer
|
||||||
// //////////////////////////////////////////////////////////////////
|
// //////////////////////////////////////////////////////////////////
|
||||||
Writer::~Writer() {}
|
Writer::~Writer() {}
|
||||||
@@ -183,19 +299,13 @@ Writer::~Writer() {}
|
|||||||
// //////////////////////////////////////////////////////////////////
|
// //////////////////////////////////////////////////////////////////
|
||||||
|
|
||||||
FastWriter::FastWriter()
|
FastWriter::FastWriter()
|
||||||
: yamlCompatiblityEnabled_(false), dropNullPlaceholders_(false),
|
: yamlCompatiblityEnabled_(false) {}
|
||||||
omitEndingLineFeed_(false) {}
|
|
||||||
|
|
||||||
void FastWriter::enableYAMLCompatibility() { yamlCompatiblityEnabled_ = true; }
|
void FastWriter::enableYAMLCompatibility() { yamlCompatiblityEnabled_ = true; }
|
||||||
|
|
||||||
void FastWriter::dropNullPlaceholders() { dropNullPlaceholders_ = true; }
|
|
||||||
|
|
||||||
void FastWriter::omitEndingLineFeed() { omitEndingLineFeed_ = true; }
|
|
||||||
|
|
||||||
std::string FastWriter::write(const Value& root) {
|
std::string FastWriter::write(const Value& root) {
|
||||||
document_ = "";
|
document_ = "";
|
||||||
writeValue(root);
|
writeValue(root);
|
||||||
if (!omitEndingLineFeed_)
|
|
||||||
document_ += "\n";
|
document_ += "\n";
|
||||||
return document_;
|
return document_;
|
||||||
}
|
}
|
||||||
@@ -203,7 +313,6 @@ std::string FastWriter::write(const Value& root) {
|
|||||||
void FastWriter::writeValue(const Value& value) {
|
void FastWriter::writeValue(const Value& value) {
|
||||||
switch (value.type()) {
|
switch (value.type()) {
|
||||||
case nullValue:
|
case nullValue:
|
||||||
if (!dropNullPlaceholders_)
|
|
||||||
document_ += "null";
|
document_ += "null";
|
||||||
break;
|
break;
|
||||||
case intValue:
|
case intValue:
|
||||||
@@ -216,8 +325,14 @@ void FastWriter::writeValue(const Value& value) {
|
|||||||
document_ += valueToString(value.asDouble());
|
document_ += valueToString(value.asDouble());
|
||||||
break;
|
break;
|
||||||
case stringValue:
|
case stringValue:
|
||||||
document_ += valueToQuotedString(value.asCString());
|
{
|
||||||
|
// Is NULL possible for value.string_?
|
||||||
|
char const* str;
|
||||||
|
char const* end;
|
||||||
|
bool ok = value.getString(&str, &end);
|
||||||
|
if (ok) document_ += valueToQuotedStringN(str, static_cast<unsigned>(end-str));
|
||||||
break;
|
break;
|
||||||
|
}
|
||||||
case booleanValue:
|
case booleanValue:
|
||||||
document_ += valueToString(value.asBool());
|
document_ += valueToString(value.asBool());
|
||||||
break;
|
break;
|
||||||
@@ -239,7 +354,7 @@ void FastWriter::writeValue(const Value& value) {
|
|||||||
const std::string& name = *it;
|
const std::string& name = *it;
|
||||||
if (it != members.begin())
|
if (it != members.begin())
|
||||||
document_ += ',';
|
document_ += ',';
|
||||||
document_ += valueToQuotedString(name.c_str());
|
document_ += valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length()));
|
||||||
document_ += yamlCompatiblityEnabled_ ? ": " : ":";
|
document_ += yamlCompatiblityEnabled_ ? ": " : ":";
|
||||||
writeValue(value[name]);
|
writeValue(value[name]);
|
||||||
}
|
}
|
||||||
@@ -280,8 +395,15 @@ void StyledWriter::writeValue(const Value& value) {
     pushValue(valueToString(value.asDouble()));
     break;
   case stringValue:
-    pushValue(valueToQuotedString(value.asCString()));
+  {
+    // Is NULL possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
+    else pushValue("");
     break;
+  }
   case booleanValue:
     pushValue(valueToString(value.asBool()));
     break;
@@ -376,6 +498,9 @@ bool StyledWriter::isMultineArray(const Value& value) {
     addChildValues_ = true;
     int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
     for (int index = 0; index < size; ++index) {
+      if (hasCommentForValue(value[index])) {
+        isMultiLine = true;
+      }
       writeValue(value[index]);
       lineLength += int(childValues_[index].length());
     }
@@ -421,26 +546,27 @@ void StyledWriter::writeCommentBeforeValue(const Value& root) {
 
   document_ += "\n";
   writeIndent();
-  std::string normalizedComment = normalizeEOL(root.getComment(commentBefore));
-  std::string::const_iterator iter = normalizedComment.begin();
-  while (iter != normalizedComment.end()) {
+  const std::string& comment = root.getComment(commentBefore);
+  std::string::const_iterator iter = comment.begin();
+  while (iter != comment.end()) {
     document_ += *iter;
-    if (*iter == '\n' && *(iter + 1) == '/')
+    if (*iter == '\n' &&
+        (iter != comment.end() && *(iter + 1) == '/'))
       writeIndent();
     ++iter;
   }
 
-  // Comments are stripped of newlines, so add one here
+  // Comments are stripped of trailing newlines, so add one here
   document_ += "\n";
 }
 
 void StyledWriter::writeCommentAfterValueOnSameLine(const Value& root) {
   if (root.hasComment(commentAfterOnSameLine))
-    document_ += " " + normalizeEOL(root.getComment(commentAfterOnSameLine));
+    document_ += " " + root.getComment(commentAfterOnSameLine);
 
   if (root.hasComment(commentAfter)) {
     document_ += "\n";
-    document_ += normalizeEOL(root.getComment(commentAfter));
+    document_ += root.getComment(commentAfter);
     document_ += "\n";
   }
 }
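(Aside: both writers above read root.getComment(commentBefore) and its siblings; comments are attached to a Json::Value either by the reader or by hand. A small hedged sketch of attaching comments manually and rendering them with StyledWriter -- exact output formatting not shown verbatim:)

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value root;
  root["answer"] = 42;
  // A comment emitted on the line before the value, and one trailing it
  // on the same line.
  root["answer"].setComment("// the classic constant", Json::commentBefore);
  root["answer"].setComment("// trailing note", Json::commentAfterOnSameLine);

  Json::StyledWriter writer;
  std::cout << writer.write(root);
  return 0;
}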
@@ -451,25 +577,6 @@ bool StyledWriter::hasCommentForValue(const Value& value) {
          value.hasComment(commentAfter);
 }
 
-std::string StyledWriter::normalizeEOL(const std::string& text) {
-  std::string normalized;
-  normalized.reserve(text.length());
-  const char* begin = text.c_str();
-  const char* end = begin + text.length();
-  const char* current = begin;
-  while (current != end) {
-    char c = *current++;
-    if (c == '\r') // mac or dos EOL
-    {
-      if (*current == '\n') // convert dos EOL
-        ++current;
-      normalized += '\n';
-    } else // handle unix EOL & other char
-      normalized += c;
-  }
-  return normalized;
-}
-
 // Class StyledStreamWriter
 // //////////////////////////////////////////////////////////////////
 
@@ -481,7 +588,10 @@ void StyledStreamWriter::write(std::ostream& out, const Value& root) {
   document_ = &out;
   addChildValues_ = false;
   indentString_ = "";
+  indented_ = true;
   writeCommentBeforeValue(root);
+  if (!indented_) writeIndent();
+  indented_ = true;
   writeValue(root);
   writeCommentAfterValueOnSameLine(root);
   *document_ << "\n";
@@ -503,8 +613,15 @@ void StyledStreamWriter::writeValue(const Value& value) {
     pushValue(valueToString(value.asDouble()));
     break;
   case stringValue:
-    pushValue(valueToQuotedString(value.asCString()));
+  {
+    // Is NULL possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
+    else pushValue("");
     break;
+  }
   case booleanValue:
     pushValue(valueToString(value.asBool()));
     break;
@@ -557,8 +674,10 @@ void StyledStreamWriter::writeArrayValue(const Value& value) {
       if (hasChildValue)
         writeWithIndent(childValues_[index]);
       else {
-        writeIndent();
+        if (!indented_) writeIndent();
+        indented_ = true;
         writeValue(childValue);
+        indented_ = false;
       }
       if (++index == size) {
         writeCommentAfterValueOnSameLine(childValue);
@@ -599,6 +718,9 @@ bool StyledStreamWriter::isMultineArray(const Value& value) {
     addChildValues_ = true;
     int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
     for (int index = 0; index < size; ++index) {
+      if (hasCommentForValue(value[index])) {
+        isMultiLine = true;
+      }
       writeValue(value[index]);
       lineLength += int(childValues_[index].length());
     }
@@ -616,24 +738,17 @@ void StyledStreamWriter::pushValue(const std::string& value) {
 }
 
 void StyledStreamWriter::writeIndent() {
-  /*
-    Some comments in this method would have been nice. ;-)
-
-     if ( !document_.empty() )
-     {
-       char last = document_[document_.length()-1];
-       if ( last == ' ' )     // already indented
-         return;
-       if ( last != '\n' )    // Comments may add new-line
-         *document_ << '\n';
-     }
-  */
+  // blep intended this to look at the so-far-written string
+  // to determine whether we are already indented, but
+  // with a stream we cannot do that. So we rely on some saved state.
+  // The caller checks indented_.
   *document_ << '\n' << indentString_;
 }
 
 void StyledStreamWriter::writeWithIndent(const std::string& value) {
-  writeIndent();
+  if (!indented_) writeIndent();
   *document_ << value;
+  indented_ = false;
 }
 
 void StyledStreamWriter::indent() { indentString_ += indentation_; }
@@ -646,19 +761,30 @@ void StyledStreamWriter::unindent() {
 void StyledStreamWriter::writeCommentBeforeValue(const Value& root) {
   if (!root.hasComment(commentBefore))
     return;
-  *document_ << normalizeEOL(root.getComment(commentBefore));
-  *document_ << "\n";
+
+  if (!indented_) writeIndent();
+  const std::string& comment = root.getComment(commentBefore);
+  std::string::const_iterator iter = comment.begin();
+  while (iter != comment.end()) {
+    *document_ << *iter;
+    if (*iter == '\n' &&
+        (iter != comment.end() && *(iter + 1) == '/'))
+      // writeIndent();  // would include newline
+      *document_ << indentString_;
+    ++iter;
+  }
+  indented_ = false;
 }
 
 void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) {
   if (root.hasComment(commentAfterOnSameLine))
-    *document_ << " " + normalizeEOL(root.getComment(commentAfterOnSameLine));
+    *document_ << ' ' << root.getComment(commentAfterOnSameLine);
 
   if (root.hasComment(commentAfter)) {
-    *document_ << "\n";
-    *document_ << normalizeEOL(root.getComment(commentAfter));
-    *document_ << "\n";
+    writeIndent();
+    *document_ << root.getComment(commentAfter);
   }
+  indented_ = false;
 }
 
 bool StyledStreamWriter::hasCommentForValue(const Value& value) {
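(Aside: StyledStreamWriter, whose indentation and comment handling the hunks above rework around the indented_ flag, writes directly to a std::ostream. A minimal hedged usage sketch:)

#include <json/json.h>
#include <iostream>

int main() {
  Json::Value root;
  root["enabled"] = true;
  root["items"].append(1);
  root["items"].append(2);

  // Indent with two spaces; write() streams the styled document to stdout.
  Json::StyledStreamWriter writer("  ");
  writer.write(std::cout, root);
  return 0;
}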
@@ -667,28 +793,401 @@ bool StyledStreamWriter::hasCommentForValue(const Value& value) {
          value.hasComment(commentAfter);
 }
 
-std::string StyledStreamWriter::normalizeEOL(const std::string& text) {
-  std::string normalized;
-  normalized.reserve(text.length());
-  const char* begin = text.c_str();
-  const char* end = begin + text.length();
-  const char* current = begin;
-  while (current != end) {
-    char c = *current++;
-    if (c == '\r') // mac or dos EOL
-    {
-      if (*current == '\n') // convert dos EOL
-        ++current;
-      normalized += '\n';
-    } else // handle unix EOL & other char
-      normalized += c;
-  }
-  return normalized;
-}
+//////////////////////////
+// BuiltStyledStreamWriter
+
+/// Scoped enums are not available until C++11.
+struct CommentStyle {
+  /// Decide whether to write comments.
+  enum Enum {
+    None, ///< Drop all comments.
+    Most, ///< Recover odd behavior of previous versions (not implemented yet).
+    All   ///< Keep all comments.
+  };
+};
+
+struct BuiltStyledStreamWriter : public StreamWriter
+{
+  BuiltStyledStreamWriter(
+      std::string const& indentation,
+      CommentStyle::Enum cs,
+      std::string const& colonSymbol,
+      std::string const& nullSymbol,
+      std::string const& endingLineFeedSymbol,
+      bool useSpecialFloats,
+      unsigned int precision);
+  virtual int write(Value const& root, std::ostream* sout);
+private:
+  void writeValue(Value const& value);
+  void writeArrayValue(Value const& value);
+  bool isMultineArray(Value const& value);
+  void pushValue(std::string const& value);
+  void writeIndent();
+  void writeWithIndent(std::string const& value);
+  void indent();
+  void unindent();
+  void writeCommentBeforeValue(Value const& root);
+  void writeCommentAfterValueOnSameLine(Value const& root);
+  static bool hasCommentForValue(const Value& value);
+
+  typedef std::vector<std::string> ChildValues;
+
+  ChildValues childValues_;
+  std::string indentString_;
+  int rightMargin_;
+  std::string indentation_;
+  CommentStyle::Enum cs_;
+  std::string colonSymbol_;
+  std::string nullSymbol_;
+  std::string endingLineFeedSymbol_;
+  bool addChildValues_ : 1;
+  bool indented_ : 1;
+  bool useSpecialFloats_ : 1;
+  unsigned int precision_;
+};
+BuiltStyledStreamWriter::BuiltStyledStreamWriter(
+    std::string const& indentation,
+    CommentStyle::Enum cs,
+    std::string const& colonSymbol,
+    std::string const& nullSymbol,
+    std::string const& endingLineFeedSymbol,
+    bool useSpecialFloats,
+    unsigned int precision)
+  : rightMargin_(74)
+  , indentation_(indentation)
+  , cs_(cs)
+  , colonSymbol_(colonSymbol)
+  , nullSymbol_(nullSymbol)
+  , endingLineFeedSymbol_(endingLineFeedSymbol)
+  , addChildValues_(false)
+  , indented_(false)
+  , useSpecialFloats_(useSpecialFloats)
+  , precision_(precision)
+{
+}
+int BuiltStyledStreamWriter::write(Value const& root, std::ostream* sout)
+{
+  sout_ = sout;
+  addChildValues_ = false;
+  indented_ = true;
+  indentString_ = "";
+  writeCommentBeforeValue(root);
+  if (!indented_) writeIndent();
+  indented_ = true;
+  writeValue(root);
+  writeCommentAfterValueOnSameLine(root);
+  *sout_ << endingLineFeedSymbol_;
+  sout_ = NULL;
+  return 0;
+}
+void BuiltStyledStreamWriter::writeValue(Value const& value) {
+  switch (value.type()) {
+  case nullValue:
+    pushValue(nullSymbol_);
+    break;
+  case intValue:
+    pushValue(valueToString(value.asLargestInt()));
+    break;
+  case uintValue:
+    pushValue(valueToString(value.asLargestUInt()));
+    break;
+  case realValue:
+    pushValue(valueToString(value.asDouble(), useSpecialFloats_, precision_));
+    break;
+  case stringValue:
+  {
+    // Is NULL is possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
+    else pushValue("");
+    break;
+  }
+  case booleanValue:
+    pushValue(valueToString(value.asBool()));
+    break;
+  case arrayValue:
+    writeArrayValue(value);
+    break;
+  case objectValue: {
+    Value::Members members(value.getMemberNames());
+    if (members.empty())
+      pushValue("{}");
+    else {
+      writeWithIndent("{");
+      indent();
+      Value::Members::iterator it = members.begin();
+      for (;;) {
+        std::string const& name = *it;
+        Value const& childValue = value[name];
+        writeCommentBeforeValue(childValue);
+        writeWithIndent(valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length())));
+        *sout_ << colonSymbol_;
+        writeValue(childValue);
+        if (++it == members.end()) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        *sout_ << ",";
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("}");
+    }
+  } break;
+  }
+}
+
+void BuiltStyledStreamWriter::writeArrayValue(Value const& value) {
+  unsigned size = value.size();
+  if (size == 0)
+    pushValue("[]");
+  else {
+    bool isMultiLine = (cs_ == CommentStyle::All) || isMultineArray(value);
+    if (isMultiLine) {
+      writeWithIndent("[");
+      indent();
+      bool hasChildValue = !childValues_.empty();
+      unsigned index = 0;
+      for (;;) {
+        Value const& childValue = value[index];
+        writeCommentBeforeValue(childValue);
+        if (hasChildValue)
+          writeWithIndent(childValues_[index]);
+        else {
+          if (!indented_) writeIndent();
+          indented_ = true;
+          writeValue(childValue);
+          indented_ = false;
+        }
+        if (++index == size) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        *sout_ << ",";
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("]");
+    } else // output on a single line
+    {
+      assert(childValues_.size() == size);
+      *sout_ << "[";
+      if (!indentation_.empty()) *sout_ << " ";
+      for (unsigned index = 0; index < size; ++index) {
+        if (index > 0)
+          *sout_ << ", ";
+        *sout_ << childValues_[index];
+      }
+      if (!indentation_.empty()) *sout_ << " ";
+      *sout_ << "]";
+    }
+  }
+}
+
+bool BuiltStyledStreamWriter::isMultineArray(Value const& value) {
+  int size = value.size();
+  bool isMultiLine = size * 3 >= rightMargin_;
+  childValues_.clear();
+  for (int index = 0; index < size && !isMultiLine; ++index) {
+    Value const& childValue = value[index];
+    isMultiLine =
+        isMultiLine || ((childValue.isArray() || childValue.isObject()) &&
+                        childValue.size() > 0);
+  }
+  if (!isMultiLine) // check if line length > max line length
+  {
+    childValues_.reserve(size);
+    addChildValues_ = true;
+    int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
+    for (int index = 0; index < size; ++index) {
+      if (hasCommentForValue(value[index])) {
+        isMultiLine = true;
+      }
+      writeValue(value[index]);
+      lineLength += int(childValues_[index].length());
+    }
+    addChildValues_ = false;
+    isMultiLine = isMultiLine || lineLength >= rightMargin_;
+  }
+  return isMultiLine;
+}
+
+void BuiltStyledStreamWriter::pushValue(std::string const& value) {
+  if (addChildValues_)
+    childValues_.push_back(value);
+  else
+    *sout_ << value;
+}
+
+void BuiltStyledStreamWriter::writeIndent() {
+  // blep intended this to look at the so-far-written string
+  // to determine whether we are already indented, but
+  // with a stream we cannot do that. So we rely on some saved state.
+  // The caller checks indented_.
+
+  if (!indentation_.empty()) {
+    // In this case, drop newlines too.
+    *sout_ << '\n' << indentString_;
+  }
+}
+
+void BuiltStyledStreamWriter::writeWithIndent(std::string const& value) {
+  if (!indented_) writeIndent();
+  *sout_ << value;
+  indented_ = false;
+}
+
+void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; }
+
+void BuiltStyledStreamWriter::unindent() {
+  assert(indentString_.size() >= indentation_.size());
+  indentString_.resize(indentString_.size() - indentation_.size());
+}
+
+void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) {
+  if (cs_ == CommentStyle::None) return;
+  if (!root.hasComment(commentBefore))
+    return;
+
+  if (!indented_) writeIndent();
+  const std::string& comment = root.getComment(commentBefore);
+  std::string::const_iterator iter = comment.begin();
+  while (iter != comment.end()) {
+    *sout_ << *iter;
+    if (*iter == '\n' &&
+        (iter != comment.end() && *(iter + 1) == '/'))
+      // writeIndent(); // would write extra newline
+      *sout_ << indentString_;
+    ++iter;
+  }
+  indented_ = false;
+}
+
+void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root) {
+  if (cs_ == CommentStyle::None) return;
+  if (root.hasComment(commentAfterOnSameLine))
+    *sout_ << " " + root.getComment(commentAfterOnSameLine);
+
+  if (root.hasComment(commentAfter)) {
+    writeIndent();
+    *sout_ << root.getComment(commentAfter);
+  }
+}
+
+// static
+bool BuiltStyledStreamWriter::hasCommentForValue(const Value& value) {
+  return value.hasComment(commentBefore) ||
+         value.hasComment(commentAfterOnSameLine) ||
+         value.hasComment(commentAfter);
+}
+
+///////////////
+// StreamWriter
+
+StreamWriter::StreamWriter()
+    : sout_(NULL)
+{
+}
+StreamWriter::~StreamWriter()
+{
+}
+StreamWriter::Factory::~Factory()
+{}
+StreamWriterBuilder::StreamWriterBuilder()
+{
+  setDefaults(&settings_);
+}
+StreamWriterBuilder::~StreamWriterBuilder()
+{}
+StreamWriter* StreamWriterBuilder::newStreamWriter() const
+{
+  std::string indentation = settings_["indentation"].asString();
+  std::string cs_str = settings_["commentStyle"].asString();
+  bool eyc = settings_["enableYAMLCompatibility"].asBool();
+  bool dnp = settings_["dropNullPlaceholders"].asBool();
+  bool usf = settings_["useSpecialFloats"].asBool();
+  unsigned int pre = settings_["precision"].asUInt();
+  CommentStyle::Enum cs = CommentStyle::All;
+  if (cs_str == "All") {
+    cs = CommentStyle::All;
+  } else if (cs_str == "None") {
+    cs = CommentStyle::None;
+  } else {
+    throwRuntimeError("commentStyle must be 'All' or 'None'");
+  }
+  std::string colonSymbol = " : ";
+  if (eyc) {
+    colonSymbol = ": ";
+  } else if (indentation.empty()) {
+    colonSymbol = ":";
+  }
+  std::string nullSymbol = "null";
+  if (dnp) {
+    nullSymbol = "";
+  }
+  if (pre > 17) pre = 17;
+  std::string endingLineFeedSymbol = "";
+  return new BuiltStyledStreamWriter(
+      indentation, cs,
+      colonSymbol, nullSymbol, endingLineFeedSymbol, usf, pre);
+}
+static void getValidWriterKeys(std::set<std::string>* valid_keys)
+{
+  valid_keys->clear();
+  valid_keys->insert("indentation");
+  valid_keys->insert("commentStyle");
+  valid_keys->insert("enableYAMLCompatibility");
+  valid_keys->insert("dropNullPlaceholders");
+  valid_keys->insert("useSpecialFloats");
+  valid_keys->insert("precision");
+}
+bool StreamWriterBuilder::validate(Json::Value* invalid) const
+{
+  Json::Value my_invalid;
+  if (!invalid) invalid = &my_invalid; // so we do not need to test for NULL
+  Json::Value& inv = *invalid;
+  std::set<std::string> valid_keys;
+  getValidWriterKeys(&valid_keys);
+  Value::Members keys = settings_.getMemberNames();
+  size_t n = keys.size();
+  for (size_t i = 0; i < n; ++i) {
+    std::string const& key = keys[i];
+    if (valid_keys.find(key) == valid_keys.end()) {
+      inv[key] = settings_[key];
+    }
+  }
+  return 0u == inv.size();
+}
+Value& StreamWriterBuilder::operator[](std::string key)
+{
+  return settings_[key];
+}
+// static
+void StreamWriterBuilder::setDefaults(Json::Value* settings)
+{
+  //! [StreamWriterBuilderDefaults]
+  (*settings)["commentStyle"] = "All";
+  (*settings)["indentation"] = "\t";
+  (*settings)["enableYAMLCompatibility"] = false;
+  (*settings)["dropNullPlaceholders"] = false;
+  (*settings)["useSpecialFloats"] = false;
+  (*settings)["precision"] = 17;
+  //! [StreamWriterBuilderDefaults]
+}
+
+std::string writeString(StreamWriter::Factory const& builder, Value const& root) {
+  std::ostringstream sout;
+  StreamWriterPtr const writer(builder.newStreamWriter());
+  writer->write(root, &sout);
+  return sout.str();
+}
+
-std::ostream& operator<<(std::ostream& sout, const Value& root) {
-  Json::StyledStreamWriter writer;
-  writer.write(sout, root);
+std::ostream& operator<<(std::ostream& sout, Value const& root) {
+  StreamWriterBuilder builder;
+  StreamWriterPtr const writer(builder.newStreamWriter());
+  writer->write(root, &sout);
   return sout;
 }
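(Aside: the new-side code above introduces the StreamWriter/StreamWriterBuilder API: the builder holds a settings_ Value whose defaults are listed in setDefaults(), newStreamWriter() bakes those settings into a BuiltStyledStreamWriter, and writeString() is a one-shot convenience wrapper. A hedged usage sketch against that API, assuming a C++11 build for std::unique_ptr:)

#include <json/json.h>
#include <iostream>
#include <memory>

int main() {
  Json::Value root;
  root["name"] = "jsoncpp";
  root["pi"] = 3.141592653589793;

  Json::StreamWriterBuilder builder;
  builder["indentation"] = "  ";     // default is "\t"
  builder["commentStyle"] = "None";  // "All" keeps comments, "None" drops them
  builder["precision"] = 10;         // clamped to 17 by newStreamWriter()

  // One-shot helper.
  std::cout << Json::writeString(builder, root) << "\n";

  // Or drive a writer directly against any std::ostream.
  std::unique_ptr<Json::StreamWriter> writer(builder.newStreamWriter());
  writer->write(root, &std::cout);
  std::cout << std::endl;
  return 0;
}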
@@ -1,5 +1,4 @@
-// DO NOT EDIT. This file is generated by CMake from "version"
-// and "version.h.in" files.
+// DO NOT EDIT. This file (and "version") is generated by CMake.
 // Run CMake configure step to update it.
 #ifndef JSON_VERSION_H_INCLUDED
 # define JSON_VERSION_H_INCLUDED
@@ -1,7 +1,4 @@
-IF(JSONCPP_LIB_BUILD_SHARED)
-    ADD_DEFINITIONS( -DJSON_DLL )
-ENDIF(JSONCPP_LIB_BUILD_SHARED)
-
+# vim: et ts=4 sts=4 sw=4 tw=0
 
 ADD_EXECUTABLE( jsoncpp_test
     jsontest.cpp
@@ -9,14 +6,33 @@ ADD_EXECUTABLE( jsoncpp_test
     main.cpp
     )
 
+IF(BUILD_SHARED_LIBS)
+    ADD_DEFINITIONS( -DJSON_DLL )
 TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib)
+ELSE(BUILD_SHARED_LIBS)
+    TARGET_LINK_LIBRARIES(jsoncpp_test jsoncpp_lib_static)
+ENDIF()
+
+# another way to solve issue #90
+#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store)
 
 # Run unit tests in post-build
 # (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
 IF(JSONCPP_WITH_POST_BUILD_UNITTEST)
+    IF(BUILD_SHARED_LIBS)
+        # First, copy the shared lib, for Microsoft.
+        # Then, run the test executable.
+        ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
+                            POST_BUILD
+                            COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:jsoncpp_lib> $<TARGET_FILE_DIR:jsoncpp_test>
+                            COMMAND $<TARGET_FILE:jsoncpp_test>)
+    ELSE(BUILD_SHARED_LIBS)
+        # Just run the test executable.
     ADD_CUSTOM_COMMAND( TARGET jsoncpp_test
                         POST_BUILD
                         COMMAND $<TARGET_FILE:jsoncpp_test>)
-ENDIF(JSONCPP_WITH_POST_BUILD_UNITTEST)
+    ENDIF()
+ENDIF()
 
 SET_TARGET_PROPERTIES(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)
@@ -323,7 +323,7 @@ void Runner::listTests() const {
 }
 
 int Runner::runCommandLine(int argc, const char* argv[]) const {
-  typedef std::deque<std::string> TestNames;
+  // typedef std::deque<std::string> TestNames;
   Runner subrunner;
   for (int index = 1; index < argc; ++index) {
     std::string opt = argv[index];
@@ -178,8 +178,8 @@ private:
 
 template <typename T, typename U>
 TestResult& checkEqual(TestResult& result,
-                       const T& expected,
-                       const U& actual,
+                       T expected,
+                       U actual,
                        const char* file,
                        unsigned int line,
                        const char* expr) {
@@ -214,7 +214,7 @@ TestResult& checkStringEqual(TestResult& result,
 #define JSONTEST_ASSERT_PRED(expr) \
   { \
     JsonTest::PredicateContext _minitest_Context = { \
-      result_->predicateId_, __FILE__, __LINE__, #expr \
+      result_->predicateId_, __FILE__, __LINE__, #expr, NULL, NULL \
     }; \
     result_->predicateStackTail_->next_ = &_minitest_Context; \
     result_->predicateId_ += 1; \
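(Aside: the checkEqual() template and JSONTEST_ASSERT_PRED macro above belong to the bundled mini test framework. A hedged sketch of how a fixture typically exercises it; the macro and Runner names are assumed from the same jsontest.h header:)

#include "jsontest.h"
#include <json/json.h>

namespace {

JSONTEST_FIXTURE(ValueTest, integers) {
  Json::Value value(42);
  // Expands to checkEqual(): expected first, actual second.
  JSONTEST_ASSERT_EQUAL(42, value.asInt());
  JSONTEST_ASSERT(value.isInt());
}

} // namespace

int main(int argc, const char* argv[]) {
  JsonTest::Runner runner;
  JSONTEST_REGISTER_FIXTURE(runner, ValueTest, integers);
  return runner.runCommandLine(argc, argv);
}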
(File diff suppressed because it is too large.)
@@ -1,4 +1,10 @@
-# removes all files created during testing
+# Copyright 2007 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+"""Removes all files created during testing."""
+
 import glob
 import os
 
test/data/test_comment_00.expected (new file, 4 lines)
@@ -0,0 +1,4 @@
+// Comment for array
+.=[]
+// Comment within array
+.[0]="one-element"
test/data/test_comment_00.json (new file, 5 lines)
@@ -0,0 +1,5 @@
+// Comment for array
+[
+   // Comment within array
+   "one-element"
+]
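(Aside: test_comment_00.json/.expected exercise comments attached to an array and its first element. A hedged sketch of the round trip those fixtures rely on -- parse with comment collection enabled, then re-serialize with a styled writer so the comments reappear:)

#include <json/json.h>
#include <iostream>
#include <string>

int main() {
  const std::string doc =
      "// Comment for array\n"
      "[\n"
      "   // Comment within array\n"
      "   \"one-element\"\n"
      "]\n";

  Json::Value root;
  Json::Reader reader;
  if (!reader.parse(doc, root, /*collectComments=*/true)) {
    std::cerr << reader.getFormattedErrorMessages();
    return 1;
  }

  Json::StyledWriter writer;
  std::cout << writer.write(root);  // comments are emitted alongside the values
  return 0;
}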
@@ -1,6 +1,7 @@
 .={}
 // Comment for array
 .test=[]
+// Comment within array
 .test[0]={}
 .test[0].a="aaa"
 .test[1]={}
@@ -2,6 +2,7 @@
 "test":
 // Comment for array
 [
+   // Comment within array
    { "a" : "aaa" }, // Comment for a
    { "b" : "bbb" }, // Comment for b
    { "c" : "ccc" } // Comment for c
@@ -1,3 +1,8 @@
+# Copyright 2007 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 from __future__ import print_function
 import glob
 import os.path
@@ -1,4 +1,11 @@
-# Simple implementation of a json test runner to run the test against json-py.
+# Copyright 2007 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+"""Simple implementation of a json test runner to run the test against
+json-py."""
+
 from __future__ import print_function
 import sys
 import os.path
@@ -1,3 +1,8 @@
+# Copyright 2007 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 from __future__ import print_function
 from __future__ import unicode_literals
 from io import open
@@ -14,6 +19,7 @@ def getStatusOutput(cmd):
     Return int, unicode (for both Python 2 and 3).
     Note: os.popen().close() would return None for 0.
     """
+    print(cmd, file=sys.stderr)
     pipe = os.popen(cmd)
     process_output = pipe.read()
     try:
@@ -57,7 +63,8 @@ def safeReadFile( path ):
         return '<File "%s" is missing: %s>' % (path,e)
 
 def runAllTests(jsontest_executable_path, input_dir = None,
-                use_valgrind=False, with_json_checker=False ):
+                use_valgrind=False, with_json_checker=False,
+                writerClass='StyledWriter'):
     if not input_dir:
         input_dir = os.path.join(os.getcwd(), 'data')
     tests = glob(os.path.join(input_dir, '*.json'))
@@ -72,8 +79,8 @@ def runAllTests( jsontest_executable_path, input_dir = None,
         is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
         print('TESTING:', input_path, end=' ')
         options = is_json_checker_test and '--json-checker' or ''
-        cmd = '%s%s %s "%s"' % (
-            valgrind_path, jsontest_executable_path, options,
+        options += ' --json-writer %s'%writerClass
+        cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options,
             input_path)
         status, process_output = getStatusOutput(cmd)
         if is_json_checker_test:
@@ -145,7 +152,22 @@ def main():
     else:
         input_path = None
     status = runAllTests(jsontest_executable_path, input_path,
-                         use_valgrind=options.valgrind, with_json_checker=options.with_json_checker )
+                         use_valgrind=options.valgrind,
+                         with_json_checker=options.with_json_checker,
+                         writerClass='StyledWriter')
+    if status:
+        sys.exit(status)
+    status = runAllTests(jsontest_executable_path, input_path,
+                         use_valgrind=options.valgrind,
+                         with_json_checker=options.with_json_checker,
+                         writerClass='StyledStreamWriter')
+    if status:
+        sys.exit(status)
+    status = runAllTests(jsontest_executable_path, input_path,
+                         use_valgrind=options.valgrind,
+                         with_json_checker=options.with_json_checker,
+                         writerClass='BuiltStyledStreamWriter')
+    if status:
+        sys.exit(status)
     sys.exit(status)
 
 if __name__ == '__main__':
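(Aside: runjsontests.py now passes --json-writer <Class> to the jsontestrunner binary and runs the suite once per writer class. The matching C++ change lives in the suppressed jsontestrunner/main.cpp diff, so the dispatch below is only an illustrative assumption of what such an option handler might look like, built from the writer classes named on this page; writeWith() is a hypothetical helper, not the repository's actual code.)

#include <json/json.h>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>

// Hypothetical helper: serialize `root` with the writer selected via
// --json-writer <name>.
static std::string writeWith(const std::string& writerClass, const Json::Value& root) {
  if (writerClass == "FastWriter") {
    Json::FastWriter writer;
    return writer.write(root);
  }
  if (writerClass == "StyledStreamWriter") {
    std::ostringstream out;
    Json::StyledStreamWriter writer;
    writer.write(out, root);
    return out.str();
  }
  if (writerClass == "BuiltStyledStreamWriter") {
    Json::StreamWriterBuilder builder;  // produces a BuiltStyledStreamWriter
    return Json::writeString(builder, root);
  }
  Json::StyledWriter writer;            // default: StyledWriter
  return writer.write(root);
}

int main() {
  Json::Value root;
  root["writer"] = "selected at runtime";
  std::cout << writeWith("BuiltStyledStreamWriter", root);
  return 0;
}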
@@ -1,3 +1,8 @@
+# Copyright 2009 Baptiste Lepilleur
+# Distributed under MIT license, or public domain if desired and
+# recognized in your jurisdiction.
+# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
 from __future__ import print_function
 from __future__ import unicode_literals
 from io import open
@@ -53,8 +58,7 @@ def runAllTests( exe_path, use_valgrind=False ):
         print()
         for name, result in failures:
             print(result)
-        print('%d/%d tests passed (%d failure(s))' % (
-            pass_count, len(test_names), failed_count))
+        print('%d/%d tests passed (%d failure(s))' % ( pass_count, len(test_names), failed_count))
         return 1
     else:
         print('All %d tests passed' % len(test_names))
travis.sh (new executable file, 29 lines)
@@ -0,0 +1,29 @@
+#!/usr/bin/env sh
+# This is called by `.travis.yml` via Travis CI.
+# Travis supplies $TRAVIS_OS_NAME.
+#  http://docs.travis-ci.com/user/multi-os/
+# Our .travis.yml also defines:
+#   - SHARED_LIB=ON/OFF
+#   - STATIC_LIB=ON/OFF
+#   - CMAKE_PKG=ON/OFF
+#   - BUILD_TYPE=release/debug
+#   - VERBOSE_MAKE=false/true
+#   - VERBOSE (set or not)
+
+# -e: fail on error
+# -v: show commands
+# -x: show expanded commands
+set -vex
+
+env | sort
+
+cmake -DJSONCPP_WITH_CMAKE_PACKAGE=$CMAKE_PKG -DBUILD_SHARED_LIBS=$SHARED_LIB -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DCMAKE_VERBOSE_MAKEFILE=$VERBOSE_MAKE .
+make
+
+# Python is not available in Travis for osx.
+#  https://github.com/travis-ci/travis-ci/issues/2320
+if [ "$TRAVIS_OS_NAME" != "osx" ]
+then
+  make jsoncpp_check
+  valgrind --error-exitcode=42 --leak-check=full ./src/test_lib_json/jsoncpp_test
+fi
version.in (new file, 1 line)
@@ -0,0 +1 @@
+@JSONCPP_VERSION@