mirror of
https://github.com/open-source-parsers/jsoncpp.git
synced 2025-10-15 15:16:47 +02:00
Compare commits
127 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
9059f5cad0 | ||
![]() |
45733df96c | ||
![]() |
5be07bdc5e | ||
![]() |
bf0cfa5b46 | ||
![]() |
cfc1ad72ad | ||
![]() |
c8453d39d1 | ||
![]() |
632044ad95 | ||
![]() |
b3189a0800 | ||
![]() |
9be5895985 | ||
![]() |
6aba23f4a8 | ||
![]() |
c161f4ac69 | ||
![]() |
75b360af4a | ||
![]() |
e36cff19f0 | ||
![]() |
b8cb8889aa | ||
![]() |
d2d4c74a03 | ||
![]() |
8b7ea09b80 | ||
![]() |
a4fb5db543 | ||
![]() |
d517d598a7 | ||
![]() |
e9b0b96be6 | ||
![]() |
3f0d63b5a9 | ||
![]() |
524234e479 | ||
![]() |
9abf11935c | ||
![]() |
8a5e792f20 | ||
![]() |
30eb5ce128 | ||
![]() |
12ceb01485 | ||
![]() |
edc6239f39 | ||
![]() |
5a0152ae1b | ||
![]() |
c648b0378a | ||
![]() |
a3afd74b80 | ||
![]() |
2cb16b35dc | ||
![]() |
83946a28db | ||
![]() |
91f1553f2c | ||
![]() |
5813ab1bc1 | ||
![]() |
a0b8c3ecb4 | ||
![]() |
b349221938 | ||
![]() |
9e23f66f61 | ||
![]() |
411d88fae8 | ||
![]() |
1ff6bb65a0 | ||
![]() |
8b20b7a317 | ||
![]() |
2e54e8ff1c | ||
![]() |
90ca694e46 | ||
![]() |
3beb37ea14 | ||
![]() |
dc180eb25e | ||
![]() |
a6fe8e27d8 | ||
![]() |
edf528edfa | ||
![]() |
6317f9a406 | ||
![]() |
6bc55ec35d | ||
![]() |
f11611c878 | ||
![]() |
8f7f35c5cd | ||
![]() |
7e5485ab5b | ||
![]() |
92d90250f2 | ||
![]() |
d6c4a8fb2d | ||
![]() |
a3c8642886 | ||
![]() |
2983f5a89a | ||
![]() |
a0bd9adfef | ||
![]() |
9e0d70aa66 | ||
![]() |
f200239d5b | ||
![]() |
cfc3e927fc | ||
![]() |
a481201af1 | ||
![]() |
9704cedb20 | ||
![]() |
781eec4da8 | ||
![]() |
1c8f7d8ae5 | ||
![]() |
554d961625 | ||
![]() |
01db7b7430 | ||
![]() |
d2e6a971f4 | ||
![]() |
53c8e2cb3b | ||
![]() |
645cd0412c | ||
![]() |
ff58fdcc75 | ||
![]() |
82b736734d | ||
![]() |
a86e129983 | ||
![]() |
5fb17a66b8 | ||
![]() |
7429bb2bfa | ||
![]() |
2eb20a938c | ||
![]() |
638ad269e7 | ||
![]() |
ec9302c4ed | ||
![]() |
fb9aaf8112 | ||
![]() |
2703c306a3 | ||
![]() |
c634b98e7d | ||
![]() |
6c9408d128 | ||
![]() |
54bd178bd8 | ||
![]() |
41ffff01d3 | ||
![]() |
b082693b9e | ||
![]() |
a955529e47 | ||
![]() |
f59ac2a1d7 | ||
![]() |
a07b37e4ec | ||
![]() |
aebc7faa4f | ||
![]() |
bdacfd7bc0 | ||
![]() |
c5f66ab816 | ||
![]() |
bcad4e4de2 | ||
![]() |
7329223f58 | ||
![]() |
2e33c218cb | ||
![]() |
ddc0748c4f | ||
![]() |
2ee3b1dbb1 | ||
![]() |
f34bf24bbd | ||
![]() |
c4bc6da87d | ||
![]() |
736409f1b5 | ||
![]() |
7e97345e26 | ||
![]() |
227c7cdfa5 | ||
![]() |
00c2c9f6e4 | ||
![]() |
d448610770 | ||
![]() |
00b979f086 | ||
![]() |
ae4dc9aa62 | ||
![]() |
e9ccbe0145 | ||
![]() |
21e3d21243 | ||
![]() |
c97bd59ff2 | ||
![]() |
81ae1d55f7 | ||
![]() |
18f790fbe7 | ||
![]() |
3013ed48b3 | ||
![]() |
2cb9a5803e | ||
![]() |
c5cb313ca0 | ||
![]() |
abcd3f7b1f | ||
![]() |
d622250c3e | ||
![]() |
db61dba885 | ||
![]() |
7ef0f9fa5b | ||
![]() |
3550a0a939 | ||
![]() |
21ab82916b | ||
![]() |
fd940255ce | ||
![]() |
472adb60ee | ||
![]() |
c92c87b47d | ||
![]() |
b941149a37 | ||
![]() |
2cf939e8c3 | ||
![]() |
7b28698c5c | ||
![]() |
0d27381acf | ||
![]() |
12325b814f | ||
![]() |
b27c83f691 | ||
![]() |
483eba84a7 | ||
![]() |
b3507948e2 |
@@ -1,47 +1,4 @@
|
||||
---
|
||||
# BasedOnStyle: LLVM
|
||||
AccessModifierOffset: -2
|
||||
ConstructorInitializerIndentWidth: 4
|
||||
AlignEscapedNewlinesLeft: false
|
||||
AlignTrailingComments: true
|
||||
AllowAllParametersOfDeclarationOnNextLine: true
|
||||
AllowShortIfStatementsOnASingleLine: false
|
||||
AllowShortLoopsOnASingleLine: false
|
||||
AlwaysBreakTemplateDeclarations: false
|
||||
AlwaysBreakBeforeMultilineStrings: false
|
||||
BreakBeforeBinaryOperators: false
|
||||
BreakBeforeTernaryOperators: true
|
||||
BreakConstructorInitializersBeforeComma: false
|
||||
BinPackParameters: false
|
||||
ColumnLimit: 80
|
||||
ConstructorInitializerAllOnOneLineOrOnePerLine: false
|
||||
DerivePointerBinding: false
|
||||
ExperimentalAutoDetectBinPacking: false
|
||||
IndentCaseLabels: false
|
||||
MaxEmptyLinesToKeep: 1
|
||||
NamespaceIndentation: None
|
||||
ObjCSpaceBeforeProtocolList: true
|
||||
PenaltyBreakBeforeFirstCallParameter: 19
|
||||
PenaltyBreakComment: 60
|
||||
PenaltyBreakString: 1000
|
||||
PenaltyBreakFirstLessLess: 120
|
||||
PenaltyExcessCharacter: 1000000
|
||||
PenaltyReturnTypeOnItsOwnLine: 60
|
||||
PointerBindsToType: true
|
||||
SpacesBeforeTrailingComments: 1
|
||||
Cpp11BracedListStyle: true
|
||||
Standard: Cpp11
|
||||
IndentWidth: 2
|
||||
TabWidth: 8
|
||||
UseTab: Never
|
||||
BreakBeforeBraces: Attach
|
||||
IndentFunctionDeclarationAfterType: false
|
||||
SpacesInParentheses: false
|
||||
SpacesInAngles: false
|
||||
SpaceInEmptyParentheses: false
|
||||
SpacesInCStyleCastParentheses: false
|
||||
SpaceAfterControlStatementKeyword: true
|
||||
SpaceBeforeAssignmentOperators: true
|
||||
ContinuationIndentWidth: 4
|
||||
...
|
||||
BasedOnStyle: LLVM
|
||||
DerivePointerAlignment: false
|
||||
PointerAlignment: Left
|
||||
|
||||
|
11
.clang-tidy
Normal file
11
.clang-tidy
Normal file
@@ -0,0 +1,11 @@
|
||||
---
|
||||
Checks: 'google-readability-casting,modernize-deprecated-headers,modernize-loop-convert,modernize-use-auto,modernize-use-default-member-init,modernize-use-using,readability-else-after-return,readability-redundant-member-init,readability-redundant-string-cstr'
|
||||
WarningsAsErrors: ''
|
||||
HeaderFilterRegex: ''
|
||||
AnalyzeTemporaryDtors: false
|
||||
FormatStyle: none
|
||||
CheckOptions:
|
||||
- key: modernize-use-using.IgnoreMacros
|
||||
value: '0'
|
||||
...
|
||||
|
3
.gitignore
vendored
3
.gitignore
vendored
@@ -10,8 +10,6 @@
|
||||
/libs/
|
||||
/doc/doxyfile
|
||||
/dist/
|
||||
#/version
|
||||
#/include/json/version.h
|
||||
|
||||
# MSVC project files:
|
||||
*.sln
|
||||
@@ -30,7 +28,6 @@
|
||||
|
||||
# CMake-generated files:
|
||||
CMakeFiles/
|
||||
*.cmake
|
||||
/pkg-config/jsoncpp.pc
|
||||
jsoncpp_lib_static.dir/
|
||||
|
||||
|
@@ -9,6 +9,7 @@ sudo: false
|
||||
addons:
|
||||
homebrew:
|
||||
packages:
|
||||
- clang-format
|
||||
- meson
|
||||
- ninja
|
||||
update: false # do not update homebrew by default
|
||||
@@ -17,6 +18,7 @@ addons:
|
||||
- ubuntu-toolchain-r-test
|
||||
- llvm-toolchain-xenial-8
|
||||
packages:
|
||||
- clang-format-8
|
||||
- clang-8
|
||||
- valgrind
|
||||
matrix:
|
||||
@@ -25,7 +27,7 @@ matrix:
|
||||
include:
|
||||
- name: Mac clang meson static release testing
|
||||
os: osx
|
||||
osx_image: xcode10.2
|
||||
osx_image: xcode11
|
||||
compiler: clang
|
||||
env:
|
||||
CXX="clang++"
|
||||
@@ -60,6 +62,10 @@ matrix:
|
||||
BUILD_TYPE=Debug
|
||||
LIB_TYPE=shared
|
||||
DESTDIR=/tmp/cmake_json_cpp
|
||||
before_install:
|
||||
- pip install --user cpp-coveralls
|
||||
script: ./.travis_scripts/cmake_builder.sh
|
||||
after_success:
|
||||
- coveralls --include src/lib_json --include include
|
||||
notifications:
|
||||
email: false
|
||||
|
@@ -66,7 +66,7 @@ cmake --version
|
||||
echo ${CXX}
|
||||
${CXX} --version
|
||||
_COMPILER_NAME=`basename ${CXX}`
|
||||
if [ "${BUILD_TYPE}" == "shared" ]; then
|
||||
if [ "${LIB_TYPE}" = "shared" ]; then
|
||||
_CMAKE_BUILD_SHARED_LIBS=ON
|
||||
else
|
||||
_CMAKE_BUILD_SHARED_LIBS=OFF
|
||||
|
@@ -63,9 +63,11 @@ meson --version
|
||||
ninja --version
|
||||
_COMPILER_NAME=`basename ${CXX}`
|
||||
_BUILD_DIR_NAME="build-${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}"
|
||||
meson --buildtype ${BUILD_TYPE} --default-library ${LIB_TYPE} . "${_BUILD_DIR_NAME}"
|
||||
|
||||
./.travis_scripts/run-clang-format.sh
|
||||
meson --fatal-meson-warnings --werror --buildtype ${BUILD_TYPE} --default-library ${LIB_TYPE} . "${_BUILD_DIR_NAME}"
|
||||
ninja -v -j 2 -C "${_BUILD_DIR_NAME}"
|
||||
#ninja -v -j 2 -C "${_BUILD_DIR_NAME}" test
|
||||
|
||||
cd "${_BUILD_DIR_NAME}"
|
||||
meson test --no-rebuild --print-errorlogs
|
||||
|
||||
|
356
.travis_scripts/run-clang-format.py
Executable file
356
.travis_scripts/run-clang-format.py
Executable file
@@ -0,0 +1,356 @@
|
||||
#!/usr/bin/env python
|
||||
"""A wrapper script around clang-format, suitable for linting multiple files
|
||||
and to use for continuous integration.
|
||||
This is an alternative API for the clang-format command line.
|
||||
It runs over multiple files and directories in parallel.
|
||||
A diff output is produced and a sensible exit code is returned.
|
||||
|
||||
NOTE: pulled from https://github.com/Sarcasm/run-clang-format, which is
|
||||
licensed under the MIT license.
|
||||
"""
|
||||
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import argparse
|
||||
import codecs
|
||||
import difflib
|
||||
import fnmatch
|
||||
import io
|
||||
import multiprocessing
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from functools import partial
|
||||
|
||||
try:
|
||||
from subprocess import DEVNULL # py3k
|
||||
except ImportError:
|
||||
DEVNULL = open(os.devnull, "wb")
|
||||
|
||||
|
||||
DEFAULT_EXTENSIONS = 'c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx'
|
||||
|
||||
|
||||
class ExitStatus:
|
||||
SUCCESS = 0
|
||||
DIFF = 1
|
||||
TROUBLE = 2
|
||||
|
||||
|
||||
def list_files(files, recursive=False, extensions=None, exclude=None):
|
||||
if extensions is None:
|
||||
extensions = []
|
||||
if exclude is None:
|
||||
exclude = []
|
||||
|
||||
out = []
|
||||
for file in files:
|
||||
if recursive and os.path.isdir(file):
|
||||
for dirpath, dnames, fnames in os.walk(file):
|
||||
fpaths = [os.path.join(dirpath, fname) for fname in fnames]
|
||||
for pattern in exclude:
|
||||
# os.walk() supports trimming down the dnames list
|
||||
# by modifying it in-place,
|
||||
# to avoid unnecessary directory listings.
|
||||
dnames[:] = [
|
||||
x for x in dnames
|
||||
if
|
||||
not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)
|
||||
]
|
||||
fpaths = [
|
||||
x for x in fpaths if not fnmatch.fnmatch(x, pattern)
|
||||
]
|
||||
for f in fpaths:
|
||||
ext = os.path.splitext(f)[1][1:]
|
||||
if ext in extensions:
|
||||
out.append(f)
|
||||
else:
|
||||
out.append(file)
|
||||
return out
|
||||
|
||||
|
||||
def make_diff(file, original, reformatted):
|
||||
return list(
|
||||
difflib.unified_diff(
|
||||
original,
|
||||
reformatted,
|
||||
fromfile='{}\t(original)'.format(file),
|
||||
tofile='{}\t(reformatted)'.format(file),
|
||||
n=3))
|
||||
|
||||
|
||||
class DiffError(Exception):
|
||||
def __init__(self, message, errs=None):
|
||||
super(DiffError, self).__init__(message)
|
||||
self.errs = errs or []
|
||||
|
||||
|
||||
class UnexpectedError(Exception):
|
||||
def __init__(self, message, exc=None):
|
||||
super(UnexpectedError, self).__init__(message)
|
||||
self.formatted_traceback = traceback.format_exc()
|
||||
self.exc = exc
|
||||
|
||||
|
||||
def run_clang_format_diff_wrapper(args, file):
|
||||
try:
|
||||
ret = run_clang_format_diff(args, file)
|
||||
return ret
|
||||
except DiffError:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise UnexpectedError('{}: {}: {}'.format(file, e.__class__.__name__,
|
||||
e), e)
|
||||
|
||||
|
||||
def run_clang_format_diff(args, file):
|
||||
try:
|
||||
with io.open(file, 'r', encoding='utf-8') as f:
|
||||
original = f.readlines()
|
||||
except IOError as exc:
|
||||
raise DiffError(str(exc))
|
||||
invocation = [args.clang_format_executable, file]
|
||||
|
||||
# Use of utf-8 to decode the process output.
|
||||
#
|
||||
# Hopefully, this is the correct thing to do.
|
||||
#
|
||||
# It's done due to the following assumptions (which may be incorrect):
|
||||
# - clang-format will returns the bytes read from the files as-is,
|
||||
# without conversion, and it is already assumed that the files use utf-8.
|
||||
# - if the diagnostics were internationalized, they would use utf-8:
|
||||
# > Adding Translations to Clang
|
||||
# >
|
||||
# > Not possible yet!
|
||||
# > Diagnostic strings should be written in UTF-8,
|
||||
# > the client can translate to the relevant code page if needed.
|
||||
# > Each translation completely replaces the format string
|
||||
# > for the diagnostic.
|
||||
# > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation
|
||||
#
|
||||
# It's not pretty, due to Python 2 & 3 compatibility.
|
||||
encoding_py3 = {}
|
||||
if sys.version_info[0] >= 3:
|
||||
encoding_py3['encoding'] = 'utf-8'
|
||||
|
||||
try:
|
||||
proc = subprocess.Popen(
|
||||
invocation,
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.PIPE,
|
||||
universal_newlines=True,
|
||||
**encoding_py3)
|
||||
except OSError as exc:
|
||||
raise DiffError(
|
||||
"Command '{}' failed to start: {}".format(
|
||||
subprocess.list2cmdline(invocation), exc
|
||||
)
|
||||
)
|
||||
proc_stdout = proc.stdout
|
||||
proc_stderr = proc.stderr
|
||||
if sys.version_info[0] < 3:
|
||||
# make the pipes compatible with Python 3,
|
||||
# reading lines should output unicode
|
||||
encoding = 'utf-8'
|
||||
proc_stdout = codecs.getreader(encoding)(proc_stdout)
|
||||
proc_stderr = codecs.getreader(encoding)(proc_stderr)
|
||||
# hopefully the stderr pipe won't get full and block the process
|
||||
outs = list(proc_stdout.readlines())
|
||||
errs = list(proc_stderr.readlines())
|
||||
proc.wait()
|
||||
if proc.returncode:
|
||||
raise DiffError(
|
||||
"Command '{}' returned non-zero exit status {}".format(
|
||||
subprocess.list2cmdline(invocation), proc.returncode
|
||||
),
|
||||
errs,
|
||||
)
|
||||
return make_diff(file, original, outs), errs
|
||||
|
||||
|
||||
def bold_red(s):
|
||||
return '\x1b[1m\x1b[31m' + s + '\x1b[0m'
|
||||
|
||||
|
||||
def colorize(diff_lines):
|
||||
def bold(s):
|
||||
return '\x1b[1m' + s + '\x1b[0m'
|
||||
|
||||
def cyan(s):
|
||||
return '\x1b[36m' + s + '\x1b[0m'
|
||||
|
||||
def green(s):
|
||||
return '\x1b[32m' + s + '\x1b[0m'
|
||||
|
||||
def red(s):
|
||||
return '\x1b[31m' + s + '\x1b[0m'
|
||||
|
||||
for line in diff_lines:
|
||||
if line[:4] in ['--- ', '+++ ']:
|
||||
yield bold(line)
|
||||
elif line.startswith('@@ '):
|
||||
yield cyan(line)
|
||||
elif line.startswith('+'):
|
||||
yield green(line)
|
||||
elif line.startswith('-'):
|
||||
yield red(line)
|
||||
else:
|
||||
yield line
|
||||
|
||||
|
||||
def print_diff(diff_lines, use_color):
|
||||
if use_color:
|
||||
diff_lines = colorize(diff_lines)
|
||||
if sys.version_info[0] < 3:
|
||||
sys.stdout.writelines((l.encode('utf-8') for l in diff_lines))
|
||||
else:
|
||||
sys.stdout.writelines(diff_lines)
|
||||
|
||||
|
||||
def print_trouble(prog, message, use_colors):
|
||||
error_text = 'error:'
|
||||
if use_colors:
|
||||
error_text = bold_red(error_text)
|
||||
print("{}: {} {}".format(prog, error_text, message), file=sys.stderr)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.add_argument(
|
||||
'--clang-format-executable',
|
||||
metavar='EXECUTABLE',
|
||||
help='path to the clang-format executable',
|
||||
default='clang-format')
|
||||
parser.add_argument(
|
||||
'--extensions',
|
||||
help='comma separated list of file extensions (default: {})'.format(
|
||||
DEFAULT_EXTENSIONS),
|
||||
default=DEFAULT_EXTENSIONS)
|
||||
parser.add_argument(
|
||||
'-r',
|
||||
'--recursive',
|
||||
action='store_true',
|
||||
help='run recursively over directories')
|
||||
parser.add_argument('files', metavar='file', nargs='+')
|
||||
parser.add_argument(
|
||||
'-q',
|
||||
'--quiet',
|
||||
action='store_true')
|
||||
parser.add_argument(
|
||||
'-j',
|
||||
metavar='N',
|
||||
type=int,
|
||||
default=0,
|
||||
help='run N clang-format jobs in parallel'
|
||||
' (default number of cpus + 1)')
|
||||
parser.add_argument(
|
||||
'--color',
|
||||
default='auto',
|
||||
choices=['auto', 'always', 'never'],
|
||||
help='show colored diff (default: auto)')
|
||||
parser.add_argument(
|
||||
'-e',
|
||||
'--exclude',
|
||||
metavar='PATTERN',
|
||||
action='append',
|
||||
default=[],
|
||||
help='exclude paths matching the given glob-like pattern(s)'
|
||||
' from recursive search')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
# use default signal handling, like diff return SIGINT value on ^C
|
||||
# https://bugs.python.org/issue14229#msg156446
|
||||
signal.signal(signal.SIGINT, signal.SIG_DFL)
|
||||
try:
|
||||
signal.SIGPIPE
|
||||
except AttributeError:
|
||||
# compatibility, SIGPIPE does not exist on Windows
|
||||
pass
|
||||
else:
|
||||
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
|
||||
|
||||
colored_stdout = False
|
||||
colored_stderr = False
|
||||
if args.color == 'always':
|
||||
colored_stdout = True
|
||||
colored_stderr = True
|
||||
elif args.color == 'auto':
|
||||
colored_stdout = sys.stdout.isatty()
|
||||
colored_stderr = sys.stderr.isatty()
|
||||
|
||||
version_invocation = [args.clang_format_executable, str("--version")]
|
||||
try:
|
||||
subprocess.check_call(version_invocation, stdout=DEVNULL)
|
||||
except subprocess.CalledProcessError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
return ExitStatus.TROUBLE
|
||||
except OSError as e:
|
||||
print_trouble(
|
||||
parser.prog,
|
||||
"Command '{}' failed to start: {}".format(
|
||||
subprocess.list2cmdline(version_invocation), e
|
||||
),
|
||||
use_colors=colored_stderr,
|
||||
)
|
||||
return ExitStatus.TROUBLE
|
||||
|
||||
retcode = ExitStatus.SUCCESS
|
||||
files = list_files(
|
||||
args.files,
|
||||
recursive=args.recursive,
|
||||
exclude=args.exclude,
|
||||
extensions=args.extensions.split(','))
|
||||
|
||||
if not files:
|
||||
return
|
||||
|
||||
njobs = args.j
|
||||
if njobs == 0:
|
||||
njobs = multiprocessing.cpu_count() + 1
|
||||
njobs = min(len(files), njobs)
|
||||
|
||||
if njobs == 1:
|
||||
# execute directly instead of in a pool,
|
||||
# less overhead, simpler stacktraces
|
||||
it = (run_clang_format_diff_wrapper(args, file) for file in files)
|
||||
pool = None
|
||||
else:
|
||||
pool = multiprocessing.Pool(njobs)
|
||||
it = pool.imap_unordered(
|
||||
partial(run_clang_format_diff_wrapper, args), files)
|
||||
while True:
|
||||
try:
|
||||
outs, errs = next(it)
|
||||
except StopIteration:
|
||||
break
|
||||
except DiffError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
retcode = ExitStatus.TROUBLE
|
||||
sys.stderr.writelines(e.errs)
|
||||
except UnexpectedError as e:
|
||||
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
|
||||
sys.stderr.write(e.formatted_traceback)
|
||||
retcode = ExitStatus.TROUBLE
|
||||
# stop at the first unexpected error,
|
||||
# something could be very wrong,
|
||||
# don't process all files unnecessarily
|
||||
if pool:
|
||||
pool.terminate()
|
||||
break
|
||||
else:
|
||||
sys.stderr.writelines(errs)
|
||||
if outs == []:
|
||||
continue
|
||||
if not args.quiet:
|
||||
print_diff(outs, use_color=colored_stdout)
|
||||
if retcode == ExitStatus.SUCCESS:
|
||||
retcode = ExitStatus.DIFF
|
||||
return retcode
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
4
.travis_scripts/run-clang-format.sh
Executable file
4
.travis_scripts/run-clang-format.sh
Executable file
@@ -0,0 +1,4 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
python $DIR/run-clang-format.py -r $DIR/../src/**/ $DIR/../include/**/
|
2
AUTHORS
2
AUTHORS
@@ -21,6 +21,7 @@ Braden McDorman <bmcdorman@gmail.com>
|
||||
Brandon Myers <bmyers1788@gmail.com>
|
||||
Brendan Drew <brendan.drew@daqri.com>
|
||||
chason <cxchao802@gmail.com>
|
||||
chenguoping <chenguopingdota@163.com>
|
||||
Chris Gilling <cgilling@iparadigms.com>
|
||||
Christopher Dawes <christopher.dawes.1981@googlemail.com>
|
||||
Christopher Dunn <cdunn2001@gmail.com>
|
||||
@@ -97,6 +98,7 @@ selaselah <selah@outlook.com>
|
||||
Sergiy80 <sil2004@gmail.com>
|
||||
sergzub <sergzub@gmail.com>
|
||||
Stefan Schweter <stefan@schweter.it>
|
||||
Stefano Fiorentino <stefano.fiore84@gmail.com>
|
||||
Steffen Kieß <Steffen.Kiess@ipvs.uni-stuttgart.de>
|
||||
Steven Hahn <hahnse@ornl.gov>
|
||||
Stuart Eichert <stuart@fivemicro.com>
|
||||
|
132
CMakeLists.txt
132
CMakeLists.txt
@@ -37,37 +37,43 @@ foreach(pold "") # Currently Empty
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
# ==== Define language standard configurations requiring at least c++11 standard
|
||||
if(CMAKE_CXX_STANDARD EQUAL "98" )
|
||||
message(FATAL_ERROR "CMAKE_CXX_STANDARD:STRING=98 is not supported.")
|
||||
endif()
|
||||
# Build the library with C++11 standard support, independent from other including
|
||||
# software which may use a different CXX_STANDARD or CMAKE_CXX_STANDARD.
|
||||
set(CMAKE_CXX_STANDARD 11)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
|
||||
#####
|
||||
## Set the default target properties
|
||||
if(NOT CMAKE_CXX_STANDARD)
|
||||
set(CMAKE_CXX_STANDARD 11) # Supported values are ``11``, ``14``, and ``17``.
|
||||
endif()
|
||||
if(NOT CMAKE_CXX_STANDARD_REQUIRED)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
endif()
|
||||
if(NOT CMAKE_CXX_EXTENSIONS)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
endif()
|
||||
|
||||
# ====
|
||||
|
||||
# Ensures that CMAKE_BUILD_TYPE has a default value
|
||||
if(NOT DEFINED CMAKE_BUILD_TYPE)
|
||||
# Ensure that CMAKE_BUILD_TYPE has a value specified for single configuration generators.
|
||||
if(NOT DEFINED CMAKE_BUILD_TYPE AND NOT DEFINED CMAKE_CONFIGURATION_TYPES)
|
||||
set(CMAKE_BUILD_TYPE Release CACHE STRING
|
||||
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel Coverage.")
|
||||
endif()
|
||||
|
||||
project(JSONCPP
|
||||
VERSION 1.9.0 # <major>[.<minor>[.<patch>[.<tweak>]]]
|
||||
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# use ccache if found, has to be done before project()
|
||||
# ---------------------------------------------------------------------------
|
||||
find_program(CCACHE_EXECUTABLE "ccache" HINTS /usr/local/bin /opt/local/bin)
|
||||
if(CCACHE_EXECUTABLE)
|
||||
message(STATUS "use ccache")
|
||||
set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE PATH "ccache" FORCE)
|
||||
set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE PATH "ccache" FORCE)
|
||||
endif()
|
||||
|
||||
project(jsoncpp
|
||||
# Note: version must be updated in three places when doing a release. This
|
||||
# annoying process ensures that amalgamate, CMake, and meson all report the
|
||||
# correct version.
|
||||
# 1. ./meson.build
|
||||
# 2. ./include/json/version.h
|
||||
# 3. ./CMakeLists.txt
|
||||
# IMPORTANT: also update the PROJECT_SOVERSION!!
|
||||
VERSION 1.9.4 # <major>[.<minor>[.<patch>[.<tweak>]]]
|
||||
LANGUAGES CXX)
|
||||
|
||||
message(STATUS "JsonCpp Version: ${JSONCPP_VERSION_MAJOR}.${JSONCPP_VERSION_MINOR}.${JSONCPP_VERSION_PATCH}")
|
||||
set( JSONCPP_SOVERSION 21 )
|
||||
message(STATUS "JsonCpp Version: ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}")
|
||||
set(PROJECT_SOVERSION 24)
|
||||
|
||||
option(JSONCPP_WITH_TESTS "Compile and (for jsoncpp_check) run JsonCpp test executables" ON)
|
||||
option(JSONCPP_WITH_POST_BUILD_UNITTEST "Automatically run unit-tests as a post build step" ON)
|
||||
@@ -75,29 +81,26 @@ option(JSONCPP_WITH_WARNING_AS_ERROR "Force compilation to fail if a warning occ
|
||||
option(JSONCPP_WITH_STRICT_ISO "Issue all the warnings demanded by strict ISO C and ISO C++" ON)
|
||||
option(JSONCPP_WITH_PKGCONFIG_SUPPORT "Generate and install .pc files" ON)
|
||||
option(JSONCPP_WITH_CMAKE_PACKAGE "Generate and install cmake package files" ON)
|
||||
option(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." OFF)
|
||||
|
||||
# Enable runtime search path support for dynamic libraries on OSX
|
||||
if(APPLE)
|
||||
set(CMAKE_MACOSX_RPATH 1)
|
||||
endif()
|
||||
option(JSONCPP_WITH_EXAMPLE "Compile JsonCpp example" OFF)
|
||||
option(BUILD_SHARED_LIBS "Build jsoncpp_lib as a shared library." ON)
|
||||
option(BUILD_STATIC_LIBS "Build jsoncpp_lib as a static library." ON)
|
||||
option(BUILD_OBJECT_LIBS "Build jsoncpp_lib as a object library." ON)
|
||||
|
||||
# Adhere to GNU filesystem layout conventions
|
||||
include(GNUInstallDirs)
|
||||
|
||||
set(DEBUG_LIBNAME_SUFFIX "" CACHE STRING "Optional suffix to append to the library name for a debug build")
|
||||
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib" CACHE PATH "Archive output dir.")
|
||||
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib" CACHE PATH "Library output dir.")
|
||||
set(CMAKE_PDB_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" CACHE PATH "PDB (MSVC debug symbol)output dir.")
|
||||
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" CACHE PATH "Executable/dll output dir.")
|
||||
|
||||
set(JSONCPP_USE_SECURE_MEMORY "0" CACHE STRING "-D...=1 to use memory-wiping allocator for STL" )
|
||||
set(JSONCPP_USE_SECURE_MEMORY "0" CACHE STRING "-D...=1 to use memory-wiping allocator for STL")
|
||||
|
||||
# File version.h is only regenerated on CMake configure step
|
||||
configure_file( "${PROJECT_SOURCE_DIR}/src/lib_json/version.h.in"
|
||||
"${PROJECT_BINARY_DIR}/include/json/version.h"
|
||||
NEWLINE_STYLE UNIX )
|
||||
configure_file( "${PROJECT_SOURCE_DIR}/version.in"
|
||||
"${PROJECT_BINARY_DIR}/version"
|
||||
NEWLINE_STYLE UNIX )
|
||||
configure_file("${PROJECT_SOURCE_DIR}/version.in"
|
||||
"${PROJECT_BINARY_DIR}/version"
|
||||
NEWLINE_STYLE UNIX)
|
||||
|
||||
macro(UseCompilationWarningAsError)
|
||||
macro(use_compilation_warning_as_error)
|
||||
if(MSVC)
|
||||
# Only enabled in debug because some old versions of VS STL generate
|
||||
# warnings when compiled in release configuration.
|
||||
@@ -111,7 +114,7 @@ macro(UseCompilationWarningAsError)
|
||||
endmacro()
|
||||
|
||||
# Include our configuration header
|
||||
include_directories( ${jsoncpp_SOURCE_DIR}/include )
|
||||
include_directories(${jsoncpp_SOURCE_DIR}/include)
|
||||
|
||||
if(MSVC)
|
||||
# Only enabled in debug because some old versions of VS STL generate
|
||||
@@ -128,7 +131,7 @@ elseif(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
|
||||
# not yet ready for -Wsign-conversion
|
||||
|
||||
if(JSONCPP_WITH_STRICT_ISO)
|
||||
add_compile_options(-pedantic)
|
||||
add_compile_options(-Wpedantic)
|
||||
endif()
|
||||
if(JSONCPP_WITH_WARNING_AS_ERROR)
|
||||
add_compile_options(-Werror=conversion)
|
||||
@@ -138,21 +141,20 @@ elseif(CMAKE_CXX_COMPILER_ID STREQUAL "Intel")
|
||||
add_compile_options(-Wall -Wconversion -Wshadow -Wextra -Werror=conversion)
|
||||
|
||||
if(JSONCPP_WITH_STRICT_ISO AND NOT JSONCPP_WITH_WARNING_AS_ERROR)
|
||||
add_compile_options(-pedantic)
|
||||
add_compile_options(-Wpedantic)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
find_program(CCACHE_FOUND ccache)
|
||||
if(CCACHE_FOUND)
|
||||
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
|
||||
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
|
||||
endif(CCACHE_FOUND)
|
||||
|
||||
if(JSONCPP_WITH_WARNING_AS_ERROR)
|
||||
UseCompilationWarningAsError()
|
||||
use_compilation_warning_as_error()
|
||||
endif()
|
||||
|
||||
if(JSONCPP_WITH_PKGCONFIG_SUPPORT)
|
||||
include(JoinPaths)
|
||||
|
||||
join_paths(libdir_for_pc_file "\${exec_prefix}" "${CMAKE_INSTALL_LIBDIR}")
|
||||
join_paths(includedir_for_pc_file "\${prefix}" "${CMAKE_INSTALL_INCLUDEDIR}")
|
||||
|
||||
configure_file(
|
||||
"pkg-config/jsoncpp.pc.in"
|
||||
"pkg-config/jsoncpp.pc"
|
||||
@@ -162,25 +164,29 @@ if(JSONCPP_WITH_PKGCONFIG_SUPPORT)
|
||||
endif()
|
||||
|
||||
if(JSONCPP_WITH_CMAKE_PACKAGE)
|
||||
include (CMakePackageConfigHelpers)
|
||||
install(EXPORT jsoncpp
|
||||
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/jsoncpp
|
||||
FILE jsoncppConfig.cmake)
|
||||
write_basic_package_version_file ("${CMAKE_CURRENT_BINARY_DIR}/jsoncppConfigVersion.cmake"
|
||||
VERSION ${PROJECT_VERSION}
|
||||
COMPATIBILITY SameMajorVersion)
|
||||
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/jsoncppConfigVersion.cmake
|
||||
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/jsoncpp)
|
||||
include(CMakePackageConfigHelpers)
|
||||
install(EXPORT jsoncpp
|
||||
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/jsoncpp
|
||||
FILE jsoncppConfig.cmake)
|
||||
write_basic_package_version_file("${CMAKE_CURRENT_BINARY_DIR}/jsoncppConfigVersion.cmake"
|
||||
VERSION ${PROJECT_VERSION}
|
||||
COMPATIBILITY SameMajorVersion)
|
||||
install(FILES ${CMAKE_CURRENT_BINARY_DIR}/jsoncppConfigVersion.cmake
|
||||
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/jsoncpp)
|
||||
endif()
|
||||
|
||||
if(JSONCPP_WITH_TESTS)
|
||||
enable_testing()
|
||||
include(CTest)
|
||||
enable_testing()
|
||||
include(CTest)
|
||||
endif()
|
||||
|
||||
# Build the different applications
|
||||
add_subdirectory( src )
|
||||
add_subdirectory(src)
|
||||
|
||||
#install the includes
|
||||
add_subdirectory( include )
|
||||
add_subdirectory(include)
|
||||
|
||||
#install the example
|
||||
if(JSONCPP_WITH_EXAMPLE)
|
||||
add_subdirectory(example)
|
||||
endif()
|
||||
|
@@ -11,7 +11,7 @@ An example of a common Meson/Ninja environment is described next.
|
||||
Thanks to David Seifert (@SoapGentoo), we (the maintainers) now use
|
||||
[meson](http://mesonbuild.com/) and [ninja](https://ninja-build.org/) to build
|
||||
for debugging, as well as for continuous integration (see
|
||||
[`./travis_scripts/meson_builder.sh`](./travis_scripts/meson_builder.sh) ). Other systems may work, but minor
|
||||
[`./.travis_scripts/meson_builder.sh`](./.travis_scripts/meson_builder.sh) ). Other systems may work, but minor
|
||||
things like version strings might break.
|
||||
|
||||
First, install both meson (which requires Python3) and ninja.
|
||||
@@ -26,10 +26,14 @@ Then,
|
||||
LIB_TYPE=shared
|
||||
#LIB_TYPE=static
|
||||
meson --buildtype ${BUILD_TYPE} --default-library ${LIB_TYPE} . build-${LIB_TYPE}
|
||||
#ninja -v -C build-${LIB_TYPE} test # This stopped working on my Mac.
|
||||
ninja -v -C build-${LIB_TYPE}
|
||||
cd build-${LIB_TYPE}
|
||||
meson test --no-rebuild --print-errorlogs
|
||||
|
||||
ninja -C build-static/ test
|
||||
|
||||
# Or
|
||||
#cd build-${LIB_TYPE}
|
||||
#meson test --no-rebuild --print-errorlogs
|
||||
|
||||
sudo ninja install
|
||||
|
||||
## Building and testing with other build systems
|
||||
|
25
README.md
25
README.md
@@ -1,6 +1,10 @@
|
||||
# JsonCpp
|
||||
|
||||
[](https://bintray.com/theirix/conan-repo/jsoncpp%3Atheirix)
|
||||
[](https://github.com/open-source-parsers/jsoncpp/blob/master/LICENSE)
|
||||
[](http://open-source-parsers.github.io/jsoncpp-docs/doxygen/index.html)
|
||||
[](https://coveralls.io/github/open-source-parsers/jsoncpp?branch=master)
|
||||
|
||||
|
||||
[JSON][json-org] is a lightweight data-interchange format. It can represent
|
||||
numbers, strings, ordered sequences of values, and collections of name/value
|
||||
@@ -26,19 +30,36 @@ format to store user input files.
|
||||
|
||||
* `1.y.z` is built with C++11.
|
||||
* `0.y.z` can be used with older compilers.
|
||||
* `00.11.z` can be used both in old and new compilers.
|
||||
* Major versions maintain binary-compatibility.
|
||||
|
||||
### Special note
|
||||
The branch `00.11.z`is a new branch, its major version number `00` is to show that it is
|
||||
different from `0.y.z` and `1.y.z`, the main purpose of this branch is to make a balance
|
||||
between the other two branches. Thus, users can use some new features in this new branch
|
||||
that introduced in 1.y.z, but can hardly applied into 0.y.z.
|
||||
|
||||
## Using JsonCpp in your project
|
||||
|
||||
### The vcpkg dependency manager
|
||||
You can download and install JsonCpp using the [vcpkg](https://github.com/Microsoft/vcpkg/) dependency manager:
|
||||
|
||||
git clone https://github.com/Microsoft/vcpkg.git
|
||||
cd vcpkg
|
||||
./bootstrap-vcpkg.sh
|
||||
./vcpkg integrate install
|
||||
./vcpkg install jsoncpp
|
||||
|
||||
The JsonCpp port in vcpkg is kept up to date by Microsoft team members and community contributors. If the version is out of date, please [create an issue or pull request](https://github.com/Microsoft/vcpkg) on the vcpkg repository.
|
||||
|
||||
### Amalgamated source
|
||||
https://github.com/open-source-parsers/jsoncpp/wiki/Amalgamated
|
||||
https://github.com/open-source-parsers/jsoncpp/wiki/Amalgamated-(Possibly-outdated)
|
||||
|
||||
### The Meson Build System
|
||||
If you are using the [Meson Build System](http://mesonbuild.com), then you can get a wrap file by downloading it from [Meson WrapDB](https://wrapdb.mesonbuild.com/jsoncpp), or simply use `meson wrap install jsoncpp`.
|
||||
|
||||
### Other ways
|
||||
If you have trouble, see the Wiki, or post a question as an Issue.
|
||||
If you have trouble, see the [Wiki](https://github.com/open-source-parsers/jsoncpp/wiki), or post a question as an Issue.
|
||||
|
||||
## License
|
||||
|
||||
|
40
amalgamate.py
Normal file → Executable file
40
amalgamate.py
Normal file → Executable file
@@ -1,3 +1,5 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
"""Amalgamate json-cpp library sources into a single source and header file.
|
||||
|
||||
Works with python2.6+ and python3.4+.
|
||||
@@ -9,6 +11,9 @@ import os
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
INCLUDE_PATH = "include/json"
|
||||
SRC_PATH = "src/lib_json"
|
||||
|
||||
class AmalgamationFile:
|
||||
def __init__(self, top_dir):
|
||||
self.top_dir = top_dir
|
||||
@@ -66,15 +71,15 @@ def amalgamate_source(source_top_dir=None,
|
||||
header.add_text("/// If defined, indicates that the source file is amalgamated")
|
||||
header.add_text("/// to prevent private header inclusion.")
|
||||
header.add_text("#define JSON_IS_AMALGAMATION")
|
||||
header.add_file("include/json/version.h")
|
||||
header.add_file("include/json/allocator.h")
|
||||
header.add_file("include/json/config.h")
|
||||
header.add_file("include/json/forwards.h")
|
||||
header.add_file("include/json/features.h")
|
||||
header.add_file("include/json/value.h")
|
||||
header.add_file("include/json/reader.h")
|
||||
header.add_file("include/json/writer.h")
|
||||
header.add_file("include/json/assertions.h")
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "version.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "allocator.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "config.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "forwards.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "json_features.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "value.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "reader.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "writer.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "assertions.h"))
|
||||
header.add_text("#endif //ifndef JSON_AMALGAMATED_H_INCLUDED")
|
||||
|
||||
target_header_path = os.path.join(os.path.dirname(target_source_path), header_include_path)
|
||||
@@ -94,8 +99,10 @@ def amalgamate_source(source_top_dir=None,
|
||||
header.add_text("/// If defined, indicates that the source file is amalgamated")
|
||||
header.add_text("/// to prevent private header inclusion.")
|
||||
header.add_text("#define JSON_IS_AMALGAMATION")
|
||||
header.add_file("include/json/config.h")
|
||||
header.add_file("include/json/forwards.h")
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "version.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "allocator.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "config.h"))
|
||||
header.add_file(os.path.join(INCLUDE_PATH, "forwards.h"))
|
||||
header.add_text("#endif //ifndef JSON_FORWARD_AMALGAMATED_H_INCLUDED")
|
||||
|
||||
target_forward_header_path = os.path.join(os.path.dirname(target_source_path),
|
||||
@@ -116,12 +123,11 @@ def amalgamate_source(source_top_dir=None,
|
||||
#endif
|
||||
""")
|
||||
source.add_text("")
|
||||
lib_json = "src/lib_json"
|
||||
source.add_file(os.path.join(lib_json, "json_tool.h"))
|
||||
source.add_file(os.path.join(lib_json, "json_reader.cpp"))
|
||||
source.add_file(os.path.join(lib_json, "json_valueiterator.inl"))
|
||||
source.add_file(os.path.join(lib_json, "json_value.cpp"))
|
||||
source.add_file(os.path.join(lib_json, "json_writer.cpp"))
|
||||
source.add_file(os.path.join(SRC_PATH, "json_tool.h"))
|
||||
source.add_file(os.path.join(SRC_PATH, "json_reader.cpp"))
|
||||
source.add_file(os.path.join(SRC_PATH, "json_valueiterator.inl"))
|
||||
source.add_file(os.path.join(SRC_PATH, "json_value.cpp"))
|
||||
source.add_file(os.path.join(SRC_PATH, "json_writer.cpp"))
|
||||
|
||||
print("Writing amalgamated source to %r" % target_source_path)
|
||||
source.write_to(target_source_path)
|
||||
|
23
cmake/JoinPaths.cmake
Normal file
23
cmake/JoinPaths.cmake
Normal file
@@ -0,0 +1,23 @@
|
||||
# This module provides a function for joining paths
|
||||
# known from most languages
|
||||
#
|
||||
# SPDX-License-Identifier: (MIT OR CC0-1.0)
|
||||
# Copyright 2020 Jan Tojnar
|
||||
# https://github.com/jtojnar/cmake-snips
|
||||
#
|
||||
# Modelled after Python’s os.path.join
|
||||
# https://docs.python.org/3.7/library/os.path.html#os.path.join
|
||||
# Windows not supported
|
||||
function(join_paths joined_path first_path_segment)
|
||||
set(temp_path "${first_path_segment}")
|
||||
foreach(current_segment IN LISTS ARGN)
|
||||
if(NOT ("${current_segment}" STREQUAL ""))
|
||||
if(IS_ABSOLUTE "${current_segment}")
|
||||
set(temp_path "${current_segment}")
|
||||
else()
|
||||
set(temp_path "${temp_path}/${current_segment}")
|
||||
endif()
|
||||
endif()
|
||||
endforeach()
|
||||
set(${joined_path} "${temp_path}" PARENT_SCOPE)
|
||||
endfunction()
|
27
example/CMakeLists.txt
Normal file
27
example/CMakeLists.txt
Normal file
@@ -0,0 +1,27 @@
|
||||
#vim: et ts =4 sts = 4 sw = 4 tw = 0
|
||||
set(EXAMPLES
|
||||
readFromString
|
||||
readFromStream
|
||||
stringWrite
|
||||
streamWrite
|
||||
)
|
||||
add_definitions(-D_GLIBCXX_USE_CXX11_ABI)
|
||||
|
||||
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" OR CMAKE_CXX_COMPILER_ID STREQUAL "Clang")
|
||||
add_compile_options(-Wall -Wextra)
|
||||
elseif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
|
||||
add_definitions(
|
||||
-D_SCL_SECURE_NO_WARNINGS
|
||||
-D_CRT_SECURE_NO_WARNINGS
|
||||
-D_WIN32_WINNT=0x601
|
||||
-D_WINSOCK_DEPRECATED_NO_WARNINGS
|
||||
)
|
||||
endif()
|
||||
|
||||
foreach(example ${EXAMPLES})
|
||||
add_executable(${example} ${example}/${example}.cpp)
|
||||
target_include_directories(${example} PUBLIC ${CMAKE_SOURCE_DIR}/include)
|
||||
target_link_libraries(${example} jsoncpp_lib)
|
||||
endforeach()
|
||||
|
||||
add_custom_target(examples ALL DEPENDS ${EXAMPLES})
|
13
example/README.md
Normal file
13
example/README.md
Normal file
@@ -0,0 +1,13 @@
|
||||
***NOTE***
|
||||
|
||||
If you get linker errors about undefined references to symbols that involve types in the `std::__cxx11` namespace or the tag
|
||||
`[abi:cxx11]` then it probably indicates that you are trying to link together object files that were compiled with different
|
||||
values for the _GLIBCXX_USE_CXX11_ABI marco. This commonly happens when linking to a third-party library that was compiled with
|
||||
an older version of GCC. If the third-party library cannot be rebuilt with the new ABI, then you need to recompile your code with
|
||||
the old ABI,just like:
|
||||
**g++ stringWrite.cpp -ljsoncpp -std=c++11 -D_GLIBCXX_USE_CXX11_ABI=0 -o stringWrite**
|
||||
|
||||
Not all of uses of the new ABI will cause changes in symbol names, for example a class with a `std::string` member variable will
|
||||
have the same mangled name whether compiled with the older or new ABI. In order to detect such problems, the new types and functions
|
||||
are annotated with the abi_tag attribute, allowing the compiler to warn about potential ABI incompatibilities in code using them.
|
||||
Those warnings can be enabled with the `-Wabi-tag` option.
|
3
example/readFromStream/errorFormat.json
Normal file
3
example/readFromStream/errorFormat.json
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
1: "value"
|
||||
}
|
30
example/readFromStream/readFromStream.cpp
Normal file
30
example/readFromStream/readFromStream.cpp
Normal file
@@ -0,0 +1,30 @@
|
||||
#include "json/json.h"
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
/** \brief Parse from stream, collect comments and capture error info.
|
||||
* Example Usage:
|
||||
* $g++ readFromStream.cpp -ljsoncpp -std=c++11 -o readFromStream
|
||||
* $./readFromStream
|
||||
* // comment head
|
||||
* {
|
||||
* // comment before
|
||||
* "key" : "value"
|
||||
* }
|
||||
* // comment after
|
||||
* // comment tail
|
||||
*/
|
||||
int main(int argc, char* argv[]) {
|
||||
Json::Value root;
|
||||
std::ifstream ifs;
|
||||
ifs.open(argv[1]);
|
||||
|
||||
Json::CharReaderBuilder builder;
|
||||
builder["collectComments"] = true;
|
||||
JSONCPP_STRING errs;
|
||||
if (!parseFromStream(builder, ifs, &root, &errs)) {
|
||||
std::cout << errs << std::endl;
|
||||
return EXIT_FAILURE;
|
||||
}
|
||||
std::cout << root << std::endl;
|
||||
return EXIT_SUCCESS;
|
||||
}
|
6
example/readFromStream/withComment.json
Normal file
6
example/readFromStream/withComment.json
Normal file
@@ -0,0 +1,6 @@
|
||||
// comment head
|
||||
{
|
||||
// comment before
|
||||
"key" : "value"
|
||||
// comment after
|
||||
}// comment tail
|
37
example/readFromString/readFromString.cpp
Normal file
37
example/readFromString/readFromString.cpp
Normal file
@@ -0,0 +1,37 @@
|
||||
#include "json/json.h"
|
||||
#include <iostream>
|
||||
/**
|
||||
* \brief Parse a raw string into Value object using the CharReaderBuilder
|
||||
* class, or the legacy Reader class.
|
||||
* Example Usage:
|
||||
* $g++ readFromString.cpp -ljsoncpp -std=c++11 -o readFromString
|
||||
* $./readFromString
|
||||
* colin
|
||||
* 20
|
||||
*/
|
||||
int main() {
|
||||
const std::string rawJson = R"({"Age": 20, "Name": "colin"})";
|
||||
const auto rawJsonLength = static_cast<int>(rawJson.length());
|
||||
constexpr bool shouldUseOldWay = false;
|
||||
JSONCPP_STRING err;
|
||||
Json::Value root;
|
||||
|
||||
if (shouldUseOldWay) {
|
||||
Json::Reader reader;
|
||||
reader.parse(rawJson, root);
|
||||
} else {
|
||||
Json::CharReaderBuilder builder;
|
||||
const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
|
||||
if (!reader->parse(rawJson.c_str(), rawJson.c_str() + rawJsonLength, &root,
|
||||
&err)) {
|
||||
std::cout << "error" << std::endl;
|
||||
return EXIT_FAILURE;
|
||||
}
|
||||
}
|
||||
const std::string name = root["Name"].asString();
|
||||
const int age = root["Age"].asInt();
|
||||
|
||||
std::cout << name << std::endl;
|
||||
std::cout << age << std::endl;
|
||||
return EXIT_SUCCESS;
|
||||
}
|
22
example/streamWrite/streamWrite.cpp
Normal file
22
example/streamWrite/streamWrite.cpp
Normal file
@@ -0,0 +1,22 @@
|
||||
#include "json/json.h"
|
||||
#include <iostream>
|
||||
/** \brief Write the Value object to a stream.
|
||||
* Example Usage:
|
||||
* $g++ streamWrite.cpp -ljsoncpp -std=c++11 -o streamWrite
|
||||
* $./streamWrite
|
||||
* {
|
||||
* "Age" : 20,
|
||||
* "Name" : "robin"
|
||||
* }
|
||||
*/
|
||||
int main() {
|
||||
Json::Value root;
|
||||
Json::StreamWriterBuilder builder;
|
||||
const std::unique_ptr<Json::StreamWriter> writer(builder.newStreamWriter());
|
||||
|
||||
root["Name"] = "robin";
|
||||
root["Age"] = 20;
|
||||
writer->write(root, &std::cout);
|
||||
|
||||
return EXIT_SUCCESS;
|
||||
}
|
33
example/stringWrite/stringWrite.cpp
Normal file
33
example/stringWrite/stringWrite.cpp
Normal file
@@ -0,0 +1,33 @@
|
||||
#include "json/json.h"
|
||||
#include <iostream>
|
||||
/** \brief Write a Value object to a string.
|
||||
* Example Usage:
|
||||
* $g++ stringWrite.cpp -ljsoncpp -std=c++11 -o stringWrite
|
||||
* $./stringWrite
|
||||
* {
|
||||
* "action" : "run",
|
||||
* "data" :
|
||||
* {
|
||||
* "number" : 1
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
int main() {
|
||||
Json::Value root;
|
||||
Json::Value data;
|
||||
constexpr bool shouldUseOldWay = false;
|
||||
root["action"] = "run";
|
||||
data["number"] = 1;
|
||||
root["data"] = data;
|
||||
|
||||
if (shouldUseOldWay) {
|
||||
Json::FastWriter writer;
|
||||
const std::string json_file = writer.write(root);
|
||||
std::cout << json_file << std::endl;
|
||||
} else {
|
||||
Json::StreamWriterBuilder builder;
|
||||
const std::string json_file = Json::writeString(builder, root);
|
||||
std::cout << json_file << std::endl;
|
||||
}
|
||||
return EXIT_SUCCESS;
|
||||
}
|
@@ -1,6 +1,5 @@
|
||||
file(GLOB INCLUDE_FILES "json/*.h")
|
||||
install(FILES
|
||||
${INCLUDE_FILES}
|
||||
${PROJECT_BINARY_DIR}/include/json/version.h
|
||||
DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/json)
|
||||
|
||||
|
@@ -3,8 +3,8 @@
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef CPPTL_JSON_ALLOCATOR_H_INCLUDED
|
||||
#define CPPTL_JSON_ALLOCATOR_H_INCLUDED
|
||||
#ifndef JSON_ALLOCATOR_H_INCLUDED
|
||||
#define JSON_ALLOCATOR_H_INCLUDED
|
||||
|
||||
#include <cstring>
|
||||
#include <memory>
|
||||
@@ -86,4 +86,4 @@ bool operator!=(const SecureAllocator<T>&, const SecureAllocator<U>&) {
|
||||
|
||||
#pragma pack(pop)
|
||||
|
||||
#endif // CPPTL_JSON_ALLOCATOR_H_INCLUDED
|
||||
#endif // JSON_ALLOCATOR_H_INCLUDED
|
||||
|
@@ -3,8 +3,8 @@
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED
|
||||
#define CPPTL_JSON_ASSERTIONS_H_INCLUDED
|
||||
#ifndef JSON_ASSERTIONS_H_INCLUDED
|
||||
#define JSON_ASSERTIONS_H_INCLUDED
|
||||
|
||||
#include <cstdlib>
|
||||
#include <sstream>
|
||||
@@ -21,19 +21,19 @@
|
||||
|
||||
// @todo <= add detail about condition in exception
|
||||
#define JSON_ASSERT(condition) \
|
||||
{ \
|
||||
do { \
|
||||
if (!(condition)) { \
|
||||
Json::throwLogicError("assert json failed"); \
|
||||
} \
|
||||
}
|
||||
} while (0)
|
||||
|
||||
#define JSON_FAIL_MESSAGE(message) \
|
||||
{ \
|
||||
do { \
|
||||
OStringStream oss; \
|
||||
oss << message; \
|
||||
Json::throwLogicError(oss.str()); \
|
||||
abort(); \
|
||||
}
|
||||
} while (0)
|
||||
|
||||
#else // JSON_USE_EXCEPTION
|
||||
|
||||
@@ -52,8 +52,10 @@
|
||||
#endif
|
||||
|
||||
#define JSON_ASSERT_MESSAGE(condition, message) \
|
||||
if (!(condition)) { \
|
||||
JSON_FAIL_MESSAGE(message); \
|
||||
}
|
||||
do { \
|
||||
if (!(condition)) { \
|
||||
JSON_FAIL_MESSAGE(message); \
|
||||
} \
|
||||
} while (0)
|
||||
|
||||
#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
|
||||
#endif // JSON_ASSERTIONS_H_INCLUDED
|
||||
|
@@ -1,25 +0,0 @@
|
||||
// Copyright 2007-2010 Baptiste Lepilleur and The JsonCpp Authors
|
||||
// Distributed under MIT license, or public domain if desired and
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef JSON_AUTOLINK_H_INCLUDED
|
||||
#define JSON_AUTOLINK_H_INCLUDED
|
||||
|
||||
#include "config.h"
|
||||
|
||||
#ifdef JSON_IN_CPPTL
|
||||
#include <cpptl/cpptl_autolink.h>
|
||||
#endif
|
||||
|
||||
#if !defined(JSON_NO_AUTOLINK) && !defined(JSON_DLL_BUILD) && \
|
||||
!defined(JSON_IN_CPPTL)
|
||||
#define CPPTL_AUTOLINK_NAME "json"
|
||||
#undef CPPTL_AUTOLINK_DLL
|
||||
#ifdef JSON_DLL
|
||||
#define CPPTL_AUTOLINK_DLL
|
||||
#endif
|
||||
#include "autolink.h"
|
||||
#endif
|
||||
|
||||
#endif // JSON_AUTOLINK_H_INCLUDED
|
@@ -14,16 +14,6 @@
|
||||
#include <string>
|
||||
#include <type_traits>
|
||||
|
||||
/// If defined, indicates that json library is embedded in CppTL library.
|
||||
//# define JSON_IN_CPPTL 1
|
||||
|
||||
/// If defined, indicates that json may leverage CppTL library
|
||||
//# define JSON_USE_CPPTL 1
|
||||
/// If defined, indicates that cpptl vector based map should be used instead of
|
||||
/// std::map
|
||||
/// as Value container.
|
||||
//# define JSON_USE_CPPTL_SMALLMAP 1
|
||||
|
||||
// If non-zero, the library uses exceptions to report bad input instead of C
|
||||
// assertion macros. The default is to use exceptions.
|
||||
#ifndef JSON_USE_EXCEPTION
|
||||
@@ -40,28 +30,22 @@
|
||||
/// Remarks: it is automatically defined in the generated amalgamated header.
|
||||
// #define JSON_IS_AMALGAMATION
|
||||
|
||||
#ifdef JSON_IN_CPPTL
|
||||
#include <cpptl/config.h>
|
||||
#ifndef JSON_USE_CPPTL
|
||||
#define JSON_USE_CPPTL 1
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#ifdef JSON_IN_CPPTL
|
||||
#define JSON_API CPPTL_API
|
||||
#elif defined(JSON_DLL_BUILD)
|
||||
// Export macros for DLL visibility
|
||||
#if defined(JSON_DLL_BUILD)
|
||||
#if defined(_MSC_VER) || defined(__MINGW32__)
|
||||
#define JSON_API __declspec(dllexport)
|
||||
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
|
||||
#elif defined(__GNUC__) || defined(__clang__)
|
||||
#define JSON_API __attribute__((visibility("default")))
|
||||
#endif // if defined(_MSC_VER)
|
||||
|
||||
#elif defined(JSON_DLL)
|
||||
#if defined(_MSC_VER) || defined(__MINGW32__)
|
||||
#define JSON_API __declspec(dllimport)
|
||||
#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
|
||||
#endif // if defined(_MSC_VER)
|
||||
#endif // ifdef JSON_IN_CPPTL
|
||||
#endif // ifdef JSON_DLL_BUILD
|
||||
|
||||
#if !defined(JSON_API)
|
||||
#define JSON_API
|
||||
#endif
|
||||
@@ -74,8 +58,8 @@
|
||||
#if defined(_MSC_VER) && _MSC_VER < 1900
|
||||
// As recommended at
|
||||
// https://stackoverflow.com/questions/2915672/snprintf-and-visual-studio-2010
|
||||
extern JSON_API int
|
||||
msvc_pre1900_c99_snprintf(char* outBuf, size_t size, const char* format, ...);
|
||||
extern JSON_API int msvc_pre1900_c99_snprintf(char* outBuf, size_t size,
|
||||
const char* format, ...);
|
||||
#define jsoncpp_snprintf msvc_pre1900_c99_snprintf
|
||||
#else
|
||||
#define jsoncpp_snprintf std::snprintf
|
||||
@@ -90,25 +74,11 @@ msvc_pre1900_c99_snprintf(char* outBuf, size_t size, const char* format, ...);
|
||||
// C++11 should be used directly in JSONCPP.
|
||||
#define JSONCPP_OVERRIDE override
|
||||
|
||||
#if __cplusplus >= 201103L
|
||||
#define JSONCPP_NOEXCEPT noexcept
|
||||
#define JSONCPP_OP_EXPLICIT explicit
|
||||
#elif defined(_MSC_VER) && _MSC_VER < 1900
|
||||
#define JSONCPP_NOEXCEPT throw()
|
||||
#define JSONCPP_OP_EXPLICIT explicit
|
||||
#elif defined(_MSC_VER) && _MSC_VER >= 1900
|
||||
#define JSONCPP_NOEXCEPT noexcept
|
||||
#define JSONCPP_OP_EXPLICIT explicit
|
||||
#else
|
||||
#define JSONCPP_NOEXCEPT throw()
|
||||
#define JSONCPP_OP_EXPLICIT
|
||||
#endif
|
||||
|
||||
#ifdef __clang__
|
||||
#if __has_extension(attribute_deprecated_with_message)
|
||||
#define JSONCPP_DEPRECATED(message) __attribute__((deprecated(message)))
|
||||
#endif
|
||||
#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
|
||||
#elif defined(__GNUC__) // not clang (gcc comes later since clang emulates gcc)
|
||||
#if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
|
||||
#define JSONCPP_DEPRECATED(message) __attribute__((deprecated(message)))
|
||||
#elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
|
||||
@@ -123,7 +93,7 @@ msvc_pre1900_c99_snprintf(char* outBuf, size_t size, const char* format, ...);
|
||||
#define JSONCPP_DEPRECATED(message)
|
||||
#endif // if !defined(JSONCPP_DEPRECATED)
|
||||
|
||||
#if __GNUC__ >= 6
|
||||
#if defined(__clang__) || (defined(__GNUC__) && (__GNUC__ >= 6))
|
||||
#define JSON_USE_INT64_DOUBLE_CONVERSION 1
|
||||
#endif
|
||||
|
||||
@@ -135,37 +105,37 @@ msvc_pre1900_c99_snprintf(char* outBuf, size_t size, const char* format, ...);
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
|
||||
namespace Json {
|
||||
typedef int Int;
|
||||
typedef unsigned int UInt;
|
||||
using Int = int;
|
||||
using UInt = unsigned int;
|
||||
#if defined(JSON_NO_INT64)
|
||||
typedef int LargestInt;
|
||||
typedef unsigned int LargestUInt;
|
||||
using LargestInt = int;
|
||||
using LargestUInt = unsigned int;
|
||||
#undef JSON_HAS_INT64
|
||||
#else // if defined(JSON_NO_INT64)
|
||||
// For Microsoft Visual use specific types as long long is not supported
|
||||
#if defined(_MSC_VER) // Microsoft Visual Studio
|
||||
typedef __int64 Int64;
|
||||
typedef unsigned __int64 UInt64;
|
||||
using Int64 = __int64;
|
||||
using UInt64 = unsigned __int64;
|
||||
#else // if defined(_MSC_VER) // Other platforms, use long long
|
||||
typedef int64_t Int64;
|
||||
typedef uint64_t UInt64;
|
||||
using Int64 = int64_t;
|
||||
using UInt64 = uint64_t;
|
||||
#endif // if defined(_MSC_VER)
|
||||
typedef Int64 LargestInt;
|
||||
typedef UInt64 LargestUInt;
|
||||
using LargestInt = Int64;
|
||||
using LargestUInt = UInt64;
|
||||
#define JSON_HAS_INT64
|
||||
#endif // if defined(JSON_NO_INT64)
|
||||
|
||||
template <typename T>
|
||||
using Allocator = typename std::conditional<JSONCPP_USING_SECURE_MEMORY,
|
||||
SecureAllocator<T>,
|
||||
std::allocator<T>>::type;
|
||||
using Allocator =
|
||||
typename std::conditional<JSONCPP_USING_SECURE_MEMORY, SecureAllocator<T>,
|
||||
std::allocator<T>>::type;
|
||||
using String = std::basic_string<char, std::char_traits<char>, Allocator<char>>;
|
||||
using IStringStream = std::basic_istringstream<String::value_type,
|
||||
String::traits_type,
|
||||
String::allocator_type>;
|
||||
using OStringStream = std::basic_ostringstream<String::value_type,
|
||||
String::traits_type,
|
||||
String::allocator_type>;
|
||||
using IStringStream =
|
||||
std::basic_istringstream<String::value_type, String::traits_type,
|
||||
String::allocator_type>;
|
||||
using OStringStream =
|
||||
std::basic_ostringstream<String::value_type, String::traits_type,
|
||||
String::allocator_type>;
|
||||
using IStream = std::istream;
|
||||
using OStream = std::ostream;
|
||||
} // namespace Json
|
||||
|
@@ -25,11 +25,11 @@ class Reader;
|
||||
class CharReader;
|
||||
class CharReaderBuilder;
|
||||
|
||||
// features.h
|
||||
// json_features.h
|
||||
class Features;
|
||||
|
||||
// value.h
|
||||
typedef unsigned int ArrayIndex;
|
||||
using ArrayIndex = unsigned int;
|
||||
class StaticString;
|
||||
class Path;
|
||||
class PathArgument;
|
||||
|
@@ -6,8 +6,8 @@
|
||||
#ifndef JSON_JSON_H_INCLUDED
|
||||
#define JSON_JSON_H_INCLUDED
|
||||
|
||||
#include "autolink.h"
|
||||
#include "features.h"
|
||||
#include "config.h"
|
||||
#include "json_features.h"
|
||||
#include "reader.h"
|
||||
#include "value.h"
|
||||
#include "writer.h"
|
||||
|
@@ -3,8 +3,8 @@
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
|
||||
#define CPPTL_JSON_FEATURES_H_INCLUDED
|
||||
#ifndef JSON_FEATURES_H_INCLUDED
|
||||
#define JSON_FEATURES_H_INCLUDED
|
||||
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
#include "forwards.h"
|
||||
@@ -58,4 +58,4 @@ public:
|
||||
|
||||
#pragma pack(pop)
|
||||
|
||||
#endif // CPPTL_JSON_FEATURES_H_INCLUDED
|
||||
#endif // JSON_FEATURES_H_INCLUDED
|
@@ -3,11 +3,11 @@
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef CPPTL_JSON_READER_H_INCLUDED
|
||||
#define CPPTL_JSON_READER_H_INCLUDED
|
||||
#ifndef JSON_READER_H_INCLUDED
|
||||
#define JSON_READER_H_INCLUDED
|
||||
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
#include "features.h"
|
||||
#include "json_features.h"
|
||||
#include "value.h"
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <deque>
|
||||
@@ -28,20 +28,21 @@
|
||||
namespace Json {
|
||||
|
||||
/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
|
||||
*Value.
|
||||
* Value.
|
||||
*
|
||||
* \deprecated Use CharReader and CharReaderBuilder.
|
||||
*/
|
||||
class JSON_API Reader {
|
||||
|
||||
class JSONCPP_DEPRECATED(
|
||||
"Use CharReader and CharReaderBuilder instead.") JSON_API Reader {
|
||||
public:
|
||||
typedef char Char;
|
||||
typedef const Char* Location;
|
||||
using Char = char;
|
||||
using Location = const Char*;
|
||||
|
||||
/** \brief An error tagged with where in the JSON text it was encountered.
|
||||
*
|
||||
* The offsets give the [start, limit) range of bytes within the text. Note
|
||||
* that this is bytes, not codepoints.
|
||||
*
|
||||
*/
|
||||
struct StructuredError {
|
||||
ptrdiff_t offset_start;
|
||||
@@ -49,56 +50,50 @@ public:
|
||||
String message;
|
||||
};
|
||||
|
||||
/** \brief Constructs a Reader allowing all features
|
||||
* for parsing.
|
||||
/** \brief Constructs a Reader allowing all features for parsing.
|
||||
*/
|
||||
JSONCPP_DEPRECATED("Use CharReader and CharReaderBuilder instead")
|
||||
Reader();
|
||||
|
||||
/** \brief Constructs a Reader allowing the specified feature set
|
||||
* for parsing.
|
||||
/** \brief Constructs a Reader allowing the specified feature set for parsing.
|
||||
*/
|
||||
JSONCPP_DEPRECATED("Use CharReader and CharReaderBuilder instead")
|
||||
Reader(const Features& features);
|
||||
|
||||
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
|
||||
* document.
|
||||
* \param document UTF-8 encoded string containing the document to read.
|
||||
* \param root [out] Contains the root value of the document if it was
|
||||
* successfully parsed.
|
||||
* \param collectComments \c true to collect comment and allow writing them
|
||||
* back during
|
||||
* serialization, \c false to discard comments.
|
||||
* This parameter is ignored if
|
||||
* Features::allowComments_
|
||||
* is \c false.
|
||||
*
|
||||
* \param document UTF-8 encoded string containing the document
|
||||
* to read.
|
||||
* \param[out] root Contains the root value of the document if it
|
||||
* was successfully parsed.
|
||||
* \param collectComments \c true to collect comment and allow writing
|
||||
* them back during serialization, \c false to
|
||||
* discard comments. This parameter is ignored
|
||||
* if Features::allowComments_ is \c false.
|
||||
* \return \c true if the document was successfully parsed, \c false if an
|
||||
* error occurred.
|
||||
*/
|
||||
bool
|
||||
parse(const std::string& document, Value& root, bool collectComments = true);
|
||||
bool parse(const std::string& document, Value& root,
|
||||
bool collectComments = true);
|
||||
|
||||
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
|
||||
document.
|
||||
* \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the
|
||||
document to read.
|
||||
* \param endDoc Pointer on the end of the UTF-8 encoded string of the
|
||||
document to read.
|
||||
* Must be >= beginDoc.
|
||||
* \param root [out] Contains the root value of the document if it was
|
||||
* successfully parsed.
|
||||
* \param collectComments \c true to collect comment and allow writing them
|
||||
back during
|
||||
* serialization, \c false to discard comments.
|
||||
* This parameter is ignored if
|
||||
Features::allowComments_
|
||||
* is \c false.
|
||||
* document.
|
||||
*
|
||||
* \param beginDoc Pointer on the beginning of the UTF-8 encoded
|
||||
* string of the document to read.
|
||||
* \param endDoc Pointer on the end of the UTF-8 encoded string
|
||||
* of the document to read. Must be >= beginDoc.
|
||||
* \param[out] root Contains the root value of the document if it
|
||||
* was successfully parsed.
|
||||
* \param collectComments \c true to collect comment and allow writing
|
||||
* them back during serialization, \c false to
|
||||
* discard comments. This parameter is ignored
|
||||
* if Features::allowComments_ is \c false.
|
||||
* \return \c true if the document was successfully parsed, \c false if an
|
||||
error occurred.
|
||||
* error occurred.
|
||||
*/
|
||||
bool parse(const char* beginDoc,
|
||||
const char* endDoc,
|
||||
Value& root,
|
||||
bool parse(const char* beginDoc, const char* endDoc, Value& root,
|
||||
bool collectComments = true);
|
||||
|
||||
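For orientation, the deprecated interface declared above is driven roughly like this. A minimal sketch based only on the signatures shown in this hunk, not code taken from the patch:

    #include <iostream>
    #include <string>
    #include <json/json.h>

    // Legacy path: Json::Reader now carries a deprecation attribute, so this
    // compiles with a warning and should migrate to CharReader/CharReaderBuilder.
    bool parseWithLegacyReader(const std::string& doc) {
      Json::Reader reader;
      Json::Value root;
      if (!reader.parse(doc, root, /*collectComments=*/true)) {
        std::cerr << reader.getFormattedErrorMessages() << '\n';
        return false;
      }
      return true;
    }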
/// \brief Parse from input stream.
|
||||
@@ -107,11 +102,10 @@ public:
|
||||
|
||||
/** \brief Returns a user friendly string that list errors in the parsed
|
||||
* document.
|
||||
* \return Formatted error message with the list of errors with their location
|
||||
* in
|
||||
* the parsed document. An empty string is returned if no error
|
||||
* occurred
|
||||
* during parsing.
|
||||
*
|
||||
* \return Formatted error message with the list of errors with their
|
||||
* location in the parsed document. An empty string is returned if no error
|
||||
* occurred during parsing.
|
||||
* \deprecated Use getFormattedErrorMessages() instead (typo fix).
|
||||
*/
|
||||
JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.")
|
||||
@@ -119,43 +113,45 @@ public:
|
||||
|
||||
/** \brief Returns a user friendly string that list errors in the parsed
|
||||
* document.
|
||||
* \return Formatted error message with the list of errors with their location
|
||||
* in
|
||||
* the parsed document. An empty string is returned if no error
|
||||
* occurred
|
||||
* during parsing.
|
||||
*
|
||||
* \return Formatted error message with the list of errors with their
|
||||
* location in the parsed document. An empty string is returned if no error
|
||||
* occurred during parsing.
|
||||
*/
|
||||
String getFormattedErrorMessages() const;
|
||||
|
||||
/** \brief Returns a vector of structured erros encounted while parsing.
|
||||
/** \brief Returns a vector of structured errors encountered while parsing.
|
||||
*
|
||||
* \return A (possibly empty) vector of StructuredError objects. Currently
|
||||
* only one error can be returned, but the caller should tolerate
|
||||
* multiple
|
||||
* errors. This can occur if the parser recovers from a non-fatal
|
||||
* parse error and then encounters additional errors.
|
||||
* only one error can be returned, but the caller should tolerate multiple
|
||||
* errors. This can occur if the parser recovers from a non-fatal parse
|
||||
* error and then encounters additional errors.
|
||||
*/
|
||||
std::vector<StructuredError> getStructuredErrors() const;
|
||||
|
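The StructuredError records documented above carry byte offsets into the input (offset_start/offset_limit); a hedged sketch of consuming them after a failed parse, still via the deprecated Reader where this accessor lives:

    #include <iostream>
    #include <string>
    #include <json/json.h>

    void reportErrors(const std::string& text) {
      Json::Reader reader;
      Json::Value root;
      if (!reader.parse(text, root)) {
        for (const Json::Reader::StructuredError& err : reader.getStructuredErrors()) {
          // Offsets are bytes, not codepoints, per the struct documentation above.
          std::cerr << "error at bytes [" << err.offset_start << ", "
                    << err.offset_limit << "): " << err.message << '\n';
        }
      }
    }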
||||
/** \brief Add a semantic error message.
|
||||
* \param value JSON Value location associated with the error
|
||||
*
|
||||
* \param value JSON Value location associated with the error
|
||||
* \param message The error message.
|
||||
* \return \c true if the error was successfully added, \c false if the
|
||||
* Value offset exceeds the document size.
|
||||
* \return \c true if the error was successfully added, \c false if the Value
|
||||
* offset exceeds the document size.
|
||||
*/
|
||||
bool pushError(const Value& value, const String& message);
|
||||
|
||||
/** \brief Add a semantic error message with extra context.
|
||||
* \param value JSON Value location associated with the error
|
||||
*
|
||||
* \param value JSON Value location associated with the error
|
||||
* \param message The error message.
|
||||
* \param extra Additional JSON Value location to contextualize the error
|
||||
* \param extra Additional JSON Value location to contextualize the error
|
||||
* \return \c true if the error was successfully added, \c false if either
|
||||
* Value offset exceeds the document size.
|
||||
*/
|
||||
bool pushError(const Value& value, const String& message, const Value& extra);
|
||||
|
||||
/** \brief Return whether there are any errors.
|
||||
* \return \c true if there are no errors to report \c false if
|
||||
* errors have occurred.
|
||||
*
|
||||
* \return \c true if there are no errors to report \c false if errors have
|
||||
* occurred.
|
||||
*/
|
||||
bool good() const;
|
||||
|
||||
@@ -191,11 +187,11 @@ private:
|
||||
Location extra_;
|
||||
};
|
||||
|
||||
typedef std::deque<ErrorInfo> Errors;
|
||||
using Errors = std::deque<ErrorInfo>;
|
||||
|
||||
bool readToken(Token& token);
|
||||
void skipSpaces();
|
||||
bool match(Location pattern, int patternLength);
|
||||
bool match(const Char* pattern, int patternLength);
|
||||
bool readComment();
|
||||
bool readCStyleComment();
|
||||
bool readCppStyleComment();
|
||||
@@ -210,24 +206,19 @@ private:
|
||||
bool decodeString(Token& token, String& decoded);
|
||||
bool decodeDouble(Token& token);
|
||||
bool decodeDouble(Token& token, Value& decoded);
|
||||
bool decodeUnicodeCodePoint(Token& token,
|
||||
Location& current,
|
||||
Location end,
|
||||
bool decodeUnicodeCodePoint(Token& token, Location& current, Location end,
|
||||
unsigned int& unicode);
|
||||
bool decodeUnicodeEscapeSequence(Token& token,
|
||||
Location& current,
|
||||
Location end,
|
||||
unsigned int& unicode);
|
||||
bool decodeUnicodeEscapeSequence(Token& token, Location& current,
|
||||
Location end, unsigned int& unicode);
|
||||
bool addError(const String& message, Token& token, Location extra = nullptr);
|
||||
bool recoverFromError(TokenType skipUntilToken);
|
||||
bool addErrorAndRecover(const String& message,
|
||||
Token& token,
|
||||
bool addErrorAndRecover(const String& message, Token& token,
|
||||
TokenType skipUntilToken);
|
||||
void skipUntilSpace();
|
||||
Value& currentValue();
|
||||
Char getNextChar();
|
||||
void
|
||||
getLocationLineAndColumn(Location location, int& line, int& column) const;
|
||||
void getLocationLineAndColumn(Location location, int& line,
|
||||
int& column) const;
|
||||
String getLocationLineAndColumn(Location location) const;
|
||||
void addComment(Location begin, Location end, CommentPlacement placement);
|
||||
void skipCommentTokens(Token& token);
|
||||
@@ -235,7 +226,7 @@ private:
|
||||
static bool containsNewLine(Location begin, Location end);
|
||||
static String normalizeEOL(Location begin, Location end);
|
||||
|
||||
typedef std::stack<Value*> Nodes;
|
||||
using Nodes = std::stack<Value*>;
|
||||
Nodes nodes_;
|
||||
Errors errors_;
|
||||
String document_;
|
||||
@@ -255,26 +246,22 @@ class JSON_API CharReader {
|
||||
public:
|
||||
virtual ~CharReader() = default;
|
||||
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
|
||||
document.
|
||||
* The document must be a UTF-8 encoded string containing the document to
|
||||
read.
|
||||
* document. The document must be a UTF-8 encoded string containing the
|
||||
* document to read.
|
||||
*
|
||||
* \param beginDoc Pointer on the beginning of the UTF-8 encoded string of the
|
||||
document to read.
|
||||
* \param endDoc Pointer on the end of the UTF-8 encoded string of the
|
||||
document to read.
|
||||
* Must be >= beginDoc.
|
||||
* \param root [out] Contains the root value of the document if it was
|
||||
* successfully parsed.
|
||||
* \param errs [out] Formatted error messages (if not NULL)
|
||||
* a user friendly string that lists errors in the parsed
|
||||
* document.
|
||||
* \param beginDoc Pointer on the beginning of the UTF-8 encoded string
|
||||
* of the document to read.
|
||||
* \param endDoc Pointer on the end of the UTF-8 encoded string of the
|
||||
* document to read. Must be >= beginDoc.
|
||||
* \param[out] root Contains the root value of the document if it was
|
||||
* successfully parsed.
|
||||
* \param[out] errs Formatted error messages (if not NULL) a user
|
||||
* friendly string that lists errors in the parsed
|
||||
* document.
|
||||
* \return \c true if the document was successfully parsed, \c false if an
|
||||
error occurred.
|
||||
* error occurred.
|
||||
*/
|
||||
virtual bool parse(char const* beginDoc,
|
||||
char const* endDoc,
|
||||
Value* root,
|
||||
virtual bool parse(char const* beginDoc, char const* endDoc, Value* root,
|
||||
String* errs) = 0;
|
||||
|
||||
class JSON_API Factory {
|
||||
@@ -288,59 +275,60 @@ public:
|
||||
}; // CharReader
|
||||
|
||||
/** \brief Build a CharReader implementation.
|
||||
|
||||
Usage:
|
||||
\code
|
||||
using namespace Json;
|
||||
CharReaderBuilder builder;
|
||||
builder["collectComments"] = false;
|
||||
Value value;
|
||||
String errs;
|
||||
bool ok = parseFromStream(builder, std::cin, &value, &errs);
|
||||
\endcode
|
||||
*/
|
||||
*
|
||||
* Usage:
|
||||
* \code
|
||||
* using namespace Json;
|
||||
* CharReaderBuilder builder;
|
||||
* builder["collectComments"] = false;
|
||||
* Value value;
|
||||
* String errs;
|
||||
* bool ok = parseFromStream(builder, std::cin, &value, &errs);
|
||||
* \endcode
|
||||
*/
|
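The usage block above goes through parseFromStream; when the document is already in memory, the same builder hands out a CharReader instead. A small sketch under that assumption (allowTrailingCommas is one of the settings listed in the class documentation below):

    #include <memory>
    #include <string>
    #include <json/json.h>

    bool parseBuffer(const std::string& doc, Json::Value* root, Json::String* errs) {
      Json::CharReaderBuilder builder;
      builder["allowTrailingCommas"] = true;  // setting documented below
      std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
      // parse() takes a [begin, end) byte range of UTF-8 text.
      return reader->parse(doc.data(), doc.data() + doc.size(), root, errs);
    }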
||||
class JSON_API CharReaderBuilder : public CharReader::Factory {
|
||||
public:
|
||||
// Note: We use a Json::Value so that we can add data-members to this class
|
||||
// without a major version bump.
|
||||
/** Configuration of this builder.
|
||||
These are case-sensitive.
|
||||
Available settings (case-sensitive):
|
||||
- `"collectComments": false or true`
|
||||
- true to collect comment and allow writing them
|
||||
back during serialization, false to discard comments.
|
||||
This parameter is ignored if allowComments is false.
|
||||
- `"allowComments": false or true`
|
||||
- true if comments are allowed.
|
||||
- `"strictRoot": false or true`
|
||||
- true if root must be either an array or an object value
|
||||
- `"allowDroppedNullPlaceholders": false or true`
|
||||
- true if dropped null placeholders are allowed. (See
|
||||
StreamWriterBuilder.)
|
||||
- `"allowNumericKeys": false or true`
|
||||
- true if numeric object keys are allowed.
|
||||
- `"allowSingleQuotes": false or true`
|
||||
- true if '' are allowed for strings (both keys and values)
|
||||
- `"stackLimit": integer`
|
||||
- Exceeding stackLimit (recursive depth of `readValue()`) will
|
||||
cause an exception.
|
||||
- This is a security issue (seg-faults caused by deeply nested JSON),
|
||||
so the default is low.
|
||||
- `"failIfExtra": false or true`
|
||||
- If true, `parse()` returns false when extra non-whitespace trails
|
||||
the JSON value in the input string.
|
||||
- `"rejectDupKeys": false or true`
|
||||
- If true, `parse()` returns false when a key is duplicated within an
|
||||
object.
|
||||
- `"allowSpecialFloats": false or true`
|
||||
- If true, special float values (NaNs and infinities) are allowed
|
||||
and their values are lossfree restorable.
|
||||
|
||||
You can examine 'settings_` yourself
|
||||
to see the defaults. You can also write and read them just like any
|
||||
JSON Value.
|
||||
\sa setDefaults()
|
||||
*/
|
||||
* These are case-sensitive.
|
||||
* Available settings (case-sensitive):
|
||||
* - `"collectComments": false or true`
|
||||
* - true to collect comment and allow writing them back during
|
||||
* serialization, false to discard comments. This parameter is ignored
|
||||
* if allowComments is false.
|
||||
* - `"allowComments": false or true`
|
||||
* - true if comments are allowed.
|
||||
* - `"allowTrailingCommas": false or true`
|
||||
* - true if trailing commas in objects and arrays are allowed.
|
||||
* - `"strictRoot": false or true`
|
||||
* - true if root must be either an array or an object value
|
||||
* - `"allowDroppedNullPlaceholders": false or true`
|
||||
* - true if dropped null placeholders are allowed. (See
|
||||
* StreamWriterBuilder.)
|
||||
* - `"allowNumericKeys": false or true`
|
||||
* - true if numeric object keys are allowed.
|
||||
* - `"allowSingleQuotes": false or true`
|
||||
* - true if '' are allowed for strings (both keys and values)
|
||||
* - `"stackLimit": integer`
|
||||
* - Exceeding stackLimit (recursive depth of `readValue()`) will cause an
|
||||
* exception.
|
||||
* - This is a security issue (seg-faults caused by deeply nested JSON), so
|
||||
* the default is low.
|
||||
* - `"failIfExtra": false or true`
|
||||
* - If true, `parse()` returns false when extra non-whitespace trails the
|
||||
* JSON value in the input string.
|
||||
* - `"rejectDupKeys": false or true`
|
||||
* - If true, `parse()` returns false when a key is duplicated within an
|
||||
* object.
|
||||
* - `"allowSpecialFloats": false or true`
|
||||
* - If true, special float values (NaNs and infinities) are allowed and
|
||||
* their values are lossfree restorable.
|
||||
*
|
||||
* You can examine 'settings_` yourself to see the defaults. You can also
|
||||
* write and read them just like any JSON Value.
|
||||
* \sa setDefaults()
|
||||
*/
|
||||
Json::Value settings_;
|
||||
|
||||
CharReaderBuilder();
|
||||
@@ -375,35 +363,33 @@ public:
|
||||
* Someday we might have a real StreamReader, but for now this
|
||||
* is convenient.
|
||||
*/
|
||||
bool JSON_API parseFromStream(CharReader::Factory const&,
|
||||
IStream&,
|
||||
Value* root,
|
||||
bool JSON_API parseFromStream(CharReader::Factory const&, IStream&, Value* root,
|
||||
String* errs);
|
||||
|
||||
/** \brief Read from 'sin' into 'root'.
|
||||
|
||||
Always keep comments from the input JSON.
|
||||
|
||||
This can be used to read a file into a particular sub-object.
|
||||
For example:
|
||||
\code
|
||||
Json::Value root;
|
||||
cin >> root["dir"]["file"];
|
||||
cout << root;
|
||||
\endcode
|
||||
Result:
|
||||
\verbatim
|
||||
{
|
||||
"dir": {
|
||||
"file": {
|
||||
// The input stream JSON would be nested here.
|
||||
}
|
||||
}
|
||||
}
|
||||
\endverbatim
|
||||
\throw std::exception on parse error.
|
||||
\see Json::operator<<()
|
||||
*/
|
||||
*
|
||||
* Always keep comments from the input JSON.
|
||||
*
|
||||
* This can be used to read a file into a particular sub-object.
|
||||
* For example:
|
||||
* \code
|
||||
* Json::Value root;
|
||||
* cin >> root["dir"]["file"];
|
||||
* cout << root;
|
||||
* \endcode
|
||||
* Result:
|
||||
* \verbatim
|
||||
* {
|
||||
* "dir": {
|
||||
* "file": {
|
||||
* // The input stream JSON would be nested here.
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* \endverbatim
|
||||
* \throw std::exception on parse error.
|
||||
* \see Json::operator<<()
|
||||
*/
|
||||
JSON_API IStream& operator>>(IStream&, Value&);
|
||||
|
||||
} // namespace Json
|
||||
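A short sketch of the stream-extraction operator documented above; it throws on malformed input (per the \throw note), so the call is wrapped. The file name is made up:

    #include <fstream>
    #include <iostream>
    #include <json/json.h>

    Json::Value loadConfig() {
      Json::Value root;
      std::ifstream in("config.json");  // hypothetical path
      try {
        in >> root;  // Json::operator>>; throws std::exception on parse error
      } catch (const std::exception& e) {
        std::cerr << "bad JSON: " << e.what() << '\n';
      }
      return root;
    }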
@@ -414,4 +400,4 @@ JSON_API IStream& operator>>(IStream&, Value&);
|
||||
#pragma warning(pop)
|
||||
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
|
||||
|
||||
#endif // CPPTL_JSON_READER_H_INCLUDED
|
||||
#endif // JSON_READER_H_INCLUDED
|
||||
|
@@ -3,27 +3,49 @@
|
||||
// recognized in your jurisdiction.
|
||||
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
#ifndef CPPTL_JSON_H_INCLUDED
|
||||
#define CPPTL_JSON_H_INCLUDED
|
||||
#ifndef JSON_H_INCLUDED
|
||||
#define JSON_H_INCLUDED
|
||||
|
||||
#if !defined(JSON_IS_AMALGAMATION)
|
||||
#include "forwards.h"
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
|
||||
// Conditional NORETURN attribute on the throw functions would:
|
||||
// a) suppress false positives from static code analysis
|
||||
// b) possibly improve optimization opportunities.
|
||||
#if !defined(JSONCPP_NORETURN)
|
||||
#if defined(_MSC_VER) && _MSC_VER == 1800
|
||||
#define JSONCPP_NORETURN __declspec(noreturn)
|
||||
#else
|
||||
#define JSONCPP_NORETURN [[noreturn]]
|
||||
#endif
|
||||
#endif
|
||||
|
||||
// Support for '= delete' with template declarations was a late addition
|
||||
// to the c++11 standard and is rejected by clang 3.8 and Apple clang 8.2
|
||||
// even though these declare themselves to be c++11 compilers.
|
||||
#if !defined(JSONCPP_TEMPLATE_DELETE)
|
||||
#if defined(__clang__) && defined(__apple_build_version__)
|
||||
#if __apple_build_version__ <= 8000042
|
||||
#define JSONCPP_TEMPLATE_DELETE
|
||||
#endif
|
||||
#elif defined(__clang__)
|
||||
#if __clang_major__ == 3 && __clang_minor__ <= 8
|
||||
#define JSONCPP_TEMPLATE_DELETE
|
||||
#endif
|
||||
#endif
|
||||
#if !defined(JSONCPP_TEMPLATE_DELETE)
|
||||
#define JSONCPP_TEMPLATE_DELETE = delete
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#include <array>
|
||||
#include <exception>
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#ifndef JSON_USE_CPPTL_SMALLMAP
|
||||
#include <map>
|
||||
#else
|
||||
#include <cpptl/smallmap.h>
|
||||
#endif
|
||||
#ifdef JSON_USE_CPPTL
|
||||
#include <cpptl/forwards.h>
|
||||
#endif
|
||||
|
||||
// Disable warning C4251: <data member>: <type> needs to have dll-interface to
|
||||
// be used by...
|
||||
#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
|
||||
@@ -45,8 +67,8 @@ namespace Json {
|
||||
class JSON_API Exception : public std::exception {
|
||||
public:
|
||||
Exception(String msg);
|
||||
~Exception() JSONCPP_NOEXCEPT override;
|
||||
char const* what() const JSONCPP_NOEXCEPT override;
|
||||
~Exception() noexcept override;
|
||||
char const* what() const noexcept override;
|
||||
|
||||
protected:
|
||||
String msg_;
|
||||
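Json::Exception above derives from std::exception and is what the throw helpers further down raise; a minimal sketch of catching it (the asInt() call on a string value is just a convenient way to provoke such an exception):

    #include <iostream>
    #include <json/json.h>

    void demoException() {
      try {
        Json::Value v = "not a number";
        (void)v.asInt();  // not convertible: throws a Json::Exception subclass
      } catch (const Json::Exception& e) {
        std::cerr << "jsoncpp error: " << e.what() << '\n';
      }
    }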
@@ -76,9 +98,9 @@ public:
|
||||
#endif
|
||||
|
||||
/// used internally
|
||||
[[noreturn]] void throwRuntimeError(String const& msg);
|
||||
JSONCPP_NORETURN void throwRuntimeError(String const& msg);
|
||||
/// used internally
|
||||
[[noreturn]] void throwLogicError(String const& msg);
|
||||
JSONCPP_NORETURN void throwLogicError(String const& msg);
|
||||
|
||||
/** \brief Type of the value held by a Value object.
|
||||
*/
|
||||
@@ -108,11 +130,6 @@ enum PrecisionType {
|
||||
decimalPlaces ///< we set max number of digits after "." in string
|
||||
};
|
||||
|
||||
//# ifdef JSON_USE_CPPTL
|
||||
// typedef CppTL::AnyEnumerator<const char *> EnumMemberNames;
|
||||
// typedef CppTL::AnyEnumerator<const Value &> EnumValues;
|
||||
//# endif
|
||||
|
||||
/** \brief Lightweight wrapper to tag static string.
|
||||
*
|
||||
* Value constructor and objectValue member assignment takes advantage of the
|
||||
@@ -177,21 +194,21 @@ class JSON_API Value {
|
||||
friend class ValueIteratorBase;
|
||||
|
||||
public:
|
||||
typedef std::vector<String> Members;
|
||||
typedef ValueIterator iterator;
|
||||
typedef ValueConstIterator const_iterator;
|
||||
typedef Json::UInt UInt;
|
||||
typedef Json::Int Int;
|
||||
using Members = std::vector<String>;
|
||||
using iterator = ValueIterator;
|
||||
using const_iterator = ValueConstIterator;
|
||||
using UInt = Json::UInt;
|
||||
using Int = Json::Int;
|
||||
#if defined(JSON_HAS_INT64)
|
||||
typedef Json::UInt64 UInt64;
|
||||
typedef Json::Int64 Int64;
|
||||
using UInt64 = Json::UInt64;
|
||||
using Int64 = Json::Int64;
|
||||
#endif // defined(JSON_HAS_INT64)
|
||||
typedef Json::LargestInt LargestInt;
|
||||
typedef Json::LargestUInt LargestUInt;
|
||||
typedef Json::ArrayIndex ArrayIndex;
|
||||
using LargestInt = Json::LargestInt;
|
||||
using LargestUInt = Json::LargestUInt;
|
||||
using ArrayIndex = Json::ArrayIndex;
|
||||
|
||||
// Required for boost integration, e. g. BOOST_TEST
|
||||
typedef std::string value_type;
|
||||
using value_type = std::string;
|
||||
|
||||
#if JSON_USE_NULLREF
|
||||
// Binary compatibility kludges, do not use.
|
||||
@@ -203,31 +220,34 @@ public:
|
||||
static Value const& nullSingleton();
|
||||
|
||||
/// Minimum signed integer value that can be stored in a Json::Value.
|
||||
static const LargestInt minLargestInt;
|
||||
static constexpr LargestInt minLargestInt =
|
||||
LargestInt(~(LargestUInt(-1) / 2));
|
||||
/// Maximum signed integer value that can be stored in a Json::Value.
|
||||
static const LargestInt maxLargestInt;
|
||||
static constexpr LargestInt maxLargestInt = LargestInt(LargestUInt(-1) / 2);
|
||||
/// Maximum unsigned integer value that can be stored in a Json::Value.
|
||||
static const LargestUInt maxLargestUInt;
|
||||
static constexpr LargestUInt maxLargestUInt = LargestUInt(-1);
|
||||
|
||||
/// Minimum signed int value that can be stored in a Json::Value.
|
||||
static const Int minInt;
|
||||
static constexpr Int minInt = Int(~(UInt(-1) / 2));
|
||||
/// Maximum signed int value that can be stored in a Json::Value.
|
||||
static const Int maxInt;
|
||||
static constexpr Int maxInt = Int(UInt(-1) / 2);
|
||||
/// Maximum unsigned int value that can be stored in a Json::Value.
|
||||
static const UInt maxUInt;
|
||||
static constexpr UInt maxUInt = UInt(-1);
|
||||
|
||||
#if defined(JSON_HAS_INT64)
|
||||
/// Minimum signed 64 bits int value that can be stored in a Json::Value.
|
||||
static const Int64 minInt64;
|
||||
static constexpr Int64 minInt64 = Int64(~(UInt64(-1) / 2));
|
||||
/// Maximum signed 64 bits int value that can be stored in a Json::Value.
|
||||
static const Int64 maxInt64;
|
||||
static constexpr Int64 maxInt64 = Int64(UInt64(-1) / 2);
|
||||
/// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
|
||||
static const UInt64 maxUInt64;
|
||||
static constexpr UInt64 maxUInt64 = UInt64(-1);
|
||||
#endif // defined(JSON_HAS_INT64)
|
||||
|
||||
/// Default precision for real value for string representation.
|
||||
static const UInt defaultRealPrecision;
|
||||
|
||||
static constexpr UInt defaultRealPrecision = 17;
|
||||
// The constant is hard-coded because some compiler have trouble
|
||||
// converting Value::maxUInt64 to a double correctly (AIX/xlC).
|
||||
// Assumes that UInt64 is a 64 bits integer.
|
||||
static constexpr double maxUInt64AsDouble = 18446744073709551615.0;
|
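These limits are now constexpr, so they can appear in constant expressions as well as ordinary range checks. A small illustrative helper; the 32-bit remark in the comment is an assumption about the usual Json::Int, not something the code requires:

    #include <json/json.h>

    // Returns true when the held integer also fits in Json::Int
    // (typically a 32-bit int, hence the minInt/maxInt pair used here).
    bool fitsInInt(const Json::Value& v) {
      return v.isInt64() && v.asInt64() >= Json::Value::minInt &&
             v.asInt64() <= Json::Value::maxInt;
    }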
||||
// Workaround for bug in the NVIDIAs CUDA 9.1 nvcc compiler
|
||||
// when using gcc and clang backend compilers. CZString
|
||||
// cannot be defined as private. See issue #486
|
||||
@@ -272,29 +292,26 @@ private:
|
||||
};
|
||||
|
||||
public:
|
||||
#ifndef JSON_USE_CPPTL_SMALLMAP
|
||||
typedef std::map<CZString, Value> ObjectValues;
|
||||
#else
|
||||
typedef CppTL::SmallMap<CZString, Value> ObjectValues;
|
||||
#endif // ifndef JSON_USE_CPPTL_SMALLMAP
|
||||
#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
|
||||
|
||||
public:
|
||||
/** \brief Create a default Value of the given type.
|
||||
|
||||
This is a very useful constructor.
|
||||
To create an empty array, pass arrayValue.
|
||||
To create an empty object, pass objectValue.
|
||||
Another Value can then be set to this one by assignment.
|
||||
This is useful since clear() and resize() will not alter types.
|
||||
|
||||
Examples:
|
||||
\code
|
||||
Json::Value null_value; // null
|
||||
Json::Value arr_value(Json::arrayValue); // []
|
||||
Json::Value obj_value(Json::objectValue); // {}
|
||||
\endcode
|
||||
*/
|
||||
/**
|
||||
* \brief Create a default Value of the given type.
|
||||
*
|
||||
* This is a very useful constructor.
|
||||
* To create an empty array, pass arrayValue.
|
||||
* To create an empty object, pass objectValue.
|
||||
* Another Value can then be set to this one by assignment.
|
||||
* This is useful since clear() and resize() will not alter types.
|
||||
*
|
||||
* Examples:
|
||||
* \code
|
||||
* Json::Value null_value; // null
|
||||
* Json::Value arr_value(Json::arrayValue); // []
|
||||
* Json::Value obj_value(Json::objectValue); // {}
|
||||
* \endcode
|
||||
*/
|
||||
Value(ValueType type = nullValue);
|
||||
Value(Int value);
|
||||
Value(UInt value);
|
||||
@@ -305,28 +322,27 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
Value(double value);
|
||||
Value(const char* value); ///< Copy til first 0. (NULL causes to seg-fault.)
|
||||
Value(const char* begin, const char* end); ///< Copy all, incl zeroes.
|
||||
/** \brief Constructs a value from a static string.
|
||||
|
||||
/**
|
||||
* \brief Constructs a value from a static string.
|
||||
*
|
||||
* Like other value string constructor but do not duplicate the string for
|
||||
* internal storage. The given string must remain alive after the call to this
|
||||
* constructor.
|
||||
* internal storage. The given string must remain alive after the call to
|
||||
* this constructor.
|
||||
*
|
||||
* \note This works only for null-terminated strings. (We cannot change the
|
||||
* size of this class, so we have nowhere to store the length,
|
||||
* which might be computed later for various operations.)
|
||||
* size of this class, so we have nowhere to store the length, which might be
|
||||
* computed later for various operations.)
|
||||
*
|
||||
* Example of usage:
|
||||
* \code
|
||||
* static StaticString foo("some text");
|
||||
* Json::Value aValue(foo);
|
||||
* \endcode
|
||||
* \code
|
||||
* static StaticString foo("some text");
|
||||
* Json::Value aValue(foo);
|
||||
* \endcode
|
||||
*/
|
||||
Value(const StaticString& value);
|
||||
Value(const String& value); ///< Copy data() til size(). Embedded
|
||||
///< zeroes too.
|
||||
#ifdef JSON_USE_CPPTL
|
||||
Value(const CppTL::ConstString& value);
|
||||
#endif
|
||||
Value(const String& value);
|
||||
Value(bool value);
|
||||
Value(std::nullptr_t ptr) = delete;
|
||||
Value(const Value& other);
|
||||
Value(Value&& other);
|
||||
~Value();
|
||||
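The constructors above combine with operator[], append(), and the insert() overloads added in this change; a brief usage sketch with made-up keys:

    #include <json/json.h>

    Json::Value makeSample() {
      Json::Value obj(Json::objectValue);
      obj["name"] = "example";   // Value(const char*)
      obj["count"] = 3;          // Value(Int)

      Json::Value numbers(Json::arrayValue);
      numbers.append(1);
      numbers.append(2);
      numbers.insert(0, 0);      // new overload: insert 0 at index 0 -> [0, 1, 2]
      obj["numbers"] = numbers;
      return obj;
    }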
@@ -367,9 +383,6 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
* \return false if !string. (Seg-fault if str or end are NULL.)
|
||||
*/
|
||||
bool getString(char const** begin, char const** end) const;
|
||||
#ifdef JSON_USE_CPPTL
|
||||
CppTL::ConstString asConstString() const;
|
||||
#endif
|
||||
Int asInt() const;
|
||||
UInt asUInt() const;
|
||||
#if defined(JSON_HAS_INT64)
|
||||
@@ -395,6 +408,10 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
bool isArray() const;
|
||||
bool isObject() const;
|
||||
|
||||
/// The `as<T>` and `is<T>` member function templates and specializations.
|
||||
template <typename T> T as() const JSONCPP_TEMPLATE_DELETE;
|
||||
template <typename T> bool is() const JSONCPP_TEMPLATE_DELETE;
|
||||
|
||||
bool isConvertibleTo(ValueType other) const;
|
||||
|
||||
/// Number of values in array or object
|
||||
@@ -405,7 +422,7 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
bool empty() const;
|
||||
|
||||
/// Return !isNull()
|
||||
JSONCPP_OP_EXPLICIT operator bool() const;
|
||||
explicit operator bool() const;
|
||||
|
||||
/// Remove all object members and array elements.
|
||||
/// \pre type() is arrayValue, objectValue, or nullValue
|
||||
@@ -419,35 +436,26 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
/// \post type() is arrayValue
|
||||
void resize(ArrayIndex newSize);
|
||||
|
||||
/// Access an array element (zero based index ).
|
||||
/// If the array contains less than index element, then null value are
|
||||
/// inserted
|
||||
/// in the array so that its size is index+1.
|
||||
//@{
|
||||
/// Access an array element (zero based index). If the array contains less
|
||||
/// than index element, then null value are inserted in the array so that
|
||||
/// its size is index+1.
|
||||
/// (You may need to say 'value[0u]' to get your compiler to distinguish
|
||||
/// this from the operator[] which takes a string.)
|
||||
/// this from the operator[] which takes a string.)
|
||||
Value& operator[](ArrayIndex index);
|
||||
|
||||
/// Access an array element (zero based index ).
|
||||
/// If the array contains less than index element, then null value are
|
||||
/// inserted
|
||||
/// in the array so that its size is index+1.
|
||||
/// (You may need to say 'value[0u]' to get your compiler to distinguish
|
||||
/// this from the operator[] which takes a string.)
|
||||
Value& operator[](int index);
|
||||
//@}
|
||||
|
||||
/// Access an array element (zero based index )
|
||||
//@{
|
||||
/// Access an array element (zero based index).
|
||||
/// (You may need to say 'value[0u]' to get your compiler to distinguish
|
||||
/// this from the operator[] which takes a string.)
|
||||
/// this from the operator[] which takes a string.)
|
||||
const Value& operator[](ArrayIndex index) const;
|
||||
|
||||
/// Access an array element (zero based index )
|
||||
/// (You may need to say 'value[0u]' to get your compiler to distinguish
|
||||
/// this from the operator[] which takes a string.)
|
||||
const Value& operator[](int index) const;
|
||||
//@}
|
||||
|
||||
/// If the array contains at least index+1 elements, returns the element
|
||||
/// value,
|
||||
/// otherwise returns defaultValue.
|
||||
/// value, otherwise returns defaultValue.
|
||||
Value get(ArrayIndex index, const Value& defaultValue) const;
|
||||
/// Return true if index < size().
|
||||
bool isValidIndex(ArrayIndex index) const;
|
||||
@@ -457,9 +465,13 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
Value& append(const Value& value);
|
||||
Value& append(Value&& value);
|
||||
|
||||
/// \brief Insert value in array at specific index
|
||||
bool insert(ArrayIndex index, const Value& newValue);
|
||||
bool insert(ArrayIndex index, Value&& newValue);
|
||||
|
||||
/// Access an object value by name, create a null member if it does not exist.
|
||||
/// \note Because of our implementation, keys are limited to 2^30 -1 chars.
|
||||
/// Exceeding that will cause an exception.
|
||||
/// Exceeding that will cause an exception.
|
||||
Value& operator[](const char* key);
|
||||
/// Access an object value by name, returns null if there is no member with
|
||||
/// that name.
|
||||
@@ -472,43 +484,30 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
/// \param key may contain embedded nulls.
|
||||
const Value& operator[](const String& key) const;
|
||||
/** \brief Access an object value by name, create a null member if it does not
|
||||
exist.
|
||||
|
||||
* exist.
|
||||
*
|
||||
* If the object has no entry for that name, then the member name used to
|
||||
store
|
||||
* the new entry is not duplicated.
|
||||
* store the new entry is not duplicated.
|
||||
* Example of use:
|
||||
* \code
|
||||
* Json::Value object;
|
||||
* static const StaticString code("code");
|
||||
* object[code] = 1234;
|
||||
* \endcode
|
||||
* \code
|
||||
* Json::Value object;
|
||||
* static const StaticString code("code");
|
||||
* object[code] = 1234;
|
||||
* \endcode
|
||||
*/
|
||||
Value& operator[](const StaticString& key);
|
||||
#ifdef JSON_USE_CPPTL
|
||||
/// Access an object value by name, create a null member if it does not exist.
|
||||
Value& operator[](const CppTL::ConstString& key);
|
||||
/// Access an object value by name, returns null if there is no member with
|
||||
/// that name.
|
||||
const Value& operator[](const CppTL::ConstString& key) const;
|
||||
#endif
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
Value get(const char* key, const Value& defaultValue) const;
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
/// \note key may contain embedded nulls.
|
||||
Value
|
||||
get(const char* begin, const char* end, const Value& defaultValue) const;
|
||||
Value get(const char* begin, const char* end,
|
||||
const Value& defaultValue) const;
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
/// \param key may contain embedded nulls.
|
||||
Value get(const String& key, const Value& defaultValue) const;
|
||||
#ifdef JSON_USE_CPPTL
|
||||
/// Return the member named key if it exist, defaultValue otherwise.
|
||||
/// \note deep copy
|
||||
Value get(const CppTL::ConstString& key, const Value& defaultValue) const;
|
||||
#endif
|
||||
/// Most general and efficient version of isMember()const, get()const,
|
||||
/// and operator[]const
|
||||
/// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
|
||||
@@ -530,20 +529,20 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
/// but 'key' is null-terminated.
|
||||
bool removeMember(const char* key, Value* removed);
|
||||
/** \brief Remove the named map member.
|
||||
|
||||
Update 'removed' iff removed.
|
||||
\param key may contain embedded nulls.
|
||||
\return true iff removed (no exceptions)
|
||||
*/
|
||||
*
|
||||
* Update 'removed' iff removed.
|
||||
* \param key may contain embedded nulls.
|
||||
* \return true iff removed (no exceptions)
|
||||
*/
|
||||
bool removeMember(String const& key, Value* removed);
|
||||
/// Same as removeMember(String const& key, Value* removed)
|
||||
bool removeMember(const char* begin, const char* end, Value* removed);
|
||||
/** \brief Remove the indexed array element.
|
||||
|
||||
O(n) expensive operations.
|
||||
Update 'removed' iff removed.
|
||||
\return true if removed (no exceptions)
|
||||
*/
|
||||
*
|
||||
* O(n) expensive operations.
|
||||
* Update 'removed' iff removed.
|
||||
* \return true if removed (no exceptions)
|
||||
*/
|
||||
bool removeIndex(ArrayIndex index, Value* removed);
|
||||
|
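The remove* overloads above hand the removed element back through an out-parameter; a short sketch with invented member names:

    #include <json/json.h>

    void demoRemove() {
      Json::Value obj;
      obj["temp"] = 42;
      Json::Value removed;
      if (obj.removeMember("temp", &removed)) {
        // removed now holds 42 and obj no longer has the member
      }

      Json::Value arr(Json::arrayValue);
      arr.append("a");
      arr.append("b");
      Json::Value gone;
      arr.removeIndex(0, &gone);  // gone == "a"; arr is now ["b"]
    }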
||||
/// Return true if the object has a member named key.
|
||||
@@ -554,10 +553,6 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
bool isMember(const String& key) const;
|
||||
/// Same as isMember(String const& key)const
|
||||
bool isMember(const char* begin, const char* end) const;
|
||||
#ifdef JSON_USE_CPPTL
|
||||
/// Return true if the object has a member named key.
|
||||
bool isMember(const CppTL::ConstString& key) const;
|
||||
#endif
|
||||
|
||||
/// \brief Return a list of the member names.
|
||||
///
|
||||
@@ -566,11 +561,6 @@ Json::Value obj_value(Json::objectValue); // {}
|
||||
/// \post if type() was nullValue, it remains nullValue
|
||||
Members getMemberNames() const;
|
||||
|
||||
//# ifdef JSON_USE_CPPTL
|
||||
// EnumMemberNames enumMemberNames() const;
|
||||
// EnumValues enumValues() const;
|
||||
//# endif
|
||||
|
||||
/// \deprecated Always pass len.
|
||||
JSONCPP_DEPRECATED("Use setComment(String const&) instead.")
|
||||
void setComment(const char* comment, CommentPlacement placement) {
|
||||
@@ -650,7 +640,7 @@ private:
|
||||
Comments& operator=(Comments&& that);
|
||||
bool has(CommentPlacement slot) const;
|
||||
String get(CommentPlacement slot) const;
|
||||
void set(CommentPlacement slot, String s);
|
||||
void set(CommentPlacement slot, String comment);
|
||||
|
||||
private:
|
||||
using Array = std::array<String, numberOfCommentPlacement>;
|
||||
@@ -664,6 +654,36 @@ private:
|
||||
ptrdiff_t limit_;
|
||||
};
|
||||
|
||||
template <> inline bool Value::as<bool>() const { return asBool(); }
|
||||
template <> inline bool Value::is<bool>() const { return isBool(); }
|
||||
|
||||
template <> inline Int Value::as<Int>() const { return asInt(); }
|
||||
template <> inline bool Value::is<Int>() const { return isInt(); }
|
||||
|
||||
template <> inline UInt Value::as<UInt>() const { return asUInt(); }
|
||||
template <> inline bool Value::is<UInt>() const { return isUInt(); }
|
||||
|
||||
#if defined(JSON_HAS_INT64)
|
||||
template <> inline Int64 Value::as<Int64>() const { return asInt64(); }
|
||||
template <> inline bool Value::is<Int64>() const { return isInt64(); }
|
||||
|
||||
template <> inline UInt64 Value::as<UInt64>() const { return asUInt64(); }
|
||||
template <> inline bool Value::is<UInt64>() const { return isUInt64(); }
|
||||
#endif
|
||||
|
||||
template <> inline double Value::as<double>() const { return asDouble(); }
|
||||
template <> inline bool Value::is<double>() const { return isDouble(); }
|
||||
|
||||
template <> inline String Value::as<String>() const { return asString(); }
|
||||
template <> inline bool Value::is<String>() const { return isString(); }
|
||||
|
||||
/// These `as` specializations are type conversions, and do not have a
|
||||
/// corresponding `is`.
|
||||
template <> inline float Value::as<float>() const { return asFloat(); }
|
||||
template <> inline const char* Value::as<const char*>() const {
|
||||
return asCString();
|
||||
}
|
||||
|
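These specializations make the new member templates usable directly; a small sketch, not from the patch. On most compilers the unspecialized templates are deleted, so an unsupported type parameter fails to compile:

    #include <json/json.h>

    void demoAsIs(const Json::Value& v) {
      if (v.is<double>()) {
        double d = v.as<double>();  // same as v.asDouble()
        (void)d;
      }
      Json::String s = Json::Value("hi").as<Json::String>();
      (void)s;
      // v.as<long>() would be rejected: only the specializations above exist.
    }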
||||
/** \brief Experimental and untested: represents an element of the "path" to
|
||||
* access a node.
|
||||
*/
|
||||
@@ -674,7 +694,7 @@ public:
|
||||
PathArgument();
|
||||
PathArgument(ArrayIndex index);
|
||||
PathArgument(const char* key);
|
||||
PathArgument(const String& key);
|
||||
PathArgument(String key);
|
||||
|
||||
private:
|
||||
enum Kind { kindNone = 0, kindIndex, kindKey };
|
||||
@@ -692,12 +712,11 @@ private:
|
||||
* - ".name1.name2.name3"
|
||||
* - ".[0][1][2].name1[3]"
|
||||
* - ".%" => member name is provided as parameter
|
||||
* - ".[%]" => index is provied as parameter
|
||||
* - ".[%]" => index is provided as parameter
|
||||
*/
|
||||
class JSON_API Path {
|
||||
public:
|
||||
Path(const String& path,
|
||||
const PathArgument& a1 = PathArgument(),
|
||||
Path(const String& path, const PathArgument& a1 = PathArgument(),
|
||||
const PathArgument& a2 = PathArgument(),
|
||||
const PathArgument& a3 = PathArgument(),
|
||||
const PathArgument& a4 = PathArgument(),
|
||||
@@ -710,14 +729,12 @@ public:
|
||||
Value& make(Value& root) const;
|
||||
|
||||
private:
|
||||
typedef std::vector<const PathArgument*> InArgs;
|
||||
typedef std::vector<PathArgument> Args;
|
||||
using InArgs = std::vector<const PathArgument*>;
|
||||
using Args = std::vector<PathArgument>;
|
||||
|
||||
void makePath(const String& path, const InArgs& in);
|
||||
void addPathInArg(const String& path,
|
||||
const InArgs& in,
|
||||
InArgs::const_iterator& itInArg,
|
||||
PathArgument::Kind kind);
|
||||
void addPathInArg(const String& path, const InArgs& in,
|
||||
InArgs::const_iterator& itInArg, PathArgument::Kind kind);
|
||||
static void invalidPath(const String& path, int location);
|
||||
|
||||
Args args_;
|
||||
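The Path class is explicitly labelled experimental and untested above, so treat this only as a sketch of the intended call pattern; make() is declared in this hunk and the "[%]" placeholder is filled from a PathArgument:

    #include <json/json.h>

    void demoPath(Json::Value& root) {
      Json::Path path(".settings.limits[%]", Json::PathArgument(0u));
      Json::Value& slot = path.make(root);  // creates .settings.limits[0] if absent
      slot = 100;
    }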
@@ -728,10 +745,10 @@ private:
|
||||
*/
|
||||
class JSON_API ValueIteratorBase {
|
||||
public:
|
||||
typedef std::bidirectional_iterator_tag iterator_category;
|
||||
typedef unsigned int size_t;
|
||||
typedef int difference_type;
|
||||
typedef ValueIteratorBase SelfType;
|
||||
using iterator_category = std::bidirectional_iterator_tag;
|
||||
using size_t = unsigned int;
|
||||
using difference_type = int;
|
||||
using SelfType = ValueIteratorBase;
|
||||
|
||||
bool operator==(const SelfType& other) const { return isEqual(other); }
|
||||
|
||||
@@ -766,7 +783,14 @@ public:
|
||||
char const* memberName(char const** end) const;
|
||||
|
||||
protected:
|
||||
Value& deref() const;
|
||||
/*! Internal utility functions to assist with implementing
|
||||
* other iterator functions. The const and non-const versions
|
||||
* of the "deref" protected methods expose the protected
|
||||
* current_ member variable in a way that can often be
|
||||
* optimized away by the compiler.
|
||||
*/
|
||||
const Value& deref() const;
|
||||
Value& deref();
|
||||
|
||||
void increment();
|
||||
|
||||
@@ -797,12 +821,12 @@ class JSON_API ValueConstIterator : public ValueIteratorBase {
|
||||
friend class Value;
|
||||
|
||||
public:
|
||||
typedef const Value value_type;
|
||||
using value_type = const Value;
|
||||
// typedef unsigned int size_t;
|
||||
// typedef int difference_type;
|
||||
typedef const Value& reference;
|
||||
typedef const Value* pointer;
|
||||
typedef ValueConstIterator SelfType;
|
||||
using reference = const Value&;
|
||||
using pointer = const Value*;
|
||||
using SelfType = ValueConstIterator;
|
||||
|
||||
ValueConstIterator();
|
||||
ValueConstIterator(ValueIterator const& other);
|
||||
@@ -848,12 +872,12 @@ class JSON_API ValueIterator : public ValueIteratorBase {
|
||||
friend class Value;
|
||||
|
||||
public:
|
||||
typedef Value value_type;
|
||||
typedef unsigned int size_t;
|
||||
typedef int difference_type;
|
||||
typedef Value& reference;
|
||||
typedef Value* pointer;
|
||||
typedef ValueIterator SelfType;
|
||||
using value_type = Value;
|
||||
using size_t = unsigned int;
|
||||
using difference_type = int;
|
||||
using reference = Value&;
|
||||
using pointer = Value*;
|
||||
using SelfType = ValueIterator;
|
||||
|
||||
ValueIterator();
|
||||
explicit ValueIterator(const ValueConstIterator& other);
|
||||
@@ -889,9 +913,13 @@ public:
|
||||
return *this;
|
||||
}
|
||||
|
||||
reference operator*() const { return deref(); }
|
||||
|
||||
pointer operator->() const { return &deref(); }
|
||||
/*! The return value of non-const iterators can be
|
||||
* changed, so the these functions are not const
|
||||
* because the returned references/pointers can be used
|
||||
* to change state of the base class.
|
||||
*/
|
||||
reference operator*() { return deref(); }
|
||||
pointer operator->() { return &deref(); }
|
||||
};
|
||||
|
||||
inline void swap(Value& a, Value& b) { a.swap(b); }
|
||||
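The iterator classes above are what Value::begin()/end() return (those accessors, key(), and the Value stream inserter are existing API, not part of this hunk); a brief iteration sketch:

    #include <iostream>
    #include <json/json.h>

    void printMembers(const Json::Value& obj) {
      for (Json::Value::const_iterator it = obj.begin(); it != obj.end(); ++it) {
        // key() yields the member name as a Value; *it reaches the mapped value.
        std::cout << it.key().asString() << " = " << *it << '\n';
      }
    }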
@@ -904,4 +932,4 @@ inline void swap(Value& a, Value& b) { a.swap(b); }
|
||||
#pragma warning(pop)
|
||||
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
|
||||
|
||||
#endif // CPPTL_JSON_H_INCLUDED
|
||||
#endif // JSON_H_INCLUDED
|
||||
|
include/json/version.h (new file, 28 lines)
@@ -0,0 +1,28 @@
#ifndef JSON_VERSION_H_INCLUDED
#define JSON_VERSION_H_INCLUDED

// Note: version must be updated in three places when doing a release. This
// annoying process ensures that amalgamate, CMake, and meson all report the
// correct version.
// 1. /meson.build
// 2. /include/json/version.h
// 3. /CMakeLists.txt
// IMPORTANT: also update the SOVERSION!!

#define JSONCPP_VERSION_STRING "1.9.4"
#define JSONCPP_VERSION_MAJOR 1
#define JSONCPP_VERSION_MINOR 9
#define JSONCPP_VERSION_PATCH 3
#define JSONCPP_VERSION_QUALIFIER
#define JSONCPP_VERSION_HEXA \
  ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | \
   (JSONCPP_VERSION_PATCH << 8))

#ifdef JSONCPP_USING_SECURE_MEMORY
#undef JSONCPP_USING_SECURE_MEMORY
#endif
#define JSONCPP_USING_SECURE_MEMORY 0
// If non-zero, the library zeroes any memory that it has allocated before
// it frees its memory.

#endif // JSON_VERSION_H_INCLUDED
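With the macros in this new header, downstream code can gate on the library version at compile time; a small example of the packing defined by JSONCPP_VERSION_HEXA (major in the top byte, then minor, then patch):

    #include <json/version.h>

    // 1.9.0 packs to 0x01090000 under the scheme above.
    #if JSONCPP_VERSION_HEXA >= 0x01090000
    // It is safe to rely on APIs introduced in the 1.9 series here.
    #endif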
@@ -27,18 +27,17 @@ namespace Json {
|
||||
class Value;
|
||||
|
||||
/**
|
||||
|
||||
Usage:
|
||||
\code
|
||||
using namespace Json;
|
||||
void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
|
||||
std::unique_ptr<StreamWriter> const writer(
|
||||
factory.newStreamWriter());
|
||||
writer->write(value, &std::cout);
|
||||
std::cout << std::endl; // add lf and flush
|
||||
}
|
||||
\endcode
|
||||
*/
|
||||
*
|
||||
* Usage:
|
||||
* \code
|
||||
* using namespace Json;
|
||||
* void writeToStdout(StreamWriter::Factory const& factory, Value const& value)
|
||||
* { std::unique_ptr<StreamWriter> const writer( factory.newStreamWriter());
|
||||
* writer->write(value, &std::cout);
|
||||
* std::cout << std::endl; // add lf and flush
|
||||
* }
|
||||
* \endcode
|
||||
*/
|
||||
class JSON_API StreamWriter {
|
||||
protected:
|
||||
OStream* sout_; // not owned; will not delete
|
||||
@@ -46,10 +45,11 @@ public:
|
||||
StreamWriter();
|
||||
virtual ~StreamWriter();
|
||||
/** Write Value into document as configured in sub-class.
|
||||
Do not take ownership of sout, but maintain a reference during function.
|
||||
\pre sout != NULL
|
||||
\return zero on success (For now, we always return zero, so check the
|
||||
stream instead.) \throw std::exception possibly, depending on configuration
|
||||
* Do not take ownership of sout, but maintain a reference during function.
|
||||
* \pre sout != NULL
|
||||
* \return zero on success (For now, we always return zero, so check the
|
||||
* stream instead.) \throw std::exception possibly, depending on
|
||||
* configuration
|
||||
*/
|
||||
virtual int write(Value const& root, OStream* sout) = 0;
|
||||
|
||||
@@ -73,49 +73,49 @@ String JSON_API writeString(StreamWriter::Factory const& factory,
|
||||
|
||||
/** \brief Build a StreamWriter implementation.
|
||||
|
||||
Usage:
|
||||
\code
|
||||
using namespace Json;
|
||||
Value value = ...;
|
||||
StreamWriterBuilder builder;
|
||||
builder["commentStyle"] = "None";
|
||||
builder["indentation"] = " "; // or whatever you like
|
||||
std::unique_ptr<Json::StreamWriter> writer(
|
||||
builder.newStreamWriter());
|
||||
writer->write(value, &std::cout);
|
||||
std::cout << std::endl; // add lf and flush
|
||||
\endcode
|
||||
* Usage:
|
||||
* \code
|
||||
* using namespace Json;
|
||||
* Value value = ...;
|
||||
* StreamWriterBuilder builder;
|
||||
* builder["commentStyle"] = "None";
|
||||
* builder["indentation"] = " "; // or whatever you like
|
||||
* std::unique_ptr<Json::StreamWriter> writer(
|
||||
* builder.newStreamWriter());
|
||||
* writer->write(value, &std::cout);
|
||||
* std::cout << std::endl; // add lf and flush
|
||||
* \endcode
|
||||
*/
|
||||
class JSON_API StreamWriterBuilder : public StreamWriter::Factory {
|
||||
public:
|
||||
// Note: We use a Json::Value so that we can add data-members to this class
|
||||
// without a major version bump.
|
||||
/** Configuration of this builder.
|
||||
Available settings (case-sensitive):
|
||||
- "commentStyle": "None" or "All"
|
||||
- "indentation": "<anything>".
|
||||
- Setting this to an empty string also omits newline characters.
|
||||
- "enableYAMLCompatibility": false or true
|
||||
- slightly change the whitespace around colons
|
||||
- "dropNullPlaceholders": false or true
|
||||
- Drop the "null" string from the writer's output for nullValues.
|
||||
Strictly speaking, this is not valid JSON. But when the output is being
|
||||
fed to a browser's JavaScript, it makes for smaller output and the
|
||||
browser can handle the output just fine.
|
||||
- "useSpecialFloats": false or true
|
||||
- If true, outputs non-finite floating point values in the following way:
|
||||
NaN values as "NaN", positive infinity as "Infinity", and negative
|
||||
infinity as "-Infinity".
|
||||
- "precision": int
|
||||
- Number of precision digits for formatting of real values.
|
||||
- "precisionType": "significant"(default) or "decimal"
|
||||
- Type of precision for formatting of real values.
|
||||
* Available settings (case-sensitive):
|
||||
* - "commentStyle": "None" or "All"
|
||||
* - "indentation": "<anything>".
|
||||
* - Setting this to an empty string also omits newline characters.
|
||||
* - "enableYAMLCompatibility": false or true
|
||||
* - slightly change the whitespace around colons
|
||||
* - "dropNullPlaceholders": false or true
|
||||
* - Drop the "null" string from the writer's output for nullValues.
|
||||
* Strictly speaking, this is not valid JSON. But when the output is being
|
||||
* fed to a browser's JavaScript, it makes for smaller output and the
|
||||
* browser can handle the output just fine.
|
||||
* - "useSpecialFloats": false or true
|
||||
* - If true, outputs non-finite floating point values in the following way:
|
||||
* NaN values as "NaN", positive infinity as "Infinity", and negative
|
||||
* infinity as "-Infinity".
|
||||
* - "precision": int
|
||||
* - Number of precision digits for formatting of real values.
|
||||
* - "precisionType": "significant"(default) or "decimal"
|
||||
* - Type of precision for formatting of real values.
|
||||
|
||||
You can examine 'settings_` yourself
|
||||
to see the defaults. You can also write and read them just like any
|
||||
JSON Value.
|
||||
\sa setDefaults()
|
||||
*/
|
||||
* You can examine 'settings_` yourself
|
||||
* to see the defaults. You can also write and read them just like any
|
||||
* JSON Value.
|
||||
* \sa setDefaults()
|
||||
*/
|
||||
Json::Value settings_;
|
||||
|
||||
StreamWriterBuilder();
|
||||
@@ -252,7 +252,7 @@ private:
|
||||
static bool hasCommentForValue(const Value& value);
|
||||
static String normalizeEOL(const String& text);
|
||||
|
||||
typedef std::vector<String> ChildValues;
|
||||
using ChildValues = std::vector<String>;
|
||||
|
||||
ChildValues childValues_;
|
||||
String document_;
|
||||
@@ -326,7 +326,7 @@ private:
|
||||
static bool hasCommentForValue(const Value& value);
|
||||
static String normalizeEOL(const String& text);
|
||||
|
||||
typedef std::vector<String> ChildValues;
|
||||
using ChildValues = std::vector<String>;
|
||||
|
||||
ChildValues childValues_;
|
||||
OStream* document_;
|
||||
@@ -346,10 +346,9 @@ String JSON_API valueToString(UInt value);
|
||||
#endif // if defined(JSON_HAS_INT64)
|
||||
String JSON_API valueToString(LargestInt value);
|
||||
String JSON_API valueToString(LargestUInt value);
|
||||
String JSON_API
|
||||
valueToString(double value,
|
||||
unsigned int precision = Value::defaultRealPrecision,
|
||||
PrecisionType precisionType = PrecisionType::significantDigits);
|
||||
String JSON_API valueToString(
|
||||
double value, unsigned int precision = Value::defaultRealPrecision,
|
||||
PrecisionType precisionType = PrecisionType::significantDigits);
|
||||
String JSON_API valueToString(bool value);
|
||||
String JSON_API valueToQuotedString(const char* value);
|
||||
|
||||
|
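Tying the writer pieces in this file together: StreamWriterBuilder produces a configured writer, writeString() (declared earlier in this header) is the one-shot helper, and the valueToString() overloads above handle single scalars. A short sketch:

    #include <json/json.h>

    Json::String dump(const Json::Value& root) {
      Json::StreamWriterBuilder builder;
      builder["indentation"] = "  ";
      builder["precision"] = 5;                 // settings documented above
      return Json::writeString(builder, root);  // serialize the whole tree
    }

    // Scalar helper from the declarations above:
    // Json::valueToString(3.14159, 5u) yields roughly "3.1416" (5 significant digits).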
makerelease.py (deleted, 390 lines)
@@ -1,390 +0,0 @@
|
||||
# Copyright 2010 Baptiste Lepilleur and The JsonCpp Authors
|
||||
# Distributed under MIT license, or public domain if desired and
|
||||
# recognized in your jurisdiction.
|
||||
# See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
|
||||
|
||||
"""Tag the sandbox for release, make source and doc tarballs.
|
||||
|
||||
Requires Python 2.6
|
||||
|
||||
Example of invocation (use to test the script):
|
||||
python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev
|
||||
|
||||
When testing this script:
|
||||
python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev
|
||||
|
||||
Example of invocation when doing a release:
|
||||
python makerelease.py 0.5.0 0.6.0-dev
|
||||
|
||||
Note: This was for Subversion. Now that we are in GitHub, we do not
|
||||
need to build versioned tarballs anymore, so makerelease.py is defunct.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
import os.path
|
||||
import subprocess
|
||||
import sys
|
||||
import doxybuild
|
||||
import subprocess
|
||||
import xml.etree.ElementTree as ElementTree
|
||||
import shutil
|
||||
import urllib2
|
||||
import tempfile
|
||||
import os
|
||||
import time
|
||||
from devtools import antglob, fixeol, tarball
|
||||
import amalgamate
|
||||
|
||||
SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
|
||||
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
SOURCEFORGE_PROJECT = 'jsoncpp'

def set_version(version):
    with open('version','wb') as f:
        f.write(version.strip())

def rmdir_if_exist(dir_path):
    if os.path.isdir(dir_path):
        shutil.rmtree(dir_path)

class SVNError(Exception):
    pass

def svn_command(command, *args):
    cmd = ['svn', '--non-interactive', command] + list(args)
    print('Running:', ' '.join(cmd))
    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
    stdout = process.communicate()[0]
    if process.returncode:
        error = SVNError('SVN command failed:\n' + stdout)
        error.returncode = process.returncode
        raise error
    return stdout

def check_no_pending_commit():
    """Checks that there is no pending commit in the sandbox."""
    stdout = svn_command('status', '--xml')
    etree = ElementTree.fromstring(stdout)
    msg = []
    for entry in etree.getiterator('entry'):
        path = entry.get('path')
        status = entry.find('wc-status').get('item')
        if status != 'unversioned' and path != 'version':
            msg.append('File "%s" has pending change (status="%s")' % (path, status))
    if msg:
        msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!')
    return '\n'.join(msg)

def svn_join_url(base_url, suffix):
    if not base_url.endswith('/'):
        base_url += '/'
    if suffix.startswith('/'):
        suffix = suffix[1:]
    return base_url + suffix

def svn_check_if_tag_exist(tag_url):
    """Checks if a tag exist.
    Returns: True if the tag exist, False otherwise.
    """
    try:
        list_stdout = svn_command('list', tag_url)
    except SVNError as e:
        if e.returncode != 1 or not str(e).find('tag_url'):
            raise e
        # otherwise ignore error, meaning tag does not exist
        return False
    return True

def svn_commit(message):
    """Commit the sandbox, providing the specified comment.
    """
    svn_command('ci', '-m', message)

def svn_tag_sandbox(tag_url, message):
    """Makes a tag based on the sandbox revisions.
    """
    svn_command('copy', '-m', message, '.', tag_url)

def svn_remove_tag(tag_url, message):
    """Removes an existing tag.
    """
    svn_command('delete', '-m', message, tag_url)

def svn_export(tag_url, export_dir):
    """Exports the tag_url revision to export_dir.
    Target directory, including its parent is created if it does not exist.
    If the directory export_dir exist, it is deleted before export proceed.
    """
    rmdir_if_exist(export_dir)
    svn_command('export', tag_url, export_dir)

def fix_sources_eol(dist_dir):
    """Set file EOL for tarball distribution.
    """
    print('Preparing exported source file EOL for distribution...')
    prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
    win_sources = antglob.glob(dist_dir,
        includes = '**/*.sln **/*.vcproj',
        prune_dirs = prune_dirs)
    unix_sources = antglob.glob(dist_dir,
        includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
        sconscript *.json *.expected AUTHORS LICENSE''',
        excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
        prune_dirs = prune_dirs)
    for path in win_sources:
        fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\r\n')
    for path in unix_sources:
        fixeol.fix_source_eol(path, is_dry_run = False, verbose = True, eol = '\n')

def download(url, target_path):
    """Download file represented by url to target_path.
    """
    f = urllib2.urlopen(url)
    try:
        data = f.read()
    finally:
        f.close()
    fout = open(target_path, 'wb')
    try:
        fout.write(data)
    finally:
        fout.close()

def check_compile(distcheck_top_dir, platform):
    cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
    print('Running:', ' '.join(cmd))
    log_path = os.path.join(distcheck_top_dir, 'build-%s.log' % platform)
    flog = open(log_path, 'wb')
    try:
        process = subprocess.Popen(cmd,
                                   stdout=flog,
                                   stderr=subprocess.STDOUT,
                                   cwd=distcheck_top_dir)
        stdout = process.communicate()[0]
        status = (process.returncode == 0)
    finally:
        flog.close()
    return (status, log_path)

def write_tempfile(content, **kwargs):
    fd, path = tempfile.mkstemp(**kwargs)
    f = os.fdopen(fd, 'wt')
    try:
        f.write(content)
    finally:
        f.close()
    return path

class SFTPError(Exception):
    pass

def run_sftp_batch(userhost, sftp, batch, retry=0):
    path = write_tempfile(batch, suffix='.sftp', text=True)
    # psftp -agent -C blep,jsoncpp@web.sourceforge.net -batch -b batch.sftp -bc
    cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
    error = None
    for retry_index in range(0, max(1,retry)):
        heading = retry_index == 0 and 'Running:' or 'Retrying:'
        print(heading, ' '.join(cmd))
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        stdout = process.communicate()[0]
        if process.returncode != 0:
            error = SFTPError('SFTP batch failed:\n' + stdout)
        else:
            break
    if error:
        raise error
    return stdout
def sourceforge_web_synchro(sourceforge_project, doc_dir,
                            user=None, sftp='sftp'):
    """Notes: does not synchronize sub-directory of doc-dir.
    """
    userhost = '%s,%s@web.sourceforge.net' % (user, sourceforge_project)
    stdout = run_sftp_batch(userhost, sftp, """
cd htdocs
dir
exit
""")
    existing_paths = set()
    collect = 0
    for line in stdout.split('\n'):
        line = line.strip()
        if not collect and line.endswith('> dir'):
            collect = True
        elif collect and line.endswith('> exit'):
            break
        elif collect == 1:
            collect = 2
        elif collect == 2:
            path = line.strip().split()[-1:]
            if path and path[0] not in ('.', '..'):
                existing_paths.add(path[0])
    upload_paths = set([os.path.basename(p) for p in antglob.glob(doc_dir)])
    paths_to_remove = existing_paths - upload_paths
    if paths_to_remove:
        print('Removing the following file from web:')
        print('\n'.join(paths_to_remove))
        stdout = run_sftp_batch(userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove))
    print('Uploading %d files:' % len(upload_paths))
    batch_size = 10
    upload_paths = list(upload_paths)
    start_time = time.time()
    for index in range(0,len(upload_paths),batch_size):
        paths = upload_paths[index:index+batch_size]
        file_per_sec = (time.time() - start_time) / (index+1)
        remaining_files = len(upload_paths) - index
        remaining_sec = file_per_sec * remaining_files
        print('%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec))
        run_sftp_batch(userhost, sftp, """cd htdocs
lcd %s
mput %s
exit""" % (doc_dir, ' '.join(paths)), retry=3)

def sourceforge_release_tarball(sourceforge_project, paths, user=None, sftp='sftp'):
    userhost = '%s,%s@frs.sourceforge.net' % (user, sourceforge_project)
    run_sftp_batch(userhost, sftp, """
mput %s
exit
""" % (' '.join(paths),))


def main():
    usage = """%prog release_version next_dev_version
Update 'version' file to release_version and commit.
Generates the document tarball.
Tags the sandbox revision with release_version.
Update 'version' file to next_dev_version and commit.

Performs an svn export of tag release version, and build a source tarball.

Must be started in the project top directory.

Warning: --force should only be used when developing/testing the release script.
"""
    from optparse import OptionParser
    parser = OptionParser(usage=usage)
    parser.allow_interspersed_args = False
    parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'),
        help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
    parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'),
        help="""Path to Doxygen tool. [Default: %default]""")
    parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False,
        help="""Ignore pending commit. [Default: %default]""")
    parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
        help="""Overwrite release existing tag if it exist. [Default: %default]""")
    parser.add_option('-p', '--platforms', dest="platforms", action='store', default='',
        help="""Comma separated list of platform passed to scons for build check.""")
    parser.add_option('--no-test', dest="no_test", action='store_true', default=False,
        help="""Skips build check.""")
    parser.add_option('--no-web', dest="no_web", action='store_true', default=False,
        help="""Do not update web site.""")
    parser.add_option('-u', '--upload-user', dest="user", action='store',
        help="""Sourceforge user for SFTP documentation upload.""")
    parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'),
        help="""Path of the SFTP compatible binary used to upload the documentation.""")
    parser.enable_interspersed_args()
    options, args = parser.parse_args()

    if len(args) != 2:
        parser.error('release_version missing on command-line.')
    release_version = args[0]
    next_version = args[1]

    if not options.platforms and not options.no_test:
        parser.error('You must specify either --platform or --no-test option.')

    if options.ignore_pending_commit:
        msg = ''
    else:
        msg = check_no_pending_commit()
    if not msg:
        print('Setting version to', release_version)
        set_version(release_version)
        svn_commit('Release ' + release_version)
        tag_url = svn_join_url(SVN_TAG_ROOT, release_version)
        if svn_check_if_tag_exist(tag_url):
            if options.retag_release:
                svn_remove_tag(tag_url, 'Overwriting previous tag')
            else:
                print('Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url)
                sys.exit(1)
        svn_tag_sandbox(tag_url, 'Release ' + release_version)

        print('Generated doxygen document...')
        ## doc_dirname = r'jsoncpp-api-html-0.5.0'
        ## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
        doc_tarball_path, doc_dirname = doxybuild.build_doc(options, make_release=True)
        doc_distcheck_dir = 'dist/doccheck'
        tarball.decompress(doc_tarball_path, doc_distcheck_dir)
        doc_distcheck_top_dir = os.path.join(doc_distcheck_dir, doc_dirname)

        export_dir = 'dist/export'
        svn_export(tag_url, export_dir)
        fix_sources_eol(export_dir)

        source_dir = 'jsoncpp-src-' + release_version
        source_tarball_path = 'dist/%s.tar.gz' % source_dir
        print('Generating source tarball to', source_tarball_path)
        tarball.make_tarball(source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir)

        amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
        print('Generating amalgamation source tarball to', amalgamation_tarball_path)
        amalgamation_dir = 'dist/amalgamation'
        amalgamate.amalgamate_source(export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h')
        amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
        tarball.make_tarball(amalgamation_tarball_path, [amalgamation_dir],
                             amalgamation_dir, prefix_dir=amalgamation_source_dir)

        # Decompress source tarball, download and install scons-local
        distcheck_dir = 'dist/distcheck'
        distcheck_top_dir = distcheck_dir + '/' + source_dir
        print('Decompressing source tarball to', distcheck_dir)
        rmdir_if_exist(distcheck_dir)
        tarball.decompress(source_tarball_path, distcheck_dir)
        scons_local_path = 'dist/scons-local.tar.gz'
        print('Downloading scons-local to', scons_local_path)
        download(SCONS_LOCAL_URL, scons_local_path)
        print('Decompressing scons-local to', distcheck_top_dir)
        tarball.decompress(scons_local_path, distcheck_top_dir)

        # Run compilation
        print('Compiling decompressed tarball')
        all_build_status = True
        for platform in options.platforms.split(','):
            print('Testing platform:', platform)
            build_status, log_path = check_compile(distcheck_top_dir, platform)
            print('see build log:', log_path)
            print(build_status and '=> ok' or '=> FAILED')
            all_build_status = all_build_status and build_status
        if not build_status:
            print('Testing failed on at least one platform, aborting...')
            svn_remove_tag(tag_url, 'Removing tag due to failed testing')
            sys.exit(1)
        if options.user:
            if not options.no_web:
                print('Uploading documentation using user', options.user)
                sourceforge_web_synchro(SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp)
                print('Completed documentation upload')
            print('Uploading source and documentation tarballs for release using user', options.user)
            sourceforge_release_tarball(SOURCEFORGE_PROJECT,
                                        [source_tarball_path, doc_tarball_path],
                                        user=options.user, sftp=options.sftp)
            print('Source and doc release tarballs uploaded')
        else:
            print('No upload user specified. Web site and download tarball were not uploaded.')
            print('Tarball can be found at:', doc_tarball_path)

        # Set next version number and commit
        set_version(next_version)
        svn_commit('Released ' + release_version)
    else:
        sys.stderr.write(msg + '\n')

if __name__ == '__main__':
    main()
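
The script's amalgamation step bundles the whole library into a single jsoncpp.cpp plus a json/json.h header. As a rough illustration of how a downstream project consumes that artifact, here is a minimal C++ sketch; the file layout matches what amalgamate.amalgamate_source() writes above, but the consumer program itself is hypothetical and only shows the idea (compile it together with the amalgamated source, e.g. c++ -std=c++11 demo.cpp jsoncpp.cpp).

// demo.cpp -- hypothetical consumer of the amalgamated distribution.
#include <iostream>
#include <sstream>

#include "json/json.h" // the amalgamated header produced by the release script

int main() {
  std::istringstream doc(R"({"name": "jsoncpp", "release": true})");
  Json::Value root;
  doc >> root; // operator>> throws (Json::RuntimeError) on malformed input
  std::cout << root["name"].asString() << "\n";
  return 0;
}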

meson.build
@@ -1,45 +1,35 @@
project(
  'jsoncpp',
  'cpp',
  version : '1.9.0',

  # Note: version must be updated in three places when doing a release. This
  # annoying process ensures that amalgamate, CMake, and meson all report the
  # correct version.
  # 1. /meson.build
  # 2. /include/json/version.h
  # 3. /CMakeLists.txt
  # IMPORTANT: also update the SOVERSION!!
  version : '1.9.4',
  default_options : [
    'buildtype=release',
    'cpp_std=c++11',
    'warning_level=1'],
  license : 'Public Domain',
  meson_version : '>= 0.50.0')
  meson_version : '>= 0.49.0')

jsoncpp_ver_arr = meson.project_version().split('.')
jsoncpp_major_version = jsoncpp_ver_arr[0]
jsoncpp_minor_version = jsoncpp_ver_arr[1]
jsoncpp_patch_version = jsoncpp_ver_arr[2]

jsoncpp_cdata = configuration_data()
jsoncpp_cdata.set('JSONCPP_VERSION', meson.project_version())
jsoncpp_cdata.set('JSONCPP_VERSION_MAJOR', jsoncpp_major_version)
jsoncpp_cdata.set('JSONCPP_VERSION_MINOR', jsoncpp_minor_version)
jsoncpp_cdata.set('JSONCPP_VERSION_PATCH', jsoncpp_patch_version)
jsoncpp_cdata.set('JSONCPP_USE_SECURE_MEMORY',0)

jsoncpp_gen_sources = configure_file(
  input : 'src/lib_json/version.h.in',
  output : 'version.h',
  configuration : jsoncpp_cdata,
  install : true,
  install_dir : join_paths(get_option('prefix'), get_option('includedir'), 'json')
)

jsoncpp_headers = [
jsoncpp_headers = files([
  'include/json/allocator.h',
  'include/json/assertions.h',
  'include/json/autolink.h',
  'include/json/config.h',
  'include/json/features.h',
  'include/json/json_features.h',
  'include/json/forwards.h',
  'include/json/json.h',
  'include/json/reader.h',
  'include/json/value.h',
  'include/json/writer.h']
  'include/json/version.h',
  'include/json/writer.h',
])
jsoncpp_include_directories = include_directories('include')

install_headers(
@@ -55,14 +45,12 @@ else
endif

jsoncpp_lib = library(
  'jsoncpp',
  [ jsoncpp_gen_sources,
    jsoncpp_headers,
    'src/lib_json/json_tool.h',
  'jsoncpp', files([
    'src/lib_json/json_reader.cpp',
    'src/lib_json/json_value.cpp',
    'src/lib_json/json_writer.cpp'],
  soversion : 21,
    'src/lib_json/json_writer.cpp',
  ]),
  soversion : 24,
  install : true,
  include_directories : jsoncpp_include_directories,
  cpp_args: dll_export_flag)
@@ -78,18 +66,21 @@ import('pkgconfig').generate(
jsoncpp_dep = declare_dependency(
  include_directories : jsoncpp_include_directories,
  link_with : jsoncpp_lib,
  version : meson.project_version(),
  sources : jsoncpp_gen_sources)
  version : meson.project_version())

# tests
python = import('python3').find_python()
if meson.is_subproject() or not get_option('tests')
  subdir_done()
endif

python = import('python').find_installation()

jsoncpp_test = executable(
  'jsoncpp_test',
  [ 'src/test_lib_json/jsontest.cpp',
    'src/test_lib_json/jsontest.h',
  'jsoncpp_test', files([
    'src/test_lib_json/jsontest.cpp',
    'src/test_lib_json/main.cpp',
    'src/test_lib_json/fuzz.cpp'],
    'src/test_lib_json/fuzz.cpp',
  ]),
  include_directories : jsoncpp_include_directories,
  link_with : jsoncpp_lib,
  install : false,
@@ -112,5 +103,17 @@ test(
    '-B',
    join_paths(meson.current_source_dir(), 'test/runjsontests.py'),
    jsontestrunner,
    join_paths(meson.current_source_dir(), 'test/data')]
    join_paths(meson.current_source_dir(), 'test/data')],
)
test(
  'jsonchecker_jsontestrunner',
  python,
  is_parallel : false,
  args : [
    '-B',
    join_paths(meson.current_source_dir(), 'test/runjsontests.py'),
    '--with-json-checker',
    jsontestrunner,
    join_paths(meson.current_source_dir(), 'test/data')],
  workdir : join_paths(meson.current_source_dir(), 'test/data'),
)

meson_options.txt (new file)
@@ -0,0 +1,5 @@
option(
  'tests',
  type : 'boolean',
  value : true,
  description : 'Enable building tests')

@@ -1,5 +1,7 @@
libdir=@CMAKE_INSTALL_FULL_LIBDIR@
includedir=@CMAKE_INSTALL_FULL_INCLUDEDIR@
prefix=@CMAKE_INSTALL_PREFIX@
exec_prefix=@CMAKE_INSTALL_PREFIX@
libdir=@libdir_for_pc_file@
includedir=@includedir_for_pc_file@

Name: jsoncpp
Description: A C++ library for interacting with JSON

@@ -1,13 +1,28 @@
find_package(PythonInterp 2.6)
if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12.0)
  # The new Python3 module is much more robust than the previous PythonInterp
  find_package(Python3 COMPONENTS Interpreter)
  # Set variables for backwards compatibility with cmake < 3.12.0
  set(PYTHONINTERP_FOUND ${Python3_Interpreter_FOUND})
  set(PYTHON_EXECUTABLE ${Python3_EXECUTABLE})
else()
  set(Python_ADDITIONAL_VERSIONS 3.8)
  find_package(PythonInterp 3)
endif()

add_executable(jsontestrunner_exe
  main.cpp
)
  main.cpp
)

if(BUILD_SHARED_LIBS)
  add_compile_definitions( JSON_DLL )
  if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12.0)
    add_compile_definitions( JSON_DLL )
  else()
    add_definitions(-DJSON_DLL)
  endif()
  target_link_libraries(jsontestrunner_exe jsoncpp_lib)
else()
  target_link_libraries(jsontestrunner_exe jsoncpp_static)
endif()
target_link_libraries(jsontestrunner_exe jsoncpp_lib)

set_target_properties(jsontestrunner_exe PROPERTIES OUTPUT_NAME jsontestrunner_exe)

@@ -19,18 +34,18 @@ if(PYTHONINTERP_FOUND)
  # Run unit tests in post-build
  # (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
  add_custom_target(jsoncpp_readerwriter_tests
    "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
    DEPENDS jsontestrunner_exe jsoncpp_test
  )
    "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
    DEPENDS jsontestrunner_exe jsoncpp_test
  )
  add_custom_target(jsoncpp_check DEPENDS jsoncpp_readerwriter_tests)

  ## Create tests for dashboard submission, allows easy review of CI results https://my.cdash.org/index.php?project=jsoncpp
  add_test(NAME jsoncpp_readerwriter
    COMMAND "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
    WORKING_DIRECTORY "${TEST_DIR}/data"
    COMMAND "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
    WORKING_DIRECTORY "${TEST_DIR}/data"
  )
  add_test(NAME jsoncpp_readerwriter_json_checker
    COMMAND "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" --with-json-checker $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
    WORKING_DIRECTORY "${TEST_DIR}/data"
    COMMAND "${PYTHON_EXECUTABLE}" -B "${RUNJSONTESTS_PATH}" --with-json-checker $<TARGET_FILE:jsontestrunner_exe> "${TEST_DIR}/data"
    WORKING_DIRECTORY "${TEST_DIR}/data"
  )
endif()
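
The branch above only changes how the JSON_DLL compile definition is injected (add_compile_definitions() needs CMake 3.12, add_definitions() is the fallback). For context, JSON_DLL and JSON_DLL_BUILD drive the usual Windows import/export macro selection inside the headers; the lines below are a simplified sketch of that pattern under the assumption of MSVC-style __declspec annotations, not a verbatim copy of include/json/config.h.

// Simplified sketch of the export-macro pattern that JSON_DLL selects.
#if defined(JSON_DLL_BUILD)   // building the shared library itself
#define JSON_API __declspec(dllexport)
#elif defined(JSON_DLL)       // consuming the shared library (this target)
#define JSON_API __declspec(dllimport)
#else                         // static or object library: no decoration
#define JSON_API
#endif

class JSON_API Value; // public classes and functions are tagged with JSON_API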

@@ -15,7 +15,9 @@

#include <algorithm> // sort
#include <cstdio>
#include <iostream>
#include <json/json.h>
#include <memory>
#include <sstream>

struct Options {
@@ -55,10 +57,10 @@ static Json::String readInputTestFile(const char* path) {
  if (!file)
    return "";
  fseek(file, 0, SEEK_END);
  long const size = ftell(file);
  size_t const usize = static_cast<unsigned long>(size);
  auto const size = ftell(file);
  auto const usize = static_cast<size_t>(size);
  fseek(file, 0, SEEK_SET);
  char* buffer = new char[size + 1];
  auto buffer = new char[size + 1];
  buffer[size] = 0;
  Json::String text;
  if (fread(buffer, 1, usize, file) == usize)
@@ -68,8 +70,8 @@ static Json::String readInputTestFile(const char* path) {
  return text;
}

static void
printValueTree(FILE* fout, Json::Value& value, const Json::String& path = ".") {
static void printValueTree(FILE* fout, Json::Value& value,
                           const Json::String& path = ".") {
  if (value.hasComment(Json::commentBefore)) {
    fprintf(fout, "%s\n", value.getComment(Json::commentBefore).c_str());
  }
@@ -109,7 +111,7 @@ printValueTree(FILE* fout, Json::Value& value, const Json::String& path = ".") {
    Json::Value::Members members(value.getMemberNames());
    std::sort(members.begin(), members.end());
    Json::String suffix = *(path.end() - 1) == '.' ? "" : ".";
    for (auto name : members) {
    for (const auto& name : members) {
      printValueTree(fout, value[name], path + suffix + name);
    }
  } break;
@@ -125,21 +127,46 @@ printValueTree(FILE* fout, Json::Value& value, const Json::String& path = ".") {
static int parseAndSaveValueTree(const Json::String& input,
                                 const Json::String& actual,
                                 const Json::String& kind,
                                 const Json::Features& features,
                                 bool parseOnly,
                                 Json::Value* root) {
  Json::Reader reader(features);
  bool parsingSuccessful =
      reader.parse(input.data(), input.data() + input.size(), *root);
  if (!parsingSuccessful) {
    printf("Failed to parse %s file: \n%s\n", kind.c_str(),
           reader.getFormattedErrorMessages().c_str());
    return 1;
                                 const Json::Features& features, bool parseOnly,
                                 Json::Value* root, bool use_legacy) {
  if (!use_legacy) {
    Json::CharReaderBuilder builder;

    builder.settings_["allowComments"] = features.allowComments_;
    builder.settings_["strictRoot"] = features.strictRoot_;
    builder.settings_["allowDroppedNullPlaceholders"] =
        features.allowDroppedNullPlaceholders_;
    builder.settings_["allowNumericKeys"] = features.allowNumericKeys_;

    std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
    Json::String errors;
    const bool parsingSuccessful =
        reader->parse(input.data(), input.data() + input.size(), root, &errors);

    if (!parsingSuccessful) {
      std::cerr << "Failed to parse " << kind << " file: " << std::endl
                << errors << std::endl;
      return 1;
    }

    // We may instead check the legacy implementation (to ensure it doesn't
    // randomly get broken).
  } else {
    Json::Reader reader(features);
    const bool parsingSuccessful =
        reader.parse(input.data(), input.data() + input.size(), *root);
    if (!parsingSuccessful) {
      std::cerr << "Failed to parse " << kind << " file: " << std::endl
                << reader.getFormatedErrorMessages() << std::endl;
      return 1;
    }
  }

  if (!parseOnly) {
    FILE* factual = fopen(actual.c_str(), "wt");
    if (!factual) {
      printf("Failed to create %s actual file.\n", kind.c_str());
      std::cerr << "Failed to create '" << kind << "' actual file."
                << std::endl;
      return 2;
    }
    printValueTree(factual, *root);
@@ -173,7 +200,7 @@ static int rewriteValueTree(const Json::String& rewritePath,
  *rewrite = write(root);
  FILE* fout = fopen(rewritePath.c_str(), "wt");
  if (!fout) {
    printf("Failed to create rewrite file: %s\n", rewritePath.c_str());
    std::cerr << "Failed to create rewrite file: " << rewritePath << std::endl;
    return 2;
  }
  fprintf(fout, "%s\n", rewrite->c_str());
@@ -194,14 +221,15 @@ static Json::String removeSuffix(const Json::String& path,
static void printConfig() {
// Print the configuration used to compile JsonCpp
#if defined(JSON_NO_INT64)
  printf("JSON_NO_INT64=1\n");
  std::cout << "JSON_NO_INT64=1" << std::endl;
#else
  printf("JSON_NO_INT64=0\n");
  std::cout << "JSON_NO_INT64=0" << std::endl;
#endif
}

static int printUsage(const char* argv[]) {
  printf("Usage: %s [--strict] input-json-file", argv[0]);
  std::cout << "Usage: " << argv[0] << " [--strict] input-json-file"
            << std::endl;
  return 3;
}

@@ -231,7 +259,7 @@ static int parseCommandLine(int argc, const char* argv[], Options* opts) {
    } else if (writerName == "BuiltStyledStreamWriter") {
      opts->write = &useBuiltStyledStreamWriter;
    } else {
      printf("Unknown '--json-writer %s'\n", writerName.c_str());
      std::cerr << "Unknown '--json-writer' " << writerName << std::endl;
      return 4;
    }
  }
@@ -241,19 +269,20 @@ static int parseCommandLine(int argc, const char* argv[], Options* opts) {
  opts->path = argv[index];
  return 0;
}
static int runTest(Options const& opts) {

static int runTest(Options const& opts, bool use_legacy) {
  int exitCode = 0;

  Json::String input = readInputTestFile(opts.path.c_str());
  if (input.empty()) {
    printf("Failed to read input or empty input: %s\n", opts.path.c_str());
    std::cerr << "Invalid input file: " << opts.path << std::endl;
    return 3;
  }

  Json::String basePath = removeSuffix(opts.path, ".json");
  if (!opts.parseOnly && basePath.empty()) {
    printf("Bad input path. Path does not end with '.expected':\n%s\n",
           opts.path.c_str());
    std::cerr << "Bad input path '" << opts.path
              << "'. Must end with '.expected'" << std::endl;
    return 3;
  }

@@ -263,34 +292,47 @@ static int runTest(Options const& opts) {

  Json::Value root;
  exitCode = parseAndSaveValueTree(input, actualPath, "input", opts.features,
                                   opts.parseOnly, &root);
                                   opts.parseOnly, &root, use_legacy);
  if (exitCode || opts.parseOnly) {
    return exitCode;
  }

  Json::String rewrite;
  exitCode = rewriteValueTree(rewritePath, root, opts.write, &rewrite);
  if (exitCode) {
    return exitCode;
  }

  Json::Value rewriteRoot;
  exitCode = parseAndSaveValueTree(rewrite, rewriteActualPath, "rewrite",
                                   opts.features, opts.parseOnly, &rewriteRoot);
  if (exitCode) {
    return exitCode;
  }
  return 0;
                                   opts.features, opts.parseOnly, &rewriteRoot,
                                   use_legacy);

  return exitCode;
}

int main(int argc, const char* argv[]) {
  Options opts;
  try {
    int exitCode = parseCommandLine(argc, argv, &opts);
    if (exitCode != 0) {
      printf("Failed to parse command-line.");
      std::cerr << "Failed to parse command-line." << std::endl;
      return exitCode;
    }
    return runTest(opts);

    const int modern_return_code = runTest(opts, false);
    if (modern_return_code) {
      return modern_return_code;
    }

    const std::string filename =
        opts.path.substr(opts.path.find_last_of("\\/") + 1);
    const bool should_run_legacy = (filename.rfind("legacy_", 0) == 0);
    if (should_run_legacy) {
      return runTest(opts, true);
    }
  } catch (const std::exception& e) {
    printf("Unhandled exception:\n%s\n", e.what());
    std::cerr << "Unhandled exception:" << std::endl << e.what() << std::endl;
    return 1;
  }
}
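
The rewritten parseAndSaveValueTree() above drives the modern Json::CharReaderBuilder / Json::CharReader API and keeps Json::Reader only as a legacy fallback path. Pulled out of the test harness, the modern path looks roughly like the sketch below; the two settings shown are merely examples of the keys exposed through builder.settings_ / operator[], and the helper function name is invented for illustration.

#include <iostream>
#include <memory>
#include <string>

#include <json/json.h>

// Minimal sketch of the CharReaderBuilder-based parsing used by the test
// runner above (error handling trimmed to the essentials).
bool parseDocument(const std::string& doc, Json::Value* root) {
  Json::CharReaderBuilder builder;
  builder["allowComments"] = true; // example setting
  builder["strictRoot"] = false;   // example setting
  const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  Json::String errs;
  if (!reader->parse(doc.data(), doc.data() + doc.size(), root, &errs)) {
    std::cerr << "Failed to parse: " << errs << "\n";
    return false;
  }
  return true;
}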

@@ -1,13 +1,7 @@
if( CMAKE_COMPILER_IS_GNUCXX )
  #Get compiler version.
  execute_process( COMMAND ${CMAKE_CXX_COMPILER} -dumpversion
                   OUTPUT_VARIABLE GNUCXX_VERSION )

if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU" AND CMAKE_CXX_COMPILER_VERSION VERSION_GREATER 4.1.2)
  #-Werror=* was introduced -after- GCC 4.1.2
  if( GNUCXX_VERSION VERSION_GREATER 4.1.2 )
    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Werror=strict-aliasing")
  endif()
endif( CMAKE_COMPILER_IS_GNUCXX )
  add_compile_options("-Werror=strict-aliasing")
endif()

include(CheckIncludeFileCXX)
include(CheckTypeSize)
@@ -34,59 +28,47 @@ endif()

if(NOT (HAVE_CLOCALE AND HAVE_LCONV_SIZE AND HAVE_DECIMAL_POINT AND HAVE_LOCALECONV))
  message(WARNING "Locale functionality is not supported")
  add_compile_definitions(JSONCPP_NO_LOCALE_SUPPORT)
  if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12.0)
    add_compile_definitions(JSONCPP_NO_LOCALE_SUPPORT)
  else()
    add_definitions(-DJSONCPP_NO_LOCALE_SUPPORT)
  endif()
endif()

set( JSONCPP_INCLUDE_DIR ../../include )
set(JSONCPP_INCLUDE_DIR ../../include)

set( PUBLIC_HEADERS
set(PUBLIC_HEADERS
  ${JSONCPP_INCLUDE_DIR}/json/config.h
  ${JSONCPP_INCLUDE_DIR}/json/forwards.h
  ${JSONCPP_INCLUDE_DIR}/json/features.h
  ${JSONCPP_INCLUDE_DIR}/json/json_features.h
  ${JSONCPP_INCLUDE_DIR}/json/value.h
  ${JSONCPP_INCLUDE_DIR}/json/reader.h
  ${JSONCPP_INCLUDE_DIR}/json/version.h
  ${JSONCPP_INCLUDE_DIR}/json/writer.h
  ${JSONCPP_INCLUDE_DIR}/json/assertions.h
  ${PROJECT_BINARY_DIR}/include/json/version.h
)
)

source_group( "Public API" FILES ${PUBLIC_HEADERS} )
source_group("Public API" FILES ${PUBLIC_HEADERS})

set(jsoncpp_sources
  json_tool.h
  json_reader.cpp
  json_valueiterator.inl
  json_value.cpp
  json_writer.cpp
  version.h.in)
set(JSONCPP_SOURCES
  json_tool.h
  json_reader.cpp
  json_valueiterator.inl
  json_value.cpp
  json_writer.cpp
)

# Install instructions for this target
if(JSONCPP_WITH_CMAKE_PACKAGE)
  set(INSTALL_EXPORT EXPORT jsoncpp)
else(JSONCPP_WITH_CMAKE_PACKAGE)
else()
  set(INSTALL_EXPORT)
endif()

if(BUILD_SHARED_LIBS)
  add_compile_definitions( JSON_DLL_BUILD )
endif()


add_library(jsoncpp_lib ${PUBLIC_HEADERS} ${jsoncpp_sources})
set_target_properties( jsoncpp_lib PROPERTIES VERSION ${JSONCPP_VERSION} SOVERSION ${JSONCPP_SOVERSION})
set_target_properties( jsoncpp_lib PROPERTIES OUTPUT_NAME jsoncpp
                       DEBUG_OUTPUT_NAME jsoncpp${DEBUG_LIBNAME_SUFFIX} )
set_target_properties( jsoncpp_lib PROPERTIES POSITION_INDEPENDENT_CODE ON)

# Set library's runtime search path on OSX
if(APPLE)
  set_target_properties( jsoncpp_lib PROPERTIES INSTALL_RPATH "@loader_path/." )
endif()

# Specify compiler features required when compiling a given target.
# See https://cmake.org/cmake/help/v3.1/prop_gbl/CMAKE_CXX_KNOWN_FEATURES.html#prop_gbl:CMAKE_CXX_KNOWN_FEATURES
# for complete list of features available
target_compile_features(jsoncpp_lib PUBLIC
list(APPEND REQUIRED_FEATURES
  cxx_std_11 # Compiler mode is aware of C++ 11.
  #MSVC 1900 cxx_alignas # Alignment control alignas, as defined in N2341.
  #MSVC 1900 cxx_alignof # Alignment control alignof, as defined in N2341.
@@ -133,14 +115,106 @@ target_compile_features(jsoncpp_lib PUBLIC
  cxx_variadic_templates # Variadic templates, as defined in N2242.
)

install( TARGETS jsoncpp_lib ${INSTALL_EXPORT}
         RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
         LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
         ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR})

if(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
  target_include_directories( jsoncpp_lib PUBLIC
    $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
    $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
    $<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>)
if(BUILD_SHARED_LIBS)
  if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12.0)
    add_compile_definitions(JSON_DLL_BUILD)
  else()
    add_definitions(-DJSON_DLL_BUILD)
  endif()

  set(SHARED_LIB ${PROJECT_NAME}_lib)
  add_library(${SHARED_LIB} SHARED ${PUBLIC_HEADERS} ${JSONCPP_SOURCES})
  set_target_properties(${SHARED_LIB} PROPERTIES
    OUTPUT_NAME jsoncpp
    VERSION ${PROJECT_VERSION}
    SOVERSION ${PROJECT_SOVERSION}
    POSITION_INDEPENDENT_CODE ON
  )

  # Set library's runtime search path on OSX
  if(APPLE)
    set_target_properties(${SHARED_LIB} PROPERTIES INSTALL_RPATH "@loader_path/.")
  endif()

  target_compile_features(${SHARED_LIB} PUBLIC ${REQUIRED_FEATURES})

  if(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
    target_include_directories(${SHARED_LIB} PUBLIC
      $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
      $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
      $<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
    )
  endif()

  list(APPEND CMAKE_TARGETS ${SHARED_LIB})
endif()

if(BUILD_STATIC_LIBS)
  set(STATIC_LIB ${PROJECT_NAME}_static)
  add_library(${STATIC_LIB} STATIC ${PUBLIC_HEADERS} ${JSONCPP_SOURCES})

  # avoid name clashes on windows as the shared import lib is alse named jsoncpp.lib
  if(NOT DEFINED STATIC_SUFFIX AND BUILD_SHARED_LIBS)
    set(STATIC_SUFFIX "_static")
  endif()

  set_target_properties(${STATIC_LIB} PROPERTIES
    OUTPUT_NAME jsoncpp${STATIC_SUFFIX}
    VERSION ${PROJECT_VERSION}
  )

  # Set library's runtime search path on OSX
  if(APPLE)
    set_target_properties(${STATIC_LIB} PROPERTIES INSTALL_RPATH "@loader_path/.")
  endif()

  target_compile_features(${STATIC_LIB} PUBLIC ${REQUIRED_FEATURES})

  if(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
    target_include_directories(${STATIC_LIB} PUBLIC
      $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
      $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
      $<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
    )
  endif()

  list(APPEND CMAKE_TARGETS ${STATIC_LIB})
endif()

if(BUILD_OBJECT_LIBS)
  set(OBJECT_LIB ${PROJECT_NAME}_object)
  add_library(${OBJECT_LIB} OBJECT ${PUBLIC_HEADERS} ${JSONCPP_SOURCES})

  set_target_properties(${OBJECT_LIB} PROPERTIES
    OUTPUT_NAME jsoncpp
    VERSION ${PROJECT_VERSION}
    SOVERSION ${PROJECT_SOVERSION}
    POSITION_INDEPENDENT_CODE ON
  )

  # Set library's runtime search path on OSX
  if(APPLE)
    set_target_properties(${OBJECT_LIB} PROPERTIES INSTALL_RPATH "@loader_path/.")
  endif()

  target_compile_features(${OBJECT_LIB} PUBLIC ${REQUIRED_FEATURES})

  if(NOT CMAKE_VERSION VERSION_LESS 2.8.11)
    target_include_directories(${OBJECT_LIB} PUBLIC
      $<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
      $<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
      $<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
    )
  endif()

  list(APPEND CMAKE_TARGETS ${OBJECT_LIB})
endif()

install(TARGETS ${CMAKE_TARGETS} ${INSTALL_EXPORT}
  RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
  LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
  ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
  OBJECTS DESTINATION ${CMAKE_INSTALL_LIBDIR}
)

@@ -10,8 +10,10 @@
#include <json/reader.h>
#include <json/value.h>
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <algorithm>
#include <cassert>
#include <cstring>
#include <iostream>
#include <istream>
#include <limits>
#include <memory>
@@ -51,9 +53,9 @@ static size_t const stackLimit_g =
namespace Json {

#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
typedef std::unique_ptr<CharReader> CharReaderPtr;
using CharReaderPtr = std::unique_ptr<CharReader>;
#else
typedef std::auto_ptr<CharReader> CharReaderPtr;
using CharReaderPtr = std::auto_ptr<CharReader>;
#endif

// Implementation of class Features
@@ -76,25 +78,17 @@ Features Features::strictMode() {
// ////////////////////////////////

bool Reader::containsNewLine(Reader::Location begin, Reader::Location end) {
  for (; begin < end; ++begin)
    if (*begin == '\n' || *begin == '\r')
      return true;
  return false;
  return std::any_of(begin, end, [](char b) { return b == '\n' || b == '\r'; });
}

// Class Reader
// //////////////////////////////////////////////////////////////////

Reader::Reader()
    : errors_(), document_(), commentsBefore_(), features_(Features::all()) {}
Reader::Reader() : features_(Features::all()) {}

Reader::Reader(const Features& features)
    : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(),
      lastValue_(), commentsBefore_(), features_(features), collectComments_() {
}
Reader::Reader(const Features& features) : features_(features) {}

bool Reader::parse(const std::string& document,
                   Value& root,
bool Reader::parse(const std::string& document, Value& root,
                   bool collectComments) {
  document_.assign(document.begin(), document.end());
  const char* begin = document_.c_str();
@@ -111,13 +105,11 @@ bool Reader::parse(std::istream& is, Value& root, bool collectComments) {
  // Since String is reference-counted, this at least does not
  // create an extra copy.
  String doc;
  std::getline(is, doc, (char)EOF);
  std::getline(is, doc, static_cast<char> EOF);
  return parse(doc.data(), doc.data() + doc.size(), root, collectComments);
}

bool Reader::parse(const char* beginDoc,
                   const char* endDoc,
                   Value& root,
bool Reader::parse(const char* beginDoc, const char* endDoc, Value& root,
                   bool collectComments) {
  if (!features_.allowComments_) {
    collectComments = false;
@@ -311,7 +303,7 @@ bool Reader::readToken(Token& token) {
  if (!ok)
    token.type_ = tokenError;
  token.end_ = current_;
  return true;
  return ok;
}

void Reader::skipSpaces() {
@@ -324,7 +316,7 @@ void Reader::skipSpaces() {
  }
}

bool Reader::match(Location pattern, int patternLength) {
bool Reader::match(const Char* pattern, int patternLength) {
  if (end_ - current_ < patternLength)
    return false;
  int index = patternLength;
@@ -377,8 +369,7 @@ String Reader::normalizeEOL(Reader::Location begin, Reader::Location end) {
  return normalized;
}

void Reader::addComment(Location begin,
                        Location end,
void Reader::addComment(Location begin, Location end,
                        CommentPlacement placement) {
  assert(collectComments_);
  const String& normalized = normalizeEOL(begin, end);
@@ -416,7 +407,7 @@ bool Reader::readCppStyleComment() {
}

void Reader::readNumber() {
  const char* p = current_;
  Location p = current_;
  char c = '0'; // stopgap for already consumed character
  // integral part
  while (c >= '0' && c <= '9')
@@ -471,7 +462,7 @@ bool Reader::readObject(Token& token) {
      Value numberName;
      if (!decodeNumber(tokenName, numberName))
        return recoverFromError(tokenObjectEnd);
      name = String(numberName.asCString());
      name = numberName.asString();
    } else {
      break;
    }
@@ -636,7 +627,7 @@ bool Reader::decodeString(Token& token, String& decoded) {
    Char c = *current++;
    if (c == '"')
      break;
    else if (c == '\\') {
    if (c == '\\') {
      if (current == end)
        return addError("Empty escape sequence in string", token, current);
      Char escape = *current++;
@@ -681,10 +672,8 @@ bool Reader::decodeString(Token& token, String& decoded) {
  return true;
}

bool Reader::decodeUnicodeCodePoint(Token& token,
                                    Location& current,
                                    Location end,
                                    unsigned int& unicode) {
bool Reader::decodeUnicodeCodePoint(Token& token, Location& current,
                                    Location end, unsigned int& unicode) {

  if (!decodeUnicodeEscapeSequence(token, current, end, unicode))
    return false;
@@ -708,8 +697,7 @@ bool Reader::decodeUnicodeCodePoint(Token& token,
  return true;
}

bool Reader::decodeUnicodeEscapeSequence(Token& token,
                                         Location& current,
bool Reader::decodeUnicodeEscapeSequence(Token& token, Location& current,
                                         Location end,
                                         unsigned int& ret_unicode) {
  if (end - current < 4)
@@ -757,8 +745,7 @@ bool Reader::recoverFromError(TokenType skipUntilToken) {
  return false;
}

bool Reader::addErrorAndRecover(const String& message,
                                Token& token,
bool Reader::addErrorAndRecover(const String& message, Token& token,
                                TokenType skipUntilToken) {
  addError(message, token);
  return recoverFromError(skipUntilToken);
@@ -772,8 +759,7 @@ Reader::Char Reader::getNextChar() {
  return *current_++;
}

void Reader::getLocationLineAndColumn(Location location,
                                      int& line,
void Reader::getLocationLineAndColumn(Location location, int& line,
                                      int& column) const {
  Location current = begin_;
  Location lastLineStart = current;
@@ -849,8 +835,7 @@ bool Reader::pushError(const Value& value, const String& message) {
  return true;
}

bool Reader::pushError(const Value& value,
                       const String& message,
bool Reader::pushError(const Value& value, const String& message,
                       const Value& extra) {
  ptrdiff_t const length = end_ - begin_;
  if (value.getOffsetStart() > length || value.getOffsetLimit() > length ||
@@ -876,6 +861,7 @@ class OurFeatures {
public:
  static OurFeatures all();
  bool allowComments_;
  bool allowTrailingCommas_;
  bool strictRoot_;
  bool allowDroppedNullPlaceholders_;
  bool allowNumericKeys_;
@@ -883,6 +869,7 @@ public:
  bool failIfExtra_;
  bool rejectDupKeys_;
  bool allowSpecialFloats_;
  bool skipBom_;
  size_t stackLimit_;
}; // OurFeatures

@@ -895,24 +882,19 @@ OurFeatures OurFeatures::all() { return {}; }
// for implementing JSON reading.
class OurReader {
public:
  typedef char Char;
  typedef const Char* Location;
  using Char = char;
  using Location = const Char*;
  struct StructuredError {
    ptrdiff_t offset_start;
    ptrdiff_t offset_limit;
    String message;
  };

  OurReader(OurFeatures const& features);
  bool parse(const char* beginDoc,
             const char* endDoc,
             Value& root,
  explicit OurReader(OurFeatures const& features);
  bool parse(const char* beginDoc, const char* endDoc, Value& root,
             bool collectComments = true);
  String getFormattedErrorMessages() const;
  std::vector<StructuredError> getStructuredErrors() const;
  bool pushError(const Value& value, const String& message);
  bool pushError(const Value& value, const String& message, const Value& extra);
  bool good() const;

private:
  OurReader(OurReader const&); // no impl
@@ -952,13 +934,14 @@ private:
    Location extra_;
  };

  typedef std::deque<ErrorInfo> Errors;
  using Errors = std::deque<ErrorInfo>;

  bool readToken(Token& token);
  void skipSpaces();
  bool match(Location pattern, int patternLength);
  void skipBom(bool skipBom);
  bool match(const Char* pattern, int patternLength);
  bool readComment();
  bool readCStyleComment();
  bool readCStyleComment(bool* containsNewLineResult);
  bool readCppStyleComment();
  bool readString();
  bool readStringSingleQuote();
@@ -972,24 +955,19 @@ private:
  bool decodeString(Token& token, String& decoded);
  bool decodeDouble(Token& token);
  bool decodeDouble(Token& token, Value& decoded);
  bool decodeUnicodeCodePoint(Token& token,
                              Location& current,
                              Location end,
  bool decodeUnicodeCodePoint(Token& token, Location& current, Location end,
                              unsigned int& unicode);
  bool decodeUnicodeEscapeSequence(Token& token,
                                   Location& current,
                                   Location end,
                                   unsigned int& unicode);
  bool decodeUnicodeEscapeSequence(Token& token, Location& current,
                                   Location end, unsigned int& unicode);
  bool addError(const String& message, Token& token, Location extra = nullptr);
  bool recoverFromError(TokenType skipUntilToken);
  bool addErrorAndRecover(const String& message,
                          Token& token,
  bool addErrorAndRecover(const String& message, Token& token,
                          TokenType skipUntilToken);
  void skipUntilSpace();
  Value& currentValue();
  Char getNextChar();
  void
  getLocationLineAndColumn(Location location, int& line, int& column) const;
  void getLocationLineAndColumn(Location location, int& line,
                                int& column) const;
  String getLocationLineAndColumn(Location location) const;
  void addComment(Location begin, Location end, CommentPlacement placement);
  void skipCommentTokens(Token& token);
@@ -997,39 +975,33 @@ private:
  static String normalizeEOL(Location begin, Location end);
  static bool containsNewLine(Location begin, Location end);

  typedef std::stack<Value*> Nodes;
  Nodes nodes_;
  Errors errors_;
  String document_;
  Location begin_;
  Location end_;
  Location current_;
  Location lastValueEnd_;
  Value* lastValue_;
  String commentsBefore_;
  using Nodes = std::stack<Value*>;

  Nodes nodes_{};
  Errors errors_{};
  String document_{};
  Location begin_ = nullptr;
  Location end_ = nullptr;
  Location current_ = nullptr;
  Location lastValueEnd_ = nullptr;
  Value* lastValue_ = nullptr;
  bool lastValueHasAComment_ = false;
  String commentsBefore_{};

  OurFeatures const features_;
  bool collectComments_;
  bool collectComments_ = false;
}; // OurReader

// complete copy of Read impl, for OurReader

bool OurReader::containsNewLine(OurReader::Location begin,
                                OurReader::Location end) {
  for (; begin < end; ++begin)
    if (*begin == '\n' || *begin == '\r')
      return true;
  return false;
  return std::any_of(begin, end, [](char b) { return b == '\n' || b == '\r'; });
}

OurReader::OurReader(OurFeatures const& features)
    : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(),
      lastValue_(), commentsBefore_(), features_(features), collectComments_() {
}
OurReader::OurReader(OurFeatures const& features) : features_(features) {}

bool OurReader::parse(const char* beginDoc,
                      const char* endDoc,
                      Value& root,
bool OurReader::parse(const char* beginDoc, const char* endDoc, Value& root,
                      bool collectComments) {
  if (!features_.allowComments_) {
    collectComments = false;
@@ -1047,16 +1019,15 @@ bool OurReader::parse(const char* beginDoc,
  nodes_.pop();
  nodes_.push(&root);

  // skip byte order mark if it exists at the beginning of the UTF-8 text.
  skipBom(features_.skipBom_);
  bool successful = readValue();
  nodes_.pop();
  Token token;
  skipCommentTokens(token);
  if (features_.failIfExtra_) {
    if ((features_.strictRoot_ || token.type_ != tokenError) &&
        token.type_ != tokenEndOfStream) {
      addError("Extra non-whitespace after JSON value.", token);
      return false;
    }
  if (features_.failIfExtra_ && (token.type_ != tokenEndOfStream)) {
    addError("Extra non-whitespace after JSON value.", token);
    return false;
  }
  if (collectComments_ && !commentsBefore_.empty())
    root.setComment(commentsBefore_, commentAfter);
@@ -1161,6 +1132,7 @@ bool OurReader::readValue() {

  if (collectComments_) {
    lastValueEnd_ = current_;
    lastValueHasAComment_ = false;
    lastValue_ = &currentValue();
  }

@@ -1203,8 +1175,11 @@ bool OurReader::readToken(Token& token) {
    if (features_.allowSingleQuotes_) {
      token.type_ = tokenString;
      ok = readStringSingleQuote();
      break;
    } // else fall through
    } else {
      // If we don't allow single quotes, this is a failure case.
      ok = false;
    }
    break;
  case '/':
    token.type_ = tokenComment;
    ok = readComment();
@@ -1230,6 +1205,14 @@ bool OurReader::readToken(Token& token) {
      ok = features_.allowSpecialFloats_ && match("nfinity", 7);
    }
    break;
  case '+':
    if (readNumber(true)) {
      token.type_ = tokenNumber;
    } else {
      token.type_ = tokenPosInf;
      ok = features_.allowSpecialFloats_ && match("nfinity", 7);
    }
    break;
  case 't':
    token.type_ = tokenTrue;
    ok = match("rue", 3);
@@ -1274,7 +1257,7 @@ bool OurReader::readToken(Token& token) {
  if (!ok)
    token.type_ = tokenError;
  token.end_ = current_;
  return true;
  return ok;
}

void OurReader::skipSpaces() {
@@ -1287,7 +1270,17 @@ void OurReader::skipSpaces() {
  }
}

bool OurReader::match(Location pattern, int patternLength) {
void OurReader::skipBom(bool skipBom) {
  // The default behavior is to skip BOM.
  if (skipBom) {
    if ((end_ - begin_) >= 3 && strncmp(begin_, "\xEF\xBB\xBF", 3) == 0) {
      begin_ += 3;
      current_ = begin_;
    }
  }
}

bool OurReader::match(const Char* pattern, int patternLength) {
  if (end_ - current_ < patternLength)
    return false;
  int index = patternLength;
@@ -1299,21 +1292,32 @@ bool OurReader::match(Location pattern, int patternLength) {
}

bool OurReader::readComment() {
  Location commentBegin = current_ - 1;
  Char c = getNextChar();
  const Location commentBegin = current_ - 1;
  const Char c = getNextChar();
  bool successful = false;
  if (c == '*')
    successful = readCStyleComment();
  else if (c == '/')
  bool cStyleWithEmbeddedNewline = false;

  const bool isCStyleComment = (c == '*');
  const bool isCppStyleComment = (c == '/');
  if (isCStyleComment) {
    successful = readCStyleComment(&cStyleWithEmbeddedNewline);
  } else if (isCppStyleComment) {
    successful = readCppStyleComment();
  }

  if (!successful)
    return false;

  if (collectComments_) {
    CommentPlacement placement = commentBefore;
    if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin)) {
      if (c != '*' || !containsNewLine(commentBegin, current_))
        placement = commentAfterOnSameLine;

    if (!lastValueHasAComment_) {
      if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin)) {
        if (isCppStyleComment || !cStyleWithEmbeddedNewline) {
          placement = commentAfterOnSameLine;
          lastValueHasAComment_ = true;
        }
      }
    }

    addComment(commentBegin, current_, placement);
@@ -1341,8 +1345,7 @@ String OurReader::normalizeEOL(OurReader::Location begin,
  return normalized;
}

void OurReader::addComment(Location begin,
                           Location end,
void OurReader::addComment(Location begin, Location end,
                           CommentPlacement placement) {
  assert(collectComments_);
  const String& normalized = normalizeEOL(begin, end);
@@ -1354,12 +1357,17 @@ void OurReader::addComment(Location begin,
  }
}

bool OurReader::readCStyleComment() {
bool OurReader::readCStyleComment(bool* containsNewLineResult) {
  *containsNewLineResult = false;

  while ((current_ + 1) < end_) {
    Char c = getNextChar();
    if (c == '*' && *current_ == '/')
      break;
    if (c == '\n')
      *containsNewLineResult = true;
  }

  return getNextChar() == '/';
}

@@ -1380,7 +1388,7 @@ bool OurReader::readCppStyleComment() {
}

bool OurReader::readNumber(bool checkInf) {
  const char* p = current_;
  Location p = current_;
  if (checkInf && p != end_ && *p == 'I') {
    current_ = ++p;
    return false;
@@ -1441,7 +1449,9 @@ bool OurReader::readObject(Token& token) {
    initialTokenOk = readToken(tokenName);
    if (!initialTokenOk)
      break;
    if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
    if (tokenName.type_ == tokenObjectEnd &&
        (name.empty() ||
         features_.allowTrailingCommas_)) // empty object or trailing comma
      return true;
    name.clear();
    if (tokenName.type_ == tokenString) {
@@ -1495,15 +1505,19 @@ bool OurReader::readArray(Token& token) {
  Value init(arrayValue);
  currentValue().swapPayload(init);
  currentValue().setOffsetStart(token.start_ - begin_);
  skipSpaces();
  if (current_ != end_ && *current_ == ']') // empty array
  {
    Token endArray;
    readToken(endArray);
    return true;
  }
  int index = 0;
  for (;;) {
    skipSpaces();
    if (current_ != end_ && *current_ == ']' &&
        (index == 0 ||
         (features_.allowTrailingCommas_ &&
          !features_.allowDroppedNullPlaceholders_))) // empty array or trailing
                                                      // comma
    {
      Token endArray;
      readToken(endArray);
      return true;
    }
    Value& value = currentValue()[index++];
    nodes_.push(&value);
    bool ok = readValue();
@@ -1544,20 +1558,45 @@ bool OurReader::decodeNumber(Token& token, Value& decoded) {
  // larger than the maximum supported value of an integer then
  // we decode the number as a double.
  Location current = token.start_;
  bool isNegative = *current == '-';
  if (isNegative)
  const bool isNegative = *current == '-';
  if (isNegative) {
    ++current;
  }

  // TODO(issue #960): Change to constexpr
  static const auto positive_threshold = Value::maxLargestUInt / 10;
  static const auto positive_last_digit = Value::maxLargestUInt % 10;
  static const auto negative_threshold =
      Value::LargestUInt(Value::minLargestInt) / 10;
  static const auto negative_last_digit =
      Value::LargestUInt(Value::minLargestInt) % 10;
  // We assume we can represent the largest and smallest integer types as
  // unsigned integers with separate sign. This is only true if they can fit
  // into an unsigned integer.
  static_assert(Value::maxLargestInt <= Value::maxLargestUInt,
                "Int must be smaller than UInt");

  const auto threshold = isNegative ? negative_threshold : positive_threshold;
  const auto last_digit =
  // We need to convert minLargestInt into a positive number. The easiest way
  // to do this conversion is to assume our "threshold" value of minLargestInt
  // divided by 10 can fit in maxLargestInt when absolute valued. This should
  // be a safe assumption.
  static_assert(Value::minLargestInt <= -Value::maxLargestInt,
                "The absolute value of minLargestInt must be greater than or "
                "equal to maxLargestInt");
  static_assert(Value::minLargestInt / 10 >= -Value::maxLargestInt,
                "The absolute value of minLargestInt must be only 1 magnitude "
                "larger than maxLargest Int");

  static constexpr Value::LargestUInt positive_threshold =
      Value::maxLargestUInt / 10;
  static constexpr Value::UInt positive_last_digit = Value::maxLargestUInt % 10;

  // For the negative values, we have to be more careful. Since typically
  // -Value::minLargestInt will cause an overflow, we first divide by 10 and
  // then take the inverse. This assumes that minLargestInt is only a single
  // power of 10 different in magnitude, which we check above. For the last
  // digit, we take the modulus before negating for the same reason.
  static constexpr auto negative_threshold =
      Value::LargestUInt(-(Value::minLargestInt / 10));
  static constexpr auto negative_last_digit =
      Value::UInt(-(Value::minLargestInt % 10));

  const Value::LargestUInt threshold =
      isNegative ? negative_threshold : positive_threshold;
  const Value::UInt max_last_digit =
      isNegative ? negative_last_digit : positive_last_digit;

  Value::LargestUInt value = 0;
@@ -1573,19 +1612,23 @@ bool OurReader::decodeNumber(Token& token, Value& decoded) {
    // b) this is the last digit, or
    // c) it's small enough to fit in that rounding delta, we're okay.
    // Otherwise treat this number as a double to avoid overflow.
    if (value > threshold || current != token.end_ || digit > last_digit) {
    if (value > threshold || current != token.end_ ||
        digit > max_last_digit) {
      return decodeDouble(token, decoded);
    }
  }
    value = value * 10 + digit;
  }

  if (isNegative)
    decoded = -Value::LargestInt(value);
  else if (value <= Value::LargestUInt(Value::maxLargestInt))
  if (isNegative) {
    // We use the same magnitude assumption here, just in case.
    const auto last_digit = static_cast<Value::UInt>(value % 10);
    decoded = -Value::LargestInt(value / 10) * 10 - last_digit;
  } else if (value <= Value::LargestUInt(Value::maxLargestInt)) {
    decoded = Value::LargestInt(value);
  else
  } else {
    decoded = value;
  }

  return true;
}
@@ -1602,37 +1645,12 @@ bool OurReader::decodeDouble(Token& token) {

bool OurReader::decodeDouble(Token& token, Value& decoded) {
  double value = 0;
  const int bufferSize = 32;
  int count;
  ptrdiff_t const length = token.end_ - token.start_;

  // Sanity check to avoid buffer overflow exploits.
  if (length < 0) {
    return addError("Unable to parse token length", token);
  }
  auto const ulength = static_cast<size_t>(length);

  // Avoid using a string constant for the format control string given to
  // sscanf, as this can cause hard to debug crashes on OS X. See here for more
  // info:
  //
  // http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html
  char format[] = "%lf";

  if (length <= bufferSize) {
    Char buffer[bufferSize + 1];
    memcpy(buffer, token.start_, ulength);
    buffer[length] = 0;
    fixNumericLocaleInput(buffer, buffer + length);
    count = sscanf(buffer, format, &value);
  } else {
    String buffer(token.start_, token.end_);
    count = sscanf(buffer.c_str(), format, &value);
  }

  if (count != 1)
  const String buffer(token.start_, token.end_);
  IStringStream is(buffer);
  if (!(is >> value)) {
    return addError(
        "'" + String(token.start_, token.end_) + "' is not a number.", token);
  }
  decoded = value;
  return true;
}
@@ -1656,7 +1674,7 @@ bool OurReader::decodeString(Token& token, String& decoded) {
    Char c = *current++;
    if (c == '"')
      break;
    else if (c == '\\') {
    if (c == '\\') {
      if (current == end)
        return addError("Empty escape sequence in string", token, current);
      Char escape = *current++;
@@ -1701,10 +1719,8 @@ bool OurReader::decodeString(Token& token, String& decoded) {
  return true;
}
|
||||
bool OurReader::decodeUnicodeCodePoint(Token& token,
|
||||
Location& current,
|
||||
Location end,
|
||||
unsigned int& unicode) {
|
||||
bool OurReader::decodeUnicodeCodePoint(Token& token, Location& current,
|
||||
Location end, unsigned int& unicode) {
|
||||
|
||||
if (!decodeUnicodeEscapeSequence(token, current, end, unicode))
|
||||
return false;
|
||||
@@ -1728,8 +1744,7 @@ bool OurReader::decodeUnicodeCodePoint(Token& token,
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OurReader::decodeUnicodeEscapeSequence(Token& token,
|
||||
Location& current,
|
||||
bool OurReader::decodeUnicodeEscapeSequence(Token& token, Location& current,
|
||||
Location end,
|
||||
unsigned int& ret_unicode) {
|
||||
if (end - current < 4)
|
||||
@@ -1777,8 +1792,7 @@ bool OurReader::recoverFromError(TokenType skipUntilToken) {
|
||||
return false;
|
||||
}
|
||||
|
||||
bool OurReader::addErrorAndRecover(const String& message,
|
||||
Token& token,
|
||||
bool OurReader::addErrorAndRecover(const String& message, Token& token,
|
||||
TokenType skipUntilToken) {
|
||||
addError(message, token);
|
||||
return recoverFromError(skipUntilToken);
|
||||
@@ -1792,8 +1806,7 @@ OurReader::Char OurReader::getNextChar() {
|
||||
return *current_++;
|
||||
}
|
||||
|
||||
void OurReader::getLocationLineAndColumn(Location location,
|
||||
int& line,
|
||||
void OurReader::getLocationLineAndColumn(Location location, int& line,
|
||||
int& column) const {
|
||||
Location current = begin_;
|
||||
Location lastLineStart = current;
|
||||
@@ -1848,43 +1861,6 @@ std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const {
|
||||
return allErrors;
|
||||
}
|
||||
|
||||
bool OurReader::pushError(const Value& value, const String& message) {
|
||||
ptrdiff_t length = end_ - begin_;
|
||||
if (value.getOffsetStart() > length || value.getOffsetLimit() > length)
|
||||
return false;
|
||||
Token token;
|
||||
token.type_ = tokenError;
|
||||
token.start_ = begin_ + value.getOffsetStart();
|
||||
token.end_ = begin_ + value.getOffsetLimit();
|
||||
ErrorInfo info;
|
||||
info.token_ = token;
|
||||
info.message_ = message;
|
||||
info.extra_ = nullptr;
|
||||
errors_.push_back(info);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OurReader::pushError(const Value& value,
|
||||
const String& message,
|
||||
const Value& extra) {
|
||||
ptrdiff_t length = end_ - begin_;
|
||||
if (value.getOffsetStart() > length || value.getOffsetLimit() > length ||
|
||||
extra.getOffsetLimit() > length)
|
||||
return false;
|
||||
Token token;
|
||||
token.type_ = tokenError;
|
||||
token.start_ = begin_ + value.getOffsetStart();
|
||||
token.end_ = begin_ + value.getOffsetLimit();
|
||||
ErrorInfo info;
|
||||
info.token_ = token;
|
||||
info.message_ = message;
|
||||
info.extra_ = begin_ + extra.getOffsetStart();
|
||||
errors_.push_back(info);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool OurReader::good() const { return errors_.empty(); }
|
||||
|
||||
class OurCharReader : public CharReader {
|
||||
bool const collectComments_;
|
||||
OurReader reader_;
|
||||
@@ -1892,9 +1868,7 @@ class OurCharReader : public CharReader {
|
||||
public:
|
||||
OurCharReader(bool collectComments, OurFeatures const& features)
|
||||
: collectComments_(collectComments), reader_(features) {}
|
||||
bool parse(char const* beginDoc,
|
||||
char const* endDoc,
|
||||
Value* root,
|
||||
bool parse(char const* beginDoc, char const* endDoc, Value* root,
|
||||
String* errs) override {
|
||||
bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_);
|
||||
if (errs) {
|
||||
@@ -1910,6 +1884,7 @@ CharReader* CharReaderBuilder::newCharReader() const {
|
||||
bool collectComments = settings_["collectComments"].asBool();
|
||||
OurFeatures features = OurFeatures::all();
|
||||
features.allowComments_ = settings_["allowComments"].asBool();
|
||||
features.allowTrailingCommas_ = settings_["allowTrailingCommas"].asBool();
|
||||
features.strictRoot_ = settings_["strictRoot"].asBool();
|
||||
features.allowDroppedNullPlaceholders_ =
|
||||
settings_["allowDroppedNullPlaceholders"].asBool();
|
||||
@@ -1922,38 +1897,37 @@ CharReader* CharReaderBuilder::newCharReader() const {
|
||||
features.failIfExtra_ = settings_["failIfExtra"].asBool();
|
||||
features.rejectDupKeys_ = settings_["rejectDupKeys"].asBool();
|
||||
features.allowSpecialFloats_ = settings_["allowSpecialFloats"].asBool();
|
||||
features.skipBom_ = settings_["skipBom"].asBool();
|
||||
return new OurCharReader(collectComments, features);
|
||||
}
|
||||
static void getValidReaderKeys(std::set<String>* valid_keys) {
|
||||
valid_keys->clear();
|
||||
valid_keys->insert("collectComments");
|
||||
valid_keys->insert("allowComments");
|
||||
valid_keys->insert("strictRoot");
|
||||
valid_keys->insert("allowDroppedNullPlaceholders");
|
||||
valid_keys->insert("allowNumericKeys");
|
||||
valid_keys->insert("allowSingleQuotes");
|
||||
valid_keys->insert("stackLimit");
|
||||
valid_keys->insert("failIfExtra");
|
||||
valid_keys->insert("rejectDupKeys");
|
||||
valid_keys->insert("allowSpecialFloats");
|
||||
}
|
||||
|
||||
bool CharReaderBuilder::validate(Json::Value* invalid) const {
|
||||
Json::Value my_invalid;
|
||||
if (!invalid)
|
||||
invalid = &my_invalid; // so we do not need to test for NULL
|
||||
Json::Value& inv = *invalid;
|
||||
std::set<String> valid_keys;
|
||||
getValidReaderKeys(&valid_keys);
|
||||
Value::Members keys = settings_.getMemberNames();
|
||||
size_t n = keys.size();
|
||||
for (size_t i = 0; i < n; ++i) {
|
||||
String const& key = keys[i];
|
||||
if (valid_keys.find(key) == valid_keys.end()) {
|
||||
inv[key] = settings_[key];
|
||||
}
|
||||
static const auto& valid_keys = *new std::set<String>{
|
||||
"collectComments",
|
||||
"allowComments",
|
||||
"allowTrailingCommas",
|
||||
"strictRoot",
|
||||
"allowDroppedNullPlaceholders",
|
||||
"allowNumericKeys",
|
||||
"allowSingleQuotes",
|
||||
"stackLimit",
|
||||
"failIfExtra",
|
||||
"rejectDupKeys",
|
||||
"allowSpecialFloats",
|
||||
"skipBom",
|
||||
};
|
||||
for (auto si = settings_.begin(); si != settings_.end(); ++si) {
|
||||
auto key = si.name();
|
||||
if (valid_keys.count(key))
|
||||
continue;
|
||||
if (invalid)
|
||||
(*invalid)[std::move(key)] = *si;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
return inv.empty();
|
||||
return invalid ? invalid->empty() : true;
|
||||
}
|
||||
|
||||
Value& CharReaderBuilder::operator[](const String& key) {
|
||||
return settings_[key];
|
||||
}
|
||||
@@ -1961,6 +1935,7 @@ Value& CharReaderBuilder::operator[](const String& key) {
|
||||
void CharReaderBuilder::strictMode(Json::Value* settings) {
|
||||
//! [CharReaderBuilderStrictMode]
|
||||
(*settings)["allowComments"] = false;
|
||||
(*settings)["allowTrailingCommas"] = false;
|
||||
(*settings)["strictRoot"] = true;
|
||||
(*settings)["allowDroppedNullPlaceholders"] = false;
|
||||
(*settings)["allowNumericKeys"] = false;
|
||||
@@ -1969,6 +1944,7 @@ void CharReaderBuilder::strictMode(Json::Value* settings) {
|
||||
(*settings)["failIfExtra"] = true;
|
||||
(*settings)["rejectDupKeys"] = true;
|
||||
(*settings)["allowSpecialFloats"] = false;
|
||||
(*settings)["skipBom"] = true;
|
||||
//! [CharReaderBuilderStrictMode]
|
||||
}
|
||||
// static
|
||||
@@ -1976,6 +1952,7 @@ void CharReaderBuilder::setDefaults(Json::Value* settings) {
|
||||
//! [CharReaderBuilderDefaults]
|
||||
(*settings)["collectComments"] = true;
|
||||
(*settings)["allowComments"] = true;
|
||||
(*settings)["allowTrailingCommas"] = true;
|
||||
(*settings)["strictRoot"] = false;
|
||||
(*settings)["allowDroppedNullPlaceholders"] = false;
|
||||
(*settings)["allowNumericKeys"] = false;
|
||||
@@ -1984,15 +1961,14 @@ void CharReaderBuilder::setDefaults(Json::Value* settings) {
|
||||
(*settings)["failIfExtra"] = false;
|
||||
(*settings)["rejectDupKeys"] = false;
|
||||
(*settings)["allowSpecialFloats"] = false;
|
||||
(*settings)["skipBom"] = true;
|
||||
//! [CharReaderBuilderDefaults]
|
||||
}
|
||||
|
||||
//////////////////////////////////
|
||||
// global functions
|
||||
|
||||
bool parseFromStream(CharReader::Factory const& fact,
|
||||
IStream& sin,
|
||||
Value* root,
|
||||
bool parseFromStream(CharReader::Factory const& fact, IStream& sin, Value* root,
|
||||
String* errs) {
|
||||
OStringStream ssin;
|
||||
ssin << sin.rdbuf();
|
||||
|
@@ -71,7 +71,7 @@ enum {
|
||||
};
|
||||
|
||||
// Defines a char buffer for use with uintToString().
|
||||
typedef char UIntToStringBuffer[uintToStringBufferSize];
|
||||
using UIntToStringBuffer = char[uintToStringBufferSize];
|
||||
|
||||
/** Converts an unsigned integer to string.
|
||||
* @param value Unsigned integer to convert to string
|
||||
|
@@ -8,24 +8,20 @@
|
||||
#include <json/value.h>
|
||||
#include <json/writer.h>
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <algorithm>
|
||||
#include <cassert>
|
||||
#include <cmath>
|
||||
#include <cstddef>
|
||||
#include <cstring>
|
||||
#include <iostream>
|
||||
#include <sstream>
|
||||
#include <utility>
|
||||
#ifdef JSON_USE_CPPTL
|
||||
#include <cpptl/conststring.h>
|
||||
#endif
|
||||
#include <algorithm> // min()
|
||||
#include <cstddef> // size_t
|
||||
|
||||
// Provide implementation equivalent of std::snprintf for older _MSC compilers
|
||||
#if defined(_MSC_VER) && _MSC_VER < 1900
|
||||
#include <stdarg.h>
|
||||
static int msvc_pre1900_c99_vsnprintf(char* outBuf,
|
||||
size_t size,
|
||||
const char* format,
|
||||
va_list ap) {
|
||||
static int msvc_pre1900_c99_vsnprintf(char* outBuf, size_t size,
|
||||
const char* format, va_list ap) {
|
||||
int count = -1;
|
||||
if (size != 0)
|
||||
count = _vsnprintf_s(outBuf, size, _TRUNCATE, format, ap);
|
||||
@@ -34,10 +30,8 @@ static int msvc_pre1900_c99_vsnprintf(char* outBuf,
|
||||
return count;
|
||||
}
|
||||
|
||||
int JSON_API msvc_pre1900_c99_snprintf(char* outBuf,
|
||||
size_t size,
|
||||
const char* format,
|
||||
...) {
|
||||
int JSON_API msvc_pre1900_c99_snprintf(char* outBuf, size_t size,
|
||||
const char* format, ...) {
|
||||
va_list ap;
|
||||
va_start(ap, format);
|
||||
const int count = msvc_pre1900_c99_vsnprintf(outBuf, size, format, ap);
|
||||
@@ -88,31 +82,12 @@ Value const& Value::null = Value::nullSingleton();
|
||||
Value const& Value::nullRef = Value::nullSingleton();
|
||||
#endif
|
||||
|
||||
const Int Value::minInt = Int(~(UInt(-1) / 2));
|
||||
const Int Value::maxInt = Int(UInt(-1) / 2);
|
||||
const UInt Value::maxUInt = UInt(-1);
|
||||
#if defined(JSON_HAS_INT64)
|
||||
const Int64 Value::minInt64 = Int64(~(UInt64(-1) / 2));
|
||||
const Int64 Value::maxInt64 = Int64(UInt64(-1) / 2);
|
||||
const UInt64 Value::maxUInt64 = UInt64(-1);
|
||||
// The constant is hard-coded because some compiler have trouble
|
||||
// converting Value::maxUInt64 to a double correctly (AIX/xlC).
|
||||
// Assumes that UInt64 is a 64 bits integer.
|
||||
static const double maxUInt64AsDouble = 18446744073709551615.0;
|
||||
#endif // defined(JSON_HAS_INT64)
|
||||
const LargestInt Value::minLargestInt = LargestInt(~(LargestUInt(-1) / 2));
|
||||
const LargestInt Value::maxLargestInt = LargestInt(LargestUInt(-1) / 2);
|
||||
const LargestUInt Value::maxLargestUInt = LargestUInt(-1);
|
||||
|
||||
const UInt Value::defaultRealPrecision = 17;
|
||||
|
||||
#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
|
||||
template <typename T, typename U>
|
||||
static inline bool InRange(double d, T min, U max) {
|
||||
// The casts can lose precision, but we are looking only for
|
||||
// an approximate range. Might fail on edge cases though. ~cdunn
|
||||
// return d >= static_cast<double>(min) && d <= static_cast<double>(max);
|
||||
return d >= min && d <= max;
|
||||
return d >= static_cast<double>(min) && d <= static_cast<double>(max);
|
||||
}
|
||||
#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
|
||||
static inline double integerToDouble(Json::UInt64 value) {
|
||||
@@ -143,7 +118,7 @@ static inline char* duplicateStringValue(const char* value, size_t length) {
|
||||
if (length >= static_cast<size_t>(Value::maxInt))
|
||||
length = Value::maxInt - 1;
|
||||
|
||||
char* newString = static_cast<char*>(malloc(length + 1));
|
||||
auto newString = static_cast<char*>(malloc(length + 1));
|
||||
if (newString == nullptr) {
|
||||
throwRuntimeError("in Json::Value::duplicateStringValue(): "
|
||||
"Failed to allocate string value buffer");
|
||||
@@ -163,8 +138,8 @@ static inline char* duplicateAndPrefixStringValue(const char* value,
|
||||
sizeof(unsigned) - 1U,
|
||||
"in Json::Value::duplicateAndPrefixStringValue(): "
|
||||
"length too big for prefixing");
|
||||
unsigned actualLength = length + static_cast<unsigned>(sizeof(unsigned)) + 1U;
|
||||
char* newString = static_cast<char*>(malloc(actualLength));
|
||||
size_t actualLength = sizeof(length) + length + 1;
|
||||
auto newString = static_cast<char*>(malloc(actualLength));
|
||||
if (newString == nullptr) {
|
||||
throwRuntimeError("in Json::Value::duplicateAndPrefixStringValue(): "
|
||||
"Failed to allocate string value buffer");
|
||||
@@ -175,10 +150,8 @@ static inline char* duplicateAndPrefixStringValue(const char* value,
|
||||
0; // to avoid buffer over-run accidents by users later
|
||||
return newString;
|
||||
}
|
||||
inline static void decodePrefixedString(bool isPrefixed,
|
||||
char const* prefixed,
|
||||
unsigned* length,
|
||||
char const** value) {
|
||||
inline static void decodePrefixedString(bool isPrefixed, char const* prefixed,
|
||||
unsigned* length, char const** value) {
|
||||
if (!isPrefixed) {
|
||||
*length = static_cast<unsigned>(strlen(prefixed));
|
||||
*value = prefixed;
|
||||
@@ -228,19 +201,25 @@ namespace Json {
|
||||
|
||||
#if JSON_USE_EXCEPTION
|
||||
Exception::Exception(String msg) : msg_(std::move(msg)) {}
|
||||
Exception::~Exception() JSONCPP_NOEXCEPT {}
|
||||
char const* Exception::what() const JSONCPP_NOEXCEPT { return msg_.c_str(); }
|
||||
Exception::~Exception() noexcept = default;
|
||||
char const* Exception::what() const noexcept { return msg_.c_str(); }
|
||||
RuntimeError::RuntimeError(String const& msg) : Exception(msg) {}
|
||||
LogicError::LogicError(String const& msg) : Exception(msg) {}
|
||||
[[noreturn]] void throwRuntimeError(String const& msg) {
|
||||
JSONCPP_NORETURN void throwRuntimeError(String const& msg) {
|
||||
throw RuntimeError(msg);
|
||||
}
|
||||
[[noreturn]] void throwLogicError(String const& msg) {
|
||||
JSONCPP_NORETURN void throwLogicError(String const& msg) {
|
||||
throw LogicError(msg);
|
||||
}
|
||||
#else // !JSON_USE_EXCEPTION
|
||||
[[noreturn]] void throwRuntimeError(String const& msg) { abort(); }
|
||||
[[noreturn]] void throwLogicError(String const& msg) { abort(); }
|
||||
JSONCPP_NORETURN void throwRuntimeError(String const& msg) {
|
||||
std::cerr << msg << std::endl;
|
||||
abort();
|
||||
}
|
||||
JSONCPP_NORETURN void throwLogicError(String const& msg) {
|
||||
std::cerr << msg << std::endl;
|
||||
abort();
|
||||
}
|
||||
#endif
|
||||
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
@@ -256,8 +235,7 @@ LogicError::LogicError(String const& msg) : Exception(msg) {}
|
||||
|
||||
Value::CZString::CZString(ArrayIndex index) : cstr_(nullptr), index_(index) {}
|
||||
|
||||
Value::CZString::CZString(char const* str,
|
||||
unsigned length,
|
||||
Value::CZString::CZString(char const* str, unsigned length,
|
||||
DuplicationPolicy allocate)
|
||||
: cstr_(str) {
|
||||
// allocate != duplicate
|
||||
@@ -289,7 +267,7 @@ Value::CZString::CZString(CZString&& other)
|
||||
Value::CZString::~CZString() {
|
||||
if (cstr_ && storage_.policy_ == duplicate) {
|
||||
releaseStringValue(const_cast<char*>(cstr_),
|
||||
storage_.length_ + 1u); // +1 for null terminating
|
||||
storage_.length_ + 1U); // +1 for null terminating
|
||||
// character for sake of
|
||||
// completeness but not actually
|
||||
// necessary
|
||||
@@ -445,14 +423,6 @@ Value::Value(const StaticString& value) {
|
||||
value_.string_ = const_cast<char*>(value.c_str());
|
||||
}
|
||||
|
||||
#ifdef JSON_USE_CPPTL
|
||||
Value::Value(const CppTL::ConstString& value) {
|
||||
initBasic(stringValue, true);
|
||||
value_.string_ = duplicateAndPrefixStringValue(
|
||||
value, static_cast<unsigned>(value.length()));
|
||||
}
|
||||
#endif
|
||||
|
||||
Value::Value(bool value) {
|
||||
initBasic(booleanValue);
|
||||
value_.bool_ = value;
|
||||
@@ -520,7 +490,7 @@ int Value::compare(const Value& other) const {
|
||||
bool Value::operator<(const Value& other) const {
|
||||
int typeDelta = type() - other.type();
|
||||
if (typeDelta)
|
||||
return typeDelta < 0 ? true : false;
|
||||
return typeDelta < 0;
|
||||
switch (type()) {
|
||||
case nullValue:
|
||||
return false;
|
||||
@@ -534,10 +504,7 @@ bool Value::operator<(const Value& other) const {
|
||||
return value_.bool_ < other.value_.bool_;
|
||||
case stringValue: {
|
||||
if ((value_.string_ == nullptr) || (other.value_.string_ == nullptr)) {
|
||||
if (other.value_.string_)
|
||||
return true;
|
||||
else
|
||||
return false;
|
||||
return other.value_.string_ != nullptr;
|
||||
}
|
||||
unsigned this_len;
|
||||
unsigned other_len;
|
||||
@@ -558,9 +525,10 @@ bool Value::operator<(const Value& other) const {
|
||||
}
|
||||
case arrayValue:
|
||||
case objectValue: {
|
||||
int delta = int(value_.map_->size() - other.value_.map_->size());
|
||||
if (delta)
|
||||
return delta < 0;
|
||||
auto thisSize = value_.map_->size();
|
||||
auto otherSize = other.value_.map_->size();
|
||||
if (thisSize != otherSize)
|
||||
return thisSize < otherSize;
|
||||
return (*value_.map_) < (*other.value_.map_);
|
||||
}
|
||||
default:
|
||||
@@ -683,15 +651,6 @@ String Value::asString() const {
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef JSON_USE_CPPTL
|
||||
CppTL::ConstString Value::asConstString() const {
|
||||
unsigned len;
|
||||
char const* str;
|
||||
decodePrefixedString(isAllocated(), value_.string_, &len, &str);
|
||||
return CppTL::ConstString(str, len);
|
||||
}
|
||||
#endif
|
||||
|
||||
Value::Int Value::asInt() const {
|
||||
switch (type()) {
|
||||
case intValue:
|
||||
@@ -835,7 +794,7 @@ float Value::asFloat() const {
|
||||
case nullValue:
|
||||
return 0.0;
|
||||
case booleanValue:
|
||||
return value_.bool_ ? 1.0f : 0.0f;
|
||||
return value_.bool_ ? 1.0F : 0.0F;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
@@ -849,9 +808,9 @@ bool Value::asBool() const {
|
||||
case nullValue:
|
||||
return false;
|
||||
case intValue:
|
||||
return value_.int_ ? true : false;
|
||||
return value_.int_ != 0;
|
||||
case uintValue:
|
||||
return value_.uint_ ? true : false;
|
||||
return value_.uint_ != 0;
|
||||
case realValue: {
|
||||
// According to JavaScript language zero or NaN is regarded as false
|
||||
const auto value_classification = std::fpclassify(value_.real_);
|
||||
@@ -867,7 +826,7 @@ bool Value::isConvertibleTo(ValueType other) const {
|
||||
switch (other) {
|
||||
case nullValue:
|
||||
return (isNumeric() && asDouble() == 0.0) ||
|
||||
(type() == booleanValue && value_.bool_ == false) ||
|
||||
(type() == booleanValue && !value_.bool_) ||
|
||||
(type() == stringValue && asString().empty()) ||
|
||||
(type() == arrayValue && value_.map_->empty()) ||
|
||||
(type() == objectValue && value_.map_->empty()) ||
|
||||
@@ -922,9 +881,8 @@ ArrayIndex Value::size() const {
|
||||
|
||||
bool Value::empty() const {
|
||||
if (isNull() || isArray() || isObject())
|
||||
return size() == 0u;
|
||||
else
|
||||
return false;
|
||||
return size() == 0U;
|
||||
return false;
|
||||
}
|
||||
|
||||
Value::operator bool() const { return !isNull(); }
|
||||
@@ -1164,26 +1122,36 @@ Value& Value::operator[](const StaticString& key) {
|
||||
return resolveReference(key.c_str());
|
||||
}
|
||||
|
||||
#ifdef JSON_USE_CPPTL
|
||||
Value& Value::operator[](const CppTL::ConstString& key) {
|
||||
return resolveReference(key.c_str(), key.end_c_str());
|
||||
}
|
||||
Value const& Value::operator[](CppTL::ConstString const& key) const {
|
||||
Value const* found = find(key.c_str(), key.end_c_str());
|
||||
if (!found)
|
||||
return nullSingleton();
|
||||
return *found;
|
||||
}
|
||||
#endif
|
||||
|
||||
Value& Value::append(const Value& value) { return (*this)[size()] = value; }
|
||||
Value& Value::append(const Value& value) { return append(Value(value)); }
|
||||
|
||||
Value& Value::append(Value&& value) {
|
||||
return (*this)[size()] = std::move(value);
|
||||
JSON_ASSERT_MESSAGE(type() == nullValue || type() == arrayValue,
|
||||
"in Json::Value::append: requires arrayValue");
|
||||
if (type() == nullValue) {
|
||||
*this = Value(arrayValue);
|
||||
}
|
||||
return this->value_.map_->emplace(size(), std::move(value)).first->second;
|
||||
}
|
||||
|
||||
Value Value::get(char const* begin,
|
||||
char const* end,
|
||||
bool Value::insert(ArrayIndex index, const Value& newValue) {
|
||||
return insert(index, Value(newValue));
|
||||
}
|
||||
|
||||
bool Value::insert(ArrayIndex index, Value&& newValue) {
|
||||
JSON_ASSERT_MESSAGE(type() == nullValue || type() == arrayValue,
|
||||
"in Json::Value::insert: requires arrayValue");
|
||||
ArrayIndex length = size();
|
||||
if (index > length) {
|
||||
return false;
|
||||
}
|
||||
for (ArrayIndex i = length; i > index; i--) {
|
||||
(*this)[i] = std::move((*this)[i - 1]);
|
||||
}
|
||||
(*this)[index] = std::move(newValue);
|
||||
return true;
|
||||
}
|
||||
|
||||
Value Value::get(char const* begin, char const* end,
|
||||
Value const& defaultValue) const {
|
||||
Value const* found = find(begin, end);
|
||||
return !found ? defaultValue : *found;
|
||||
@@ -1250,13 +1218,6 @@ bool Value::removeIndex(ArrayIndex index, Value* removed) {
|
||||
return true;
|
||||
}
|
||||
|
||||
#ifdef JSON_USE_CPPTL
|
||||
Value Value::get(const CppTL::ConstString& key,
|
||||
const Value& defaultValue) const {
|
||||
return get(key.c_str(), key.end_c_str(), defaultValue);
|
||||
}
|
||||
#endif
|
||||
|
||||
bool Value::isMember(char const* begin, char const* end) const {
|
||||
Value const* value = find(begin, end);
|
||||
return nullptr != value;
|
||||
@@ -1268,12 +1229,6 @@ bool Value::isMember(String const& key) const {
|
||||
return isMember(key.data(), key.data() + key.length());
|
||||
}
|
||||
|
||||
#ifdef JSON_USE_CPPTL
|
||||
bool Value::isMember(const CppTL::ConstString& key) const {
|
||||
return isMember(key.c_str(), key.end_c_str());
|
||||
}
|
||||
#endif
|
||||
|
||||
Value::Members Value::getMemberNames() const {
|
||||
JSON_ASSERT_MESSAGE(
|
||||
type() == nullValue || type() == objectValue,
|
||||
@@ -1289,31 +1244,6 @@ Value::Members Value::getMemberNames() const {
|
||||
}
|
||||
return members;
|
||||
}
|
||||
//
|
||||
//# ifdef JSON_USE_CPPTL
|
||||
// EnumMemberNames
|
||||
// Value::enumMemberNames() const
|
||||
//{
|
||||
// if ( type() == objectValue )
|
||||
// {
|
||||
// return CppTL::Enum::any( CppTL::Enum::transform(
|
||||
// CppTL::Enum::keys( *(value_.map_), CppTL::Type<const CZString &>() ),
|
||||
// MemberNamesTransform() ) );
|
||||
// }
|
||||
// return EnumMemberNames();
|
||||
//}
|
||||
//
|
||||
//
|
||||
// EnumValues
|
||||
// Value::enumValues() const
|
||||
//{
|
||||
// if ( type() == objectValue || type() == arrayValue )
|
||||
// return CppTL::Enum::anyValues( *(value_.map_),
|
||||
// CppTL::Type<const Value &>() );
|
||||
// return EnumValues();
|
||||
//}
|
||||
//
|
||||
//# endif
|
||||
|
||||
static bool IsIntegral(double d) {
|
||||
double integral_part;
|
||||
@@ -1469,7 +1399,10 @@ void Value::Comments::set(CommentPlacement slot, String comment) {
|
||||
if (!ptr_) {
|
||||
ptr_ = std::unique_ptr<Array>(new Array());
|
||||
}
|
||||
(*ptr_)[slot] = std::move(comment);
|
||||
// check comments array boundry.
|
||||
if (slot < CommentPlacement::numberOfCommentPlacement) {
|
||||
(*ptr_)[slot] = std::move(comment);
|
||||
}
|
||||
}
|
||||
|
||||
void Value::setComment(String comment, CommentPlacement placement) {
|
||||
@@ -1565,25 +1498,20 @@ Value::iterator Value::end() {
|
||||
// class PathArgument
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
PathArgument::PathArgument() : key_() {}
|
||||
PathArgument::PathArgument() = default;
|
||||
|
||||
PathArgument::PathArgument(ArrayIndex index)
|
||||
: key_(), index_(index), kind_(kindIndex) {}
|
||||
: index_(index), kind_(kindIndex) {}
|
||||
|
||||
PathArgument::PathArgument(const char* key)
|
||||
: key_(key), index_(), kind_(kindKey) {}
|
||||
PathArgument::PathArgument(const char* key) : key_(key), kind_(kindKey) {}
|
||||
|
||||
PathArgument::PathArgument(const String& key)
|
||||
: key_(key.c_str()), index_(), kind_(kindKey) {}
|
||||
PathArgument::PathArgument(String key) : key_(std::move(key)), kind_(kindKey) {}
|
||||
|
||||
// class Path
|
||||
// //////////////////////////////////////////////////////////////////
|
||||
|
||||
Path::Path(const String& path,
|
||||
const PathArgument& a1,
|
||||
const PathArgument& a2,
|
||||
const PathArgument& a3,
|
||||
const PathArgument& a4,
|
||||
Path::Path(const String& path, const PathArgument& a1, const PathArgument& a2,
|
||||
const PathArgument& a3, const PathArgument& a4,
|
||||
const PathArgument& a5) {
|
||||
InArgs in;
|
||||
in.reserve(5);
|
||||
@@ -1626,8 +1554,7 @@ void Path::makePath(const String& path, const InArgs& in) {
|
||||
}
|
||||
}
|
||||
|
||||
void Path::addPathInArg(const String& /*path*/,
|
||||
const InArgs& in,
|
||||
void Path::addPathInArg(const String& /*path*/, const InArgs& in,
|
||||
InArgs::const_iterator& itInArg,
|
||||
PathArgument::Kind kind) {
|
||||
if (itInArg == in.end()) {
|
||||
|
@@ -21,7 +21,8 @@ ValueIteratorBase::ValueIteratorBase(
|
||||
const Value::ObjectValues::iterator& current)
|
||||
: current_(current), isNull_(false) {}
|
||||
|
||||
Value& ValueIteratorBase::deref() const { return current_->second; }
|
||||
Value& ValueIteratorBase::deref() { return current_->second; }
|
||||
const Value& ValueIteratorBase::deref() const { return current_->second; }
|
||||
|
||||
void ValueIteratorBase::increment() { ++current_; }
|
||||
|
||||
@@ -29,9 +30,6 @@ void ValueIteratorBase::decrement() { --current_; }
|
||||
|
||||
ValueIteratorBase::difference_type
|
||||
ValueIteratorBase::computeDistance(const SelfType& other) const {
|
||||
#ifdef JSON_USE_CPPTL_SMALLMAP
|
||||
return other.current_ - current_;
|
||||
#else
|
||||
// Iterator for null value are initialized using the default
|
||||
// constructor, which initialize current_ to the default
|
||||
// std::map::iterator. As begin() and end() are two instance
|
||||
@@ -52,7 +50,6 @@ ValueIteratorBase::computeDistance(const SelfType& other) const {
|
||||
++myDistance;
|
||||
}
|
||||
return myDistance;
|
||||
#endif
|
||||
}
|
||||
|
||||
bool ValueIteratorBase::isEqual(const SelfType& other) const {
|
||||
|
@@ -7,7 +7,9 @@
|
||||
#include "json_tool.h"
|
||||
#include <json/writer.h>
|
||||
#endif // if !defined(JSON_IS_AMALGAMATION)
|
||||
#include <algorithm>
|
||||
#include <cassert>
|
||||
#include <cctype>
|
||||
#include <cstring>
|
||||
#include <iomanip>
|
||||
#include <memory>
|
||||
@@ -84,9 +86,9 @@
|
||||
namespace Json {
|
||||
|
||||
#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
|
||||
typedef std::unique_ptr<StreamWriter> StreamWriterPtr;
|
||||
using StreamWriterPtr = std::unique_ptr<StreamWriter>;
|
||||
#else
|
||||
typedef std::auto_ptr<StreamWriter> StreamWriterPtr;
|
||||
using StreamWriterPtr = std::auto_ptr<StreamWriter>;
|
||||
#endif
|
||||
|
||||
String valueToString(LargestInt value) {
|
||||
@@ -122,10 +124,8 @@ String valueToString(UInt value) { return valueToString(LargestUInt(value)); }
|
||||
#endif // # if defined(JSON_HAS_INT64)
|
||||
|
||||
namespace {
|
||||
String valueToString(double value,
|
||||
bool useSpecialFloats,
|
||||
unsigned int precision,
|
||||
PrecisionType precisionType) {
|
||||
String valueToString(double value, bool useSpecialFloats,
|
||||
unsigned int precision, PrecisionType precisionType) {
|
||||
// Print into the buffer. We need not request the alternative representation
|
||||
// that always has a decimal point because JSON doesn't distinguish the
|
||||
// concepts of reals and integers.
|
||||
@@ -168,24 +168,19 @@ String valueToString(double value,
|
||||
}
|
||||
} // namespace
|
||||
|
||||
String valueToString(double value,
|
||||
unsigned int precision,
|
||||
String valueToString(double value, unsigned int precision,
|
||||
PrecisionType precisionType) {
|
||||
return valueToString(value, false, precision, precisionType);
|
||||
}
|
||||
|
||||
String valueToString(bool value) { return value ? "true" : "false"; }
|
||||
|
||||
static bool isAnyCharRequiredQuoting(char const* s, size_t n) {
|
||||
static bool doesAnyCharRequireEscaping(char const* s, size_t n) {
|
||||
assert(s || !n);
|
||||
|
||||
char const* const end = s + n;
|
||||
for (char const* cur = s; cur < end; ++cur) {
|
||||
if (*cur == '\\' || *cur == '\"' || *cur < ' ' ||
|
||||
static_cast<unsigned char>(*cur) < 0x80)
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
return std::any_of(s, s + n, [](unsigned char c) {
|
||||
return c == '\\' || c == '"' || c < 0x20 || c > 0x7F;
|
||||
});
|
||||
}
|
||||
|
||||
static unsigned int utf8ToCodepoint(const char*& s, const char* e) {
|
||||
@@ -267,11 +262,20 @@ static String toHex16Bit(unsigned int x) {
|
||||
return result;
|
||||
}
|
||||
|
||||
static String valueToQuotedStringN(const char* value, unsigned length) {
|
||||
static void appendRaw(String& result, unsigned ch) {
|
||||
result += static_cast<char>(ch);
|
||||
}
|
||||
|
||||
static void appendHex(String& result, unsigned ch) {
|
||||
result.append("\\u").append(toHex16Bit(ch));
|
||||
}
|
||||
|
||||
static String valueToQuotedStringN(const char* value, unsigned length,
|
||||
bool emitUTF8 = false) {
|
||||
if (value == nullptr)
|
||||
return "";
|
||||
|
||||
if (!isAnyCharRequiredQuoting(value, length))
|
||||
if (!doesAnyCharRequireEscaping(value, length))
|
||||
return String("\"") + value + "\"";
|
||||
// We have to walk value and escape any special characters.
|
||||
// Appending to String is not efficient, but this should be rare.
|
||||
@@ -313,21 +317,28 @@ static String valueToQuotedStringN(const char* value, unsigned length) {
|
||||
// Should add a flag to allow this compatibility mode and prevent this
|
||||
// sequence from occurring.
|
||||
default: {
|
||||
unsigned int cp = utf8ToCodepoint(c, end);
|
||||
// don't escape non-control characters
|
||||
// (short escape sequence are applied above)
|
||||
if (cp < 0x80 && cp >= 0x20)
|
||||
result += static_cast<char>(cp);
|
||||
else if (cp < 0x10000) { // codepoint is in Basic Multilingual Plane
|
||||
result += "\\u";
|
||||
result += toHex16Bit(cp);
|
||||
} else { // codepoint is not in Basic Multilingual Plane
|
||||
// convert to surrogate pair first
|
||||
cp -= 0x10000;
|
||||
result += "\\u";
|
||||
result += toHex16Bit((cp >> 10) + 0xD800);
|
||||
result += "\\u";
|
||||
result += toHex16Bit((cp & 0x3FF) + 0xDC00);
|
||||
if (emitUTF8) {
|
||||
unsigned codepoint = static_cast<unsigned char>(*c);
|
||||
if (codepoint < 0x20) {
|
||||
appendHex(result, codepoint);
|
||||
} else {
|
||||
appendRaw(result, codepoint);
|
||||
}
|
||||
} else {
|
||||
unsigned codepoint = utf8ToCodepoint(c, end); // modifies `c`
|
||||
if (codepoint < 0x20) {
|
||||
appendHex(result, codepoint);
|
||||
} else if (codepoint < 0x80) {
|
||||
appendRaw(result, codepoint);
|
||||
} else if (codepoint < 0x10000) {
|
||||
// Basic Multilingual Plane
|
||||
appendHex(result, codepoint);
|
||||
} else {
|
||||
// Extended Unicode. Encode 20 bits as a surrogate pair.
|
||||
codepoint -= 0x10000;
|
||||
appendHex(result, 0xd800 + ((codepoint >> 10) & 0x3ff));
|
||||
appendHex(result, 0xdc00 + (codepoint & 0x3ff));
|
||||
}
|
||||
}
|
||||
} break;
|
||||
}
|
||||
@@ -864,13 +875,10 @@ struct CommentStyle {
|
||||
};
|
||||
|
||||
struct BuiltStyledStreamWriter : public StreamWriter {
|
||||
BuiltStyledStreamWriter(String indentation,
|
||||
CommentStyle::Enum cs,
|
||||
String colonSymbol,
|
||||
String nullSymbol,
|
||||
String endingLineFeedSymbol,
|
||||
bool useSpecialFloats,
|
||||
unsigned int precision,
|
||||
BuiltStyledStreamWriter(String indentation, CommentStyle::Enum cs,
|
||||
String colonSymbol, String nullSymbol,
|
||||
String endingLineFeedSymbol, bool useSpecialFloats,
|
||||
bool emitUTF8, unsigned int precision,
|
||||
PrecisionType precisionType);
|
||||
int write(Value const& root, OStream* sout) override;
|
||||
|
||||
@@ -887,7 +895,7 @@ private:
|
||||
void writeCommentAfterValueOnSameLine(Value const& root);
|
||||
static bool hasCommentForValue(const Value& value);
|
||||
|
||||
typedef std::vector<String> ChildValues;
|
||||
using ChildValues = std::vector<String>;
|
||||
|
||||
ChildValues childValues_;
|
||||
String indentString_;
|
||||
@@ -900,23 +908,20 @@ private:
|
||||
bool addChildValues_ : 1;
|
||||
bool indented_ : 1;
|
||||
bool useSpecialFloats_ : 1;
|
||||
bool emitUTF8_ : 1;
|
||||
unsigned int precision_;
|
||||
PrecisionType precisionType_;
|
||||
};
|
||||
BuiltStyledStreamWriter::BuiltStyledStreamWriter(String indentation,
|
||||
CommentStyle::Enum cs,
|
||||
String colonSymbol,
|
||||
String nullSymbol,
|
||||
String endingLineFeedSymbol,
|
||||
bool useSpecialFloats,
|
||||
unsigned int precision,
|
||||
PrecisionType precisionType)
|
||||
BuiltStyledStreamWriter::BuiltStyledStreamWriter(
|
||||
String indentation, CommentStyle::Enum cs, String colonSymbol,
|
||||
String nullSymbol, String endingLineFeedSymbol, bool useSpecialFloats,
|
||||
bool emitUTF8, unsigned int precision, PrecisionType precisionType)
|
||||
: rightMargin_(74), indentation_(std::move(indentation)), cs_(cs),
|
||||
colonSymbol_(std::move(colonSymbol)), nullSymbol_(std::move(nullSymbol)),
|
||||
endingLineFeedSymbol_(std::move(endingLineFeedSymbol)),
|
||||
addChildValues_(false), indented_(false),
|
||||
useSpecialFloats_(useSpecialFloats), precision_(precision),
|
||||
precisionType_(precisionType) {}
|
||||
useSpecialFloats_(useSpecialFloats), emitUTF8_(emitUTF8),
|
||||
precision_(precision), precisionType_(precisionType) {}
|
||||
int BuiltStyledStreamWriter::write(Value const& root, OStream* sout) {
|
||||
sout_ = sout;
|
||||
addChildValues_ = false;
|
||||
@@ -953,7 +958,8 @@ void BuiltStyledStreamWriter::writeValue(Value const& value) {
|
||||
char const* end;
|
||||
bool ok = value.getString(&str, &end);
|
||||
if (ok)
|
||||
pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end - str)));
|
||||
pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end - str),
|
||||
emitUTF8_));
|
||||
else
|
||||
pushValue("");
|
||||
break;
|
||||
@@ -977,7 +983,7 @@ void BuiltStyledStreamWriter::writeValue(Value const& value) {
|
||||
Value const& childValue = value[name];
|
||||
writeCommentBeforeValue(childValue);
|
||||
writeWithIndent(valueToQuotedStringN(
|
||||
name.data(), static_cast<unsigned>(name.length())));
|
||||
name.data(), static_cast<unsigned>(name.length()), emitUTF8_));
|
||||
*sout_ << colonSymbol_;
|
||||
writeValue(childValue);
|
||||
if (++it == members.end()) {
|
||||
@@ -1153,12 +1159,13 @@ StreamWriter::Factory::~Factory() = default;
|
||||
StreamWriterBuilder::StreamWriterBuilder() { setDefaults(&settings_); }
|
||||
StreamWriterBuilder::~StreamWriterBuilder() = default;
|
||||
StreamWriter* StreamWriterBuilder::newStreamWriter() const {
|
||||
String indentation = settings_["indentation"].asString();
|
||||
String cs_str = settings_["commentStyle"].asString();
|
||||
String pt_str = settings_["precisionType"].asString();
|
||||
bool eyc = settings_["enableYAMLCompatibility"].asBool();
|
||||
bool dnp = settings_["dropNullPlaceholders"].asBool();
|
||||
bool usf = settings_["useSpecialFloats"].asBool();
|
||||
const String indentation = settings_["indentation"].asString();
|
||||
const String cs_str = settings_["commentStyle"].asString();
|
||||
const String pt_str = settings_["precisionType"].asString();
|
||||
const bool eyc = settings_["enableYAMLCompatibility"].asBool();
|
||||
const bool dnp = settings_["dropNullPlaceholders"].asBool();
|
||||
const bool usf = settings_["useSpecialFloats"].asBool();
|
||||
const bool emitUTF8 = settings_["emitUTF8"].asBool();
|
||||
unsigned int pre = settings_["precision"].asUInt();
|
||||
CommentStyle::Enum cs = CommentStyle::All;
|
||||
if (cs_str == "All") {
|
||||
@@ -1190,36 +1197,33 @@ StreamWriter* StreamWriterBuilder::newStreamWriter() const {
|
||||
pre = 17;
|
||||
String endingLineFeedSymbol;
|
||||
return new BuiltStyledStreamWriter(indentation, cs, colonSymbol, nullSymbol,
|
||||
endingLineFeedSymbol, usf, pre,
|
||||
endingLineFeedSymbol, usf, emitUTF8, pre,
|
||||
precisionType);
|
||||
}
|
||||
static void getValidWriterKeys(std::set<String>* valid_keys) {
|
||||
valid_keys->clear();
|
||||
valid_keys->insert("indentation");
|
||||
valid_keys->insert("commentStyle");
|
||||
valid_keys->insert("enableYAMLCompatibility");
|
||||
valid_keys->insert("dropNullPlaceholders");
|
||||
valid_keys->insert("useSpecialFloats");
|
||||
valid_keys->insert("precision");
|
||||
valid_keys->insert("precisionType");
|
||||
}
|
||||
|
||||
bool StreamWriterBuilder::validate(Json::Value* invalid) const {
|
||||
Json::Value my_invalid;
|
||||
if (!invalid)
|
||||
invalid = &my_invalid; // so we do not need to test for NULL
|
||||
Json::Value& inv = *invalid;
|
||||
std::set<String> valid_keys;
|
||||
getValidWriterKeys(&valid_keys);
|
||||
Value::Members keys = settings_.getMemberNames();
|
||||
size_t n = keys.size();
|
||||
for (size_t i = 0; i < n; ++i) {
|
||||
String const& key = keys[i];
|
||||
if (valid_keys.find(key) == valid_keys.end()) {
|
||||
inv[key] = settings_[key];
|
||||
}
|
||||
static const auto& valid_keys = *new std::set<String>{
|
||||
"indentation",
|
||||
"commentStyle",
|
||||
"enableYAMLCompatibility",
|
||||
"dropNullPlaceholders",
|
||||
"useSpecialFloats",
|
||||
"emitUTF8",
|
||||
"precision",
|
||||
"precisionType",
|
||||
};
|
||||
for (auto si = settings_.begin(); si != settings_.end(); ++si) {
|
||||
auto key = si.name();
|
||||
if (valid_keys.count(key))
|
||||
continue;
|
||||
if (invalid)
|
||||
(*invalid)[std::move(key)] = *si;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
return inv.empty();
|
||||
return invalid ? invalid->empty() : true;
|
||||
}
|
||||
|
||||
Value& StreamWriterBuilder::operator[](const String& key) {
|
||||
return settings_[key];
|
||||
}
|
||||
@@ -1231,6 +1235,7 @@ void StreamWriterBuilder::setDefaults(Json::Value* settings) {
|
||||
(*settings)["enableYAMLCompatibility"] = false;
|
||||
(*settings)["dropNullPlaceholders"] = false;
|
||||
(*settings)["useSpecialFloats"] = false;
|
||||
(*settings)["emitUTF8"] = false;
|
||||
(*settings)["precision"] = 17;
|
||||
(*settings)["precisionType"] = "significant";
|
||||
//! [StreamWriterBuilderDefaults]
|
||||
|
@@ -1,22 +0,0 @@
|
||||
// DO NOT EDIT. This file (and "version") is a template used by the build system
|
||||
// (either CMake or Meson) to generate a "version.h" header file.
|
||||
#ifndef JSON_VERSION_H_INCLUDED
|
||||
#define JSON_VERSION_H_INCLUDED
|
||||
|
||||
#define JSONCPP_VERSION_STRING "@JSONCPP_VERSION@"
|
||||
#define JSONCPP_VERSION_MAJOR @JSONCPP_VERSION_MAJOR@
|
||||
#define JSONCPP_VERSION_MINOR @JSONCPP_VERSION_MINOR@
|
||||
#define JSONCPP_VERSION_PATCH @JSONCPP_VERSION_PATCH@
|
||||
#define JSONCPP_VERSION_QUALIFIER
|
||||
#define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) \
|
||||
| (JSONCPP_VERSION_MINOR << 16) \
|
||||
| (JSONCPP_VERSION_PATCH << 8))
|
||||
|
||||
#ifdef JSONCPP_USING_SECURE_MEMORY
|
||||
#undef JSONCPP_USING_SECURE_MEMORY
|
||||
#endif
|
||||
#define JSONCPP_USING_SECURE_MEMORY @JSONCPP_USE_SECURE_MEMORY@
|
||||
// If non-zero, the library zeroes any memory that it has allocated before
|
||||
// it frees its memory.
|
||||
|
||||
#endif // JSON_VERSION_H_INCLUDED
|
@@ -1,42 +1,39 @@
|
||||
# vim: et ts=4 sts=4 sw=4 tw=0
|
||||
|
||||
add_executable( jsoncpp_test
|
||||
jsontest.cpp
|
||||
jsontest.h
|
||||
fuzz.cpp
|
||||
fuzz.h
|
||||
main.cpp
|
||||
)
|
||||
add_executable(jsoncpp_test
|
||||
jsontest.cpp
|
||||
jsontest.h
|
||||
fuzz.cpp
|
||||
fuzz.h
|
||||
main.cpp
|
||||
)
|
||||
|
||||
|
||||
if(BUILD_SHARED_LIBS)
|
||||
add_compile_definitions( JSON_DLL )
|
||||
if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.12.0)
|
||||
add_compile_definitions( JSON_DLL )
|
||||
else()
|
||||
add_definitions( -DJSON_DLL )
|
||||
endif()
|
||||
target_link_libraries(jsoncpp_test jsoncpp_lib)
|
||||
else()
|
||||
target_link_libraries(jsoncpp_test jsoncpp_static)
|
||||
endif()
|
||||
target_link_libraries(jsoncpp_test jsoncpp_lib)
|
||||
|
||||
# another way to solve issue #90
|
||||
#set_target_properties(jsoncpp_test PROPERTIES COMPILE_FLAGS -ffloat-store)
|
||||
|
||||
## Create tests for dashboard submission, allows easy review of CI results https://my.cdash.org/index.php?project=jsoncpp
|
||||
add_test(NAME jsoncpp_test
|
||||
COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:jsoncpp_test>
|
||||
)
|
||||
set_target_properties(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)
|
||||
|
||||
# Run unit tests in post-build
|
||||
# (default cmake workflow hides away the test result into a file, resulting in poor dev workflow?!?)
|
||||
if(JSONCPP_WITH_POST_BUILD_UNITTEST)
|
||||
if(BUILD_SHARED_LIBS)
|
||||
# First, copy the shared lib, for Microsoft.
|
||||
# Then, run the test executable.
|
||||
add_custom_command( TARGET jsoncpp_test
|
||||
POST_BUILD
|
||||
COMMAND ${CMAKE_COMMAND} -E copy_if_different $<TARGET_FILE:jsoncpp_lib> $<TARGET_FILE_DIR:jsoncpp_test>
|
||||
COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:jsoncpp_test>)
|
||||
else(BUILD_SHARED_LIBS)
|
||||
# Just run the test executable.
|
||||
add_custom_command( TARGET jsoncpp_test
|
||||
POST_BUILD
|
||||
COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:jsoncpp_test>)
|
||||
endif()
|
||||
## Create tests for dashboard submission, allows easy review of CI results https://my.cdash.org/index.php?project=jsoncpp
|
||||
add_test(NAME jsoncpp_test
|
||||
COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:jsoncpp_test>
|
||||
add_custom_command(TARGET jsoncpp_test
|
||||
POST_BUILD
|
||||
COMMAND ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:jsoncpp_test>
|
||||
)
|
||||
endif()
|
||||
|
||||
set_target_properties(jsoncpp_test PROPERTIES OUTPUT_NAME jsoncpp_test)
|
||||
|
@@ -9,7 +9,6 @@
|
||||
#include <json/config.h>
|
||||
#include <json/json.h>
|
||||
#include <memory>
|
||||
#include <stdint.h>
|
||||
#include <string>
|
||||
|
||||
namespace Json {
|
||||
@@ -23,8 +22,12 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
uint32_t hash_settings = *(const uint32_t*)data;
|
||||
const uint32_t hash_settings = static_cast<uint32_t>(data[0]) |
|
||||
(static_cast<uint32_t>(data[1]) << 8) |
|
||||
(static_cast<uint32_t>(data[2]) << 16) |
|
||||
(static_cast<uint32_t>(data[3]) << 24);
|
||||
data += sizeof(uint32_t);
|
||||
size -= sizeof(uint32_t);
|
||||
|
||||
builder.settings_["failIfExtra"] = hash_settings & (1 << 0);
|
||||
builder.settings_["allowComments_"] = hash_settings & (1 << 1);
|
||||
@@ -35,11 +38,13 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
|
||||
builder.settings_["failIfExtra_"] = hash_settings & (1 << 6);
|
||||
builder.settings_["rejectDupKeys_"] = hash_settings & (1 << 7);
|
||||
builder.settings_["allowSpecialFloats_"] = hash_settings & (1 << 8);
|
||||
builder.settings_["collectComments"] = hash_settings & (1 << 9);
|
||||
builder.settings_["allowTrailingCommas_"] = hash_settings & (1 << 10);
|
||||
|
||||
std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
|
||||
|
||||
Json::Value root;
|
||||
const char* data_str = reinterpret_cast<const char*>(data);
|
||||
const auto data_str = reinterpret_cast<const char*>(data);
|
||||
try {
|
||||
reader->parse(data_str, data_str + size, &root, nullptr);
|
||||
} catch (Json::Exception const&) {
|
||||
|
54
src/test_lib_json/fuzz.dict
Normal file
54
src/test_lib_json/fuzz.dict
Normal file
@@ -0,0 +1,54 @@
|
||||
#
|
||||
# AFL dictionary for JSON
|
||||
# -----------------------
|
||||
#
|
||||
# Just the very basics.
|
||||
#
|
||||
# Inspired by a dictionary by Jakub Wilk <jwilk@jwilk.net>
|
||||
#
|
||||
# https://github.com/rc0r/afl-fuzz/blob/master/dictionaries/json.dict
|
||||
#
|
||||
|
||||
"0"
|
||||
",0"
|
||||
":0"
|
||||
"0:"
|
||||
"-1.2e+3"
|
||||
|
||||
"true"
|
||||
"false"
|
||||
"null"
|
||||
|
||||
"\"\""
|
||||
",\"\""
|
||||
":\"\""
|
||||
"\"\":"
|
||||
|
||||
"{}"
|
||||
",{}"
|
||||
":{}"
|
||||
"{\"\":0}"
|
||||
"{{}}"
|
||||
|
||||
"[]"
|
||||
",[]"
|
||||
":[]"
|
||||
"[0]"
|
||||
"[[]]"
|
||||
|
||||
"''"
|
||||
"\\"
|
||||
"\\b"
|
||||
"\\f"
|
||||
"\\n"
|
||||
"\\r"
|
||||
"\\t"
|
||||
"\\u0000"
|
||||
"\\x00"
|
||||
"\\0"
|
||||
"\\uD800\\uDC00"
|
||||
"\\uDBFF\\uDFFF"
|
||||
|
||||
"\"\":0"
|
||||
"//"
|
||||
"/**/"
|
@@ -82,8 +82,8 @@ TestResult::TestResult() {
|
||||
|
||||
void TestResult::setTestName(const Json::String& name) { name_ = name; }
|
||||
|
||||
TestResult&
|
||||
TestResult::addFailure(const char* file, unsigned int line, const char* expr) {
|
||||
TestResult& TestResult::addFailure(const char* file, unsigned int line,
|
||||
const char* expr) {
|
||||
/// Walks the PredicateContext stack adding them to failures_ if not already
|
||||
/// added.
|
||||
unsigned int nestingLevel = 0;
|
||||
@@ -107,10 +107,8 @@ TestResult::addFailure(const char* file, unsigned int line, const char* expr) {
|
||||
return *this;
|
||||
}
|
||||
|
||||
void TestResult::addFailureInfo(const char* file,
|
||||
unsigned int line,
|
||||
const char* expr,
|
||||
unsigned int nestingLevel) {
|
||||
void TestResult::addFailureInfo(const char* file, unsigned int line,
|
||||
const char* expr, unsigned int nestingLevel) {
|
||||
Failure failure;
|
||||
failure.file_ = file;
|
||||
failure.line_ = line;
|
||||
@@ -269,19 +267,18 @@ bool Runner::runAllTest(bool printSummary) const {
|
||||
printf("All %zu tests passed\n", count);
|
||||
}
|
||||
return true;
|
||||
} else {
|
||||
for (auto& result : failures) {
|
||||
result.printFailure(count > 1);
|
||||
}
|
||||
|
||||
if (printSummary) {
|
||||
size_t const failedCount = failures.size();
|
||||
size_t const passedCount = count - failedCount;
|
||||
printf("%zu/%zu tests passed (%zu failure(s))\n", passedCount, count,
|
||||
failedCount);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
for (auto& result : failures) {
|
||||
result.printFailure(count > 1);
|
||||
}
|
||||
|
||||
if (printSummary) {
|
||||
size_t const failedCount = failures.size();
|
||||
size_t const passedCount = count - failedCount;
|
||||
printf("%zu/%zu tests passed (%zu failure(s))\n", passedCount, count,
|
||||
failedCount);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool Runner::testIndex(const Json::String& testName, size_t& indexOut) const {
|
||||
@@ -310,7 +307,8 @@ int Runner::runCommandLine(int argc, const char* argv[]) const {
|
||||
if (opt == "--list-tests") {
|
||||
listTests();
|
||||
return 0;
|
||||
} else if (opt == "--test-auto") {
|
||||
}
|
||||
if (opt == "--test-auto") {
|
||||
preventDialogOnCrash();
|
||||
} else if (opt == "--test") {
|
||||
++index;
|
||||
@@ -342,8 +340,8 @@ int Runner::runCommandLine(int argc, const char* argv[]) const {
|
||||
|
||||
#if defined(_MSC_VER) && defined(_DEBUG)
|
||||
// Hook MSVCRT assertions to prevent dialog from appearing
|
||||
static int
|
||||
msvcrtSilentReportHook(int reportType, char* message, int* /*returnValue*/) {
|
||||
static int msvcrtSilentReportHook(int reportType, char* message,
|
||||
int* /*returnValue*/) {
|
||||
// The default CRT handling of error and assertion is to display
|
||||
// an error dialog to the user.
|
||||
// Instead, when an error or an assertion occurs, we force the
|
||||
@@ -418,12 +416,9 @@ Json::String ToJsonString(std::string in) {
|
||||
}
|
||||
#endif
|
||||
|
||||
TestResult& checkStringEqual(TestResult& result,
|
||||
const Json::String& expected,
|
||||
const Json::String& actual,
|
||||
const char* file,
|
||||
unsigned int line,
|
||||
const char* expr) {
|
||||
TestResult& checkStringEqual(TestResult& result, const Json::String& expected,
|
||||
const Json::String& actual, const char* file,
|
||||
unsigned int line, const char* expr) {
|
||||
if (expected != actual) {
|
||||
result.addFailure(file, line, expr);
|
||||
result << "Expected: '" << expected << "'\n";
|
||||
|
@@ -8,6 +8,7 @@
|
||||
|
||||
#include <cstdio>
|
||||
#include <deque>
|
||||
#include <iomanip>
|
||||
#include <json/config.h>
|
||||
#include <json/value.h>
|
||||
#include <json/writer.h>
|
||||
@@ -41,7 +42,7 @@ public:
|
||||
/// Must be a POD to allow inline initialisation without stepping
|
||||
/// into the debugger.
|
||||
struct PredicateContext {
|
||||
typedef unsigned int Id;
|
||||
using Id = unsigned int;
|
||||
Id id_;
|
||||
const char* file_;
|
||||
unsigned int line_;
|
||||
@@ -68,8 +69,8 @@ public:
|
||||
void setTestName(const Json::String& name);
|
||||
|
||||
/// Adds an assertion failure.
|
||||
TestResult&
|
||||
addFailure(const char* file, unsigned int line, const char* expr = nullptr);
|
||||
TestResult& addFailure(const char* file, unsigned int line,
|
||||
const char* expr = nullptr);
|
||||
|
||||
/// Removes the last PredicateContext added to the predicate stack
|
||||
/// chained list.
|
||||
@@ -83,9 +84,7 @@ public:
|
||||
// Generic operator that will work with anything ostream can deal with.
|
||||
template <typename T> TestResult& operator<<(const T& value) {
|
||||
Json::OStringStream oss;
|
||||
oss.precision(16);
|
||||
oss.setf(std::ios_base::floatfield);
|
||||
oss << value;
|
||||
oss << std::setprecision(16) << std::hexfloat << value;
|
||||
return addToLastFailure(oss.str());
|
||||
}
|
||||
|
||||
@@ -98,14 +97,12 @@ public:
|
||||
private:
|
||||
TestResult& addToLastFailure(const Json::String& message);
|
||||
/// Adds a failure or a predicate context
|
||||
void addFailureInfo(const char* file,
|
||||
unsigned int line,
|
||||
const char* expr,
|
||||
void addFailureInfo(const char* file, unsigned int line, const char* expr,
|
||||
unsigned int nestingLevel);
|
||||
static Json::String indentText(const Json::String& text,
|
||||
const Json::String& indent);
|
||||
|
||||
typedef std::deque<Failure> Failures;
|
||||
using Failures = std::deque<Failure>;
|
||||
Failures failures_;
|
||||
Json::String name_;
|
||||
PredicateContext rootPredicateNode_;
|
||||
@@ -132,7 +129,7 @@ private:
|
||||
};
|
||||
|
||||
/// Function pointer type for TestCase factory
|
||||
typedef TestCase* (*TestCaseFactory)();
|
||||
using TestCaseFactory = TestCase* (*)();
|
||||
|
||||
class Runner {
|
||||
public:
|
||||
@@ -171,17 +168,13 @@ private:
|
||||
static void preventDialogOnCrash();
|
||||
|
||||
private:
|
||||
typedef std::deque<TestCaseFactory> Factories;
|
||||
using Factories = std::deque<TestCaseFactory>;
|
||||
Factories tests_;
|
||||
};
|
||||
|
||||
template <typename T, typename U>
|
||||
TestResult& checkEqual(TestResult& result,
|
||||
T expected,
|
||||
U actual,
|
||||
const char* file,
|
||||
unsigned int line,
|
||||
const char* expr) {
|
||||
TestResult& checkEqual(TestResult& result, T expected, U actual,
|
||||
const char* file, unsigned int line, const char* expr) {
|
||||
if (static_cast<U>(expected) != actual) {
|
||||
result.addFailure(file, line, expr);
|
||||
result << "Expected: " << static_cast<U>(expected) << "\n";
|
||||
@@ -196,12 +189,9 @@ Json::String ToJsonString(Json::String in);
|
||||
Json::String ToJsonString(std::string in);
|
||||
#endif
|
||||
|
||||
TestResult& checkStringEqual(TestResult& result,
|
||||
const Json::String& expected,
|
||||
const Json::String& actual,
|
||||
const char* file,
|
||||
unsigned int line,
|
||||
const char* expr);
|
||||
TestResult& checkStringEqual(TestResult& result, const Json::String& expected,
|
||||
const Json::String& actual, const char* file,
|
||||
unsigned int line, const char* expr);
|
||||
|
||||
} // namespace JsonTest
|
||||
|
||||
@@ -217,7 +207,7 @@ TestResult& checkStringEqual(TestResult& result,
|
||||
/// The predicate may do other assertions and be a member function of the
|
||||
/// fixture.
|
||||
#define JSONTEST_ASSERT_PRED(expr) \
|
||||
{ \
|
||||
do { \
|
||||
JsonTest::PredicateContext _minitest_Context = { \
|
||||
result_->predicateId_, __FILE__, __LINE__, #expr, NULL, NULL}; \
|
||||
result_->predicateStackTail_->next_ = &_minitest_Context; \
|
||||
@@ -225,7 +215,7 @@ TestResult& checkStringEqual(TestResult& result,
|
||||
result_->predicateStackTail_ = &_minitest_Context; \
|
||||
(expr); \
|
||||
result_->popPredicateContext(); \
|
||||
}
|
||||
} while (0)
|
||||
|
||||
/// \brief Asserts that two values are equals.
|
||||
#define JSONTEST_ASSERT_EQUAL(expected, actual) \
|
||||
@@ -240,7 +230,7 @@ TestResult& checkStringEqual(TestResult& result,
|
||||
|
||||
/// \brief Asserts that a given expression throws an exception
|
||||
#define JSONTEST_ASSERT_THROWS(expr) \
|
||||
{ \
|
||||
do { \
|
||||
bool _threw = false; \
|
||||
try { \
|
||||
expr; \
|
||||
@@ -250,7 +240,7 @@ TestResult& checkStringEqual(TestResult& result,
|
||||
if (!_threw) \
|
||||
result_->addFailure(__FILE__, __LINE__, \
|
||||
"expected exception thrown: " #expr); \
|
||||
}
|
||||
} while (0)
|
||||
|
||||
/// \brief Begin a fixture test case.
|
||||
#define JSONTEST_FIXTURE(FixtureType, name) \
|
||||
@@ -273,4 +263,26 @@ TestResult& checkStringEqual(TestResult& result,
|
||||
#define JSONTEST_REGISTER_FIXTURE(runner, FixtureType, name) \
|
||||
(runner).add(JSONTEST_FIXTURE_FACTORY(FixtureType, name))
|
||||
|
||||
/// \brief Begin a fixture test case.
|
||||
#define JSONTEST_FIXTURE_V2(FixtureType, name, collections) \
|
||||
class Test##FixtureType##name : public FixtureType { \
|
||||
public: \
|
||||
static JsonTest::TestCase* factory() { \
|
||||
return new Test##FixtureType##name(); \
|
||||
} \
|
||||
static bool collect() { \
|
||||
(collections).push_back(JSONTEST_FIXTURE_FACTORY(FixtureType, name)); \
|
||||
return true; \
|
||||
} \
|
||||
\
|
||||
public: /* overridden from TestCase */ \
|
||||
const char* testName() const override { return #FixtureType "/" #name; } \
|
||||
void runTestCase() override; \
|
||||
}; \
|
||||
\
|
||||
static bool test##FixtureType##name##collect = \
|
||||
Test##FixtureType##name::collect(); \
|
||||
\
|
||||
void Test##FixtureType##name::runTestCase()
|
||||
|
||||
#endif // ifndef JSONTEST_H_INCLUDED
|
||||
|
File diff suppressed because it is too large
Load Diff
1
test/data/fail_invalid_quote.json
Normal file
1
test/data/fail_invalid_quote.json
Normal file
@@ -0,0 +1 @@
|
||||
{'//this is bad JSON.'}
|
1
test/data/fail_test_array_02.json
Normal file
1
test/data/fail_test_array_02.json
Normal file
@@ -0,0 +1 @@
|
||||
[1,,]
|
1
test/data/fail_test_object_01.json
Normal file
1
test/data/fail_test_object_01.json
Normal file
@@ -0,0 +1 @@
|
||||
{ "count" : 1234,, }
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user