Merge branch 'master' into performance-optimization

qiwei 2024-10-06 12:21:34 +08:00 committed by GitHub
commit 8eff94f5a9
45 changed files with 550 additions and 854 deletions


@@ -1,4 +1,4 @@
BasedOnStyle: LLVM
DerivePointerAlignment: false
PointerAlignment: Left
SpacesBeforeTrailingComments: 1

.github/workflows/clang-format.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
name: clang-format check
on: [check_run, pull_request, push]
jobs:
formatting-check:
name: formatting check
runs-on: ubuntu-latest
strategy:
matrix:
path:
- 'src'
- 'examples'
- 'include'
steps:
- uses: actions/checkout@v4
- name: runs clang-format style check for C/C++/Protobuf programs.
uses: jidicula/clang-format-action@v4.13.0
with:
clang-format-version: '18'
check-path: ${{ matrix.path }}

.github/workflows/cmake.yml (new file, 18 lines)

@@ -0,0 +1,18 @@
name: cmake
on: [check_run, push, pull_request]
jobs:
cmake-publish:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
steps:
- name: checkout project
uses: actions/checkout@v4
- name: build project
uses: threeal/cmake-action@v2.0.0

.github/workflows/meson.yml (new file, 65 lines)

@@ -0,0 +1,65 @@
name: meson build and test
run-name: update pushed to ${{ github.ref }}
on: [check_run, push, pull_request]
jobs:
meson-publish:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
steps:
- name: checkout repository
uses: actions/checkout@v4
- name: setup python
uses: actions/setup-python@v5
- name: meson build
uses: BSFishy/meson-build@v1.0.3
with:
meson-version: 1.5.1
ninja-version: 1.11.1.1
action: build
- name: meson test
uses: BSFishy/meson-build@v1.0.3
with:
meson-version: 1.5.1
ninja-version: 1.11.1.1
action: test
meson-coverage:
runs-on: ubuntu-latest
steps:
- name: checkout repository
uses: actions/checkout@v4
- name: setup python
uses: actions/setup-python@v5
- name: meson build
uses: BSFishy/meson-build@v1.0.3
with:
meson-version: 1.5.1
ninja-version: 1.11.1.1
setup-options: -Db_coverage=true
action: build
- name: meson test
uses: BSFishy/meson-build@v1.0.3
with:
meson-version: 1.5.1
ninja-version: 1.11.1.1
setup-options: -Db_coverage=true
action: test
- name: generate code coverage report
uses: threeal/gcovr-action@v1.0.0
with:
coveralls-send: true
github-token: ${{ secrets.GITHUB_TOKEN }}


@@ -1,71 +0,0 @@
# Build matrix / environment variables are explained on:
# http://about.travis-ci.com/docs/user/build-configuration/
# This file can be validated on: http://www.yamllint.com/
# Or using the Ruby based travel command line tool:
# gem install travis --no-rdoc --no-ri
# travis lint .travis.yml
language: cpp
sudo: false
addons:
homebrew:
packages:
- clang-format
- meson
- ninja
update: false # do not update homebrew by default
apt:
sources:
- ubuntu-toolchain-r-test
- llvm-toolchain-xenial-8
packages:
- clang-format-8
- clang-8
- valgrind
matrix:
include:
- name: Mac clang meson static release testing
os: osx
osx_image: xcode11
compiler: clang
env:
CXX="clang++"
CC="clang"
LIB_TYPE=static
BUILD_TYPE=release
script: ./.travis_scripts/meson_builder.sh
- name: Linux xenial clang meson static release testing
os: linux
dist: xenial
compiler: clang
env:
CXX="clang++"
CC="clang"
LIB_TYPE=static
BUILD_TYPE=release
PYTHONUSERBASE="$(pwd)/LOCAL"
PATH="$PYTHONUSERBASE/bin:$PATH"
# before_install and install steps only needed for linux meson builds
before_install:
- source ./.travis_scripts/travis.before_install.${TRAVIS_OS_NAME}.sh
install:
- source ./.travis_scripts/travis.install.${TRAVIS_OS_NAME}.sh
script: ./.travis_scripts/meson_builder.sh
- name: Linux xenial gcc cmake coverage
os: linux
dist: xenial
compiler: gcc
env:
CXX=g++
CC=gcc
DO_Coverage=ON
BUILD_TOOL="Unix Makefiles"
BUILD_TYPE=Debug
LIB_TYPE=shared
DESTDIR=/tmp/cmake_json_cpp
before_install:
- pip install --user cpp-coveralls
script: ./.travis_scripts/cmake_builder.sh
after_success:
- coveralls --include src/lib_json --include include
notifications:
email: false


@@ -1,130 +0,0 @@
#!/usr/bin/env sh
# This script can be used on the command line directly to configure several
# different build environments.
# This is called by `.travis.yml` via Travis CI.
# Travis supplies $TRAVIS_OS_NAME.
# http://docs.travis-ci.com/user/multi-os/
# Our .travis.yml also defines:
# - BUILD_TYPE=Release/Debug
# - LIB_TYPE=static/shared
#
# Optional environmental variables
# - DESTDIR <- used for setting the install prefix
# - BUILD_TOOL=["Unix Makefile"|"Ninja"]
# - BUILDNAME <- how to identify this build on the dashboard
# - DO_MemCheck <- if set, try to use valgrind
# - DO_Coverage <- if set, try to do dashboard coverage testing
#
env_set=1
if ${BUILD_TYPE+false}; then
echo "BUILD_TYPE not set in environment."
env_set=0
fi
if ${LIB_TYPE+false}; then
echo "LIB_TYPE not set in environment."
env_set=0
fi
if ${CXX+false}; then
echo "CXX not set in environment."
env_set=0
fi
if [ ${env_set} -eq 0 ]; then
echo "USAGE: CXX=$(which clang++) BUILD_TYPE=[Release|Debug] LIB_TYPE=[static|shared] $0"
echo ""
echo "Examples:"
echo " CXX=$(which clang++) BUILD_TYPE=Release LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
echo " CXX=$(which clang++) BUILD_TYPE=Debug LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
echo " CXX=$(which clang++) BUILD_TYPE=Release LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
echo " CXX=$(which clang++) BUILD_TYPE=Debug LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=Release LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=Debug LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=Release LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=Debug LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
exit -1
fi
if ${DESTDIR+false}; then
DESTDIR="/usr/local"
fi
# -e: fail on error
# -v: show commands
# -x: show expanded commands
set -vex
env | sort
which cmake
cmake --version
echo ${CXX}
${CXX} --version
_COMPILER_NAME=`basename ${CXX}`
if [ "${LIB_TYPE}" = "shared" ]; then
_CMAKE_BUILD_SHARED_LIBS=ON
else
_CMAKE_BUILD_SHARED_LIBS=OFF
fi
CTEST_TESTING_OPTION="-D ExperimentalTest"
# - DO_MemCheck <- if set, try to use valgrind
if ! ${DO_MemCheck+false}; then
valgrind --version
CTEST_TESTING_OPTION="-D ExperimentalMemCheck"
else
# - DO_Coverage <- if set, try to do dashboard coverage testing
if ! ${DO_Coverage+false}; then
export CXXFLAGS="-fprofile-arcs -ftest-coverage"
export LDFLAGS="-fprofile-arcs -ftest-coverage"
CTEST_TESTING_OPTION="-D ExperimentalTest -D ExperimentalCoverage"
#gcov --version
fi
fi
# Ninja = Generates build.ninja files.
if ${BUILD_TOOL+false}; then
BUILD_TOOL="Ninja"
export _BUILD_EXE=ninja
which ninja
ninja --version
else
# Unix Makefiles = Generates standard UNIX makefiles.
export _BUILD_EXE=make
fi
_BUILD_DIR_NAME="build-cmake_${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}_${_BUILD_EXE}"
mkdir -p ${_BUILD_DIR_NAME}
cd "${_BUILD_DIR_NAME}"
if ${BUILDNAME+false}; then
_HOSTNAME=`hostname -s`
BUILDNAME="${_HOSTNAME}_${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}_${_BUILD_EXE}"
fi
cmake \
-G "${BUILD_TOOL}" \
-DBUILDNAME:STRING="${BUILDNAME}" \
-DCMAKE_CXX_COMPILER:PATH=${CXX} \
-DCMAKE_BUILD_TYPE:STRING=${BUILD_TYPE} \
-DBUILD_SHARED_LIBS:BOOL=${_CMAKE_BUILD_SHARED_LIBS} \
-DCMAKE_INSTALL_PREFIX:PATH=${DESTDIR} \
../
ctest -C ${BUILD_TYPE} -D ExperimentalStart -D ExperimentalConfigure -D ExperimentalBuild ${CTEST_TESTING_OPTION} -D ExperimentalSubmit
# Final step is to verify that installation succeeds
cmake --build . --config ${BUILD_TYPE} --target install
if [ "${DESTDIR}" != "/usr/local" ]; then
${_BUILD_EXE} install
fi
cd -
if ${CLEANUP+false}; then
echo "Skipping cleanup: build directory will persist."
else
rm -r "${_BUILD_DIR_NAME}"
fi


@@ -1,83 +0,0 @@
#!/usr/bin/env sh
# This script can be used on the command line directly to configure several
# different build environments.
# This is called by `.travis.yml` via Travis CI.
# Travis supplies $TRAVIS_OS_NAME.
# http://docs.travis-ci.com/user/multi-os/
# Our .travis.yml also defines:
# - BUILD_TYPE=release/debug
# - LIB_TYPE=static/shared
env_set=1
if ${BUILD_TYPE+false}; then
echo "BUILD_TYPE not set in environment."
env_set=0
fi
if ${LIB_TYPE+false}; then
echo "LIB_TYPE not set in environment."
env_set=0
fi
if ${CXX+false}; then
echo "CXX not set in environment."
env_set=0
fi
if [ ${env_set} -eq 0 ]; then
echo "USAGE: CXX=$(which clang++) BUILD_TYPE=[release|debug] LIB_TYPE=[static|shared] $0"
echo ""
echo "Examples:"
echo " CXX=$(which clang++) BUILD_TYPE=release LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
echo " CXX=$(which clang++) BUILD_TYPE=debug LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
echo " CXX=$(which clang++) BUILD_TYPE=release LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
echo " CXX=$(which clang++) BUILD_TYPE=debug LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=release LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=debug LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=release LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
echo " CXX=$(which g++) BUILD_TYPE=debug LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
exit -1
fi
if ${DESTDIR+false}; then
DESTDIR="/usr/local"
fi
# -e: fail on error
# -v: show commands
# -x: show expanded commands
set -vex
env | sort
which python3
which meson
which ninja
echo ${CXX}
${CXX} --version
python3 --version
meson --version
ninja --version
_COMPILER_NAME=`basename ${CXX}`
_BUILD_DIR_NAME="build-${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}"
#./.travis_scripts/run-clang-format.sh
meson --fatal-meson-warnings --werror --buildtype ${BUILD_TYPE} --default-library ${LIB_TYPE} . "${_BUILD_DIR_NAME}"
ninja -v -j 2 -C "${_BUILD_DIR_NAME}"
cd "${_BUILD_DIR_NAME}"
meson test --no-rebuild --print-errorlogs
if [ "${DESTDIR}" != "/usr/local" ]; then
ninja install
fi
cd -
if ${CLEANUP+false}; then
echo "Skipping cleanup: build directory will persist."
else
rm -r "${_BUILD_DIR_NAME}"
fi


@@ -1,356 +0,0 @@
#!/usr/bin/env python
"""A wrapper script around clang-format, suitable for linting multiple files
and to use for continuous integration.
This is an alternative API for the clang-format command line.
It runs over multiple files and directories in parallel.
A diff output is produced and a sensible exit code is returned.
NOTE: pulled from https://github.com/Sarcasm/run-clang-format, which is
licensed under the MIT license.
"""
from __future__ import print_function, unicode_literals
import argparse
import codecs
import difflib
import fnmatch
import io
import multiprocessing
import os
import signal
import subprocess
import sys
import traceback
from functools import partial
try:
from subprocess import DEVNULL # py3k
except ImportError:
DEVNULL = open(os.devnull, "wb")
DEFAULT_EXTENSIONS = 'c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx'
class ExitStatus:
SUCCESS = 0
DIFF = 1
TROUBLE = 2
def list_files(files, recursive=False, extensions=None, exclude=None):
if extensions is None:
extensions = []
if exclude is None:
exclude = []
out = []
for file in files:
if recursive and os.path.isdir(file):
for dirpath, dnames, fnames in os.walk(file):
fpaths = [os.path.join(dirpath, fname) for fname in fnames]
for pattern in exclude:
# os.walk() supports trimming down the dnames list
# by modifying it in-place,
# to avoid unnecessary directory listings.
dnames[:] = [
x for x in dnames
if
not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)
]
fpaths = [
x for x in fpaths if not fnmatch.fnmatch(x, pattern)
]
for f in fpaths:
ext = os.path.splitext(f)[1][1:]
if ext in extensions:
out.append(f)
else:
out.append(file)
return out
def make_diff(file, original, reformatted):
return list(
difflib.unified_diff(
original,
reformatted,
fromfile='{}\t(original)'.format(file),
tofile='{}\t(reformatted)'.format(file),
n=3))
class DiffError(Exception):
def __init__(self, message, errs=None):
super(DiffError, self).__init__(message)
self.errs = errs or []
class UnexpectedError(Exception):
def __init__(self, message, exc=None):
super(UnexpectedError, self).__init__(message)
self.formatted_traceback = traceback.format_exc()
self.exc = exc
def run_clang_format_diff_wrapper(args, file):
try:
ret = run_clang_format_diff(args, file)
return ret
except DiffError:
raise
except Exception as e:
raise UnexpectedError('{}: {}: {}'.format(file, e.__class__.__name__,
e), e)
def run_clang_format_diff(args, file):
try:
with io.open(file, 'r', encoding='utf-8') as f:
original = f.readlines()
except IOError as exc:
raise DiffError(str(exc))
invocation = [args.clang_format_executable, file]
# Use of utf-8 to decode the process output.
#
# Hopefully, this is the correct thing to do.
#
# It's done due to the following assumptions (which may be incorrect):
# - clang-format will returns the bytes read from the files as-is,
# without conversion, and it is already assumed that the files use utf-8.
# - if the diagnostics were internationalized, they would use utf-8:
# > Adding Translations to Clang
# >
# > Not possible yet!
# > Diagnostic strings should be written in UTF-8,
# > the client can translate to the relevant code page if needed.
# > Each translation completely replaces the format string
# > for the diagnostic.
# > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation
#
# It's not pretty, due to Python 2 & 3 compatibility.
encoding_py3 = {}
if sys.version_info[0] >= 3:
encoding_py3['encoding'] = 'utf-8'
try:
proc = subprocess.Popen(
invocation,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=True,
**encoding_py3)
except OSError as exc:
raise DiffError(
"Command '{}' failed to start: {}".format(
subprocess.list2cmdline(invocation), exc
)
)
proc_stdout = proc.stdout
proc_stderr = proc.stderr
if sys.version_info[0] < 3:
# make the pipes compatible with Python 3,
# reading lines should output unicode
encoding = 'utf-8'
proc_stdout = codecs.getreader(encoding)(proc_stdout)
proc_stderr = codecs.getreader(encoding)(proc_stderr)
# hopefully the stderr pipe won't get full and block the process
outs = list(proc_stdout.readlines())
errs = list(proc_stderr.readlines())
proc.wait()
if proc.returncode:
raise DiffError(
"Command '{}' returned non-zero exit status {}".format(
subprocess.list2cmdline(invocation), proc.returncode
),
errs,
)
return make_diff(file, original, outs), errs
def bold_red(s):
return '\x1b[1m\x1b[31m' + s + '\x1b[0m'
def colorize(diff_lines):
def bold(s):
return '\x1b[1m' + s + '\x1b[0m'
def cyan(s):
return '\x1b[36m' + s + '\x1b[0m'
def green(s):
return '\x1b[32m' + s + '\x1b[0m'
def red(s):
return '\x1b[31m' + s + '\x1b[0m'
for line in diff_lines:
if line[:4] in ['--- ', '+++ ']:
yield bold(line)
elif line.startswith('@@ '):
yield cyan(line)
elif line.startswith('+'):
yield green(line)
elif line.startswith('-'):
yield red(line)
else:
yield line
def print_diff(diff_lines, use_color):
if use_color:
diff_lines = colorize(diff_lines)
if sys.version_info[0] < 3:
sys.stdout.writelines((l.encode('utf-8') for l in diff_lines))
else:
sys.stdout.writelines(diff_lines)
def print_trouble(prog, message, use_colors):
error_text = 'error:'
if use_colors:
error_text = bold_red(error_text)
print("{}: {} {}".format(prog, error_text, message), file=sys.stderr)
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
'--clang-format-executable',
metavar='EXECUTABLE',
help='path to the clang-format executable',
default='clang-format')
parser.add_argument(
'--extensions',
help='comma separated list of file extensions (default: {})'.format(
DEFAULT_EXTENSIONS),
default=DEFAULT_EXTENSIONS)
parser.add_argument(
'-r',
'--recursive',
action='store_true',
help='run recursively over directories')
parser.add_argument('files', metavar='file', nargs='+')
parser.add_argument(
'-q',
'--quiet',
action='store_true')
parser.add_argument(
'-j',
metavar='N',
type=int,
default=0,
help='run N clang-format jobs in parallel'
' (default number of cpus + 1)')
parser.add_argument(
'--color',
default='auto',
choices=['auto', 'always', 'never'],
help='show colored diff (default: auto)')
parser.add_argument(
'-e',
'--exclude',
metavar='PATTERN',
action='append',
default=[],
help='exclude paths matching the given glob-like pattern(s)'
' from recursive search')
args = parser.parse_args()
# use default signal handling, like diff return SIGINT value on ^C
# https://bugs.python.org/issue14229#msg156446
signal.signal(signal.SIGINT, signal.SIG_DFL)
try:
signal.SIGPIPE
except AttributeError:
# compatibility, SIGPIPE does not exist on Windows
pass
else:
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
colored_stdout = False
colored_stderr = False
if args.color == 'always':
colored_stdout = True
colored_stderr = True
elif args.color == 'auto':
colored_stdout = sys.stdout.isatty()
colored_stderr = sys.stderr.isatty()
version_invocation = [args.clang_format_executable, str("--version")]
try:
subprocess.check_call(version_invocation, stdout=DEVNULL)
except subprocess.CalledProcessError as e:
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
return ExitStatus.TROUBLE
except OSError as e:
print_trouble(
parser.prog,
"Command '{}' failed to start: {}".format(
subprocess.list2cmdline(version_invocation), e
),
use_colors=colored_stderr,
)
return ExitStatus.TROUBLE
retcode = ExitStatus.SUCCESS
files = list_files(
args.files,
recursive=args.recursive,
exclude=args.exclude,
extensions=args.extensions.split(','))
if not files:
return
njobs = args.j
if njobs == 0:
njobs = multiprocessing.cpu_count() + 1
njobs = min(len(files), njobs)
if njobs == 1:
# execute directly instead of in a pool,
# less overhead, simpler stacktraces
it = (run_clang_format_diff_wrapper(args, file) for file in files)
pool = None
else:
pool = multiprocessing.Pool(njobs)
it = pool.imap_unordered(
partial(run_clang_format_diff_wrapper, args), files)
while True:
try:
outs, errs = next(it)
except StopIteration:
break
except DiffError as e:
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
retcode = ExitStatus.TROUBLE
sys.stderr.writelines(e.errs)
except UnexpectedError as e:
print_trouble(parser.prog, str(e), use_colors=colored_stderr)
sys.stderr.write(e.formatted_traceback)
retcode = ExitStatus.TROUBLE
# stop at the first unexpected error,
# something could be very wrong,
# don't process all files unnecessarily
if pool:
pool.terminate()
break
else:
sys.stderr.writelines(errs)
if outs == []:
continue
if not args.quiet:
print_diff(outs, use_color=colored_stdout)
if retcode == ExitStatus.SUCCESS:
retcode = ExitStatus.DIFF
return retcode
if __name__ == '__main__':
sys.exit(main())


@@ -1,4 +0,0 @@
#!/usr/bin/env bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
python $DIR/run-clang-format.py -r $DIR/../src/**/ $DIR/../include/**/


@@ -1,8 +0,0 @@
set -vex
# Preinstalled versions of python are dependent on which Ubuntu distribution
# you are running. The below version needs to be updated whenever we roll
# the Ubuntu version used in Travis.
# https://docs.travis-ci.com/user/languages/python/
pyenv global 3.7.1


@@ -1,5 +0,0 @@
set -vex
pip3 install --user meson ninja
which meson
which ninja


@@ -1 +0,0 @@
# NOTHING TO DO HERE


@@ -6,7 +6,7 @@
# policies that provide successful builds. By setting JSONCPP_NEWEST_VALIDATED_POLICIES_VERSION
# to a value greater than the oldest policies, all policies between
# JSONCPP_OLDEST_VALIDATED_POLICIES_VERSION and CMAKE_VERSION (used for this build)
-# are set to their NEW behaivor, thereby suppressing policy warnings related to policies
+# are set to their NEW behavior, thereby suppressing policy warnings related to policies
# between the JSONCPP_OLDEST_VALIDATED_POLICIES_VERSION and CMAKE_VERSION.
#
# CMake versions greater than the JSONCPP_NEWEST_VALIDATED_POLICIES_VERSION policies will
@@ -54,16 +54,6 @@ endif()
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
-# ---------------------------------------------------------------------------
-# use ccache if found, has to be done before project()
-# ---------------------------------------------------------------------------
-find_program(CCACHE_EXECUTABLE "ccache" HINTS /usr/local/bin /opt/local/bin)
-if(CCACHE_EXECUTABLE)
-message(STATUS "use ccache")
-set(CMAKE_CXX_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE PATH "ccache" FORCE)
-set(CMAKE_C_COMPILER_LAUNCHER "${CCACHE_EXECUTABLE}" CACHE PATH "ccache" FORCE)
-endif()
project(jsoncpp
# Note: version must be updated in three places when doing a release. This
# annoying process ensures that amalgamate, CMake, and meson all report the
@@ -72,11 +62,11 @@ project(jsoncpp
# 2. ./include/json/version.h
# 3. ./CMakeLists.txt
# IMPORTANT: also update the PROJECT_SOVERSION!!
-VERSION 1.9.5 # <major>[.<minor>[.<patch>[.<tweak>]]]
+VERSION 1.9.7 # <major>[.<minor>[.<patch>[.<tweak>]]]
LANGUAGES CXX)
message(STATUS "JsonCpp Version: ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}")
-set(PROJECT_SOVERSION 25)
+set(PROJECT_SOVERSION 27)
include(${CMAKE_CURRENT_SOURCE_DIR}/include/PreventInSourceBuilds.cmake)
include(${CMAKE_CURRENT_SOURCE_DIR}/include/PreventInBuildInstalls.cmake)
@@ -96,12 +86,22 @@ option(BUILD_OBJECT_LIBS "Build jsoncpp_lib as a object library." ON)
# Adhere to GNU filesystem layout conventions
include(GNUInstallDirs)
-set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib" CACHE PATH "Archive output dir.")
-set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib" CACHE PATH "Library output dir.")
-set(CMAKE_PDB_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" CACHE PATH "PDB (MSVC debug symbol)output dir.")
-set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" CACHE PATH "Executable/dll output dir.")
-set(JSONCPP_USE_SECURE_MEMORY "0" CACHE STRING "-D...=1 to use memory-wiping allocator for STL")
+if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
+set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib" CACHE PATH "Archive output dir.")
+set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib" CACHE PATH "Library output dir.")
+set(CMAKE_PDB_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" CACHE PATH "PDB (MSVC debug symbol)output dir.")
+set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin" CACHE PATH "Executable/dll output dir.")
+endif()
+include(CheckFunctionExists)
+check_function_exists(memset_s HAVE_MEMSET_S)
+if(HAVE_MEMSET_S)
+add_definitions("-DHAVE_MEMSET_S=1")
+endif()
+if(JSONCPP_USE_SECURE_MEMORY)
+add_definitions("-DJSONCPP_USE_SECURE_MEMORY=1")
+endif()
configure_file("${PROJECT_SOURCE_DIR}/version.in"
"${PROJECT_BINARY_DIR}/version"


@@ -77,7 +77,7 @@ See `doxybuild.py --help` for options.
To add a test, you need to create two files in test/data:
* a `TESTNAME.json` file, that contains the input document in JSON format.
-* a `TESTNAME.expected` file, that contains a flatened representation of the
+* a `TESTNAME.expected` file, that contains a flattened representation of the
input document.
The `TESTNAME.expected` file format is as follows:

SECURITY.md (new file, 17 lines)

@ -0,0 +1,17 @@
# Security Policy
If you have discovered a security vulnerability in this project, please report it
privately. **Do not disclose it as a public issue.** This gives us time to work with you
to fix the issue before public exposure, reducing the chance that the exploit will be
used before a patch is released.
Please submit the report by filling out
[this form](https://github.com/open-source-parsers/jsoncpp/security/advisories/new).
Please provide the following information in your report:
- A description of the vulnerability and its impact
- How to reproduce the issue
This project is maintained by volunteers on a reasonable-effort basis. As such,
we ask that you give us 90 days to work on a fix before public exposure.


@@ -63,7 +63,7 @@ def amalgamate_source(source_top_dir=None,
"""
print("Amalgamating header...")
header = AmalgamationFile(source_top_dir)
-header.add_text("/// Json-cpp amalgamated header (http://jsoncpp.sourceforge.net/).")
+header.add_text("/// Json-cpp amalgamated header (https://github.com/open-source-parsers/jsoncpp/).")
header.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
header.add_file("LICENSE", wrap_in_comment=True)
header.add_text("#ifndef JSON_AMALGAMATED_H_INCLUDED")
@@ -90,7 +90,7 @@ def amalgamate_source(source_top_dir=None,
forward_header_include_path = base + "-forwards" + ext
print("Amalgamating forward header...")
header = AmalgamationFile(source_top_dir)
-header.add_text("/// Json-cpp amalgamated forward header (http://jsoncpp.sourceforge.net/).")
+header.add_text("/// Json-cpp amalgamated forward header (https://github.com/open-source-parsers/jsoncpp/).")
header.add_text('/// It is intended to be used with #include "%s"' % forward_header_include_path)
header.add_text("/// This header provides forward declaration for all JsonCpp types.")
header.add_file("LICENSE", wrap_in_comment=True)
@@ -112,7 +112,7 @@ def amalgamate_source(source_top_dir=None,
print("Amalgamating source...")
source = AmalgamationFile(source_top_dir)
-source.add_text("/// Json-cpp amalgamated source (http://jsoncpp.sourceforge.net/).")
+source.add_text("/// Json-cpp amalgamated source (https://github.com/open-source-parsers/jsoncpp/).")
source.add_text('/// It is intended to be used with #include "%s"' % header_include_path)
source.add_file("LICENSE", wrap_in_comment=True)
source.add_text("")


@@ -25,7 +25,7 @@ int main() {
const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
if (!reader->parse(rawJson.c_str(), rawJson.c_str() + rawJsonLength, &root,
&err)) {
-std::cout << "error" << std::endl;
+std::cout << "error: " << err << std::endl;
return EXIT_FAILURE;
}
}
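
For context, a minimal self-contained sketch of the CharReaderBuilder flow this example exercises, with the parse error now surfaced on failure (the sample document and field names are illustrative only):

    #include <json/json.h>
    #include <cstdlib>
    #include <iostream>
    #include <memory>
    #include <string>

    int main() {
      const std::string rawJson = R"({"Age": 20, "Name": "colin"})";
      Json::Value root;
      Json::CharReaderBuilder builder;
      const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
      Json::String err;
      if (!reader->parse(rawJson.c_str(), rawJson.c_str() + rawJson.length(),
                         &root, &err)) {
        // The change above prints the reason for the failure, not just "error".
        std::cout << "error: " << err << std::endl;
        return EXIT_FAILURE;
      }
      std::cout << root["Name"].asString() << std::endl;
      return EXIT_SUCCESS;
    }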


@@ -2,8 +2,8 @@
# This function will prevent in-source builds
function(AssureOutOfSourceBuilds)
# make sure the user doesn't play dirty with symlinks
-get_filename_component(srcdir "${CMAKE_SOURCE_DIR}" REALPATH)
-get_filename_component(bindir "${CMAKE_BINARY_DIR}" REALPATH)
+get_filename_component(srcdir "${CMAKE_CURRENT_SOURCE_DIR}" REALPATH)
+get_filename_component(bindir "${CMAKE_CURRENT_BINARY_DIR}" REALPATH)
# disallow in-source builds
if("${srcdir}" STREQUAL "${bindir}")


@@ -6,10 +6,12 @@
#ifndef JSON_ALLOCATOR_H_INCLUDED
#define JSON_ALLOCATOR_H_INCLUDED
+#include <algorithm>
#include <cstring>
#include <memory>
-#pragma pack(push, 8)
+#pragma pack(push)
+#pragma pack()
namespace Json {
template <typename T> class SecureAllocator {
@@ -37,8 +39,16 @@ public:
* The memory block is filled with zeroes before being released.
*/
void deallocate(pointer p, size_type n) {
-// memset_s is used because memset may be optimized away by the compiler
+// These constructs will not be removed by the compiler during optimization,
+// unlike memset.
+#if defined(HAVE_MEMSET_S)
memset_s(p, n * sizeof(T), 0, n * sizeof(T));
+#elif defined(_WIN32)
+RtlSecureZeroMemory(p, n * sizeof(T));
+#else
+std::fill_n(reinterpret_cast<volatile unsigned char*>(p), n, 0);
+#endif
// free using "global operator delete"
::operator delete(p);
}
@@ -68,7 +78,9 @@ public:
// Boilerplate
SecureAllocator() {}
template <typename U> SecureAllocator(const SecureAllocator<U>&) {}
-template <typename U> struct rebind { using other = SecureAllocator<U>; };
+template <typename U> struct rebind {
+using other = SecureAllocator<U>;
+};
};
template <typename T, typename U>
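
A standalone sketch of the zeroing cascade the new deallocate() relies on, assuming the same HAVE_MEMSET_S / _WIN32 gates as the hunk above (the secure_zero helper is hypothetical, not jsoncpp API):

    #include <algorithm>
    #include <cstddef>
    #include <cstring>
    #ifdef _WIN32
    #include <windows.h>
    #endif

    // Same fallback chain as SecureAllocator::deallocate: prefer memset_s where
    // the build defines HAVE_MEMSET_S, use RtlSecureZeroMemory on Windows, and
    // otherwise fall back to a volatile fill that the optimizer may not elide
    // the way it can elide a plain memset right before the memory is freed.
    static void secure_zero(void* p, std::size_t bytes) {
    #if defined(HAVE_MEMSET_S)
      memset_s(p, bytes, 0, bytes);
    #elif defined(_WIN32)
      RtlSecureZeroMemory(p, bytes);
    #else
      std::fill_n(reinterpret_cast<volatile unsigned char*>(p), bytes, 0);
    #endif
    }

    int main() {
      char secret[32] = "wipe me before release";
      secure_zero(secret, sizeof(secret));
      return 0;
    }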


@@ -127,7 +127,7 @@ using LargestUInt = UInt64;
template <typename T>
using Allocator =
-typename std::conditional<JSONCPP_USING_SECURE_MEMORY, SecureAllocator<T>,
+typename std::conditional<JSONCPP_USE_SECURE_MEMORY, SecureAllocator<T>,
std::allocator<T>>::type;
using String = std::basic_string<char, std::char_traits<char>, Allocator<char>>;
using IStringStream =


@@ -10,7 +10,8 @@
#include "forwards.h"
#endif // if !defined(JSON_IS_AMALGAMATION)
-#pragma pack(push, 8)
+#pragma pack(push)
+#pragma pack()
namespace Json {


@@ -23,7 +23,8 @@
#pragma warning(disable : 4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
-#pragma pack(push, 8)
+#pragma pack(push)
+#pragma pack()
namespace Json {
@@ -50,12 +51,12 @@ public:
};
/** \brief Constructs a Reader allowing all features for parsing.
* \deprecated Use CharReader and CharReaderBuilder.
*/
Reader();
/** \brief Constructs a Reader allowing the specified feature set for parsing.
* \deprecated Use CharReader and CharReaderBuilder.
*/
Reader(const Features& features);
@@ -189,6 +190,7 @@ private:
using Errors = std::deque<ErrorInfo>;
bool readToken(Token& token);
+bool readTokenSkippingComments(Token& token);
void skipSpaces();
bool match(const Char* pattern, int patternLength);
bool readComment();
@@ -220,7 +222,6 @@ private:
int& column) const;
String getLocationLineAndColumn(Location location) const;
void addComment(Location begin, Location end, CommentPlacement placement);
-void skipCommentTokens(Token& token);
static bool containsNewLine(Location begin, Location end);
static String normalizeEOL(Location begin, Location end);
@@ -243,6 +244,12 @@ private:
*/
class JSON_API CharReader {
public:
+struct JSON_API StructuredError {
+ptrdiff_t offset_start;
+ptrdiff_t offset_limit;
+String message;
+};
virtual ~CharReader() = default;
/** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
* document. The document must be a UTF-8 encoded string containing the
@@ -261,7 +268,12 @@ public:
* error occurred.
*/
virtual bool parse(char const* beginDoc, char const* endDoc, Value* root,
-String* errs) = 0;
+String* errs);
+/** \brief Returns a vector of structured errors encountered while parsing.
+* Each parse call resets the stored list of errors.
+*/
+std::vector<StructuredError> getStructuredErrors() const;
class JSON_API Factory {
public:
@@ -271,7 +283,21 @@ public:
*/
virtual CharReader* newCharReader() const = 0;
}; // Factory
-}; // CharReader
+protected:
+class Impl {
+public:
+virtual ~Impl() = default;
+virtual bool parse(char const* beginDoc, char const* endDoc, Value* root,
+String* errs) = 0;
+virtual std::vector<StructuredError> getStructuredErrors() const = 0;
+};
+explicit CharReader(std::unique_ptr<Impl> impl) : _impl(std::move(impl)) {}
+private:
+std::unique_ptr<Impl> _impl;
+}; // CharReader
/** \brief Build a CharReader implementation.
*
@@ -359,6 +385,12 @@ public:
* \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
*/
static void strictMode(Json::Value* settings);
+/** ECMA-404 mode.
+* \pre 'settings' != NULL (but Json::null is fine)
+* \remark Defaults:
+* \snippet src/lib_json/json_reader.cpp CharReaderBuilderECMA404Mode
+*/
+static void ecma404Mode(Json::Value* settings);
};
/** Consume entire stream and use its begin/end.
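
A short usage sketch of the StructuredError interface that CharReader now exposes directly (illustrative, assuming the amalgamated <json/json.h> header and a deliberately malformed document):

    #include <json/json.h>
    #include <iostream>
    #include <memory>
    #include <string>

    int main() {
      const std::string doc = "{ \"a\": 1, \"b\": }"; // malformed on purpose
      Json::CharReaderBuilder builder;
      const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
      Json::Value root;
      Json::String errs;
      if (!reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs)) {
        // offset_start/offset_limit are byte positions into the input, so a
        // caller can point at the exact span that failed to parse.
        for (const auto& e : reader->getStructuredErrors())
          std::cout << "[" << e.offset_start << ", " << e.offset_limit
                    << "): " << e.message << "\n";
      }
      return 0;
    }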


@@ -3,8 +3,8 @@
// recognized in your jurisdiction.
// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-#ifndef JSON_H_INCLUDED
-#define JSON_H_INCLUDED
+#ifndef JSON_VALUE_H_INCLUDED
+#define JSON_VALUE_H_INCLUDED
#if !defined(JSON_IS_AMALGAMATION)
#include "forwards.h"
@@ -53,7 +53,8 @@
#pragma warning(disable : 4251 4275)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
-#pragma pack(push, 8)
+#pragma pack(push)
+#pragma pack()
/** \brief JSON (JavaScript Object Notation).
*/
@@ -374,7 +375,7 @@ public:
int compare(const Value& other) const;
const char* asCString() const; ///< Embedded zeroes could cause you trouble!
-#if JSONCPP_USING_SECURE_MEMORY
+#if JSONCPP_USE_SECURE_MEMORY
unsigned getCStringLength() const; // Allows you to understand the length of
// the CString
#endif
@@ -436,7 +437,7 @@ public:
/// \post type() is arrayValue
void resize(ArrayIndex newSize);
-//@{
+///@{
/// Access an array element (zero based index). If the array contains less
/// than index element, then null value are inserted in the array so that
/// its size is index+1.
@@ -444,15 +445,15 @@
/// this from the operator[] which takes a string.)
Value& operator[](ArrayIndex index);
Value& operator[](int index);
-//@}
+///@}
-//@{
+///@{
/// Access an array element (zero based index).
/// (You may need to say 'value[0u]' to get your compiler to distinguish
/// this from the operator[] which takes a string.)
const Value& operator[](ArrayIndex index) const;
const Value& operator[](int index) const;
-//@}
+///@}
/// If the array contains at least index+1 elements, returns the element
/// value, otherwise returns defaultValue.
@@ -512,6 +513,9 @@ public:
/// and operator[]const
/// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
Value const* find(char const* begin, char const* end) const;
+/// Most general and efficient version of isMember()const, get()const,
+/// and operator[]const
+Value const* find(const String& key) const;
/// Most general and efficient version of object-mutators.
/// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
/// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
@@ -584,6 +588,26 @@ public:
iterator begin();
iterator end();
+/// \brief Returns a reference to the first element in the `Value`.
+/// Requires that this value holds an array or json object, with at least one
+/// element.
+const Value& front() const;
+/// \brief Returns a reference to the first element in the `Value`.
+/// Requires that this value holds an array or json object, with at least one
+/// element.
+Value& front();
+/// \brief Returns a reference to the last element in the `Value`.
+/// Requires that value holds an array or json object, with at least one
+/// element.
+const Value& back() const;
+/// \brief Returns a reference to the last element in the `Value`.
+/// Requires that this value holds an array or json object, with at least one
+/// element.
+Value& back();
// Accessors for the [start, limit) range of bytes within the JSON text from
// which this value was parsed, if any.
void setOffsetStart(ptrdiff_t start);
@@ -924,6 +948,14 @@ public:
inline void swap(Value& a, Value& b) { a.swap(b); }
+inline const Value& Value::front() const { return *begin(); }
+inline Value& Value::front() { return *begin(); }
+inline const Value& Value::back() const { return *(--end()); }
+inline Value& Value::back() { return *(--end()); }
} // namespace Json
#pragma pack(pop)
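
An illustrative sketch of the accessors added above, front()/back() and the String overload of find():

    #include <json/json.h>
    #include <cassert>
    #include <iostream>

    int main() {
      Json::Value arr(Json::arrayValue);
      arr.append(10);
      arr.append(20);
      arr.append(30);
      // front()/back() require a non-empty array or object value.
      std::cout << arr.front().asInt() << " .. " << arr.back().asInt() << "\n";

      Json::Value obj(Json::objectValue);
      obj["answer"] = 42;
      // find(const String&) avoids spelling out a begin/end char* pair.
      if (const Json::Value* v = obj.find(Json::String("answer")))
        assert(v->asInt() == 42);
      return 0;
    }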


@@ -9,19 +9,18 @@
// 3. /CMakeLists.txt
// IMPORTANT: also update the SOVERSION!!
-#define JSONCPP_VERSION_STRING "1.9.5"
+#define JSONCPP_VERSION_STRING "1.9.7"
#define JSONCPP_VERSION_MAJOR 1
#define JSONCPP_VERSION_MINOR 9
-#define JSONCPP_VERSION_PATCH 5
+#define JSONCPP_VERSION_PATCH 7
#define JSONCPP_VERSION_QUALIFIER
#define JSONCPP_VERSION_HEXA \
((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | \
(JSONCPP_VERSION_PATCH << 8))
-#ifdef JSONCPP_USING_SECURE_MEMORY
-#undef JSONCPP_USING_SECURE_MEMORY
+#if !defined(JSONCPP_USE_SECURE_MEMORY)
+#define JSONCPP_USE_SECURE_MEMORY 0
#endif
-#define JSONCPP_USING_SECURE_MEMORY 0
// If non-zero, the library zeroes any memory that it has allocated before
// it frees its memory.
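
A small compile-time sketch of how downstream code can key off these macros after the rename to JSONCPP_USE_SECURE_MEMORY (the HAVE_JSONCPP_1_9_7 name is made up for the example):

    #include <json/version.h>
    #include <iostream>

    // JSONCPP_VERSION_HEXA packs major/minor/patch as
    // (major << 24) | (minor << 16) | (patch << 8), so a version gate is a
    // single integer comparison.
    #if JSONCPP_VERSION_HEXA >= ((1 << 24) | (9 << 16) | (7 << 8))
    #define HAVE_JSONCPP_1_9_7 1
    #else
    #define HAVE_JSONCPP_1_9_7 0
    #endif

    int main() {
      std::cout << "JsonCpp " << JSONCPP_VERSION_STRING
                << ", secure memory: " << JSONCPP_USE_SECURE_MEMORY
                << ", >= 1.9.7: " << HAVE_JSONCPP_1_9_7 << "\n";
      return 0;
    }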


@@ -20,7 +20,8 @@
#pragma warning(disable : 4251)
#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
-#pragma pack(push, 8)
+#pragma pack(push)
+#pragma pack()
namespace Json {
@@ -63,7 +64,7 @@ public:
*/
virtual StreamWriter* newStreamWriter() const = 0;
}; // Factory
}; // StreamWriter
/** \brief Write into stringstream, then return string, for convenience.
* A StreamWriter will be created from the factory, used, and then deleted.
@@ -167,8 +168,7 @@ public:
#pragma warning(push)
#pragma warning(disable : 4996) // Deriving from deprecated class
#endif
-class JSON_API FastWriter
-: public Writer {
+class JSON_API FastWriter : public Writer {
public:
FastWriter();
~FastWriter() override = default;
@@ -217,7 +217,7 @@ private:
* - otherwise, it the values do not fit on one line, or the array contains
* object or non empty array, then print one value per line.
*
-* If the Value have comments then they are outputed according to their
+* If the Value have comments then they are outputted according to their
*#CommentPlacement.
*
* \sa Reader, Value, Value::setComment()
@@ -227,8 +227,7 @@ private:
#pragma warning(push)
#pragma warning(disable : 4996) // Deriving from deprecated class
#endif
-class JSON_API
-StyledWriter : public Writer {
+class JSON_API StyledWriter : public Writer {
public:
StyledWriter();
~StyledWriter() override = default;
@@ -286,7 +285,7 @@ private:
* - otherwise, it the values do not fit on one line, or the array contains
* object or non empty array, then print one value per line.
*
-* If the Value have comments then they are outputed according to their
+* If the Value have comments then they are outputted according to their
#CommentPlacement.
*
* \sa Reader, Value, Value::setComment()
@@ -296,8 +295,7 @@ private:
#pragma warning(push)
#pragma warning(disable : 4996) // Deriving from deprecated class
#endif
-class JSON_API
-StyledStreamWriter {
+class JSON_API StyledStreamWriter {
public:
/**
* \param indentation Each level will be indented by this amount extra.
@@ -353,6 +351,7 @@ String JSON_API valueToString(
PrecisionType precisionType = PrecisionType::significantDigits);
String JSON_API valueToString(bool value);
String JSON_API valueToQuotedString(const char* value);
+String JSON_API valueToQuotedString(const char* value, size_t length);
/// \brief Output using the StyledStreamWriter.
/// \see Json::operator>>()
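
An illustrative sketch of the added valueToQuotedString overload, which quotes an explicit number of bytes (so, for example, content containing an embedded '\0' is not cut short); the StreamWriterBuilder path shown alongside is unchanged:

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main() {
      // Quoting uses the explicit length rather than stopping at the first '\0'.
      const std::string raw("ab\0cd", 5);
      Json::String quoted = Json::valueToQuotedString(raw.data(), raw.size());
      std::cout << quoted << "\n";

      // Whole-document writing through the builder is unaffected.
      Json::Value v;
      v["key"] = "value";
      Json::StreamWriterBuilder b;
      std::cout << Json::writeString(b, v) << std::endl;
      return 0;
    }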


@@ -1,7 +1,9 @@
-if (TARGET jsoncpp_static)
-add_library(JsonCpp::JsonCpp INTERFACE IMPORTED)
-set_target_properties(JsonCpp::JsonCpp PROPERTIES INTERFACE_LINK_LIBRARIES "jsoncpp_static")
-elseif (TARGET jsoncpp_lib)
-add_library(JsonCpp::JsonCpp INTERFACE IMPORTED)
-set_target_properties(JsonCpp::JsonCpp PROPERTIES INTERFACE_LINK_LIBRARIES "jsoncpp_lib")
-endif ()
+if (NOT TARGET JsonCpp::JsonCpp)
+if (TARGET jsoncpp_static)
+add_library(JsonCpp::JsonCpp INTERFACE IMPORTED)
+set_target_properties(JsonCpp::JsonCpp PROPERTIES INTERFACE_LINK_LIBRARIES "jsoncpp_static")
+elseif (TARGET jsoncpp_lib)
+add_library(JsonCpp::JsonCpp INTERFACE IMPORTED)
+set_target_properties(JsonCpp::JsonCpp PROPERTIES INTERFACE_LINK_LIBRARIES "jsoncpp_lib")
+endif ()
+endif ()


@@ -1,5 +1,5 @@
cmake_policy(PUSH)
-cmake_policy(VERSION 3.0)
+cmake_policy(VERSION 3.0...3.26)
@PACKAGE_INIT@


@@ -0,0 +1,6 @@
@PACKAGE_INIT@
@MESON_SHARED_TARGET@
@MESON_STATIC_TARGET@
include ( "${CMAKE_CURRENT_LIST_DIR}/jsoncpp-namespaced-targets.cmake" )


@@ -9,13 +9,13 @@ project(
# 2. /include/json/version.h
# 3. /CMakeLists.txt
# IMPORTANT: also update the SOVERSION!!
-version : '1.9.4',
+version : '1.9.7',
default_options : [
'buildtype=release',
'cpp_std=c++11',
'warning_level=1'],
license : 'Public Domain',
-meson_version : '>= 0.49.0')
+meson_version : '>= 0.54.0')
jsoncpp_headers = files([
@@ -50,7 +50,7 @@ jsoncpp_lib = library(
'src/lib_json/json_value.cpp',
'src/lib_json/json_writer.cpp',
]),
-soversion : 25,
+soversion : 27,
install : true,
include_directories : jsoncpp_include_directories,
cpp_args: dll_export_flag)
@@ -62,6 +62,43 @@ import('pkgconfig').generate(
filebase : 'jsoncpp',
description : 'A C++ library for interacting with JSON')
+cmakeconf = configuration_data()
+cmakeconf.set('MESON_LIB_DIR', get_option('libdir'))
+cmakeconf.set('MESON_INCLUDE_DIR', get_option('includedir'))
+fs = import('fs')
+if get_option('default_library') == 'shared'
+shared_name = fs.name(jsoncpp_lib.full_path())
+endif
+if get_option('default_library') == 'static'
+static_name = fs.name(jsoncpp_lib.full_path())
+endif
+if get_option('default_library') == 'both'
+shared_name = fs.name(jsoncpp_lib.get_shared_lib().full_path())
+static_name = fs.name(jsoncpp_lib.get_static_lib().full_path())
+endif
+if get_option('default_library') == 'shared' or get_option('default_library') == 'both'
+cmakeconf.set('MESON_SHARED_TARGET', '''
+add_library(jsoncpp_lib IMPORTED SHARED)
+set_target_properties(jsoncpp_lib PROPERTIES
+IMPORTED_LOCATION "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('libdir'), shared_name) + '''"
+INTERFACE_INCLUDE_DIRECTORIES "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('includedir')) + '")')
+endif
+if get_option('default_library') == 'static' or get_option('default_library') == 'both'
+cmakeconf.set('MESON_STATIC_TARGET', '''
+add_library(jsoncpp_static IMPORTED STATIC)
+set_target_properties(jsoncpp_static PROPERTIES
+IMPORTED_LOCATION "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('libdir'), static_name) + '''"
+INTERFACE_INCLUDE_DIRECTORIES "''' + join_paths('${PACKAGE_PREFIX_DIR}', get_option('includedir')) + '")')
+endif
+import('cmake').configure_package_config_file(
+name: 'jsoncpp',
+input: 'jsoncppConfig.cmake.meson.in',
+configuration: cmakeconf)
+install_data('jsoncpp-namespaced-targets.cmake', install_dir : join_paths(get_option('libdir'), 'cmake', jsoncpp_lib.name()))
# for libraries bundling jsoncpp
jsoncpp_dep = declare_dependency(
include_directories : jsoncpp_include_directories,
@@ -73,7 +110,7 @@ if meson.is_subproject() or not get_option('tests')
subdir_done()
endif
-python = import('python').find_installation()
+python = find_program('python3')
jsoncpp_test = executable(
'jsoncpp_test', files([


@@ -240,11 +240,14 @@ static int parseCommandLine(int argc, const char* argv[], Options* opts) {
return printUsage(argv);
}
int index = 1;
-if (Json::String(argv[index]) == "--json-checker") {
-opts->features = Json::Features::strictMode();
+if (Json::String(argv[index]) == "--parse-only") {
opts->parseOnly = true;
++index;
}
+if (Json::String(argv[index]) == "--strict") {
+opts->features = Json::Features::strictMode();
+++index;
+}
if (Json::String(argv[index]) == "--json-config") {
printConfig();
return 3;
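
Not part of the patch: a tiny sketch of what --strict now toggles on its own, namely Json::Features::strictMode(), which the runner previously only enabled together with parse-only mode under --json-checker:

    #include <json/json.h>
    #include <iostream>
    #include <string>

    int main() {
      // strictMode() rejects comments and requires an object or array root.
      Json::Reader strictReader(Json::Features::strictMode());
      Json::Value root;
      const std::string doc = "// comments are rejected in strict mode\n"
                              "{\"k\": 1}";
      if (!strictReader.parse(doc, root))
        std::cout << strictReader.getFormattedErrorMessages();
      return 0;
    }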


@@ -132,7 +132,6 @@ if(BUILD_SHARED_LIBS)
target_include_directories(${SHARED_LIB} PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
-$<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
)
list(APPEND CMAKE_TARGETS ${SHARED_LIB})
@@ -142,9 +141,9 @@ if(BUILD_STATIC_LIBS)
set(STATIC_LIB ${PROJECT_NAME}_static)
add_library(${STATIC_LIB} STATIC ${PUBLIC_HEADERS} ${JSONCPP_SOURCES})
-# avoid name clashes on windows as the shared import lib is alse named jsoncpp.lib
+# avoid name clashes on windows as the shared import lib is also named jsoncpp.lib
if(NOT DEFINED STATIC_SUFFIX AND BUILD_SHARED_LIBS)
-if (MSVC)
+if (WIN32)
set(STATIC_SUFFIX "_static")
else()
set(STATIC_SUFFIX "")
@@ -166,7 +165,6 @@ if(BUILD_STATIC_LIBS)
target_include_directories(${STATIC_LIB} PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
-$<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
)
list(APPEND CMAKE_TARGETS ${STATIC_LIB})
@@ -193,7 +191,6 @@ if(BUILD_OBJECT_LIBS)
target_include_directories(${OBJECT_LIB} PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_LIST_DIR}/${JSONCPP_INCLUDE_DIR}>
-$<BUILD_INTERFACE:${PROJECT_BINARY_DIR}/include/json>
)
list(APPEND CMAKE_TARGETS ${OBJECT_LIB})


@@ -12,6 +12,7 @@
#endif // if !defined(JSON_IS_AMALGAMATION)
#include <algorithm>
#include <cassert>
+#include <cmath>
#include <cstring>
#include <iostream>
#include <istream>
@@ -128,7 +129,7 @@ bool Reader::parse(const char* beginDoc, const char* endDoc, Value& root,
bool successful = readValue();
Token token;
-skipCommentTokens(token);
+readTokenSkippingComments(token);
if (collectComments_ && !commentsBefore_.empty())
root.setComment(commentsBefore_, commentAfter);
if (features_.strictRoot_) {
@@ -156,7 +157,7 @@ bool Reader::readValue() {
throwRuntimeError("Exceeded stackLimit in readValue().");
Token token;
-skipCommentTokens(token);
+readTokenSkippingComments(token);
bool successful = true;
if (collectComments_ && !commentsBefore_.empty()) {
@@ -224,14 +225,14 @@ bool Reader::readValue() {
return successful;
}
-void Reader::skipCommentTokens(Token& token) {
+bool Reader::readTokenSkippingComments(Token& token) {
+bool success = readToken(token);
if (features_.allowComments_) {
-do {
-readToken(token);
-} while (token.type_ == tokenComment);
-} else {
-readToken(token);
+while (success && token.type_ == tokenComment) {
+success = readToken(token);
+}
}
+return success;
}
bool Reader::readToken(Token& token) {
@@ -445,12 +446,7 @@ bool Reader::readObject(Token& token) {
Value init(objectValue);
currentValue().swapPayload(init);
currentValue().setOffsetStart(token.start_ - begin_);
-while (readToken(tokenName)) {
-bool initialTokenOk = true;
-while (tokenName.type_ == tokenComment && initialTokenOk)
-initialTokenOk = readToken(tokenName);
-if (!initialTokenOk)
-break;
+while (readTokenSkippingComments(tokenName)) {
if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
return true;
name.clear();
@@ -479,15 +475,11 @@ bool Reader::readObject(Token& token) {
return recoverFromError(tokenObjectEnd);
Token comma;
-if (!readToken(comma) ||
-(comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
-comma.type_ != tokenComment)) {
+if (!readTokenSkippingComments(comma) ||
+(comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator)) {
return addErrorAndRecover("Missing ',' or '}' in object declaration",
comma, tokenObjectEnd);
}
-bool finalizeTokenOk = true;
-while (comma.type_ == tokenComment && finalizeTokenOk)
-finalizeTokenOk = readToken(comma);
if (comma.type_ == tokenObjectEnd)
return true;
}
@@ -517,10 +509,7 @@ bool Reader::readArray(Token& token) {
Token currentToken;
// Accept Comment after last item in the array.
-ok = readToken(currentToken);
-while (currentToken.type_ == tokenComment && ok) {
-ok = readToken(currentToken);
-}
+ok = readTokenSkippingComments(currentToken);
bool badTokenType = (currentToken.type_ != tokenArraySeparator &&
currentToken.type_ != tokenArrayEnd);
if (!ok || badTokenType) {
@ -598,11 +587,16 @@ bool Reader::decodeDouble(Token& token) {
bool Reader::decodeDouble(Token& token, Value& decoded) { bool Reader::decodeDouble(Token& token, Value& decoded) {
double value = 0; double value = 0;
String buffer(token.start_, token.end_); IStringStream is(String(token.start_, token.end_));
IStringStream is(buffer); if (!(is >> value)) {
if (!(is >> value)) if (value == std::numeric_limits<double>::max())
return addError( value = std::numeric_limits<double>::infinity();
"'" + String(token.start_, token.end_) + "' is not a number.", token); else if (value == std::numeric_limits<double>::lowest())
value = -std::numeric_limits<double>::infinity();
else if (!std::isinf(value))
return addError(
"'" + String(token.start_, token.end_) + "' is not a number.", token);
}
decoded = value; decoded = value;
return true; return true;
} }
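When the extracted magnitude overflows, the stream sets failbit and, depending on the standard library, stores either +/-numeric_limits<double>::max() or an infinity; the new branch maps both outcomes to +/-infinity instead of reporting "is not a number". A short sketch of the resulting behaviour, matching the +/-1e+9999 test data added later in this commit:

#include <json/json.h>
#include <cmath>
#include <iostream>
#include <memory>
#include <string>

int main() {
  const std::string doc = "[-1e+9999, 1e+9999]"; // magnitudes overflow double
  Json::CharReaderBuilder builder;
  std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  Json::Value root;
  Json::String errs;
  const bool ok =
      reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);
  // Before this change the parse failed with "'1e+9999' is not a number.";
  // now both elements decode to infinities of the matching sign.
  std::cout << ok << ' ' << std::isinf(root[0].asDouble()) << ' '
            << (root[1].asDouble() > 0) << '\n'; // prints: 1 1 1
  return 0;
}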
@ -766,7 +760,7 @@ void Reader::getLocationLineAndColumn(Location location, int& line,
while (current < location && current != end_) { while (current < location && current != end_) {
Char c = *current++; Char c = *current++;
if (c == '\r') { if (c == '\r') {
if (*current == '\n') if (current != end_ && *current == '\n')
++current; ++current;
lastLineStart = current; lastLineStart = current;
++line; ++line;
@ -884,17 +878,12 @@ class OurReader {
public: public:
using Char = char; using Char = char;
using Location = const Char*; using Location = const Char*;
struct StructuredError {
ptrdiff_t offset_start;
ptrdiff_t offset_limit;
String message;
};
explicit OurReader(OurFeatures const& features); explicit OurReader(OurFeatures const& features);
bool parse(const char* beginDoc, const char* endDoc, Value& root, bool parse(const char* beginDoc, const char* endDoc, Value& root,
bool collectComments = true); bool collectComments = true);
String getFormattedErrorMessages() const; String getFormattedErrorMessages() const;
std::vector<StructuredError> getStructuredErrors() const; std::vector<CharReader::StructuredError> getStructuredErrors() const;
private: private:
OurReader(OurReader const&); // no impl OurReader(OurReader const&); // no impl
@ -937,6 +926,7 @@ private:
using Errors = std::deque<ErrorInfo>; using Errors = std::deque<ErrorInfo>;
bool readToken(Token& token); bool readToken(Token& token);
bool readTokenSkippingComments(Token& token);
void skipSpaces(); void skipSpaces();
void skipBom(bool skipBom); void skipBom(bool skipBom);
bool match(const Char* pattern, int patternLength); bool match(const Char* pattern, int patternLength);
@ -970,7 +960,6 @@ private:
int& column) const; int& column) const;
String getLocationLineAndColumn(Location location) const; String getLocationLineAndColumn(Location location) const;
void addComment(Location begin, Location end, CommentPlacement placement); void addComment(Location begin, Location end, CommentPlacement placement);
void skipCommentTokens(Token& token);
static String normalizeEOL(Location begin, Location end); static String normalizeEOL(Location begin, Location end);
static bool containsNewLine(Location begin, Location end); static bool containsNewLine(Location begin, Location end);
@ -1024,7 +1013,7 @@ bool OurReader::parse(const char* beginDoc, const char* endDoc, Value& root,
bool successful = readValue(); bool successful = readValue();
nodes_.pop(); nodes_.pop();
Token token; Token token;
skipCommentTokens(token); readTokenSkippingComments(token);
if (features_.failIfExtra_ && (token.type_ != tokenEndOfStream)) { if (features_.failIfExtra_ && (token.type_ != tokenEndOfStream)) {
addError("Extra non-whitespace after JSON value.", token); addError("Extra non-whitespace after JSON value.", token);
return false; return false;
@ -1052,7 +1041,7 @@ bool OurReader::readValue() {
if (nodes_.size() > features_.stackLimit_) if (nodes_.size() > features_.stackLimit_)
throwRuntimeError("Exceeded stackLimit in readValue()."); throwRuntimeError("Exceeded stackLimit in readValue().");
Token token; Token token;
skipCommentTokens(token); readTokenSkippingComments(token);
bool successful = true; bool successful = true;
if (collectComments_ && !commentsBefore_.empty()) { if (collectComments_ && !commentsBefore_.empty()) {
@ -1139,14 +1128,14 @@ bool OurReader::readValue() {
return successful; return successful;
} }
void OurReader::skipCommentTokens(Token& token) { bool OurReader::readTokenSkippingComments(Token& token) {
bool success = readToken(token);
if (features_.allowComments_) { if (features_.allowComments_) {
do { while (success && token.type_ == tokenComment) {
readToken(token); success = readToken(token);
} while (token.type_ == tokenComment); }
} else {
readToken(token);
} }
return success;
} }
bool OurReader::readToken(Token& token) { bool OurReader::readToken(Token& token) {
@ -1443,12 +1432,7 @@ bool OurReader::readObject(Token& token) {
Value init(objectValue); Value init(objectValue);
currentValue().swapPayload(init); currentValue().swapPayload(init);
currentValue().setOffsetStart(token.start_ - begin_); currentValue().setOffsetStart(token.start_ - begin_);
while (readToken(tokenName)) { while (readTokenSkippingComments(tokenName)) {
bool initialTokenOk = true;
while (tokenName.type_ == tokenComment && initialTokenOk)
initialTokenOk = readToken(tokenName);
if (!initialTokenOk)
break;
if (tokenName.type_ == tokenObjectEnd && if (tokenName.type_ == tokenObjectEnd &&
(name.empty() || (name.empty() ||
features_.allowTrailingCommas_)) // empty object or trailing comma features_.allowTrailingCommas_)) // empty object or trailing comma
@ -1486,15 +1470,11 @@ bool OurReader::readObject(Token& token) {
return recoverFromError(tokenObjectEnd); return recoverFromError(tokenObjectEnd);
Token comma; Token comma;
if (!readToken(comma) || if (!readTokenSkippingComments(comma) ||
(comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator && (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator)) {
comma.type_ != tokenComment)) {
return addErrorAndRecover("Missing ',' or '}' in object declaration", return addErrorAndRecover("Missing ',' or '}' in object declaration",
comma, tokenObjectEnd); comma, tokenObjectEnd);
} }
bool finalizeTokenOk = true;
while (comma.type_ == tokenComment && finalizeTokenOk)
finalizeTokenOk = readToken(comma);
if (comma.type_ == tokenObjectEnd) if (comma.type_ == tokenObjectEnd)
return true; return true;
} }
@ -1528,10 +1508,7 @@ bool OurReader::readArray(Token& token) {
Token currentToken; Token currentToken;
// Accept Comment after last item in the array. // Accept Comment after last item in the array.
ok = readToken(currentToken); ok = readTokenSkippingComments(currentToken);
while (currentToken.type_ == tokenComment && ok) {
ok = readToken(currentToken);
}
bool badTokenType = (currentToken.type_ != tokenArraySeparator && bool badTokenType = (currentToken.type_ != tokenArraySeparator &&
currentToken.type_ != tokenArrayEnd); currentToken.type_ != tokenArrayEnd);
if (!ok || badTokenType) { if (!ok || badTokenType) {
@ -1609,7 +1586,7 @@ bool OurReader::decodeNumber(Token& token, Value& decoded) {
const auto digit(static_cast<Value::UInt>(c - '0')); const auto digit(static_cast<Value::UInt>(c - '0'));
if (value >= threshold) { if (value >= threshold) {
// We've hit or exceeded the max value divided by 10 (rounded down). If // We've hit or exceeded the max value divided by 10 (rounded down). If
// a) we've only just touched the limit, meaing value == threshold, // a) we've only just touched the limit, meaning value == threshold,
// b) this is the last digit, or // b) this is the last digit, or
// c) it's small enough to fit in that rounding delta, we're okay. // c) it's small enough to fit in that rounding delta, we're okay.
// Otherwise treat this number as a double to avoid overflow. // Otherwise treat this number as a double to avoid overflow.
@ -1646,11 +1623,15 @@ bool OurReader::decodeDouble(Token& token) {
bool OurReader::decodeDouble(Token& token, Value& decoded) { bool OurReader::decodeDouble(Token& token, Value& decoded) {
double value = 0; double value = 0;
const String buffer(token.start_, token.end_); IStringStream is(String(token.start_, token.end_));
IStringStream is(buffer);
if (!(is >> value)) { if (!(is >> value)) {
return addError( if (value == std::numeric_limits<double>::max())
"'" + String(token.start_, token.end_) + "' is not a number.", token); value = std::numeric_limits<double>::infinity();
else if (value == std::numeric_limits<double>::lowest())
value = -std::numeric_limits<double>::infinity();
else if (!std::isinf(value))
return addError(
"'" + String(token.start_, token.end_) + "' is not a number.", token);
} }
decoded = value; decoded = value;
return true; return true;
@ -1819,7 +1800,7 @@ void OurReader::getLocationLineAndColumn(Location location, int& line,
while (current < location && current != end_) { while (current < location && current != end_) {
Char c = *current++; Char c = *current++;
if (c == '\r') { if (c == '\r') {
if (*current == '\n') if (current != end_ && *current == '\n')
++current; ++current;
lastLineStart = current; lastLineStart = current;
++line; ++line;
@ -1854,10 +1835,11 @@ String OurReader::getFormattedErrorMessages() const {
return formattedMessage; return formattedMessage;
} }
std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const { std::vector<CharReader::StructuredError>
std::vector<OurReader::StructuredError> allErrors; OurReader::getStructuredErrors() const {
std::vector<CharReader::StructuredError> allErrors;
for (const auto& error : errors_) { for (const auto& error : errors_) {
OurReader::StructuredError structured; CharReader::StructuredError structured;
structured.offset_start = error.token_.start_ - begin_; structured.offset_start = error.token_.start_ - begin_;
structured.offset_limit = error.token_.end_ - begin_; structured.offset_limit = error.token_.end_ - begin_;
structured.message = error.message_; structured.message = error.message_;
@ -1867,20 +1849,36 @@ std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const {
} }
class OurCharReader : public CharReader { class OurCharReader : public CharReader {
bool const collectComments_;
OurReader reader_;
public: public:
OurCharReader(bool collectComments, OurFeatures const& features) OurCharReader(bool collectComments, OurFeatures const& features)
: collectComments_(collectComments), reader_(features) {} : CharReader(
bool parse(char const* beginDoc, char const* endDoc, Value* root, std::unique_ptr<OurImpl>(new OurImpl(collectComments, features))) {}
String* errs) override {
bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_); protected:
if (errs) { class OurImpl : public Impl {
*errs = reader_.getFormattedErrorMessages(); public:
OurImpl(bool collectComments, OurFeatures const& features)
: collectComments_(collectComments), reader_(features) {}
bool parse(char const* beginDoc, char const* endDoc, Value* root,
String* errs) override {
bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_);
if (errs) {
*errs = reader_.getFormattedErrorMessages();
}
return ok;
} }
return ok;
} std::vector<CharReader::StructuredError>
getStructuredErrors() const override {
return reader_.getStructuredErrors();
}
private:
bool const collectComments_;
OurReader reader_;
};
}; };
CharReaderBuilder::CharReaderBuilder() { setDefaults(&settings_); } CharReaderBuilder::CharReaderBuilder() { setDefaults(&settings_); }
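OurCharReader no longer parses directly; it wraps an OurReader in a nested OurImpl and hands that to the CharReader base class, which lets the public class expose getStructuredErrors() without leaking OurReader into the header. The json/reader.h side of this change is not shown in this excerpt, so the following is only an inferred sketch of the hook, reconstructed from the OurImpl overrides above and the _impl-> forwarding below; access levels and virtual specifiers are assumptions, and the sketch is not meant to be compiled next to the real header.

namespace Json {
class CharReader {
public:
  struct StructuredError {
    ptrdiff_t offset_start;
    ptrdiff_t offset_limit;
    String message;
  };

  virtual ~CharReader() = default;

  // Both public entry points now simply forward to the held implementation.
  virtual bool parse(char const* beginDoc, char const* endDoc, Value* root,
                     String* errs);
  std::vector<StructuredError> getStructuredErrors() const;

protected:
  class Impl {
  public:
    virtual ~Impl() = default;
    virtual bool parse(char const* beginDoc, char const* endDoc, Value* root,
                       String* errs) = 0;
    virtual std::vector<StructuredError> getStructuredErrors() const = 0;
  };

  explicit CharReader(std::unique_ptr<Impl> impl) : _impl(std::move(impl)) {}

private:
  std::unique_ptr<Impl> _impl;
};
} // namespace Json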
@ -1970,6 +1968,32 @@ void CharReaderBuilder::setDefaults(Json::Value* settings) {
(*settings)["skipBom"] = true; (*settings)["skipBom"] = true;
//! [CharReaderBuilderDefaults] //! [CharReaderBuilderDefaults]
} }
// static
void CharReaderBuilder::ecma404Mode(Json::Value* settings) {
//! [CharReaderBuilderECMA404Mode]
(*settings)["allowComments"] = false;
(*settings)["allowTrailingCommas"] = false;
(*settings)["strictRoot"] = false;
(*settings)["allowDroppedNullPlaceholders"] = false;
(*settings)["allowNumericKeys"] = false;
(*settings)["allowSingleQuotes"] = false;
(*settings)["stackLimit"] = 1000;
(*settings)["failIfExtra"] = true;
(*settings)["rejectDupKeys"] = false;
(*settings)["allowSpecialFloats"] = false;
(*settings)["skipBom"] = false;
//! [CharReaderBuilderECMA404Mode]
}
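ecma404Mode() is the strict counterpart to setDefaults(): comments, trailing commas, single quotes and special floats are all off, the BOM is not skipped, and failIfExtra rejects trailing content. A short usage sketch; settings_ is the builder's public settings object, used the same way as with the existing strictMode() helper:

#include <json/json.h>
#include <iostream>
#include <memory>
#include <string>

int main() {
  Json::CharReaderBuilder builder;
  Json::CharReaderBuilder::ecma404Mode(&builder.settings_); // strict profile
  std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  Json::Value root;
  Json::String errs;
  const std::string doc = "{ \"a\": 1 } // comments are not ECMA-404";
  const bool ok =
      reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);
  std::cout << ok << '\n' << errs; // 0, followed by the parse error
  return 0;
}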
std::vector<CharReader::StructuredError>
CharReader::getStructuredErrors() const {
return _impl->getStructuredErrors();
}
bool CharReader::parse(char const* beginDoc, char const* endDoc, Value* root,
String* errs) {
return _impl->parse(beginDoc, endDoc, root, errs);
}
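With this forwarding, offset-based error information is now reachable through the public CharReader interface rather than only through the legacy Reader. A minimal sketch, mirroring the ParseWithStructuredErrorsTest added later in this commit:

#include <json/json.h>
#include <iostream>
#include <memory>
#include <string>

int main() {
  Json::CharReaderBuilder builder;
  std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
  Json::Value root;
  const std::string doc = "{ 1 : 2 }"; // object keys must be strings
  reader->parse(doc.data(), doc.data() + doc.size(), &root, nullptr);
  for (const auto& e : reader->getStructuredErrors()) {
    // offsets are byte positions into doc, e.g. 2..3 for the token "1"
    std::cout << e.offset_start << ".." << e.offset_limit << ": " << e.message
              << '\n';
  }
  return 0;
}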
////////////////////////////////// //////////////////////////////////
// global functions // global functions
@ -1978,7 +2002,7 @@ bool parseFromStream(CharReader::Factory const& fact, IStream& sin, Value* root,
String* errs) { String* errs) {
OStringStream ssin; OStringStream ssin;
ssin << sin.rdbuf(); ssin << sin.rdbuf();
String doc = ssin.str(); String doc = std::move(ssin).str();
char const* begin = doc.data(); char const* begin = doc.data();
char const* end = begin + doc.size(); char const* end = begin + doc.size();
// Note that we do not actually need a null-terminator. // Note that we do not actually need a null-terminator.
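A note on the std::move(ssin).str() change above: it only avoids a copy when the standard library provides the rvalue-qualified str() overload (added in C++20); with older libraries the expression still compiles but binds to the const overload and copies as before. The same pattern appears again in the writeString() change further down. A tiny illustration of the idiom, independent of jsoncpp:

#include <sstream>
#include <string>
#include <utility>

std::string drain(std::ostringstream& out) {
  // C++20 adds str() &&, which moves the buffer out; older standard
  // libraries fall back to the copying const overload, so behaviour (though
  // not cost) is identical.
  return std::move(out).str();
}

int main() {
  std::ostringstream out;
  out << "example";
  return drain(out).size() == 7 ? 0 : 1;
}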

View File

@ -87,7 +87,8 @@ template <typename T, typename U>
static inline bool InRange(double d, T min, U max) { static inline bool InRange(double d, T min, U max) {
// The casts can lose precision, but we are looking only for // The casts can lose precision, but we are looking only for
// an approximate range. Might fail on edge cases though. ~cdunn // an approximate range. Might fail on edge cases though. ~cdunn
return d >= static_cast<double>(min) && d <= static_cast<double>(max); return d >= static_cast<double>(min) && d <= static_cast<double>(max) &&
!(static_cast<U>(d) == min && d != static_cast<double>(min));
} }
#else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) #else // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
static inline double integerToDouble(Json::UInt64 value) { static inline double integerToDouble(Json::UInt64 value) {
@ -101,7 +102,8 @@ template <typename T> static inline double integerToDouble(T value) {
template <typename T, typename U> template <typename T, typename U>
static inline bool InRange(double d, T min, U max) { static inline bool InRange(double d, T min, U max) {
return d >= integerToDouble(min) && d <= integerToDouble(max); return d >= integerToDouble(min) && d <= integerToDouble(max) &&
!(static_cast<U>(d) == min && d != integerToDouble(min));
} }
#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION) #endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
@ -163,7 +165,7 @@ inline static void decodePrefixedString(bool isPrefixed, char const* prefixed,
/** Free the string duplicated by /** Free the string duplicated by
* duplicateStringValue()/duplicateAndPrefixStringValue(). * duplicateStringValue()/duplicateAndPrefixStringValue().
*/ */
#if JSONCPP_USING_SECURE_MEMORY #if JSONCPP_USE_SECURE_MEMORY
static inline void releasePrefixedStringValue(char* value) { static inline void releasePrefixedStringValue(char* value) {
unsigned length = 0; unsigned length = 0;
char const* valueDecoded; char const* valueDecoded;
@ -178,10 +180,10 @@ static inline void releaseStringValue(char* value, unsigned length) {
memset(value, 0, size); memset(value, 0, size);
free(value); free(value);
} }
#else // !JSONCPP_USING_SECURE_MEMORY #else // !JSONCPP_USE_SECURE_MEMORY
static inline void releasePrefixedStringValue(char* value) { free(value); } static inline void releasePrefixedStringValue(char* value) { free(value); }
static inline void releaseStringValue(char* value, unsigned) { free(value); } static inline void releaseStringValue(char* value, unsigned) { free(value); }
#endif // JSONCPP_USING_SECURE_MEMORY #endif // JSONCPP_USE_SECURE_MEMORY
} // namespace Json } // namespace Json
@ -599,7 +601,7 @@ const char* Value::asCString() const {
return this_str; return this_str;
} }
#if JSONCPP_USING_SECURE_MEMORY #if JSONCPP_USE_SECURE_MEMORY
unsigned Value::getCStringLength() const { unsigned Value::getCStringLength() const {
JSON_ASSERT_MESSAGE(type() == stringValue, JSON_ASSERT_MESSAGE(type() == stringValue,
"in Json::Value::asCString(): requires stringValue"); "in Json::Value::asCString(): requires stringValue");
@ -705,6 +707,11 @@ Value::Int64 Value::asInt64() const {
JSON_ASSERT_MESSAGE(isInt64(), "LargestUInt out of Int64 range"); JSON_ASSERT_MESSAGE(isInt64(), "LargestUInt out of Int64 range");
return Int64(value_.uint_); return Int64(value_.uint_);
case realValue: case realValue:
// If the double value is in proximity to minInt64, it will be rounded to
// minInt64. The correct value in this scenario is indeterminable
JSON_ASSERT_MESSAGE(
value_.real_ != minInt64,
"Double value is minInt64, precise value cannot be determined");
JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt64, maxInt64), JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt64, maxInt64),
"double out of Int64 range"); "double out of Int64 range");
return Int64(value_.real_); return Int64(value_.real_);
@ -1092,6 +1099,9 @@ Value const* Value::find(char const* begin, char const* end) const {
return nullptr; return nullptr;
return &(*it).second; return &(*it).second;
} }
Value const* Value::find(const String& key) const {
return find(key.data(), key.data() + key.length());
}
Value* Value::demand(char const* begin, char const* end) { Value* Value::demand(char const* begin, char const* end) {
JSON_ASSERT_MESSAGE(type() == nullValue || type() == objectValue, JSON_ASSERT_MESSAGE(type() == nullValue || type() == objectValue,
"in Json::Value::demand(begin, end): requires " "in Json::Value::demand(begin, end): requires "
@ -1105,7 +1115,7 @@ const Value& Value::operator[](const char* key) const {
return *found; return *found;
} }
Value const& Value::operator[](const String& key) const { Value const& Value::operator[](const String& key) const {
Value const* found = find(key.data(), key.data() + key.length()); Value const* found = find(key);
if (!found) if (!found)
return nullSingleton(); return nullSingleton();
return *found; return *found;
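The new find(const String&) overload, now used by operator[] above, saves callers from spelling out the begin/end pointer pair. A short sketch matching the new ValueTest coverage further down, assuming the default configuration where Json::String is std::string:

#include <json/json.h>
#include <iostream>
#include <string>

int main() {
  Json::Value object;
  object["id"] = 1234;
  const std::string key = "id";
  if (const Json::Value* found = object.find(key)) // new overload
    std::cout << found->asInt() << '\n';           // 1234
  std::cout << (object.find(std::string("missing")) == nullptr) << '\n'; // 1
  return 0;
}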
@ -1205,7 +1215,7 @@ bool Value::removeIndex(ArrayIndex index, Value* removed) {
return false; return false;
} }
if (removed) if (removed)
*removed = it->second; *removed = std::move(it->second);
ArrayIndex oldSize = size(); ArrayIndex oldSize = size();
// shift left all items left, into the place of the "removed" // shift left all items left, into the place of the "removed"
for (ArrayIndex i = index; i < (oldSize - 1); ++i) { for (ArrayIndex i = index; i < (oldSize - 1); ++i) {
@ -1308,8 +1318,12 @@ bool Value::isInt64() const {
// Note that maxInt64 (= 2^63 - 1) is not exactly representable as a // Note that maxInt64 (= 2^63 - 1) is not exactly representable as a
// double, so double(maxInt64) will be rounded up to 2^63. Therefore we // double, so double(maxInt64) will be rounded up to 2^63. Therefore we
// require the value to be strictly less than the limit. // require the value to be strictly less than the limit.
return value_.real_ >= double(minInt64) && // minInt64 is -2^63 which can be represented as a double, but since double
value_.real_ < double(maxInt64) && IsIntegral(value_.real_); // values in its proximity are also rounded to -2^63, we require the value
// to be strictly greater than the limit to avoid returning 'true' for
// values that are not in the range
return value_.real_ > double(minInt64) && value_.real_ < double(maxInt64) &&
IsIntegral(value_.real_);
default: default:
break; break;
} }
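The strict lower bound matters because -2^63 is exactly representable as a double, yet every real value in its neighbourhood also rounds to that same double, so a real equal to double(minInt64) can no longer be assumed to fit in Int64. A small sketch of the behavioural change, consistent with the updated ValueTest expectations later in this commit:

#include <json/json.h>
#include <cstdint>
#include <iostream>
#include <limits>

int main() {
  const auto kint64min = std::numeric_limits<std::int64_t>::min();
  Json::Value v(static_cast<double>(kint64min));
  std::cout << v.isInt64() << ' ' << v.isIntegral() << '\n'; // 0 0 (was 1 1)
  // v.asInt64() would now trip the new "Double value is minInt64" assertion
  // instead of silently returning kint64min.
  return 0;
}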
@ -1347,7 +1361,11 @@ bool Value::isIntegral() const {
// Note that maxUInt64 (= 2^64 - 1) is not exactly representable as a // Note that maxUInt64 (= 2^64 - 1) is not exactly representable as a
// double, so double(maxUInt64) will be rounded up to 2^64. Therefore we // double, so double(maxUInt64) will be rounded up to 2^64. Therefore we
// require the value to be strictly less than the limit. // require the value to be strictly less than the limit.
return value_.real_ >= double(minInt64) && // minInt64 is -2^63 which can be represented as a double, but since double
// values in its proximity are also rounded to -2^63, we require the value
// to be strictly greater than the limit to avoid returning 'true' for
// values that are not in the range
return value_.real_ > double(minInt64) &&
value_.real_ < maxUInt64AsDouble && IsIntegral(value_.real_); value_.real_ < maxUInt64AsDouble && IsIntegral(value_.real_);
#else #else
return value_.real_ >= minInt && value_.real_ <= maxUInt && return value_.real_ >= minInt && value_.real_ <= maxUInt &&
@ -1410,9 +1428,8 @@ void Value::setComment(String comment, CommentPlacement placement) {
// Always discard trailing newline, to aid indentation. // Always discard trailing newline, to aid indentation.
comment.pop_back(); comment.pop_back();
} }
JSON_ASSERT(!comment.empty());
JSON_ASSERT_MESSAGE( JSON_ASSERT_MESSAGE(
comment[0] == '\0' || comment[0] == '/', comment.empty() || comment[0] == '/',
"in Json::Value::setComment(): Comments must start with /"); "in Json::Value::setComment(): Comments must start with /");
comments_.set(placement, std::move(comment)); comments_.set(placement, std::move(comment));
} }
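The relaxed check above drops the separate JSON_ASSERT(!comment.empty()) and only requires non-empty comments to start with '/'. A tiny sketch of the consequence:

#include <json/json.h>

int main() {
  Json::Value v(42);
  // Previously an empty comment tripped JSON_ASSERT(!comment.empty());
  // it is now accepted and simply stored as an empty slot.
  v.setComment(Json::String(), Json::commentBefore);
  return 0;
}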

View File

@ -132,8 +132,9 @@ String valueToString(double value, bool useSpecialFloats,
if (!isfinite(value)) { if (!isfinite(value)) {
static const char* const reps[2][3] = {{"NaN", "-Infinity", "Infinity"}, static const char* const reps[2][3] = {{"NaN", "-Infinity", "Infinity"},
{"null", "-1e+9999", "1e+9999"}}; {"null", "-1e+9999", "1e+9999"}};
return reps[useSpecialFloats ? 0 : 1] return reps[useSpecialFloats ? 0 : 1][isnan(value) ? 0
[isnan(value) ? 0 : (value < 0) ? 1 : 2]; : (value < 0) ? 1
: 2];
} }
String buffer(size_t(36), '\0'); String buffer(size_t(36), '\0');
@ -353,6 +354,10 @@ String valueToQuotedString(const char* value) {
return valueToQuotedStringN(value, strlen(value)); return valueToQuotedStringN(value, strlen(value));
} }
String valueToQuotedString(const char* value, size_t length) {
return valueToQuotedStringN(value, length);
}
// Class Writer // Class Writer
// ////////////////////////////////////////////////////////////////// // //////////////////////////////////////////////////////////////////
Writer::~Writer() = default; Writer::~Writer() = default;
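The new two-argument valueToQuotedString() lets callers pass an explicit length; the styled writers below now pass name.size(), which skips a redundant strlen() and, presumably, keeps keys with embedded NUL bytes intact. A small sketch using the overload added here:

#include <json/json.h>
#include <iostream>
#include <string>

int main() {
  const std::string key("with\0nul", 8); // embedded NUL byte
  // The old single-argument overload measures the string with strlen() and
  // stops at the NUL; passing the length explicitly quotes all 8 bytes.
  std::cout << Json::valueToQuotedString(key.c_str(), key.size()) << '\n';
  return 0;
}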
@ -490,7 +495,7 @@ void StyledWriter::writeValue(const Value& value) {
const String& name = *it; const String& name = *it;
const Value& childValue = value[name]; const Value& childValue = value[name];
writeCommentBeforeValue(childValue); writeCommentBeforeValue(childValue);
writeWithIndent(valueToQuotedString(name.c_str())); writeWithIndent(valueToQuotedString(name.c_str(), name.size()));
document_ += " : "; document_ += " : ";
writeValue(childValue); writeValue(childValue);
if (++it == members.end()) { if (++it == members.end()) {
@ -708,7 +713,7 @@ void StyledStreamWriter::writeValue(const Value& value) {
const String& name = *it; const String& name = *it;
const Value& childValue = value[name]; const Value& childValue = value[name];
writeCommentBeforeValue(childValue); writeCommentBeforeValue(childValue);
writeWithIndent(valueToQuotedString(name.c_str())); writeWithIndent(valueToQuotedString(name.c_str(), name.size()));
*document_ << " : "; *document_ << " : ";
writeValue(childValue); writeValue(childValue);
if (++it == members.end()) { if (++it == members.end()) {
@ -1246,7 +1251,7 @@ String writeString(StreamWriter::Factory const& factory, Value const& root) {
OStringStream sout; OStringStream sout;
StreamWriterPtr const writer(factory.newStreamWriter()); StreamWriterPtr const writer(factory.newStreamWriter());
writer->write(root, &sout); writer->write(root, &sout);
return sout.str(); return std::move(sout).str();
} }
OStream& operator<<(OStream& sout, Value const& root) { OStream& operator<<(OStream& sout, Value const& root) {

View File

@ -410,7 +410,7 @@ Json::String ToJsonString(const char* toConvert) {
Json::String ToJsonString(Json::String in) { return in; } Json::String ToJsonString(Json::String in) { return in; }
#if JSONCPP_USING_SECURE_MEMORY #if JSONCPP_USE_SECURE_MEMORY
Json::String ToJsonString(std::string in) { Json::String ToJsonString(std::string in) {
return Json::String(in.data(), in.data() + in.length()); return Json::String(in.data(), in.data() + in.length());
} }

View File

@ -74,7 +74,7 @@ public:
/// Removes the last PredicateContext added to the predicate stack /// Removes the last PredicateContext added to the predicate stack
/// chained list. /// chained list.
/// Next messages will be targed at the PredicateContext that was removed. /// Next messages will be targeted at the PredicateContext that was removed.
TestResult& popPredicateContext(); TestResult& popPredicateContext();
bool failed() const; bool failed() const;
@ -185,7 +185,7 @@ TestResult& checkEqual(TestResult& result, T expected, U actual,
Json::String ToJsonString(const char* toConvert); Json::String ToJsonString(const char* toConvert);
Json::String ToJsonString(Json::String in); Json::String ToJsonString(Json::String in);
#if JSONCPP_USING_SECURE_MEMORY #if JSONCPP_USE_SECURE_MEMORY
Json::String ToJsonString(std::string in); Json::String ToJsonString(std::string in);
#endif #endif

View File

@ -220,11 +220,20 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, objects) {
JSONTEST_ASSERT(foundId != nullptr); JSONTEST_ASSERT(foundId != nullptr);
JSONTEST_ASSERT_EQUAL(Json::Value(1234), *foundId); JSONTEST_ASSERT_EQUAL(Json::Value(1234), *foundId);
const std::string stringIdKey = "id";
const Json::Value* stringFoundId = object1_.find(stringIdKey);
JSONTEST_ASSERT(stringFoundId != nullptr);
JSONTEST_ASSERT_EQUAL(Json::Value(1234), *stringFoundId);
const char unknownIdKey[] = "unknown id"; const char unknownIdKey[] = "unknown id";
const Json::Value* foundUnknownId = const Json::Value* foundUnknownId =
object1_.find(unknownIdKey, unknownIdKey + strlen(unknownIdKey)); object1_.find(unknownIdKey, unknownIdKey + strlen(unknownIdKey));
JSONTEST_ASSERT_EQUAL(nullptr, foundUnknownId); JSONTEST_ASSERT_EQUAL(nullptr, foundUnknownId);
const std::string stringUnknownIdKey = "unknown id";
const Json::Value* stringFoundUnknownId = object1_.find(stringUnknownIdKey);
JSONTEST_ASSERT_EQUAL(nullptr, stringFoundUnknownId);
// Access through demand() // Access through demand()
const char yetAnotherIdKey[] = "yet another id"; const char yetAnotherIdKey[] = "yet another id";
const Json::Value* foundYetAnotherId = const Json::Value* foundYetAnotherId =
@ -310,10 +319,14 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, arrays) {
const Json::Value& constArray = array1_; const Json::Value& constArray = array1_;
JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[index0]); JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[index0]);
JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[0]); JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray[0]);
JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray.front());
JSONTEST_ASSERT_EQUAL(Json::Value(1234), constArray.back());
// Access through non-const reference // Access through non-const reference
JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[index0]); JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[index0]);
JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[0]); JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_[0]);
JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_.front());
JSONTEST_ASSERT_EQUAL(Json::Value(1234), array1_.back());
array1_[2] = Json::Value(17); array1_[2] = Json::Value(17);
JSONTEST_ASSERT_EQUAL(Json::Value(), array1_[1]); JSONTEST_ASSERT_EQUAL(Json::Value(), array1_[1]);
@ -356,6 +369,8 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, resizePopulatesAllMissingElements) {
v.resize(n); v.resize(n);
JSONTEST_ASSERT_EQUAL(n, v.size()); JSONTEST_ASSERT_EQUAL(n, v.size());
JSONTEST_ASSERT_EQUAL(n, std::distance(v.begin(), v.end())); JSONTEST_ASSERT_EQUAL(n, std::distance(v.begin(), v.end()));
JSONTEST_ASSERT_EQUAL(v.front(), Json::Value{});
JSONTEST_ASSERT_EQUAL(v.back(), Json::Value{});
for (const Json::Value& e : v) for (const Json::Value& e : v)
JSONTEST_ASSERT_EQUAL(e, Json::Value{}); JSONTEST_ASSERT_EQUAL(e, Json::Value{});
} }
@ -406,6 +421,8 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, arrayInsertAtRandomIndex) {
JSONTEST_ASSERT_EQUAL(Json::Value("index0"), array[0]); // check append JSONTEST_ASSERT_EQUAL(Json::Value("index0"), array[0]); // check append
JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[1]); JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[1]);
JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[2]); JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[2]);
JSONTEST_ASSERT_EQUAL(Json::Value("index0"), array.front());
JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array.back());
// insert lvalue at the head // insert lvalue at the head
JSONTEST_ASSERT(array.insert(0, str1)); JSONTEST_ASSERT(array.insert(0, str1));
@ -413,6 +430,8 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, arrayInsertAtRandomIndex) {
JSONTEST_ASSERT_EQUAL(Json::Value("index0"), array[1]); JSONTEST_ASSERT_EQUAL(Json::Value("index0"), array[1]);
JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[2]); JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[2]);
JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[3]); JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[3]);
JSONTEST_ASSERT_EQUAL(Json::Value("index3"), array.front());
JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array.back());
// checking address // checking address
for (Json::ArrayIndex i = 0; i < 3; i++) { for (Json::ArrayIndex i = 0; i < 3; i++) {
JSONTEST_ASSERT_EQUAL(vec[i], &array[i]); JSONTEST_ASSERT_EQUAL(vec[i], &array[i]);
@ -425,6 +444,8 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, arrayInsertAtRandomIndex) {
JSONTEST_ASSERT_EQUAL(Json::Value("index4"), array[2]); JSONTEST_ASSERT_EQUAL(Json::Value("index4"), array[2]);
JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[3]); JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[3]);
JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[4]); JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[4]);
JSONTEST_ASSERT_EQUAL(Json::Value("index3"), array.front());
JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array.back());
// checking address // checking address
for (Json::ArrayIndex i = 0; i < 4; i++) { for (Json::ArrayIndex i = 0; i < 4; i++) {
JSONTEST_ASSERT_EQUAL(vec[i], &array[i]); JSONTEST_ASSERT_EQUAL(vec[i], &array[i]);
@ -438,6 +459,8 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, arrayInsertAtRandomIndex) {
JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[3]); JSONTEST_ASSERT_EQUAL(Json::Value("index1"), array[3]);
JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[4]); JSONTEST_ASSERT_EQUAL(Json::Value("index2"), array[4]);
JSONTEST_ASSERT_EQUAL(Json::Value("index5"), array[5]); JSONTEST_ASSERT_EQUAL(Json::Value("index5"), array[5]);
JSONTEST_ASSERT_EQUAL(Json::Value("index3"), array.front());
JSONTEST_ASSERT_EQUAL(Json::Value("index5"), array.back());
// checking address // checking address
for (Json::ArrayIndex i = 0; i < 5; i++) { for (Json::ArrayIndex i = 0; i < 5; i++) {
JSONTEST_ASSERT_EQUAL(vec[i], &array[i]); JSONTEST_ASSERT_EQUAL(vec[i], &array[i]);
@ -1168,15 +1191,13 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, integers) {
JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_EQUAL(true, val.asBool());
JSONTEST_ASSERT_STRING_EQUAL("-9223372036854775808", val.asString()); JSONTEST_ASSERT_STRING_EQUAL("-9223372036854775808", val.asString());
// int64 min (floating point constructor). Note that kint64min *is* exactly // int64 min (floating point constructor). Since double values in proximity of
// representable as a double. // kint64min are rounded to kint64min, we don't check for conversion to int64.
val = Json::Value(double(kint64min)); val = Json::Value(double(kint64min));
JSONTEST_ASSERT_EQUAL(Json::realValue, val.type()); JSONTEST_ASSERT_EQUAL(Json::realValue, val.type());
checks = IsCheck(); checks = IsCheck();
checks.isInt64_ = true;
checks.isIntegral_ = true;
checks.isDouble_ = true; checks.isDouble_ = true;
checks.isNumeric_ = true; checks.isNumeric_ = true;
JSONTEST_ASSERT_PRED(checkIs(val, checks)); JSONTEST_ASSERT_PRED(checkIs(val, checks));
@ -1185,8 +1206,6 @@ JSONTEST_FIXTURE_LOCAL(ValueTest, integers) {
JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue)); JSONTEST_ASSERT(!val.isConvertibleTo(Json::intValue));
JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue)); JSONTEST_ASSERT(!val.isConvertibleTo(Json::uintValue));
JSONTEST_ASSERT_EQUAL(kint64min, val.asInt64());
JSONTEST_ASSERT_EQUAL(kint64min, val.asLargestInt());
JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble()); JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asDouble());
JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat()); JSONTEST_ASSERT_EQUAL(-9223372036854775808.0, val.asFloat());
JSONTEST_ASSERT_EQUAL(true, val.asBool()); JSONTEST_ASSERT_EQUAL(true, val.asBool());
@ -3618,12 +3637,12 @@ JSONTEST_FIXTURE_LOCAL(CharReaderAllowSpecialFloatsTest, issue209) {
for (const auto& td : test_data) { for (const auto& td : test_data) {
bool ok = reader->parse(&*td.in.begin(), &*td.in.begin() + td.in.size(), bool ok = reader->parse(&*td.in.begin(), &*td.in.begin() + td.in.size(),
&root, &errs); &root, &errs);
JSONTEST_ASSERT(td.ok == ok) << "line:" << td.line << "\n" // clang-format off
<< " expected: {" JSONTEST_ASSERT(td.ok == ok) <<
<< "ok:" << td.ok << ", in:\'" << td.in << "\'" "line:" << td.line << "\n " <<
<< "}\n" "expected: {ok:" << td.ok << ", in:\'" << td.in << "\'}\n " <<
<< " actual: {" "actual: {ok:" << ok << "}\n";
<< "ok:" << ok << "}\n"; // clang-format on
} }
{ {
@ -3903,6 +3922,36 @@ JSONTEST_FIXTURE_LOCAL(FuzzTest, fuzzDoesntCrash) {
example.size())); example.size()));
} }
struct ParseWithStructuredErrorsTest : JsonTest::TestCase {
void testErrors(
const std::string& doc, bool success,
const std::vector<Json::CharReader::StructuredError>& expectedErrors) {
Json::CharReaderBuilder b;
CharReaderPtr reader(b.newCharReader());
Json::Value root;
JSONTEST_ASSERT_EQUAL(
reader->parse(doc.data(), doc.data() + doc.length(), &root, nullptr),
success);
auto actualErrors = reader->getStructuredErrors();
JSONTEST_ASSERT_EQUAL(expectedErrors.size(), actualErrors.size());
for (std::size_t i = 0; i < actualErrors.size(); i++) {
const auto& a = actualErrors[i];
const auto& e = expectedErrors[i];
JSONTEST_ASSERT_EQUAL(a.offset_start, e.offset_start);
JSONTEST_ASSERT_EQUAL(a.offset_limit, e.offset_limit);
JSONTEST_ASSERT_STRING_EQUAL(a.message, e.message);
}
}
};
JSONTEST_FIXTURE_LOCAL(ParseWithStructuredErrorsTest, success) {
testErrors("{}", true, {});
}
JSONTEST_FIXTURE_LOCAL(ParseWithStructuredErrorsTest, singleError) {
testErrors("{ 1 : 2 }", false, {{2, 3, "Missing '}' or object member name"}});
}
int main(int argc, const char* argv[]) { int main(int argc, const char* argv[]) {
JsonTest::Runner runner; JsonTest::Runner runner;

View File

@ -0,0 +1,4 @@
{
"a": "aaa",
"b": "bbb" // comments not allowed in strict mode
}

View File

@ -0,0 +1,4 @@
{
"a": "aaa", // comments not allowed in strict mode
"b": "bbb"
}

View File

@ -0,0 +1,3 @@
{
"array" : [1, 2, 3 /* comments not allowed in strict mode */]
}

View File

@ -0,0 +1 @@
{"one": 1 /* } */ { "two" : 2 }

View File

@ -0,0 +1,3 @@
.=[]
.[0]=-inf
.[1]=inf

View File

@ -0,0 +1 @@
[-1e+9999, 1e+9999]

View File

@ -97,14 +97,17 @@ def runAllTests(jsontest_executable_path, input_dir = None,
valgrind_path = use_valgrind and VALGRIND_CMD or '' valgrind_path = use_valgrind and VALGRIND_CMD or ''
for input_path in tests + test_jsonchecker: for input_path in tests + test_jsonchecker:
expect_failure = os.path.basename(input_path).startswith('fail') expect_failure = os.path.basename(input_path).startswith('fail')
is_json_checker_test = (input_path in test_jsonchecker) or expect_failure is_json_checker_test = input_path in test_jsonchecker
is_parse_only = is_json_checker_test or expect_failure
is_strict_test = ('_strict_' in os.path.basename(input_path)) or is_json_checker_test
print('TESTING:', input_path, end=' ') print('TESTING:', input_path, end=' ')
options = is_json_checker_test and '--json-checker' or '' options = is_parse_only and '--parse-only' or ''
options += is_strict_test and ' --strict' or ''
options += ' --json-writer %s'%writerClass options += ' --json-writer %s'%writerClass
cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options, cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options,
input_path) input_path)
status, process_output = getStatusOutput(cmd) status, process_output = getStatusOutput(cmd)
if is_json_checker_test: if is_parse_only:
if expect_failure: if expect_failure:
if not status: if not status:
print('FAILED') print('FAILED')