Mirror of https://github.com/open-source-parsers/jsoncpp.git (synced 2025-07-04 02:37:10 +02:00)

Commit 3ad2a4aebb: Merge branch 'master' into master
@@ -1,4 +1,4 @@
BasedOnStyle: LLVM
DerivePointerAlignment: false
PointerAlignment: Left
SpacesBeforeTrailingComments: 1
.github/workflows/clang-format.yml (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
name: clang-format check
on: [check_run, push]

jobs:
  formatting-check:
    name: formatting check
    runs-on: ubuntu-latest
    strategy:
      matrix:
        path:
          - 'src'
          - 'examples'
          - 'include'
    steps:
      - uses: actions/checkout@v4
      - name: runs clang-format style check for C/C++/Protobuf programs.
        uses: jidicula/clang-format-action@v4.13.0
        with:
          clang-format-version: '18'
          check-path: ${{ matrix.path }}
.github/workflows/meson.yml (vendored, new file, 65 lines)
@@ -0,0 +1,65 @@
name: meson build and test
run-name: update pushed to ${{ github.ref }}
on: [check_run, push]

jobs:
  publish:
    runs-on: ${{ matrix.os }}

    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]

    steps:
      - name: checkout repository
        uses: actions/checkout@v4

      - name: setup python
        uses: actions/setup-python@v5

      - name: meson build
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          action: build

      - name: meson test
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          action: test

  coverage:
    runs-on: ubuntu-latest

    steps:
      - name: checkout repository
        uses: actions/checkout@v4

      - name: setup python
        uses: actions/setup-python@v5

      - name: meson build
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          setup-options: -Db_coverage=true
          action: build

      - name: meson test
        uses: BSFishy/meson-build@v1.0.3
        with:
          meson-version: 1.5.1
          ninja-version: 1.11.1.1
          setup-options: -Db_coverage=true
          action: test

      - name: generate code coverage report
        uses: threeal/gcovr-action@v1.0.0
        with:
          coveralls-send: true
          github-token: ${{ secrets.GITHUB_TOKEN }}
.travis.yml (deleted, 71 lines)
@@ -1,71 +0,0 @@
# Build matrix / environment variables are explained on:
# http://about.travis-ci.com/docs/user/build-configuration/
# This file can be validated on: http://www.yamllint.com/
# Or using the Ruby based travel command line tool:
# gem install travis --no-rdoc --no-ri
# travis lint .travis.yml
language: cpp
sudo: false
addons:
  homebrew:
    packages:
      - clang-format
      - meson
      - ninja
    update: false # do not update homebrew by default
  apt:
    sources:
      - ubuntu-toolchain-r-test
      - llvm-toolchain-xenial-8
    packages:
      - clang-format-8
      - clang-8
      - valgrind
matrix:
  include:
    - name: Mac clang meson static release testing
      os: osx
      osx_image: xcode11
      compiler: clang
      env:
        CXX="clang++"
        CC="clang"
        LIB_TYPE=static
        BUILD_TYPE=release
      script: ./.travis_scripts/meson_builder.sh
    - name: Linux xenial clang meson static release testing
      os: linux
      dist: xenial
      compiler: clang
      env:
        CXX="clang++"
        CC="clang"
        LIB_TYPE=static
        BUILD_TYPE=release
        PYTHONUSERBASE="$(pwd)/LOCAL"
        PATH="$PYTHONUSERBASE/bin:$PATH"
      # before_install and install steps only needed for linux meson builds
      before_install:
        - source ./.travis_scripts/travis.before_install.${TRAVIS_OS_NAME}.sh
      install:
        - source ./.travis_scripts/travis.install.${TRAVIS_OS_NAME}.sh
      script: ./.travis_scripts/meson_builder.sh
    - name: Linux xenial gcc cmake coverage
      os: linux
      dist: xenial
      compiler: gcc
      env:
        CXX=g++
        CC=gcc
        DO_Coverage=ON
        BUILD_TOOL="Unix Makefiles"
        BUILD_TYPE=Debug
        LIB_TYPE=shared
        DESTDIR=/tmp/cmake_json_cpp
      before_install:
        - pip install --user cpp-coveralls
      script: ./.travis_scripts/cmake_builder.sh
after_success:
  - coveralls --include src/lib_json --include include
notifications:
  email: false
@@ -1,130 +0,0 @@
#!/usr/bin/env sh
# This script can be used on the command line directly to configure several
# different build environments.
# This is called by `.travis.yml` via Travis CI.
# Travis supplies $TRAVIS_OS_NAME.
# http://docs.travis-ci.com/user/multi-os/
# Our .travis.yml also defines:

# - BUILD_TYPE=Release/Debug
# - LIB_TYPE=static/shared
#
# Optional environmental variables
# - DESTDIR <- used for setting the install prefix
# - BUILD_TOOL=["Unix Makefile"|"Ninja"]
# - BUILDNAME <- how to identify this build on the dashboard
# - DO_MemCheck <- if set, try to use valgrind
# - DO_Coverage <- if set, try to do dashboard coverage testing
#

env_set=1
if ${BUILD_TYPE+false}; then
  echo "BUILD_TYPE not set in environment."
  env_set=0
fi
if ${LIB_TYPE+false}; then
  echo "LIB_TYPE not set in environment."
  env_set=0
fi
if ${CXX+false}; then
  echo "CXX not set in environment."
  env_set=0
fi


if [ ${env_set} -eq 0 ]; then
  echo "USAGE: CXX=$(which clang++) BUILD_TYPE=[Release|Debug] LIB_TYPE=[static|shared] $0"
  echo ""
  echo "Examples:"
  echo " CXX=$(which clang++) BUILD_TYPE=Release LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
  echo " CXX=$(which clang++) BUILD_TYPE=Debug LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
  echo " CXX=$(which clang++) BUILD_TYPE=Release LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
  echo " CXX=$(which clang++) BUILD_TYPE=Debug LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"

  echo " CXX=$(which g++) BUILD_TYPE=Release LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
  echo " CXX=$(which g++) BUILD_TYPE=Debug LIB_TYPE=shared DESTDIR=/tmp/cmake_json_cpp $0"
  echo " CXX=$(which g++) BUILD_TYPE=Release LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"
  echo " CXX=$(which g++) BUILD_TYPE=Debug LIB_TYPE=static DESTDIR=/tmp/cmake_json_cpp $0"

  exit -1
fi

if ${DESTDIR+false}; then
  DESTDIR="/usr/local"
fi

# -e: fail on error
# -v: show commands
# -x: show expanded commands
set -vex

env | sort

which cmake
cmake --version

echo ${CXX}
${CXX} --version
_COMPILER_NAME=`basename ${CXX}`
if [ "${LIB_TYPE}" = "shared" ]; then
  _CMAKE_BUILD_SHARED_LIBS=ON
else
  _CMAKE_BUILD_SHARED_LIBS=OFF
fi

CTEST_TESTING_OPTION="-D ExperimentalTest"
# - DO_MemCheck <- if set, try to use valgrind
if ! ${DO_MemCheck+false}; then
  valgrind --version
  CTEST_TESTING_OPTION="-D ExperimentalMemCheck"
else
  # - DO_Coverage <- if set, try to do dashboard coverage testing
  if ! ${DO_Coverage+false}; then
    export CXXFLAGS="-fprofile-arcs -ftest-coverage"
    export LDFLAGS="-fprofile-arcs -ftest-coverage"
    CTEST_TESTING_OPTION="-D ExperimentalTest -D ExperimentalCoverage"
    #gcov --version
  fi
fi

# Ninja = Generates build.ninja files.
if ${BUILD_TOOL+false}; then
  BUILD_TOOL="Ninja"
  export _BUILD_EXE=ninja
  which ninja
  ninja --version
else
  # Unix Makefiles = Generates standard UNIX makefiles.
  export _BUILD_EXE=make
fi

_BUILD_DIR_NAME="build-cmake_${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}_${_BUILD_EXE}"
mkdir -p ${_BUILD_DIR_NAME}
cd "${_BUILD_DIR_NAME}"
if ${BUILDNAME+false}; then
  _HOSTNAME=`hostname -s`
  BUILDNAME="${_HOSTNAME}_${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}_${_BUILD_EXE}"
fi
cmake \
  -G "${BUILD_TOOL}" \
  -DBUILDNAME:STRING="${BUILDNAME}" \
  -DCMAKE_CXX_COMPILER:PATH=${CXX} \
  -DCMAKE_BUILD_TYPE:STRING=${BUILD_TYPE} \
  -DBUILD_SHARED_LIBS:BOOL=${_CMAKE_BUILD_SHARED_LIBS} \
  -DCMAKE_INSTALL_PREFIX:PATH=${DESTDIR} \
  ../

ctest -C ${BUILD_TYPE} -D ExperimentalStart -D ExperimentalConfigure -D ExperimentalBuild ${CTEST_TESTING_OPTION} -D ExperimentalSubmit
# Final step is to verify that installation succeeds
cmake --build . --config ${BUILD_TYPE} --target install

if [ "${DESTDIR}" != "/usr/local" ]; then
  ${_BUILD_EXE} install
fi
cd -

if ${CLEANUP+false}; then
  echo "Skipping cleanup: build directory will persist."
else
  rm -r "${_BUILD_DIR_NAME}"
fi
@@ -1,83 +0,0 @@
#!/usr/bin/env sh
# This script can be used on the command line directly to configure several
# different build environments.
# This is called by `.travis.yml` via Travis CI.
# Travis supplies $TRAVIS_OS_NAME.
# http://docs.travis-ci.com/user/multi-os/
# Our .travis.yml also defines:

# - BUILD_TYPE=release/debug
# - LIB_TYPE=static/shared

env_set=1
if ${BUILD_TYPE+false}; then
  echo "BUILD_TYPE not set in environment."
  env_set=0
fi
if ${LIB_TYPE+false}; then
  echo "LIB_TYPE not set in environment."
  env_set=0
fi
if ${CXX+false}; then
  echo "CXX not set in environment."
  env_set=0
fi


if [ ${env_set} -eq 0 ]; then
  echo "USAGE: CXX=$(which clang++) BUILD_TYPE=[release|debug] LIB_TYPE=[static|shared] $0"
  echo ""
  echo "Examples:"
  echo " CXX=$(which clang++) BUILD_TYPE=release LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
  echo " CXX=$(which clang++) BUILD_TYPE=debug LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
  echo " CXX=$(which clang++) BUILD_TYPE=release LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
  echo " CXX=$(which clang++) BUILD_TYPE=debug LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"

  echo " CXX=$(which g++) BUILD_TYPE=release LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
  echo " CXX=$(which g++) BUILD_TYPE=debug LIB_TYPE=shared DESTDIR=/tmp/meson_json_cpp $0"
  echo " CXX=$(which g++) BUILD_TYPE=release LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"
  echo " CXX=$(which g++) BUILD_TYPE=debug LIB_TYPE=static DESTDIR=/tmp/meson_json_cpp $0"

  exit -1
fi

if ${DESTDIR+false}; then
  DESTDIR="/usr/local"
fi

# -e: fail on error
# -v: show commands
# -x: show expanded commands
set -vex


env | sort

which python3
which meson
which ninja
echo ${CXX}
${CXX} --version
python3 --version
meson --version
ninja --version
_COMPILER_NAME=`basename ${CXX}`
_BUILD_DIR_NAME="build-${BUILD_TYPE}_${LIB_TYPE}_${_COMPILER_NAME}"

#./.travis_scripts/run-clang-format.sh
meson --fatal-meson-warnings --werror --buildtype ${BUILD_TYPE} --default-library ${LIB_TYPE} . "${_BUILD_DIR_NAME}"
ninja -v -j 2 -C "${_BUILD_DIR_NAME}"

cd "${_BUILD_DIR_NAME}"
meson test --no-rebuild --print-errorlogs

if [ "${DESTDIR}" != "/usr/local" ]; then
  ninja install
fi
cd -

if ${CLEANUP+false}; then
  echo "Skipping cleanup: build directory will persist."
else
  rm -r "${_BUILD_DIR_NAME}"
fi
@@ -1,356 +0,0 @@
#!/usr/bin/env python
"""A wrapper script around clang-format, suitable for linting multiple files
and to use for continuous integration.
This is an alternative API for the clang-format command line.
It runs over multiple files and directories in parallel.
A diff output is produced and a sensible exit code is returned.

NOTE: pulled from https://github.com/Sarcasm/run-clang-format, which is
licensed under the MIT license.
"""

from __future__ import print_function, unicode_literals

import argparse
import codecs
import difflib
import fnmatch
import io
import multiprocessing
import os
import signal
import subprocess
import sys
import traceback

from functools import partial

try:
    from subprocess import DEVNULL  # py3k
except ImportError:
    DEVNULL = open(os.devnull, "wb")


DEFAULT_EXTENSIONS = 'c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx'


class ExitStatus:
    SUCCESS = 0
    DIFF = 1
    TROUBLE = 2


def list_files(files, recursive=False, extensions=None, exclude=None):
    if extensions is None:
        extensions = []
    if exclude is None:
        exclude = []

    out = []
    for file in files:
        if recursive and os.path.isdir(file):
            for dirpath, dnames, fnames in os.walk(file):
                fpaths = [os.path.join(dirpath, fname) for fname in fnames]
                for pattern in exclude:
                    # os.walk() supports trimming down the dnames list
                    # by modifying it in-place,
                    # to avoid unnecessary directory listings.
                    dnames[:] = [
                        x for x in dnames
                        if
                        not fnmatch.fnmatch(os.path.join(dirpath, x), pattern)
                    ]
                    fpaths = [
                        x for x in fpaths if not fnmatch.fnmatch(x, pattern)
                    ]
                for f in fpaths:
                    ext = os.path.splitext(f)[1][1:]
                    if ext in extensions:
                        out.append(f)
        else:
            out.append(file)
    return out


def make_diff(file, original, reformatted):
    return list(
        difflib.unified_diff(
            original,
            reformatted,
            fromfile='{}\t(original)'.format(file),
            tofile='{}\t(reformatted)'.format(file),
            n=3))


class DiffError(Exception):
    def __init__(self, message, errs=None):
        super(DiffError, self).__init__(message)
        self.errs = errs or []


class UnexpectedError(Exception):
    def __init__(self, message, exc=None):
        super(UnexpectedError, self).__init__(message)
        self.formatted_traceback = traceback.format_exc()
        self.exc = exc


def run_clang_format_diff_wrapper(args, file):
    try:
        ret = run_clang_format_diff(args, file)
        return ret
    except DiffError:
        raise
    except Exception as e:
        raise UnexpectedError('{}: {}: {}'.format(file, e.__class__.__name__,
                                                  e), e)


def run_clang_format_diff(args, file):
    try:
        with io.open(file, 'r', encoding='utf-8') as f:
            original = f.readlines()
    except IOError as exc:
        raise DiffError(str(exc))
    invocation = [args.clang_format_executable, file]

    # Use of utf-8 to decode the process output.
    #
    # Hopefully, this is the correct thing to do.
    #
    # It's done due to the following assumptions (which may be incorrect):
    # - clang-format will returns the bytes read from the files as-is,
    #   without conversion, and it is already assumed that the files use utf-8.
    # - if the diagnostics were internationalized, they would use utf-8:
    #   > Adding Translations to Clang
    #   >
    #   > Not possible yet!
    #   > Diagnostic strings should be written in UTF-8,
    #   > the client can translate to the relevant code page if needed.
    #   > Each translation completely replaces the format string
    #   > for the diagnostic.
    #   > -- http://clang.llvm.org/docs/InternalsManual.html#internals-diag-translation
    #
    # It's not pretty, due to Python 2 & 3 compatibility.
    encoding_py3 = {}
    if sys.version_info[0] >= 3:
        encoding_py3['encoding'] = 'utf-8'

    try:
        proc = subprocess.Popen(
            invocation,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
            **encoding_py3)
    except OSError as exc:
        raise DiffError(
            "Command '{}' failed to start: {}".format(
                subprocess.list2cmdline(invocation), exc
            )
        )
    proc_stdout = proc.stdout
    proc_stderr = proc.stderr
    if sys.version_info[0] < 3:
        # make the pipes compatible with Python 3,
        # reading lines should output unicode
        encoding = 'utf-8'
        proc_stdout = codecs.getreader(encoding)(proc_stdout)
        proc_stderr = codecs.getreader(encoding)(proc_stderr)
    # hopefully the stderr pipe won't get full and block the process
    outs = list(proc_stdout.readlines())
    errs = list(proc_stderr.readlines())
    proc.wait()
    if proc.returncode:
        raise DiffError(
            "Command '{}' returned non-zero exit status {}".format(
                subprocess.list2cmdline(invocation), proc.returncode
            ),
            errs,
        )
    return make_diff(file, original, outs), errs


def bold_red(s):
    return '\x1b[1m\x1b[31m' + s + '\x1b[0m'


def colorize(diff_lines):
    def bold(s):
        return '\x1b[1m' + s + '\x1b[0m'

    def cyan(s):
        return '\x1b[36m' + s + '\x1b[0m'

    def green(s):
        return '\x1b[32m' + s + '\x1b[0m'

    def red(s):
        return '\x1b[31m' + s + '\x1b[0m'

    for line in diff_lines:
        if line[:4] in ['--- ', '+++ ']:
            yield bold(line)
        elif line.startswith('@@ '):
            yield cyan(line)
        elif line.startswith('+'):
            yield green(line)
        elif line.startswith('-'):
            yield red(line)
        else:
            yield line


def print_diff(diff_lines, use_color):
    if use_color:
        diff_lines = colorize(diff_lines)
    if sys.version_info[0] < 3:
        sys.stdout.writelines((l.encode('utf-8') for l in diff_lines))
    else:
        sys.stdout.writelines(diff_lines)


def print_trouble(prog, message, use_colors):
    error_text = 'error:'
    if use_colors:
        error_text = bold_red(error_text)
    print("{}: {} {}".format(prog, error_text, message), file=sys.stderr)


def main():
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--clang-format-executable',
        metavar='EXECUTABLE',
        help='path to the clang-format executable',
        default='clang-format')
    parser.add_argument(
        '--extensions',
        help='comma separated list of file extensions (default: {})'.format(
            DEFAULT_EXTENSIONS),
        default=DEFAULT_EXTENSIONS)
    parser.add_argument(
        '-r',
        '--recursive',
        action='store_true',
        help='run recursively over directories')
    parser.add_argument('files', metavar='file', nargs='+')
    parser.add_argument(
        '-q',
        '--quiet',
        action='store_true')
    parser.add_argument(
        '-j',
        metavar='N',
        type=int,
        default=0,
        help='run N clang-format jobs in parallel'
        ' (default number of cpus + 1)')
    parser.add_argument(
        '--color',
        default='auto',
        choices=['auto', 'always', 'never'],
        help='show colored diff (default: auto)')
    parser.add_argument(
        '-e',
        '--exclude',
        metavar='PATTERN',
        action='append',
        default=[],
        help='exclude paths matching the given glob-like pattern(s)'
        ' from recursive search')

    args = parser.parse_args()

    # use default signal handling, like diff return SIGINT value on ^C
    # https://bugs.python.org/issue14229#msg156446
    signal.signal(signal.SIGINT, signal.SIG_DFL)
    try:
        signal.SIGPIPE
    except AttributeError:
        # compatibility, SIGPIPE does not exist on Windows
        pass
    else:
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    colored_stdout = False
    colored_stderr = False
    if args.color == 'always':
        colored_stdout = True
        colored_stderr = True
    elif args.color == 'auto':
        colored_stdout = sys.stdout.isatty()
        colored_stderr = sys.stderr.isatty()

    version_invocation = [args.clang_format_executable, str("--version")]
    try:
        subprocess.check_call(version_invocation, stdout=DEVNULL)
    except subprocess.CalledProcessError as e:
        print_trouble(parser.prog, str(e), use_colors=colored_stderr)
        return ExitStatus.TROUBLE
    except OSError as e:
        print_trouble(
            parser.prog,
            "Command '{}' failed to start: {}".format(
                subprocess.list2cmdline(version_invocation), e
            ),
            use_colors=colored_stderr,
        )
        return ExitStatus.TROUBLE

    retcode = ExitStatus.SUCCESS
    files = list_files(
        args.files,
        recursive=args.recursive,
        exclude=args.exclude,
        extensions=args.extensions.split(','))

    if not files:
        return

    njobs = args.j
    if njobs == 0:
        njobs = multiprocessing.cpu_count() + 1
    njobs = min(len(files), njobs)

    if njobs == 1:
        # execute directly instead of in a pool,
        # less overhead, simpler stacktraces
        it = (run_clang_format_diff_wrapper(args, file) for file in files)
        pool = None
    else:
        pool = multiprocessing.Pool(njobs)
        it = pool.imap_unordered(
            partial(run_clang_format_diff_wrapper, args), files)
    while True:
        try:
            outs, errs = next(it)
        except StopIteration:
            break
        except DiffError as e:
            print_trouble(parser.prog, str(e), use_colors=colored_stderr)
            retcode = ExitStatus.TROUBLE
            sys.stderr.writelines(e.errs)
        except UnexpectedError as e:
            print_trouble(parser.prog, str(e), use_colors=colored_stderr)
            sys.stderr.write(e.formatted_traceback)
            retcode = ExitStatus.TROUBLE
            # stop at the first unexpected error,
            # something could be very wrong,
            # don't process all files unnecessarily
            if pool:
                pool.terminate()
                break
        else:
            sys.stderr.writelines(errs)
            if outs == []:
                continue
            if not args.quiet:
                print_diff(outs, use_color=colored_stdout)
            if retcode == ExitStatus.SUCCESS:
                retcode = ExitStatus.DIFF
    return retcode


if __name__ == '__main__':
    sys.exit(main())
@@ -1,4 +0,0 @@
#!/usr/bin/env bash

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
python $DIR/run-clang-format.py -r $DIR/../src/**/ $DIR/../include/**/
@@ -1,8 +0,0 @@
set -vex

# Preinstalled versions of python are dependent on which Ubuntu distribution
# you are running. The below version needs to be updated whenever we roll
# the Ubuntu version used in Travis.
# https://docs.travis-ci.com/user/languages/python/

pyenv global 3.7.1
@@ -1,5 +0,0 @@
set -vex

pip3 install --user meson ninja
which meson
which ninja
@@ -1 +0,0 @@
# NOTHING TO DO HERE
@@ -69,7 +69,9 @@ public:
  // Boilerplate
  SecureAllocator() {}
  template <typename U> SecureAllocator(const SecureAllocator<U>&) {}
  template <typename U> struct rebind { using other = SecureAllocator<U>; };
  template <typename U> struct rebind {
    using other = SecureAllocator<U>;
  };
};

template <typename T, typename U>
@@ -190,6 +190,7 @@ private:
  using Errors = std::deque<ErrorInfo>;

  bool readToken(Token& token);
  bool readTokenSkippingComments(Token& token);
  void skipSpaces();
  bool match(const Char* pattern, int patternLength);
  bool readComment();
@@ -221,7 +222,6 @@ private:
                                 int& column) const;
  String getLocationLineAndColumn(Location location) const;
  void addComment(Location begin, Location end, CommentPlacement placement);
  void skipCommentTokens(Token& token);

  static bool containsNewLine(Location begin, Location end);
  static String normalizeEOL(Location begin, Location end);
@@ -586,19 +586,23 @@ public:
  iterator end();

  /// \brief Returns a reference to the first element in the `Value`.
  /// Requires that this value holds an array or json object, with at least one element.
  /// Requires that this value holds an array or json object, with at least one
  /// element.
  const Value& front() const;

  /// \brief Returns a reference to the first element in the `Value`.
  /// Requires that this value holds an array or json object, with at least one element.
  /// Requires that this value holds an array or json object, with at least one
  /// element.
  Value& front();

  /// \brief Returns a reference to the last element in the `Value`.
  /// Requires that value holds an array or json object, with at least one element.
  /// Requires that value holds an array or json object, with at least one
  /// element.
  const Value& back() const;

  /// \brief Returns a reference to the last element in the `Value`.
  /// Requires that this value holds an array or json object, with at least one element.
  /// Requires that this value holds an array or json object, with at least one
  /// element.
  Value& back();

  // Accessors for the [start, limit) range of bytes within the JSON text from
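Note: a minimal usage sketch of front()/back() as documented above (not part of the diff); it assumes a non-empty array, as the comments require:

    Json::Value arr(Json::arrayValue);
    arr.append("first");
    arr.append("last");
    const Json::Value& first = arr.front(); // "first"
    const Json::Value& last = arr.back();   // "last"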
@@ -168,8 +168,7 @@ public:
#pragma warning(push)
#pragma warning(disable : 4996) // Deriving from deprecated class
#endif
class JSON_API FastWriter
    : public Writer {
class JSON_API FastWriter : public Writer {
public:
  FastWriter();
  ~FastWriter() override = default;
@@ -228,8 +227,7 @@ private:
#pragma warning(push)
#pragma warning(disable : 4996) // Deriving from deprecated class
#endif
class JSON_API
    StyledWriter : public Writer {
class JSON_API StyledWriter : public Writer {
public:
  StyledWriter();
  ~StyledWriter() override = default;
@@ -297,8 +295,7 @@ private:
#pragma warning(push)
#pragma warning(disable : 4996) // Deriving from deprecated class
#endif
class JSON_API
    StyledStreamWriter {
class JSON_API StyledStreamWriter {
public:
  /**
   * \param indentation Each level will be indented by this amount extra.
@@ -354,6 +351,7 @@ String JSON_API valueToString(
    PrecisionType precisionType = PrecisionType::significantDigits);
String JSON_API valueToString(bool value);
String JSON_API valueToQuotedString(const char* value);
String JSON_API valueToQuotedString(const char* value, size_t length);

/// \brief Output using the StyledStreamWriter.
/// \see Json::operator>>()
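Note: a small sketch (not part of the diff) of what the new length-taking overload allows; the embedded-NUL motivation is an assumption based on the strlen()-based overload above:

    const Json::String key("ab\0cd", 5);  // a key containing an embedded NUL byte
    Json::String a = Json::valueToQuotedString(key.c_str());             // strlen() stops at the NUL, quoting only "ab"
    Json::String b = Json::valueToQuotedString(key.c_str(), key.size()); // quotes all five bytes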
@@ -240,11 +240,14 @@ static int parseCommandLine(int argc, const char* argv[], Options* opts) {
    return printUsage(argv);
  }
  int index = 1;
  if (Json::String(argv[index]) == "--json-checker") {
    opts->features = Json::Features::strictMode();
  if (Json::String(argv[index]) == "--parse-only") {
    opts->parseOnly = true;
    ++index;
  }
  if (Json::String(argv[index]) == "--strict") {
    opts->features = Json::Features::strictMode();
    ++index;
  }
  if (Json::String(argv[index]) == "--json-config") {
    printConfig();
    return 3;
@@ -129,7 +129,7 @@ bool Reader::parse(const char* beginDoc, const char* endDoc, Value& root,

  bool successful = readValue();
  Token token;
  skipCommentTokens(token);
  readTokenSkippingComments(token);
  if (collectComments_ && !commentsBefore_.empty())
    root.setComment(commentsBefore_, commentAfter);
  if (features_.strictRoot_) {
@@ -157,7 +157,7 @@ bool Reader::readValue() {
    throwRuntimeError("Exceeded stackLimit in readValue().");

  Token token;
  skipCommentTokens(token);
  readTokenSkippingComments(token);
  bool successful = true;

  if (collectComments_ && !commentsBefore_.empty()) {
@@ -225,14 +225,14 @@ bool Reader::readValue() {
  return successful;
}

void Reader::skipCommentTokens(Token& token) {
bool Reader::readTokenSkippingComments(Token& token) {
  bool success = readToken(token);
  if (features_.allowComments_) {
    do {
      readToken(token);
    } while (token.type_ == tokenComment);
  } else {
    readToken(token);
    while (success && token.type_ == tokenComment) {
      success = readToken(token);
    }
  }
  return success;
}

bool Reader::readToken(Token& token) {
@@ -446,12 +446,7 @@ bool Reader::readObject(Token& token) {
  Value init(objectValue);
  currentValue().swapPayload(init);
  currentValue().setOffsetStart(token.start_ - begin_);
  while (readToken(tokenName)) {
    bool initialTokenOk = true;
    while (tokenName.type_ == tokenComment && initialTokenOk)
      initialTokenOk = readToken(tokenName);
    if (!initialTokenOk)
      break;
  while (readTokenSkippingComments(tokenName)) {
    if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
      return true;
    name.clear();
@@ -480,15 +475,11 @@ bool Reader::readObject(Token& token) {
      return recoverFromError(tokenObjectEnd);

    Token comma;
    if (!readToken(comma) ||
        (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
         comma.type_ != tokenComment)) {
    if (!readTokenSkippingComments(comma) ||
        (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator)) {
      return addErrorAndRecover("Missing ',' or '}' in object declaration",
                                comma, tokenObjectEnd);
    }
    bool finalizeTokenOk = true;
    while (comma.type_ == tokenComment && finalizeTokenOk)
      finalizeTokenOk = readToken(comma);
    if (comma.type_ == tokenObjectEnd)
      return true;
  }
@@ -518,10 +509,7 @@ bool Reader::readArray(Token& token) {

    Token currentToken;
    // Accept Comment after last item in the array.
    ok = readToken(currentToken);
    while (currentToken.type_ == tokenComment && ok) {
      ok = readToken(currentToken);
    }
    ok = readTokenSkippingComments(currentToken);
    bool badTokenType = (currentToken.type_ != tokenArraySeparator &&
                         currentToken.type_ != tokenArrayEnd);
    if (!ok || badTokenType) {
@@ -773,7 +761,7 @@ void Reader::getLocationLineAndColumn(Location location, int& line,
  while (current < location && current != end_) {
    Char c = *current++;
    if (c == '\r') {
      if (*current == '\n')
      if (current != end_ && *current == '\n')
        ++current;
      lastLineStart = current;
      ++line;
@@ -943,6 +931,7 @@ private:
  using Errors = std::deque<ErrorInfo>;

  bool readToken(Token& token);
  bool readTokenSkippingComments(Token& token);
  void skipSpaces();
  void skipBom(bool skipBom);
  bool match(const Char* pattern, int patternLength);
@@ -976,7 +965,6 @@ private:
                                 int& column) const;
  String getLocationLineAndColumn(Location location) const;
  void addComment(Location begin, Location end, CommentPlacement placement);
  void skipCommentTokens(Token& token);

  static String normalizeEOL(Location begin, Location end);
  static bool containsNewLine(Location begin, Location end);
@@ -1030,7 +1018,7 @@ bool OurReader::parse(const char* beginDoc, const char* endDoc, Value& root,
  bool successful = readValue();
  nodes_.pop();
  Token token;
  skipCommentTokens(token);
  readTokenSkippingComments(token);
  if (features_.failIfExtra_ && (token.type_ != tokenEndOfStream)) {
    addError("Extra non-whitespace after JSON value.", token);
    return false;
@@ -1058,7 +1046,7 @@ bool OurReader::readValue() {
  if (nodes_.size() > features_.stackLimit_)
    throwRuntimeError("Exceeded stackLimit in readValue().");
  Token token;
  skipCommentTokens(token);
  readTokenSkippingComments(token);
  bool successful = true;

  if (collectComments_ && !commentsBefore_.empty()) {
@@ -1145,14 +1133,14 @@ bool OurReader::readValue() {
  return successful;
}

void OurReader::skipCommentTokens(Token& token) {
bool OurReader::readTokenSkippingComments(Token& token) {
  bool success = readToken(token);
  if (features_.allowComments_) {
    do {
      readToken(token);
    } while (token.type_ == tokenComment);
  } else {
    readToken(token);
    while (success && token.type_ == tokenComment) {
      success = readToken(token);
    }
  }
  return success;
}

bool OurReader::readToken(Token& token) {
@@ -1449,12 +1437,7 @@ bool OurReader::readObject(Token& token) {
  Value init(objectValue);
  currentValue().swapPayload(init);
  currentValue().setOffsetStart(token.start_ - begin_);
  while (readToken(tokenName)) {
    bool initialTokenOk = true;
    while (tokenName.type_ == tokenComment && initialTokenOk)
      initialTokenOk = readToken(tokenName);
    if (!initialTokenOk)
      break;
  while (readTokenSkippingComments(tokenName)) {
    if (tokenName.type_ == tokenObjectEnd &&
        (name.empty() ||
         features_.allowTrailingCommas_)) // empty object or trailing comma
@@ -1491,15 +1474,11 @@ bool OurReader::readObject(Token& token) {
      return recoverFromError(tokenObjectEnd);

    Token comma;
    if (!readToken(comma) ||
        (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
         comma.type_ != tokenComment)) {
    if (!readTokenSkippingComments(comma) ||
        (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator)) {
      return addErrorAndRecover("Missing ',' or '}' in object declaration",
                                comma, tokenObjectEnd);
    }
    bool finalizeTokenOk = true;
    while (comma.type_ == tokenComment && finalizeTokenOk)
      finalizeTokenOk = readToken(comma);
    if (comma.type_ == tokenObjectEnd)
      return true;
  }
@@ -1533,10 +1512,7 @@ bool OurReader::readArray(Token& token) {

    Token currentToken;
    // Accept Comment after last item in the array.
    ok = readToken(currentToken);
    while (currentToken.type_ == tokenComment && ok) {
      ok = readToken(currentToken);
    }
    ok = readTokenSkippingComments(currentToken);
    bool badTokenType = (currentToken.type_ != tokenArraySeparator &&
                         currentToken.type_ != tokenArrayEnd);
    if (!ok || badTokenType) {
@@ -1825,7 +1801,7 @@ void OurReader::getLocationLineAndColumn(Location location, int& line,
  while (current < location && current != end_) {
    Char c = *current++;
    if (c == '\r') {
      if (*current == '\n')
      if (current != end_ && *current == '\n')
        ++current;
      lastLineStart = current;
      ++line;
|
||||
// Always discard trailing newline, to aid indentation.
|
||||
comment.pop_back();
|
||||
}
|
||||
JSON_ASSERT(!comment.empty());
|
||||
JSON_ASSERT_MESSAGE(
|
||||
comment[0] == '\0' || comment[0] == '/',
|
||||
comment.empty() || comment[0] == '/',
|
||||
"in Json::Value::setComment(): Comments must start with /");
|
||||
comments_.set(placement, std::move(comment));
|
||||
}
|
||||
|
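Note: a minimal sketch of setComment() usage (not part of the diff); with the relaxed condition an empty comment presumably no longer trips the assertion, while non-empty comments must still start with '/':

    Json::Value v;
    v["answer"] = 42;
    v.setComment(Json::String("// the only key"), Json::commentBefore); // OK: starts with '/'
    v.setComment(Json::String(), Json::commentAfter);                   // assumed OK after this change: empty comment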
@@ -132,8 +132,9 @@ String valueToString(double value, bool useSpecialFloats,
  if (!isfinite(value)) {
    static const char* const reps[2][3] = {{"NaN", "-Infinity", "Infinity"},
                                           {"null", "-1e+9999", "1e+9999"}};
    return reps[useSpecialFloats ? 0 : 1]
               [isnan(value) ? 0 : (value < 0) ? 1 : 2];
    return reps[useSpecialFloats ? 0 : 1][isnan(value) ? 0
                                          : (value < 0) ? 1
                                                        : 2];
  }

  String buffer(size_t(36), '\0');
@@ -353,6 +354,10 @@ String valueToQuotedString(const char* value) {
  return valueToQuotedStringN(value, strlen(value));
}

String valueToQuotedString(const char* value, size_t length) {
  return valueToQuotedStringN(value, length);
}

// Class Writer
// //////////////////////////////////////////////////////////////////
Writer::~Writer() = default;
@@ -490,7 +495,7 @@ void StyledWriter::writeValue(const Value& value) {
      const String& name = *it;
      const Value& childValue = value[name];
      writeCommentBeforeValue(childValue);
      writeWithIndent(valueToQuotedString(name.c_str()));
      writeWithIndent(valueToQuotedString(name.c_str(), name.size()));
      document_ += " : ";
      writeValue(childValue);
      if (++it == members.end()) {
@@ -708,7 +713,7 @@ void StyledStreamWriter::writeValue(const Value& value) {
      const String& name = *it;
      const Value& childValue = value[name];
      writeCommentBeforeValue(childValue);
      writeWithIndent(valueToQuotedString(name.c_str()));
      writeWithIndent(valueToQuotedString(name.c_str(), name.size()));
      *document_ << " : ";
      writeValue(childValue);
      if (++it == members.end()) {
@@ -3632,12 +3632,12 @@ JSONTEST_FIXTURE_LOCAL(CharReaderAllowSpecialFloatsTest, issue209) {
  for (const auto& td : test_data) {
    bool ok = reader->parse(&*td.in.begin(), &*td.in.begin() + td.in.size(),
                            &root, &errs);
    JSONTEST_ASSERT(td.ok == ok) << "line:" << td.line << "\n"
                                 << " expected: {"
                                 << "ok:" << td.ok << ", in:\'" << td.in << "\'"
                                 << "}\n"
                                 << " actual: {"
                                 << "ok:" << ok << "}\n";
    // clang-format off
    JSONTEST_ASSERT(td.ok == ok) <<
        "line:" << td.line << "\n " <<
        "expected: {ok:" << td.ok << ", in:\'" << td.in << "\'}\n " <<
        "actual: {ok:" << ok << "}\n";
    // clang-format on
  }

  {
test/data/fail_strict_comment_01.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "a": "aaa",
  "b": "bbb" // comments not allowed in strict mode
}
test/data/fail_strict_comment_02.json (new file, 4 lines)
@@ -0,0 +1,4 @@
{
  "a": "aaa", // comments not allowed in strict mode
  "b": "bbb"
}
test/data/fail_strict_comment_03.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "array" : [1, 2, 3 /* comments not allowed in strict mode */]
}
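Note: these fixtures should only fail when comments are disabled; a minimal sketch (not part of the diff) of rejecting such input via strict mode with the CharReaderBuilder API:

    #include <json/json.h>
    #include <iostream>
    #include <memory>
    #include <string>

    int main() {
      const std::string doc = "{ \"a\": \"aaa\" // comments not allowed in strict mode\n}";
      Json::CharReaderBuilder builder;
      Json::CharReaderBuilder::strictMode(&builder.settings_); // disables comments, among other things
      const std::unique_ptr<Json::CharReader> reader(builder.newCharReader());
      Json::Value root;
      Json::String errs;
      const bool ok = reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);
      std::cout << (ok ? Json::String("parsed") : "rejected: " + errs) << '\n'; // expected: rejected
      return 0;
    }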
test/data/fail_test_object_02.json (new file, 1 line)
@@ -0,0 +1 @@
{"one": 1 /* } */ { "two" : 2 }
@@ -97,14 +97,17 @@ def runAllTests(jsontest_executable_path, input_dir = None,
    valgrind_path = use_valgrind and VALGRIND_CMD or ''
    for input_path in tests + test_jsonchecker:
        expect_failure = os.path.basename(input_path).startswith('fail')
        is_json_checker_test = (input_path in test_jsonchecker) or expect_failure
        is_json_checker_test = input_path in test_jsonchecker
        is_parse_only = is_json_checker_test or expect_failure
        is_strict_test = ('_strict_' in os.path.basename(input_path)) or is_json_checker_test
        print('TESTING:', input_path, end=' ')
        options = is_json_checker_test and '--json-checker' or ''
        options = is_parse_only and '--parse-only' or ''
        options += is_strict_test and ' --strict' or ''
        options += ' --json-writer %s'%writerClass
        cmd = '%s%s %s "%s"' % ( valgrind_path, jsontest_executable_path, options,
                                 input_path)
        status, process_output = getStatusOutput(cmd)
        if is_json_checker_test:
        if is_parse_only:
            if expect_failure:
                if not status:
                    print('FAILED')