Compare commits

..

8 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Niels Lohmann | 1e99a0273f | 🔀 merge branch 'release/2.1.0' | 2017-01-28 18:42:27 +01:00 |
| Niels Lohmann | 528bc96d7e | Merge branch 'develop' | 2017-01-02 16:41:33 +01:00 |
| Niels Lohmann | 1c98ce869c | 🔀 merge branch 'release/2.0.10' | 2017-01-02 16:37:52 +01:00 |
| Niels Lohmann | 6df60b0448 | 🔀 Merge branch 'release/2.0.9' | 2016-12-16 21:34:54 +01:00 |
| Niels Lohmann | 272ebdc900 | 🔖 Merge branch 'release/2.0.8' | 2016-12-02 20:07:14 +01:00 |
| Niels | 79a9d00e15 | Merge branch 'develop' | 2016-11-03 18:54:59 +01:00 |
| Niels | a4d13c92ba | Merge branch 'release/2.0.7' | 2016-11-02 20:52:24 +01:00 |
| Niels | 60bba02cc6 | Merge branch 'release/2.0.6' | 2016-10-15 16:47:56 +02:00 |
1424 changed files with 4226261 additions and 113341 deletions


@@ -1,84 +0,0 @@
#AccessModifierOffset: 2
AlignAfterOpenBracket: Align
AlignConsecutiveAssignments: false
#AlignConsecutiveBitFields: false
AlignConsecutiveDeclarations: false
AlignConsecutiveMacros: false
AlignEscapedNewlines: Right
#AlignOperands: AlignAfterOperator
AlignTrailingComments: true
AllowAllArgumentsOnNextLine: false
AllowAllConstructorInitializersOnNextLine: false
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: Empty
AllowShortCaseLabelsOnASingleLine: false
#AllowShortEnumsOnASingleLine: true
AllowShortFunctionsOnASingleLine: Empty
AllowShortIfStatementsOnASingleLine: Never
AllowShortLambdasOnASingleLine: Empty
AllowShortLoopsOnASingleLine: false
AlwaysBreakAfterReturnType: None
AlwaysBreakBeforeMultilineStrings: false
AlwaysBreakTemplateDeclarations: Yes
BinPackArguments: false
BinPackParameters: false
#BitFieldColonSpacing: Both
BreakBeforeBraces: Custom # or Allman
BraceWrapping:
AfterCaseLabel: true
AfterClass: true
AfterControlStatement: Always
AfterEnum: true
AfterFunction: true
AfterNamespace: false
AfterStruct: true
AfterUnion: true
AfterExternBlock: false
BeforeCatch: true
BeforeElse: true
#BeforeLambdaBody: false
#BeforeWhile: false
SplitEmptyFunction: false
SplitEmptyRecord: false
SplitEmptyNamespace: false
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeComma
BreakStringLiterals: false
ColumnLimit: 0
CompactNamespaces: false
ConstructorInitializerIndentWidth: 2
Cpp11BracedListStyle: true
PointerAlignment: Left
FixNamespaceComments: true
IncludeBlocks: Preserve
#IndentCaseBlocks: false
IndentCaseLabels: true
IndentGotoLabels: false
IndentPPDirectives: BeforeHash
IndentWidth: 4
KeepEmptyLinesAtTheStartOfBlocks: false
MaxEmptyLinesToKeep: 1
NamespaceIndentation: None
ReflowComments: false
SortIncludes: true
SortUsingDeclarations: true
SpaceAfterCStyleCast: false
SpaceAfterLogicalNot: false
SpaceAfterTemplateKeyword: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeCpp11BracedList: false
SpaceBeforeParens: ControlStatements
SpaceBeforeRangeBasedForLoopColon: true
SpaceBeforeSquareBrackets: false
SpaceInEmptyBlock: false
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 2
SpacesInAngles: false
SpacesInCStyleCastParentheses: false
SpacesInConditionalStatement: false
SpacesInContainerLiterals: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
Standard: c++11
TabWidth: 4
UseTab: Never
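For orientation, the settings above amount to Allman-style braces (via the custom `BraceWrapping` block), 4-space indentation, left-bound pointers, and no single-line `if` statements. Below is a hand-written sketch of code in that style; it is an approximation, not actual clang-format output, and the identifiers are made up:

```cpp
namespace demo {                    // AfterNamespace: false keeps the brace on this line

class widget                        // AfterClass: true puts the brace on its own line
{
  public:
    int* data() noexcept            // PointerAlignment: Left
    {
        if (cached_ != nullptr)     // AllowShortIfStatementsOnASingleLine: Never
        {
            return cached_;
        }
        else                        // BraceWrapping.BeforeElse: true
        {
            return &fallback_;
        }
    }

  private:
    int* cached_ = nullptr;
    int fallback_ = 0;
};

}  // namespace demo (FixNamespaceComments: true, SpacesBeforeTrailingComments: 2)
```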


@@ -1,44 +0,0 @@
Checks: '*,
-android-cloexec-fopen,
-cppcoreguidelines-avoid-goto,
-cppcoreguidelines-avoid-magic-numbers,
-cppcoreguidelines-avoid-non-const-global-variables,
-cppcoreguidelines-macro-usage,
-cppcoreguidelines-pro-bounds-array-to-pointer-decay,
-cppcoreguidelines-pro-bounds-constant-array-index,
-cppcoreguidelines-pro-bounds-pointer-arithmetic,
-cppcoreguidelines-pro-type-reinterpret-cast,
-cppcoreguidelines-pro-type-union-access,
-fuchsia-default-arguments-calls,
-fuchsia-default-arguments-declarations,
-fuchsia-overloaded-operator,
-google-explicit-constructor,
-google-readability-function-size,
-google-runtime-int,
-google-runtime-references,
-hicpp-avoid-goto,
-hicpp-explicit-conversions,
-hicpp-function-size,
-hicpp-no-array-decay,
-hicpp-no-assembler,
-hicpp-signed-bitwise,
-hicpp-uppercase-literal-suffix,
-llvm-header-guard,
-llvm-include-order,
-llvmlibc-*,
-misc-no-recursion,
-misc-non-private-member-variables-in-classes,
-modernize-use-trailing-return-type,
-readability-function-size,
-readability-magic-numbers,
-readability-redundant-access-specifiers,
-readability-uppercase-literal-suffix'
CheckOptions:
- key: hicpp-special-member-functions.AllowSoleDefaultDtor
value: 1
WarningsAsErrors: '*'
#HeaderFilterRegex: '.*nlohmann.*'
HeaderFilterRegex: '.*hpp$'
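As an aside, the `hicpp-special-member-functions.AllowSoleDefaultDtor` option set above relaxes the rule-of-five check for types whose only user-declared special member is a defaulted destructor. A minimal sketch of the pattern this permits (illustrative class, not taken from the library):

```cpp
// With AllowSoleDefaultDtor = 1, declaring only a defaulted (here virtual)
// destructor does not force the class to also declare copy/move operations.
class interface
{
  public:
    virtual ~interface() = default;  // sole user-declared special member
    virtual void run() = 0;
};
```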

.doozer.json (Normal file, 24 changed lines)

@@ -0,0 +1,24 @@
{
"targets": {
"xenial-i386": {
"buildenv": "xenial-i386",
"builddeps": ["build-essential", "cmake"],
"buildcmd": ["mkdir cm", "cd cm", "cmake ..", "cmake --build .", "ctest --output-on-failure"]
},
"xenial-amd64": {
"buildenv": "xenial-amd64",
"builddeps": ["build-essential", "cmake"],
"buildcmd": ["mkdir cm", "cd cm", "cmake ..", "cmake --build .", "ctest --output-on-failure"]
},
"fedora24-x86_64": {
"buildenv": "fedora24-x86_64",
"builddeps": ["cmake", "make", "clang"],
"buildcmd": ["mkdir cm", "cd cm", "CXX=clang++ cmake ..", "cmake --build .", "ctest --output-on-failure"]
},
"osx": {
"buildenv": "osx",
"builddeps": ["build-essential"],
"buildcmd": ["make check"]
}
}
}


@@ -1,22 +0,0 @@
kind: pipeline
name: test-on-arm64
platform:
arch: arm64
steps:
- name: build
image: gcc
commands:
- wget https://github.com/Kitware/CMake/releases/download/v3.20.2/cmake-3.20.2.tar.gz
- tar xfz cmake-3.20.2.tar.gz
- cd cmake-3.20.2
- ./configure
- make cmake ctest -j10
- cd ..
- mkdir build
- cd build
- ../cmake-3.20.2/bin/cmake .. -DJSON_FastTests=ON
- make -j10
- cd test
- ../../cmake-3.20.2/bin/ctest -j10

.github/CODEOWNERS (vendored, 6 changed lines)

@@ -1,6 +0,0 @@
# JSON for Modern C++ has been originally written by Niels Lohmann.
# Since 2013 over 140 contributors have helped to improve the library.
# This CODEOWNERS file is only to make sure that @nlohmann is requested
# for a code review in case of a pull request.
* @nlohmann


@@ -4,15 +4,17 @@
This project started as a little excuse to exercise some of the cool new C++11 features. Over time, people actually started to use the JSON library (yey!) and started to help improve it by proposing features, finding bugs, or even fixing my mistakes. I am really [thankful](https://github.com/nlohmann/json/blob/master/README.md#thanks) for this and try to keep track of all the helpers.
To make it as easy as possible for you to contribute and for me to keep an overview, here are a few guidelines which should help us avoid all kinds of unnecessary work or disappointment. And of course, this document is subject to discussion, so please [create an issue](https://github.com/nlohmann/json/issues/new/choose) or a pull request if you find a way to improve it!
To make it as easy as possible for you to contribute and for me to keep an overview, here are a few guidelines which should help us avoid all kinds of unnecessary work or disappointment. And of course, this document is subject to discussion, so please [create an issue](https://github.com/nlohmann/json/issues/new) or a pull request if you find a way to improve it!
## Private reports
Usually, all issues are tracked publicly on [GitHub](https://github.com/nlohmann/json/issues). If you want to make a private report (e.g., for a vulnerability or to attach an example that is not meant to be published), please send an email to <mail@nlohmann.me>.
Usually, all issues are tracked publicly on [Github](https://github.com/nlohmann/json/issues). If you want to make a private report (e.g., for a vulnerability or to attach an example that is not meant to be publisheed), please send an email to <mail@nlohmann.me>.
## Prerequisites
Please [create an issue](https://github.com/nlohmann/json/issues/new/choose), assuming one does not already exist, and describe your concern. Note you need a [GitHub account](https://github.com/signup/free) for this.
Please [create an issue](https://github.com/nlohmann/json/issues/new), assuming one does not already exist, and describe your concern. Note you need a [GitHub account](https://github.com/signup/free) for this.
If you want to propose changes to the code, you need to download the [`re2c`](http://re2c.org) tool.
## Describe your issue
@@ -22,30 +24,33 @@ Clearly describe the issue:
- If you propose a change or addition, try to give an **example** of how the improved code could look or how to use it.
- If you found a compilation error, please tell us which **compiler** (version and operating system) you used and paste the (relevant part of the) error messages into the ticket.
Please stick to the provided issue templates ([bug report](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE/Bug_report.md), [feature request](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE/Feature_request.md), or [question](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE/question.md)) if possible.
## Files to change
:exclamation: Before you make any changes, note the single-header file [`single_include/nlohmann/json.hpp`](https://github.com/nlohmann/json/blob/develop/single_include/nlohmann/json.hpp) is **generated** from the source files in the [`include/nlohmann` directory](https://github.com/nlohmann/json/tree/develop/include/nlohmann). Please **do not** edit file `single_include/nlohmann/json.hpp` directly, but change the `include/nlohmann` sources and regenerate file `single_include/nlohmann/json.hpp` by executing `make amalgamate`.
There are currently two files which need to be edited:
To make changes, you need to edit the following files:
1. [`src/json.hpp.re2c`](https://github.com/nlohmann/json/blob/master/src/json.hpp.re2c) (note the `.re2c` suffix) - This file contains a comment section which describes the JSON lexic. This section is translated by [`re2c`](http://re2c.org) into file [`src/json.hpp`](https://github.com/nlohmann/json/blob/master/src/json.hpp) which is plain "vanilla" C++11 code. (In fact, the generated lexer consists of some hundred lines of `goto`s, which is a hint you never want to edit this file...).
1. [`include/nlohmann/*`](https://github.com/nlohmann/json/tree/develop/include/nlohmann) - These files are the sources of the library. Before testing or creating a pull request, execute `make amalgamate` to regenerate `single_include/nlohmann/json.hpp`.
If you only edit file `src/json.hpp` (without the `.re2c` suffix), your changes will be overwritten as soon as the lexer is touched again. To generate the `src/json.hpp` file which is actually used during compilation of the tests and all other code, please execute
2. [`test/src/unit-*.cpp`](https://github.com/nlohmann/json/tree/develop/test/src) - These files contain the [doctest](https://github.com/onqtam/doctest) unit tests which currently cover [100 %](https://coveralls.io/github/nlohmann/json) of the library's code.
```sh
make re2c
```
To run [`re2c`](http://re2c.org) and generate/overwrite file `src/json.hpp` with your changes in file `src/json.hpp.re2c`. We currently use re2c version 0.16. Please also use this version, because other re2c versions tend to create code that differs a lot, which makes diffs unusable.
2. [`test/src/unit.cpp`](https://github.com/nlohmann/json/blob/master/test/unit.cpp) - This contains the [Catch](https://github.com/philsquared/Catch) unit tests which currently cover [100 %](https://coveralls.io/github/nlohmann/json) of the library's code.
If you add or change a feature, please also add a unit test to this file. The unit tests can be compiled and executed with
```sh
$ mkdir build
$ cd build
$ cmake ..
$ cmake --build .
$ ctest
make check
```
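As an illustration of "please also add a unit test", a new test case in one of the `test/src/unit-*.cpp` files might look roughly like the following. This is a standalone sketch using plain [doctest](https://github.com/onqtam/doctest) macros; the real suite supplies its own test runner and compatibility header, so treat the includes here as assumptions:

```cpp
#define DOCTEST_CONFIG_IMPLEMENT_WITH_MAIN  // standalone only; the test suite provides its own main
#include "doctest.h"

#include <nlohmann/json.hpp>
using nlohmann::json;

TEST_CASE("my new feature")  // hypothetical feature name
{
    const json j = json::parse(R"({"answer": 42})");

    CHECK(j.at("answer").get<int>() == 42);
    CHECK(j.dump() == R"({"answer":42})");
}
```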
The test cases are also executed with several different compilers on [Travis](https://travis-ci.org/nlohmann/json) once you open a pull request.
Please understand that I cannot accept pull requests changing only file `src/json.hpp`.
## Note
@@ -54,11 +59,12 @@ To make changes, you need to edit the following files:
## Please don't
- The C++11 support varies between different **compilers** and versions. Please note the [list of supported compilers](https://github.com/nlohmann/json/blob/master/README.md#supported-compilers). Some compilers like GCC 4.7 (and earlier), Clang 3.3 (and earlier), or Microsoft Visual Studio 13.0 and earlier are known not to work due to missing or incomplete C++11 support. Please refrain from proposing changes that work around these compiler's limitations with `#ifdef`s or other means.
- Please do not only make changes to file `src/json.hpp` -- please read the paragraph above and understand why `src/json.hpp.re2c` exists.
- The C++11 support varies between different **compilers** and versions. Please note the [list of supported compilers](https://github.com/nlohmann/json/blob/master/README.md#supported-compilers). Some compilers like GCC 4.8 (and earlier), Clang 3.3 (and earlier), or Microsoft Visual Studio 13.0 and earlier are known not to work due to missing or incomplete C++11 support. Please refrain from proposing changes that work around these compiler's limitations with `#ifdef`s or other means.
- Specifically, I am aware of compilation problems with **Microsoft Visual Studio** (there even is an [issue label](https://github.com/nlohmann/json/issues?utf8=✓&q=label%3A%22visual+studio%22+) for these kind of bugs). I understand that even in 2016, complete C++11 support isn't there yet. But please also understand that I do not want to drop features or uglify the code just to make Microsoft's sub-standard compiler happy. The past has shown that there are ways to express the functionality such that the code compiles with the most recent MSVC - unfortunately, this is not the main objective of the project.
- Please refrain from proposing changes that would **break [JSON](https://json.org) conformance**. If you propose a conformant extension of JSON to be supported by the library, please motivate this extension.
- Please refrain from proposing changes that would **break [JSON](http://json.org) conformance**. If you propose a conformant extension of JSON to be supported by the library, please motivate this extension.
- We shall not extend the library to **support comments**. There is quite some [controversy](https://www.reddit.com/r/programming/comments/4v6chu/why_json_doesnt_support_comments_douglas_crockford/) around this topic, and there were quite some [issues](https://github.com/nlohmann/json/issues/376) on this. We believe that JSON is fine without comments.
- We do not preserve the **insertion order of object elements**. The [JSON standard](https://tools.ietf.org/html/rfc8259.html) defines objects as "an unordered collection of zero or more name/value pairs". To this end, this library does not preserve insertion order of name/value pairs. (In fact, keys will be traversed in alphabetical order as `std::map` with `std::less` is used by default.) Note this behavior conforms to the standard, and we shall not change it to any other order. If you do want to preserve the insertion order, you can specialize the object type with containers like [`tsl::ordered_map`](https://github.com/Tessil/ordered-map) or [`nlohmann::fifo_map`](https://github.com/nlohmann/fifo_map).
- We do not preserve the **insertion order of object elements**. The [JSON standard](https://tools.ietf.org/html/rfc7159.html) defines objects as "an unordered collection of zero or more name/value pairs". To this end, this library does not preserve insertion order of name/value pairs. (In fact, keys will be traversed in alphabetical order as `std::map` with `std::less` is used by default.) Note this behavior conforms to the standard, and we shall not it to any other order.
- Please do not open pull requests that address **multiple issues**.
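To make the note above about object key order concrete: with the default `nlohmann::json` (a `std::map` with `std::less` underneath), keys are traversed alphabetically regardless of insertion order. A minimal sketch, assuming a library version where `items()` is available:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
    nlohmann::json j;
    j["zebra"] = 1;  // inserted first
    j["apple"] = 2;  // inserted second

    // Iteration follows std::map order, i.e. alphabetical keys:
    // prints "apple" before "zebra", regardless of insertion order.
    for (const auto& item : j.items())
    {
        std::cout << item.key() << '\n';
    }

    std::cout << j.dump() << '\n';  // {"apple":2,"zebra":1}
}
```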
@@ -66,6 +72,6 @@ To make changes, you need to edit the following files:
The following areas really need contribution:
- Extending the **continuous integration** toward more exotic compilers such as Android NDK, Intel's Compiler, or the bleeding-edge versions Clang.
- Extending the **continuous integration** toward more exotic compilers such as Android NDK, Intel's Compiler, or the bleeding-edge versions of GCC or Clang.
- Improving the efficiency of the **JSON parser**. The current parser is implemented as a naive recursive descent parser with hand coded string handling. More sophisticated approaches like LALR parsers would be really appreciated. That said, parser generators like Bison or ANTLR do not play nice with single-header files -- I really would like to keep the parser inside the `json.hpp` header, and I am not aware of approaches similar to [`re2c`](http://re2c.org) for parsing.
- Extending and updating existing **benchmarks** to include (the most recent version of) this library. Though efficiency is not everything, speed and memory consumption are very important characteristics for C++ developers, so having proper comparisons would be interesting.

.github/FUNDING.yml (vendored, 2 changed lines)

@@ -1,2 +0,0 @@
github: nlohmann
custom: http://paypal.me/nlohmann


@@ -1,57 +0,0 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: 'kind: bug'
assignees: ''
---
<!-- Provide a concise summary of the issue in the title above. -->
#### What is the issue you have?
<!-- Provide a detailed introduction to the issue itself, and why you consider it to be a bug. -->
<!-- If possible, be specific and add stack traces, error messages, etc. Avoid vague terms like "crash" or "doesn't work". -->
#### Please describe the steps to reproduce the issue.
<!-- Provide a link to a live example, or an unambiguous set of steps to -->
<!-- reproduce this bug. Include code to reproduce, if relevant -->
1.
2.
3.
#### Can you provide a small but working code example?
<!-- Please understand that we cannot analyze and debug large code bases. -->
#### What is the expected behavior?
<!-- Tell us what should happen -->
#### And what is the actual behavior instead?
<!-- Tell us what happens instead. -->
#### Which compiler and operating system are you using?
<!-- Include as many relevant details about the environment you experienced the bug in. -->
<!-- Make sure you use a supported compiler, see https://github.com/nlohmann/json#supported-compilers. -->
- Compiler: ___
- Operating system: ___
#### Which version of the library did you use?
<!-- Please add an `x` to the respective line. -->
- [ ] latest release version 3.10.0
- [ ] other release - please state the version: ___
- [ ] the `develop` branch
#### If you experience a compilation error: can you [compile and run the unit tests](https://github.com/nlohmann/json#execute-unit-tests)?
- [ ] yes
- [ ] no - please copy/paste the error message below
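For reference, a "small but working code example" in the sense requested by this template is usually a single self-contained translation unit, along these lines (the input value is a placeholder):

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
    // minimal input that reproduces the behaviour being reported
    const auto j = nlohmann::json::parse(R"({"value": 3.14})");
    std::cout << j.dump() << '\n';
}
```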


@@ -1,5 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: Ask a question
url: https://github.com/nlohmann/json/discussions
about: Ask questions and discuss with other community members


@@ -1,19 +1,46 @@
[Describe your pull request here. Please read the text below the line, and make sure you follow the checklist.]
* * *
## Files to change
## Pull request checklist
There are currently two files which need to be edited:
Read the [Contribution Guidelines](https://github.com/nlohmann/json/blob/develop/.github/CONTRIBUTING.md) for detailed information.
1. [`src/json.hpp.re2c`](https://github.com/nlohmann/json/blob/master/src/json.hpp.re2c) (note the `.re2c` suffix) - This file contains a comment section which describes the JSON lexic. This section is translated by [`re2c`](http://re2c.org) into file [`src/json.hpp`](https://github.com/nlohmann/json/blob/master/src/json.hpp) which is plain "vanilla" C++11 code. (In fact, the generated lexer consists of some hundred lines of `goto`s, which is a hint you never want to edit this file...).
- [ ] Changes are described in the pull request, or an [existing issue is referenced](https://github.com/nlohmann/json/issues).
- [ ] The test suite [compiles and runs](https://github.com/nlohmann/json/blob/develop/README.md#execute-unit-tests) without error.
- [ ] [Code coverage](https://coveralls.io/github/nlohmann/json) is 100%. Test cases can be added by editing the [test suite](https://github.com/nlohmann/json/tree/develop/test/src).
- [ ] The source code is amalgamated; that is, after making changes to the sources in the `include/nlohmann` directory, run `make amalgamate` to create the single-header file `single_include/nlohmann/json.hpp`. The whole process is described [here](https://github.com/nlohmann/json/blob/develop/.github/CONTRIBUTING.md#files-to-change).
If you only edit file `src/json.hpp` (without the `.re2c` suffix), your changes will be overwritten as soon as the lexer is touched again. To generate the `src/json.hpp` file which is actually used during compilation of the tests and all other code, please execute
```sh
make re2c
```
To run [`re2c`](http://re2c.org) and generate/overwrite file `src/json.hpp` with your changes in file `src/json.hpp.re2c`.
2. [`test/src/unit.cpp`](https://github.com/nlohmann/json/blob/master/test/unit.cpp) - This contains the [Catch](https://github.com/philsquared/Catch) unit tests which currently cover [100 %](https://coveralls.io/github/nlohmann/json) of the library's code.
If you add or change a feature, please also add a unit test to this file. The unit tests can be compiled with
```sh
make
```
and can be executed with
```sh
./json_unit
```
The test cases are also executed with several different compilers on [Travis](https://travis-ci.org/nlohmann/json) once you open a pull request.
Please understand that I cannot accept pull requests changing only file `src/json.hpp`.
## Note
- If you open a pull request, the code will be automatically tested with [Valgrind](http://valgrind.org)'s Memcheck tool to detect memory leaks. Please be aware that the execution with Valgrind _may_ in rare cases yield different behavior than running the code directly. This can result in failing unit tests which run successfully without Valgrind.
## Please don't
- The C++11 support varies between different **compilers** and versions. Please note the [list of supported compilers](https://github.com/nlohmann/json/blob/master/README.md#supported-compilers). Some compilers like GCC 4.7 (and earlier), Clang 3.3 (and earlier), or Microsoft Visual Studio 13.0 and earlier are known not to work due to missing or incomplete C++11 support. Please refrain from proposing changes that work around these compiler's limitations with `#ifdef`s or other means.
- Only make changes to file `src/json.hpp` -- please read the paragraph above and understand why `src/json.hpp.re2c` exists.
- The C++11 support varies between different **compilers** and versions. Please note the [list of supported compilers](https://github.com/nlohmann/json/blob/master/README.md#supported-compilers). Some compilers like GCC 4.8 (and earlier), Clang 3.3 (and earlier), or Microsoft Visual Studio 13.0 and earlier are known not to work due to missing or incomplete C++11 support. Please refrain from proposing changes that work around these compiler's limitations with `#ifdef`s or other means.
- Specifically, I am aware of compilation problems with **Microsoft Visual Studio** (there even is an [issue label](https://github.com/nlohmann/json/issues?utf8=✓&q=label%3A%22visual+studio%22+) for these kind of bugs). I understand that even in 2016, complete C++11 support isn't there yet. But please also understand that I do not want to drop features or uglify the code just to make Microsoft's sub-standard compiler happy. The past has shown that there are ways to express the functionality such that the code compiles with the most recent MSVC - unfortunately, this is not the main objective of the project.
- Please refrain from proposing changes that would **break [JSON](https://json.org) conformance**. If you propose a conformant extension of JSON to be supported by the library, please motivate this extension.
- Please refrain from proposing changes that would **break [JSON](http://json.org) conformance**. If you propose a conformant extension of JSON to be supported by the library, please motivate this extension.
- Please do not open pull requests that address **multiple issues**.

.github/SECURITY.md (vendored, 5 changed lines)

@@ -1,5 +0,0 @@
# Security Policy
## Reporting a Vulnerability
Usually, all issues are tracked publicly on [GitHub](https://github.com/nlohmann/json/issues). If you want to make a private report (e.g., for a vulnerability or to attach an example that is not meant to be published), please send an email to <mail@nlohmann.me>. You can use [this key](https://keybase.io/nlohmann/pgp_keys.asc?fingerprint=797167ae41c0a6d9232e48457f3cea63ae251b69) for encryption.

.github/config.yml (vendored, 19 changed lines)

@@ -1,19 +0,0 @@
# Configuration for sentiment-bot - https://github.com/behaviorbot/sentiment-bot
# *Required* toxicity threshold between 0 and .99 with the higher numbers being the most toxic
# Anything higher than this threshold will be marked as toxic and commented on
sentimentBotToxicityThreshold: .7
# *Required* Comment to reply with
sentimentBotReplyComment: >
Please be sure to review the [code of conduct](https://github.com/nlohmann/json/blob/develop/CODE_OF_CONDUCT.md) and be respectful of other users. cc/ @nlohmann
# Configuration for request-info - https://github.com/behaviorbot/request-info
# *Required* Comment to reply with
requestInfoReplyComment: >
We would appreciate it if you could provide us with more info about this issue or pull request! Please check the [issue template](https://github.com/nlohmann/json/blob/develop/.github/ISSUE_TEMPLATE.md) and the [pull request template](https://github.com/nlohmann/json/blob/develop/.github/PULL_REQUEST_TEMPLATE.md).
# *OPTIONAL* Label to be added to Issues and Pull Requests with insufficient information given
requestInfoLabelToAdd: "state: needs more info"

.github/stale.yml (vendored, 17 changed lines)

@@ -1,17 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 30
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
# Label to use when marking an issue as stale
staleLabel: "state: stale"
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false


@@ -1,55 +0,0 @@
name: "Code scanning - action"
on:
push:
branches:
- develop
- master
- release/*
pull_request:
schedule:
- cron: '0 19 * * 1'
jobs:
CodeQL-Build:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
with:
# We must fetch at least the immediate parents so that if this is
# a pull request then we can checkout the head.
fetch-depth: 2
# If this run was triggered by a pull request event, then checkout
# the head of the pull request instead of the merge commit.
- run: git checkout HEAD^2
if: ${{ github.event_name == 'pull_request' }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1


@@ -1,44 +0,0 @@
name: macOS
on:
push:
branches:
- develop
- master
- release/*
pull_request:
jobs:
xcode:
runs-on: macos-10.15
strategy:
matrix:
xcode: [12.4, 12.3, 12.2, 12.1.1, 12.1, 12, 11.7, 11.6, 11.5, 11.4.1, 11.3.1, 11.2.1, 10.3]
env:
DEVELOPER_DIR: /Applications/Xcode_${{ matrix.xcode }}.app/Contents/Developer
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -D CMAKE_BUILD_TYPE=Debug -DJSON_BuildTests=On -DJSON_FastTests=ON
- name: build
run: cmake --build build --parallel 10
- name: test
run: cd build ; ctest -j 10 --output-on-failure
xcode_standards:
runs-on: macos-10.15
strategy:
matrix:
standard: [11, 14, 17, 20]
env:
DEVELOPER_DIR: /Applications/Xcode_12.4.app/Contents/Developer
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -D CMAKE_BUILD_TYPE=Debug -DJSON_BuildTests=On -DCMAKE_CXX_STANDARD=${{ matrix.standard }} -DCMAKE_CXX_STANDARD_REQUIRED=ON
- name: build
run: cmake --build build --parallel 10
- name: test
run: cd build ; ctest -j 10 --output-on-failure


@@ -1,114 +0,0 @@
name: Ubuntu
on:
push:
branches:
- develop
- master
- release/*
pull_request:
jobs:
ci_test_clang:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: install_ninja
run: |
sudo apt update
sudo apt install ninja-build
shell: bash
- name: install_clang
run: |
wget https://apt.llvm.org/llvm.sh
chmod +x llvm.sh
sudo ./llvm.sh 11
sudo apt-get install clang-tools-11
shell: bash
- name: cmake
run: cmake -S . -B build -DJSON_CI=On
- name: build
run: cmake --build build --target ci_test_clang
ci_test_gcc:
runs-on: ubuntu-latest
container: nlohmann/json-ci:latest
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -DJSON_CI=On
- name: build
run: cmake --build build --target ci_test_gcc
ci_static_analysis:
runs-on: ubuntu-latest
container: nlohmann/json-ci:latest
strategy:
matrix:
target: [ci_clang_tidy, ci_cppcheck, ci_test_valgrind, ci_test_clang_sanitizer, ci_test_amalgamation, ci_clang_analyze, ci_cpplint, ci_cmake_flags, ci_single_binaries, ci_reproducible_tests, ci_non_git_tests, ci_offline_testdata, ci_infer]
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -DJSON_CI=On
- name: build
run: cmake --build build --target ${{ matrix.target }}
ci_cmake_options:
runs-on: ubuntu-latest
container: nlohmann/json-ci:latest
strategy:
matrix:
target: [ci_test_diagnostics, ci_test_noexceptions, ci_test_noimplicitconversions]
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -DJSON_CI=On
- name: build
run: cmake --build build --target ${{ matrix.target }}
ci_test_coverage:
runs-on: ubuntu-latest
container: nlohmann/json-ci:latest
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -DJSON_CI=On
- name: build
run: cmake --build build --target ci_test_coverage
- name: archive coverage report
uses: actions/upload-artifact@v2
with:
name: code-coverage-report
path: /__w/json/json/build/html
- name: Coveralls
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: /__w/json/json/build/json.info.filtered.noexcept
ci_test_compilers:
runs-on: ubuntu-latest
container: nlohmann/json-ci:latest
strategy:
matrix:
compiler: [g++-4.8, g++-4.9, g++-5, g++-7, g++-8, g++-9, g++-10, clang++-3.5, clang++-3.6, clang++-3.7, clang++-3.8, clang++-3.9, clang++-4.0, clang++-5.0, clang++-6.0, clang++-7, clang++-8, clang++-9, clang++-10, clang++-11]
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -DJSON_CI=On
- name: build
run: cmake --build build --target ci_test_compiler_${{ matrix.compiler }}
ci_test_standards:
runs-on: ubuntu-latest
container: nlohmann/json-ci:latest
strategy:
matrix:
standard: [11, 14, 17, 20]
compiler: [gcc, clang]
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -DJSON_CI=On
- name: build
run: cmake --build build --target ci_test_${{ matrix.compiler }}_cxx${{ matrix.standard }}


@@ -1,128 +0,0 @@
name: Windows
on:
push:
branches:
- develop
- master
- release/*
pull_request:
jobs:
mingw:
runs-on: windows-latest
strategy:
matrix:
architecture: [x64, x86]
steps:
- uses: actions/checkout@v2
- name: Set up MinGW
uses: egor-tensin/setup-mingw@v2
with:
platform: ${{ matrix.architecture }}
- name: cmake
run: cmake -S . -B build -G "MinGW Makefiles" -DCMAKE_BUILD_TYPE=Debug -DJSON_BuildTests=On
- name: build
run: cmake --build build --parallel 10
- name: test
run: cd build ; ctest -j 10 -C Debug --output-on-failure
msvc2017:
runs-on: windows-2016
strategy:
matrix:
build_type: [Debug, Release]
architecture: [Win32, x64]
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -G "Visual Studio 15 2017" -A ${{ matrix.architecture }} -DJSON_BuildTests=On -DCMAKE_CXX_FLAGS="/W4 /WX"
if: matrix.build_type == 'Release' && matrix.architecture == 'x64'
- name: cmake
run: cmake -S . -B build -G "Visual Studio 15 2017" -A ${{ matrix.architecture }} -DJSON_BuildTests=On -DCMAKE_CXX_FLAGS="/W4 /WX"
if: matrix.build_type == 'Release' && matrix.architecture != 'x64'
- name: cmake
run: cmake -S . -B build -G "Visual Studio 15 2017" -A ${{ matrix.architecture }} -DJSON_BuildTests=On -DJSON_FastTests=ON -DCMAKE_CXX_FLAGS="/W4 /WX"
if: matrix.build_type == 'Debug'
- name: build
run: cmake --build build --config ${{ matrix.build_type }} --parallel 10
- name: test
run: cd build ; ctest -j 10 -C ${{ matrix.build_type }} --output-on-failure
msvc2017_latest:
runs-on: windows-2016
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -G "Visual Studio 15 2017" -DJSON_BuildTests=On -DCMAKE_CXX_FLAGS="/permissive- /std:c++latest /utf-8 /W4 /WX"
- name: build
run: cmake --build build --config Release --parallel 10
- name: test
run: cd build ; ctest -j 10 -C Release --output-on-failure
msvc2019:
runs-on: windows-latest
strategy:
matrix:
build_type: [Debug, Release]
architecture: [Win32, x64]
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -G "Visual Studio 16 2019" -A ${{ matrix.architecture }} -DJSON_BuildTests=On -DCMAKE_CXX_FLAGS="/W4 /WX"
if: matrix.build_type == 'Release'
- name: cmake
run: cmake -S . -B build -G "Visual Studio 16 2019" -A ${{ matrix.architecture }} -DJSON_BuildTests=On -DJSON_FastTests=ON -DCMAKE_CXX_FLAGS="/W4 /WX"
if: matrix.build_type == 'Debug'
- name: build
run: cmake --build build --config ${{ matrix.build_type }} --parallel 10
- name: test
run: cd build ; ctest -j 10 -C ${{ matrix.build_type }} --output-on-failure
msvc2019_latest:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -G "Visual Studio 16 2019" -DJSON_BuildTests=On -DCMAKE_CXX_FLAGS="/permissive- /std:c++latest /utf-8 /W4 /WX"
- name: build
run: cmake --build build --config Release --parallel 10
- name: test
run: cd build ; ctest -j 10 -C Release --output-on-failure
clang:
runs-on: windows-latest
strategy:
matrix:
version: [11, 12]
steps:
- uses: actions/checkout@v2
- name: install Clang
run: curl -fsSL -o LLVM${{ matrix.version }}.exe https://github.com/llvm/llvm-project/releases/download/llvmorg-${{ matrix.version }}.0.0/LLVM-${{ matrix.version }}.0.0-win64.exe ; 7z x LLVM${{ matrix.version }}.exe -y -o"C:/Program Files/LLVM"
- name: cmake
run: cmake -S . -B build -DCMAKE_CXX_COMPILER="C:/Program Files/LLVM/bin/clang++.exe" -G"MinGW Makefiles" -DCMAKE_BUILD_TYPE=Debug -DJSON_BuildTests=On
- name: build
run: cmake --build build --parallel 10
- name: test
run: cd build ; ctest -j 10 -C Debug --exclude-regex "test-unicode" --output-on-failure
clang-cl-11:
runs-on: windows-latest
strategy:
matrix:
architecture: [Win32, x64]
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build -G "Visual Studio 16 2019" -A ${{ matrix.architecture }} -T ClangCL -DJSON_BuildTests=On
- name: build
run: cmake --build build --config Debug --parallel 10
- name: test
run: cd build ; ctest -j 10 -C Debug --exclude-regex "test-unicode" --output-on-failure

.gitignore (vendored, 27 changed lines)

@@ -1,6 +1,5 @@
json_unit
json_benchmarks
json_benchmarks_simple
fuzz-testing
*.dSYM
@@ -8,9 +7,7 @@ fuzz-testing
*.gcno
*.gcda
build
build_coverage
clang_analyze_build
working
doc/xml
doc/html
@@ -18,19 +15,17 @@ me.nlohmann.json.docset
benchmarks/files/numbers/*.json
.wsjcpp-logs/*
.wsjcpp/*
.idea
/cmake-build-*
cmake-build-debug
test/test-*
/.vs
.svn
doc/mkdocs/venv/
doc/mkdocs/docs/images
doc/mkdocs/docs/examples
doc/mkdocs/site
doc/mkdocs/docs/__pycache__/
doc/xml
/doc/docset/nlohmann_json.docset/
test/thirdparty/Fuzzer/libFuzzer.a
test/parse_afl_fuzzer
test/parse_cbor_fuzzer
test/parse_msgpack_fuzzer


@@ -7,7 +7,17 @@ language: cpp
dist: trusty
sudo: required
group: edge
###################
# global settings #
###################
env:
global:
# The next declaration is the encrypted COVERITY_SCAN_TOKEN, created
# via the "travis encrypt" command using the project repo's public key
- secure: "m89SSgE+ASLO38rSKx7MTXK3n5NkP9bIx95jwY71YEiuFzib30PDJ/DifKnXxBjvy/AkCGztErQRk/8ZCvq+4HXozU2knEGnL/RUitvlwbhzfh2D4lmS3BvWBGS3N3NewoPBrRmdcvnT0xjOGXxtZaJ3P74TkB9GBnlz/HmKORA="
################
@@ -17,78 +27,251 @@ group: edge
matrix:
include:
# Valgrind
- os: linux
compiler: gcc
env:
- COMPILER=g++-4.9
- SPECIAL=valgrind
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: [g++-4.9, valgrind]
after_success:
- make check TEST_PREFIX="valgrind --error-exitcode=1 --leak-check=full " TEST_PATTERN=""
# Clang sanitizer
# note: sadly clang's libc++ has errors when running with sanitizers,
# so we use clang with gcc's libstdc++, which doesn't give those errors.
# that's why we need to install g++-6 to get the latest version
- os: linux
env:
- LLVM_VERSION=3.8.1
- SPECIAL=sanitizer
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: g++-6
compiler: clang
before_script:
- make clang_sanitize
# cppcheck
- os: linux
compiler: gcc
env:
- COMPILER=g++-4.9
- SPECIAL=cppcheck
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: [g++-4.9, cppcheck]
after_success:
- make cppcheck
# no exceptions
- os: linux
compiler: gcc
env:
- COMPILER=g++-4.9
- SPECIAL=no_exceptions
- TEST_PATTERN=-e \"*\"
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: [g++-4.9, cppcheck]
before_script:
- CPPFLAGS="-DJSON_NOEXCEPTION" make
# Coveralls (http://gronlier.fr/blog/2015/01/adding-code-coverage-to-your-c-project/)
- os: linux
compiler: gcc
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-4.9', 'ruby']
before_script:
- wget http://ftp.de.debian.org/debian/pool/main/l/lcov/lcov_1.11.orig.tar.gz
- tar xf lcov_1.11.orig.tar.gz
- sudo make -C lcov-1.11/ install
- gem install coveralls-lcov
- pip install --user cpp-coveralls
after_success:
- make clean
- CXXFLAGS="--coverage -g -O0" CPPFLAGS="-DNDEBUG" make json_unit
- test/json_unit "*"
- coveralls --build-root test --exclude src/catch.hpp --exclude src/unit-algorithms.cpp --exclude src/unit-allocator.cpp --exclude src/unit-capacity.cpp --exclude src/unit-class_const_iterator.cpp --exclude src/unit-class_iterator.cpp --exclude src/unit-class_lexer.cpp --exclude src/unit-class_parser.cpp --exclude src/unit-comparison.cpp --exclude src/unit-concepts.cpp --exclude src/unit-constructor1.cpp --exclude src/unit-constructor2.cpp --exclude src/unit-convenience.cpp --exclude src/unit-conversions.cpp --exclude src/unit-deserialization.cpp --exclude src/unit-element_access1.cpp --exclude src/unit-element_access2.cpp --exclude src/unit-inspection.cpp --exclude src/unit-iterator_wrapper.cpp --exclude src/unit-iterators1.cpp --exclude src/unit-iterators2.cpp --exclude src/unit-json_patch.cpp --exclude src/unit-json_pointer.cpp --exclude src/unit-modifiers.cpp --exclude src/unit-pointer_access.cpp --exclude src/unit-readme.cpp --exclude src/unit-reference_access.cpp --exclude src/unit-regression.cpp --exclude src/unit-serialization.cpp --exclude src/unit-testsuites.cpp --exclude src/unit-unicode.cpp --include ../src/json.hpp --gcov-options '\-lp' --gcov 'gcov-4.9'
- lcov --directory src --directory test/src --capture --output-file coverage.info --rc lcov_branch_coverage=1 --no-external
- lcov --remove coverage.info 'test/src/*' --output-file coverage.info --rc lcov_branch_coverage=1
- lcov --list coverage.info --rc lcov_branch_coverage=1
- coveralls-lcov --repo-token F9bs4Nop10JRgqPQXRcifyQKYhb3FczkS coverage.info
env:
- COMPILER=g++-4.9
- SPECIAL=coveralls
# Coverity (only for branch coverity_scan)
- os: linux
compiler: clang
before_install: echo -n | openssl s_client -connect scan.coverity.com:443 | sed -ne '/-BEGIN CERTIFICATE-/,/-END CERTIFICATE-/p' | sudo tee -a /etc/ssl/certs/ca-certificates.crt
addons:
apt:
sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-precise-3.6']
packages: ['g++-6', 'clang-3.6', 'ninja-build']
coverity_scan:
project:
name: "nlohmann/json"
description: "Build submitted via Travis CI"
notification_email: niels.lohmann@gmail.com
build_command_prepend: "mkdir coverity_build ; cd coverity_build ; cmake .. ; cd .."
build_command: "make -C coverity_build"
build_command_prepend: "make clean"
build_command: "make"
branch_pattern: coverity_scan
env:
- LLVM_VERSION=3.6.0
- SPECIAL=coverity
- COMPILER=clang++-3.6
# The next declaration is the encrypted COVERITY_SCAN_TOKEN, created
# via the "travis encrypt" command using the project repo's public key
- secure: "m89SSgE+ASLO38rSKx7MTXK3n5NkP9bIx95jwY71YEiuFzib30PDJ/DifKnXxBjvy/AkCGztErQRk/8ZCvq+4HXozU2knEGnL/RUitvlwbhzfh2D4lmS3BvWBGS3N3NewoPBrRmdcvnT0xjOGXxtZaJ3P74TkB9GBnlz/HmKORA="
# OSX / Clang
- os: osx
osx_image: xcode10.2
osx_image: xcode6.4
- os: osx
osx_image: xcode7.3
- os: osx
osx_image: xcode8
- os: osx
osx_image: xcode8.1
- os: osx
osx_image: xcode8.2
# Linux / GCC
- os: linux
compiler: gcc
env: COMPILER=g++-4.9
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: g++-4.9
- os: linux
compiler: gcc
env: COMPILER=g++-5
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: g++-5
- os: linux
compiler: gcc
env: COMPILER=g++-6
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-6', 'ninja-build']
packages: g++-6
# Linux / Clang
- os: linux
env: LLVM_VERSION=3.6.0
compiler: clang
- os: linux
env: LLVM_VERSION=3.6.1
compiler: clang
- os: linux
env: LLVM_VERSION=3.6.2
compiler: clang
- os: linux
env: LLVM_VERSION=3.7.0
compiler: clang
- os: linux
env: LLVM_VERSION=3.7.1
compiler: clang
- os: linux
env: LLVM_VERSION=3.8.0
compiler: clang
- os: linux
env: LLVM_VERSION=3.8.1
compiler: clang
#####################
# installation step #
#####################
# set directories to cache
cache:
directories:
- ${TRAVIS_BUILD_DIR}/deps/llvm-3.6.2
- ${TRAVIS_BUILD_DIR}/deps/llvm-3.6.1
- ${TRAVIS_BUILD_DIR}/deps/llvm-3.6.0
- ${TRAVIS_BUILD_DIR}/deps/llvm-3.7.0
- ${TRAVIS_BUILD_DIR}/deps/llvm-3.7.1
- ${TRAVIS_BUILD_DIR}/deps/llvm-3.8.0
- ${TRAVIS_BUILD_DIR}/deps/llvm-3.8.1
install:
# create deps dir if not existing
- DEPS_DIR="${TRAVIS_BUILD_DIR}/deps"
- mkdir -p ${DEPS_DIR}
# make sure CXX is correctly set
- if [[ "${COMPILER}" != "" ]]; then export CXX=${COMPILER}; fi
# install LLVM/clang when LLVM_VERSION is set
- |
if [[ "${LLVM_VERSION}" != "" ]]; then
LLVM_DIR=${DEPS_DIR}/llvm-${LLVM_VERSION}
if [[ -z "$(ls -A ${LLVM_DIR})" ]]; then
travis_retry wget --quiet https://cmake.org/files/v3.6/cmake-3.6.1.tar.gz
tar xfz cmake-3.6.1.tar.gz
(cd cmake-3.6.1 && ./configure --prefix=${LLVM_DIR}/cmake && make install)
export PATH="${LLVM_DIR}/cmake/bin:${PATH}"
LLVM_URL="http://llvm.org/releases/${LLVM_VERSION}/llvm-${LLVM_VERSION}.src.tar.xz"
LIBCXX_URL="http://llvm.org/releases/${LLVM_VERSION}/libcxx-${LLVM_VERSION}.src.tar.xz"
LIBCXXABI_URL="http://llvm.org/releases/${LLVM_VERSION}/libcxxabi-${LLVM_VERSION}.src.tar.xz"
CLANG_URL="http://llvm.org/releases/${LLVM_VERSION}/clang+llvm-${LLVM_VERSION}-x86_64-linux-gnu-ubuntu-14.04.tar.xz"
mkdir -p ${LLVM_DIR} ${LLVM_DIR}/build ${LLVM_DIR}/projects/libcxx ${LLVM_DIR}/projects/libcxxabi ${LLVM_DIR}/clang
travis_retry wget --quiet -O - ${LLVM_URL} | tar --strip-components=1 -xJ -C ${LLVM_DIR}
travis_retry wget --quiet -O - ${LIBCXX_URL} | tar --strip-components=1 -xJ -C ${LLVM_DIR}/projects/libcxx
travis_retry wget --quiet -O - ${LIBCXXABI_URL} | tar --strip-components=1 -xJ -C ${LLVM_DIR}/projects/libcxxabi
travis_retry wget --quiet -O - ${CLANG_URL} | tar --strip-components=1 -xJ -C ${LLVM_DIR}/clang
(cd ${LLVM_DIR}/build && cmake .. -DCMAKE_INSTALL_PREFIX=${LLVM_DIR}/install -DCMAKE_CXX_COMPILER=clang++)
(cd ${LLVM_DIR}/build/projects/libcxx && make install -j2)
(cd ${LLVM_DIR}/build/projects/libcxxabi && make install -j2)
fi
export CXXFLAGS="-nostdinc++ -isystem ${LLVM_DIR}/install/include/c++/v1"
export LDFLAGS="-L ${LLVM_DIR}/install/lib -l c++ -l c++abi"
export LD_LIBRARY_PATH="${LD_LIBRARY_PATH}:${LLVM_DIR}/install/lib"
export PATH="${LLVM_DIR}/clang/bin:${PATH}"
fi
################
# build script #
################
script:
# get CMake and Ninja (only for systems with brew - macOS)
- |
if [[ (-x $(which brew)) ]]; then
brew update
brew install cmake ninja
brew upgrade cmake
cmake --version
fi
# make sure CXX is correctly set
- if [[ "${COMPILER}" != "" ]]; then export CXX=${COMPILER}; fi
# append CXX_STANDARD to CMAKE_OPTIONS if required
- CMAKE_OPTIONS+=${CXX_STANDARD:+ -DCMAKE_CXX_STANDARD=$CXX_STANDARD -DCMAKE_CXX_STANDARD_REQUIRED=ON}
# build configuration
- CMAKE_OPTIONS+=" -DCMAKE_BUILD_TYPE=Release"
# show OS/compiler version
- uname -a
- $CXX --version
# compile and execute unit tests
- mkdir -p build && cd build
- cmake .. ${CMAKE_OPTIONS} -DJSON_BuildTests=On -GNinja && cmake --build .
- ctest --timeout 2700 -V -j
- cd ..
- make check
# check if homebrew works (only checks develop branch)
- if [ `which brew` ]; then
brew update ;
brew tap nlohmann/json ;
brew install nlohmann_json --HEAD ;
brew test nlohmann_json ;
# check if homebrew works (only checks develop branch)
- if [ `which brew` ]; then
brew update ;
brew tap nlohmann/json ;
brew install nlohmann_json --HEAD ;
brew test nlohmann_json ;
fi


@@ -1,14 +0,0 @@
cff-version: 1.1.0
message: "If you use this software, please cite it as below."
authors:
- family-names: Lohmann
given-names: Niels
orcid: https://orcid.org/0000-0001-9037-795X
email: mail@nlohmann.me
website: https://nlohmann.me
title: "JSON for Modern C++"
version: 3.10.0
date-released: 2021
license: MIT
repository-code: "https://github.com/nlohmann"
url: https://json.nlohmann.me


@@ -1,188 +1,55 @@
cmake_minimum_required(VERSION 3.1)
cmake_minimum_required(VERSION 3.0)
##
## PROJECT
## name and version
##
project(nlohmann_json VERSION 3.10.0 LANGUAGES CXX)
# define the project
project(nlohmann_json VERSION 2.1.0 LANGUAGES CXX)
##
## MAIN_PROJECT CHECK
## determine if nlohmann_json is built as a subproject (using add_subdirectory) or if it is the main project
##
set(MAIN_PROJECT OFF)
if (CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR)
set(MAIN_PROJECT ON)
endif()
enable_testing()
##
## INCLUDE
##
##
include(ExternalProject)
option(BuildTests "Build the unit tests" ON)
##
## OPTIONS
##
# define project variables
set(JSON_TARGET_NAME ${PROJECT_NAME})
set(JSON_PACKAGE_NAME ${JSON_TARGET_NAME})
set(JSON_TARGETS_FILENAME "${JSON_PACKAGE_NAME}Targets.cmake")
set(JSON_CONFIG_FILENAME "${JSON_PACKAGE_NAME}Config.cmake")
set(JSON_CONFIGVERSION_FILENAME "${JSON_PACKAGE_NAME}ConfigVersion.cmake")
set(JSON_CONFIG_DESTINATION "cmake")
set(JSON_INCLUDE_DESTINATION "include/nlohmann")
if (POLICY CMP0077)
# Allow CMake 3.13+ to override options when using FetchContent / add_subdirectory.
cmake_policy(SET CMP0077 NEW)
endif ()
# create and configure the library target
add_library(${JSON_TARGET_NAME} INTERFACE)
target_include_directories(${JSON_TARGET_NAME} INTERFACE
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>
$<INSTALL_INTERFACE:${JSON_INCLUDE_DESTINATION}>)
option(JSON_BuildTests "Build the unit tests when BUILD_TESTING is enabled." ${MAIN_PROJECT})
option(JSON_CI "Enable CI build targets." OFF)
option(JSON_Diagnostics "Use extended diagnostic messages." OFF)
option(JSON_ImplicitConversions "Enable implicit conversions." ON)
option(JSON_Install "Install CMake targets during install step." ${MAIN_PROJECT})
option(JSON_MultipleHeaders "Use non-amalgamated version of the library." OFF)
option(JSON_SystemInclude "Include as system headers (skip for clang-tidy)." OFF)
if (JSON_CI)
include(cmake/ci.cmake)
endif ()
##
## CONFIGURATION
##
include(GNUInstallDirs)
set(NLOHMANN_JSON_TARGET_NAME ${PROJECT_NAME})
set(NLOHMANN_JSON_CONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}" CACHE INTERNAL "")
set(NLOHMANN_JSON_INCLUDE_INSTALL_DIR "${CMAKE_INSTALL_INCLUDEDIR}")
set(NLOHMANN_JSON_TARGETS_EXPORT_NAME "${PROJECT_NAME}Targets")
set(NLOHMANN_JSON_CMAKE_CONFIG_TEMPLATE "cmake/config.cmake.in")
set(NLOHMANN_JSON_CMAKE_CONFIG_DIR "${CMAKE_CURRENT_BINARY_DIR}")
set(NLOHMANN_JSON_CMAKE_VERSION_CONFIG_FILE "${NLOHMANN_JSON_CMAKE_CONFIG_DIR}/${PROJECT_NAME}ConfigVersion.cmake")
set(NLOHMANN_JSON_CMAKE_PROJECT_CONFIG_FILE "${NLOHMANN_JSON_CMAKE_CONFIG_DIR}/${PROJECT_NAME}Config.cmake")
set(NLOHMANN_JSON_CMAKE_PROJECT_TARGETS_FILE "${NLOHMANN_JSON_CMAKE_CONFIG_DIR}/${PROJECT_NAME}Targets.cmake")
set(NLOHMANN_JSON_PKGCONFIG_INSTALL_DIR "${CMAKE_INSTALL_LIBDIR}/pkgconfig")
if (JSON_MultipleHeaders)
set(NLOHMANN_JSON_INCLUDE_BUILD_DIR "${PROJECT_SOURCE_DIR}/include/")
message(STATUS "Using the multi-header code from ${NLOHMANN_JSON_INCLUDE_BUILD_DIR}")
else()
set(NLOHMANN_JSON_INCLUDE_BUILD_DIR "${PROJECT_SOURCE_DIR}/single_include/")
message(STATUS "Using the single-header code from ${NLOHMANN_JSON_INCLUDE_BUILD_DIR}")
endif()
if (NOT JSON_ImplicitConversions)
message(STATUS "Implicit conversions are disabled")
endif()
if (JSON_Diagnostics)
message(STATUS "Diagnostics enabled")
endif()
if (JSON_SystemInclude)
set(NLOHMANN_JSON_SYSTEM_INCLUDE "SYSTEM")
endif()
##
## TARGET
## create target and add include path
##
add_library(${NLOHMANN_JSON_TARGET_NAME} INTERFACE)
add_library(${PROJECT_NAME}::${NLOHMANN_JSON_TARGET_NAME} ALIAS ${NLOHMANN_JSON_TARGET_NAME})
if (${CMAKE_VERSION} VERSION_LESS "3.8.0")
target_compile_features(${NLOHMANN_JSON_TARGET_NAME} INTERFACE cxx_range_for)
else()
target_compile_features(${NLOHMANN_JSON_TARGET_NAME} INTERFACE cxx_std_11)
endif()
target_compile_definitions(
${NLOHMANN_JSON_TARGET_NAME}
INTERFACE
JSON_USE_IMPLICIT_CONVERSIONS=$<BOOL:${JSON_ImplicitConversions}>
JSON_DIAGNOSTICS=$<BOOL:${JSON_Diagnostics}>
)
target_include_directories(
${NLOHMANN_JSON_TARGET_NAME}
${NLOHMANN_JSON_SYSTEM_INCLUDE} INTERFACE
$<BUILD_INTERFACE:${NLOHMANN_JSON_INCLUDE_BUILD_DIR}>
$<INSTALL_INTERFACE:include>
)
## add debug view definition file for msvc (natvis)
if (MSVC)
set(NLOHMANN_ADD_NATVIS TRUE)
set(NLOHMANN_NATVIS_FILE "nlohmann_json.natvis")
target_sources(
${NLOHMANN_JSON_TARGET_NAME}
INTERFACE
$<INSTALL_INTERFACE:${NLOHMANN_NATVIS_FILE}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/${NLOHMANN_NATVIS_FILE}>
)
endif()
# Install a pkg-config file, so other tools can find this.
CONFIGURE_FILE(
"${CMAKE_CURRENT_SOURCE_DIR}/cmake/pkg-config.pc.in"
"${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc"
)
##
## TESTS
## create and configure the unit test target
##
if (JSON_BuildTests)
include(CTest)
enable_testing()
# create and configure the unit test target
if (BuildTests)
add_subdirectory(test)
endif()
##
## INSTALL
## install header files, generate and install cmake config files for find_package()
##
# generate a config and config version file for the package
include(CMakePackageConfigHelpers)
# use a custom package version config file instead of
# write_basic_package_version_file to ensure that it's architecture-independent
# https://github.com/nlohmann/json/issues/1697
configure_file(
"cmake/nlohmann_jsonConfigVersion.cmake.in"
${NLOHMANN_JSON_CMAKE_VERSION_CONFIG_FILE}
@ONLY
)
configure_file(
${NLOHMANN_JSON_CMAKE_CONFIG_TEMPLATE}
${NLOHMANN_JSON_CMAKE_PROJECT_CONFIG_FILE}
@ONLY
)
configure_package_config_file("cmake/config.cmake.in"
"${CMAKE_CURRENT_BINARY_DIR}/${JSON_CONFIG_FILENAME}"
INSTALL_DESTINATION ${JSON_CONFIG_DESTINATION}
PATH_VARS JSON_INCLUDE_DESTINATION)
write_basic_package_version_file("${CMAKE_CURRENT_BINARY_DIR}/${JSON_CONFIGVERSION_FILENAME}"
VERSION ${PROJECT_VERSION}
COMPATIBILITY SameMajorVersion)
if(JSON_Install)
install(
DIRECTORY ${NLOHMANN_JSON_INCLUDE_BUILD_DIR}
DESTINATION ${NLOHMANN_JSON_INCLUDE_INSTALL_DIR}
)
install(
FILES ${NLOHMANN_JSON_CMAKE_PROJECT_CONFIG_FILE} ${NLOHMANN_JSON_CMAKE_VERSION_CONFIG_FILE}
DESTINATION ${NLOHMANN_JSON_CONFIG_INSTALL_DIR}
)
if (NLOHMANN_ADD_NATVIS)
install(
FILES ${NLOHMANN_NATVIS_FILE}
DESTINATION .
)
endif()
export(
TARGETS ${NLOHMANN_JSON_TARGET_NAME}
NAMESPACE ${PROJECT_NAME}::
FILE ${NLOHMANN_JSON_CMAKE_PROJECT_TARGETS_FILE}
)
install(
TARGETS ${NLOHMANN_JSON_TARGET_NAME}
EXPORT ${NLOHMANN_JSON_TARGETS_EXPORT_NAME}
INCLUDES DESTINATION ${NLOHMANN_JSON_INCLUDE_INSTALL_DIR}
)
install(
EXPORT ${NLOHMANN_JSON_TARGETS_EXPORT_NAME}
NAMESPACE ${PROJECT_NAME}::
DESTINATION ${NLOHMANN_JSON_CONFIG_INSTALL_DIR}
)
install(
FILES "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc"
DESTINATION ${NLOHMANN_JSON_PKGCONFIG_INSTALL_DIR}
)
endif()
# export the library target and store build directory in package registry
export(TARGETS ${JSON_TARGET_NAME}
FILE "${CMAKE_CURRENT_BINARY_DIR}/${JSON_TARGETS_FILENAME}")
export(PACKAGE ${JSON_PACKAGE_NAME})
# install library target and config files
install(TARGETS ${JSON_TARGET_NAME}
EXPORT ${JSON_PACKAGE_NAME})
install(FILES "src/json.hpp"
DESTINATION ${JSON_INCLUDE_DESTINATION})
install(EXPORT ${JSON_PACKAGE_NAME}
FILE ${JSON_TARGETS_FILENAME}
DESTINATION ${JSON_CONFIG_DESTINATION})
install(FILES "${CMAKE_CURRENT_BINARY_DIR}/${JSON_CONFIG_FILENAME}"
"${CMAKE_CURRENT_BINARY_DIR}/${JSON_CONFIGVERSION_FILENAME}"
DESTINATION ${JSON_CONFIG_DESTINATION})
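The `JSON_ImplicitConversions` option above is forwarded to consumers as the `JSON_USE_IMPLICIT_CONVERSIONS` compile definition. At the call site the difference looks roughly like this (a sketch assuming the documented behaviour of that macro):

```cpp
#include <string>
#include <nlohmann/json.hpp>

int main()
{
    const nlohmann::json j = {{"name", "value"}};

#if JSON_USE_IMPLICIT_CONVERSIONS
    // JSON_ImplicitConversions=ON (the default): implicit conversion is allowed.
    std::string s = j.at("name");
#else
    // JSON_ImplicitConversions=OFF: the conversion must be requested explicitly.
    std::string s = j.at("name").get<std::string>();
#endif

    return s == "value" ? 0 : 1;
}
```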


@@ -1,46 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at mail@nlohmann.me. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

File diff suppressed because it is too large.

@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2013-2021 Niels Lohmann
Copyright (c) 2013-2017 Niels Lohmann
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

Makefile (234 changed lines)

@@ -1,45 +1,35 @@
.PHONY: pretty clean ChangeLog.md release
.PHONY: pretty clean ChangeLog.md
##########################################################################
# configuration
##########################################################################
# directory of recent compiler binaries
COMPILER_DIR=/usr/local/opt/llvm/bin
# find GNU sed to use `-i` parameter
SED:=$(shell command -v gsed || which sed)
##########################################################################
# source files
##########################################################################
# the list of sources in the include folder
SRCS=$(shell find include -type f | sort)
# the single header (amalgamated from the source files)
AMALGAMATED_FILE=single_include/nlohmann/json.hpp
##########################################################################
# documentation of the Makefile's targets
##########################################################################
# used programs
RE2C = re2c
SED = sed
# main target
all:
@echo "amalgamate - amalgamate file single_include/nlohmann/json.hpp from the include/nlohmann sources"
@echo "ChangeLog.md - generate ChangeLog file"
@echo "check-amalgamation - check whether sources have been amalgamated"
@echo "clean - remove built files"
@echo "doctest - compile example files and check their output"
@echo "fuzz_testing - prepare fuzz testing of the JSON parser"
@echo "fuzz_testing_bson - prepare fuzz testing of the BSON parser"
@echo "fuzz_testing_cbor - prepare fuzz testing of the CBOR parser"
@echo "fuzz_testing_msgpack - prepare fuzz testing of the MessagePack parser"
@echo "fuzz_testing_ubjson - prepare fuzz testing of the UBJSON parser"
@echo "pretty - beautify code with Artistic Style"
@echo "run_benchmarks - build and run benchmarks"
$(MAKE) -C test
# clean up
clean:
rm -fr json_unit json_benchmarks fuzz fuzz-testing *.dSYM test/*.dSYM
rm -fr benchmarks/files/numbers/*.json
$(MAKE) clean -Cdoc
$(MAKE) clean -Ctest
##########################################################################
# unit tests
##########################################################################
# build unit tests
json_unit:
@$(MAKE) json_unit -C test
# run unit tests
check:
$(MAKE) check -C test
check-fast:
$(MAKE) check -C test TEST_PATTERN=""
##########################################################################
@@ -51,17 +41,6 @@ doctest:
$(MAKE) check_output -C doc
##########################################################################
# benchmarks
##########################################################################
run_benchmarks:
rm -fr cmake-build-benchmarks
mkdir cmake-build-benchmarks
cd cmake-build-benchmarks ; cmake ../benchmarks -GNinja -DCMAKE_BUILD_TYPE=Release -DJSON_BuildTests=On
cd cmake-build-benchmarks ; ninja
cd cmake-build-benchmarks ; ./json_benchmarks
##########################################################################
# fuzzing
##########################################################################
@@ -75,14 +54,6 @@ fuzz_testing:
find test/data/json_tests -size -5k -name *json | xargs -I{} cp "{}" fuzz-testing/testcases
@echo "Execute: afl-fuzz -i fuzz-testing/testcases -o fuzz-testing/out fuzz-testing/fuzzer"
fuzz_testing_bson:
rm -fr fuzz-testing
mkdir -p fuzz-testing fuzz-testing/testcases fuzz-testing/out
$(MAKE) parse_bson_fuzzer -C test CXX=afl-clang++
mv test/parse_bson_fuzzer fuzz-testing/fuzzer
find test/data -size -5k -name *.bson | xargs -I{} cp "{}" fuzz-testing/testcases
@echo "Execute: afl-fuzz -i fuzz-testing/testcases -o fuzz-testing/out fuzz-testing/fuzzer"
fuzz_testing_cbor:
rm -fr fuzz-testing
mkdir -p fuzz-testing fuzz-testing/testcases fuzz-testing/out
@@ -99,14 +70,6 @@ fuzz_testing_msgpack:
find test/data -size -5k -name *.msgpack | xargs -I{} cp "{}" fuzz-testing/testcases
@echo "Execute: afl-fuzz -i fuzz-testing/testcases -o fuzz-testing/out fuzz-testing/fuzzer"
fuzz_testing_ubjson:
rm -fr fuzz-testing
mkdir -p fuzz-testing fuzz-testing/testcases fuzz-testing/out
$(MAKE) parse_ubjson_fuzzer -C test CXX=afl-clang++
mv test/parse_ubjson_fuzzer fuzz-testing/fuzzer
find test/data -size -5k -name *.ubjson | xargs -I{} cp "{}" fuzz-testing/testcases
@echo "Execute: afl-fuzz -i fuzz-testing/testcases -o fuzz-testing/out fuzz-testing/fuzzer"
fuzzing-start:
afl-fuzz -S fuzzer1 -i fuzz-testing/testcases -o fuzz-testing/out fuzz-testing/fuzzer > /dev/null &
afl-fuzz -S fuzzer2 -i fuzz-testing/testcases -o fuzz-testing/out fuzz-testing/fuzzer > /dev/null &
@@ -121,125 +84,56 @@ fuzzing-stop:
-killall fuzzer
-killall afl-fuzz
##########################################################################
# Static analysis
# static analyzer
##########################################################################
# call PVS-Studio Analyzer <https://www.viva64.com/en/pvs-studio/>
pvs_studio:
rm -fr cmake-build-pvs-studio
mkdir cmake-build-pvs-studio
cd cmake-build-pvs-studio ; cmake .. -DCMAKE_EXPORT_COMPILE_COMMANDS=On -DJSON_MultipleHeaders=ON
cd cmake-build-pvs-studio ; pvs-studio-analyzer analyze -j 10
cd cmake-build-pvs-studio ; plog-converter -a'GA:1,2;64:1;CS' -t fullhtml PVS-Studio.log -o pvs
open cmake-build-pvs-studio/pvs/index.html
# call cppcheck on the main header file
cppcheck:
cppcheck --enable=warning --inconclusive --force --std=c++11 src/json.hpp --error-exitcode=1
# run clang sanitize (we are overriding the CXXFLAGS provided by Travis in order to use gcc's libstdc++)
clang_sanitize: clean
CXX=clang++ CXXFLAGS="-g -O2 -fsanitize=address -fsanitize=undefined -fno-omit-frame-pointer" $(MAKE)
##########################################################################
# Code format and source amalgamation
# maintainer targets
##########################################################################
# call the Artistic Style pretty printer on all source files
# create scanner with re2c
re2c: src/json.hpp.re2c
$(RE2C) -W --utf-8 --encoding-policy fail --bit-vectors --nested-ifs --no-debug-info $< | $(SED) '1d' > src/json.hpp
# pretty printer
pretty:
astyle \
--style=allman \
--indent=spaces=4 \
--indent-modifiers \
--indent-switches \
--indent-preproc-block \
--indent-preproc-define \
--indent-col1-comments \
--pad-oper \
--pad-header \
--align-pointer=type \
--align-reference=type \
--add-brackets \
--convert-tabs \
--close-templates \
--lineend=linux \
--preserve-date \
--suffix=none \
--formatted \
$(SRCS) $(AMALGAMATED_FILE) test/src/*.cpp test/src/*.hpp benchmarks/src/benchmarks.cpp doc/examples/*.cpp
# call the Clang-Format on all source files
pretty_format:
for FILE in $(SRCS) $(AMALGAMATED_FILE) test/src/*.cpp test/src/*.hpp benchmarks/src/benchmarks.cpp doc/examples/*.cpp; do echo $$FILE; clang-format -i $$FILE; done
# create single header file
amalgamate: $(AMALGAMATED_FILE)
# call the amalgamation tool and pretty print
$(AMALGAMATED_FILE): $(SRCS)
third_party/amalgamate/amalgamate.py -c third_party/amalgamate/config.json -s . --verbose=yes
$(MAKE) pretty
# check if file single_include/nlohmann/json.hpp has been amalgamated from the nlohmann sources
# Note: this target is called by Travis
check-amalgamation:
@mv $(AMALGAMATED_FILE) $(AMALGAMATED_FILE)~
@$(MAKE) amalgamate
@diff $(AMALGAMATED_FILE) $(AMALGAMATED_FILE)~ || (echo "===================================================================\n Amalgamation required! Please read the contribution guidelines\n in file .github/CONTRIBUTING.md.\n===================================================================" ; mv $(AMALGAMATED_FILE)~ $(AMALGAMATED_FILE) ; false)
@mv $(AMALGAMATED_FILE)~ $(AMALGAMATED_FILE)
astyle --style=allman --indent=spaces=4 --indent-modifiers \
--indent-switches --indent-preproc-block --indent-preproc-define \
--indent-col1-comments --pad-oper --pad-header --align-pointer=type \
--align-reference=type --add-brackets --convert-tabs --close-templates \
--lineend=linux --preserve-date --suffix=none --formatted \
src/json.hpp src/json.hpp.re2c test/src/*.cpp \
benchmarks/benchmarks.cpp doc/examples/*.cpp
##########################################################################
# ChangeLog
# benchmarks
##########################################################################
# Create a ChangeLog based on the git log using the GitHub Changelog Generator
# (<https://github.com/github-changelog-generator/github-changelog-generator>).
# benchmarks
json_benchmarks: benchmarks/benchmarks.cpp benchmarks/benchpress.hpp benchmarks/cxxopts.hpp src/json.hpp
cd benchmarks/files/numbers ; python generate.py
$(CXX) -std=c++11 -pthread $(CXXFLAGS) -DNDEBUG -O3 -flto -I src -I benchmarks $< $(LDFLAGS) -o $@
./json_benchmarks
##########################################################################
# changelog
##########################################################################
# variable to control the diffs between the last released version and the current repository state
NEXT_VERSION ?= "unreleased"
ChangeLog.md:
github_changelog_generator -o ChangeLog.md --user nlohmann --project json --simple-list --release-url https://github.com/nlohmann/json/releases/tag/%s --future-release $(NEXT_VERSION)
$(SED) -i 's|https://github.com/nlohmann/json/releases/tag/HEAD|https://github.com/nlohmann/json/tree/HEAD|' ChangeLog.md
$(SED) -i '2i All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/).' ChangeLog.md
##########################################################################
# Release files
##########################################################################
# Create the files for a release and add signatures and hashes. We use `-X` to make the resulting ZIP file
# reproducible, see <https://content.pivotal.io/blog/barriers-to-deterministic-reproducible-zip-files>.
release:
rm -fr release_files
mkdir release_files
zip -9 --recurse-paths -X include.zip $(SRCS) $(AMALGAMATED_FILE) meson.build LICENSE.MIT
gpg --armor --detach-sig include.zip
mv include.zip include.zip.asc release_files
gpg --armor --detach-sig $(AMALGAMATED_FILE)
cp $(AMALGAMATED_FILE) release_files
mv $(AMALGAMATED_FILE).asc release_files
cd release_files ; shasum -a 256 json.hpp > hashes.txt
cd release_files ; shasum -a 256 include.zip >> hashes.txt
##########################################################################
# Maintenance
##########################################################################
# clean up
clean:
rm -fr json_unit json_benchmarks fuzz fuzz-testing *.dSYM test/*.dSYM oclint_report.html
rm -fr benchmarks/files/numbers/*.json
rm -fr cmake-3.1.0-Darwin64.tar.gz cmake-3.1.0-Darwin64
rm -fr cmake-build-benchmarks cmake-build-pedantic fuzz-testing cmake-build-clang-analyze cmake-build-pvs-studio cmake-build-infer cmake_build
$(MAKE) clean -Cdoc
##########################################################################
# Thirdparty code
##########################################################################
update_hedley:
rm -f include/nlohmann/thirdparty/hedley/hedley.hpp include/nlohmann/thirdparty/hedley/hedley_undef.hpp
curl https://raw.githubusercontent.com/nemequ/hedley/master/hedley.h -o include/nlohmann/thirdparty/hedley/hedley.hpp
$(SED) -i 's/HEDLEY_/JSON_HEDLEY_/g' include/nlohmann/thirdparty/hedley/hedley.hpp
grep "[[:blank:]]*#[[:blank:]]*undef" include/nlohmann/thirdparty/hedley/hedley.hpp | grep -v "__" | sort | uniq | $(SED) 's/ //g' | $(SED) 's/undef/undef /g' > include/nlohmann/thirdparty/hedley/hedley_undef.hpp
$(SED) -i '1s/^/#pragma once\n\n/' include/nlohmann/thirdparty/hedley/hedley.hpp
$(SED) -i '1s/^/#pragma once\n\n/' include/nlohmann/thirdparty/hedley/hedley_undef.hpp
$(MAKE) amalgamate
github_changelog_generator -o ChangeLog.md --simple-list --release-url https://github.com/nlohmann/json/releases/tag/%s --future-release $(NEXT_VERSION)
gsed -i 's|https://github.com/nlohmann/json/releases/tag/HEAD|https://github.com/nlohmann/json/tree/HEAD|' ChangeLog.md
gsed -i '2i All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](http://semver.org/).' ChangeLog.md
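The fuzz_testing* targets above build the parsers with afl-clang++ and then hand the result to afl-fuzz. For orientation, a minimal harness of the kind such targets compile is sketched below (hypothetical and simplified, not the repository's own fuzzer source): it reads one input from stdin, feeds it to the parser, and treats parse errors as normal control flow so that only crashes or sanitizer reports count as findings.

#include <iostream>
#include <json.hpp>

int main()
{
    try
    {
        // parse whatever afl-fuzz feeds on stdin
        nlohmann::json j = nlohmann::json::parse(std::cin);
        // also exercise serialization of the parsed value
        std::cout << j;
    }
    catch (const std::exception&)
    {
        // malformed input is expected during fuzzing; only crashes matter
    }
    return 0;
}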

README.md (1117 changed lines): file diff suppressed because it is too large.

@@ -1,83 +1,10 @@
version: '{build}'
environment:
matrix:
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Debug
platform: x86
CXX_FLAGS: "/W4 /WX"
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Release
platform: x86
CXX_FLAGS: "/W4 /WX"
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Release
platform: x86
name: with_win_header
CXX_FLAGS: "/W4 /WX"
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
configuration: Release
platform: x86
CXX_FLAGS: "/permissive- /std:c++latest /utf-8 /W4 /WX"
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 15 2017
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
configuration: Release
platform: x86
CXX_FLAGS: "/W4 /WX"
CMAKE_OPTIONS: "-DJSON_ImplicitConversions=OFF"
GENERATOR: Visual Studio 16 2019
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
configuration: Release
platform: x64
CXX_FLAGS: "/W4 /WX"
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 14 2015
- APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
configuration: Release
platform: x64
CXX_FLAGS: "/permissive- /std:c++latest /Zc:__cplusplus /utf-8 /W4 /WX"
CMAKE_OPTIONS: ""
GENERATOR: Visual Studio 15 2017
init:
- cmake --version
- msbuild /version
install:
- if "%platform%"=="x86" set GENERATOR_PLATFORM=Win32
before_build:
# for with_win_header build, inject the inclusion of Windows.h to the single-header library
- ps: if ($env:name -Eq "with_win_header") { $header_path = "single_include\nlohmann\json.hpp" }
- ps: if ($env:name -Eq "with_win_header") { "#include <Windows.h>`n" + (Get-Content $header_path | Out-String) | Set-Content $header_path }
- cmake . -G "%GENERATOR%" -A "%GENERATOR_PLATFORM%" -DCMAKE_CXX_FLAGS="%CXX_FLAGS%" -DCMAKE_IGNORE_PATH="C:/Program Files/Git/usr/bin" -DJSON_BuildTests=On "%CMAKE_OPTIONS%"
build_script:
- cmake --build . --config "%configuration%"
test_script:
- if "%configuration%"=="Release" ctest -C "%configuration%" -V -j
# On Debug builds, skip test-unicode_all
# as it is extremely slow to run and causes
# occasional timeouts on AppVeyor.
# More info: https://github.com/nlohmann/json/pull/1570
- if "%configuration%"=="Debug" ctest --exclude-regex "test-unicode" -C "%configuration%" -V -j
# only build PRs and commits to develop branch
# (see https://help.appveyor.com/discussions/questions/55079-two-builds-per-commit-to-pull-request)
branches:
only:
- develop
version: '{build}'
os: Visual Studio 2015
init: []
install: []
build_script:
- set PATH=C:\Program Files (x86)\MSBuild\14.0\Bin;%PATH%
- cmake . -G "Visual Studio 14 2015"
- cmake --build . --config Release
test_script:
- ctest -C Release -V
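The with_win_header job in the appveyor.yml matrix above prepends #include <Windows.h> to the single header before building, presumably to catch clashes with the macros that <Windows.h> defines (min, max, and friends). A minimal sketch of what that configuration effectively has to compile (illustrative only, assuming a Windows toolchain and the single_include directory on the include path):

// json.hpp must still compile and behave correctly when <Windows.h> comes first
#include <Windows.h>
#include <nlohmann/json.hpp>

int main()
{
    nlohmann::json j = {{"pi", 3.141}, {"happy", true}};
    return j.size() == 2 ? 0 : 1;
}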


@@ -1,33 +0,0 @@
cmake_minimum_required(VERSION 3.11)
project(JSON_Benchmarks LANGUAGES CXX)
# set compiler flags
if((CMAKE_CXX_COMPILER_ID MATCHES GNU) OR (CMAKE_CXX_COMPILER_ID MATCHES Clang))
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -flto -DNDEBUG -O3")
endif()
# configure Google Benchmarks
include(FetchContent)
FetchContent_Declare(
benchmark
GIT_REPOSITORY https://github.com/google/benchmark.git
GIT_TAG origin/main
GIT_SHALLOW TRUE
)
FetchContent_GetProperties(benchmark)
if(NOT benchmark_POPULATED)
FetchContent_Populate(benchmark)
set(BENCHMARK_ENABLE_TESTING OFF CACHE INTERNAL "" FORCE)
add_subdirectory(${benchmark_SOURCE_DIR} ${benchmark_BINARY_DIR})
endif()
# download test data
include(${CMAKE_SOURCE_DIR}/../cmake/download_test_data.cmake)
# benchmark binary
add_executable(json_benchmarks src/benchmarks.cpp)
target_compile_features(json_benchmarks PRIVATE cxx_std_11)
target_link_libraries(json_benchmarks benchmark ${CMAKE_THREAD_LIBS_INIT})
add_dependencies(json_benchmarks download_test_data)
target_include_directories(json_benchmarks PRIVATE ${CMAKE_SOURCE_DIR}/../single_include ${CMAKE_BINARY_DIR}/include)

benchmarks/benchmarks.cpp (new file, 118 lines)

@@ -0,0 +1,118 @@
#define BENCHPRESS_CONFIG_MAIN
#include <fstream>
#include <sstream>
#include <benchpress.hpp>
#include <json.hpp>
#include <pthread.h>
#include <thread>
using json = nlohmann::json;
struct StartUp
{
StartUp()
{
#ifndef __llvm__
// pin thread to a single CPU
cpu_set_t cpuset;
pthread_t thread;
thread = pthread_self();
CPU_ZERO(&cpuset);
CPU_SET(std::thread::hardware_concurrency() - 1, &cpuset);
pthread_setaffinity_np(thread, sizeof(cpu_set_t), &cpuset);
#endif
}
};
StartUp startup;
enum class EMode { input, output_no_indent, output_with_indent };
static void bench(benchpress::context& ctx,
const std::string& in_path,
const EMode mode)
{
// using string streams for benchmarking to factor-out cold-cache disk
// access.
std::stringstream istr;
{
// read file into string stream
std::ifstream input_file(in_path);
istr << input_file.rdbuf();
input_file.close();
// read the stream once
json j;
j << istr;
// clear flags and rewind
istr.clear();
istr.seekg(0);
}
switch (mode)
{
// benchmarking input
case EMode::input:
{
ctx.reset_timer();
for (size_t i = 0; i < ctx.num_iterations(); ++i)
{
// clear flags and rewind
istr.clear();
istr.seekg(0);
json j;
j << istr;
}
break;
}
// benchmarking output
case EMode::output_no_indent:
case EMode::output_with_indent:
{
// create JSON value from input
json j;
j << istr;
std::stringstream ostr;
ctx.reset_timer();
for (size_t i = 0; i < ctx.num_iterations(); ++i)
{
if (mode == EMode::output_no_indent)
{
ostr << j;
}
else
{
ostr << std::setw(4) << j;
}
// reset data
ostr.str(std::string());
}
break;
}
}
}
#define BENCHMARK_I(mode, title, in_path) \
BENCHMARK((title), [](benchpress::context* ctx) \
{ \
bench(*ctx, (in_path), (mode)); \
})
BENCHMARK_I(EMode::input, "parse jeopardy.json", "benchmarks/files/jeopardy/jeopardy.json");
BENCHMARK_I(EMode::input, "parse canada.json", "benchmarks/files/nativejson-benchmark/canada.json");
BENCHMARK_I(EMode::input, "parse citm_catalog.json", "benchmarks/files/nativejson-benchmark/citm_catalog.json");
BENCHMARK_I(EMode::input, "parse twitter.json", "benchmarks/files/nativejson-benchmark/twitter.json");
BENCHMARK_I(EMode::input, "parse numbers/floats.json", "benchmarks/files/numbers/floats.json");
BENCHMARK_I(EMode::input, "parse numbers/signed_ints.json", "benchmarks/files/numbers/signed_ints.json");
BENCHMARK_I(EMode::input, "parse numbers/unsigned_ints.json", "benchmarks/files/numbers/unsigned_ints.json");
BENCHMARK_I(EMode::output_no_indent, "dump jeopardy.json", "benchmarks/files/jeopardy/jeopardy.json");
BENCHMARK_I(EMode::output_with_indent, "dump jeopardy.json with indent", "benchmarks/files/jeopardy/jeopardy.json");
BENCHMARK_I(EMode::output_no_indent, "dump numbers/floats.json", "benchmarks/files/numbers/floats.json");
BENCHMARK_I(EMode::output_no_indent, "dump numbers/signed_ints.json", "benchmarks/files/numbers/signed_ints.json");

benchmarks/benchpress.hpp (new file, 401 lines)

@@ -0,0 +1,401 @@
/*
* Copyright (C) 2015 Christopher Gilbert.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef BENCHPRESS_HPP
#define BENCHPRESS_HPP
#include <algorithm> // max, min
#include <atomic> // atomic_intmax_t
#include <chrono> // high_resolution_timer, duration
#include <functional> // function
#include <iomanip> // setw
#include <iostream> // cout
#include <regex> // regex, regex_match
#include <sstream> // stringstream
#include <string> // string
#include <thread> // thread
#include <vector> // vector
namespace benchpress {
/*
* The options class encapsulates all options for running benchmarks.
*
* When including benchpress, a main function can be emitted which includes a command-line parser for building an
* options object. However from time-to-time it may be necessary for the developer to have to build their own main
* stub and construct the options object manually.
*
* options opts;
* opts
* .bench(".*")
* .benchtime(1)
* .cpu(4);
*/
class options {
std::string d_bench;
size_t d_benchtime;
size_t d_cpu;
public:
options()
: d_bench(".*")
, d_benchtime(1)
, d_cpu(std::thread::hardware_concurrency())
{}
options& bench(const std::string& bench) {
d_bench = bench;
return *this;
}
options& benchtime(size_t benchtime) {
d_benchtime = benchtime;
return *this;
}
options& cpu(size_t cpu) {
d_cpu = cpu;
return *this;
}
std::string get_bench() const {
return d_bench;
}
size_t get_benchtime() const {
return d_benchtime;
}
size_t get_cpu() const {
return d_cpu;
}
};
class context;
/*
* The benchmark_info class is used to store a function name / pointer pair.
*
* benchmark_info bi("example", [](benchpress::context* b) {
* // benchmark function
* });
*/
class benchmark_info {
std::string d_name;
std::function<void(context*)> d_func;
public:
benchmark_info(std::string name, std::function<void(context*)> func)
: d_name(name)
, d_func(func)
{}
std::string get_name() const { return d_name; }
std::function<void(context*)> get_func() const { return d_func; }
};
/*
* The registration class is responsible for providing a single global point of reference for registering
* benchmark functions.
*
* registration::get_ptr()->register_benchmark(info);
*/
class registration {
static registration* d_this;
std::vector<benchmark_info> d_benchmarks;
public:
static registration* get_ptr() {
if (nullptr == d_this) {
d_this = new registration();
}
return d_this;
}
void register_benchmark(benchmark_info& info) {
d_benchmarks.push_back(info);
}
std::vector<benchmark_info> get_benchmarks() { return d_benchmarks; }
};
/*
* The auto_register class is a helper used to register benchmarks.
*/
class auto_register {
public:
auto_register(const std::string& name, std::function<void(context*)> func) {
benchmark_info info(name, func);
registration::get_ptr()->register_benchmark(info);
}
};
#define CONCAT(x, y) x ## y
#define CONCAT2(x, y) CONCAT(x, y)
// The BENCHMARK macro is a helper for creating benchmark functions and automatically registering them with the
// registration class.
#define BENCHMARK(x, f) benchpress::auto_register CONCAT2(register_, __LINE__)((x), (f));
// This macro will prevent the compiler from removing a redundant code path which has no side-effects.
#define DISABLE_REDUNDANT_CODE_OPT() { asm(""); }
/*
* The result class is responsible for producing a printable string representation of a benchmark run.
*/
class result {
size_t d_num_iterations;
std::chrono::nanoseconds d_duration;
size_t d_num_bytes;
public:
result(size_t num_iterations, std::chrono::nanoseconds duration, size_t num_bytes)
: d_num_iterations(num_iterations)
, d_duration(duration)
, d_num_bytes(num_bytes)
{}
size_t get_ns_per_op() const {
if (d_num_iterations <= 0) {
return 0;
}
return d_duration.count() / d_num_iterations;
}
double get_mb_per_s() const {
if (d_num_iterations <= 0 || d_duration.count() <= 0 || d_num_bytes <= 0) {
return 0;
}
return ((double(d_num_bytes) * double(d_num_iterations) / double(1e6)) /
double(std::chrono::duration_cast<std::chrono::seconds>(d_duration).count()));
}
std::string to_string() const {
std::stringstream tmp;
tmp << std::setw(12) << std::right << d_num_iterations;
size_t npo = get_ns_per_op();
tmp << std::setw(12) << std::right << npo << std::setw(0) << " ns/op";
double mbs = get_mb_per_s();
if (mbs > 0.0) {
tmp << std::setw(12) << std::right << mbs << std::setw(0) << " MB/s";
}
return std::string(tmp.str());
}
};
/*
* The parallel_context class is responsible for providing a thread-safe context for parallel benchmark code.
*/
class parallel_context {
std::atomic_intmax_t d_num_iterations;
public:
parallel_context(size_t num_iterations)
: d_num_iterations(num_iterations)
{}
bool next() {
return (d_num_iterations.fetch_sub(1) > 0);
}
};
/*
* The context class is responsible for providing an interface for capturing benchmark metrics to benchmark functions.
*/
class context {
bool d_timer_on;
std::chrono::high_resolution_clock::time_point d_start;
std::chrono::nanoseconds d_duration;
std::chrono::seconds d_benchtime;
size_t d_num_iterations;
size_t d_num_threads;
size_t d_num_bytes;
benchmark_info d_benchmark;
public:
context(const benchmark_info& info, const options& opts)
: d_timer_on(false)
, d_start()
, d_duration()
, d_benchtime(std::chrono::seconds(opts.get_benchtime()))
, d_num_iterations(1)
, d_num_threads(opts.get_cpu())
, d_num_bytes(0)
, d_benchmark(info)
{}
size_t num_iterations() const { return d_num_iterations; }
void set_num_threads(size_t n) { d_num_threads = n; }
size_t num_threads() const { return d_num_threads; }
void start_timer() {
if (!d_timer_on) {
d_start = std::chrono::high_resolution_clock::now();
d_timer_on = true;
}
}
void stop_timer() {
if (d_timer_on) {
d_duration += std::chrono::high_resolution_clock::now() - d_start;
d_timer_on = false;
}
}
void reset_timer() {
if (d_timer_on) {
d_start = std::chrono::high_resolution_clock::now();
}
d_duration = std::chrono::nanoseconds::zero();
}
void set_bytes(int64_t bytes) { d_num_bytes = bytes; }
size_t get_ns_per_op() {
if (d_num_iterations <= 0) {
return 0;
}
return d_duration.count() / d_num_iterations;
}
void run_n(size_t n) {
d_num_iterations = n;
reset_timer();
start_timer();
d_benchmark.get_func()(this);
stop_timer();
}
void run_parallel(std::function<void(parallel_context*)> f) {
parallel_context pc(d_num_iterations);
std::vector<std::thread> threads;
for (size_t i = 0; i < d_num_threads; ++i) {
threads.push_back(std::thread([&pc,&f]() -> void {
f(&pc);
}));
}
for(auto& thread : threads){
thread.join();
}
}
result run() {
size_t n = 1;
run_n(n);
while (d_duration < d_benchtime && n < 1e9) {
size_t last = n;
if (get_ns_per_op() == 0) {
n = 1e9;
} else {
n = d_duration.count() / get_ns_per_op();
}
n = std::max(std::min(n+n/2, 100*last), last+1);
n = round_up(n);
run_n(n);
}
return result(n, d_duration, d_num_bytes);
}
private:
template<typename T>
T round_down_10(T n) {
int tens = 0;
while (n > 10) {
n /= 10;
tens++;
}
int result = 1;
for (int i = 0; i < tens; ++i) {
result *= 10;
}
return result;
}
template<typename T>
T round_up(T n) {
T base = round_down_10(n);
if (n < (2 * base)) {
return 2 * base;
}
if (n < (5 * base)) {
return 5 * base;
}
return 10 * base;
}
};
/*
* The run_benchmarks function will run the registered benchmarks.
*/
void run_benchmarks(const options& opts) {
std::regex match_r(opts.get_bench());
auto benchmarks = registration::get_ptr()->get_benchmarks();
for (auto& info : benchmarks) {
if (std::regex_match(info.get_name(), match_r)) {
context c(info, opts);
auto r = c.run();
std::cout << std::setw(35) << std::left << info.get_name() << r.to_string() << std::endl;
}
}
}
} // namespace benchpress
/*
* If BENCHPRESS_CONFIG_MAIN is defined when the file is included then a main function will be emitted which provides a
* command-line parser and then executes run_benchmarks.
*/
#ifdef BENCHPRESS_CONFIG_MAIN
#include "cxxopts.hpp"
benchpress::registration* benchpress::registration::d_this;
int main(int argc, char** argv) {
std::chrono::high_resolution_clock::time_point bp_start = std::chrono::high_resolution_clock::now();
benchpress::options bench_opts;
try {
cxxopts::Options cmd_opts(argv[0], " - command line options");
cmd_opts.add_options()
("bench", "run benchmarks matching the regular expression", cxxopts::value<std::string>()
->default_value(".*"))
("benchtime", "run enough iterations of each benchmark to take t seconds", cxxopts::value<size_t>()
->default_value("1"))
("cpu", "specify the number of threads to use for parallel benchmarks", cxxopts::value<size_t>()
->default_value(std::to_string(std::thread::hardware_concurrency())))
("help", "print help")
;
cmd_opts.parse(argc, argv);
if (cmd_opts.count("help")) {
std::cout << cmd_opts.help({""}) << std::endl;
exit(0);
}
if (cmd_opts.count("bench")) {
bench_opts.bench(cmd_opts["bench"].as<std::string>());
}
if (cmd_opts.count("benchtime")) {
bench_opts.benchtime(cmd_opts["benchtime"].as<size_t>());
}
if (cmd_opts.count("cpu")) {
bench_opts.cpu(cmd_opts["cpu"].as<size_t>());
}
} catch (const cxxopts::OptionException& e) {
std::cout << "error parsing options: " << e.what() << std::endl;
exit(1);
}
benchpress::run_benchmarks(bench_opts);
float duration = std::chrono::duration_cast<std::chrono::milliseconds>(
std::chrono::high_resolution_clock::now() - bp_start
).count() / 1000.f;
std::cout << argv[0] << " " << duration << "s" << std::endl;
return 0;
}
#endif
#endif // BENCHPRESS_HPP
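benchpress.hpp above is self-contained except for cxxopts.hpp, which it only pulls in when BENCHPRESS_CONFIG_MAIN is defined. A minimal standalone benchmark using just the pieces shown above might look like this (a sketch, assuming benchpress.hpp and cxxopts.hpp are on the include path, e.g. via -I benchmarks, and building with -std=c++11 -pthread as the Makefile above does):

// exactly one translation unit defines BENCHPRESS_CONFIG_MAIN to emit main()
#define BENCHPRESS_CONFIG_MAIN
#include <benchpress.hpp>
#include <vector>

BENCHMARK("vector push_back 1000 ints", [](benchpress::context* ctx)
{
    for (size_t i = 0; i < ctx->num_iterations(); ++i)
    {
        std::vector<int> v;
        for (int k = 0; k < 1000; ++k)
        {
            v.push_back(k);
        }
    }
})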

benchmarks/cxxopts.hpp (new file, 1312 lines): file diff suppressed because it is too large.
Four further file diffs suppressed (too large or with overlong lines).

@@ -0,0 +1,25 @@
#!/usr/bin/env python
import json
import random
import sys
random.seed(0)
# floats
result_floats = []
for x in range(0, 1000000):
result_floats.append(random.uniform(-100000000.0, 100000000.0))
json.dump(result_floats, open("floats.json", "w"), indent=2)
# unsigned integers
result_uints = []
for x in range(0, 1000000):
result_uints.append(random.randint(0, 18446744073709551615))
json.dump(result_uints, open("unsigned_ints.json", "w"), indent=2)
# signed integers
result_sints = []
for x in range(0, 1000000):
result_sints.append(random.randint(-9223372036854775808, 9223372036854775807))
json.dump(result_sints, open("signed_ints.json", "w"), indent=2)


@@ -1,110 +0,0 @@
#include "benchmark/benchmark.h"
#include <nlohmann/json.hpp>
#include <fstream>
#include <test_data.hpp>
using json = nlohmann::json;
//////////////////////////////////////////////////////////////////////////////
// parse JSON from file
//////////////////////////////////////////////////////////////////////////////
static void ParseFile(benchmark::State& state, const char* filename)
{
while (state.KeepRunning())
{
state.PauseTiming();
auto* f = new std::ifstream(filename);
auto* j = new json();
state.ResumeTiming();
*j = json::parse(*f);
state.PauseTiming();
delete f;
delete j;
state.ResumeTiming();
}
std::ifstream file(filename, std::ios::binary | std::ios::ate);
state.SetBytesProcessed(state.iterations() * file.tellg());
}
BENCHMARK_CAPTURE(ParseFile, jeopardy, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json");
BENCHMARK_CAPTURE(ParseFile, canada, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json");
BENCHMARK_CAPTURE(ParseFile, citm_catalog, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json");
BENCHMARK_CAPTURE(ParseFile, twitter, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json");
BENCHMARK_CAPTURE(ParseFile, floats, TEST_DATA_DIRECTORY "/regression/floats.json");
BENCHMARK_CAPTURE(ParseFile, signed_ints, TEST_DATA_DIRECTORY "/regression/signed_ints.json");
BENCHMARK_CAPTURE(ParseFile, unsigned_ints, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json");
BENCHMARK_CAPTURE(ParseFile, small_signed_ints, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json");
//////////////////////////////////////////////////////////////////////////////
// parse JSON from string
//////////////////////////////////////////////////////////////////////////////
static void ParseString(benchmark::State& state, const char* filename)
{
std::ifstream f(filename);
std::string str((std::istreambuf_iterator<char>(f)), std::istreambuf_iterator<char>());
while (state.KeepRunning())
{
state.PauseTiming();
auto* j = new json();
state.ResumeTiming();
*j = json::parse(str);
state.PauseTiming();
delete j;
state.ResumeTiming();
}
state.SetBytesProcessed(state.iterations() * str.size());
}
BENCHMARK_CAPTURE(ParseString, jeopardy, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json");
BENCHMARK_CAPTURE(ParseString, canada, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json");
BENCHMARK_CAPTURE(ParseString, citm_catalog, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json");
BENCHMARK_CAPTURE(ParseString, twitter, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json");
BENCHMARK_CAPTURE(ParseString, floats, TEST_DATA_DIRECTORY "/regression/floats.json");
BENCHMARK_CAPTURE(ParseString, signed_ints, TEST_DATA_DIRECTORY "/regression/signed_ints.json");
BENCHMARK_CAPTURE(ParseString, unsigned_ints, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json");
BENCHMARK_CAPTURE(ParseString, small_signed_ints, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json");
//////////////////////////////////////////////////////////////////////////////
// serialize JSON
//////////////////////////////////////////////////////////////////////////////
static void Dump(benchmark::State& state, const char* filename, int indent)
{
std::ifstream f(filename);
std::string str((std::istreambuf_iterator<char>(f)), std::istreambuf_iterator<char>());
json j = json::parse(str);
while (state.KeepRunning())
{
j.dump(indent);
}
state.SetBytesProcessed(state.iterations() * j.dump(indent).size());
}
BENCHMARK_CAPTURE(Dump, jeopardy / -, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json", -1);
BENCHMARK_CAPTURE(Dump, jeopardy / 4, TEST_DATA_DIRECTORY "/jeopardy/jeopardy.json", 4);
BENCHMARK_CAPTURE(Dump, canada / -, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json", -1);
BENCHMARK_CAPTURE(Dump, canada / 4, TEST_DATA_DIRECTORY "/nativejson-benchmark/canada.json", 4);
BENCHMARK_CAPTURE(Dump, citm_catalog / -, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json", -1);
BENCHMARK_CAPTURE(Dump, citm_catalog / 4, TEST_DATA_DIRECTORY "/nativejson-benchmark/citm_catalog.json", 4);
BENCHMARK_CAPTURE(Dump, twitter / -, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json", -1);
BENCHMARK_CAPTURE(Dump, twitter / 4, TEST_DATA_DIRECTORY "/nativejson-benchmark/twitter.json", 4);
BENCHMARK_CAPTURE(Dump, floats / -, TEST_DATA_DIRECTORY "/regression/floats.json", -1);
BENCHMARK_CAPTURE(Dump, floats / 4, TEST_DATA_DIRECTORY "/regression/floats.json", 4);
BENCHMARK_CAPTURE(Dump, signed_ints / -, TEST_DATA_DIRECTORY "/regression/signed_ints.json", -1);
BENCHMARK_CAPTURE(Dump, signed_ints / 4, TEST_DATA_DIRECTORY "/regression/signed_ints.json", 4);
BENCHMARK_CAPTURE(Dump, unsigned_ints / -, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json", -1);
BENCHMARK_CAPTURE(Dump, unsigned_ints / 4, TEST_DATA_DIRECTORY "/regression/unsigned_ints.json", 4);
BENCHMARK_CAPTURE(Dump, small_signed_ints / -, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json", -1);
BENCHMARK_CAPTURE(Dump, small_signed_ints / 4, TEST_DATA_DIRECTORY "/regression/small_signed_ints.json", 4);
BENCHMARK_MAIN();
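The Google-Benchmark-based suite above separates setup from measurement with PauseTiming/ResumeTiming and reports throughput via SetBytesProcessed. One more benchmark in the same style, measuring serialization of a small in-memory value, could look like this (a hypothetical addition for illustration, shown standalone for completeness; it is not part of the file above):

#include "benchmark/benchmark.h"
#include <nlohmann/json.hpp>

using json = nlohmann::json;

static void DumpSmallObject(benchmark::State& state)
{
    const json j = {{"pi", 3.141}, {"happy", true}, {"name", "Niels"}};
    while (state.KeepRunning())
    {
        benchmark::DoNotOptimize(j.dump());  // serialize without indentation
    }
    state.SetBytesProcessed(state.iterations() * j.dump().size());
}
BENCHMARK(DumpSmallObject);
BENCHMARK_MAIN();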


@@ -1,835 +0,0 @@
# number of parallel jobs for CTest
set(N 10)
###############################################################################
# Needed tools.
###############################################################################
include(FindPython3)
find_package(Python3 COMPONENTS Interpreter)
find_program(ASTYLE_TOOL NAMES astyle)
execute_process(COMMAND ${ASTYLE_TOOL} --version OUTPUT_VARIABLE ASTYLE_TOOL_VERSION ERROR_VARIABLE ASTYLE_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" ASTYLE_TOOL_VERSION "${ASTYLE_TOOL_VERSION}")
message(STATUS "🔖 Artistic Style ${ASTYLE_TOOL_VERSION} (${ASTYLE_TOOL})")
find_program(CLANG_TOOL NAMES clang++-HEAD clang++-12 clang++-11 clang++)
execute_process(COMMAND ${CLANG_TOOL} --version OUTPUT_VARIABLE CLANG_TOOL_VERSION ERROR_VARIABLE CLANG_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" CLANG_TOOL_VERSION "${CLANG_TOOL_VERSION}")
message(STATUS "🔖 Clang ${CLANG_TOOL_VERSION} (${CLANG_TOOL})")
find_program(CLANG_TIDY_TOOL NAMES clang-tidy-12 clang-tidy-11 clang-tidy)
execute_process(COMMAND ${CLANG_TIDY_TOOL} --version OUTPUT_VARIABLE CLANG_TIDY_TOOL_VERSION ERROR_VARIABLE CLANG_TIDY_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" CLANG_TIDY_TOOL_VERSION "${CLANG_TIDY_TOOL_VERSION}")
message(STATUS "🔖 Clang-Tidy ${CLANG_TIDY_TOOL_VERSION} (${CLANG_TIDY_TOOL})")
message(STATUS "🔖 CMake ${CMAKE_VERSION} (${CMAKE_COMMAND})")
find_program(CPPCHECK_TOOL NAMES cppcheck)
execute_process(COMMAND ${CPPCHECK_TOOL} --version OUTPUT_VARIABLE CPPCHECK_TOOL_VERSION ERROR_VARIABLE CPPCHECK_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" CPPCHECK_TOOL_VERSION "${CPPCHECK_TOOL_VERSION}")
message(STATUS "🔖 Cppcheck ${CPPCHECK_TOOL_VERSION} (${CPPCHECK_TOOL})")
find_program(GCC_TOOL NAMES g++-HEAD g++-11 g++-latest)
execute_process(COMMAND ${GCC_TOOL} --version OUTPUT_VARIABLE GCC_TOOL_VERSION ERROR_VARIABLE GCC_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" GCC_TOOL_VERSION "${GCC_TOOL_VERSION}")
message(STATUS "🔖 GCC ${GCC_TOOL_VERSION} (${GCC_TOOL})")
find_program(GCOV_TOOL NAMES gcov-HEAD gcov-11 gcov-10 gcov)
execute_process(COMMAND ${GCOV_TOOL} --version OUTPUT_VARIABLE GCOV_TOOL_VERSION ERROR_VARIABLE GCOV_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" GCOV_TOOL_VERSION "${GCOV_TOOL_VERSION}")
message(STATUS "🔖 GCOV ${GCOV_TOOL_VERSION} (${GCOV_TOOL})")
find_program(GIT_TOOL NAMES git)
execute_process(COMMAND ${GIT_TOOL} --version OUTPUT_VARIABLE GIT_TOOL_VERSION ERROR_VARIABLE GIT_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" GIT_TOOL_VERSION "${GIT_TOOL_VERSION}")
message(STATUS "🔖 Git ${GIT_TOOL_VERSION} (${GIT_TOOL})")
find_program(IWYU_TOOL NAMES include-what-you-use iwyu)
execute_process(COMMAND ${IWYU_TOOL} --version OUTPUT_VARIABLE IWYU_TOOL_VERSION ERROR_VARIABLE IWYU_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" IWYU_TOOL_VERSION "${IWYU_TOOL_VERSION}")
message(STATUS "🔖 include-what-you-use ${IWYU_TOOL_VERSION} (${IWYU_TOOL})")
find_program(INFER_TOOL NAMES infer)
execute_process(COMMAND ${INFER_TOOL} --version OUTPUT_VARIABLE INFER_TOOL_VERSION ERROR_VARIABLE INFER_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" INFER_TOOL_VERSION "${INFER_TOOL_VERSION}")
message(STATUS "🔖 Infer ${INFER_TOOL_VERSION} (${INFER_TOOL})")
find_program(LCOV_TOOL NAMES lcov)
execute_process(COMMAND ${LCOV_TOOL} --version OUTPUT_VARIABLE LCOV_TOOL_VERSION ERROR_VARIABLE LCOV_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" LCOV_TOOL_VERSION "${LCOV_TOOL_VERSION}")
message(STATUS "🔖 LCOV ${LCOV_TOOL_VERSION} (${LCOV_TOOL})")
find_program(NINJA_TOOL NAMES ninja)
execute_process(COMMAND ${NINJA_TOOL} --version OUTPUT_VARIABLE NINJA_TOOL_VERSION ERROR_VARIABLE NINJA_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" NINJA_TOOL_VERSION "${NINJA_TOOL_VERSION}")
message(STATUS "🔖 Ninja ${NINJA_TOOL_VERSION} (${NINJA_TOOL})")
find_program(OCLINT_TOOL NAMES oclint-json-compilation-database)
find_program(OCLINT_VERSION_TOOL NAMES oclint)
execute_process(COMMAND ${OCLINT_VERSION_TOOL} --version OUTPUT_VARIABLE OCLINT_TOOL_VERSION ERROR_VARIABLE OCLINT_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" OCLINT_TOOL_VERSION "${OCLINT_TOOL_VERSION}")
message(STATUS "🔖 OCLint ${OCLINT_TOOL_VERSION} (${OCLINT_TOOL})")
find_program(VALGRIND_TOOL NAMES valgrind)
execute_process(COMMAND ${VALGRIND_TOOL} --version OUTPUT_VARIABLE VALGRIND_TOOL_VERSION ERROR_VARIABLE VALGRIND_TOOL_VERSION)
string(REGEX MATCH "[0-9]+(\\.[0-9]+)+" VALGRIND_TOOL_VERSION "${VALGRIND_TOOL_VERSION}")
message(STATUS "🔖 Valgrind ${VALGRIND_TOOL_VERSION} (${VALGRIND_TOOL})")
find_program(GENHTML_TOOL NAMES genhtml)
find_program(PLOG_CONVERTER_TOOL NAMES plog-converter)
find_program(PVS_STUDIO_ANALYZER_TOOL NAMES pvs-studio-analyzer)
find_program(SCAN_BUILD_TOOL NAMES scan-build-11 scan-build)
# the individual source files
file(GLOB_RECURSE SRC_FILES ${PROJECT_SOURCE_DIR}/include/nlohmann/*.hpp)
###############################################################################
# Thorough check with recent compilers
###############################################################################
# Ignored Clang warnings:
# -Wno-c++98-compat The library targets C++11.
# -Wno-c++98-compat-pedantic The library targets C++11.
# -Wno-deprecated-declarations The library contains annotations for deprecated functions.
# -Wno-extra-semi-stmt The library uses std::assert which triggers this warning.
# -Wno-padded We do not care about padding warnings.
# -Wno-covered-switch-default All switches list all cases and a default case.
# -Wno-weak-vtables The library is header-only.
set(CLANG_CXXFLAGS "-std=c++11 \
-Werror \
-Weverything \
-Wno-c++98-compat \
-Wno-c++98-compat-pedantic \
-Wno-deprecated-declarations \
-Wno-extra-semi-stmt \
-Wno-padded \
-Wno-covered-switch-default \
-Wno-weak-vtables \
")
# Ignored GCC warnings:
# -Wno-abi-tag We do not care about ABI tags.
# -Wno-aggregate-return The library uses aggregate returns.
# -Wno-long-long The library uses the long long type to interface with system functions.
# -Wno-namespaces The library uses namespaces.
# -Wno-padded We do not care about padding warnings.
# -Wno-system-headers We do not care about warnings in system headers.
# -Wno-templates The library uses templates.
set(GCC_CXXFLAGS "-std=c++11 \
-pedantic \
-Werror \
--all-warnings \
--extra-warnings \
-W \
-WNSObject-attribute \
-Wno-abi-tag \
-Waddress \
-Waddress-of-packed-member \
-Wno-aggregate-return \
-Waggressive-loop-optimizations \
-Waligned-new=all \
-Wall \
-Walloc-zero \
-Walloca \
-Wanalyzer-double-fclose \
-Wanalyzer-double-free \
-Wanalyzer-exposure-through-output-file \
-Wanalyzer-file-leak \
-Wanalyzer-free-of-non-heap \
-Wanalyzer-malloc-leak \
-Wanalyzer-mismatching-deallocation \
-Wanalyzer-null-argument \
-Wanalyzer-null-dereference \
-Wanalyzer-possible-null-argument \
-Wanalyzer-possible-null-dereference \
-Wanalyzer-shift-count-negative \
-Wanalyzer-shift-count-overflow \
-Wanalyzer-stale-setjmp-buffer \
-Wanalyzer-tainted-array-index \
-Wanalyzer-too-complex \
-Wanalyzer-unsafe-call-within-signal-handler \
-Wanalyzer-use-after-free \
-Wanalyzer-use-of-pointer-in-stale-stack-frame \
-Wanalyzer-write-to-const \
-Wanalyzer-write-to-string-literal \
-Warith-conversion \
-Warray-bounds \
-Warray-bounds=2 \
-Warray-parameter=2 \
-Wattribute-alias=2 \
-Wattribute-warning \
-Wattributes \
-Wbool-compare \
-Wbool-operation \
-Wbuiltin-declaration-mismatch \
-Wbuiltin-macro-redefined \
-Wc++0x-compat \
-Wc++11-compat \
-Wc++14-compat \
-Wc++17-compat \
-Wc++1z-compat \
-Wc++20-compat \
-Wc++2a-compat \
-Wcannot-profile \
-Wcast-align \
-Wcast-align=strict \
-Wcast-function-type \
-Wcast-qual \
-Wcatch-value=3 \
-Wchar-subscripts \
-Wclass-conversion \
-Wclass-memaccess \
-Wclobbered \
-Wcomma-subscript \
-Wcomment \
-Wcomments \
-Wconditionally-supported \
-Wconversion \
-Wconversion-null \
-Wcoverage-mismatch \
-Wcpp \
-Wctad-maybe-unsupported \
-Wctor-dtor-privacy \
-Wdangling-else \
-Wdate-time \
-Wdelete-incomplete \
-Wdelete-non-virtual-dtor \
-Wdeprecated \
-Wdeprecated-copy \
-Wdeprecated-copy-dtor \
-Wdeprecated-declarations \
-Wdeprecated-enum-enum-conversion \
-Wdeprecated-enum-float-conversion \
-Wdisabled-optimization \
-Wdiv-by-zero \
-Wdouble-promotion \
-Wduplicated-branches \
-Wduplicated-cond \
-Weffc++ \
-Wempty-body \
-Wendif-labels \
-Wenum-compare \
-Wenum-conversion \
-Wexpansion-to-defined \
-Wextra \
-Wextra-semi \
-Wfloat-conversion \
-Wfloat-equal \
-Wformat-contains-nul \
-Wformat-diag \
-Wformat-extra-args \
-Wformat-nonliteral \
-Wformat-overflow=2 \
-Wformat-security \
-Wformat-signedness \
-Wformat-truncation=2 \
-Wformat-y2k \
-Wformat-zero-length \
-Wformat=2 \
-Wframe-address \
-Wfree-nonheap-object \
-Whsa \
-Wif-not-aligned \
-Wignored-attributes \
-Wignored-qualifiers \
-Wimplicit-fallthrough=5 \
-Winaccessible-base \
-Winherited-variadic-ctor \
-Winit-list-lifetime \
-Winit-self \
-Winline \
-Wint-in-bool-context \
-Wint-to-pointer-cast \
-Winvalid-memory-model \
-Winvalid-offsetof \
-Winvalid-pch \
-Wliteral-suffix \
-Wlogical-not-parentheses \
-Wlogical-op \
-Wno-long-long \
-Wlto-type-mismatch \
-Wmain \
-Wmaybe-uninitialized \
-Wmemset-elt-size \
-Wmemset-transposed-args \
-Wmisleading-indentation \
-Wmismatched-dealloc \
-Wmismatched-new-delete \
-Wmismatched-tags \
-Wmissing-attributes \
-Wmissing-braces \
-Wmissing-declarations \
-Wmissing-field-initializers \
-Wmissing-include-dirs \
-Wmissing-profile \
-Wmultichar \
-Wmultiple-inheritance \
-Wmultistatement-macros \
-Wno-namespaces \
-Wnarrowing \
-Wnoexcept \
-Wnoexcept-type \
-Wnon-template-friend \
-Wnon-virtual-dtor \
-Wnonnull \
-Wnonnull-compare \
-Wnormalized=nfkc \
-Wnull-dereference \
-Wodr \
-Wold-style-cast \
-Wopenmp-simd \
-Woverflow \
-Woverlength-strings \
-Woverloaded-virtual \
-Wpacked \
-Wpacked-bitfield-compat \
-Wpacked-not-aligned \
-Wno-padded \
-Wparentheses \
-Wpedantic \
-Wpessimizing-move \
-Wplacement-new=2 \
-Wpmf-conversions \
-Wpointer-arith \
-Wpointer-compare \
-Wpragmas \
-Wprio-ctor-dtor \
-Wpsabi \
-Wrange-loop-construct \
-Wredundant-decls \
-Wredundant-move \
-Wredundant-tags \
-Wregister \
-Wreorder \
-Wrestrict \
-Wreturn-local-addr \
-Wreturn-type \
-Wscalar-storage-order \
-Wsequence-point \
-Wshadow=compatible-local \
-Wshadow=global \
-Wshadow=local \
-Wshift-count-negative \
-Wshift-count-overflow \
-Wshift-negative-value \
-Wshift-overflow=2 \
-Wsign-compare \
-Wsign-conversion \
-Wsign-promo \
-Wsized-deallocation \
-Wsizeof-array-argument \
-Wsizeof-array-div \
-Wsizeof-pointer-div \
-Wsizeof-pointer-memaccess \
-Wstack-protector \
-Wstrict-aliasing \
-Wstrict-aliasing=3 \
-Wstrict-null-sentinel \
-Wstrict-overflow \
-Wstrict-overflow=5 \
-Wstring-compare \
-Wstringop-overflow=4 \
-Wstringop-overread \
-Wstringop-truncation \
-Wsubobject-linkage \
-Wsuggest-attribute=cold \
-Wsuggest-attribute=const \
-Wsuggest-attribute=format \
-Wsuggest-attribute=malloc \
-Wsuggest-attribute=noreturn \
-Wsuggest-attribute=pure \
-Wsuggest-final-methods \
-Wsuggest-final-types \
-Wsuggest-override \
-Wswitch \
-Wswitch-bool \
-Wswitch-default \
-Wswitch-enum \
-Wswitch-outside-range \
-Wswitch-unreachable \
-Wsync-nand \
-Wsynth \
-Wno-system-headers \
-Wtautological-compare \
-Wno-templates \
-Wterminate \
-Wtrampolines \
-Wtrigraphs \
-Wtsan \
-Wtype-limits \
-Wundef \
-Wuninitialized \
-Wunknown-pragmas \
-Wunreachable-code \
-Wunsafe-loop-optimizations \
-Wunused \
-Wunused-but-set-parameter \
-Wunused-but-set-variable \
-Wunused-const-variable=2 \
-Wunused-function \
-Wunused-label \
-Wunused-local-typedefs \
-Wunused-macros \
-Wunused-parameter \
-Wunused-result \
-Wunused-value \
-Wunused-variable \
-Wuseless-cast \
-Wvarargs \
-Wvariadic-macros \
-Wvector-operation-performance \
-Wvexing-parse \
-Wvirtual-inheritance \
-Wvirtual-move-assign \
-Wvla \
-Wvla-parameter \
-Wvolatile \
-Wvolatile-register-var \
-Wwrite-strings \
-Wzero-as-null-pointer-constant \
-Wzero-length-bounds \
")
add_custom_target(ci_test_gcc
COMMAND CXX=${GCC_TOOL} CXXFLAGS=${GCC_CXXFLAGS} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_gcc
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_gcc
COMMAND cd ${PROJECT_BINARY_DIR}/build_gcc && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with GCC using maximal warning flags"
)
add_custom_target(ci_test_clang
COMMAND CXX=${CLANG_TOOL} CXXFLAGS=${CLANG_CXXFLAGS} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_clang
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_clang
COMMAND cd ${PROJECT_BINARY_DIR}/build_clang && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with Clang using maximal warning flags"
)
###############################################################################
# Different C++ Standards.
###############################################################################
foreach(CXX_STANDARD 11 14 17 20)
add_custom_target(ci_test_gcc_cxx${CXX_STANDARD}
COMMAND CXX=${GCC_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DCMAKE_CXX_STANDARD=${CXX_STANDARD} -DCMAKE_CXX_STANDARD_REQUIRED=ON
-DJSON_BuildTests=ON -DJSON_FastTests=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_gcc_cxx${CXX_STANDARD}
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_gcc_cxx${CXX_STANDARD}
COMMAND cd ${PROJECT_BINARY_DIR}/build_gcc_cxx${CXX_STANDARD} && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with GCC for C++${CXX_STANDARD}"
)
add_custom_target(ci_test_clang_cxx${CXX_STANDARD}
COMMAND CXX=${CLANG_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DCMAKE_CXX_STANDARD=${CXX_STANDARD} -DCMAKE_CXX_STANDARD_REQUIRED=ON
-DJSON_BuildTests=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_clang_cxx${CXX_STANDARD}
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_clang_cxx${CXX_STANDARD}
COMMAND cd ${PROJECT_BINARY_DIR}/build_clang_cxx${CXX_STANDARD} && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with Clang for C++${CXX_STANDARD}"
)
endforeach()
###############################################################################
# Disable exceptions.
###############################################################################
add_custom_target(ci_test_noexceptions
COMMAND CXX=${CLANG_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON -DCMAKE_CXX_FLAGS=-DJSON_NOEXCEPTION -DDOCTEST_TEST_FILTER=--no-throw
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_noexceptions
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_noexceptions
COMMAND cd ${PROJECT_BINARY_DIR}/build_noexceptions && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with exceptions switched off"
)
###############################################################################
# Disable implicit conversions.
###############################################################################
add_custom_target(ci_test_noimplicitconversions
COMMAND CXX=${CLANG_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON -DJSON_ImplicitConversions=OFF
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_noimplicitconversions
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_noimplicitconversions
COMMAND cd ${PROJECT_BINARY_DIR}/build_noimplicitconversions && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with implicit conversions switched off"
)
###############################################################################
# Enable improved diagnostics.
###############################################################################
add_custom_target(ci_test_diagnostics
COMMAND CXX=${CLANG_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON -DJSON_Diagnostics=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_diagnostics
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_diagnostics
COMMAND cd ${PROJECT_BINARY_DIR}/build_diagnostics && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with improved diagnostics enabled"
)
###############################################################################
# Coverage.
###############################################################################
add_custom_target(ci_test_coverage
COMMAND CXX=g++ ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja -DCMAKE_CXX_FLAGS="--coverage;-fprofile-arcs;-ftest-coverage"
-DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_coverage
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_coverage
COMMAND cd ${PROJECT_BINARY_DIR}/build_coverage && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMAND ${LCOV_TOOL} --directory . --capture --output-file json.info --rc lcov_branch_coverage=1
COMMAND ${LCOV_TOOL} -e json.info ${SRC_FILES} --output-file json.info.filtered --rc lcov_branch_coverage=1
COMMAND ${CMAKE_SOURCE_DIR}/test/thirdparty/imapdl/filterbr.py json.info.filtered > json.info.filtered.noexcept
COMMAND genhtml --title "JSON for Modern C++" --legend --demangle-cpp --output-directory html --show-details --branch-coverage json.info.filtered.noexcept
COMMENT "Compile and test with coverage"
)
###############################################################################
# Sanitizers.
###############################################################################
set(CLANG_CXX_FLAGS_SANITIZER "-g -O1 -fsanitize=address -fsanitize=undefined -fsanitize=integer -fsanitize=nullability -fno-omit-frame-pointer -fno-sanitize-recover=all -fno-sanitize=unsigned-integer-overflow -fno-sanitize=unsigned-shift-base")
add_custom_target(ci_test_clang_sanitizer
COMMAND CXX=${CLANG_TOOL} CXXFLAGS=${CLANG_CXX_FLAGS_SANITIZER} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_clang_sanitizer
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_clang_sanitizer
COMMAND cd ${PROJECT_BINARY_DIR}/build_clang_sanitizer && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Compile and test with sanitizers"
)
###############################################################################
# Check if header is amalgamated and sources are properly indented.
###############################################################################
set(ASTYLE_FLAGS --style=allman --indent=spaces=4 --indent-modifiers --indent-switches --indent-preproc-block --indent-preproc-define --indent-col1-comments --pad-oper --pad-header --align-pointer=type --align-reference=type --add-brackets --convert-tabs --close-templates --lineend=linux --preserve-date --formatted)
file(GLOB_RECURSE INDENT_FILES
${PROJECT_SOURCE_DIR}/include/nlohmann/*.hpp
${PROJECT_SOURCE_DIR}/test/src/*.cpp
${PROJECT_SOURCE_DIR}/test/src/*.hpp
${PROJECT_SOURCE_DIR}/benchmarks/src/benchmarks.cpp
${PROJECT_SOURCE_DIR}/doc/examples/*.cpp
)
add_custom_target(ci_test_amalgamation
COMMAND rm -fr ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp~
COMMAND cp ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp~
COMMAND ${Python3_EXECUTABLE} ${PROJECT_SOURCE_DIR}/third_party/amalgamate/amalgamate.py -c ${PROJECT_SOURCE_DIR}/third_party/amalgamate/config.json -s .
COMMAND ${ASTYLE_TOOL} ${ASTYLE_FLAGS} --suffix=none --quiet ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp
COMMAND diff ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp~ ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp
COMMAND ${ASTYLE_TOOL} ${ASTYLE_FLAGS} ${INDENT_FILES}
COMMAND cd ${PROJECT_SOURCE_DIR} && for FILE in `find . -name '*.orig'`\; do false \; done
WORKING_DIRECTORY ${PROJECT_SOURCE_DIR}
COMMENT "Check amalagamation and indentation"
)
###############################################################################
# Valgrind.
###############################################################################
add_custom_target(ci_test_valgrind
COMMAND CXX=${GCC_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_Valgrind=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_valgrind
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_valgrind
COMMAND cd ${PROJECT_BINARY_DIR}/build_valgrind && ${CMAKE_CTEST_COMMAND} -L valgrind --parallel ${N} --output-on-failure
COMMENT "Compile and test with Valgrind"
)
###############################################################################
# Check code with Clang Static Analyzer.
###############################################################################
set(CLANG_ANALYZER_CHECKS "fuchsia.HandleChecker,nullability.NullableDereferenced,nullability.NullablePassedToNonnull,nullability.NullableReturnedFromNonnull,optin.cplusplus.UninitializedObject,optin.cplusplus.VirtualCall,optin.mpi.MPI-Checker,optin.osx.OSObjectCStyleCast,optin.osx.cocoa.localizability.EmptyLocalizationContextChecker,optin.osx.cocoa.localizability.NonLocalizedStringChecker,optin.performance.GCDAntipattern,optin.performance.Padding,optin.portability.UnixAPI,security.FloatLoopCounter,security.insecureAPI.DeprecatedOrUnsafeBufferHandling,security.insecureAPI.bcmp,security.insecureAPI.bcopy,security.insecureAPI.bzero,security.insecureAPI.rand,security.insecureAPI.strcpy,valist.CopyToSelf,valist.Uninitialized,valist.Unterminated,webkit.NoUncountedMemberChecker,webkit.RefCntblBaseVirtualDtor,core.CallAndMessage,core.DivideZero,core.NonNullParamChecker,core.NullDereference,core.StackAddressEscape,core.UndefinedBinaryOperatorResult,core.VLASize,core.uninitialized.ArraySubscript,core.uninitialized.Assign,core.uninitialized.Branch,core.uninitialized.CapturedBlockVariable,core.uninitialized.UndefReturn,cplusplus.InnerPointer,cplusplus.Move,cplusplus.NewDelete,cplusplus.NewDeleteLeaks,cplusplus.PlacementNew,cplusplus.PureVirtualCall,deadcode.DeadStores,nullability.NullPassedToNonnull,nullability.NullReturnedFromNonnull,osx.API,osx.MIG,osx.NumberObjectConversion,osx.OSObjectRetainCount,osx.ObjCProperty,osx.SecKeychainAPI,osx.cocoa.AtSync,osx.cocoa.AutoreleaseWrite,osx.cocoa.ClassRelease,osx.cocoa.Dealloc,osx.cocoa.IncompatibleMethodTypes,osx.cocoa.Loops,osx.cocoa.MissingSuperCall,osx.cocoa.NSAutoreleasePool,osx.cocoa.NSError,osx.cocoa.NilArg,osx.cocoa.NonNilReturnValue,osx.cocoa.ObjCGenerics,osx.cocoa.RetainCount,osx.cocoa.RunLoopAutoreleaseLeak,osx.cocoa.SelfInit,osx.cocoa.SuperDealloc,osx.cocoa.UnusedIvars,osx.cocoa.VariadicMethodTypes,osx.coreFoundation.CFError,osx.coreFoundation.CFNumber,osx.coreFoundation.CFRetainRelease,osx.coreFoundation.containers.OutOfBounds,osx.coreFoundation.containers.PointerSizedValues,security.insecureAPI.UncheckedReturn,security.insecureAPI.decodeValueOfObjCType,security.insecureAPI.getpw,security.insecureAPI.gets,security.insecureAPI.mkstemp,security.insecureAPI.mktemp,security.insecureAPI.vfork,unix.API,unix.Malloc,unix.MallocSizeof,unix.MismatchedDeallocator,unix.Vfork,unix.cstring.BadSizeArg,unix.cstring.NullArg")
add_custom_target(ci_clang_analyze
COMMAND CXX=${CLANG_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_clang_analyze
COMMAND cd ${PROJECT_BINARY_DIR}/build_clang_analyze && ${SCAN_BUILD_TOOL} -enable-checker ${CLANG_ANALYZER_CHECKS} --use-c++=${CLANG_TOOL} -analyze-headers -o ${PROJECT_BINARY_DIR}/report ninja
COMMENT "Check code with Clang Analyzer"
)
###############################################################################
# Check code with Cppcheck.
###############################################################################
add_custom_target(ci_cppcheck
COMMAND ${CPPCHECK_TOOL} --enable=warning --suppress=missingReturn --inline-suppr --inconclusive --force --std=c++11 ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp --error-exitcode=1
COMMENT "Check code with Cppcheck"
)
###############################################################################
# Check code with cpplint.
###############################################################################
add_custom_target(ci_cpplint
COMMAND ${Python3_EXECUTABLE} ${CMAKE_SOURCE_DIR}/third_party/cpplint/cpplint.py --filter=-whitespace,-legal,-runtime/references,-runtime/explicit,-runtime/indentation_namespace,-readability/casting,-readability/nolint --quiet --recursive ${SRC_FILES}
COMMENT "Check code with cpplint"
)
###############################################################################
# Check code with OCLint.
###############################################################################
file(COPY ${PROJECT_SOURCE_DIR}/single_include/nlohmann/json.hpp DESTINATION ${PROJECT_BINARY_DIR}/src_single)
file(RENAME ${PROJECT_BINARY_DIR}/src_single/json.hpp ${PROJECT_BINARY_DIR}/src_single/all.cpp)
file(APPEND "${PROJECT_BINARY_DIR}/src_single/all.cpp" "\n\nint main()\n{}\n")
add_executable(single_all ${PROJECT_BINARY_DIR}/src_single/all.cpp)
target_compile_features(single_all PRIVATE cxx_std_11)
add_custom_target(ci_oclint
COMMAND ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON
-DJSON_BuildTests=OFF -DJSON_CI=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_oclint
COMMAND ${OCLINT_TOOL} -i ${PROJECT_BINARY_DIR}/build_oclint/src_single/all.cpp -p ${PROJECT_BINARY_DIR}/build_oclint --
-report-type html -enable-global-analysis --max-priority-1=0 --max-priority-2=1000 --max-priority-3=2000
--disable-rule=MultipleUnaryOperator
--disable-rule=DoubleNegative
--disable-rule=ShortVariableName
--disable-rule=GotoStatement
--disable-rule=LongLine
-o ${PROJECT_BINARY_DIR}/build_oclint/oclint_report.html
COMMENT "Check code with OCLint"
)
###############################################################################
# Check code with Clang-Tidy.
###############################################################################
add_custom_target(ci_clang_tidy
COMMAND CXX=${CLANG_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON -DCMAKE_CXX_CLANG_TIDY=${CLANG_TIDY_TOOL}
-DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_clang_tidy
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_clang_tidy
COMMENT "Check code with Clang-Tidy"
)
###############################################################################
# Check code with PVS-Studio Analyzer <https://www.viva64.com/en/pvs-studio/>.
###############################################################################
add_custom_target(ci_pvs_studio
COMMAND CXX=${CLANG_TOOL} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug
-DCMAKE_EXPORT_COMPILE_COMMANDS=ON
-DJSON_BuildTests=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_pvs_studio
COMMAND cd ${PROJECT_BINARY_DIR}/build_pvs_studio && ${PVS_STUDIO_ANALYZER_TOOL} analyze -j 10
COMMAND cd ${PROJECT_BINARY_DIR}/build_pvs_studio && ${PLOG_CONVERTER_TOOL} -a'GA:1,2;64:1;CS' -t fullhtml PVS-Studio.log -o pvs
COMMENT "Check code with PVS Studio"
)
###############################################################################
# Check code with Infer <https://fbinfer.com> static analyzer.
###############################################################################
add_custom_target(ci_infer
COMMAND mkdir -p ${PROJECT_BINARY_DIR}/build_infer
COMMAND cd ${PROJECT_BINARY_DIR}/build_infer && ${INFER_TOOL} compile -- ${CMAKE_COMMAND} -DCMAKE_BUILD_TYPE=Debug ${PROJECT_SOURCE_DIR} -DJSON_BuildTests=ON -DJSON_MultipleHeaders=ON
COMMAND cd ${PROJECT_BINARY_DIR}/build_infer && ${INFER_TOOL} run -- make
COMMENT "Check code with Infer"
)
###############################################################################
# Run test suite with previously downloaded test data.
###############################################################################
add_custom_target(ci_offline_testdata
COMMAND mkdir -p ${PROJECT_BINARY_DIR}/build_offline_testdata/test_data
COMMAND cd ${PROJECT_BINARY_DIR}/build_offline_testdata/test_data && ${GIT_TOOL} clone -c advice.detachedHead=false --branch v3.0.0 https://github.com/nlohmann/json_test_data.git --quiet --depth 1
COMMAND ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_FastTests=ON -DJSON_TestDataDirectory=${PROJECT_BINARY_DIR}/build_offline_testdata/test_data/json_test_data
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_offline_testdata
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_offline_testdata
COMMAND cd ${PROJECT_BINARY_DIR}/build_offline_testdata && ${CMAKE_CTEST_COMMAND} --parallel ${N} --output-on-failure
COMMENT "Check code with previously downloaded test data"
)
###############################################################################
# Run test suite when project was not checked out from Git
###############################################################################
add_custom_target(ci_non_git_tests
COMMAND mkdir -p ${PROJECT_BINARY_DIR}/build_non_git_tests/sources
COMMAND cd ${PROJECT_SOURCE_DIR} && for FILE in `${GIT_TOOL} ls-tree --name-only HEAD`\; do cp -r $$FILE ${PROJECT_BINARY_DIR}/build_non_git_tests/sources \; done
COMMAND ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_FastTests=ON
-S${PROJECT_BINARY_DIR}/build_non_git_tests/sources -B${PROJECT_BINARY_DIR}/build_non_git_tests
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_non_git_tests
COMMAND cd ${PROJECT_BINARY_DIR}/build_non_git_tests && ${CMAKE_CTEST_COMMAND} --parallel ${N} -LE git_required --output-on-failure
COMMENT "Check code when project was not checked out from Git"
)
###############################################################################
# Run test suite and exclude tests that change installed files
###############################################################################
add_custom_target(ci_reproducible_tests
COMMAND ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_FastTests=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_reproducible_tests
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_reproducible_tests
COMMAND cd ${PROJECT_BINARY_DIR}/build_reproducible_tests && ${CMAKE_CTEST_COMMAND} --parallel ${N} -LE not_reproducible --output-on-failure
COMMENT "Check code and exclude tests that change installed files"
)
###############################################################################
# Check if every header in the include folder includes sufficient headers to
# be compiled individually.
###############################################################################
set(iwyu_path_and_options ${IWYU_TOOL} -Xiwyu --max_line_length=300)
foreach(SRC_FILE ${SRC_FILES})
# get relative path of the header file
file(RELATIVE_PATH RELATIVE_SRC_FILE "${PROJECT_SOURCE_DIR}/include/nlohmann" "${SRC_FILE}")
# replace slashes and strip suffix
string(REPLACE "/" "_" RELATIVE_SRC_FILE "${RELATIVE_SRC_FILE}")
string(REPLACE ".hpp" "" RELATIVE_SRC_FILE "${RELATIVE_SRC_FILE}")
# create code file
file(WRITE "${PROJECT_BINARY_DIR}/src_single/${RELATIVE_SRC_FILE}.cpp" "#include \"${SRC_FILE}\" // IWYU pragma: keep\n\nint main()\n{}\n")
# create executable
add_executable(single_${RELATIVE_SRC_FILE} EXCLUDE_FROM_ALL ${PROJECT_BINARY_DIR}/src_single/${RELATIVE_SRC_FILE}.cpp)
target_include_directories(single_${RELATIVE_SRC_FILE} PRIVATE ${PROJECT_SOURCE_DIR}/include)
target_compile_features(single_${RELATIVE_SRC_FILE} PRIVATE cxx_std_11)
set_property(TARGET single_${RELATIVE_SRC_FILE} PROPERTY CXX_INCLUDE_WHAT_YOU_USE "${iwyu_path_and_options}")
# remember binary for ci_single_binaries target
list(APPEND single_binaries single_${RELATIVE_SRC_FILE})
endforeach()
add_custom_target(ci_single_binaries
DEPENDS ${single_binaries}
COMMENT "Check if headers are self-contained"
)
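The loop above amounts to compiling one tiny translation unit per header and attaching include-what-you-use to it via the `CXX_INCLUDE_WHAT_YOU_USE` target property. A minimal standalone sketch of that pattern, with illustrative file and target names:

```cmake
# Sketch: run include-what-you-use on a single generated translation unit.
# The tool is assumed to be installed and discoverable on PATH.
find_program(IWYU_EXE NAMES include-what-you-use iwyu)
add_executable(header_self_contained EXCLUDE_FROM_ALL check_header.cpp)
set_property(TARGET header_self_contained
             PROPERTY CXX_INCLUDE_WHAT_YOU_USE "${IWYU_EXE};-Xiwyu;--max_line_length=300")
```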
###############################################################################
# Benchmarks
###############################################################################
add_custom_target(ci_benchmarks
COMMAND ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Release -GNinja
-S${PROJECT_SOURCE_DIR}/benchmarks -B${PROJECT_BINARY_DIR}/build_benchmarks
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_benchmarks --target json_benchmarks
COMMAND cd ${PROJECT_BINARY_DIR}/build_benchmarks && ./json_benchmarks
COMMENT "Run benchmarks"
)
###############################################################################
# CMake flags
###############################################################################
if (APPLE)
set(CMAKE_310_BINARY ${PROJECT_BINARY_DIR}/cmake-3.1.0-Darwin64/CMake.app/Contents/bin/cmake)
add_custom_command(
OUTPUT ${CMAKE_310_BINARY}
COMMAND wget https://github.com/Kitware/CMake/releases/download/v3.1.0/cmake-3.1.0-Darwin64.tar.gz
COMMAND tar xfz cmake-3.1.0-Darwin64.tar.gz
COMMAND rm cmake-3.1.0-Darwin64.tar.gz
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Download CMake 3.1.0"
)
else()
set(CMAKE_310_BINARY ${PROJECT_BINARY_DIR}/cmake-3.1.0-Linux-x86_64/bin/cmake)
add_custom_command(
OUTPUT ${CMAKE_310_BINARY}
COMMAND wget https://github.com/Kitware/CMake/releases/download/v3.1.0/cmake-3.1.0-Linux-x86_64.tar.gz
COMMAND tar xfz cmake-3.1.0-Linux-x86_64.tar.gz
COMMAND rm cmake-3.1.0-Linux-x86_64.tar.gz
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
COMMENT "Download CMake 3.1.0"
)
endif()
set(JSON_CMAKE_FLAGS "JSON_BuildTests;JSON_Install;JSON_MultipleHeaders;JSON_ImplicitConversions;JSON_Valgrind;JSON_Diagnostics;JSON_SystemInclude")
foreach(JSON_CMAKE_FLAG ${JSON_CMAKE_FLAGS})
string(TOLOWER "ci_cmake_flag_${JSON_CMAKE_FLAG}" JSON_CMAKE_FLAG_TARGET)
add_custom_target("${JSON_CMAKE_FLAG_TARGET}"
COMMENT "Check CMake flag ${JSON_CMAKE_FLAG} (CMake ${CMAKE_VERSION})"
COMMAND ${CMAKE_COMMAND}
-Werror=dev
-D${JSON_CMAKE_FLAG}=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_${JSON_CMAKE_FLAG_TARGET}
)
add_custom_target("${JSON_CMAKE_FLAG_TARGET}_31"
COMMENT "Check CMake flag ${JSON_CMAKE_FLAG} (CMake 3.1)"
COMMAND mkdir ${PROJECT_BINARY_DIR}/build_${JSON_CMAKE_FLAG_TARGET}_31
COMMAND cd ${PROJECT_BINARY_DIR}/build_${JSON_CMAKE_FLAG_TARGET}_31 && ${CMAKE_310_BINARY}
-Werror=dev ${PROJECT_SOURCE_DIR}
-D${JSON_CMAKE_FLAG}=ON
-DCMAKE_CXX_COMPILE_FEATURES="cxx_range_for" -DCMAKE_CXX_FLAGS="-std=gnu++11"
DEPENDS ${CMAKE_310_BINARY}
)
list(APPEND JSON_CMAKE_FLAG_TARGETS ${JSON_CMAKE_FLAG_TARGET} ${JSON_CMAKE_FLAG_TARGET}_31)
list(APPEND JSON_CMAKE_FLAG_BUILD_DIRS ${PROJECT_BINARY_DIR}/build_${JSON_CMAKE_FLAG_TARGET} ${PROJECT_BINARY_DIR}/build_${JSON_CMAKE_FLAG_TARGET}_31)
endforeach()
add_custom_target(ci_cmake_flags
DEPENDS ${JSON_CMAKE_FLAG_TARGETS}
COMMENT "Check CMake flags"
)
###############################################################################
# Use more installed compilers.
###############################################################################
foreach(COMPILER g++-4.8 g++-4.9 g++-5 g++-7 g++-8 g++-9 g++-10 clang++-3.5 clang++-3.6 clang++-3.7 clang++-3.8 clang++-3.9 clang++-4.0 clang++-5.0 clang++-6.0 clang++-7 clang++-8 clang++-9 clang++-10 clang++-11)
find_program(COMPILER_TOOL NAMES ${COMPILER})
if (COMPILER_TOOL)
add_custom_target(ci_test_compiler_${COMPILER}
COMMAND CXX=${COMPILER} ${CMAKE_COMMAND}
-DCMAKE_BUILD_TYPE=Debug -GNinja
-DJSON_BuildTests=ON -DJSON_FastTests=ON
-S${PROJECT_SOURCE_DIR} -B${PROJECT_BINARY_DIR}/build_compiler_${COMPILER}
COMMAND ${CMAKE_COMMAND} --build ${PROJECT_BINARY_DIR}/build_compiler_${COMPILER}
COMMAND cd ${PROJECT_BINARY_DIR}/build_compiler_${COMPILER} && ${CMAKE_CTEST_COMMAND} --parallel ${N} --exclude-regex "test-unicode" --output-on-failure
COMMENT "Compile and test with ${COMPILER}"
)
endif()
unset(COMPILER_TOOL CACHE)
endforeach()
###############################################################################
# Clean up all generated files.
###############################################################################
add_custom_target(ci_clean
COMMAND rm -fr ${PROJECT_BINARY_DIR}/build_* cmake-3.1.0-Darwin64 ${JSON_CMAKE_FLAG_BUILD_DIRS} ${single_binaries}
COMMENT "Clean generated directories"
)
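All of the `ci_*` targets above are plain custom targets, so they only exist once this file is included from the top-level build. A hedged sketch of that wiring (the option name `JSON_CI` is inferred from the `ci_oclint` target above, not confirmed by this diff):

```cmake
# Sketch: opt-in inclusion of the CI targets from the top-level CMakeLists.txt.
option(JSON_CI "Enable CI targets" OFF)
if(JSON_CI)
    include(cmake/ci.cmake)
endif()
```

With such wiring, `cmake -S . -B build -DJSON_CI=ON` followed by `cmake --build build --target ci_cppcheck` (or any other `ci_*` target) would run a single check.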

View File

@@ -1,15 +1,7 @@
include(FindPackageHandleStandardArgs)
set(${CMAKE_FIND_PACKAGE_NAME}_CONFIG ${CMAKE_CURRENT_LIST_FILE})
find_package_handle_standard_args(@PROJECT_NAME@ CONFIG_MODE)
@PACKAGE_INIT@
set_and_check(JSON_INCLUDE_DIR "@PACKAGE_JSON_INCLUDE_DESTINATION@")
if(NOT TARGET @PROJECT_NAME@::@NLOHMANN_JSON_TARGET_NAME@)
include("${CMAKE_CURRENT_LIST_DIR}/@NLOHMANN_JSON_TARGETS_EXPORT_NAME@.cmake")
if((NOT TARGET @NLOHMANN_JSON_TARGET_NAME@) AND
(NOT @PROJECT_NAME@_FIND_VERSION OR
@PROJECT_NAME@_FIND_VERSION VERSION_LESS 3.2.0))
add_library(@NLOHMANN_JSON_TARGET_NAME@ INTERFACE IMPORTED)
set_target_properties(@NLOHMANN_JSON_TARGET_NAME@ PROPERTIES
INTERFACE_LINK_LIBRARIES @PROJECT_NAME@::@NLOHMANN_JSON_TARGET_NAME@
)
endif()
endif()
cmake_policy(PUSH)
cmake_policy(SET CMP0024 OLD)
include(${CMAKE_CURRENT_LIST_DIR}/@JSON_TARGETS_FILENAME@)
cmake_policy(POP)
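For context, this package config is what exposes the exported target to downstream projects. A typical consumer-side snippet (not part of this diff) looks like:

```cmake
# Consumer-side sketch: use the installed package from another project.
find_package(nlohmann_json 3.2.0 REQUIRED)
add_executable(app main.cpp)
target_link_libraries(app PRIVATE nlohmann_json::nlohmann_json)
```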

View File

@@ -1,56 +0,0 @@
set(JSON_TEST_DATA_URL https://github.com/nlohmann/json_test_data)
set(JSON_TEST_DATA_VERSION 3.0.0)
# if variable is set, use test data from given directory rather than downloading them
if(JSON_TestDataDirectory)
message(STATUS "Using test data in ${JSON_TestDataDirectory}.")
add_custom_target(download_test_data)
file(WRITE ${CMAKE_BINARY_DIR}/include/test_data.hpp "#define TEST_DATA_DIRECTORY \"${JSON_TestDataDirectory}\"\n")
else()
find_package(Git)
# target to download test data
add_custom_target(download_test_data
COMMAND test -d json_test_data || ${GIT_EXECUTABLE} clone -c advice.detachedHead=false --branch v${JSON_TEST_DATA_VERSION} ${JSON_TEST_DATA_URL}.git --quiet --depth 1
COMMENT "Downloading test data from ${JSON_TEST_DATA_URL} (v${JSON_TEST_DATA_VERSION})"
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
)
# create a header with the path to the downloaded test data
file(WRITE ${CMAKE_BINARY_DIR}/include/test_data.hpp "#define TEST_DATA_DIRECTORY \"${CMAKE_BINARY_DIR}/json_test_data\"\n")
endif()
# determine the operating system (for debug and support purposes)
find_program(UNAME_COMMAND uname)
find_program(VER_COMMAND ver)
find_program(LSB_RELEASE_COMMAND lsb_release)
find_program(SW_VERS_COMMAND sw_vers)
set(OS_VERSION_STRINGS "${CMAKE_SYSTEM}")
if (VER_COMMAND)
execute_process(COMMAND ${VER_COMMAND} OUTPUT_VARIABLE VER_COMMAND_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE)
set(OS_VERSION_STRINGS "${OS_VERSION_STRINGS}; ${VER_COMMAND_RESULT}")
endif()
if (SW_VERS_COMMAND)
execute_process(COMMAND ${SW_VERS_COMMAND} OUTPUT_VARIABLE SW_VERS_COMMAND_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET)
string(REGEX REPLACE "[ ]*\n" "; " SW_VERS_COMMAND_RESULT "${SW_VERS_COMMAND_RESULT}")
set(OS_VERSION_STRINGS "${OS_VERSION_STRINGS}; ${SW_VERS_COMMAND_RESULT}")
endif()
if (LSB_RELEASE_COMMAND)
execute_process(COMMAND ${LSB_RELEASE_COMMAND} -a OUTPUT_VARIABLE LSB_RELEASE_COMMAND_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET)
string(REGEX REPLACE "[ ]*\n" "; " LSB_RELEASE_COMMAND_RESULT "${LSB_RELEASE_COMMAND_RESULT}")
set(OS_VERSION_STRINGS "${OS_VERSION_STRINGS}; ${LSB_RELEASE_COMMAND_RESULT}")
endif()
if (UNAME_COMMAND)
execute_process(COMMAND ${UNAME_COMMAND} -a OUTPUT_VARIABLE UNAME_COMMAND_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_QUIET)
set(OS_VERSION_STRINGS "${OS_VERSION_STRINGS}; ${UNAME_COMMAND_RESULT}")
endif()
message(STATUS "Operating system: ${OS_VERSION_STRINGS}")
# determine the compiler (for debug and support purposes)
if (MSVC)
execute_process(COMMAND ${CMAKE_CXX_COMPILER} OUTPUT_VARIABLE CXX_VERSION_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE ERROR_VARIABLE CXX_VERSION_RESULT ERROR_STRIP_TRAILING_WHITESPACE)
set(CXX_VERSION_RESULT "${CXX_VERSION_RESULT}; MSVC_VERSION=${MSVC_VERSION}; MSVC_TOOLSET_VERSION=${MSVC_TOOLSET_VERSION}")
else()
execute_process(COMMAND ${CMAKE_CXX_COMPILER} --version OUTPUT_VARIABLE CXX_VERSION_RESULT OUTPUT_STRIP_TRAILING_WHITESPACE)
endif()
string(REGEX REPLACE "[ ]*\n" "; " CXX_VERSION_RESULT "${CXX_VERSION_RESULT}")
message(STATUS "Compiler: ${CXX_VERSION_RESULT}")

View File

@@ -1,20 +0,0 @@
# This is essentially cmake's BasicConfigVersion-SameMajorVersion.cmake.in but
# without the 32/64-bit check. Since json is a header-only library, it doesn't
# matter if it was built on a different platform than what it is used on (see
# https://github.com/nlohmann/json/issues/1697).
set(PACKAGE_VERSION "@PROJECT_VERSION@")
if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
set(PACKAGE_VERSION_COMPATIBLE FALSE)
else()
if(PACKAGE_FIND_VERSION_MAJOR STREQUAL "@PROJECT_VERSION_MAJOR@")
set(PACKAGE_VERSION_COMPATIBLE TRUE)
else()
set(PACKAGE_VERSION_COMPATIBLE FALSE)
endif()
if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
set(PACKAGE_VERSION_EXACT TRUE)
endif()
endif()

View File

@@ -1,4 +0,0 @@
Name: ${PROJECT_NAME}
Description: JSON for Modern C++
Version: ${PROJECT_VERSION}
Cflags: -I${CMAKE_INSTALL_FULL_INCLUDEDIR}

View File

@@ -1,18 +1,17 @@
# Doxyfile 1.9.1
# Doxyfile 1.8.9.1
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
DOXYFILE_ENCODING = UTF-8
PROJECT_NAME = "JSON for Modern C++"
PROJECT_NUMBER = 3.10.0
PROJECT_BRIEF =
PROJECT_NUMBER = 2.1.0
PROJECT_BRIEF =
PROJECT_LOGO =
OUTPUT_DIRECTORY = .
CREATE_SUBDIRS = NO
ALLOW_UNICODE_NAMES = NO
OUTPUT_LANGUAGE = English
OUTPUT_TEXT_DIRECTION = None
BRIEF_MEMBER_DESC = YES
REPEAT_BRIEF = NO
ABBREVIATE_BRIEF =
@@ -23,51 +22,43 @@ STRIP_FROM_PATH =
STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = NO
JAVADOC_BANNER = NO
QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
PYTHON_DOCSTRING = YES
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = YES
TAB_SIZE = 4
ALIASES = "complexity=@par Complexity^^" \
"liveexample{2}=@par Example^^ \1 ^^ @includelineno \2.cpp \n Output (play with this example @htmlinclude \2.link):^^ @verbinclude \2.output ^^ The <a href= https://github.com/nlohmann/json/blob/develop/doc/examples/\2.cpp>example code</a> above can be translated with @verbatim g++ -std=c++11 -Isingle_include doc/examples/\2.cpp -o \2 @endverbatim" \
"requirement=@par Requirements^^" \
"exceptionsafety=@par Exception safety^^" \
"iterators=@par Iterator validity^^"
ALIASES = "complexity=@par Complexity\n"
ALIASES += liveexample{2}="@par Example\n \1 \n @includelineno \2.cpp \n Output (play with this example @htmlinclude \2.link):\n @verbinclude \2.output \n The example code above can be translated with @verbatim g++ -std=c++11 -Isrc doc/examples/\2.cpp -o \2 @endverbatim"
ALIASES += requirement="@par Requirements\n"
ALIASES += exceptionsafety="@par Exception safety\n"
TCL_SUBST =
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = NO
OPTIMIZE_FOR_FORTRAN = NO
OPTIMIZE_OUTPUT_VHDL = NO
OPTIMIZE_OUTPUT_SLICE = NO
EXTENSION_MAPPING =
MARKDOWN_SUPPORT = YES
TOC_INCLUDE_HEADINGS = 0
AUTOLINK_SUPPORT = NO
BUILTIN_STL_SUPPORT = YES
CPP_CLI_SUPPORT = NO
SIP_SUPPORT = NO
IDL_PROPERTY_SUPPORT = YES
DISTRIBUTE_GROUP_DOC = NO
GROUP_NESTED_COMPOUNDS = NO
SUBGROUPING = YES
INLINE_GROUPED_CLASSES = NO
INLINE_SIMPLE_STRUCTS = NO
TYPEDEF_HIDES_STRUCT = NO
LOOKUP_CACHE_SIZE = 0
NUM_PROC_THREADS = 1
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
EXTRACT_ALL = YES
EXTRACT_PRIVATE = NO
EXTRACT_PRIV_VIRTUAL = NO
EXTRACT_PACKAGE = YES
EXTRACT_STATIC = YES
EXTRACT_LOCAL_CLASSES = YES
EXTRACT_LOCAL_METHODS = YES
EXTRACT_ANON_NSPACES = YES
RESOLVE_UNNAMED_PARAMS = YES
HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
HIDE_FRIEND_COMPOUNDS = NO
@@ -106,14 +97,12 @@ WARNINGS = YES
WARN_IF_UNDOCUMENTED = YES
WARN_IF_DOC_ERROR = YES
WARN_NO_PARAMDOC = YES
WARN_AS_ERROR = NO
WARN_FORMAT = "$file:$line: $text"
WARN_LOGFILE =
#---------------------------------------------------------------------------
# Configuration options related to the input files
#---------------------------------------------------------------------------
INPUT = ../single_include/nlohmann/json.hpp \
index.md
INPUT = ../src/json.hpp index.md
INPUT_ENCODING = UTF-8
FILE_PATTERNS =
RECURSIVE = NO
@@ -146,6 +135,7 @@ VERBATIM_HEADERS = NO
# Configuration options related to the alphabetical class index
#---------------------------------------------------------------------------
ALPHABETICAL_INDEX = YES
COLS_IN_ALPHA_INDEX = 5
IGNORE_PREFIX =
#---------------------------------------------------------------------------
# Configuration options related to the HTML output
@@ -162,14 +152,13 @@ HTML_COLORSTYLE_HUE = 220
HTML_COLORSTYLE_SAT = 100
HTML_COLORSTYLE_GAMMA = 80
HTML_TIMESTAMP = YES
HTML_DYNAMIC_MENUS = YES
HTML_DYNAMIC_SECTIONS = YES
HTML_INDEX_NUM_ENTRIES = 100
GENERATE_DOCSET = YES
DOCSET_FEEDNAME = "Doxygen generated docs"
DOCSET_BUNDLE_ID = me.nlohmann.json
DOCSET_PUBLISHER_ID = me.nlohmann
DOCSET_PUBLISHER_NAME = NielsLohmann
DOCSET_PUBLISHER_NAME = Niels Lohmann
GENERATE_HTMLHELP = NO
CHM_FILE =
HHC_LOCATION =
@@ -192,10 +181,8 @@ GENERATE_TREEVIEW = NO
ENUM_VALUES_PER_LINE = 4
TREEVIEW_WIDTH = 250
EXT_LINKS_IN_WINDOW = NO
HTML_FORMULA_FORMAT = png
FORMULA_FONTSIZE = 10
FORMULA_TRANSPARENT = YES
FORMULA_MACROFILE =
USE_MATHJAX = NO
MATHJAX_FORMAT = HTML-CSS
MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
@@ -215,7 +202,6 @@ GENERATE_LATEX = NO
LATEX_OUTPUT = latex
LATEX_CMD_NAME = latex
MAKEINDEX_CMD_NAME = makeindex
LATEX_MAKEINDEX_CMD = \makeindex
COMPACT_LATEX = NO
PAPER_TYPE = a4
EXTRA_PACKAGES =
@@ -229,8 +215,6 @@ LATEX_BATCHMODE = NO
LATEX_HIDE_INDICES = NO
LATEX_SOURCE_CODE = NO
LATEX_BIB_STYLE = plain
LATEX_TIMESTAMP = NO
LATEX_EMOJI_DIRECTORY =
#---------------------------------------------------------------------------
# Configuration options related to the RTF output
#---------------------------------------------------------------------------
@@ -255,7 +239,6 @@ MAN_LINKS = NO
GENERATE_XML = YES
XML_OUTPUT = xml
XML_PROGRAMLISTING = YES
XML_NS_MEMB_FILE_SCOPE = NO
#---------------------------------------------------------------------------
# Configuration options related to the DOCBOOK output
#---------------------------------------------------------------------------
@@ -289,14 +272,16 @@ SKIP_FUNCTION_MACROS = YES
# Configuration options related to external references
#---------------------------------------------------------------------------
TAGFILES =
GENERATE_TAGFILE = html/nlohmann_json.tag
GENERATE_TAGFILE =
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
EXTERNAL_PAGES = YES
PERL_PATH = /usr/bin/perl
#---------------------------------------------------------------------------
# Configuration options related to the dot tool
#---------------------------------------------------------------------------
CLASS_DIAGRAMS = NO
MSCGEN_PATH =
DIA_PATH =
HIDE_UNDOC_RELATIONS = YES
HAVE_DOT = YES
@@ -309,8 +294,6 @@ COLLABORATION_GRAPH = NO
GROUP_GRAPHS = YES
UML_LOOK = YES
UML_LIMIT_NUM_FIELDS = 10
DOT_UML_DETAILS = NO
DOT_WRAP_THRESHOLD = 17
TEMPLATE_RELATIONS = NO
INCLUDE_GRAPH = NO
INCLUDED_BY_GRAPH = NO
@@ -325,7 +308,6 @@ DOTFILE_DIRS =
MSCFILE_DIRS =
DIAFILE_DIRS =
PLANTUML_JAR_PATH =
PLANTUML_CFG_FILE =
PLANTUML_INCLUDE_PATH =
DOT_GRAPH_MAX_NODES = 50
MAX_DOT_GRAPH_DEPTH = 0

View File

@@ -1,4 +1,4 @@
SRCDIR = ../single_include
SRCDIR = ../src
SED:=$(shell command -v gsed || which sed)
all: doxygen
@@ -28,8 +28,8 @@ EXAMPLES = $(wildcard examples/*.cpp)
%.link: %.cpp
rm -fr tmp
mkdir tmp
cp -r $(SRCDIR)/nlohmann tmp
python2 scripts/send_to_wandbox.py tmp $< > $@.tmp
cp $(SRCDIR)/json.hpp tmp
scripts/send_to_wandbox.py tmp $< > $@.tmp
/bin/echo -n "<a target=\"_blank\" href=\"`cat $@.tmp`\"><b>online</b></a>" > $@
rm -fr tmp $@.tmp
@@ -43,9 +43,7 @@ check_output: $(EXAMPLES:.cpp=.test)
clean:
rm -fr me.nlohmann.json.docset html xml $(EXAMPLES:.cpp=)
$(MAKE) clean -C docset
$(MAKE) clean -C mkdocs
rm -fr me.nlohmann.json.docset html $(EXAMPLES:.cpp=)
##########################################################################
@@ -55,19 +53,14 @@ clean:
# create Doxygen documentation
doxygen: create_output create_links
doxygen
$(SED) -i 's@&lt; ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberFloatType, AllocatorType, JSONSerializer &gt;@@g' html/*.html
$(SED) -i 's@&lt;&#160;ObjectType,&#160;ArrayType,&#160;StringType,&#160;BooleanType,&#160;NumberIntegerType,&#160;NumberFloatType,&#160;AllocatorType&#160;JSONSerializer&#160;&gt;@@g' html/*.html
$(SED) -i 's@&lt; ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberUnsignedType, NumberFloatType, AllocatorType, JSONSerializer &gt;@@g' html/*.html
$(SED) -i 's@< ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberUnsignedType, NumberFloatType, AllocatorType, JSONSerializer >@@g' html/*.html
$(SED) -i 's@&lt;&#160;ObjectType,&#160;ArrayType,&#160;StringType,&#160;BooleanType,&#160;NumberIntegerType,&#160;NumberUnsignedType,&#160;NumberFloatType,&#160;AllocatorType&#160;JSONSerializer&#160;&gt;@@g' html/*.html
$(SED) -i 's@template&lt;template&lt; typename U, typename V, typename... Args &gt; class ObjectType = std::map, template&lt; typename U, typename... Args &gt; class ArrayType = std::vector, class StringType = std::string, class BooleanType = bool, class NumberIntegerType = std::int64_t, class NumberUnsignedType = std::uint64_t, class NumberFloatType = double, template&lt; typename U &gt; class AllocatorType = std::allocator, template&lt; typename T, typename SFINAE=void &gt; class JSONSerializer = adl_serializer&gt;@@g' html/*.html
$(SED) -i 's@&lt; ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberUnsignedType, NumberFloatType, AllocatorType, JSONSerializer &gt;@@g' html/*.html
$(SED) -i 's@&lt;&#160;ObjectType,&#160;ArrayType,&#160;StringType,&#160;BooleanType,&#160;NumberIntegerType,&#160;NumberUnsignedType,&#160;NumberFloatType,&#160;AllocatorType,&#160;JSONSerializer&#160;&gt;@@g' html/*.html
$(SED) -i 's@JSON_HEDLEY_RETURNS_NON_NULL@@g' html/*.html
$(SED) -i 's@JSON_HEDLEY_WARN_UNUSED_RESULT@@g' html/*.html
$(SED) -i 's@&lt; ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberFloatType, AllocatorType &gt;@@g' html/*.html
$(SED) -i 's@&lt;&#160;ObjectType,&#160;ArrayType,&#160;StringType,&#160;BooleanType,&#160;NumberIntegerType,&#160;NumberFloatType,&#160;AllocatorType&#160;&gt;@@g' html/*.html
$(SED) -i 's@&lt; ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberUnsignedType, NumberFloatType, AllocatorType &gt;@@g' html/*.html
$(SED) -i 's@< ObjectType, ArrayType, StringType, BooleanType, NumberIntegerType, NumberUnsignedType, NumberFloatType, AllocatorType >@@g' html/*.html
$(SED) -i 's@&lt;&#160;ObjectType,&#160;ArrayType,&#160;StringType,&#160;BooleanType,&#160;NumberIntegerType,&#160;NumberUnsignedType,&#160;NumberFloatType,&#160;AllocatorType&#160;&gt;@@g' html/*.html
upload: clean doxygen check_output
scripts/git-update-ghpages nlohmann/json html
cd html ; ../scripts/git-update-ghpages nlohmann/json
rm -fr html
open http://nlohmann.github.io/json/

Binary file not shown (previous image: 1.3 MiB)

View File

@@ -1,20 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleIdentifier</key>
<string>nlohmann_json</string>
<key>CFBundleName</key>
<string>JSON for Modern C++</string>
<key>DocSetPlatformFamily</key>
<string>json</string>
<key>isDashDocset</key>
<true/>
<key>dashIndexFilePath</key>
<string>index.html</string>
<key>DashDocSetFallbackURL</key>
<string>https://nlohmann.github.io/json/</string>
<key>isJavaScriptEnabled</key>
<true/>
</dict>
</plist>

View File

@@ -1,21 +0,0 @@
nlohmann_json.docset: Info.plist docSet.sql
$(MAKE) clean
mkdir -p nlohmann_json.docset/Contents/Resources/Documents/
cp info.plist nlohmann_json.docset/Contents
# build and copy documentation
$(MAKE) build -C ../mkdocs
cp -r ../mkdocs/site/* nlohmann_json.docset/Contents/Resources/Documents
# patch CSS to hide navigation items
echo "\n\nheader, footer, navi, div.md-sidebar--primary, nav.md-tabs--active, a.md-content__button { display: none; }" >> nlohmann_json.docset/Contents/Resources/Documents/assets/stylesheets/main.b5d04df8.min.css
# fix spacing
echo "\n\ndiv.md-sidebar div.md-sidebar--secondary, div.md-main__inner { top: 0; margin-top: 0 }" >> nlohmann_json.docset/Contents/Resources/Documents/assets/stylesheets/main.b5d04df8.min.css
# remove "JSON for Modern C++" from page titles
find nlohmann_json.docset/Contents/Resources/Documents -type f -exec gsed -i 's| - JSON for Modern C++</title>|</title>|' {} +
# clean up
rm nlohmann_json.docset/Contents/Resources/Documents/hooks.py
rm nlohmann_json.docset/Contents/Resources/Documents/sitemap.*
# generate index
sqlite3 nlohmann_json.docset/Contents/Resources/docSet.dsidx < docSet.sql
clean:
rm -fr nlohmann_json.docset

View File

@@ -1,13 +0,0 @@
# docset
The folder contains the required files to create a [docset](https://kapeli.com/docsets) which can be used in
documentation browsers like [Dash](https://kapeli.com/dash), [Velocity](https://velocity.silverlakesoftware.com), or
[Zeal](https://zealdocs.org).
The docset can be created with
```sh
make nlohmann_json.docset
```
The generated folder `nlohmann_json.docset` can then be opened in the documentation browser.

View File

@@ -1,155 +0,0 @@
DROP TABLE IF EXISTS searchIndex;
CREATE TABLE searchIndex(id INTEGER PRIMARY KEY, name TEXT, type TEXT, path TEXT);
CREATE UNIQUE INDEX anchor ON searchIndex (name, type, path);
-- API
INSERT INTO searchIndex(name, type, path) VALUES ('accept', 'Function', 'api/basic_json/accept/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('adl_serializer', 'Class', 'api/adl_serializer/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('array', 'Function', 'api/basic_json/array/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('array_t', 'Type', 'api/basic_json/array_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('at', 'Method', 'api/basic_json/at/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('back', 'Method', 'api/basic_json/back/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('basic_json', 'Class', 'api/basic_json/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('basic_json', 'Constructor', 'api/basic_json/basic_json/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('begin', 'Method', 'api/basic_json/begin/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('binary', 'Function', 'api/basic_json/binary/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('binary_t', 'Type', 'api/basic_json/binary_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('boolean_t', 'Type', 'api/basic_json/boolean_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('cbegin', 'Method', 'api/basic_json/cbegin/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('cbor_tag_handler_t', 'Enum', 'api/basic_json/cbor_tag_handler_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('cend', 'Method', 'api/basic_json/cend/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('clear', 'Method', 'api/basic_json/clear/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('contains', 'Method', 'api/basic_json/contains/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('count', 'Method', 'api/basic_json/count/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('crbegin', 'Method', 'api/basic_json/crbegin/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('crend', 'Method', 'api/basic_json/crend/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('diff', 'Function', 'api/basic_json/diff/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('dump', 'Method', 'api/basic_json/dump/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('emplace', 'Method', 'api/basic_json/emplace/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('emplace_back', 'Method', 'api/basic_json/emplace_back/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('empty', 'Method', 'api/basic_json/empty/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('end', 'Method', 'api/basic_json/end/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('erase', 'Method', 'api/basic_json/erase/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('error_handler_t', 'Enum', 'api/basic_json/error_handler_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('exception', 'Class', 'api/basic_json/exception/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('find', 'Method', 'api/basic_json/find/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('flatten', 'Method', 'api/basic_json/flatten/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('from_bson', 'Function', 'api/basic_json/from_bson/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('from_cbor', 'Function', 'api/basic_json/from_cbor/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('from_msgpack', 'Function', 'api/basic_json/from_msgpack/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('from_ubjson', 'Function', 'api/basic_json/from_ubjson/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('front', 'Method', 'api/basic_json/front/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('get', 'Method', 'api/basic_json/get/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('get_allocator', 'Function', 'api/basic_json/get_allocator/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('get_binary', 'Method', 'api/basic_json/get_binary/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('get_ptr', 'Method', 'api/basic_json/get_ptr/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('get_ref', 'Method', 'api/basic_json/get_ref/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('get_to', 'Method', 'api/basic_json/get_to/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('input_format_t', 'Enum', 'api/basic_json/input_format_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('insert', 'Method', 'api/basic_json/insert/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('invalid_iterator', 'Class', 'api/basic_json/invalid_iterator/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_array', 'Method', 'api/basic_json/is_array/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_binary', 'Method', 'api/basic_json/is_binary/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_boolean', 'Method', 'api/basic_json/is_boolean/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_discarded', 'Method', 'api/basic_json/is_discarded/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_null', 'Method', 'api/basic_json/is_null/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_number', 'Method', 'api/basic_json/is_number/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_number_float', 'Method', 'api/basic_json/is_number_float/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_number_integer', 'Method', 'api/basic_json/is_number_integer/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_number_unsigned', 'Method', 'api/basic_json/is_number_unsigned/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_object', 'Method', 'api/basic_json/is_object/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_primitive', 'Method', 'api/basic_json/is_primitive/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_string', 'Method', 'api/basic_json/is_string/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('is_structured', 'Method', 'api/basic_json/is_structured/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('items', 'Method', 'api/basic_json/items/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('json', 'Class', 'api/json/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('json_pointer', 'Class', 'api/json_pointer/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('json_serializer', 'Type', 'api/basic_json/json_serializer/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('max_size', 'Method', 'api/basic_json/max_size/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('merge_patch', 'Method', 'api/basic_json/merge_patch/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('meta', 'Function', 'api/basic_json/meta/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('number_float_t', 'Type', 'api/basic_json/number_float_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('number_integer_t', 'Type', 'api/basic_json/number_integer_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('number_unsigned_t', 'Type', 'api/basic_json/number_unsigned_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('object', 'Function', 'api/basic_json/object/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('object_comparator_t', 'Type', 'api/basic_json/object_comparator_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('object_t', 'Type', 'api/basic_json/object_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator!=', 'Operator', 'api/basic_json/operator_ne/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator+=', 'Operator', 'api/basic_json/operator+=/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator=', 'Operator', 'api/basic_json/operator=/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator==', 'Operator', 'api/basic_json/operator_eq/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator<', 'Operator', 'api/basic_json/operator_lt/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator<=', 'Operator', 'api/basic_json/operator_le/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator>', 'Operator', 'api/basic_json/operator_gt/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator>=', 'Operator', 'api/basic_json/operator_ge/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator[]', 'Operator', 'api/basic_json/operator[]/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator""_json', 'Literal', 'api/basic_json/operator_literal_json/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator""_json_pointer', 'Literal', 'api/basic_json/operator_literal_json_pointer/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator ValueType', 'Operator', 'api/basic_json/operator_ValueType/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('operator value_t', 'Operator', 'api/basic_json/operator_value_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('ordered_json', 'Class', 'api/ordered_json/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('ordered_map', 'Class', 'api/ordered_map/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('out_of_range', 'Class', 'api/basic_json/out_of_range/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('other_error', 'Class', 'api/basic_json/other_error/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('parse', 'Function', 'api/basic_json/parse/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('parse_error', 'Class', 'api/basic_json/parse_error/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('parse_event_t', 'Enum', 'api/basic_json/parse_event_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('parser_callback_t', 'Type', 'api/basic_json/parser_callback_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('patch', 'Method', 'api/basic_json/patch/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('push_back', 'Method', 'api/basic_json/push_back/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('rbegin', 'Method', 'api/basic_json/rbegin/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('rend', 'Method', 'api/basic_json/rend/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('sax_parse', 'Function', 'api/basic_json/sax_parse/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('size', 'Method', 'api/basic_json/size/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('string_t', 'Type', 'api/basic_json/string_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('type', 'Method', 'api/basic_json/type/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('type_error', 'Class', 'api/basic_json/type_error/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('type_name', 'Method', 'api/basic_json/type_name/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('unflatten', 'Method', 'api/basic_json/unflatten/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('update', 'Method', 'api/basic_json/update/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('to_bson', 'Function', 'api/basic_json/to_bson/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('to_cbor', 'Function', 'api/basic_json/to_cbor/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('to_msgpack', 'Function', 'api/basic_json/to_msgpack/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('to_ubjson', 'Function', 'api/basic_json/to_ubjson/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('value', 'Method', 'api/basic_json/value/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('value_t', 'Enum', 'api/basic_json/value_t/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('~basic_json', 'Method', 'api/basic_json/~basic_json/index.html');
-- Features
INSERT INTO searchIndex(name, type, path) VALUES ('Binary Formats', 'Guide', 'features/binary_formats/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('BSON', 'Guide', 'features/binary_formats/bson/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('CBOR', 'Guide', 'features/binary_formats/cbor/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('MessagePack', 'Guide', 'features/binary_formats/messagepack/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('UBJSON', 'Guide', 'features/binary_formats/ubjson/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Supported Macros', 'Guide', 'features/macros/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Binary Values', 'Guide', 'features/binary_values/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Comments', 'Guide', 'features/comments/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Iterators', 'Guide', 'features/iterators/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Types', 'Guide', 'features/types/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Number Handling', 'Guide', 'features/types/number_handling/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Element Access', 'Guide', 'features/element_access/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON Pointer', 'Guide', 'features/json_pointer/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON Patch and Diff', 'Guide', 'features/json_patch/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON Merge Patch', 'Guide', 'features/merge_patch/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Object Order', 'Guide', 'features/object_order/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Parsing and Exceptions', 'Guide', 'features/parsing/parse_exceptions/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('Parser Callbacks', 'Guide', 'features/parsing/parser_callbacks/index.html');
INSERT INTO searchIndex(name, type, path) VALUES ('SAX Interface', 'Guide', 'features/parsing/sax_interface/index.html');
-- Macros
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_ASSERT', 'Macro', 'features/macros/index.html#json_assertx');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_CATCH_USER', 'Macro', 'features/macros/index.html#json_catch_userexception');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_DIAGNOSTICS', 'Macro', 'features/macros/index.html#json_diagnostics');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_HAS_CPP_11', 'Macro', 'features/macros/index.html#json_has_cpp_11-json_has_cpp_14-json_has_cpp_17-json_has_cpp_20');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_HAS_CPP_14', 'Macro', 'features/macros/index.html#json_has_cpp_11-json_has_cpp_14-json_has_cpp_17-json_has_cpp_20');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_HAS_CPP_17', 'Macro', 'features/macros/index.html#json_has_cpp_11-json_has_cpp_14-json_has_cpp_17-json_has_cpp_20');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_HAS_CPP_20', 'Macro', 'features/macros/index.html#json_has_cpp_11-json_has_cpp_14-json_has_cpp_17-json_has_cpp_20');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_NOEXCEPTION', 'Macro', 'features/macros/index.html#json_noexception');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_NO_IO', 'Macro', 'features/macros/index.html#json_no_io');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_SKIP_UNSUPPORTED_COMPILER_CHECK', 'Macro', 'features/macros/index.html#json_skip_unsupported_compiler_check');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_THROW_USER', 'Macro', 'features/macros/index.html#json_throw_userexception');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_TRY_USER', 'Macro', 'features/macros/index.html#json_try_user');
INSERT INTO searchIndex(name, type, path) VALUES ('JSON_USE_IMPLICIT_CONVERSIONS', 'Macro', 'features/macros/index.html#json_use_implicit_conversions');
INSERT INTO searchIndex(name, type, path) VALUES ('NLOHMANN_DEFINE_TYPE_INTRUSIVE', 'Macro', 'features/macros/index.html#nlohmann_define_type_intrusivetype-member');
INSERT INTO searchIndex(name, type, path) VALUES ('NLOHMANN_DEFINE_TYPE_NON_INTRUSIVE', 'Macro', 'features/macros/index.html#nlohmann_define_type_non_intrusivetype-member');
INSERT INTO searchIndex(name, type, path) VALUES ('NLOHMANN_JSON_SERIALIZE_ENUM', 'Macro', 'features/macros/index.html#nlohmann_json_serialize_enumtype');

View File

@@ -1,6 +1,4 @@
#include <iostream>
#include <iomanip>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
@@ -31,8 +29,7 @@ int main()
j["new"]["key"]["value"] = {"another", "list"};
// count elements
auto s = j.size();
j["size"] = s;
j["size"] = j.size();
// pretty print with indent of 4 spaces
std::cout << std::setw(4) << j << '\n';

View File

@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/IYGZTTRU0q5Ty1ev"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/4NEU6ZZMoM9lpIex"><b>online</b></a>

View File

@@ -23,5 +23,5 @@
"value": 42.99
},
"pi": 3.141,
"size": 8
"size": 9
}

View File

@@ -1,26 +0,0 @@
#include <iostream>
#include <iomanip>
#include <nlohmann/json.hpp>
using json = nlohmann::json;
int main()
{
// a valid JSON text
auto valid_text = R"(
{
"numbers": [1, 2, 3]
}
)";
// an invalid JSON text
auto invalid_text = R"(
{
"strings": ["extra", "comma", ]
}
)";
std::cout << std::boolalpha
<< json::accept(valid_text) << ' '
<< json::accept(invalid_text) << '\n';
}

View File

@@ -1 +0,0 @@
<a target="_blank" href="https://wandbox.org/permlink/r5Tai1spihWHTdFA"><b>online</b></a>

View File

@@ -1 +0,0 @@
true false

View File

@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;

View File

@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/Ptk2BoNyGfrZfxRU"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/1fsm4gI55p83DOwU"><b>online</b></a>

View File

@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
@@ -22,27 +21,13 @@ int main()
// output changed array
std::cout << object << '\n';
// exception type_error.304
// try to write at a nonexisting key
try
{
// use at() on a non-object type
json str = "I am a string";
str.at("the good") = "Another string";
}
catch (json::type_error& e)
{
std::cout << e.what() << '\n';
}
// exception out_of_range.401
try
{
// try to write at a nonexisting key
object.at("the fast") = "il rapido";
}
catch (json::out_of_range& e)
catch (std::out_of_range& e)
{
std::cout << e.what() << '\n';
std::cout << "out of range: " << e.what() << '\n';
}
}

View File

@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/055DW3OWJPwGYr92"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/tb5CaFfsMWpAvi7m"><b>online</b></a>

View File

@@ -1,4 +1,3 @@
"il brutto"
{"the bad":"il cattivo","the good":"il buono","the ugly":"il brutto"}
[json.exception.type_error.304] cannot use at() with string
[json.exception.out_of_range.403] key 'the fast' not found
out of range: key 'the fast' not found

View File

@@ -1,12 +1,11 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
int main()
{
// create JSON object
const json object =
json object =
{
{"the good", "il buono"},
{"the bad", "il cattivo"},
@@ -16,26 +15,12 @@ int main()
// output element with key "the ugly"
std::cout << object.at("the ugly") << '\n';
// exception type_error.304
// try to read from a nonexisting key
try
{
// use at() on a non-object type
const json str = "I am a string";
std::cout << str.at("the good") << '\n';
}
catch (json::type_error& e)
{
std::cout << e.what() << '\n';
}
// exception out_of_range.401
try
{
// try to read from a nonexisting key
std::cout << object.at("the fast") << '\n';
}
catch (json::out_of_range)
catch (std::out_of_range)
{
std::cout << "out of range" << '\n';
}

View File

@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/gM1uqvzukokLwjMa"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/NFG86H5khRUePc1s"><b>online</b></a>

View File

@@ -1,3 +1,2 @@
"il brutto"
[json.exception.type_error.304] cannot use at() with string
out of range

View File

@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
@@ -17,27 +16,13 @@ int main()
// output changed array
std::cout << array << '\n';
// exception type_error.304
// try to write beyond the array limit
try
{
// use at() on a non-array type
json str = "I am a string";
str.at(0) = "Another string";
}
catch (json::type_error& e)
{
std::cout << e.what() << '\n';
}
// exception out_of_range.401
try
{
// try to write beyond the array limit
array.at(5) = "sixth";
}
catch (json::out_of_range& e)
catch (std::out_of_range& e)
{
std::cout << e.what() << '\n';
std::cout << "out of range: " << e.what() << '\n';
}
}

View File

@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/H3tBOr4lpufGRHlF"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/R7z2SB2rMdFQ9XtR"><b>online</b></a>

View File

@@ -1,4 +1,3 @@
"third"
["first","second","third","fourth"]
[json.exception.type_error.304] cannot use at() with string
[json.exception.out_of_range.401] array index 5 is out of range
out of range: array index 5 is out of range

View File

@@ -1,37 +1,22 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
int main()
{
// create JSON array
const json array = {"first", "2nd", "third", "fourth"};
json array = {"first", "2nd", "third", "fourth"};
// output element at index 2 (third element)
std::cout << array.at(2) << '\n';
// exception type_error.304
// try to read beyond the array limit
try
{
// use at() on a non-array type
const json str = "I am a string";
std::cout << str.at(0) << '\n';
}
catch (json::type_error& e)
{
std::cout << e.what() << '\n';
}
// exception out_of_range.401
try
{
// try to read beyond the array limit
std::cout << array.at(5) << '\n';
}
catch (json::out_of_range& e)
catch (std::out_of_range)
{
std::cout << e.what() << '\n';
std::cout << "out of range" << '\n';
}
}

View File

@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/UtGhIQGxCYIml36F"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/L1bMeiN6nYm7JrvA"><b>online</b></a>

View File

@@ -1,3 +1,2 @@
"third"
[json.exception.type_error.304] cannot use at() with string
[json.exception.out_of_range.401] array index 5 is out of range
out of range

View File

@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
@@ -33,71 +32,4 @@ int main()
j.at("/array/1"_json_pointer) = 21;
// output the changed array
std::cout << j["array"] << '\n';
// out_of_range.106
try
{
// try to use an array index with leading '0'
json::reference ref = j.at("/array/01"_json_pointer);
}
catch (json::parse_error& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.109
try
{
// try to use an array index that is not a number
json::reference ref = j.at("/array/one"_json_pointer);
}
catch (json::parse_error& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.401
try
{
// try to use an invalid array index
json::reference ref = j.at("/array/4"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.402
try
{
// try to use the array index '-'
json::reference ref = j.at("/array/-"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.403
try
{
// try to use a JSON pointer to a nonexistent object key
json::const_reference ref = j.at("/foo"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.404
try
{
// try to use a JSON pointer that cannot be resolved
json::reference ref = j.at("/number/foo"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
}


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/NHe5FLR1KYaiHwBx"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/NDSjglHZIjIZ0Uxg"><b>online</b></a>


@@ -4,9 +4,3 @@
2
"bar"
[1,21]
[json.exception.parse_error.106] parse error: array index '01' must not begin with '0'
[json.exception.parse_error.109] parse error: array index 'one' is not a number
[json.exception.out_of_range.401] array index 4 is out of range
[json.exception.out_of_range.402] array index '-' (2) is out of range
[json.exception.out_of_range.403] key 'foo' not found
[json.exception.out_of_range.404] unresolved reference token 'foo'
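For the JSON-pointer overload of at(), the numbered diagnostics in the output above separate malformed pointers (parse_error.106/109) from pointers that are well formed but do not resolve (out_of_range.401–404). A short sketch of the happy path plus one failure case, assuming the same exception hierarchy as above:

#include <iostream>
#include <nlohmann/json.hpp>
using json = nlohmann::json;

int main()
{
    const json j = {{"number", 1}, {"string", "foo"}, {"array", {1, 2}}};

    // read through JSON pointers (RFC 6901)
    std::cout << j.at("/string"_json_pointer) << '\n';   // "foo"
    std::cout << j.at("/array/1"_json_pointer) << '\n';  // 2

    try
    {
        // index 4 is past the end of the two-element array -> out_of_range.401
        std::cout << j.at("/array/4"_json_pointer) << '\n';
    }
    catch (const json::out_of_range& e)
    {
        std::cout << e.what() << '\n';
    }
}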


@@ -1,12 +1,11 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
int main()
{
// create a JSON value
const json j =
json j =
{
{"number", 1}, {"string", "foo"}, {"array", {1, 2}}
};
@@ -21,59 +20,4 @@ int main()
std::cout << j.at("/array"_json_pointer) << '\n';
// output element with JSON pointer "/array/1"
std::cout << j.at("/array/1"_json_pointer) << '\n';
// out_of_range.109
try
{
// try to use an array index that is not a number
json::const_reference ref = j.at("/array/one"_json_pointer);
}
catch (json::parse_error& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.401
try
{
// try to use an invalid array index
json::const_reference ref = j.at("/array/4"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.402
try
{
// try to use the array index '-'
json::const_reference ref = j.at("/array/-"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.403
try
{
// try to use a JSON pointer to a nonexistent object key
json::const_reference ref = j.at("/foo"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
// out_of_range.404
try
{
// try to use a JSON pointer that cannot be resolved
json::const_reference ref = j.at("/number/foo"_json_pointer);
}
catch (json::out_of_range& e)
{
std::cout << e.what() << '\n';
}
}


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/lbZjdwg6yII33cV5"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/OuLYiMJ3pgyOHupb"><b>online</b></a>


@@ -2,8 +2,3 @@
"foo"
[1,2]
2
[json.exception.parse_error.109] parse error: array index 'one' is not a number
[json.exception.out_of_range.401] array index 4 is out of range
[json.exception.out_of_range.402] array index '-' (2) is out of range
[json.exception.out_of_range.403] key 'foo' not found
[json.exception.out_of_range.404] unresolved reference token 'foo'


@@ -1,11 +1,11 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
int main()
{
// create JSON values
json j_null;
json j_boolean = true;
json j_number_integer = 17;
json j_number_float = 23.42;
@@ -16,6 +16,7 @@ int main()
json j_string = "Hello, world";
// call back()
//std::cout << j_null.back() << '\n'; // would throw
std::cout << j_boolean.back() << '\n';
std::cout << j_number_integer.back() << '\n';
std::cout << j_number_float.back() << '\n';
@@ -24,15 +25,4 @@ int main()
std::cout << j_array.back() << '\n';
//std::cout << j_array_empty.back() << '\n'; // undefined behavior
std::cout << j_string.back() << '\n';
// back() called on a null value
try
{
json j_null;
j_null.back();
}
catch (json::invalid_iterator& e)
{
std::cout << e.what() << '\n';
}
}


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/swnzqmd8zj6ij1fb"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/nPVnBcHf8nrNpGOJ"><b>online</b></a>


@@ -4,4 +4,3 @@ true
2
16
"Hello, world"
[json.exception.invalid_iterator.214] cannot get value
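The back() example above covers every value type: for arrays it returns the last element, for primitive types the value itself, and for null one side of the hunk reports invalid_iterator.214 while the other leaves the call commented out as "would throw". A condensed sketch, assuming the variant that throws the numbered exception:

#include <iostream>
#include <nlohmann/json.hpp>
using json = nlohmann::json;

int main()
{
    json j_array = {1, 2, 3};
    json j_string = "Hello, world";
    json j_null;

    std::cout << j_array.back() << '\n';   // 3, the last element
    std::cout << j_string.back() << '\n';  // "Hello, world", the value itself

    try
    {
        j_null.back();  // a null value has nothing to return
    }
    catch (const json::invalid_iterator& e)
    {
        std::cout << e.what() << '\n';  // invalid_iterator.214: cannot get value
    }
}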


@@ -1,12 +1,10 @@
#include <iostream>
#include <json.hpp>
#include <deque>
#include <list>
#include <forward_list>
#include <set>
#include <unordered_map>
#include <unordered_set>
#include <valarray>
#include <nlohmann/json.hpp>
using json = nlohmann::json;
@@ -68,10 +66,6 @@ int main()
std::vector<int> c_vector {1, 2, 3, 4};
json j_vec(c_vector);
// create an array from std::valarray
std::valarray<short> c_valarray {10, 9, 8, 7};
json j_valarray(c_valarray);
// create an array from std::deque
std::deque<double> c_deque {1.2, 2.3, 3.4, 5.6};
json j_deque(c_deque);
@@ -107,7 +101,6 @@ int main()
// serialize the JSON arrays
std::cout << j_array_t << '\n';
std::cout << j_vec << '\n';
std::cout << j_valarray << '\n';
std::cout << j_deque << '\n';
std::cout << j_list << '\n';
std::cout << j_flist << '\n';
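The container-constructor example differs between the two sides only in std::valarray support, which appears on one side of the hunk. A reduced sketch of that conversion, assuming a library version that provides it alongside the other sequence containers:

#include <deque>
#include <iostream>
#include <valarray>
#include <vector>
#include <nlohmann/json.hpp>
using json = nlohmann::json;

int main()
{
    std::vector<int> c_vector {1, 2, 3, 4};
    std::valarray<short> c_valarray {10, 9, 8, 7};
    std::deque<double> c_deque {1.2, 2.3, 3.4, 5.6};

    // every compatible container converts to a JSON array
    std::cout << json(c_vector) << '\n';    // [1,2,3,4]
    std::cout << json(c_valarray) << '\n';  // [10,9,8,7]
    std::cout << json(c_deque) << '\n';     // [1.2,2.3,3.4,5.6]
}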


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/RodBcX8vpcP5dQ40"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/VM7W2kpE7sIYJ5DW"><b>online</b></a>


@@ -6,7 +6,6 @@
["one","two",3,4.5,false]
[1,2,3,4]
[10,9,8,7]
[1.2,2.3,3.4,5.6]
[true,true,false,true]
[12345678909876,23456789098765,34567890987654,45678909876543]
@@ -28,10 +27,10 @@
1024
-17
8
3.141592653589793
3.14159265358979
null
null
42.22999954223633
42.2299995422363
null
23.42


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
@@ -19,14 +18,4 @@ int main()
std::cout << j_array_range << '\n';
std::cout << j_number_range << '\n';
std::cout << j_object_range << '\n';
// example for an exception
try
{
json j_invalid(j_number.begin() + 1, j_number.end());
}
catch (json::invalid_iterator& e)
{
std::cout << e.what() << '\n';
}
}


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/oWFo4AhVlMBjNv8E"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/nKF1QcieoCHm6Lez"><b>online</b></a>


@@ -1,4 +1,3 @@
["bravo","charly"]
42
{"one":"eins"}
[json.exception.invalid_iterator.204] iterators out of range


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/oRn0YsYyurs0Zi5T"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/6Qfn12BDzb3vqO5p"><b>online</b></a>


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/kVknQzPMpqjywTpG"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/iWYvmlA4qQmtvACX"><b>online</b></a>


@@ -0,0 +1,57 @@
#include <iomanip>
#include <iostream>
#include <sstream>
#include <json.hpp>
using json = nlohmann::json;
int main()
{
// a JSON text
auto text = R"(
{
"Image": {
"Width": 800,
"Height": 600,
"Title": "View from 15th Floor",
"Thumbnail": {
"Url": "http://www.example.com/image/481989943",
"Height": 125,
"Width": 100
},
"Animated" : false,
"IDs": [116, 943, 234, 38793]
}
}
)";
// fill a stream with JSON text
std::stringstream ss;
ss << text;
// create JSON from stream
json j_complete(ss); // deprecated!
// shall be replaced by: json j_complete = json::parse(ss);
std::cout << std::setw(4) << j_complete << "\n\n";
// define parser callback
json::parser_callback_t cb = [](int depth, json::parse_event_t event, json & parsed)
{
// skip object elements with key "Thumbnail"
if (event == json::parse_event_t::key and parsed == json("Thumbnail"))
{
return false;
}
else
{
return true;
}
};
// fill a stream with JSON text
ss.clear();
ss << text;
// create JSON from stream (with callback)
json j_filtered(ss, cb);
// shall be replaced by: json j_filtered = json::parse(ss, cb);
std::cout << std::setw(4) << j_filtered << '\n';
}
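The new file above still exercises the stream constructor json j(ss), which its own comments mark as deprecated in favor of json::parse. A minimal sketch of the replacement, parsing directly from the string to sidestep the stream-reset bookkeeping; the filtering callback keeps the same signature, and the shortened JSON text here is illustrative only:

#include <iomanip>
#include <iostream>
#include <nlohmann/json.hpp>
using json = nlohmann::json;

int main()
{
    auto text = R"({"Image": {"Width": 800, "Thumbnail": {"Url": "http://www.example.com/image/481989943"}}})";

    // full parse, replacing the deprecated stream constructor
    json j_complete = json::parse(text);
    std::cout << std::setw(4) << j_complete << "\n\n";

    // callback that drops everything stored below an object key "Thumbnail"
    json::parser_callback_t cb = [](int /*depth*/, json::parse_event_t event, json& parsed)
    {
        return !(event == json::parse_event_t::key && parsed == json("Thumbnail"));
    };

    // filtered parse, replacing the deprecated json j_filtered(ss, cb)
    json j_filtered = json::parse(text, cb);
    std::cout << std::setw(4) << j_filtered << '\n';
}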


@@ -0,0 +1 @@
<a target="_blank" href="http://melpon.org/wandbox/permlink/R6dzpKXlxrttShf7"><b>online</b></a>


@@ -0,0 +1,34 @@
{
"Image": {
"Animated": false,
"Height": 600,
"IDs": [
116,
943,
234,
38793
],
"Thumbnail": {
"Height": 125,
"Url": "http://www.example.com/image/481989943",
"Width": 100
},
"Title": "View from 15th Floor",
"Width": 800
}
}
{
"Image": {
"Animated": false,
"Height": 600,
"IDs": [
116,
943,
234,
38793
],
"Title": "View from 15th Floor",
"Width": 800
}
}


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/yzkjGGzMOESBkCdH"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/iZzzxEpB7tbhz0cx"><b>online</b></a>


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/eIOvQiuID2qgxp5u"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/CaDlatv1uXhQiu7o"><b>online</b></a>


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/Vm0nR32x6co8rq40"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/9Tvfs2dJBW8m8ihA"><b>online</b></a>


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;


@@ -1 +1 @@
<a target="_blank" href="https://wandbox.org/permlink/w4TtEQzlVyokIrtp"><b>online</b></a>
<a target="_blank" href="http://melpon.org/wandbox/permlink/IFZT4VL0oRotJBxl"><b>online</b></a>


@@ -1,5 +1,4 @@
#include <iostream>
#include <nlohmann/json.hpp>
#include <json.hpp>
using json = nlohmann::json;
