Mirror of https://github.com/pantor/inja.git, synced 2026-02-17 09:03:58 +00:00

Inja v2 (#67)

* inja2
* header only
* reduce dependencies
* code cleaning
* c++17
* use stdc++
* code cleaning
* infrastructure
* header only
* add infrastructure
* fix tests
* use minimum clang 6
* code cleaning, polyfill for c++11
* fix some file tests
* fix readme
* update appveyor
* fix polyfill and ci
* fix polyfill
* fix ci?
* test msvc __cplusplus
* add doxygen
* activate all tests
* code cleaning
* add coveralls, set default to dot notation
* add html test
* add doxygen comments
* test single_include file
* change build folder in appveyor
* correct make arguments in appveyor
* fix appveyor arguments
3  .gitignore (vendored)
@@ -40,3 +40,6 @@ dist
.coveralls.yml

.vscode

doc/html
doc/latex
99  .travis.yml
@@ -9,103 +9,56 @@ sudo: required

matrix:
  include:
    ### Coveralls ###
    - os: linux
      compiler: gcc
      env: COMPILER=g++-5
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test']
          packages: g++-5
      before_install:
        - pip install --user cpp-coveralls
      after_success:
        - make clean
        # - coveralls --exclude lib --exclude tests --gcov-options '\-lp'

    - os: linux
      compiler: gcc
      env: COMPILER=g++-4.9
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test']
          packages: g++-4.9

    - os: linux
      compiler: gcc
      env: COMPILER=g++-5
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test']
          packages: g++-5

    - os: linux
      compiler: gcc
      env: COMPILER=g++-6
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test']
          packages: g++-6

    - os: linux
      compiler: gcc
      env: COMPILER=g++-7
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test']
          sources: ubuntu-toolchain-r-test
          packages: g++-7

    - os: linux
      compiler: clang
      env: COMPILER=clang++-3.6
      compiler: gcc
      env:
        - COMPILER=g++-8
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-precise-3.6']
          packages: clang-3.6
          sources: ubuntu-toolchain-r-test
          packages: g++-8

    - os: linux
      compiler: gcc
      env:
        - COMPILER=g++-8
        - CXXFLAGS=-std=c++17
      addons:
        apt:
          sources: ubuntu-toolchain-r-test
          packages: g++-8

    - os: linux
      compiler: clang
      env: COMPILER=clang++-3.7
      env: COMPILER=clang++-6.0
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-precise-3.7']
          packages: clang-3.7

    - os: linux
      compiler: clang
      env: COMPILER=clang++-4.0
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-trusty-4.0']
          packages: ['g++-6', 'clang-4.0']

    - os: linux
      compiler: clang
      env: COMPILER=clang++-5.0
      addons:
        apt:
          sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-trusty-5.0']
          packages: ['g++-6', 'clang-5.0']

    - os: osx
      osx_image: xcode7.3

    - os: osx
      osx_image: xcode8

    - os: osx
      osx_image: xcode8.3
          sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-trusty-6.0']
          packages: ['clang-6.0', 'libstdc++-7-dev']

    - os: osx
      osx_image: xcode9

    - os: osx
      osx_image: xcode10


script:
  - if [[ "${COMPILER}" != "" ]]; then export CXX=${COMPILER}; fi
  - uname -a
  - $CXX --version

  - mkdir -p build && cd build
  - cmake .. && cmake --build . --config Release -- -j4
  - mkdir -p build
  - cd build
  - cmake ..
  - cmake --build . --config Release -- -j4
  - ctest -C Release -V
  - cd ..
158  CMakeLists.txt
@@ -1,137 +1,69 @@
cmake_minimum_required(VERSION 3.1)
cmake_minimum_required(VERSION 3.5)


##
## PROJECT
##
project(inja LANGUAGES CXX VERSION 1.0.1)
set(INJA_VERSION ${PROJECT_VERSION})
project(inja LANGUAGES CXX VERSION 2.0.0)


##
## OPTIONS
##
option(BUILD_UNIT_TESTS "Build the unit tests" ON)
option(BUILD_BENCHMARK "Build the inja benchmark" OFF)
option(HUNTER_ENABLED "Use hunter to manage dependencies" OFF)
option(BUILD_TESTS "Build the inja unit tests" ON)
option(BUILD_BENCHMARK "Build the inja benchmark" ON)
option(COVERALLS "Generate coveralls data" OFF)


##
## HUNTER
##
if(HUNTER_ENABLED)
    include("cmake/HunterGate.cmake")
    HunterGate(
        URL "https://github.com/ruslo/hunter/archive/v0.19.156.tar.gz"
        SHA1 "8d5e4635b137365e0d1ade4d60accf4e2bb41f0d"
    )
endif()


##
## CONFIGURATION
##
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
set(INJA_INCLUDE_DIR include)
set(INJA_SINGLE_INCLUDE_DIR single_include)
set(INJA_HEADER_INSTALL_DIR include)

if(WIN32 AND MSVC AND MSVC_VERSION LESS 1900)
    message(FATAL_ERROR "[${PROJECT_NAME}] Visual Studio versions prior to 2015 do not support the noexcept keyword, which is used in the JSON library.")
endif()
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR}/cmake)
set(CMAKE_BUILD_TYPE Debug)
# set(CMAKE_BUILD_TYPE Release)
# set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} -O3")


##
## TESTS
## create and configure the unit test target
##
if(BUILD_UNIT_TESTS)
    enable_testing()
    add_subdirectory(test)
endif()


##
## AMALGAMATE
## amalgamate header files into single_include
##
execute_process(COMMAND python3 amalgamate/amalgamate.py -c amalgamate/config.json -s include
                WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})


##
## TARGETS
## Build targets for the interface library
##
add_library(inja INTERFACE)
target_include_directories(inja INTERFACE
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/${INJA_INCLUDE_DIR}>
    $<INSTALL_INTERFACE:${INJA_HEADER_INSTALL_DIR}>
)
target_include_directories(inja INTERFACE include)


add_library(inja_single INTERFACE)
target_include_directories(inja_single INTERFACE
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/${INJA_SINGLE_INCLUDE_DIR}>
    $<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/${INJA_INCLUDE_DIR}>
    $<INSTALL_INTERFACE:${INJA_HEADER_INSTALL_DIR}>
)
execute_process(COMMAND python3 amalgamate/amalgamate.py -c amalgamate/config.json -s include WORKING_DIRECTORY ${PROJECT_SOURCE_DIR})


if(HUNTER_ENABLED) # Use Hunter to manage dependencies
    # Add JSON package
    hunter_add_package(nlohmann_json)
    find_package(nlohmann_json CONFIG REQUIRED)
    # Add dependencies to target
    target_link_libraries(inja INTERFACE nlohmann_json)
if (COVERALLS)
    include(Coveralls)
    coveralls_turn_on_coverage()

    file(GLOB_RECURSE COVERAGE_SRCS include/inja/*.hpp)

    # set(COVERAGE_SRCS test/unit.cpp test/unit-renderer.cpp include/inja)

    coveralls_setup("${COVERAGE_SRCS}" OFF) # If we should upload.
endif()


##
## INSTALL
## install header files, generate and install cmake config files for find_package()
##
set(include_install_dir ${INJA_HEADER_INSTALL_DIR})
set(config_install_dir "lib/cmake/${PROJECT_NAME}")
set(generated_dir "${CMAKE_CURRENT_BINARY_DIR}/generated")
set(version_config "${generated_dir}/${PROJECT_NAME}ConfigVersion.cmake")
set(project_config "${generated_dir}/${PROJECT_NAME}Config.cmake")
set(TARGETS_EXPORT_NAME "${PROJECT_NAME}Targets")
set(namespace "${PROJECT_NAME}::")
include(CMakePackageConfigHelpers)
if(BUILD_TESTS)
    enable_testing()

    add_executable(inja_test
        test/unit.cpp
        test/unit-files.cpp
        test/unit-renderer.cpp
    )
    target_link_libraries(inja_test PRIVATE inja)

    add_test(inja_test ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/inja_test)


    write_basic_package_version_file(
        "${version_config}" COMPATIBILITY SameMajorVersion
    )
    configure_package_config_file(
        "cmake/Config.cmake.in"
        "${project_config}"
        INSTALL_DESTINATION "${config_install_dir}"
    )
    add_library(single_inja INTERFACE)
    target_include_directories(single_inja INTERFACE single_include include)

    install(
        TARGETS inja
        EXPORT "${TARGETS_EXPORT_NAME}"
        LIBRARY DESTINATION "lib"
        ARCHIVE DESTINATION "lib"
        RUNTIME DESTINATION "bin"
        INCLUDES DESTINATION "${include_install_dir}"
    add_executable(single_inja_test
        test/unit.cpp
        test/unit-files.cpp
        test/unit-renderer.cpp
    )
    target_link_libraries(single_inja_test PRIVATE single_inja)

    install(
        FILES ${INJA_INCLUDE_DIR}/inja.hpp
        DESTINATION "${include_install_dir}"
    )
    add_test(single_inja_test ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/single_inja_test)
endif()

install(
    FILES "${project_config}" "${version_config}"
    DESTINATION "${config_install_dir}"
)

install(
    EXPORT "${TARGETS_EXPORT_NAME}"
    NAMESPACE "${namespace}"
    DESTINATION "${config_install_dir}"
)
if(BUILD_BENCHMARK)
    add_executable(inja_benchmark test/benchmark.cpp)
    target_link_libraries(inja_benchmark PRIVATE inja)
endif()
29  LICENSE
@@ -19,3 +19,32 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


---


Copyright (c) 2009-2018 FIRST
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the FIRST nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY FIRST AND CONTRIBUTORS ``AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY NONINFRINGEMENT AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL FIRST OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
63  README.md
@@ -10,7 +10,7 @@
[](https://raw.githubusercontent.com/pantor/inja/master/LICENSE)


Inja is a template engine for modern C++, loosely inspired by [jinja](http://jinja.pocoo.org) for python. It has an easy yet powerful template syntax with all the variables, loops, conditions, includes, callbacks, and comments you need, nested and combined as you like. Inja uses the wonderful [json](https://github.com/nlohmann/json) library by nlohmann for data input and handling. Most importantly, *inja* needs only two header files, which is (nearly) as trivial as integration in C++ can get. Of course, everything is tested on all relevant compilers. Have a look at what it looks like:
Inja is a template engine for modern C++, loosely inspired by [jinja](http://jinja.pocoo.org) for python. It has an easy yet powerful template syntax with all the variables, loops, conditions, includes, callbacks, and comments you need, nested and combined as you like. Inja uses the wonderful [json](https://github.com/nlohmann/json) library by nlohmann for data input and handling. Most importantly, *inja* needs only two header files, which is (nearly) as trivial as integration in C++ can get. Of course, everything is tested on all relevant compilers. Here is what it looks like:

```c++
json data;
@@ -56,17 +56,17 @@ data["name"] = "world";
render("Hello {{ name }}!", data); // "Hello world!"

// For more advanced usage, an environment is recommended
Environment env = Environment();
Environment env;

// Render a string with json data
std::string result = env.render("Hello {{ name }}!", data); // "Hello world!"

// Or directly read a template file
Template temp = env.parse_template("./template.txt");
std::string result = env.render_template(temp, data); // "Hello world!"
std::string result = env.render(temp, data); // "Hello world!"

data["name"] = "Inja";
std::string result = env.render_template(temp, data); // "Hello Inja!"
std::string result = env.render(temp, data); // "Hello Inja!"

// Or read a json file for data directly from the environment
result = env.render_file("./template.txt", "./data.json");
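
To see how these calls fit together, here is a minimal, self-contained sketch. The include paths and the `inja::`/`nlohmann::` namespace qualifications are assumptions based on this commit's `include/inja/inja.hpp` layout and the nlohmann json dependency, not lines taken from the README itself:

```c++
#include <inja/inja.hpp>       // assumed include path from this commit's layout
#include <nlohmann/json.hpp>
#include <iostream>

int main() {
    nlohmann::json data;
    data["name"] = "world";

    inja::Environment env;

    // Render directly from a string, as shown above
    std::cout << env.render("Hello {{ name }}!", data) << "\n";  // Hello world!

    // Or parse a template file once and reuse it
    inja::Template temp = env.parse_template("./template.txt");
    data["name"] = "Inja";
    std::cout << env.render(temp, data) << "\n";                 // Hello Inja!
}
```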
@@ -79,22 +79,22 @@ env.write("./template.txt", "./data.json", "./result.txt")
The environment class can be configured to your needs.
```c++
// With default settings
Environment env_default = Environment();
Environment env_default;

// With global path to template files
Environment env = Environment("../path/templates/");
// With global path to template files and where files will be saved
Environment env_1 = Environment("../path/templates/");

// With global path where to save rendered files
Environment env = Environment("../path/templates/", "../path/results/");
// With separate input and output path
Environment env_2 = Environment("../path/templates/", "../path/results/");

// Choose between JSON pointer or dot notation to access elements
env.set_element_notation(ElementNotation::Pointer); // (default) e.g. time/start
env.set_element_notation(ElementNotation::Dot); // e.g. time.start
// Choose between dot notation (like Jinja2) and JSON pointer to access elements
env.set_element_notation(ElementNotation::Dot); // (default) e.g. time.start
env.set_element_notation(ElementNotation::Pointer); // e.g. time/start

// With other opening and closing strings (here the defaults, as regex)
env.set_expression("\\{\\{", "\\}\\}"); // Expressions {{ }}
env.set_comment("\\{#", "#\\}"); // Comments {# #}
env.set_statement("\\{\\%", "\\%\\}"); // Statements {% %} for many things, see below
// With other opening and closing strings (here the defaults)
env.set_expression("{{", "}}"); // Expressions
env.set_comment("{#", "#}"); // Comments
env.set_statement("{%", "%}"); // Statements {% %} for many things, see below
env.set_line_statement("##"); // Line statements ## (just an opener)
```
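
As a quick illustration of the notation switch above, the same nested value can be addressed both ways. This is a sketch that reuses the `time` object from the variables section below; the `inja::` qualification is an assumption:

```c++
inja::Environment env;
nlohmann::json data;
data["time"]["start"] = 16;

// Dot notation, the default after this commit
env.set_element_notation(inja::ElementNotation::Dot);
env.render("Dinner at {{ time.start }}pm.", data);     // "Dinner at 16pm."

// JSON pointer notation, still available when selected explicitly
env.set_element_notation(inja::ElementNotation::Pointer);
env.render("Dinner at {{ time/start }}pm.", data);     // "Dinner at 16pm."
```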
@@ -109,10 +109,10 @@ data["time"]["start"] = 16;
data["time"]["end"] = 22;

// Indexing in array
render("{{ guests/1 }}", data); // "Tom"
render("{{ guests.1 }}", data); // "Tom"

// Objects
render("{{ time/start }} to {{ time/end }}pm", data); // "16 to 22pm"
render("{{ time.start }} to {{ time.end }}pm", data); // "16 to 22pm"
```
In general, the variables can be fetched using the [JSON Pointer](https://tools.ietf.org/html/rfc6901) syntax. For convenience, the leading `/` can be omitted. If no variable is found, valid JSON is printed directly; otherwise an error is thrown.
@@ -127,7 +127,7 @@ Statements can be written either with the `{% ... %}` syntax or the `##` syntax
// Combining loops and line statements
render(R"(Guest List:
## for guest in guests
{{ loop/index1 }}: {{ guest }}
{{ loop.index1 }}: {{ guest }}
## endfor )", data)

/* Guest List:
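
The same loop can also be written with block statements instead of line statements. A short sketch; the guest names are assumed for illustration, chosen so that `guests.1` is `"Tom"` as in the variables example above:

```c++
nlohmann::json data;
data["guests"] = {"Jeff", "Tom", "Patrick"};

// Block-statement form of the loop above
inja::render("{% for guest in guests %}{{ loop.index1 }}: {{ guest }} {% endfor %}", data);
// roughly: "1: Jeff 2: Tom 3: Patrick "
```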
@@ -142,7 +142,7 @@ In a loop, the special variables `loop/index (number)`, `loop/index1 (number)`,
Conditions support the typical if, else if and else statements. The following conditions are possible, for example:
```c++
// Standard comparisons with variable
render("{% if time/hour >= 18 %}…{% endif %}", data); // True
render("{% if time.hour >= 18 %}…{% endif %}", data); // True

// Variable in list
render("{% if neighbour in guests %}…{% endif %}", data); // True
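
Since the sentence above says else if and else are supported, here is a sketch of a full chain; the exact `{% else if %}` / `{% else %}` spelling is my assumption based on that sentence:

```c++
nlohmann::json data;
data["time"]["hour"] = 20;

// if / else if / else chain
inja::render(
    "{% if time.hour >= 18 %}evening"
    "{% else if time.hour >= 12 %}afternoon"
    "{% else %}morning{% endif %}", data);  // "evening"
```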
@@ -175,7 +175,7 @@ render("Hello {{ upper(neighbour) }}!", data); // "Hello PETER!"
render("Hello {{ lower(neighbour) }}!", data); // "Hello peter!"

// Range function, useful for loops
render("{% for i in range(4) %}{{ loop/index1 }}{% endfor %}", data); // "1234"
render("{% for i in range(4) %}{{ loop.index1 }}{% endfor %}", data); // "1234"

// Length function (please don't combine with range, use list directly...)
render("I count {{ length(guests) }} guests.", data); // "I count 3 guests."
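
Functions can also be nested inside a single expression. A small sketch combining `upper` and `length` from above; the data values are assumptions that match the comments in this section:

```c++
nlohmann::json data;
data["neighbour"] = "Peter";
data["guests"] = {"Jeff", "Tom", "Patrick"};

inja::render("{{ upper(neighbour) }} invited {{ length(guests) }} guests.", data);
// "PETER invited 3 guests."
```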
@@ -225,7 +225,7 @@ render("{{ isArray(guests) }}", data); // "true"

You can create your own, more complex functions with callbacks.
```c++
Environment env = Environment();
Environment env;

/*
 * Callbacks are defined by their:
@@ -233,8 +233,8 @@ Environment env = Environment();
 * - number of arguments
 * - callback function. Implemented with std::function, you can for example use lambdas.
 */
env.add_callback("double", 1, [&env](Parsed::Arguments args, json data) {
    int number = env.get_argument<int>(args, 0, data); // Adapt the type and index of the argument
env.add_callback("double", 1, [](Arguments& args) {
    int number = args.at(0)->get<int>(); // Adapt the index and type of the argument
    return 2 * number;
});

@@ -243,7 +243,7 @@ env.render("{{ double(16) }}", data); // "32"

// A callback without argument can be used like a dynamic variable:
std::string greet = "Hello";
env.add_callback("double-greetings", 0, [greet](Parsed::Arguments args, json data) {
env.add_callback("double-greetings", 0, [greet](Arguments args) {
    return greet + " " + greet + "!";
});
env.render("{{ double-greetings }}", data); // "Hello Hello!"
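
Building on the new `Arguments&` signature above, a callback with more than one argument looks like this sketch; that further arguments are reachable via `args.at(1)` in the same way as `args.at(0)` is an assumption:

```c++
inja::Environment env;
nlohmann::json data;

env.add_callback("add", 2, [](inja::Arguments& args) {
    int a = args.at(0)->get<int>();
    int b = args.at(1)->get<int>();  // assumed: same access pattern as args.at(0)
    return a + b;
});

env.render("{{ add(2, 3) }}", data);  // "5"
```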
@@ -260,15 +260,8 @@ render("Hello{# Todo #}!", data); // "Hello!"

## Supported compilers

Currently, the following compilers are tested:
Inja uses `string_view` from C++17; everything else should work with C++11. Currently, the following compilers are tested:

- GCC 4.9 - 7.1 (and possibly later)
- Clang 3.6 - 5.0 (and possibly later)
- Microsoft Visual C++ 2015 / Build Tools 14.0.25123.0 (and possibly later)
- GCC 7.0 - 8.0 (and possibly later)
- Clang 6.0 (and possibly later)
- Microsoft Visual C++ 2017 / Build Tools 15.1.548.43366 (and possibly later)


## License

Inja is licensed under the [MIT License](https://raw.githubusercontent.com/pantor/inja/master/LICENSE).
@@ -1,8 +1,8 @@
{
  "project": "inja",
  "target": "single_include/inja.hpp",
  "target": "single_include/inja/inja.hpp",
  "sources": [
    "../include/inja.hpp"
    "../include/inja/inja.hpp"
  ],
  "include_paths": [
  ]
29  appveyor.yml (Normal file → Executable file)
@@ -1,28 +1,25 @@
version: '{build}'

os:
  - Visual Studio 2015
  - Visual Studio 2017

environment:
  matrix:
    - additional_flags: ""
    - additional_flags: "/permissive- /std:c++latest /utf-8"
    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
      platform: x64
      FLAGS: "/permissive- /std:c++17 /utf-8 /Zc:__cplusplus"
      GENERATOR: Visual Studio 15 2017

matrix:
  exclude:
    - additional_flags: "/permissive- /std:c++latest /utf-8"
      os: Visual Studio 2015

init: []
init:
  - cmake --version
  - msbuild /version

install: []

before_build:
  - mkdir -p build
  - cd build
  - cmake .. -G "%GENERATOR%" -DCMAKE_CXX_FLAGS="%FLAGS%" -DCMAKE_IGNORE_PATH="C:/Program Files/Git/usr/bin"

build_script:
  - IF "%APPVEYOR_BUILD_WORKER_IMAGE%" == "Visual Studio 2015" ( SET GEN="Visual Studio 14 2015") ELSE (SET GEN="Visual Studio 15 2017")
  - mkdir -p build && cd build
  - cmake .. -G%GEN% -DCMAKE_CXX_FLAGS="%additional_flags%"
  - cmake --build . --config Release

test_script:
  - ctest -C Release -V
  - ctest -C Release -V -j
@@ -1,7 +0,0 @@
@PACKAGE_INIT@

include(CMakeFindDependencyMacro)
find_dependency(nlohmann_json CONFIG REQUIRED)

include("${CMAKE_CURRENT_LIST_DIR}/@TARGETS_EXPORT_NAME@.cmake")
check_required_components("@PROJECT_NAME@")
125  cmake/Coveralls.cmake (new file)
@@ -0,0 +1,125 @@
|
||||
#
|
||||
# The MIT License (MIT)
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
#
|
||||
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
|
||||
#
|
||||
|
||||
set(_CMAKE_SCRIPT_PATH ${CMAKE_CURRENT_LIST_DIR}) # must be outside coveralls_setup() to get correct path
|
||||
|
||||
#
|
||||
# Param _COVERAGE_SRCS A list of source files that coverage should be collected for.
|
||||
# Param _COVERALLS_UPLOAD Upload the result to coveralls?
|
||||
#
|
||||
|
||||
function(coveralls_setup _COVERAGE_SRCS _COVERALLS_UPLOAD)
|
||||
|
||||
if (ARGC GREATER 2)
|
||||
set(_CMAKE_SCRIPT_PATH ${ARGN})
|
||||
message(STATUS "Coveralls: Using alternate CMake script dir: ${_CMAKE_SCRIPT_PATH}")
|
||||
endif()
|
||||
|
||||
if (NOT EXISTS "${_CMAKE_SCRIPT_PATH}/CoverallsClear.cmake")
|
||||
message(FATAL_ERROR "Coveralls: Missing ${_CMAKE_SCRIPT_PATH}/CoverallsClear.cmake")
|
||||
endif()
|
||||
|
||||
if (NOT EXISTS "${_CMAKE_SCRIPT_PATH}/CoverallsGenerateGcov.cmake")
|
||||
message(FATAL_ERROR "Coveralls: Missing ${_CMAKE_SCRIPT_PATH}/CoverallsGenerateGcov.cmake")
|
||||
endif()
|
||||
|
||||
# When passing a CMake list to an external process, the list
|
||||
# will be converted from the format "1;2;3" to "1 2 3".
|
||||
# This means the script we're calling won't see it as a list
|
||||
# of sources, but rather just one long path. We remedy this
|
||||
# by replacing ";" with "*" and then reversing that in the script
|
||||
# that we're calling.
|
||||
# http://cmake.3232098.n2.nabble.com/Passing-a-CMake-list-quot-as-is-quot-to-a-custom-target-td6505681.html
|
||||
set(COVERAGE_SRCS_TMP ${_COVERAGE_SRCS})
|
||||
set(COVERAGE_SRCS "")
|
||||
foreach (COVERAGE_SRC ${COVERAGE_SRCS_TMP})
|
||||
set(COVERAGE_SRCS "${COVERAGE_SRCS}*${COVERAGE_SRC}")
|
||||
endforeach()
|
||||
|
||||
#message("Coverage sources: ${COVERAGE_SRCS}")
|
||||
set(COVERALLS_FILE ${PROJECT_BINARY_DIR}/coveralls.json)
|
||||
|
||||
add_custom_target(coveralls_generate
|
||||
|
||||
# Zero the coverage counters.
|
||||
COMMAND ${CMAKE_COMMAND} -DPROJECT_BINARY_DIR="${PROJECT_BINARY_DIR}" -P "${_CMAKE_SCRIPT_PATH}/CoverallsClear.cmake"
|
||||
|
||||
# Run regression tests.
|
||||
COMMAND ${CMAKE_CTEST_COMMAND} --output-on-failure
|
||||
|
||||
# Generate Gcov and translate it into coveralls JSON.
|
||||
# We do this by executing an external CMake script.
|
||||
# (We don't want this to run at CMake generation time, but after compilation and everything has run).
|
||||
COMMAND ${CMAKE_COMMAND}
|
||||
-DCOVERAGE_SRCS="${COVERAGE_SRCS}" # TODO: This is passed like: "a b c", not "a;b;c"
|
||||
-DCOVERALLS_OUTPUT_FILE="${COVERALLS_FILE}"
|
||||
-DCOV_PATH="${PROJECT_BINARY_DIR}"
|
||||
-DPROJECT_ROOT="${PROJECT_SOURCE_DIR}"
|
||||
-P "${_CMAKE_SCRIPT_PATH}/CoverallsGenerateGcov.cmake"
|
||||
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
COMMENT "Generating coveralls output..."
|
||||
)
|
||||
|
||||
if (_COVERALLS_UPLOAD)
|
||||
message("COVERALLS UPLOAD: ON")
|
||||
|
||||
find_program(CURL_EXECUTABLE curl)
|
||||
|
||||
if (NOT CURL_EXECUTABLE)
|
||||
message(FATAL_ERROR "Coveralls: curl not found! Aborting")
|
||||
endif()
|
||||
|
||||
add_custom_target(coveralls_upload
|
||||
# Upload the JSON to coveralls.
|
||||
COMMAND ${CURL_EXECUTABLE}
|
||||
-S -F json_file=@${COVERALLS_FILE}
|
||||
https://coveralls.io/api/v1/jobs
|
||||
|
||||
DEPENDS coveralls_generate
|
||||
|
||||
WORKING_DIRECTORY ${PROJECT_BINARY_DIR}
|
||||
COMMENT "Uploading coveralls output...")
|
||||
|
||||
add_custom_target(coveralls DEPENDS coveralls_upload)
|
||||
else()
|
||||
message("COVERALLS UPLOAD: OFF")
|
||||
add_custom_target(coveralls DEPENDS coveralls_generate)
|
||||
endif()
|
||||
|
||||
endfunction()
|
||||
|
||||
macro(coveralls_turn_on_coverage)
|
||||
if(NOT (CMAKE_COMPILER_IS_GNUCC OR CMAKE_COMPILER_IS_GNUCXX)
|
||||
AND (NOT "${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang"))
|
||||
message(FATAL_ERROR "Coveralls: Compiler ${CMAKE_C_COMPILER_ID} is not GNU gcc! Aborting... You can set this on the command line using CC=/usr/bin/gcc CXX=/usr/bin/g++ cmake <options> ..")
|
||||
endif()
|
||||
|
||||
if(NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
|
||||
message(FATAL_ERROR "Coveralls: Code coverage results with an optimised (non-Debug) build may be misleading! Add -DCMAKE_BUILD_TYPE=Debug")
|
||||
endif()
|
||||
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
|
||||
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -g -O0 -fprofile-arcs -ftest-coverage")
|
||||
endmacro()
|
||||
31  cmake/CoverallsClear.cmake (new file)
@@ -0,0 +1,31 @@
|
||||
#
|
||||
# The MIT License (MIT)
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
#
|
||||
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
|
||||
#
|
||||
|
||||
# do not follow symlinks in file(GLOB_RECURSE ...)
|
||||
cmake_policy(SET CMP0009 NEW)
|
||||
|
||||
file(GLOB_RECURSE GCDA_FILES "${PROJECT_BINARY_DIR}/*.gcda")
|
||||
if(NOT GCDA_FILES STREQUAL "")
|
||||
file(REMOVE ${GCDA_FILES})
|
||||
endif()
|
||||
484  cmake/CoverallsGenerateGcov.cmake (new file)
@@ -0,0 +1,484 @@
|
||||
#
|
||||
# The MIT License (MIT)
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
#
|
||||
# Copyright (C) 2014 Joakim Söderberg <joakim.soderberg@gmail.com>
|
||||
#
|
||||
# This is intended to be run by a custom target in a CMake project like this.
|
||||
# 0. Compile program with coverage support.
|
||||
# 1. Clear coverage data. (Recursively delete *.gcda in build dir)
|
||||
# 2. Run the unit tests.
|
||||
# 3. Run this script specifying which source files the coverage should be performed on.
|
||||
#
|
||||
# This script will then use gcov to generate .gcov files in the directory specified
|
||||
# via the COV_PATH var. This should probably be the same as your cmake build dir.
|
||||
#
|
||||
# It then parses the .gcov files to convert them into the Coveralls JSON format:
|
||||
# https://coveralls.io/docs/api
|
||||
#
|
||||
# Example for running as standalone CMake script from the command line:
|
||||
# (Note it is important the -P is at the end...)
|
||||
# $ cmake -DCOV_PATH=$(pwd)
|
||||
# -DCOVERAGE_SRCS="catcierge_rfid.c;catcierge_timer.c"
|
||||
# -P ../cmake/CoverallsGcovUpload.cmake
|
||||
#
|
||||
CMAKE_MINIMUM_REQUIRED(VERSION 2.8)
|
||||
|
||||
|
||||
#
|
||||
# Make sure we have the needed arguments.
|
||||
#
|
||||
if (NOT COVERALLS_OUTPUT_FILE)
|
||||
message(FATAL_ERROR "Coveralls: No coveralls output file specified. Please set COVERALLS_OUTPUT_FILE")
|
||||
endif()
|
||||
|
||||
if (NOT COV_PATH)
|
||||
message(FATAL_ERROR "Coveralls: Missing coverage directory path where gcov files will be generated. Please set COV_PATH")
|
||||
endif()
|
||||
|
||||
if (NOT COVERAGE_SRCS)
|
||||
message(FATAL_ERROR "Coveralls: Missing the list of source files that we should get the coverage data for COVERAGE_SRCS")
|
||||
endif()
|
||||
|
||||
if (NOT PROJECT_ROOT)
|
||||
message(FATAL_ERROR "Coveralls: Missing PROJECT_ROOT.")
|
||||
endif()
|
||||
|
||||
# Since it's not possible to pass a CMake list properly in the
|
||||
# "1;2;3" format to an external process, we have replaced the
|
||||
# ";" with "*", so reverse that here so we get it back into the
|
||||
# CMake list format.
|
||||
string(REGEX REPLACE "\\*" ";" COVERAGE_SRCS ${COVERAGE_SRCS})
|
||||
|
||||
if (NOT DEFINED ENV{GCOV})
|
||||
find_program(GCOV_EXECUTABLE gcov)
|
||||
else()
|
||||
find_program(GCOV_EXECUTABLE $ENV{GCOV})
|
||||
endif()
|
||||
|
||||
# convert all paths in COVERAGE_SRCS to absolute paths
|
||||
set(COVERAGE_SRCS_TMP "")
|
||||
foreach (COVERAGE_SRC ${COVERAGE_SRCS})
|
||||
if (NOT "${COVERAGE_SRC}" MATCHES "^/")
|
||||
set(COVERAGE_SRC ${PROJECT_ROOT}/${COVERAGE_SRC})
|
||||
endif()
|
||||
list(APPEND COVERAGE_SRCS_TMP ${COVERAGE_SRC})
|
||||
endforeach()
|
||||
set(COVERAGE_SRCS ${COVERAGE_SRCS_TMP})
|
||||
unset(COVERAGE_SRCS_TMP)
|
||||
|
||||
if (NOT GCOV_EXECUTABLE)
|
||||
message(FATAL_ERROR "gcov not found! Aborting...")
|
||||
endif()
|
||||
|
||||
find_package(Git)
|
||||
|
||||
set(JSON_REPO_TEMPLATE
|
||||
"{
|
||||
\"head\": {
|
||||
\"id\": \"\@GIT_COMMIT_HASH\@\",
|
||||
\"author_name\": \"\@GIT_AUTHOR_NAME\@\",
|
||||
\"author_email\": \"\@GIT_AUTHOR_EMAIL\@\",
|
||||
\"committer_name\": \"\@GIT_COMMITTER_NAME\@\",
|
||||
\"committer_email\": \"\@GIT_COMMITTER_EMAIL\@\",
|
||||
\"message\": \"\@GIT_COMMIT_MESSAGE\@\"
|
||||
},
|
||||
\"branch\": \"@GIT_BRANCH@\",
|
||||
\"remotes\": []
|
||||
}"
|
||||
)
|
||||
|
||||
# TODO: Fill in git remote data
|
||||
if (GIT_FOUND)
|
||||
# Branch.
|
||||
execute_process(
|
||||
COMMAND ${GIT_EXECUTABLE} rev-parse --abbrev-ref HEAD
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
OUTPUT_VARIABLE GIT_BRANCH
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
)
|
||||
|
||||
macro (git_log_format FORMAT_CHARS VAR_NAME)
|
||||
execute_process(
|
||||
COMMAND ${GIT_EXECUTABLE} log -1 --pretty=format:%${FORMAT_CHARS}
|
||||
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
|
||||
OUTPUT_VARIABLE ${VAR_NAME}
|
||||
OUTPUT_STRIP_TRAILING_WHITESPACE
|
||||
)
|
||||
endmacro()
|
||||
|
||||
git_log_format(an GIT_AUTHOR_NAME)
|
||||
git_log_format(ae GIT_AUTHOR_EMAIL)
|
||||
git_log_format(cn GIT_COMMITTER_NAME)
|
||||
git_log_format(ce GIT_COMMITTER_EMAIL)
|
||||
git_log_format(B GIT_COMMIT_MESSAGE)
|
||||
git_log_format(H GIT_COMMIT_HASH)
|
||||
|
||||
if(GIT_COMMIT_MESSAGE)
|
||||
string(REPLACE "\n" "\\n" GIT_COMMIT_MESSAGE ${GIT_COMMIT_MESSAGE})
|
||||
endif()
|
||||
|
||||
message("Git exe: ${GIT_EXECUTABLE}")
|
||||
message("Git branch: ${GIT_BRANCH}")
|
||||
message("Git author: ${GIT_AUTHOR_NAME}")
|
||||
message("Git e-mail: ${GIT_AUTHOR_EMAIL}")
|
||||
message("Git commiter name: ${GIT_COMMITTER_NAME}")
|
||||
message("Git commiter e-mail: ${GIT_COMMITTER_EMAIL}")
|
||||
message("Git commit hash: ${GIT_COMMIT_HASH}")
|
||||
message("Git commit message: ${GIT_COMMIT_MESSAGE}")
|
||||
|
||||
string(CONFIGURE ${JSON_REPO_TEMPLATE} JSON_REPO_DATA)
|
||||
else()
|
||||
set(JSON_REPO_DATA "{}")
|
||||
endif()
|
||||
|
||||
############################# Macros #########################################
|
||||
|
||||
#
|
||||
# This macro converts from the full path format gcov outputs:
|
||||
#
|
||||
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
|
||||
#
|
||||
# to the original source file path the .gcov is for:
|
||||
#
|
||||
# /path/to/project/root/subdir/the_file.c
|
||||
#
|
||||
macro(get_source_path_from_gcov_filename _SRC_FILENAME _GCOV_FILENAME)
|
||||
|
||||
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
|
||||
# ->
|
||||
# #path#to#project#root#subdir#the_file.c.gcov
|
||||
get_filename_component(_GCOV_FILENAME_WEXT ${_GCOV_FILENAME} NAME)
|
||||
|
||||
# #path#to#project#root#subdir#the_file.c.gcov -> /path/to/project/root/subdir/the_file.c
|
||||
string(REGEX REPLACE "\\.gcov$" "" SRC_FILENAME_TMP ${_GCOV_FILENAME_WEXT})
|
||||
string(REGEX REPLACE "\\^" ".." SRC_FILENAME_TMP ${SRC_FILENAME_TMP})
|
||||
string(REGEX REPLACE "\#" "/" SRC_FILENAME_TMP ${SRC_FILENAME_TMP})
|
||||
get_filename_component(SRC_FILENAME_TMP_ABSOLUTE ${SRC_FILENAME_TMP} ABSOLUTE)
|
||||
set(${_SRC_FILENAME} "${SRC_FILENAME_TMP_ABSOLUTE}")
|
||||
endmacro()
|
||||
|
||||
##############################################################################
|
||||
|
||||
# Get the coverage data.
|
||||
file(GLOB_RECURSE GCDA_FILES "${COV_PATH}/*.gcda")
|
||||
message("GCDA files:")
|
||||
|
||||
# Get a list of all the object directories needed by gcov
|
||||
# (The directories the .gcda files and .o files are found in)
|
||||
# and run gcov on those.
|
||||
foreach(GCDA ${GCDA_FILES})
|
||||
message("Process: ${GCDA}")
|
||||
message("------------------------------------------------------------------------------")
|
||||
get_filename_component(GCDA_DIR ${GCDA} PATH)
|
||||
|
||||
#
|
||||
# The -p below refers to "Preserve path components",
|
||||
# This means that the generated gcov filename of a source file will
|
||||
# keep the original files entire filepath, but / is replaced with #.
|
||||
# Example:
|
||||
#
|
||||
# /path/to/project/root/build/CMakeFiles/the_file.dir/subdir/the_file.c.gcda
|
||||
# ------------------------------------------------------------------------------
|
||||
# File '/path/to/project/root/subdir/the_file.c'
|
||||
# Lines executed:68.34% of 199
|
||||
# /path/to/project/root/subdir/the_file.c:creating '#path#to#project#root#subdir#the_file.c.gcov'
|
||||
#
|
||||
# If -p is not specified then the file is named only "the_file.c.gcov"
|
||||
#
|
||||
execute_process(
|
||||
COMMAND ${GCOV_EXECUTABLE} -p -o ${GCDA_DIR} ${GCDA}
|
||||
WORKING_DIRECTORY ${COV_PATH}
|
||||
)
|
||||
endforeach()
|
||||
|
||||
# TODO: Make these be absolute path
|
||||
file(GLOB ALL_GCOV_FILES ${COV_PATH}/*.gcov)
|
||||
|
||||
# Get only the filenames to use for filtering.
|
||||
#set(COVERAGE_SRCS_NAMES "")
|
||||
#foreach (COVSRC ${COVERAGE_SRCS})
|
||||
# get_filename_component(COVSRC_NAME ${COVSRC} NAME)
|
||||
# message("${COVSRC} -> ${COVSRC_NAME}")
|
||||
# list(APPEND COVERAGE_SRCS_NAMES "${COVSRC_NAME}")
|
||||
#endforeach()
|
||||
|
||||
#
|
||||
# Filter out all but the gcov files we want.
|
||||
#
|
||||
# We do this by comparing the list of COVERAGE_SRCS filepaths that the
|
||||
# user wants the coverage data for with the paths of the generated .gcov files,
|
||||
# so that we only keep the relevant gcov files.
|
||||
#
|
||||
# Example:
|
||||
# COVERAGE_SRCS =
|
||||
# /path/to/project/root/subdir/the_file.c
|
||||
#
|
||||
# ALL_GCOV_FILES =
|
||||
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
|
||||
# /path/to/project/root/build/#path#to#project#root#subdir#other_file.c.gcov
|
||||
#
|
||||
# Result should be:
|
||||
# GCOV_FILES =
|
||||
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
|
||||
#
|
||||
set(GCOV_FILES "")
|
||||
#message("Look in coverage sources: ${COVERAGE_SRCS}")
|
||||
message("\nFilter out unwanted GCOV files:")
|
||||
message("===============================")
|
||||
|
||||
set(COVERAGE_SRCS_REMAINING ${COVERAGE_SRCS})
|
||||
|
||||
foreach (GCOV_FILE ${ALL_GCOV_FILES})
|
||||
|
||||
#
|
||||
# /path/to/project/root/build/#path#to#project#root#subdir#the_file.c.gcov
|
||||
# ->
|
||||
# /path/to/project/root/subdir/the_file.c
|
||||
get_source_path_from_gcov_filename(GCOV_SRC_PATH ${GCOV_FILE})
|
||||
file(RELATIVE_PATH GCOV_SRC_REL_PATH "${PROJECT_ROOT}" "${GCOV_SRC_PATH}")
|
||||
|
||||
# Is this in the list of source files?
|
||||
# TODO: We want to match against relative path filenames from the source file root...
|
||||
list(FIND COVERAGE_SRCS ${GCOV_SRC_PATH} WAS_FOUND)
|
||||
|
||||
if (NOT WAS_FOUND EQUAL -1)
|
||||
message("YES: ${GCOV_FILE}")
|
||||
list(APPEND GCOV_FILES ${GCOV_FILE})
|
||||
|
||||
# We remove it from the list, so we don't bother searching for it again.
|
||||
# Also files left in COVERAGE_SRCS_REMAINING after this loop ends should
|
||||
# have coverage data generated from them (no lines are covered).
|
||||
list(REMOVE_ITEM COVERAGE_SRCS_REMAINING ${GCOV_SRC_PATH})
|
||||
else()
|
||||
message("NO: ${GCOV_FILE}")
|
||||
endif()
|
||||
endforeach()
|
||||
|
||||
# TODO: Enable setting these
|
||||
set(JSON_SERVICE_NAME "travis-ci")
|
||||
set(JSON_SERVICE_JOB_ID $ENV{TRAVIS_JOB_ID})
|
||||
set(JSON_REPO_TOKEN $ENV{COVERALLS_REPO_TOKEN})
|
||||
|
||||
set(JSON_TEMPLATE
|
||||
"{
|
||||
\"repo_token\": \"\@JSON_REPO_TOKEN\@\",
|
||||
\"service_name\": \"\@JSON_SERVICE_NAME\@\",
|
||||
\"service_job_id\": \"\@JSON_SERVICE_JOB_ID\@\",
|
||||
\"source_files\": \@JSON_GCOV_FILES\@,
|
||||
\"git\": \@JSON_REPO_DATA\@
|
||||
}"
|
||||
)
|
||||
|
||||
set(SRC_FILE_TEMPLATE
|
||||
"{
|
||||
\"name\": \"\@GCOV_SRC_REL_PATH\@\",
|
||||
\"source_digest\": \"\@GCOV_CONTENTS_MD5\@\",
|
||||
\"coverage\": \@GCOV_FILE_COVERAGE\@
|
||||
}"
|
||||
)
|
||||
|
||||
message("\nGenerate JSON for files:")
|
||||
message("=========================")
|
||||
|
||||
set(JSON_GCOV_FILES "[")
|
||||
|
||||
# Read the GCOV files line by line and get the coverage data.
|
||||
foreach (GCOV_FILE ${GCOV_FILES})
|
||||
|
||||
get_source_path_from_gcov_filename(GCOV_SRC_PATH ${GCOV_FILE})
|
||||
file(RELATIVE_PATH GCOV_SRC_REL_PATH "${PROJECT_ROOT}" "${GCOV_SRC_PATH}")
|
||||
|
||||
# The new coveralls API doesn't need the entire source (Yay!)
|
||||
# However, still keeping that part for now. Will cleanup in the future.
|
||||
file(MD5 "${GCOV_SRC_PATH}" GCOV_CONTENTS_MD5)
|
||||
message("MD5: ${GCOV_SRC_PATH} = ${GCOV_CONTENTS_MD5}")
|
||||
|
||||
# Loads the gcov file as a list of lines.
|
||||
# (We first open the file and replace all occurrences of [] with _
|
||||
# because CMake will fail to parse a line containing unmatched brackets...
|
||||
# also the \ to escaped \n in macros screws up things.)
|
||||
# https://public.kitware.com/Bug/view.php?id=15369
|
||||
file(READ ${GCOV_FILE} GCOV_CONTENTS)
|
||||
string(REPLACE "[" "_" GCOV_CONTENTS "${GCOV_CONTENTS}")
|
||||
string(REPLACE "]" "_" GCOV_CONTENTS "${GCOV_CONTENTS}")
|
||||
string(REPLACE "\\" "_" GCOV_CONTENTS "${GCOV_CONTENTS}")
|
||||
|
||||
# Remove file contents to avoid encoding issues (cmake 2.8 has no ENCODING option)
|
||||
string(REGEX REPLACE "([^:]*):([^:]*):([^\n]*)\n" "\\1:\\2: \n" GCOV_CONTENTS "${GCOV_CONTENTS}")
|
||||
file(WRITE ${GCOV_FILE}_tmp "${GCOV_CONTENTS}")
|
||||
|
||||
file(STRINGS ${GCOV_FILE}_tmp GCOV_LINES)
|
||||
list(LENGTH GCOV_LINES LINE_COUNT)
|
||||
|
||||
# Instead of trying to parse the source from the
|
||||
# gcov file, simply read the file contents from the source file.
|
||||
# (Parsing it from the gcov is hard because C-code uses ; in many places
|
||||
# which also happens to be the same as the CMake list delimiter).
|
||||
file(READ ${GCOV_SRC_PATH} GCOV_FILE_SOURCE)
|
||||
|
||||
string(REPLACE "\\" "\\\\" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
string(REGEX REPLACE "\"" "\\\\\"" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
string(REPLACE "\t" "\\\\t" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
string(REPLACE "\r" "\\\\r" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
string(REPLACE "\n" "\\\\n" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
# According to http://json.org/ these should be escaped as well.
|
||||
# Don't know how to do that in CMake however...
|
||||
#string(REPLACE "\b" "\\\\b" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
#string(REPLACE "\f" "\\\\f" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
#string(REGEX REPLACE "\u([a-fA-F0-9]{4})" "\\\\u\\1" GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}")
|
||||
|
||||
# We want a json array of coverage data as a single string
|
||||
# start building them from the contents of the .gcov
|
||||
set(GCOV_FILE_COVERAGE "[")
|
||||
|
||||
set(GCOV_LINE_COUNT 1) # Line number for the .gcov.
|
||||
set(DO_SKIP 0)
|
||||
foreach (GCOV_LINE ${GCOV_LINES})
|
||||
#message("${GCOV_LINE}")
|
||||
# Example of what we're parsing:
|
||||
# Hitcount |Line | Source
|
||||
# " 8: 26: if (!allowed || (strlen(allowed) == 0))"
|
||||
string(REGEX REPLACE
|
||||
"^([^:]*):([^:]*):(.*)$"
|
||||
"\\1;\\2;\\3"
|
||||
RES
|
||||
"${GCOV_LINE}")
|
||||
|
||||
# Check if we should exclude lines using the Lcov syntax.
|
||||
string(REGEX MATCH "LCOV_EXCL_START" START_SKIP "${GCOV_LINE}")
|
||||
string(REGEX MATCH "LCOV_EXCL_END" END_SKIP "${GCOV_LINE}")
|
||||
string(REGEX MATCH "LCOV_EXCL_LINE" LINE_SKIP "${GCOV_LINE}")
|
||||
|
||||
set(RESET_SKIP 0)
|
||||
if (LINE_SKIP AND NOT DO_SKIP)
|
||||
set(DO_SKIP 1)
|
||||
set(RESET_SKIP 1)
|
||||
endif()
|
||||
|
||||
if (START_SKIP)
|
||||
set(DO_SKIP 1)
|
||||
message("${GCOV_LINE_COUNT}: Start skip")
|
||||
endif()
|
||||
|
||||
if (END_SKIP)
|
||||
set(DO_SKIP 0)
|
||||
endif()
|
||||
|
||||
list(LENGTH RES RES_COUNT)
|
||||
|
||||
if (RES_COUNT GREATER 2)
|
||||
list(GET RES 0 HITCOUNT)
|
||||
list(GET RES 1 LINE)
|
||||
list(GET RES 2 SOURCE)
|
||||
|
||||
string(STRIP ${HITCOUNT} HITCOUNT)
|
||||
string(STRIP ${LINE} LINE)
|
||||
|
||||
# Lines with 0 line numbers are metadata and can be ignored.
|
||||
if (NOT ${LINE} EQUAL 0)
|
||||
|
||||
if (DO_SKIP)
|
||||
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}null, ")
|
||||
else()
|
||||
# Translate the hitcount into valid JSON values.
|
||||
if (${HITCOUNT} STREQUAL "#####" OR ${HITCOUNT} STREQUAL "=====")
|
||||
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}0, ")
|
||||
elseif (${HITCOUNT} STREQUAL "-")
|
||||
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}null, ")
|
||||
else()
|
||||
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}${HITCOUNT}, ")
|
||||
endif()
|
||||
endif()
|
||||
endif()
|
||||
else()
|
||||
message(WARNING "Failed to properly parse line (RES_COUNT = ${RES_COUNT}) ${GCOV_FILE}:${GCOV_LINE_COUNT}\n-->${GCOV_LINE}")
|
||||
endif()
|
||||
|
||||
if (RESET_SKIP)
|
||||
set(DO_SKIP 0)
|
||||
endif()
|
||||
math(EXPR GCOV_LINE_COUNT "${GCOV_LINE_COUNT}+1")
|
||||
endforeach()
|
||||
|
||||
message("${GCOV_LINE_COUNT} of ${LINE_COUNT} lines read!")
|
||||
|
||||
# Advanced way of removing the trailing comma in the JSON array.
|
||||
# "[1, 2, 3, " -> "[1, 2, 3"
|
||||
string(REGEX REPLACE ",[ ]*$" "" GCOV_FILE_COVERAGE ${GCOV_FILE_COVERAGE})
|
||||
|
||||
# Append the trailing ] to complete the JSON array.
|
||||
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}]")
|
||||
|
||||
# Generate the final JSON for this file.
|
||||
message("Generate JSON for file: ${GCOV_SRC_REL_PATH}...")
|
||||
string(CONFIGURE ${SRC_FILE_TEMPLATE} FILE_JSON)
|
||||
|
||||
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}${FILE_JSON}, ")
|
||||
endforeach()
|
||||
|
||||
# Loop through all files we couldn't find any coverage for
|
||||
# as well, and generate JSON for those as well with 0% coverage.
|
||||
foreach(NOT_COVERED_SRC ${COVERAGE_SRCS_REMAINING})
|
||||
|
||||
# Set variables for json replacement
|
||||
set(GCOV_SRC_PATH ${NOT_COVERED_SRC})
|
||||
file(MD5 "${GCOV_SRC_PATH}" GCOV_CONTENTS_MD5)
|
||||
file(RELATIVE_PATH GCOV_SRC_REL_PATH "${PROJECT_ROOT}" "${GCOV_SRC_PATH}")
|
||||
|
||||
# Loads the source file as a list of lines.
|
||||
file(STRINGS ${NOT_COVERED_SRC} SRC_LINES)
|
||||
|
||||
set(GCOV_FILE_COVERAGE "[")
|
||||
set(GCOV_FILE_SOURCE "")
|
||||
|
||||
foreach (SOURCE ${SRC_LINES})
|
||||
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}null, ")
|
||||
|
||||
string(REPLACE "\\" "\\\\" SOURCE "${SOURCE}")
|
||||
string(REGEX REPLACE "\"" "\\\\\"" SOURCE "${SOURCE}")
|
||||
string(REPLACE "\t" "\\\\t" SOURCE "${SOURCE}")
|
||||
string(REPLACE "\r" "\\\\r" SOURCE "${SOURCE}")
|
||||
set(GCOV_FILE_SOURCE "${GCOV_FILE_SOURCE}${SOURCE}\\n")
|
||||
endforeach()
|
||||
|
||||
# Remove trailing comma, and complete JSON array with ]
|
||||
string(REGEX REPLACE ",[ ]*$" "" GCOV_FILE_COVERAGE ${GCOV_FILE_COVERAGE})
|
||||
set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}]")
|
||||
|
||||
# Generate the final JSON for this file.
|
||||
message("Generate JSON for non-gcov file: ${NOT_COVERED_SRC}...")
|
||||
string(CONFIGURE ${SRC_FILE_TEMPLATE} FILE_JSON)
|
||||
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}${FILE_JSON}, ")
|
||||
endforeach()
|
||||
|
||||
# Get rid of trailing comma.
|
||||
string(REGEX REPLACE ",[ ]*$" "" JSON_GCOV_FILES ${JSON_GCOV_FILES})
|
||||
set(JSON_GCOV_FILES "${JSON_GCOV_FILES}]")
|
||||
|
||||
# Generate the final complete JSON!
|
||||
message("Generate final JSON...")
|
||||
string(CONFIGURE ${JSON_TEMPLATE} JSON)
|
||||
|
||||
file(WRITE "${COVERALLS_OUTPUT_FILE}" "${JSON}")
|
||||
message("###########################################################################")
|
||||
message("Generated coveralls JSON containing coverage data:")
|
||||
message("${COVERALLS_OUTPUT_FILE}")
|
||||
message("###########################################################################")
|
||||
@@ -1,540 +0,0 @@
|
||||
# Copyright (c) 2013-2017, Ruslan Baratov
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright notice, this
|
||||
# list of conditions and the following disclaimer.
|
||||
#
|
||||
# * Redistributions in binary form must reproduce the above copyright notice,
|
||||
# this list of conditions and the following disclaimer in the documentation
|
||||
# and/or other materials provided with the distribution.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# This is a gate file to Hunter package manager.
|
||||
# Include this file using `include` command and add package you need, example:
|
||||
#
|
||||
# cmake_minimum_required(VERSION 3.0)
|
||||
#
|
||||
# include("cmake/HunterGate.cmake")
|
||||
# HunterGate(
|
||||
# URL "https://github.com/path/to/hunter/archive.tar.gz"
|
||||
# SHA1 "798501e983f14b28b10cda16afa4de69eee1da1d"
|
||||
# )
|
||||
#
|
||||
# project(MyProject)
|
||||
#
|
||||
# hunter_add_package(Foo)
|
||||
# hunter_add_package(Boo COMPONENTS Bar Baz)
|
||||
#
|
||||
# Projects:
|
||||
# * https://github.com/hunter-packages/gate/
|
||||
# * https://github.com/ruslo/hunter
|
||||
|
||||
option(HUNTER_ENABLED "Enable Hunter package manager support" ON)
|
||||
if(HUNTER_ENABLED)
|
||||
if(CMAKE_VERSION VERSION_LESS "3.0")
|
||||
message(FATAL_ERROR "At least CMake version 3.0 required for hunter dependency management."
|
||||
" Update CMake or set HUNTER_ENABLED to OFF.")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
include(CMakeParseArguments) # cmake_parse_arguments
|
||||
|
||||
option(HUNTER_STATUS_PRINT "Print working status" ON)
|
||||
option(HUNTER_STATUS_DEBUG "Print a lot info" OFF)
|
||||
|
||||
set(HUNTER_WIKI "https://github.com/ruslo/hunter/wiki")
|
||||
|
||||
function(hunter_gate_status_print)
|
||||
foreach(print_message ${ARGV})
|
||||
if(HUNTER_STATUS_PRINT OR HUNTER_STATUS_DEBUG)
|
||||
message(STATUS "[hunter] ${print_message}")
|
||||
endif()
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
||||
function(hunter_gate_status_debug)
|
||||
foreach(print_message ${ARGV})
|
||||
if(HUNTER_STATUS_DEBUG)
|
||||
string(TIMESTAMP timestamp)
|
||||
message(STATUS "[hunter *** DEBUG *** ${timestamp}] ${print_message}")
|
||||
endif()
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
||||
function(hunter_gate_wiki wiki_page)
|
||||
message("------------------------------ WIKI -------------------------------")
|
||||
message(" ${HUNTER_WIKI}/${wiki_page}")
|
||||
message("-------------------------------------------------------------------")
|
||||
message("")
|
||||
message(FATAL_ERROR "")
|
||||
endfunction()
|
||||
|
||||
function(hunter_gate_internal_error)
|
||||
message("")
|
||||
foreach(print_message ${ARGV})
|
||||
message("[hunter ** INTERNAL **] ${print_message}")
|
||||
endforeach()
|
||||
message("[hunter ** INTERNAL **] [Directory:${CMAKE_CURRENT_LIST_DIR}]")
|
||||
message("")
|
||||
hunter_gate_wiki("error.internal")
|
||||
endfunction()
|
||||
|
||||
function(hunter_gate_fatal_error)
|
||||
cmake_parse_arguments(hunter "" "WIKI" "" "${ARGV}")
|
||||
string(COMPARE EQUAL "${hunter_WIKI}" "" have_no_wiki)
|
||||
if(have_no_wiki)
|
||||
hunter_gate_internal_error("Expected wiki")
|
||||
endif()
|
||||
message("")
|
||||
foreach(x ${hunter_UNPARSED_ARGUMENTS})
|
||||
message("[hunter ** FATAL ERROR **] ${x}")
|
||||
endforeach()
|
||||
message("[hunter ** FATAL ERROR **] [Directory:${CMAKE_CURRENT_LIST_DIR}]")
|
||||
message("")
|
||||
hunter_gate_wiki("${hunter_WIKI}")
|
||||
endfunction()
|
||||
|
||||
function(hunter_gate_user_error)
|
||||
hunter_gate_fatal_error(${ARGV} WIKI "error.incorrect.input.data")
|
||||
endfunction()
|
||||
|
||||
function(hunter_gate_self root version sha1 result)
|
||||
string(COMPARE EQUAL "${root}" "" is_bad)
|
||||
if(is_bad)
|
||||
hunter_gate_internal_error("root is empty")
|
||||
endif()
|
||||
|
||||
string(COMPARE EQUAL "${version}" "" is_bad)
|
||||
if(is_bad)
|
||||
hunter_gate_internal_error("version is empty")
|
||||
endif()
|
||||
|
||||
string(COMPARE EQUAL "${sha1}" "" is_bad)
|
||||
if(is_bad)
|
||||
hunter_gate_internal_error("sha1 is empty")
|
||||
endif()
|
||||
|
||||
string(SUBSTRING "${sha1}" 0 7 archive_id)
|
||||
|
||||
if(EXISTS "${root}/cmake/Hunter")
|
||||
set(hunter_self "${root}")
|
||||
else()
|
||||
set(
|
||||
hunter_self
|
||||
"${root}/_Base/Download/Hunter/${version}/${archive_id}/Unpacked"
|
||||
)
|
||||
endif()
|
||||
|
||||
set("${result}" "${hunter_self}" PARENT_SCOPE)
|
||||
endfunction()
|
||||
|
||||
# Set HUNTER_GATE_ROOT cmake variable to suitable value.
|
||||
function(hunter_gate_detect_root)
|
||||
# Check CMake variable
|
||||
string(COMPARE NOTEQUAL "${HUNTER_ROOT}" "" not_empty)
|
||||
if(not_empty)
|
||||
set(HUNTER_GATE_ROOT "${HUNTER_ROOT}" PARENT_SCOPE)
|
||||
hunter_gate_status_debug("HUNTER_ROOT detected by cmake variable")
|
||||
return()
|
||||
endif()
|
||||
|
||||
# Check environment variable
|
||||
string(COMPARE NOTEQUAL "$ENV{HUNTER_ROOT}" "" not_empty)
|
||||
if(not_empty)
|
||||
set(HUNTER_GATE_ROOT "$ENV{HUNTER_ROOT}" PARENT_SCOPE)
|
||||
hunter_gate_status_debug("HUNTER_ROOT detected by environment variable")
|
||||
return()
|
||||
endif()
|
||||
|
||||
# Check HOME environment variable
|
||||
string(COMPARE NOTEQUAL "$ENV{HOME}" "" result)
|
||||
if(result)
|
||||
set(HUNTER_GATE_ROOT "$ENV{HOME}/.hunter" PARENT_SCOPE)
|
||||
hunter_gate_status_debug("HUNTER_ROOT set using HOME environment variable")
|
||||
return()
|
||||
endif()
|
||||
|
||||
# Check SYSTEMDRIVE and USERPROFILE environment variable (windows only)
|
||||
if(WIN32)
|
||||
string(COMPARE NOTEQUAL "$ENV{SYSTEMDRIVE}" "" result)
|
||||
if(result)
|
||||
set(HUNTER_GATE_ROOT "$ENV{SYSTEMDRIVE}/.hunter" PARENT_SCOPE)
|
||||
hunter_gate_status_debug(
|
||||
"HUNTER_ROOT set using SYSTEMDRIVE environment variable"
|
||||
)
|
||||
return()
|
||||
endif()
|
||||
|
||||
string(COMPARE NOTEQUAL "$ENV{USERPROFILE}" "" result)
|
||||
if(result)
|
||||
set(HUNTER_GATE_ROOT "$ENV{USERPROFILE}/.hunter" PARENT_SCOPE)
|
||||
hunter_gate_status_debug(
|
||||
"HUNTER_ROOT set using USERPROFILE environment variable"
|
||||
)
|
||||
return()
|
||||
endif()
|
||||
endif()
|
||||
|
||||
hunter_gate_fatal_error(
|
||||
"Can't detect HUNTER_ROOT"
|
||||
WIKI "error.detect.hunter.root"
|
||||
)
|
||||
endfunction()
|
||||
|
||||
macro(hunter_gate_lock dir)
|
||||
if(NOT HUNTER_SKIP_LOCK)
|
||||
if("${CMAKE_VERSION}" VERSION_LESS "3.2")
|
||||
hunter_gate_fatal_error(
|
||||
"Can't lock, upgrade to CMake 3.2 or use HUNTER_SKIP_LOCK"
|
||||
WIKI "error.can.not.lock"
|
||||
)
|
||||
endif()
|
||||
hunter_gate_status_debug("Locking directory: ${dir}")
|
||||
file(LOCK "${dir}" DIRECTORY GUARD FUNCTION)
|
||||
hunter_gate_status_debug("Lock done")
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
function(hunter_gate_download dir)
|
||||
string(
|
||||
COMPARE
|
||||
NOTEQUAL
|
||||
"$ENV{HUNTER_DISABLE_AUTOINSTALL}"
|
||||
""
|
||||
disable_autoinstall
|
||||
)
|
||||
if(disable_autoinstall AND NOT HUNTER_RUN_INSTALL)
|
||||
hunter_gate_fatal_error(
|
||||
"Hunter not found in '${dir}'"
|
||||
"Set HUNTER_RUN_INSTALL=ON to auto-install it from '${HUNTER_GATE_URL}'"
|
||||
"Settings:"
|
||||
" HUNTER_ROOT: ${HUNTER_GATE_ROOT}"
|
||||
" HUNTER_SHA1: ${HUNTER_GATE_SHA1}"
|
||||
WIKI "error.run.install"
|
||||
)
|
||||
endif()
|
||||
string(COMPARE EQUAL "${dir}" "" is_bad)
|
||||
if(is_bad)
|
||||
hunter_gate_internal_error("Empty 'dir' argument")
|
||||
endif()
|
||||
|
||||
string(COMPARE EQUAL "${HUNTER_GATE_SHA1}" "" is_bad)
|
||||
if(is_bad)
|
||||
hunter_gate_internal_error("HUNTER_GATE_SHA1 empty")
|
||||
endif()
|
||||
|
||||
string(COMPARE EQUAL "${HUNTER_GATE_URL}" "" is_bad)
|
||||
if(is_bad)
|
||||
hunter_gate_internal_error("HUNTER_GATE_URL empty")
|
||||
endif()
|
||||
|
||||
set(done_location "${dir}/DONE")
|
||||
set(sha1_location "${dir}/SHA1")
|
||||
|
||||
set(build_dir "${dir}/Build")
|
||||
set(cmakelists "${dir}/CMakeLists.txt")
|
||||
|
||||
hunter_gate_lock("${dir}")
|
||||
if(EXISTS "${done_location}")
|
||||
# while waiting for the lock, another instance may have already done the job
|
||||
hunter_gate_status_debug("File '${done_location}' found, skip install")
|
||||
return()
|
||||
endif()
|
||||
|
||||
file(REMOVE_RECURSE "${build_dir}")
|
||||
file(REMOVE_RECURSE "${cmakelists}")
|
||||
|
||||
file(MAKE_DIRECTORY "${build_dir}") # check directory permissions
|
||||
|
||||
# Disabling languages speeds configuration up a little, reduces noise in the output
# and avoids the "path too long" error on Windows
|
||||
file(
|
||||
WRITE
|
||||
"${cmakelists}"
|
||||
"cmake_minimum_required(VERSION 3.0)\n"
|
||||
"project(HunterDownload LANGUAGES NONE)\n"
|
||||
"include(ExternalProject)\n"
|
||||
"ExternalProject_Add(\n"
|
||||
" Hunter\n"
|
||||
" URL\n"
|
||||
" \"${HUNTER_GATE_URL}\"\n"
|
||||
" URL_HASH\n"
|
||||
" SHA1=${HUNTER_GATE_SHA1}\n"
|
||||
" DOWNLOAD_DIR\n"
|
||||
" \"${dir}\"\n"
|
||||
" SOURCE_DIR\n"
|
||||
" \"${dir}/Unpacked\"\n"
|
||||
" CONFIGURE_COMMAND\n"
|
||||
" \"\"\n"
|
||||
" BUILD_COMMAND\n"
|
||||
" \"\"\n"
|
||||
" INSTALL_COMMAND\n"
|
||||
" \"\"\n"
|
||||
")\n"
|
||||
)
|
||||
|
||||
if(HUNTER_STATUS_DEBUG)
|
||||
set(logging_params "")
|
||||
else()
|
||||
set(logging_params OUTPUT_QUIET)
|
||||
endif()
|
||||
|
||||
hunter_gate_status_debug("Run generate")
|
||||
|
||||
# Need to add toolchain file too.
|
||||
# Otherwise on Visual Studio + MDD this will fail with error:
|
||||
# "Could not find an appropriate version of the Windows 10 SDK installed on this machine"
|
||||
if(EXISTS "${CMAKE_TOOLCHAIN_FILE}")
|
||||
get_filename_component(absolute_CMAKE_TOOLCHAIN_FILE "${CMAKE_TOOLCHAIN_FILE}" ABSOLUTE)
|
||||
set(toolchain_arg "-DCMAKE_TOOLCHAIN_FILE=${absolute_CMAKE_TOOLCHAIN_FILE}")
|
||||
else()
|
||||
# 'toolchain_arg' can't be empty
|
||||
set(toolchain_arg "-DCMAKE_TOOLCHAIN_FILE=")
|
||||
endif()
|
||||
|
||||
string(COMPARE EQUAL "${CMAKE_MAKE_PROGRAM}" "" no_make)
|
||||
if(no_make)
|
||||
set(make_arg "")
|
||||
else()
|
||||
# Test case: remove Ninja from PATH but set it via CMAKE_MAKE_PROGRAM
|
||||
set(make_arg "-DCMAKE_MAKE_PROGRAM=${CMAKE_MAKE_PROGRAM}")
|
||||
endif()
|
||||
|
||||
execute_process(
|
||||
COMMAND
|
||||
"${CMAKE_COMMAND}"
|
||||
"-H${dir}"
|
||||
"-B${build_dir}"
|
||||
"-G${CMAKE_GENERATOR}"
|
||||
"${toolchain_arg}"
|
||||
${make_arg}
|
||||
WORKING_DIRECTORY "${dir}"
|
||||
RESULT_VARIABLE download_result
|
||||
${logging_params}
|
||||
)
|
||||
|
||||
if(NOT download_result EQUAL 0)
|
||||
hunter_gate_internal_error("Configure project failed")
|
||||
endif()
|
||||
|
||||
hunter_gate_status_print(
|
||||
"Initializing Hunter workspace (${HUNTER_GATE_SHA1})"
|
||||
" ${HUNTER_GATE_URL}"
|
||||
" -> ${dir}"
|
||||
)
|
||||
execute_process(
|
||||
COMMAND "${CMAKE_COMMAND}" --build "${build_dir}"
|
||||
WORKING_DIRECTORY "${dir}"
|
||||
RESULT_VARIABLE download_result
|
||||
${logging_params}
|
||||
)
|
||||
|
||||
if(NOT download_result EQUAL 0)
|
||||
hunter_gate_internal_error("Build project failed")
|
||||
endif()
|
||||
|
||||
file(REMOVE_RECURSE "${build_dir}")
|
||||
file(REMOVE_RECURSE "${cmakelists}")
|
||||
|
||||
file(WRITE "${sha1_location}" "${HUNTER_GATE_SHA1}")
|
||||
file(WRITE "${done_location}" "DONE")
|
||||
|
||||
hunter_gate_status_debug("Finished")
|
||||
endfunction()
|
||||
|
||||
# Must be a macro so the master file 'cmake/Hunter' can
# apply all variables easily just by the 'include' command
# (otherwise PARENT_SCOPE magic would be needed)
|
||||
macro(HunterGate)
|
||||
if(HUNTER_GATE_DONE)
|
||||
# variable HUNTER_GATE_DONE set explicitly for external project
|
||||
# (see `hunter_download`)
|
||||
set_property(GLOBAL PROPERTY HUNTER_GATE_DONE YES)
|
||||
endif()
|
||||
|
||||
# The first HunterGate command will initialize Hunter; subsequent calls are ignored
|
||||
get_property(_hunter_gate_done GLOBAL PROPERTY HUNTER_GATE_DONE SET)
|
||||
|
||||
if(NOT HUNTER_ENABLED)
|
||||
# Empty function to avoid error "unknown function"
|
||||
function(hunter_add_package)
|
||||
endfunction()
|
||||
|
||||
set(
|
||||
_hunter_gate_disabled_mode_dir
|
||||
"${CMAKE_CURRENT_LIST_DIR}/cmake/Hunter/disabled-mode"
|
||||
)
|
||||
if(EXISTS "${_hunter_gate_disabled_mode_dir}")
|
||||
hunter_gate_status_debug(
|
||||
"Adding \"disabled-mode\" modules: ${_hunter_gate_disabled_mode_dir}"
|
||||
)
|
||||
list(APPEND CMAKE_PREFIX_PATH "${_hunter_gate_disabled_mode_dir}")
|
||||
endif()
|
||||
elseif(_hunter_gate_done)
|
||||
hunter_gate_status_debug("Secondary HunterGate (use old settings)")
|
||||
hunter_gate_self(
|
||||
"${HUNTER_CACHED_ROOT}"
|
||||
"${HUNTER_VERSION}"
|
||||
"${HUNTER_SHA1}"
|
||||
_hunter_self
|
||||
)
|
||||
include("${_hunter_self}/cmake/Hunter")
|
||||
else()
|
||||
set(HUNTER_GATE_LOCATION "${CMAKE_CURRENT_LIST_DIR}")
|
||||
|
||||
string(COMPARE NOTEQUAL "${PROJECT_NAME}" "" _have_project_name)
|
||||
if(_have_project_name)
|
||||
hunter_gate_fatal_error(
|
||||
"Please set HunterGate *before* 'project' command. "
|
||||
"Detected project: ${PROJECT_NAME}"
|
||||
WIKI "error.huntergate.before.project"
|
||||
)
|
||||
endif()
|
||||
|
||||
cmake_parse_arguments(
|
||||
HUNTER_GATE "LOCAL" "URL;SHA1;GLOBAL;FILEPATH" "" ${ARGV}
|
||||
)
|
||||
|
||||
string(COMPARE EQUAL "${HUNTER_GATE_SHA1}" "" _empty_sha1)
|
||||
string(COMPARE EQUAL "${HUNTER_GATE_URL}" "" _empty_url)
|
||||
string(
|
||||
COMPARE
|
||||
NOTEQUAL
|
||||
"${HUNTER_GATE_UNPARSED_ARGUMENTS}"
|
||||
""
|
||||
_have_unparsed
|
||||
)
|
||||
string(COMPARE NOTEQUAL "${HUNTER_GATE_GLOBAL}" "" _have_global)
|
||||
string(COMPARE NOTEQUAL "${HUNTER_GATE_FILEPATH}" "" _have_filepath)
|
||||
|
||||
if(_have_unparsed)
|
||||
hunter_gate_user_error(
|
||||
"HunterGate unparsed arguments: ${HUNTER_GATE_UNPARSED_ARGUMENTS}"
|
||||
)
|
||||
endif()
|
||||
if(_empty_sha1)
|
||||
hunter_gate_user_error("SHA1 suboption of HunterGate is mandatory")
|
||||
endif()
|
||||
if(_empty_url)
|
||||
hunter_gate_user_error("URL suboption of HunterGate is mandatory")
|
||||
endif()
|
||||
if(_have_global)
|
||||
if(HUNTER_GATE_LOCAL)
|
||||
hunter_gate_user_error("Unexpected LOCAL (already has GLOBAL)")
|
||||
endif()
|
||||
if(_have_filepath)
|
||||
hunter_gate_user_error("Unexpected FILEPATH (already has GLOBAL)")
|
||||
endif()
|
||||
endif()
|
||||
if(HUNTER_GATE_LOCAL)
|
||||
if(_have_global)
|
||||
hunter_gate_user_error("Unexpected GLOBAL (already has LOCAL)")
|
||||
endif()
|
||||
if(_have_filepath)
|
||||
hunter_gate_user_error("Unexpected FILEPATH (already has LOCAL)")
|
||||
endif()
|
||||
endif()
|
||||
if(_have_filepath)
|
||||
if(_have_global)
|
||||
hunter_gate_user_error("Unexpected GLOBAL (already has FILEPATH)")
|
||||
endif()
|
||||
if(HUNTER_GATE_LOCAL)
|
||||
hunter_gate_user_error("Unexpected LOCAL (already has FILEPATH)")
|
||||
endif()
|
||||
endif()
|
||||
|
||||
hunter_gate_detect_root() # set HUNTER_GATE_ROOT
|
||||
|
||||
# Beautify path, fix probable problems with Windows path slashes
|
||||
get_filename_component(
|
||||
HUNTER_GATE_ROOT "${HUNTER_GATE_ROOT}" ABSOLUTE
|
||||
)
|
||||
hunter_gate_status_debug("HUNTER_ROOT: ${HUNTER_GATE_ROOT}")
|
||||
if(NOT HUNTER_ALLOW_SPACES_IN_PATH)
|
||||
string(FIND "${HUNTER_GATE_ROOT}" " " _contain_spaces)
|
||||
if(NOT _contain_spaces EQUAL -1)
|
||||
hunter_gate_fatal_error(
|
||||
"HUNTER_ROOT (${HUNTER_GATE_ROOT}) contains spaces."
|
||||
"Set HUNTER_ALLOW_SPACES_IN_PATH=ON to skip this error"
|
||||
"(Use at your own risk!)"
|
||||
WIKI "error.spaces.in.hunter.root"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
|
||||
string(
|
||||
REGEX
|
||||
MATCH
|
||||
"[0-9]+\\.[0-9]+\\.[0-9]+[-_a-z0-9]*"
|
||||
HUNTER_GATE_VERSION
|
||||
"${HUNTER_GATE_URL}"
|
||||
)
|
||||
string(COMPARE EQUAL "${HUNTER_GATE_VERSION}" "" _is_empty)
|
||||
if(_is_empty)
|
||||
set(HUNTER_GATE_VERSION "unknown")
|
||||
endif()
|
||||
|
||||
hunter_gate_self(
|
||||
"${HUNTER_GATE_ROOT}"
|
||||
"${HUNTER_GATE_VERSION}"
|
||||
"${HUNTER_GATE_SHA1}"
|
||||
_hunter_self
|
||||
)
|
||||
|
||||
set(_master_location "${_hunter_self}/cmake/Hunter")
|
||||
if(EXISTS "${HUNTER_GATE_ROOT}/cmake/Hunter")
|
||||
# Hunter downloaded manually (e.g. by 'git clone')
|
||||
set(_unused "xxxxxxxxxx")
|
||||
set(HUNTER_GATE_SHA1 "${_unused}")
|
||||
set(HUNTER_GATE_VERSION "${_unused}")
|
||||
else()
|
||||
get_filename_component(_archive_id_location "${_hunter_self}/.." ABSOLUTE)
|
||||
set(_done_location "${_archive_id_location}/DONE")
|
||||
set(_sha1_location "${_archive_id_location}/SHA1")
|
||||
|
||||
# Check whether Hunter was already downloaded by HunterGate
|
||||
if(NOT EXISTS "${_done_location}")
|
||||
hunter_gate_download("${_archive_id_location}")
|
||||
endif()
|
||||
|
||||
if(NOT EXISTS "${_done_location}")
|
||||
hunter_gate_internal_error("hunter_gate_download failed")
|
||||
endif()
|
||||
|
||||
if(NOT EXISTS "${_sha1_location}")
|
||||
hunter_gate_internal_error("${_sha1_location} not found")
|
||||
endif()
|
||||
file(READ "${_sha1_location}" _sha1_value)
|
||||
string(COMPARE EQUAL "${_sha1_value}" "${HUNTER_GATE_SHA1}" _is_equal)
|
||||
if(NOT _is_equal)
|
||||
hunter_gate_internal_error(
|
||||
"Short SHA1 collision:"
|
||||
" ${_sha1_value} (from ${_sha1_location})"
|
||||
" ${HUNTER_GATE_SHA1} (HunterGate)"
|
||||
)
|
||||
endif()
|
||||
if(NOT EXISTS "${_master_location}")
|
||||
hunter_gate_user_error(
|
||||
"Master file not found:"
|
||||
" ${_master_location}"
|
||||
"try to update Hunter/HunterGate"
|
||||
)
|
||||
endif()
|
||||
endif()
|
||||
include("${_master_location}")
|
||||
set_property(GLOBAL PROPERTY HUNTER_GATE_DONE YES)
|
||||
endif()
|
||||
endmacro()
|
||||
2483  doc/Doxyfile  Normal file
File diff suppressed because it is too large
@@ -1,130 +0,0 @@
|
||||
#ifndef PANTOR_INJA_ENVIRONMENT_HPP
|
||||
#define PANTOR_INJA_ENVIRONMENT_HPP
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
|
||||
#include <regex.hpp>
|
||||
#include <parser.hpp>
|
||||
#include <renderer.hpp>
|
||||
#include <template.hpp>
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
using json = nlohmann::json;
|
||||
|
||||
|
||||
/*!
|
||||
@brief Environment class
|
||||
*/
|
||||
class Environment {
|
||||
const std::string input_path;
|
||||
const std::string output_path;
|
||||
|
||||
Parser parser;
|
||||
Renderer renderer;
|
||||
|
||||
public:
|
||||
Environment(): Environment("./") { }
|
||||
explicit Environment(const std::string& global_path): input_path(global_path), output_path(global_path), parser() { }
|
||||
explicit Environment(const std::string& input_path, const std::string& output_path): input_path(input_path), output_path(output_path), parser() { }
|
||||
|
||||
void set_statement(const std::string& open, const std::string& close) {
|
||||
parser.regex_map_delimiters[Parsed::Delimiter::Statement] = Regex{open + "\\s*(.+?)\\s*" + close};
|
||||
}
|
||||
|
||||
void set_line_statement(const std::string& open) {
|
||||
parser.regex_map_delimiters[Parsed::Delimiter::LineStatement] = Regex{"(?:^|\\n)" + open + " *(.+?) *(?:\\n|$)"};
|
||||
}
|
||||
|
||||
void set_expression(const std::string& open, const std::string& close) {
|
||||
parser.regex_map_delimiters[Parsed::Delimiter::Expression] = Regex{open + "\\s*(.+?)\\s*" + close};
|
||||
}
|
||||
|
||||
void set_comment(const std::string& open, const std::string& close) {
|
||||
parser.regex_map_delimiters[Parsed::Delimiter::Comment] = Regex{open + "\\s*(.+?)\\s*" + close};
|
||||
}
|
||||
|
||||
void set_element_notation(const ElementNotation element_notation_) {
|
||||
parser.element_notation = element_notation_;
|
||||
}
|
||||
|
||||
Template parse(const std::string& input) {
|
||||
return parser.parse(input);
|
||||
}
|
||||
|
||||
Template parse_template(const std::string& filename) {
|
||||
return parser.parse_template(input_path + filename);
|
||||
}
|
||||
|
||||
std::string render(const std::string& input, const json& data) {
|
||||
return renderer.render(parse(input), data);
|
||||
}
|
||||
|
||||
std::string render_template(const Template& temp, const json& data) {
|
||||
return renderer.render(temp, data);
|
||||
}
|
||||
|
||||
std::string render_file(const std::string& filename, const json& data) {
|
||||
return renderer.render(parse_template(filename), data);
|
||||
}
|
||||
|
||||
std::string render_file_with_json_file(const std::string& filename, const std::string& filename_data) {
|
||||
const json data = load_json(filename_data);
|
||||
return render_file(filename, data);
|
||||
}
|
||||
|
||||
void write(const std::string& filename, const json& data, const std::string& filename_out) {
|
||||
std::ofstream file(output_path + filename_out);
|
||||
file << render_file(filename, data);
|
||||
file.close();
|
||||
}
|
||||
|
||||
void write(const Template& temp, const json& data, const std::string& filename_out) {
|
||||
std::ofstream file(output_path + filename_out);
|
||||
file << render_template(temp, data);
|
||||
file.close();
|
||||
}
|
||||
|
||||
void write_with_json_file(const std::string& filename, const std::string& filename_data, const std::string& filename_out) {
|
||||
const json data = load_json(filename_data);
|
||||
write(filename, data, filename_out);
|
||||
}
|
||||
|
||||
void write_with_json_file(const Template& temp, const std::string& filename_data, const std::string& filename_out) {
|
||||
const json data = load_json(filename_data);
|
||||
write(temp, data, filename_out);
|
||||
}
|
||||
|
||||
std::string load_global_file(const std::string& filename) {
|
||||
return parser.load_file(input_path + filename);
|
||||
}
|
||||
|
||||
json load_json(const std::string& filename) {
|
||||
std::ifstream file(input_path + filename);
|
||||
json j;
|
||||
file >> j;
|
||||
return j;
|
||||
}
|
||||
|
||||
void add_callback(std::string name, int number_arguments, const std::function<json(const Parsed::Arguments&, const json&)>& callback) {
|
||||
const Parsed::CallbackSignature signature = std::make_pair(name, number_arguments);
|
||||
parser.regex_map_callbacks[signature] = Parser::function_regex(name, number_arguments);
|
||||
renderer.map_callbacks[signature] = callback;
|
||||
}
|
||||
|
||||
void include_template(std::string name, const Template& temp) {
|
||||
parser.included_templates[name] = temp;
|
||||
}
|
||||
|
||||
template<typename T = json>
|
||||
T get_argument(const Parsed::Arguments& args, int index, const json& data) {
|
||||
return renderer.eval_expression<T>(args[index], data);
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_ENVIRONMENT_HPP
|
||||
@@ -1,18 +0,0 @@
#ifndef PANTOR_INJA_ERROR_HPP
#define PANTOR_INJA_ERROR_HPP

#include <string>


namespace inja {

/*!
@brief throw an error with a given message
*/
inline void inja_throw(const std::string& type, const std::string& message) {
  throw std::runtime_error("[inja.exception." + type + "] " + message);
}

}

#endif // PANTOR_INJA_ERROR_HPP
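A quick illustration of how these errors surface to callers (a minimal sketch, not part of this commit): inja_throw wraps every failure in a std::runtime_error whose message starts with "[inja.exception.<type>]", so user code can catch template problems generically:

#include <inja/inja.hpp>   // v2 single include added by this commit
#include <iostream>

int main() {
  inja::Environment env;
  try {
    env.render("{% if x %}unclosed", nlohmann::json{});   // missing endif
  } catch (const std::runtime_error& e) {
    std::cerr << e.what() << "\n";   // prints "[inja.exception.parser_error] unmatched if"
  }
}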
@@ -1,49 +0,0 @@
/*
Inja - A Template Engine for Modern C++
version 1.1.0
https://github.com/pantor/inja

Licensed under the MIT License <https://opensource.org/licenses/MIT>.
Copyright (c) 2017-2018 Pantor <https://github.com/pantor>.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/

#ifndef PANTOR_INJA_HPP
#define PANTOR_INJA_HPP

#define PANTOR_INJA_VERSION_MAJOR 1
#define PANTOR_INJA_VERSION_MINOR 1
#define PANTOR_INJA_VERSION_PATCH 0


#include <nlohmann/json.hpp>


#include "error.hpp"
#include "regex.hpp"
#include "parsed.hpp"
#include "template.hpp"
#include "renderer.hpp"
#include "parser.hpp"
#include "environment.hpp"
#include "utils.hpp"


#endif // PANTOR_INJA_HPP
129  include/inja/bytecode.hpp  Normal file
@@ -0,0 +1,129 @@
#ifndef PANTOR_INJA_BYTECODE_HPP
#define PANTOR_INJA_BYTECODE_HPP

#include <string_view>
#include <utility>

#include <nlohmann/json.hpp>


namespace inja {

using namespace nlohmann;


struct Bytecode {
  enum class Op : uint8_t {
    Nop,
    // print StringRef (always immediate)
    PrintText,
    // print value
    PrintValue,
    // push value onto stack (always immediate)
    Push,

    // builtin functions
    // result is pushed to stack
    // args specify number of arguments
    // all functions can take their "last" argument either immediate
    // or popped off stack (e.g. if immediate, it's like the immediate was
    // just pushed to the stack)
    Not,
    And,
    Or,
    In,
    Equal,
    Greater,
    GreaterEqual,
    Less,
    LessEqual,
    Different,
    DivisibleBy,
    Even,
    First,
    Float,
    Int,
    Last,
    Length,
    Lower,
    Max,
    Min,
    Odd,
    Range,
    Result,
    Round,
    Sort,
    Upper,
    Exists,
    ExistsInObject,
    IsBoolean,
    IsNumber,
    IsInteger,
    IsFloat,
    IsObject,
    IsArray,
    IsString,
    Default,

    // include another template
    // value is the template name
    Include,

    // callback function
    // str is the function name (this means it cannot be a lookup)
    // args specify number of arguments
    // as with builtin functions, "last" argument can be immediate
    Callback,

    // unconditional jump
    // args is the index of the bytecode to jump to.
    Jump,

    // conditional jump
    // value popped off stack is checked for truthiness
    // if false, args is the index of the bytecode to jump to.
    // if true, no action is taken (falls through)
    ConditionalJump,

    // start loop
    // value popped off stack is what is iterated over
    // args is index of bytecode after end loop (jumped to if iterable is
    // empty)
    // immediate value is key name (for maps)
    // str is value name
    StartLoop,

    // end a loop
    // args is index of the first bytecode in the loop body
    EndLoop,
  };

  enum Flag {
    // location of value for value-taking ops (mask)
    ValueMask = 0x03,
    // pop value off stack
    ValuePop = 0x00,
    // value is immediate rather than on stack
    ValueImmediate = 0x01,
    // lookup immediate str (dot notation)
    ValueLookupDot = 0x02,
    // lookup immediate str (json pointer notation)
    ValueLookupPointer = 0x03,
  };

  Op op {Op::Nop};
  uint32_t args: 30;
  uint32_t flags: 2;

  json value;
  std::string_view str;

  Bytecode(): args(0), flags(0) {}
  explicit Bytecode(Op op, unsigned int args = 0): op(op), args(args), flags(0) {}
  explicit Bytecode(Op op, std::string_view str, unsigned int flags): op(op), args(0), flags(flags), str(str) {}
  explicit Bytecode(Op op, json&& value, unsigned int flags): op(op), args(0), flags(flags), value(std::move(value)) {}
};

} // namespace inja

#endif // PANTOR_INJA_BYTECODE_HPP
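The three explicit constructors mirror the three payload kinds an instruction can carry: an op with an argument count, an op with an immediate string plus flags, and an op with an immediate json value. A minimal sketch of building such instructions by hand (illustration only, not code from this commit; the parser may additionally merge a Push into the following op, see append_function in parser.hpp):

#include <vector>
#include <inja/bytecode.hpp>   // assumes the include/ directory is on the include path

int main() {
  std::vector<inja::Bytecode> program;
  program.emplace_back(inja::Bytecode::Op::PrintText, std::string_view("Hello "), 0u);                             // immediate text
  program.emplace_back(inja::Bytecode::Op::Push, std::string_view("name"), inja::Bytecode::Flag::ValueLookupDot);  // look up "name" in the data
  program.emplace_back(inja::Bytecode::Op::PrintValue, 1u);                                                        // print the value on the stack
}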
46  include/inja/config.hpp  Normal file
@@ -0,0 +1,46 @@
#ifndef PANTOR_INJA_CONFIG_HPP
#define PANTOR_INJA_CONFIG_HPP

#include <functional>
#include <string>
#include <string_view>


namespace inja {

enum class ElementNotation {
  Dot,
  Pointer
};

struct LexerConfig {
  std::string statement_open {"{%"};
  std::string statement_close {"%}"};
  std::string line_statement {"##"};
  std::string expression_open {"{{"};
  std::string expression_close {"}}"};
  std::string comment_open {"{#"};
  std::string comment_close {"#}"};
  std::string open_chars {"#{"};

  void update_open_chars() {
    open_chars = "\n";
    if (open_chars.find(statement_open[0]) == std::string::npos) {
      open_chars += statement_open[0];
    }
    if (open_chars.find(expression_open[0]) == std::string::npos) {
      open_chars += expression_open[0];
    }
    if (open_chars.find(comment_open[0]) == std::string::npos) {
      open_chars += comment_open[0];
    }
  }
};

struct ParserConfig {
  ElementNotation notation {ElementNotation::Dot};
};

}

#endif // PANTOR_INJA_CONFIG_HPP
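update_open_chars() rebuilds the set of characters the lexer fast-scans for: a newline plus the first character of the statement, expression and comment openers. The Environment setters below call it after every delimiter change; doing the same by hand looks roughly like this (a small sketch, not from the diff):

#include <inja/inja.hpp>

int main() {
  inja::LexerConfig config;      // defaults: {% %}, {{ }}, {# #}, line statements with ##
  config.expression_open = "<%=";
  config.expression_close = "%>";
  config.update_open_chars();    // open_chars is now "\n{<" ('{' from "{%"/"{#", '<' from "<%=")
}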
175  include/inja/environment.hpp  Normal file
@@ -0,0 +1,175 @@
|
||||
#ifndef PANTOR_INJA_ENVIRONMENT_HPP
|
||||
#define PANTOR_INJA_ENVIRONMENT_HPP
|
||||
|
||||
#include <memory>
|
||||
#include <fstream>
|
||||
#include <sstream>
|
||||
#include <string>
|
||||
#include <string_view>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
#include "config.hpp"
|
||||
#include "function_storage.hpp"
|
||||
#include "parser.hpp"
|
||||
#include "polyfill.hpp"
|
||||
#include "renderer.hpp"
|
||||
#include "template.hpp"
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
using namespace nlohmann;
|
||||
|
||||
class Environment {
|
||||
class Impl {
|
||||
public:
|
||||
std::string input_path;
|
||||
std::string output_path;
|
||||
|
||||
LexerConfig lexer_config;
|
||||
ParserConfig parser_config;
|
||||
|
||||
FunctionStorage callbacks;
|
||||
TemplateStorage included_templates;
|
||||
};
|
||||
|
||||
std::unique_ptr<Impl> m_impl;
|
||||
|
||||
public:
|
||||
Environment(): Environment("./") { }
|
||||
|
||||
explicit Environment(const std::string& global_path): m_impl(stdinja::make_unique<Impl>()) {
|
||||
m_impl->input_path = global_path;
|
||||
m_impl->output_path = global_path;
|
||||
}
|
||||
|
||||
explicit Environment(const std::string& input_path, const std::string& output_path): m_impl(stdinja::make_unique<Impl>()) {
|
||||
m_impl->input_path = input_path;
|
||||
m_impl->output_path = output_path;
|
||||
}
|
||||
|
||||
/// Sets the opener and closer for template statements
|
||||
void set_statement(const std::string& open, const std::string& close) {
|
||||
m_impl->lexer_config.statement_open = open;
|
||||
m_impl->lexer_config.statement_close = close;
|
||||
m_impl->lexer_config.update_open_chars();
|
||||
}
|
||||
|
||||
/// Sets the opener for template line statements
|
||||
void set_line_statement(const std::string& open) {
|
||||
m_impl->lexer_config.line_statement = open;
|
||||
m_impl->lexer_config.update_open_chars();
|
||||
}
|
||||
|
||||
/// Sets the opener and closer for template expressions
|
||||
void set_expression(const std::string& open, const std::string& close) {
|
||||
m_impl->lexer_config.expression_open = open;
|
||||
m_impl->lexer_config.expression_close = close;
|
||||
m_impl->lexer_config.update_open_chars();
|
||||
}
|
||||
|
||||
/// Sets the opener and closer for template comments
|
||||
void set_comment(const std::string& open, const std::string& close) {
|
||||
m_impl->lexer_config.comment_open = open;
|
||||
m_impl->lexer_config.comment_close = close;
|
||||
m_impl->lexer_config.update_open_chars();
|
||||
}
|
||||
|
||||
/// Sets the element notation syntax
|
||||
void set_element_notation(ElementNotation notation) {
|
||||
m_impl->parser_config.notation = notation;
|
||||
}
|
||||
|
||||
|
||||
Template parse(std::string_view input) {
|
||||
Parser parser(m_impl->parser_config, m_impl->lexer_config, m_impl->included_templates);
|
||||
return parser.parse(input);
|
||||
}
|
||||
|
||||
Template parse_template(const std::string& filename) {
|
||||
Parser parser(m_impl->parser_config, m_impl->lexer_config, m_impl->included_templates);
|
||||
return parser.parse_template(m_impl->input_path + static_cast<std::string>(filename));
|
||||
}
|
||||
|
||||
std::string render(std::string_view input, const json& data) {
|
||||
return render(parse(input), data);
|
||||
}
|
||||
|
||||
std::string render(const Template& tmpl, const json& data) {
|
||||
std::stringstream os;
|
||||
render_to(os, tmpl, data);
|
||||
return os.str();
|
||||
}
|
||||
|
||||
std::string render_file(const std::string& filename, const json& data) {
|
||||
return render(parse_template(filename), data);
|
||||
}
|
||||
|
||||
std::string render_file_with_json_file(const std::string& filename, const std::string& filename_data) {
|
||||
const json data = load_json(filename_data);
|
||||
return render_file(filename, data);
|
||||
}
|
||||
|
||||
void write(const std::string& filename, const json& data, const std::string& filename_out) {
|
||||
std::ofstream file(m_impl->output_path + filename_out);
|
||||
file << render_file(filename, data);
|
||||
file.close();
|
||||
}
|
||||
|
||||
void write(const Template& temp, const json& data, const std::string& filename_out) {
|
||||
std::ofstream file(m_impl->output_path + filename_out);
|
||||
file << render(temp, data);
|
||||
file.close();
|
||||
}
|
||||
|
||||
void write_with_json_file(const std::string& filename, const std::string& filename_data, const std::string& filename_out) {
|
||||
const json data = load_json(filename_data);
|
||||
write(filename, data, filename_out);
|
||||
}
|
||||
|
||||
void write_with_json_file(const Template& temp, const std::string& filename_data, const std::string& filename_out) {
|
||||
const json data = load_json(filename_data);
|
||||
write(temp, data, filename_out);
|
||||
}
|
||||
|
||||
std::stringstream& render_to(std::stringstream& os, const Template& tmpl, const json& data) {
|
||||
Renderer(m_impl->included_templates, m_impl->callbacks).render_to(os, tmpl, data);
|
||||
return os;
|
||||
}
|
||||
|
||||
std::string load_file(const std::string& filename) {
|
||||
Parser parser(m_impl->parser_config, m_impl->lexer_config, m_impl->included_templates);
|
||||
return parser.load_file(m_impl->input_path + filename);
|
||||
}
|
||||
|
||||
json load_json(const std::string& filename) {
|
||||
std::ifstream file(m_impl->input_path + filename);
|
||||
json j;
|
||||
file >> j;
|
||||
return j;
|
||||
}
|
||||
|
||||
void add_callback(const std::string& name, unsigned int numArgs, const CallbackFunction& callback) {
|
||||
m_impl->callbacks.add_callback(name, numArgs, callback);
|
||||
}
|
||||
|
||||
/** Includes a template with a given name into the environment.
 * The template can then be rendered in another template using the
 * include "<name>" syntax.
 */
|
||||
void include_template(const std::string& name, const Template& tmpl) {
|
||||
m_impl->included_templates[name] = tmpl;
|
||||
}
|
||||
};
|
||||
|
||||
/*!
|
||||
@brief render with default settings
|
||||
*/
|
||||
inline std::string render(std::string_view input, const json& data) {
|
||||
return Environment().render(input, data);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_ENVIRONMENT_HPP
|
||||
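A minimal usage sketch of the new Environment (illustration only, not code from this commit; it assumes the single include added below and the renderer from renderer.hpp):

#include <inja/inja.hpp>
#include <iostream>

int main() {
  inja::Environment env;
  nlohmann::json data;
  data["name"] = "world";
  data["guests"] = {"Jeff", "Tom"};

  // direct rendering
  std::cout << env.render("Hello {{ name }}!", data) << "\n";              // prints: Hello world!

  // parse once, render many times
  inja::Template tmpl = env.parse("{% for guest in guests %}{{ guest }} {% endfor %}");
  std::cout << env.render(tmpl, data) << "\n";                             // prints: "Jeff Tom " (trailing space from the loop body)

  // user-defined callback, selected by name and argument count
  env.add_callback("shout", 1, [](inja::Arguments& args) {
    return args.at(0)->get<std::string>() + "!!!";
  });
  std::cout << env.render("{{ shout(name) }}", data) << "\n";              // prints: world!!!
}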
73  include/inja/function_storage.hpp  Normal file
@@ -0,0 +1,73 @@
#ifndef PANTOR_INJA_FUNCTION_STORAGE_HPP
#define PANTOR_INJA_FUNCTION_STORAGE_HPP

#include <string_view>

#include "bytecode.hpp"


namespace inja {

using namespace nlohmann;

using Arguments = std::vector<const json*>;
using CallbackFunction = std::function<json(Arguments& args)>;

class FunctionStorage {
public:
  void add_builtin(std::string_view name, unsigned int num_args, Bytecode::Op op) {
    auto& data = get_or_new(name, num_args);
    data.op = op;
  }

  void add_callback(std::string_view name, unsigned int num_args, const CallbackFunction& function) {
    auto& data = get_or_new(name, num_args);
    data.function = function;
  }

  Bytecode::Op find_builtin(std::string_view name, unsigned int num_args) const {
    if (auto ptr = get(name, num_args)) {
      return ptr->op;
    }
    return Bytecode::Op::Nop;
  }

  CallbackFunction find_callback(std::string_view name, unsigned int num_args) const {
    if (auto ptr = get(name, num_args)) {
      return ptr->function;
    }
    return nullptr;
  }

private:
  struct FunctionData {
    unsigned int num_args {0};
    Bytecode::Op op {Bytecode::Op::Nop}; // for builtins
    CallbackFunction function; // for callbacks
  };

  FunctionData& get_or_new(std::string_view name, unsigned int num_args) {
    auto &vec = m_map[static_cast<std::string>(name)];
    for (auto &i: vec) {
      if (i.num_args == num_args) return i;
    }
    vec.emplace_back();
    vec.back().num_args = num_args;
    return vec.back();
  }

  const FunctionData* get(std::string_view name, unsigned int num_args) const {
    auto it = m_map.find(static_cast<std::string>(name));
    if (it == m_map.end()) return nullptr;
    for (auto &&i: it->second) {
      if (i.num_args == num_args) return &i;
    }
    return nullptr;
  }

  std::map<std::string, std::vector<FunctionData>> m_map;
};

}

#endif // PANTOR_INJA_FUNCTION_STORAGE_HPP
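Functions are keyed by name and argument count, so the same name can carry several arities, and a miss simply returns Op::Nop or an empty callback. A small sketch (illustration only):

#include <inja/inja.hpp>

int main() {
  inja::FunctionStorage functions;

  // same name, different arity: both registrations coexist
  functions.add_callback("greet", 1, [](inja::Arguments& args) {
    return "Hello " + args.at(0)->get<std::string>();
  });
  functions.add_callback("greet", 2, [](inja::Arguments& args) {
    return args.at(0)->get<std::string>() + ", " + args.at(1)->get<std::string>();
  });

  auto greet1 = functions.find_callback("greet", 1);                                // the first lambda
  auto greet3 = functions.find_callback("greet", 3);                                // empty function: no such arity
  bool is_builtin = functions.find_builtin("greet", 1) != inja::Bytecode::Op::Nop;  // false: registered as a callback only
}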
21  include/inja/inja.hpp  Normal file
@@ -0,0 +1,21 @@
#ifndef PANTOR_INJA_HPP
#define PANTOR_INJA_HPP

#include <functional>
#include <iostream>
#include <map>
#include <memory>
#include <sstream>
#include <string>
#include <string_view>
#include <vector>

#include <nlohmann/json.hpp>

#include "environment.hpp"
#include "template.hpp"
#include "parser.hpp"
#include "renderer.hpp"


#endif // PANTOR_INJA_HPP
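With that, the whole engine comes in through one include, and one-off rendering needs nothing more than the free inja::render from environment.hpp (minimal sketch):

#include <inja/inja.hpp>
#include <iostream>

int main() {
  nlohmann::json data {{"name", "world"}};
  std::cout << inja::render("Hello {{ name }}!", data) << "\n";   // Hello world!
}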
253  include/inja/lexer.hpp  Normal file
@@ -0,0 +1,253 @@
|
||||
#ifndef PANTOR_INJA_LEXER_HPP
|
||||
#define PANTOR_INJA_LEXER_HPP
|
||||
|
||||
#include <cctype>
|
||||
#include <locale>
|
||||
|
||||
#include "config.hpp"
|
||||
#include "token.hpp"
|
||||
#include "utils.hpp"
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
class Lexer {
|
||||
enum class State {
|
||||
Text,
|
||||
ExpressionStart,
|
||||
ExpressionBody,
|
||||
LineStart,
|
||||
LineBody,
|
||||
StatementStart,
|
||||
StatementBody,
|
||||
CommentStart,
|
||||
CommentBody
|
||||
} m_state;
|
||||
|
||||
const LexerConfig& m_config;
|
||||
std::string_view m_in;
|
||||
size_t m_tok_start;
|
||||
size_t m_pos;
|
||||
|
||||
public:
|
||||
explicit Lexer(const LexerConfig& config) : m_config(config) {}
|
||||
|
||||
void start(std::string_view in) {
|
||||
m_in = in;
|
||||
m_tok_start = 0;
|
||||
m_pos = 0;
|
||||
m_state = State::Text;
|
||||
}
|
||||
|
||||
Token scan() {
|
||||
m_tok_start = m_pos;
|
||||
|
||||
again:
|
||||
if (m_tok_start >= m_in.size()) return make_token(Token::Kind::Eof);
|
||||
|
||||
switch (m_state) {
|
||||
default:
|
||||
case State::Text: {
|
||||
// fast-scan to first open character
|
||||
size_t open_start = m_in.substr(m_pos).find_first_of(m_config.open_chars);
|
||||
if (open_start == std::string_view::npos) {
|
||||
// didn't find open, return remaining text as text token
|
||||
m_pos = m_in.size();
|
||||
return make_token(Token::Kind::Text);
|
||||
}
|
||||
m_pos += open_start;
|
||||
|
||||
// try to match one of the opening sequences, and get the close
|
||||
std::string_view open_str = m_in.substr(m_pos);
|
||||
if (string_view::starts_with(open_str, m_config.expression_open)) {
|
||||
m_state = State::ExpressionStart;
|
||||
} else if (string_view::starts_with(open_str, m_config.statement_open)) {
|
||||
m_state = State::StatementStart;
|
||||
} else if (string_view::starts_with(open_str, m_config.comment_open)) {
|
||||
m_state = State::CommentStart;
|
||||
} else if ((m_pos == 0 || m_in[m_pos - 1] == '\n') &&
|
||||
string_view::starts_with(open_str, m_config.line_statement)) {
|
||||
m_state = State::LineStart;
|
||||
} else {
|
||||
m_pos += 1; // wasn't actually an opening sequence
|
||||
goto again;
|
||||
}
|
||||
if (m_pos == m_tok_start) goto again; // don't generate empty token
|
||||
return make_token(Token::Kind::Text);
|
||||
}
|
||||
case State::ExpressionStart: {
|
||||
m_state = State::ExpressionBody;
|
||||
m_pos += m_config.expression_open.size();
|
||||
return make_token(Token::Kind::ExpressionOpen);
|
||||
}
|
||||
case State::LineStart: {
|
||||
m_state = State::LineBody;
|
||||
m_pos += m_config.line_statement.size();
|
||||
return make_token(Token::Kind::LineStatementOpen);
|
||||
}
|
||||
case State::StatementStart: {
|
||||
m_state = State::StatementBody;
|
||||
m_pos += m_config.statement_open.size();
|
||||
return make_token(Token::Kind::StatementOpen);
|
||||
}
|
||||
case State::CommentStart: {
|
||||
m_state = State::CommentBody;
|
||||
m_pos += m_config.comment_open.size();
|
||||
return make_token(Token::Kind::CommentOpen);
|
||||
}
|
||||
case State::ExpressionBody:
|
||||
return scan_body(m_config.expression_close, Token::Kind::ExpressionClose);
|
||||
case State::LineBody:
|
||||
return scan_body("\n", Token::Kind::LineStatementClose);
|
||||
case State::StatementBody:
|
||||
return scan_body(m_config.statement_close, Token::Kind::StatementClose);
|
||||
case State::CommentBody: {
|
||||
// fast-scan to comment close
|
||||
size_t end = m_in.substr(m_pos).find(m_config.comment_close);
|
||||
if (end == std::string_view::npos) {
|
||||
m_pos = m_in.size();
|
||||
return make_token(Token::Kind::Eof);
|
||||
}
|
||||
// return the entire comment in the close token
|
||||
m_state = State::Text;
|
||||
m_pos += end + m_config.comment_close.size();
|
||||
return make_token(Token::Kind::CommentClose);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const LexerConfig& get_config() const { return m_config; }
|
||||
|
||||
private:
|
||||
Token scan_body(std::string_view close, Token::Kind closeKind) {
|
||||
again:
|
||||
// skip whitespace (except for \n as it might be a close)
|
||||
if (m_tok_start >= m_in.size()) return make_token(Token::Kind::Eof);
|
||||
char ch = m_in[m_tok_start];
|
||||
if (ch == ' ' || ch == '\t' || ch == '\r') {
|
||||
m_tok_start += 1;
|
||||
goto again;
|
||||
}
|
||||
|
||||
// check for close
|
||||
if (string_view::starts_with(m_in.substr(m_tok_start), close)) {
|
||||
m_state = State::Text;
|
||||
m_pos = m_tok_start + close.size();
|
||||
return make_token(closeKind);
|
||||
}
|
||||
|
||||
// skip \n
|
||||
if (ch == '\n') {
|
||||
m_tok_start += 1;
|
||||
goto again;
|
||||
}
|
||||
|
||||
m_pos = m_tok_start + 1;
|
||||
if (std::isalpha(ch)) return scan_id();
|
||||
switch (ch) {
|
||||
case ',':
|
||||
return make_token(Token::Kind::Comma);
|
||||
case ':':
|
||||
return make_token(Token::Kind::Colon);
|
||||
case '(':
|
||||
return make_token(Token::Kind::LeftParen);
|
||||
case ')':
|
||||
return make_token(Token::Kind::RightParen);
|
||||
case '[':
|
||||
return make_token(Token::Kind::LeftBracket);
|
||||
case ']':
|
||||
return make_token(Token::Kind::RightBracket);
|
||||
case '{':
|
||||
return make_token(Token::Kind::LeftBrace);
|
||||
case '}':
|
||||
return make_token(Token::Kind::RightBrace);
|
||||
case '>':
|
||||
if (m_pos < m_in.size() && m_in[m_pos] == '=') {
|
||||
m_pos += 1;
|
||||
return make_token(Token::Kind::GreaterEqual);
|
||||
}
|
||||
return make_token(Token::Kind::GreaterThan);
|
||||
case '<':
|
||||
if (m_pos < m_in.size() && m_in[m_pos] == '=') {
|
||||
m_pos += 1;
|
||||
return make_token(Token::Kind::LessEqual);
|
||||
}
|
||||
return make_token(Token::Kind::LessThan);
|
||||
case '=':
|
||||
if (m_pos < m_in.size() && m_in[m_pos] == '=') {
|
||||
m_pos += 1;
|
||||
return make_token(Token::Kind::Equal);
|
||||
}
|
||||
return make_token(Token::Kind::Unknown);
|
||||
case '!':
|
||||
if (m_pos < m_in.size() && m_in[m_pos] == '=') {
|
||||
m_pos += 1;
|
||||
return make_token(Token::Kind::NotEqual);
|
||||
}
|
||||
return make_token(Token::Kind::Unknown);
|
||||
case '\"':
|
||||
return scan_string();
|
||||
case '0':
|
||||
case '1':
|
||||
case '2':
|
||||
case '3':
|
||||
case '4':
|
||||
case '5':
|
||||
case '6':
|
||||
case '7':
|
||||
case '8':
|
||||
case '9':
|
||||
case '-':
|
||||
return scan_number();
|
||||
case '_':
|
||||
return scan_id();
|
||||
default:
|
||||
return make_token(Token::Kind::Unknown);
|
||||
}
|
||||
}
|
||||
|
||||
Token scan_id() {
|
||||
for (;;) {
|
||||
if (m_pos >= m_in.size()) break;
|
||||
char ch = m_in[m_pos];
|
||||
if (!std::isalnum(ch) && ch != '.' && ch != '/' && ch != '_' && ch != '-') break;
|
||||
m_pos += 1;
|
||||
}
|
||||
return make_token(Token::Kind::Id);
|
||||
}
|
||||
|
||||
Token scan_number() {
|
||||
for (;;) {
|
||||
if (m_pos >= m_in.size()) break;
|
||||
char ch = m_in[m_pos];
|
||||
// be very permissive in the lexer (errors are caught later, when the number is converted)
|
||||
if (!std::isdigit(ch) && ch != '.' && ch != 'e' && ch != 'E' && ch != '+' && ch != '-')
|
||||
break;
|
||||
m_pos += 1;
|
||||
}
|
||||
return make_token(Token::Kind::Number);
|
||||
}
|
||||
|
||||
Token scan_string() {
|
||||
bool escape {false};
|
||||
for (;;) {
|
||||
if (m_pos >= m_in.size()) break;
|
||||
char ch = m_in[m_pos++];
|
||||
if (ch == '\\')
|
||||
escape = true;
|
||||
else if (!escape && ch == m_in[m_tok_start])
|
||||
break;
|
||||
else
|
||||
escape = false;
|
||||
}
|
||||
return make_token(Token::Kind::String);
|
||||
}
|
||||
|
||||
Token make_token(Token::Kind kind) const {
|
||||
return Token(kind, string_view::slice(m_in, m_tok_start, m_pos));
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_LEXER_HPP
|
||||
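Driving the Lexer directly shows the token stream the parser consumes; a sketch for illustration (Token itself lives in token.hpp, which is not shown in this excerpt):

#include <inja/inja.hpp>
#include <iostream>

int main() {
  inja::LexerConfig config;            // default delimiters
  inja::Lexer lexer(config);
  lexer.start("Hello {{ name }}!");

  for (;;) {
    inja::Token token = lexer.scan();
    if (token.kind == inja::Token::Kind::Eof) break;
    std::cout << token.text << "\n";
  }
  // yields: "Hello ", "{{", "name", "}}", "!"  (Text, ExpressionOpen, Id, ExpressionClose, Text)
}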
554  include/inja/parser.hpp  Normal file
@@ -0,0 +1,554 @@
|
||||
#ifndef PANTOR_INJA_PARSER_HPP
|
||||
#define PANTOR_INJA_PARSER_HPP
|
||||
|
||||
#include <limits>
|
||||
|
||||
#include "bytecode.hpp"
|
||||
#include "config.hpp"
|
||||
#include "function_storage.hpp"
|
||||
#include "lexer.hpp"
|
||||
#include "template.hpp"
|
||||
#include "token.hpp"
|
||||
#include "utils.hpp"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
class ParserStatic {
|
||||
ParserStatic() {
|
||||
functions.add_builtin("default", 2, Bytecode::Op::Default);
|
||||
functions.add_builtin("divisibleBy", 2, Bytecode::Op::DivisibleBy);
|
||||
functions.add_builtin("even", 1, Bytecode::Op::Even);
|
||||
functions.add_builtin("first", 1, Bytecode::Op::First);
|
||||
functions.add_builtin("float", 1, Bytecode::Op::Float);
|
||||
functions.add_builtin("int", 1, Bytecode::Op::Int);
|
||||
functions.add_builtin("last", 1, Bytecode::Op::Last);
|
||||
functions.add_builtin("length", 1, Bytecode::Op::Length);
|
||||
functions.add_builtin("lower", 1, Bytecode::Op::Lower);
|
||||
functions.add_builtin("max", 1, Bytecode::Op::Max);
|
||||
functions.add_builtin("min", 1, Bytecode::Op::Min);
|
||||
functions.add_builtin("odd", 1, Bytecode::Op::Odd);
|
||||
functions.add_builtin("range", 1, Bytecode::Op::Range);
|
||||
functions.add_builtin("round", 2, Bytecode::Op::Round);
|
||||
functions.add_builtin("sort", 1, Bytecode::Op::Sort);
|
||||
functions.add_builtin("upper", 1, Bytecode::Op::Upper);
|
||||
functions.add_builtin("exists", 1, Bytecode::Op::Exists);
|
||||
functions.add_builtin("existsIn", 2, Bytecode::Op::ExistsInObject);
|
||||
functions.add_builtin("isBoolean", 1, Bytecode::Op::IsBoolean);
|
||||
functions.add_builtin("isNumber", 1, Bytecode::Op::IsNumber);
|
||||
functions.add_builtin("isInteger", 1, Bytecode::Op::IsInteger);
|
||||
functions.add_builtin("isFloat", 1, Bytecode::Op::IsFloat);
|
||||
functions.add_builtin("isObject", 1, Bytecode::Op::IsObject);
|
||||
functions.add_builtin("isArray", 1, Bytecode::Op::IsArray);
|
||||
functions.add_builtin("isString", 1, Bytecode::Op::IsString);
|
||||
}
|
||||
|
||||
public:
|
||||
ParserStatic(const ParserStatic&) = delete;
|
||||
ParserStatic& operator=(const ParserStatic&) = delete;
|
||||
|
||||
static const ParserStatic& get_instance() {
|
||||
static ParserStatic inst;
|
||||
return inst;
|
||||
}
|
||||
|
||||
FunctionStorage functions;
|
||||
};
|
||||
|
||||
class Parser {
|
||||
public:
|
||||
explicit Parser(const ParserConfig& parser_config, const LexerConfig& lexer_config, TemplateStorage& included_templates): m_config(parser_config), m_lexer(lexer_config), m_included_templates(included_templates), m_static(ParserStatic::get_instance()) { }
|
||||
|
||||
bool parse_expression(Template& tmpl) {
|
||||
if (!parse_expression_and(tmpl)) return false;
|
||||
if (m_tok.kind != Token::Kind::Id || m_tok.text != "or") return true;
|
||||
get_next_token();
|
||||
if (!parse_expression_and(tmpl)) return false;
|
||||
append_function(tmpl, Bytecode::Op::Or, 2);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool parse_expression_and(Template& tmpl) {
|
||||
if (!parse_expression_not(tmpl)) return false;
|
||||
if (m_tok.kind != Token::Kind::Id || m_tok.text != "and") return true;
|
||||
get_next_token();
|
||||
if (!parse_expression_not(tmpl)) return false;
|
||||
append_function(tmpl, Bytecode::Op::And, 2);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool parse_expression_not(Template& tmpl) {
|
||||
if (m_tok.kind == Token::Kind::Id && m_tok.text == "not") {
|
||||
get_next_token();
|
||||
if (!parse_expression_not(tmpl)) return false;
|
||||
append_function(tmpl, Bytecode::Op::Not, 1);
|
||||
return true;
|
||||
} else {
|
||||
return parse_expression_comparison(tmpl);
|
||||
}
|
||||
}
|
||||
|
||||
bool parse_expression_comparison(Template& tmpl) {
|
||||
if (!parse_expression_datum(tmpl)) return false;
|
||||
Bytecode::Op op;
|
||||
switch (m_tok.kind) {
|
||||
case Token::Kind::Id:
|
||||
if (m_tok.text == "in")
|
||||
op = Bytecode::Op::In;
|
||||
else
|
||||
return true;
|
||||
break;
|
||||
case Token::Kind::Equal:
|
||||
op = Bytecode::Op::Equal;
|
||||
break;
|
||||
case Token::Kind::GreaterThan:
|
||||
op = Bytecode::Op::Greater;
|
||||
break;
|
||||
case Token::Kind::LessThan:
|
||||
op = Bytecode::Op::Less;
|
||||
break;
|
||||
case Token::Kind::LessEqual:
|
||||
op = Bytecode::Op::LessEqual;
|
||||
break;
|
||||
case Token::Kind::GreaterEqual:
|
||||
op = Bytecode::Op::GreaterEqual;
|
||||
break;
|
||||
case Token::Kind::NotEqual:
|
||||
op = Bytecode::Op::Different;
|
||||
break;
|
||||
default:
|
||||
return true;
|
||||
}
|
||||
get_next_token();
|
||||
if (!parse_expression_datum(tmpl)) return false;
|
||||
append_function(tmpl, op, 2);
|
||||
return true;
|
||||
}
|
||||
|
||||
bool parse_expression_datum(Template& tmpl) {
|
||||
std::string_view json_first;
|
||||
size_t bracket_level = 0;
|
||||
size_t brace_level = 0;
|
||||
|
||||
for (;;) {
|
||||
switch (m_tok.kind) {
|
||||
case Token::Kind::LeftParen: {
|
||||
get_next_token();
|
||||
if (!parse_expression(tmpl)) return false;
|
||||
if (m_tok.kind != Token::Kind::RightParen) {
|
||||
inja_throw("parser_error", "unmatched '('");
|
||||
}
|
||||
get_next_token();
|
||||
return true;
|
||||
}
|
||||
case Token::Kind::Id:
|
||||
get_peek_token();
|
||||
if (m_peek_tok.kind == Token::Kind::LeftParen) {
|
||||
// function call, parse arguments
|
||||
Token func_token = m_tok;
|
||||
get_next_token(); // id
|
||||
get_next_token(); // leftParen
|
||||
unsigned int num_args = 0;
|
||||
if (m_tok.kind == Token::Kind::RightParen) {
|
||||
// no args
|
||||
get_next_token();
|
||||
} else {
|
||||
for (;;) {
|
||||
if (!parse_expression(tmpl)) {
|
||||
inja_throw("parser_error", "expected expression, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
num_args += 1;
|
||||
if (m_tok.kind == Token::Kind::RightParen) {
|
||||
get_next_token();
|
||||
break;
|
||||
}
|
||||
if (m_tok.kind != Token::Kind::Comma) {
|
||||
inja_throw("parser_error", "expected ')' or ',', got '" + m_tok.describe() + "'");
|
||||
}
|
||||
get_next_token();
|
||||
}
|
||||
}
|
||||
|
||||
auto op = m_static.functions.find_builtin(func_token.text, num_args);
|
||||
|
||||
if (op != Bytecode::Op::Nop) {
|
||||
// swap arguments for default(); see comment in RenderTo()
|
||||
if (op == Bytecode::Op::Default)
|
||||
std::swap(tmpl.bytecodes.back(), *(tmpl.bytecodes.rbegin() + 1));
|
||||
append_function(tmpl, op, num_args);
|
||||
return true;
|
||||
} else {
|
||||
append_callback(tmpl, func_token.text, num_args);
|
||||
return true;
|
||||
}
|
||||
} else if (m_tok.text == "true" || m_tok.text == "false" || m_tok.text == "null") {
|
||||
// true, false, null are json literals
|
||||
if (brace_level == 0 && bracket_level == 0) {
|
||||
json_first = m_tok.text;
|
||||
goto returnJson;
|
||||
}
|
||||
break;
|
||||
} else {
|
||||
// normal literal (json read)
|
||||
tmpl.bytecodes.emplace_back(
|
||||
Bytecode::Op::Push, m_tok.text,
|
||||
m_config.notation == ElementNotation::Pointer ? Bytecode::Flag::ValueLookupPointer : Bytecode::Flag::ValueLookupDot);
|
||||
get_next_token();
|
||||
return true;
|
||||
}
|
||||
// json passthrough
|
||||
case Token::Kind::Number:
|
||||
case Token::Kind::String:
|
||||
if (brace_level == 0 && bracket_level == 0) {
|
||||
json_first = m_tok.text;
|
||||
goto returnJson;
|
||||
}
|
||||
break;
|
||||
case Token::Kind::Comma:
|
||||
case Token::Kind::Colon:
|
||||
if (brace_level == 0 && bracket_level == 0) {
|
||||
inja_throw("parser_error", "unexpected token '" + m_tok.describe() + "'");
|
||||
}
|
||||
break;
|
||||
case Token::Kind::LeftBracket:
|
||||
if (brace_level == 0 && bracket_level == 0) {
|
||||
json_first = m_tok.text;
|
||||
}
|
||||
bracket_level += 1;
|
||||
break;
|
||||
case Token::Kind::LeftBrace:
|
||||
if (brace_level == 0 && bracket_level == 0) {
|
||||
json_first = m_tok.text;
|
||||
}
|
||||
brace_level += 1;
|
||||
break;
|
||||
case Token::Kind::RightBracket:
|
||||
if (bracket_level == 0) {
|
||||
inja_throw("parser_error", "unexpected ']'");
|
||||
}
|
||||
--bracket_level;
|
||||
if (brace_level == 0 && bracket_level == 0) goto returnJson;
|
||||
break;
|
||||
case Token::Kind::RightBrace:
|
||||
if (brace_level == 0) {
|
||||
inja_throw("parser_error", "unexpected '}'");
|
||||
}
|
||||
--brace_level;
|
||||
if (brace_level == 0 && bracket_level == 0) goto returnJson;
|
||||
break;
|
||||
default:
|
||||
if (brace_level != 0) {
|
||||
inja_throw("parser_error", "unmatched '{'");
|
||||
}
|
||||
if (bracket_level != 0) {
|
||||
inja_throw("parser_error", "unmatched '['");
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
get_next_token();
|
||||
}
|
||||
|
||||
returnJson:
|
||||
// bridge across all intermediate tokens
|
||||
std::string_view json_text(json_first.data(), m_tok.text.data() - json_first.data() + m_tok.text.size());
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::Push, json::parse(json_text), Bytecode::Flag::ValueImmediate);
|
||||
get_next_token();
|
||||
return true;
|
||||
}
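// Note (illustrative): a composite literal such as [1, 2, "x"] arrives as many tokens
// (brackets, numbers, commas, strings); the bracket/brace counters above only find where
// the literal ends, and the whole span from json_first to the closing token is then handed
// to json::parse in one piece at returnJson.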
|
||||
|
||||
bool parse_statement(Template& tmpl, std::string_view path) {
|
||||
if (m_tok.kind != Token::Kind::Id) return false;
|
||||
|
||||
if (m_tok.text == "if") {
|
||||
get_next_token();
|
||||
|
||||
// evaluate expression
|
||||
if (!parse_expression(tmpl)) return false;
|
||||
|
||||
// start a new if block on if stack
|
||||
m_if_stack.emplace_back(tmpl.bytecodes.size());
|
||||
|
||||
// conditional jump; destination will be filled in by else or endif
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::ConditionalJump);
|
||||
} else if (m_tok.text == "endif") {
|
||||
if (m_if_stack.empty()) {
|
||||
inja_throw("parser_error", "endif without matching if");
|
||||
}
|
||||
auto& if_data = m_if_stack.back();
|
||||
get_next_token();
|
||||
|
||||
// previous conditional jump jumps here
|
||||
if (if_data.prev_cond_jump != std::numeric_limits<unsigned int>::max()) {
|
||||
tmpl.bytecodes[if_data.prev_cond_jump].args = tmpl.bytecodes.size();
|
||||
}
|
||||
|
||||
// update all previous unconditional jumps to here
|
||||
for (unsigned int i: if_data.uncond_jumps) {
|
||||
tmpl.bytecodes[i].args = tmpl.bytecodes.size();
|
||||
}
|
||||
|
||||
// pop if stack
|
||||
m_if_stack.pop_back();
|
||||
} else if (m_tok.text == "else") {
|
||||
if (m_if_stack.empty())
|
||||
inja_throw("parser_error", "else without matching if");
|
||||
auto& if_data = m_if_stack.back();
|
||||
get_next_token();
|
||||
|
||||
// end previous block with unconditional jump to endif; destination will be
|
||||
// filled in by endif
|
||||
if_data.uncond_jumps.push_back(tmpl.bytecodes.size());
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::Jump);
|
||||
|
||||
// previous conditional jump jumps here
|
||||
tmpl.bytecodes[if_data.prev_cond_jump].args = tmpl.bytecodes.size();
|
||||
if_data.prev_cond_jump = std::numeric_limits<unsigned int>::max();
|
||||
|
||||
// chained else if
|
||||
if (m_tok.kind == Token::Kind::Id && m_tok.text == "if") {
|
||||
get_next_token();
|
||||
|
||||
// evaluate expression
|
||||
if (!parse_expression(tmpl)) return false;
|
||||
|
||||
// update "previous jump"
|
||||
if_data.prev_cond_jump = tmpl.bytecodes.size();
|
||||
|
||||
// conditional jump; destination will be filled in by else or endif
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::ConditionalJump);
|
||||
}
|
||||
} else if (m_tok.text == "for") {
|
||||
get_next_token();
|
||||
|
||||
// options: for a in arr; for a, b in obj
|
||||
if (m_tok.kind != Token::Kind::Id)
|
||||
inja_throw("parser_error", "expected id, got '" + m_tok.describe() + "'");
|
||||
Token value_token = m_tok;
|
||||
get_next_token();
|
||||
|
||||
Token key_token;
|
||||
if (m_tok.kind == Token::Kind::Comma) {
|
||||
get_next_token();
|
||||
if (m_tok.kind != Token::Kind::Id)
|
||||
inja_throw("parser_error", "expected id, got '" + m_tok.describe() + "'");
|
||||
key_token = std::move(value_token);
|
||||
value_token = m_tok;
|
||||
get_next_token();
|
||||
}
|
||||
|
||||
if (m_tok.kind != Token::Kind::Id || m_tok.text != "in")
|
||||
inja_throw("parser_error",
|
||||
"expected 'in', got '" + m_tok.describe() + "'");
|
||||
get_next_token();
|
||||
|
||||
if (!parse_expression(tmpl)) return false;
|
||||
|
||||
m_loop_stack.push_back(tmpl.bytecodes.size());
|
||||
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::StartLoop);
|
||||
if (!key_token.text.empty()) {
|
||||
tmpl.bytecodes.back().value = key_token.text;
|
||||
}
|
||||
tmpl.bytecodes.back().str = value_token.text;
|
||||
} else if (m_tok.text == "endfor") {
|
||||
get_next_token();
|
||||
if (m_loop_stack.empty()) {
|
||||
inja_throw("parser_error", "endfor without matching for");
|
||||
}
|
||||
|
||||
// update loop with EndLoop index (for empty case)
|
||||
tmpl.bytecodes[m_loop_stack.back()].args = tmpl.bytecodes.size();
|
||||
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::EndLoop);
|
||||
tmpl.bytecodes.back().args = m_loop_stack.back() + 1; // loop body
|
||||
m_loop_stack.pop_back();
|
||||
} else if (m_tok.text == "include") {
|
||||
get_next_token();
|
||||
|
||||
if (m_tok.kind != Token::Kind::String) {
|
||||
inja_throw("parser_error", "expected string, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
|
||||
// build the relative path
|
||||
json json_name = json::parse(m_tok.text);
|
||||
std::string pathname = static_cast<std::string>(path);
|
||||
pathname += json_name.get_ref<const std::string&>();
|
||||
if (pathname.compare(0, 2, "./") == 0) {
|
||||
pathname.erase(0, 2);
|
||||
}
|
||||
// sys::path::remove_dots(pathname, true, sys::path::Style::posix);
|
||||
|
||||
// parse it only if it's new
|
||||
// TemplateStorage::iterator included;
|
||||
// bool is_new {true};
|
||||
// std::tie(included, is_new) = m_included_templates.emplace(pathname);
|
||||
// if (is_new) included->second = parse_template(pathname);
|
||||
|
||||
Template include_template = parse_template(pathname);
|
||||
m_included_templates.emplace(pathname, include_template);
|
||||
|
||||
// generate a reference bytecode
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::Include, json(pathname), Bytecode::Flag::ValueImmediate);
|
||||
|
||||
get_next_token();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
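// Worked example (illustrative sketch, not generated output): for
//   {% if a %}A{% else %}B{% endif %}
// "if" pushes an IfData entry and emits a ConditionalJump whose target is still unset;
// "else" records an unconditional Jump in uncond_jumps and patches the pending
// ConditionalJump to the instruction right after that Jump; "endif" patches any remaining
// ConditionalJump plus all recorded Jumps to the current end of the bytecode stream and
// pops the IfData entry.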
|
||||
|
||||
void append_function(Template& tmpl, Bytecode::Op op, unsigned int num_args) {
|
||||
// we can merge with back-to-back push
|
||||
if (!tmpl.bytecodes.empty()) {
|
||||
Bytecode& last = tmpl.bytecodes.back();
|
||||
if (last.op == Bytecode::Op::Push) {
|
||||
last.op = op;
|
||||
last.args = num_args;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise just add it to the end
|
||||
tmpl.bytecodes.emplace_back(op, num_args);
|
||||
}
|
||||
|
||||
void append_callback(Template& tmpl, std::string_view name, unsigned int num_args) {
|
||||
// we can merge with back-to-back push value (not lookup)
|
||||
if (!tmpl.bytecodes.empty()) {
|
||||
Bytecode& last = tmpl.bytecodes.back();
|
||||
if (last.op == Bytecode::Op::Push &&
|
||||
(last.flags & Bytecode::Flag::ValueMask) == Bytecode::Flag::ValueImmediate) {
|
||||
last.op = Bytecode::Op::Callback;
|
||||
last.args = num_args;
|
||||
last.str = name;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// otherwise just add it to the end
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::Callback, num_args);
|
||||
tmpl.bytecodes.back().str = name;
|
||||
}
|
||||
|
||||
void parse_into(Template& tmpl, std::string_view path) {
|
||||
m_lexer.start(tmpl.content);
|
||||
|
||||
for (;;) {
|
||||
get_next_token();
|
||||
switch (m_tok.kind) {
|
||||
case Token::Kind::Eof:
|
||||
if (!m_if_stack.empty()) inja_throw("parser_error", "unmatched if");
|
||||
if (!m_loop_stack.empty()) inja_throw("parser_error", "unmatched for");
|
||||
return;
|
||||
case Token::Kind::Text:
|
||||
tmpl.bytecodes.emplace_back(Bytecode::Op::PrintText, m_tok.text, 0u);
|
||||
break;
|
||||
case Token::Kind::StatementOpen:
|
||||
get_next_token();
|
||||
if (!parse_statement(tmpl, path)) {
|
||||
inja_throw("parser_error", "expected statement, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
if (m_tok.kind != Token::Kind::StatementClose) {
|
||||
inja_throw("parser_error", "expected statement close, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
break;
|
||||
case Token::Kind::LineStatementOpen:
|
||||
get_next_token();
|
||||
parse_statement(tmpl, path);
|
||||
if (m_tok.kind != Token::Kind::LineStatementClose &&
|
||||
m_tok.kind != Token::Kind::Eof) {
|
||||
inja_throw("parser_error", "expected line statement close, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
break;
|
||||
case Token::Kind::ExpressionOpen:
|
||||
get_next_token();
|
||||
if (!parse_expression(tmpl)) {
|
||||
inja_throw("parser_error", "expected expression, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
append_function(tmpl, Bytecode::Op::PrintValue, 1);
|
||||
if (m_tok.kind != Token::Kind::ExpressionClose) {
|
||||
inja_throw("parser_error", "expected expression close, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
break;
|
||||
case Token::Kind::CommentOpen:
|
||||
get_next_token();
|
||||
if (m_tok.kind != Token::Kind::CommentClose) {
|
||||
inja_throw("parser_error", "expected comment close, got '" + m_tok.describe() + "'");
|
||||
}
|
||||
break;
|
||||
default:
|
||||
inja_throw("parser_error", "unexpected token '" + m_tok.describe() + "'");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Template parse(std::string_view input, std::string_view path) {
|
||||
Template result;
|
||||
result.content = input;
|
||||
parse_into(result, path);
|
||||
return result;
|
||||
}
|
||||
|
||||
Template parse(std::string_view input) {
|
||||
return parse(input, "./");
|
||||
}
|
||||
|
||||
Template parse_template(std::string_view filename) {
|
||||
Template result;
|
||||
result.content = load_file(filename);
|
||||
|
||||
std::string_view path = filename.substr(0, filename.find_last_of("/\\") + 1);
|
||||
// StringRef path = sys::path::parent_path(filename);
|
||||
Parser(m_config, m_lexer.get_config(), m_included_templates).parse_into(result, path);
|
||||
return result;
|
||||
}
|
||||
|
||||
std::string load_file(std::string_view filename) {
|
||||
std::ifstream file(static_cast<std::string>(filename));
|
||||
std::string text((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
|
||||
return text;
|
||||
}
|
||||
|
||||
private:
|
||||
const ParserConfig& m_config;
|
||||
Lexer m_lexer;
|
||||
Token m_tok;
|
||||
Token m_peek_tok;
|
||||
bool m_have_peek_tok {false};
|
||||
TemplateStorage& m_included_templates;
|
||||
const ParserStatic& m_static;
|
||||
|
||||
struct IfData {
|
||||
unsigned int prev_cond_jump;
|
||||
std::vector<unsigned int> uncond_jumps;
|
||||
|
||||
explicit IfData(unsigned int condJump): prev_cond_jump(condJump) {}
|
||||
};
|
||||
|
||||
std::vector<IfData> m_if_stack;
|
||||
std::vector<unsigned int> m_loop_stack;
|
||||
|
||||
void get_next_token() {
|
||||
if (m_have_peek_tok) {
|
||||
m_tok = m_peek_tok;
|
||||
m_have_peek_tok = false;
|
||||
} else {
|
||||
m_tok = m_lexer.scan();
|
||||
}
|
||||
}
|
||||
|
||||
void get_peek_token() {
|
||||
if (!m_have_peek_tok) {
|
||||
m_peek_tok = m_lexer.scan();
|
||||
m_have_peek_tok = true;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace inja
|
||||
|
||||
#endif // PANTOR_INJA_PARSER_HPP
|
||||
50
include/inja/polyfill.hpp
Normal file
50
include/inja/polyfill.hpp
Normal file
@@ -0,0 +1,50 @@
|
||||
#ifndef PANTOR_INJA_POLYFILL_HPP
|
||||
#define PANTOR_INJA_POLYFILL_HPP
|
||||
|
||||
|
||||
#if __cplusplus < 201402L
|
||||
|
||||
#include <cstddef>
|
||||
#include <type_traits>
|
||||
#include <utility>
|
||||
|
||||
|
||||
namespace stdinja {
|
||||
template<class T> struct _Unique_if {
|
||||
typedef std::unique_ptr<T> _Single_object;
|
||||
};
|
||||
|
||||
template<class T> struct _Unique_if<T[]> {
|
||||
typedef std::unique_ptr<T[]> _Unknown_bound;
|
||||
};
|
||||
|
||||
template<class T, size_t N> struct _Unique_if<T[N]> {
|
||||
typedef void _Known_bound;
|
||||
};
|
||||
|
||||
template<class T, class... Args>
|
||||
typename _Unique_if<T>::_Single_object
|
||||
make_unique(Args&&... args) {
|
||||
return std::unique_ptr<T>(new T(std::forward<Args>(args)...));
|
||||
}
|
||||
|
||||
template<class T>
|
||||
typename _Unique_if<T>::_Unknown_bound
|
||||
make_unique(size_t n) {
|
||||
typedef typename std::remove_extent<T>::type U;
|
||||
return std::unique_ptr<T>(new U[n]());
|
||||
}
|
||||
|
||||
template<class T, class... Args>
|
||||
typename _Unique_if<T>::_Known_bound
|
||||
make_unique(Args&&...) = delete;
|
||||
}
|
||||
|
||||
#else
|
||||
|
||||
namespace stdinja = std;
|
||||
|
||||
#endif // memory */
|
||||
|
||||
|
||||
#endif // PANTOR_INJA_POLYFILL_HPP
|
||||
553
include/inja/renderer.hpp
Normal file
553
include/inja/renderer.hpp
Normal file
@@ -0,0 +1,553 @@
|
||||
#ifndef PANTOR_INJA_RENDERER_HPP
|
||||
#define PANTOR_INJA_RENDERER_HPP
|
||||
|
||||
#include <algorithm>
|
||||
#include <numeric>
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
#include "bytecode.hpp"
|
||||
#include "template.hpp"
|
||||
#include "utils.hpp"
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
inline std::string_view convert_dot_to_json_pointer(std::string_view dot, std::string& out) {
|
||||
out.clear();
|
||||
do {
|
||||
std::string_view part;
|
||||
std::tie(part, dot) = string_view::split(dot, '.');
|
||||
out.push_back('/');
|
||||
out.append(part.begin(), part.end());
|
||||
} while (!dot.empty());
|
||||
return std::string_view(out.data(), out.size());
|
||||
}
|
||||
|
||||
class Renderer {
|
||||
std::vector<const json*>& get_args(const Bytecode& bc) {
|
||||
m_tmp_args.clear();
|
||||
|
||||
bool hasImm = ((bc.flags & Bytecode::Flag::ValueMask) != Bytecode::Flag::ValuePop);
|
||||
|
||||
// get args from stack
|
||||
unsigned int pop_args = bc.args;
|
||||
if (hasImm) --pop_args;
|
||||
|
||||
for (auto i = std::prev(m_stack.end(), pop_args); i != m_stack.end(); i++) {
|
||||
m_tmp_args.push_back(&(*i));
|
||||
}
|
||||
|
||||
// get immediate arg
|
||||
if (hasImm) {
|
||||
m_tmp_args.push_back(get_imm(bc));
|
||||
}
|
||||
|
||||
return m_tmp_args;
|
||||
}
|
||||
|
||||
void pop_args(const Bytecode& bc) {
|
||||
unsigned int popArgs = bc.args;
|
||||
if ((bc.flags & Bytecode::Flag::ValueMask) != Bytecode::Flag::ValuePop)
|
||||
--popArgs;
|
||||
for (unsigned int i = 0; i < popArgs; ++i) m_stack.pop_back();
|
||||
}
|
||||
|
||||
const json* get_imm(const Bytecode& bc) {
|
||||
std::string ptr_buffer;
|
||||
std::string_view ptr;
|
||||
switch (bc.flags & Bytecode::Flag::ValueMask) {
|
||||
case Bytecode::Flag::ValuePop:
|
||||
return nullptr;
|
||||
case Bytecode::Flag::ValueImmediate:
|
||||
return &bc.value;
|
||||
case Bytecode::Flag::ValueLookupDot:
|
||||
ptr = convert_dot_to_json_pointer(bc.str, ptr_buffer);
|
||||
break;
|
||||
case Bytecode::Flag::ValueLookupPointer:
|
||||
ptr_buffer += '/';
|
||||
ptr_buffer += bc.str;
|
||||
ptr = ptr_buffer;
|
||||
break;
|
||||
}
|
||||
try {
|
||||
return &m_data->at(json::json_pointer(ptr.data()));
|
||||
} catch (std::exception&) {
|
||||
// try to evaluate as a no-argument callback
|
||||
if (auto callback = m_callbacks.find_callback(bc.str, 0)) {
|
||||
std::vector<const json*> arguments {};
|
||||
m_tmp_val = callback(arguments);
|
||||
return &m_tmp_val;
|
||||
}
|
||||
inja_throw("render_error", "variable '" + static_cast<std::string>(bc.str) + "' not found");
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
bool truthy(const json& var) const {
|
||||
if (var.empty()) {
|
||||
return false;
|
||||
} else if (var.is_number()) {
|
||||
return (var != 0);
|
||||
} else if (var.is_string()) {
|
||||
return !var.empty();
|
||||
}
|
||||
|
||||
try {
|
||||
return var.get<bool>();
|
||||
} catch (json::type_error& e) {
|
||||
inja_throw("json_error", e.what());
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
void update_loop_data() {
|
||||
LoopLevel& level = m_loop_stack.back();
|
||||
|
||||
if (m_loop_stack.size() > 1) {
|
||||
for (int i = m_loop_stack.size() - 2; i >= 0; i--) {
|
||||
auto& level_it = m_loop_stack.at(i);
|
||||
|
||||
level.data[static_cast<std::string>(level_it.value_name)] = level_it.values.at(level_it.index);
|
||||
}
|
||||
}
|
||||
|
||||
if (level.key_name.empty()) {
|
||||
level.data[static_cast<std::string>(level.value_name)] = level.values.at(level.index); // *level.it;
|
||||
auto& loopData = level.data["loop"];
|
||||
loopData["index"] = level.index;
|
||||
loopData["index1"] = level.index + 1;
|
||||
loopData["is_first"] = (level.index == 0);
|
||||
loopData["is_last"] = (level.index == level.size - 1);
|
||||
} else {
|
||||
level.data[static_cast<std::string>(level.key_name)] = level.map_it->first;
|
||||
level.data[static_cast<std::string>(level.value_name)] = *level.map_it->second;
|
||||
}
|
||||
}
|
||||
|
||||
const TemplateStorage& m_included_templates;
|
||||
const FunctionStorage& m_callbacks;
|
||||
|
||||
std::vector<json> m_stack;
|
||||
|
||||
struct LoopLevel {
|
||||
std::string_view key_name; // variable name for keys
|
||||
std::string_view value_name; // variable name for values
|
||||
json data; // data with loop info added
|
||||
|
||||
json values; // values to iterate over
|
||||
|
||||
// loop over list
|
||||
json::iterator it; // iterator over values
|
||||
size_t index; // current list index
|
||||
size_t size; // length of list
|
||||
|
||||
// loop over map
|
||||
using KeyValue = std::pair<std::string_view, json*>;
|
||||
using MapValues = std::vector<KeyValue>;
|
||||
MapValues map_values; // values to iterate over
|
||||
MapValues::iterator map_it; // iterator over values
|
||||
};
|
||||
|
||||
std::vector<LoopLevel> m_loop_stack;
|
||||
const json* m_data;
|
||||
|
||||
std::vector<const json*> m_tmp_args;
|
||||
json m_tmp_val;
|
||||
|
||||
|
||||
public:
|
||||
Renderer(const TemplateStorage& included_templates, const FunctionStorage& callbacks): m_included_templates(included_templates), m_callbacks(callbacks) {
|
||||
m_stack.reserve(16);
|
||||
m_tmp_args.reserve(4);
|
||||
}
|
||||
|
||||
void render_to(std::stringstream& os, const Template& tmpl, const json& data) {
|
||||
m_data = &data;
|
||||
|
||||
for (size_t i = 0; i < tmpl.bytecodes.size(); ++i) {
|
||||
const auto& bc = tmpl.bytecodes[i];
|
||||
|
||||
switch (bc.op) {
|
||||
case Bytecode::Op::Nop:
|
||||
break;
|
||||
case Bytecode::Op::PrintText:
|
||||
os << bc.str;
|
||||
break;
|
||||
case Bytecode::Op::PrintValue: {
|
||||
const json& val = *get_args(bc)[0];
|
||||
if (val.is_string())
|
||||
os << val.get_ref<const std::string&>();
|
||||
else
|
||||
os << val.dump();
|
||||
// val.dump(os);
|
||||
pop_args(bc);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Push:
|
||||
m_stack.emplace_back(*get_imm(bc));
|
||||
break;
|
||||
case Bytecode::Op::Upper: {
|
||||
auto result = get_args(bc)[0]->get<std::string>();
|
||||
std::transform(result.begin(), result.end(), result.begin(), ::toupper);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::move(result));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Lower: {
|
||||
auto result = get_args(bc)[0]->get<std::string>();
|
||||
std::transform(result.begin(), result.end(), result.begin(), ::tolower);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::move(result));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Range: {
|
||||
int number = get_args(bc)[0]->get<int>();
|
||||
std::vector<int> result(number);
|
||||
std::iota(std::begin(result), std::end(result), 0);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::move(result));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Length: {
|
||||
auto result = get_args(bc)[0]->size();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Sort: {
|
||||
auto result = get_args(bc)[0]->get<std::vector<json>>();
|
||||
std::sort(result.begin(), result.end());
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::move(result));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::First: {
|
||||
auto result = get_args(bc)[0]->front();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Last: {
|
||||
auto result = get_args(bc)[0]->back();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Round: {
|
||||
auto args = get_args(bc);
|
||||
double number = args[0]->get<double>();
|
||||
int precision = args[1]->get<int>();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::round(number * std::pow(10.0, precision)) / std::pow(10.0, precision));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::DivisibleBy: {
|
||||
auto args = get_args(bc);
|
||||
int number = args[0]->get<int>();
|
||||
int divisor = args[1]->get<int>();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back((divisor != 0) && (number % divisor == 0));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Odd: {
|
||||
int number = get_args(bc)[0]->get<int>();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(number % 2 != 0);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Even: {
|
||||
int number = get_args(bc)[0]->get<int>();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(number % 2 == 0);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Max: {
|
||||
auto args = get_args(bc);
|
||||
auto result = *std::max_element(args[0]->begin(), args[0]->end());
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::move(result));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Min: {
|
||||
auto args = get_args(bc);
|
||||
auto result = *std::min_element(args[0]->begin(), args[0]->end());
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::move(result));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Not: {
|
||||
bool result = !truthy(*get_args(bc)[0]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::And: {
|
||||
auto args = get_args(bc);
|
||||
bool result = truthy(*args[0]) && truthy(*args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Or: {
|
||||
auto args = get_args(bc);
|
||||
bool result = truthy(*args[0]) || truthy(*args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::In: {
|
||||
auto args = get_args(bc);
|
||||
bool result = std::find(args[1]->begin(), args[1]->end(), *args[0]) !=
|
||||
args[1]->end();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Equal: {
|
||||
auto args = get_args(bc);
|
||||
bool result = (*args[0] == *args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Greater: {
|
||||
auto args = get_args(bc);
|
||||
bool result = (*args[0] > *args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Less: {
|
||||
auto args = get_args(bc);
|
||||
bool result = (*args[0] < *args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::GreaterEqual: {
|
||||
auto args = get_args(bc);
|
||||
bool result = (*args[0] >= *args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::LessEqual: {
|
||||
auto args = get_args(bc);
|
||||
bool result = (*args[0] <= *args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Different: {
|
||||
auto args = get_args(bc);
|
||||
bool result = (*args[0] != *args[1]);
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Float: {
|
||||
double result =
|
||||
std::stod(get_args(bc)[0]->get_ref<const std::string&>());
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Int: {
|
||||
int result = std::stoi(get_args(bc)[0]->get_ref<const std::string&>());
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Exists: {
|
||||
auto&& name = get_args(bc)[0]->get_ref<const std::string&>();
|
||||
bool result = (data.find(name) != data.end());
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::ExistsInObject: {
|
||||
auto args = get_args(bc);
|
||||
auto&& name = args[1]->get_ref<const std::string&>();
|
||||
bool result = (args[0]->find(name) != args[0]->end());
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::IsBoolean: {
|
||||
bool result = get_args(bc)[0]->is_boolean();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::IsNumber: {
|
||||
bool result = get_args(bc)[0]->is_number();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::IsInteger: {
|
||||
bool result = get_args(bc)[0]->is_number_integer();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::IsFloat: {
|
||||
bool result = get_args(bc)[0]->is_number_float();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::IsObject: {
|
||||
bool result = get_args(bc)[0]->is_object();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::IsArray: {
|
||||
bool result = get_args(bc)[0]->is_array();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::IsString: {
|
||||
bool result = get_args(bc)[0]->is_string();
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(result);
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Default: {
|
||||
// default needs to be a bit "magic"; we can't evaluate the first
|
||||
// argument during the push operation, so we swap the arguments during
|
||||
// the parse phase so the second argument is pushed on the stack and
|
||||
// the first argument is in the immediate
|
||||
try {
|
||||
const json* imm = get_imm(bc);
|
||||
// if no exception was raised, replace the stack value with it
|
||||
m_stack.back() = *imm;
|
||||
} catch (std::exception&) {
|
||||
// couldn't read immediate, just leave the stack as is
|
||||
}
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Include:
|
||||
Renderer(m_included_templates, m_callbacks).render_to(os, m_included_templates.find(get_imm(bc)->get_ref<const std::string&>())->second, data);
|
||||
break;
|
||||
case Bytecode::Op::Callback: {
|
||||
auto callback = m_callbacks.find_callback(bc.str, bc.args);
|
||||
if (!callback) {
|
||||
inja_throw("render_error", "function '" + static_cast<std::string>(bc.str) + "' (" + std::to_string(static_cast<unsigned int>(bc.args)) + ") not found");
|
||||
}
|
||||
json result = callback(get_args(bc));
|
||||
pop_args(bc);
|
||||
m_stack.emplace_back(std::move(result));
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::Jump:
|
||||
i = bc.args - 1; // -1 due to ++i in loop
|
||||
break;
|
||||
case Bytecode::Op::ConditionalJump: {
|
||||
if (!truthy(m_stack.back())) {
|
||||
i = bc.args - 1; // -1 due to ++i in loop
|
||||
}
|
||||
m_stack.pop_back();
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::StartLoop: {
|
||||
// jump past loop body if empty
|
||||
if (m_stack.back().empty()) {
|
||||
m_stack.pop_back();
|
||||
i = bc.args; // ++i in loop will take it past EndLoop
|
||||
break;
|
||||
}
|
||||
|
||||
m_loop_stack.emplace_back();
|
||||
LoopLevel& level = m_loop_stack.back();
|
||||
level.value_name = bc.str;
|
||||
level.values = std::move(m_stack.back());
|
||||
level.data = data;
|
||||
m_stack.pop_back();
|
||||
|
||||
if (bc.value.is_string()) {
|
||||
// map iterator
|
||||
if (!level.values.is_object()) {
|
||||
m_loop_stack.pop_back();
|
||||
inja_throw("render_error", "for key, value requires object");
|
||||
}
|
||||
level.key_name = bc.value.get_ref<const std::string&>();
|
||||
|
||||
// sort by key
|
||||
for (auto it = level.values.begin(), end = level.values.end(); it != end; ++it) {
|
||||
level.map_values.emplace_back(it.key(), &it.value());
|
||||
}
|
||||
std::sort(level.map_values.begin(), level.map_values.end(), [](const LoopLevel::KeyValue& a, const LoopLevel::KeyValue& b) { return a.first < b.first; });
|
||||
level.map_it = level.map_values.begin();
|
||||
} else {
|
||||
if (!level.values.is_array()) {
|
||||
m_loop_stack.pop_back();
|
||||
inja_throw("render_error", "type must be array");
|
||||
}
|
||||
|
||||
// list iterator
|
||||
level.it = level.values.begin();
|
||||
level.index = 0;
|
||||
level.size = level.values.size();
|
||||
}
|
||||
|
||||
// provide parent access in nested loop
|
||||
auto parent_loop_it = level.data.find("loop");
|
||||
if (parent_loop_it != level.data.end()) {
|
||||
json loop_copy = *parent_loop_it;
|
||||
(*parent_loop_it)["parent"] = std::move(loop_copy);
|
||||
}
|
||||
|
||||
// set "current" data to loop data
|
||||
m_data = &level.data;
|
||||
update_loop_data();
|
||||
break;
|
||||
}
|
||||
case Bytecode::Op::EndLoop: {
|
||||
if (m_loop_stack.empty()) {
|
||||
inja_throw("render_error", "unexpected state in renderer");
|
||||
}
|
||||
LoopLevel& level = m_loop_stack.back();
|
||||
|
||||
bool done;
|
||||
if (level.key_name.empty()) {
|
||||
level.it += 1;
|
||||
level.index += 1;
|
||||
// done = (level.it == level.values.end());
|
||||
done = (level.index == level.values.size());
|
||||
} else {
|
||||
level.map_it += 1;
|
||||
done = (level.map_it == level.map_values.end());
|
||||
}
|
||||
|
||||
if (done) {
|
||||
m_loop_stack.pop_back();
|
||||
// set "current" data to outer loop data or main data as appropriate
|
||||
if (!m_loop_stack.empty()) {
|
||||
m_data = &m_loop_stack.back().data;
|
||||
} else {
|
||||
m_data = &data;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
update_loop_data();
|
||||
|
||||
// jump back to start of loop
|
||||
i = bc.args - 1; // -1 due to ++i in loop
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
inja_throw("render_error", "unknown op in renderer: " + std::to_string(static_cast<unsigned int>(bc.op)));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace inja
|
||||
|
||||
#endif // PANTOR_INJA_RENDERER_HPP
|
||||
41
include/inja/template.hpp
Normal file
41
include/inja/template.hpp
Normal file
@@ -0,0 +1,41 @@
|
||||
#ifndef PANTOR_INJA_TEMPLATE_HPP
|
||||
#define PANTOR_INJA_TEMPLATE_HPP
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
#include "bytecode.hpp"
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
class Template {
|
||||
friend class Parser;
|
||||
friend class Renderer;
|
||||
|
||||
std::vector<Bytecode> bytecodes;
|
||||
std::string content;
|
||||
|
||||
public:
|
||||
Template() {}
|
||||
Template(const Template& oth): bytecodes(oth.bytecodes), content(oth.content) {}
|
||||
Template(Template&& oth): bytecodes(std::move(oth.bytecodes)), content(std::move(oth.content)) {}
|
||||
|
||||
Template& operator=(const Template& oth) {
|
||||
bytecodes = oth.bytecodes;
|
||||
content = oth.content;
|
||||
return *this;
|
||||
}
|
||||
|
||||
Template& operator=(Template&& oth) {
|
||||
bytecodes = std::move(oth.bytecodes);
|
||||
content = std::move(oth.content);
|
||||
return *this;
|
||||
}
|
||||
};
|
||||
|
||||
using TemplateStorage = std::map<std::string, Template>;
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_TEMPLATE_HPP
|
||||
62
include/inja/token.hpp
Normal file
62
include/inja/token.hpp
Normal file
@@ -0,0 +1,62 @@
|
||||
#ifndef PANTOR_INJA_TOKEN_HPP
|
||||
#define PANTOR_INJA_TOKEN_HPP
|
||||
|
||||
#include <string_view>
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
struct Token {
|
||||
enum class Kind {
|
||||
Text,
|
||||
ExpressionOpen, // {{
|
||||
ExpressionClose, // }}
|
||||
LineStatementOpen, // ##
|
||||
LineStatementClose, // \n
|
||||
StatementOpen, // {%
|
||||
StatementClose, // %}
|
||||
CommentOpen, // {#
|
||||
CommentClose, // #}
|
||||
Id, // this, this.foo
|
||||
Number, // 1, 2, -1, 5.2, -5.3
|
||||
String, // "this"
|
||||
Comma, // ,
|
||||
Colon, // :
|
||||
LeftParen, // (
|
||||
RightParen, // )
|
||||
LeftBracket, // [
|
||||
RightBracket, // ]
|
||||
LeftBrace, // {
|
||||
RightBrace, // }
|
||||
Equal, // ==
|
||||
GreaterThan, // >
|
||||
GreaterEqual, // >=
|
||||
LessThan, // <
|
||||
LessEqual, // <=
|
||||
NotEqual, // !=
|
||||
Unknown,
|
||||
Eof
|
||||
} kind {Kind::Unknown};
|
||||
|
||||
std::string_view text;
|
||||
|
||||
constexpr Token() = default;
|
||||
constexpr Token(Kind kind, std::string_view text): kind(kind), text(text) {}
|
||||
|
||||
std::string describe() const {
|
||||
switch (kind) {
|
||||
case Kind::Text:
|
||||
return "<text>";
|
||||
case Kind::LineStatementClose:
|
||||
return "<eol>";
|
||||
case Kind::Eof:
|
||||
return "<eof>";
|
||||
default:
|
||||
return static_cast<std::string>(text);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_TOKEN_HPP
|
||||
36
include/inja/utils.hpp
Normal file
36
include/inja/utils.hpp
Normal file
@@ -0,0 +1,36 @@
|
||||
#ifndef PANTOR_INJA_UTILS_HPP
|
||||
#define PANTOR_INJA_UTILS_HPP
|
||||
|
||||
#include <stdexcept>
|
||||
#include <string_view>
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
inline void inja_throw(const std::string& type, const std::string& message) {
|
||||
throw std::runtime_error("[inja.exception." + type + "] " + message);
|
||||
}
|
||||
|
||||
namespace string_view {
|
||||
inline std::string_view slice(std::string_view view, size_t start, size_t end) {
|
||||
start = std::min(start, view.size());
|
||||
end = std::min(std::max(start, end), view.size());
|
||||
return view.substr(start, end - start); // StringRef(Data + Start, End - Start);
|
||||
}
|
||||
|
||||
inline std::pair<std::string_view, std::string_view> split(std::string_view view, char Separator) {
|
||||
size_t idx = view.find(Separator);
|
||||
if (idx == std::string_view::npos) {
|
||||
return std::make_pair(view, std::string_view());
|
||||
}
|
||||
return std::make_pair(slice(view, 0, idx), slice(view, idx + 1, std::string_view::npos));
|
||||
}
|
||||
|
||||
inline bool starts_with(std::string_view view, std::string_view prefix) {
|
||||
return (view.size() >= prefix.size() && view.compare(0, prefix.size(), prefix) == 0);
|
||||
}
|
||||
} // namespace string
|
||||
|
||||
} // namespace inja
|
||||
|
||||
#endif // PANTOR_INJA_UTILS_HPP
|
||||
@@ -1,154 +0,0 @@
|
||||
#ifndef PANTOR_INJA_PARSED_HPP
|
||||
#define PANTOR_INJA_PARSED_HPP
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
using json = nlohmann::json;
|
||||
|
||||
|
||||
enum class ElementNotation {
|
||||
Dot,
|
||||
Pointer
|
||||
};
|
||||
|
||||
struct Parsed {
|
||||
enum class Type {
|
||||
Comment,
|
||||
Condition,
|
||||
ConditionBranch,
|
||||
Expression,
|
||||
Loop,
|
||||
Main,
|
||||
String
|
||||
};
|
||||
|
||||
enum class Delimiter {
|
||||
Comment,
|
||||
Expression,
|
||||
LineStatement,
|
||||
Statement
|
||||
};
|
||||
|
||||
enum class Statement {
|
||||
Condition,
|
||||
Include,
|
||||
Loop
|
||||
};
|
||||
|
||||
enum class Function {
|
||||
Not,
|
||||
And,
|
||||
Or,
|
||||
In,
|
||||
Equal,
|
||||
Greater,
|
||||
GreaterEqual,
|
||||
Less,
|
||||
LessEqual,
|
||||
Different,
|
||||
Callback,
|
||||
DivisibleBy,
|
||||
Even,
|
||||
First,
|
||||
Float,
|
||||
Int,
|
||||
Last,
|
||||
Length,
|
||||
Lower,
|
||||
Max,
|
||||
Min,
|
||||
Odd,
|
||||
Range,
|
||||
Result,
|
||||
Round,
|
||||
Sort,
|
||||
Upper,
|
||||
ReadJson,
|
||||
Exists,
|
||||
ExistsInObject,
|
||||
IsBoolean,
|
||||
IsNumber,
|
||||
IsInteger,
|
||||
IsFloat,
|
||||
IsObject,
|
||||
IsArray,
|
||||
IsString,
|
||||
Default
|
||||
};
|
||||
|
||||
enum class Condition {
|
||||
If,
|
||||
ElseIf,
|
||||
Else
|
||||
};
|
||||
|
||||
enum class Loop {
|
||||
ForListIn,
|
||||
ForMapIn
|
||||
};
|
||||
|
||||
struct Element {
|
||||
Type type;
|
||||
std::string inner;
|
||||
std::vector<std::shared_ptr<Element>> children;
|
||||
|
||||
explicit Element(): Element(Type::Main, "") { }
|
||||
explicit Element(const Type type): Element(type, "") { }
|
||||
explicit Element(const Type type, const std::string& inner): type(type), inner(inner), children({}) { }
|
||||
};
|
||||
|
||||
struct ElementString: public Element {
|
||||
const std::string text;
|
||||
|
||||
explicit ElementString(const std::string& text): Element(Type::String), text(text) { }
|
||||
};
|
||||
|
||||
struct ElementComment: public Element {
|
||||
const std::string text;
|
||||
|
||||
explicit ElementComment(const std::string& text): Element(Type::Comment), text(text) { }
|
||||
};
|
||||
|
||||
struct ElementExpression: public Element {
|
||||
Function function;
|
||||
std::vector<ElementExpression> args;
|
||||
std::string command;
|
||||
json result;
|
||||
|
||||
explicit ElementExpression(): ElementExpression(Function::ReadJson) { }
|
||||
explicit ElementExpression(const Function function_): Element(Type::Expression), function(function_), args({}), command("") { }
|
||||
};
|
||||
|
||||
struct ElementLoop: public Element {
|
||||
Loop loop;
|
||||
const std::string key;
|
||||
const std::string value;
|
||||
const ElementExpression list;
|
||||
|
||||
explicit ElementLoop(const Loop loop_, const std::string& value, const ElementExpression& list, const std::string& inner): Element(Type::Loop, inner), loop(loop_), value(value), list(list) { }
|
||||
explicit ElementLoop(const Loop loop_, const std::string& key, const std::string& value, const ElementExpression& list, const std::string& inner): Element(Type::Loop, inner), loop(loop_), key(key), value(value), list(list) { }
|
||||
};
|
||||
|
||||
struct ElementConditionContainer: public Element {
|
||||
explicit ElementConditionContainer(): Element(Type::Condition) { }
|
||||
};
|
||||
|
||||
struct ElementConditionBranch: public Element {
|
||||
const Condition condition_type;
|
||||
const ElementExpression condition;
|
||||
|
||||
explicit ElementConditionBranch(const std::string& inner, const Condition condition_type): Element(Type::ConditionBranch, inner), condition_type(condition_type) { }
|
||||
explicit ElementConditionBranch(const std::string& inner, const Condition condition_type, const ElementExpression& condition): Element(Type::ConditionBranch, inner), condition_type(condition_type), condition(condition) { }
|
||||
};
|
||||
|
||||
using Arguments = std::vector<ElementExpression>;
|
||||
using CallbackSignature = std::pair<std::string, size_t>;
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_PARSED_HPP
|
||||
@@ -1,345 +0,0 @@
|
||||
#ifndef PANTOR_INJA_PARSER_HPP
|
||||
#define PANTOR_INJA_PARSER_HPP
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <map>
|
||||
#include <string>
|
||||
|
||||
#include <regex.hpp>
|
||||
#include <template.hpp>
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
using json = nlohmann::json;
|
||||
|
||||
|
||||
class Parser {
|
||||
public:
|
||||
ElementNotation element_notation = ElementNotation::Pointer;
|
||||
|
||||
std::map<Parsed::CallbackSignature, Regex, std::greater<Parsed::CallbackSignature>> regex_map_callbacks;
|
||||
|
||||
std::map<const std::string, Template> included_templates;
|
||||
|
||||
/*!
|
||||
@brief create a corresponding regex for a function name with a number of arguments separated by ,
|
||||
*/
|
||||
static Regex function_regex(const std::string& name, int number_arguments) {
|
||||
std::string pattern = name;
|
||||
pattern.append("(?:\\(");
|
||||
for (int i = 0; i < number_arguments; i++) {
|
||||
if (i != 0) pattern.append(",");
|
||||
pattern.append("(.*)");
|
||||
}
|
||||
pattern.append("\\))");
|
||||
if (number_arguments == 0) { // Without arguments, allow to use the callback without parenthesis
|
||||
pattern.append("?");
|
||||
}
|
||||
return Regex{"\\s*" + pattern + "\\s*"};
|
||||
}
|
||||
|
||||
/*!
|
||||
@brief dot notation to json pointer notation
|
||||
*/
|
||||
static std::string dot_to_json_pointer_notation(const std::string& dot) {
|
||||
std::string result = dot;
|
||||
while (result.find(".") != std::string::npos) {
|
||||
result.replace(result.find("."), 1, "/");
|
||||
}
|
||||
result.insert(0, "/");
|
||||
return result;
|
||||
}
|
||||
|
||||
std::map<Parsed::Delimiter, Regex> regex_map_delimiters = {
|
||||
{Parsed::Delimiter::Statement, Regex{"\\{\\%\\s*(.+?)\\s*\\%\\}"}},
|
||||
{Parsed::Delimiter::LineStatement, Regex{"(?:^|\\n)## *(.+?) *(?:\\n|$)"}},
|
||||
{Parsed::Delimiter::Expression, Regex{"\\{\\{\\s*(.+?)\\s*\\}\\}"}},
|
||||
{Parsed::Delimiter::Comment, Regex{"\\{#\\s*(.*?)\\s*#\\}"}}
|
||||
};
|
||||
|
||||
const std::map<Parsed::Statement, Regex> regex_map_statement_openers = {
|
||||
{Parsed::Statement::Loop, Regex{"for\\s+(.+)"}},
|
||||
{Parsed::Statement::Condition, Regex{"if\\s+(.+)"}},
|
||||
{Parsed::Statement::Include, Regex{"include\\s+\"(.+)\""}}
|
||||
};
|
||||
|
||||
const std::map<Parsed::Statement, Regex> regex_map_statement_closers = {
|
||||
{Parsed::Statement::Loop, Regex{"endfor"}},
|
||||
{Parsed::Statement::Condition, Regex{"endif"}}
|
||||
};
|
||||
|
||||
const std::map<Parsed::Loop, Regex> regex_map_loop = {
|
||||
{Parsed::Loop::ForListIn, Regex{"for\\s+(\\w+)\\s+in\\s+(.+)"}},
|
||||
{Parsed::Loop::ForMapIn, Regex{"for\\s+(\\w+),\\s+(\\w+)\\s+in\\s+(.+)"}},
|
||||
};
|
||||
|
||||
const std::map<Parsed::Condition, Regex> regex_map_condition = {
|
||||
{Parsed::Condition::If, Regex{"if\\s+(.+)"}},
|
||||
{Parsed::Condition::ElseIf, Regex{"else\\s+if\\s+(.+)"}},
|
||||
{Parsed::Condition::Else, Regex{"else"}}
|
||||
};
|
||||
|
||||
const std::map<Parsed::Function, Regex> regex_map_functions = {
|
||||
{Parsed::Function::Not, Regex{"not (.+)"}},
|
||||
{Parsed::Function::And, Regex{"(.+) and (.+)"}},
|
||||
{Parsed::Function::Or, Regex{"(.+) or (.+)"}},
|
||||
{Parsed::Function::In, Regex{"(.+) in (.+)"}},
|
||||
{Parsed::Function::Equal, Regex{"(.+) == (.+)"}},
|
||||
{Parsed::Function::Greater, Regex{"(.+) > (.+)"}},
|
||||
{Parsed::Function::Less, Regex{"(.+) < (.+)"}},
|
||||
{Parsed::Function::GreaterEqual, Regex{"(.+) >= (.+)"}},
|
||||
{Parsed::Function::LessEqual, Regex{"(.+) <= (.+)"}},
|
||||
{Parsed::Function::Different, Regex{"(.+) != (.+)"}},
|
||||
{Parsed::Function::Default, function_regex("default", 2)},
|
||||
{Parsed::Function::DivisibleBy, function_regex("divisibleBy", 2)},
|
||||
{Parsed::Function::Even, function_regex("even", 1)},
|
||||
{Parsed::Function::First, function_regex("first", 1)},
|
||||
{Parsed::Function::Float, function_regex("float", 1)},
|
||||
{Parsed::Function::Int, function_regex("int", 1)},
|
||||
{Parsed::Function::Last, function_regex("last", 1)},
|
||||
{Parsed::Function::Length, function_regex("length", 1)},
|
||||
{Parsed::Function::Lower, function_regex("lower", 1)},
|
||||
{Parsed::Function::Max, function_regex("max", 1)},
|
||||
{Parsed::Function::Min, function_regex("min", 1)},
|
||||
{Parsed::Function::Odd, function_regex("odd", 1)},
|
||||
{Parsed::Function::Range, function_regex("range", 1)},
|
||||
{Parsed::Function::Round, function_regex("round", 2)},
|
||||
{Parsed::Function::Sort, function_regex("sort", 1)},
|
||||
{Parsed::Function::Upper, function_regex("upper", 1)},
|
||||
{Parsed::Function::Exists, function_regex("exists", 1)},
|
||||
{Parsed::Function::ExistsInObject, function_regex("existsIn", 2)},
|
||||
{Parsed::Function::IsBoolean, function_regex("isBoolean", 1)},
|
||||
{Parsed::Function::IsNumber, function_regex("isNumber", 1)},
|
||||
{Parsed::Function::IsInteger, function_regex("isInteger", 1)},
|
||||
{Parsed::Function::IsFloat, function_regex("isFloat", 1)},
|
||||
{Parsed::Function::IsObject, function_regex("isObject", 1)},
|
||||
{Parsed::Function::IsArray, function_regex("isArray", 1)},
|
||||
{Parsed::Function::IsString, function_regex("isString", 1)},
|
||||
{Parsed::Function::ReadJson, Regex{"\\s*([^\\(\\)]*\\S)\\s*"}}
|
||||
};
|
||||
|
||||
Parser() { }
|
||||
|
||||
Parsed::ElementExpression parse_expression(const std::string& input) {
|
||||
const MatchType<Parsed::CallbackSignature> match_callback = match(input, regex_map_callbacks);
|
||||
if (!match_callback.type().first.empty()) {
|
||||
std::vector<Parsed::ElementExpression> args {};
|
||||
for (unsigned int i = 1; i < match_callback.size(); i++) { // str(0) is whole group
|
||||
args.push_back( parse_expression(match_callback.str(i)) );
|
||||
}
|
||||
|
||||
Parsed::ElementExpression result = Parsed::ElementExpression(Parsed::Function::Callback);
|
||||
result.args = args;
|
||||
result.command = match_callback.type().first;
|
||||
return result;
|
||||
}
|
||||
|
||||
const MatchType<Parsed::Function> match_function = match(input, regex_map_functions);
|
||||
switch ( match_function.type() ) {
|
||||
case Parsed::Function::ReadJson: {
|
||||
std::string command = match_function.str(1);
|
||||
if ( json::accept(command) ) { // JSON Result
|
||||
Parsed::ElementExpression result = Parsed::ElementExpression(Parsed::Function::Result);
|
||||
result.result = json::parse(command);
|
||||
return result;
|
||||
}
|
||||
|
||||
Parsed::ElementExpression result = Parsed::ElementExpression(Parsed::Function::ReadJson);
|
||||
switch (element_notation) {
|
||||
case ElementNotation::Pointer: {
|
||||
if (command[0] != '/') { command.insert(0, "/"); }
|
||||
result.command = command;
|
||||
break;
|
||||
}
|
||||
case ElementNotation::Dot: {
|
||||
result.command = dot_to_json_pointer_notation(command);
|
||||
break;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
default: {
|
||||
std::vector<Parsed::ElementExpression> args = {};
|
||||
for (unsigned int i = 1; i < match_function.size(); i++) { // str(0) is whole group
|
||||
args.push_back( parse_expression(match_function.str(i)) );
|
||||
}
|
||||
|
||||
Parsed::ElementExpression result = Parsed::ElementExpression(match_function.type());
|
||||
result.args = args;
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<std::shared_ptr<Parsed::Element>> parse_level(const std::string& input, const std::string& path) {
|
||||
std::vector<std::shared_ptr<Parsed::Element>> result;
|
||||
|
||||
size_t current_position = 0;
|
||||
MatchType<Parsed::Delimiter> match_delimiter = search(input, regex_map_delimiters, current_position);
|
||||
while (match_delimiter.found()) {
|
||||
current_position = match_delimiter.end_position();
|
||||
const std::string string_prefix = match_delimiter.prefix();
|
||||
if (not string_prefix.empty()) {
|
||||
result.emplace_back( std::make_shared<Parsed::ElementString>(string_prefix) );
|
||||
}
|
||||
|
||||
const std::string delimiter_inner = match_delimiter.str(1);
|
||||
|
||||
switch ( match_delimiter.type() ) {
|
||||
case Parsed::Delimiter::Statement:
|
||||
case Parsed::Delimiter::LineStatement: {
|
||||
|
||||
const MatchType<Parsed::Statement> match_statement = match(delimiter_inner, regex_map_statement_openers);
|
||||
switch ( match_statement.type() ) {
|
||||
case Parsed::Statement::Loop: {
|
||||
const MatchClosed loop_match = search_closed(input, match_delimiter.regex(), regex_map_statement_openers.at(Parsed::Statement::Loop), regex_map_statement_closers.at(Parsed::Statement::Loop), match_delimiter);
|
||||
|
||||
current_position = loop_match.end_position();
|
||||
|
||||
const std::string loop_inner = match_statement.str(0);
|
||||
const MatchType<Parsed::Loop> match_command = match(loop_inner, regex_map_loop);
|
||||
if (not match_command.found()) {
|
||||
inja_throw("parser_error", "unknown loop statement: " + loop_inner);
|
||||
}
|
||||
switch (match_command.type()) {
|
||||
case Parsed::Loop::ForListIn: {
|
||||
const std::string value_name = match_command.str(1);
|
||||
const std::string list_name = match_command.str(2);
|
||||
|
||||
result.emplace_back( std::make_shared<Parsed::ElementLoop>(match_command.type(), value_name, parse_expression(list_name), loop_match.inner()));
|
||||
break;
|
||||
}
|
||||
case Parsed::Loop::ForMapIn: {
|
||||
const std::string key_name = match_command.str(1);
|
||||
const std::string value_name = match_command.str(2);
|
||||
const std::string list_name = match_command.str(3);
|
||||
|
||||
result.emplace_back( std::make_shared<Parsed::ElementLoop>(match_command.type(), key_name, value_name, parse_expression(list_name), loop_match.inner()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case Parsed::Statement::Condition: {
|
||||
auto condition_container = std::make_shared<Parsed::ElementConditionContainer>();
|
||||
|
||||
Match condition_match = match_delimiter;
|
||||
MatchClosed else_if_match = search_closed_on_level(input, match_delimiter.regex(), regex_map_statement_openers.at(Parsed::Statement::Condition), regex_map_statement_closers.at(Parsed::Statement::Condition), regex_map_condition.at(Parsed::Condition::ElseIf), condition_match);
|
||||
while (else_if_match.found()) {
|
||||
condition_match = else_if_match.close_match;
|
||||
|
||||
const std::string else_if_match_inner = else_if_match.open_match.str(1);
|
||||
const MatchType<Parsed::Condition> match_command = match(else_if_match_inner, regex_map_condition);
|
||||
if (not match_command.found()) {
|
||||
inja_throw("parser_error", "unknown if statement: " + else_if_match.open_match.str());
|
||||
}
|
||||
condition_container->children.push_back( std::make_shared<Parsed::ElementConditionBranch>(else_if_match.inner(), match_command.type(), parse_expression(match_command.str(1))) );
|
||||
|
||||
else_if_match = search_closed_on_level(input, match_delimiter.regex(), regex_map_statement_openers.at(Parsed::Statement::Condition), regex_map_statement_closers.at(Parsed::Statement::Condition), regex_map_condition.at(Parsed::Condition::ElseIf), condition_match);
|
||||
}
|
||||
|
||||
MatchClosed else_match = search_closed_on_level(input, match_delimiter.regex(), regex_map_statement_openers.at(Parsed::Statement::Condition), regex_map_statement_closers.at(Parsed::Statement::Condition), regex_map_condition.at(Parsed::Condition::Else), condition_match);
|
||||
if (else_match.found()) {
|
||||
condition_match = else_match.close_match;
|
||||
|
||||
const std::string else_match_inner = else_match.open_match.str(1);
|
||||
const MatchType<Parsed::Condition> match_command = match(else_match_inner, regex_map_condition);
|
||||
if (not match_command.found()) {
|
||||
inja_throw("parser_error", "unknown if statement: " + else_match.open_match.str());
|
||||
}
|
||||
condition_container->children.push_back( std::make_shared<Parsed::ElementConditionBranch>(else_match.inner(), match_command.type(), parse_expression(match_command.str(1))) );
|
||||
}
|
||||
|
||||
const MatchClosed last_if_match = search_closed(input, match_delimiter.regex(), regex_map_statement_openers.at(Parsed::Statement::Condition), regex_map_statement_closers.at(Parsed::Statement::Condition), condition_match);
|
||||
if (not last_if_match.found()) {
|
||||
inja_throw("parser_error", "misordered if statement");
|
||||
}
|
||||
|
||||
const std::string last_if_match_inner = last_if_match.open_match.str(1);
|
||||
const MatchType<Parsed::Condition> match_command = match(last_if_match_inner, regex_map_condition);
|
||||
if (not match_command.found()) {
|
||||
inja_throw("parser_error", "unknown if statement: " + last_if_match.open_match.str());
|
||||
}
|
||||
if (match_command.type() == Parsed::Condition::Else) {
|
||||
condition_container->children.push_back( std::make_shared<Parsed::ElementConditionBranch>(last_if_match.inner(), match_command.type()) );
|
||||
} else {
|
||||
condition_container->children.push_back( std::make_shared<Parsed::ElementConditionBranch>(last_if_match.inner(), match_command.type(), parse_expression(match_command.str(1))) );
|
||||
}
|
||||
|
||||
current_position = last_if_match.end_position();
|
||||
result.emplace_back(condition_container);
|
||||
break;
|
||||
}
|
||||
case Parsed::Statement::Include: {
|
||||
const std::string template_name = match_statement.str(1);
|
||||
Template included_template;
|
||||
if (included_templates.find( template_name ) != included_templates.end()) {
|
||||
included_template = included_templates[template_name];
|
||||
} else {
|
||||
included_template = parse_template(path + template_name);
|
||||
}
|
||||
|
||||
auto children = included_template.parsed_template().children;
|
||||
result.insert(result.end(), children.begin(), children.end());
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
case Parsed::Delimiter::Expression: {
|
||||
result.emplace_back( std::make_shared<Parsed::ElementExpression>(parse_expression(delimiter_inner)) );
|
||||
break;
|
||||
}
|
||||
case Parsed::Delimiter::Comment: {
|
||||
result.emplace_back( std::make_shared<Parsed::ElementComment>(delimiter_inner) );
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
match_delimiter = search(input, regex_map_delimiters, current_position);
|
||||
}
|
||||
if (current_position < input.length()) {
|
||||
result.emplace_back( std::make_shared<Parsed::ElementString>(input.substr(current_position)) );
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
std::shared_ptr<Parsed::Element> parse_tree(std::shared_ptr<Parsed::Element> current_element, const std::string& path) {
|
||||
if (not current_element->inner.empty()) {
|
||||
current_element->children = parse_level(current_element->inner, path);
|
||||
current_element->inner.clear();
|
||||
}
|
||||
|
||||
if (not current_element->children.empty()) {
|
||||
for (auto& child: current_element->children) {
|
||||
child = parse_tree(child, path);
|
||||
}
|
||||
}
|
||||
return current_element;
|
||||
}
|
||||
|
||||
Template parse(const std::string& input) {
|
||||
auto parsed = parse_tree(std::make_shared<Parsed::Element>(Parsed::Element(Parsed::Type::Main, input)), "./");
|
||||
return Template(*parsed);
|
||||
}
|
||||
|
||||
Template parse_template(const std::string& filename) {
|
||||
const std::string input = load_file(filename);
|
||||
const std::string path = filename.substr(0, filename.find_last_of("/\\") + 1);
|
||||
auto parsed = parse_tree(std::make_shared<Parsed::Element>(Parsed::Element(Parsed::Type::Main, input)), path);
|
||||
return Template(*parsed);
|
||||
}
|
||||
|
||||
std::string load_file(const std::string& filename) {
|
||||
std::ifstream file(filename);
|
||||
std::string text((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>());
|
||||
return text;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_PARSER_HPP
|
||||
@@ -1,171 +0,0 @@
|
||||
#ifndef PANTOR_INJA_REGEX_HPP
|
||||
#define PANTOR_INJA_REGEX_HPP
|
||||
|
||||
#include <regex>
|
||||
#include <string>
|
||||
#include <sstream>
|
||||
|
||||
|
||||
namespace inja {
|
||||
|
||||
/*!
|
||||
@brief inja regex class, saves string pattern in addition to std::regex
|
||||
*/
|
||||
class Regex: public std::regex {
|
||||
std::string pattern_;
|
||||
|
||||
public:
|
||||
Regex(): std::regex() {}
|
||||
explicit Regex(const std::string& pattern): std::regex(pattern, std::regex_constants::ECMAScript), pattern_(pattern) { }
|
||||
|
||||
std::string pattern() const { return pattern_; }
|
||||
};
|
||||
|
||||
|
||||
class Match: public std::match_results<std::string::const_iterator> {
|
||||
size_t offset_ {0};
|
||||
unsigned int group_offset_ {0};
|
||||
Regex regex_;
|
||||
|
||||
public:
|
||||
Match(): std::match_results<std::string::const_iterator>() { }
|
||||
explicit Match(size_t offset): std::match_results<std::string::const_iterator>(), offset_(offset) { }
|
||||
explicit Match(size_t offset, const Regex& regex): std::match_results<std::string::const_iterator>(), offset_(offset), regex_(regex) { }
|
||||
|
||||
void set_group_offset(unsigned int group_offset) { group_offset_ = group_offset; }
|
||||
void set_regex(Regex regex) { regex_ = regex; }
|
||||
|
||||
size_t position() const { return offset_ + std::match_results<std::string::const_iterator>::position(); }
|
||||
size_t end_position() const { return position() + length(); }
|
||||
bool found() const { return not empty(); }
|
||||
const std::string str() const { return str(0); }
|
||||
const std::string str(int i) const { return std::match_results<std::string::const_iterator>::str(i + group_offset_); }
|
||||
Regex regex() const { return regex_; }
|
||||
};
|
||||
|
||||
|
||||
template<typename T>
|
||||
class MatchType: public Match {
|
||||
T type_;
|
||||
|
||||
public:
|
||||
MatchType(): Match() { }
|
||||
explicit MatchType(const Match& obj): Match(obj) { }
|
||||
MatchType(Match&& obj): Match(std::move(obj)) { }
|
||||
|
||||
void set_type(T type) { type_ = type; }
|
||||
|
||||
T type() const { return type_; }
|
||||
};
|
||||
|
||||
|
||||
class MatchClosed {
|
||||
public:
|
||||
Match open_match, close_match;
|
||||
|
||||
MatchClosed() { }
|
||||
MatchClosed(Match& open_match, Match& close_match): open_match(open_match), close_match(close_match) { }
|
||||
|
||||
size_t position() const { return open_match.position(); }
|
||||
size_t end_position() const { return close_match.end_position(); }
|
||||
size_t length() const { return close_match.end_position() - open_match.position(); }
|
||||
bool found() const { return open_match.found() and close_match.found(); }
|
||||
std::string prefix() const { return open_match.prefix().str(); }
|
||||
std::string suffix() const { return close_match.suffix().str(); }
|
||||
std::string outer() const { return open_match.str() + static_cast<std::string>(open_match.suffix()).substr(0, close_match.end_position() - open_match.end_position()); }
|
||||
std::string inner() const { return static_cast<std::string>(open_match.suffix()).substr(0, close_match.position() - open_match.end_position()); }
|
||||
};
|
||||
|
||||
|
||||
inline Match search(const std::string& input, const Regex& regex, size_t position) {
|
||||
if (position >= input.length()) { return Match(); }
|
||||
|
||||
Match match{position, regex};
|
||||
std::regex_search(input.cbegin() + position, input.cend(), match, regex);
|
||||
return match;
|
||||
}
|
||||
|
||||
|
||||
template<typename T>
|
||||
inline MatchType<T> search(const std::string& input, const std::map<T, Regex>& regexes, size_t position) {
|
||||
// Map to vectors
|
||||
std::vector<T> class_vector;
|
||||
std::vector<Regex> regexes_vector;
|
||||
for (const auto& element: regexes) {
|
||||
class_vector.push_back(element.first);
|
||||
regexes_vector.push_back(element.second);
|
||||
}
|
||||
|
||||
// Regex join
|
||||
std::stringstream ss;
|
||||
for (size_t i = 0; i < regexes_vector.size(); ++i)
|
||||
{
|
||||
if (i != 0) { ss << ")|("; }
|
||||
ss << regexes_vector[i].pattern();
|
||||
}
|
||||
Regex regex{"(" + ss.str() + ")"};
|
||||
|
||||
MatchType<T> search_match = search(input, regex, position);
|
||||
if (not search_match.found()) { return MatchType<T>(); }
|
||||
|
||||
// Vector of id vs groups
|
||||
std::vector<unsigned int> regex_mark_counts = {};
|
||||
for (unsigned int i = 0; i < regexes_vector.size(); i++) {
|
||||
for (unsigned int j = 0; j < regexes_vector[i].mark_count() + 1; j++) {
|
||||
regex_mark_counts.push_back(i);
|
||||
}
|
||||
}
|
||||
|
||||
for (unsigned int i = 1; i < search_match.size(); i++) {
|
||||
if (search_match.length(i) > 0) {
|
||||
search_match.set_group_offset(i);
|
||||
search_match.set_type(class_vector[regex_mark_counts[i]]);
|
||||
search_match.set_regex(regexes_vector[regex_mark_counts[i]]);
|
||||
return search_match;
|
||||
}
|
||||
}
|
||||
|
||||
inja_throw("regex_search_error", "error while searching in input: " + input);
|
||||
return search_match;
|
||||
}
|
||||
|
||||
inline MatchClosed search_closed_on_level(const std::string& input, const Regex& regex_statement, const Regex& regex_level_up, const Regex& regex_level_down, const Regex& regex_search, Match open_match) {
|
||||
|
||||
int level {0};
|
||||
size_t current_position = open_match.end_position();
|
||||
Match match_delimiter = search(input, regex_statement, current_position);
|
||||
while (match_delimiter.found()) {
|
||||
current_position = match_delimiter.end_position();
|
||||
|
||||
const std::string inner = match_delimiter.str(1);
|
||||
if (std::regex_match(inner.cbegin(), inner.cend(), regex_search) and level == 0) { break; }
|
||||
if (std::regex_match(inner.cbegin(), inner.cend(), regex_level_up)) { level += 1; }
|
||||
else if (std::regex_match(inner.cbegin(), inner.cend(), regex_level_down)) { level -= 1; }
|
||||
|
||||
if (level < 0) { return MatchClosed(); }
|
||||
match_delimiter = search(input, regex_statement, current_position);
|
||||
}
|
||||
|
||||
return MatchClosed(open_match, match_delimiter);
|
||||
}
|
||||
|
||||
inline MatchClosed search_closed(const std::string& input, const Regex& regex_statement, const Regex& regex_open, const Regex& regex_close, Match& open_match) {
|
||||
return search_closed_on_level(input, regex_statement, regex_open, regex_close, regex_close, open_match);
|
||||
}
|
||||
|
||||
template<typename T, typename S>
|
||||
inline MatchType<T> match(const std::string& input, const std::map<T, Regex, S>& regexes) {
|
||||
MatchType<T> match;
|
||||
for (const auto& e: regexes) {
|
||||
if (std::regex_match(input.cbegin(), input.cend(), match, e.second)) {
|
||||
match.set_type(e.first);
|
||||
match.set_regex(e.second);
|
||||
return match;
|
||||
}
|
||||
}
|
||||
return match;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
#endif // PANTOR_INJA_REGEX_HPP
|
||||
@@ -1,285 +0,0 @@
#ifndef PANTOR_INJA_RENDERER_HPP
#define PANTOR_INJA_RENDERER_HPP

#include <algorithm>
#include <string>
#include <sstream>


namespace inja {

using json = nlohmann::json;


class Renderer {
 public:
  std::map<Parsed::CallbackSignature, std::function<json(const Parsed::Arguments&, const json&)>> map_callbacks;

  template<bool>
  bool eval_expression(const Parsed::ElementExpression& element, const json& data) {
    const json var = eval_function(element, data);
    if (var.empty()) { return false; }
    else if (var.is_number()) { return (var != 0); }
    else if (var.is_string()) { return not var.empty(); }
    try {
      return var.get<bool>();
    } catch (json::type_error& e) {
      inja_throw("json_error", e.what());
      throw;
    }
  }

  template<typename T = json>
  T eval_expression(const Parsed::ElementExpression& element, const json& data) {
    const json var = eval_function(element, data);
    if (var.empty()) return T();
    try {
      return var.get<T>();
    } catch (json::type_error& e) {
      inja_throw("json_error", e.what());
      throw;
    }
  }

  json eval_function(const Parsed::ElementExpression& element, const json& data) {
    switch (element.function) {
      case Parsed::Function::Upper: {
        std::string str = eval_expression<std::string>(element.args[0], data);
        std::transform(str.begin(), str.end(), str.begin(), ::toupper);
        return str;
      }
      case Parsed::Function::Lower: {
        std::string str = eval_expression<std::string>(element.args[0], data);
        std::transform(str.begin(), str.end(), str.begin(), ::tolower);
        return str;
      }
      case Parsed::Function::Range: {
        const int number = eval_expression<int>(element.args[0], data);
        std::vector<int> result(number);
        std::iota(std::begin(result), std::end(result), 0);
        return result;
      }
      case Parsed::Function::Length: {
        const std::vector<json> list = eval_expression<std::vector<json>>(element.args[0], data);
        return list.size();
      }
      case Parsed::Function::Sort: {
        std::vector<json> list = eval_expression<std::vector<json>>(element.args[0], data);
        std::sort(list.begin(), list.end());
        return list;
      }
      case Parsed::Function::First: {
        const std::vector<json> list = eval_expression<std::vector<json>>(element.args[0], data);
        return list.front();
      }
      case Parsed::Function::Last: {
        const std::vector<json> list = eval_expression<std::vector<json>>(element.args[0], data);
        return list.back();
      }
      case Parsed::Function::Round: {
        const double number = eval_expression<double>(element.args[0], data);
        const int precision = eval_expression<int>(element.args[1], data);
        return std::round(number * std::pow(10.0, precision)) / std::pow(10.0, precision);
      }
      case Parsed::Function::DivisibleBy: {
        const int number = eval_expression<int>(element.args[0], data);
        const int divisor = eval_expression<int>(element.args[1], data);
        return (divisor != 0) && (number % divisor == 0);
      }
      case Parsed::Function::Odd: {
        const int number = eval_expression<int>(element.args[0], data);
        return (number % 2 != 0);
      }
      case Parsed::Function::Even: {
        const int number = eval_expression<int>(element.args[0], data);
        return (number % 2 == 0);
      }
      case Parsed::Function::Max: {
        const std::vector<json> list = eval_expression<std::vector<json>>(element.args[0], data);
        return *std::max_element(list.begin(), list.end());
      }
      case Parsed::Function::Min: {
        const std::vector<json> list = eval_expression<std::vector<json>>(element.args[0], data);
        return *std::min_element(list.begin(), list.end());
      }
      case Parsed::Function::Not: {
        return not eval_expression<bool>(element.args[0], data);
      }
      case Parsed::Function::And: {
        return (eval_expression<bool>(element.args[0], data) and eval_expression<bool>(element.args[1], data));
      }
      case Parsed::Function::Or: {
        return (eval_expression<bool>(element.args[0], data) or eval_expression<bool>(element.args[1], data));
      }
      case Parsed::Function::In: {
        const json value = eval_expression(element.args[0], data);
        const json list = eval_expression(element.args[1], data);
        return (std::find(list.begin(), list.end(), value) != list.end());
      }
      case Parsed::Function::Equal: {
        return eval_expression(element.args[0], data) == eval_expression(element.args[1], data);
      }
      case Parsed::Function::Greater: {
        return eval_expression(element.args[0], data) > eval_expression(element.args[1], data);
      }
      case Parsed::Function::Less: {
        return eval_expression(element.args[0], data) < eval_expression(element.args[1], data);
      }
      case Parsed::Function::GreaterEqual: {
        return eval_expression(element.args[0], data) >= eval_expression(element.args[1], data);
      }
      case Parsed::Function::LessEqual: {
        return eval_expression(element.args[0], data) <= eval_expression(element.args[1], data);
      }
      case Parsed::Function::Different: {
        return eval_expression(element.args[0], data) != eval_expression(element.args[1], data);
      }
      case Parsed::Function::Float: {
        return std::stod(eval_expression<std::string>(element.args[0], data));
      }
      case Parsed::Function::Int: {
        return std::stoi(eval_expression<std::string>(element.args[0], data));
      }
      case Parsed::Function::ReadJson: {
        try {
          return data.at(json::json_pointer(element.command));
        } catch (std::exception&) {
          inja_throw("render_error", "variable '" + element.command + "' not found");
        }
      }
      case Parsed::Function::Result: {
        return element.result;
      }
      case Parsed::Function::Default: {
        try {
          return eval_expression(element.args[0], data);
        } catch (std::exception&) {
          return eval_expression(element.args[1], data);
        }
      }
      case Parsed::Function::Callback: {
        Parsed::CallbackSignature signature = std::make_pair(element.command, element.args.size());
        return map_callbacks.at(signature)(element.args, data);
      }
      case Parsed::Function::Exists: {
        const std::string name = eval_expression<std::string>(element.args[0], data);
        return data.find(name) != data.end();
      }
      case Parsed::Function::ExistsInObject: {
        const std::string name = eval_expression<std::string>(element.args[1], data);
        const json d = eval_expression(element.args[0], data);
        return d.find(name) != d.end();
      }
      case Parsed::Function::IsBoolean: {
        const json d = eval_expression(element.args[0], data);
        return d.is_boolean();
      }
      case Parsed::Function::IsNumber: {
        const json d = eval_expression(element.args[0], data);
        return d.is_number();
      }
      case Parsed::Function::IsInteger: {
        const json d = eval_expression(element.args[0], data);
        return d.is_number_integer();
      }
      case Parsed::Function::IsFloat: {
        const json d = eval_expression(element.args[0], data);
        return d.is_number_float();
      }
      case Parsed::Function::IsObject: {
        const json d = eval_expression(element.args[0], data);
        return d.is_object();
      }
      case Parsed::Function::IsArray: {
        const json d = eval_expression(element.args[0], data);
        return d.is_array();
      }
      case Parsed::Function::IsString: {
        const json d = eval_expression(element.args[0], data);
        return d.is_string();
      }
    }

    inja_throw("render_error", "unknown function in renderer: " + element.command);
    return json();
  }

  std::string render(Template temp, const json& data) {
    std::string result {""};
    for (const auto& element: temp.parsed_template().children) {
      switch (element->type) {
        case Parsed::Type::String: {
          auto element_string = std::static_pointer_cast<Parsed::ElementString>(element);
          result.append(element_string->text);
          break;
        }
        case Parsed::Type::Expression: {
          auto element_expression = std::static_pointer_cast<Parsed::ElementExpression>(element);
          const json variable = eval_expression(*element_expression, data);

          if (variable.is_string()) {
            result.append( variable.get<std::string>() );
          } else {
            std::stringstream ss;
            ss << variable;
            result.append( ss.str() );
          }
          break;
        }
        case Parsed::Type::Loop: {
          auto element_loop = std::static_pointer_cast<Parsed::ElementLoop>(element);
          switch (element_loop->loop) {
            case Parsed::Loop::ForListIn: {
              const std::vector<json> list = eval_expression<std::vector<json>>(element_loop->list, data);
              for (unsigned int i = 0; i < list.size(); i++) {
                json data_loop = data;
                /* For nested loops, use parent/index */
                if (data_loop.count("loop") == 1) {
                  data_loop["loop"]["parent"] = data_loop["loop"];
                }
                data_loop[element_loop->value] = list[i];
                data_loop["loop"]["index"] = i;
                data_loop["loop"]["index1"] = i + 1;
                data_loop["loop"]["is_first"] = (i == 0);
                data_loop["loop"]["is_last"] = (i == list.size() - 1);
                result.append( render(Template(*element_loop), data_loop) );
              }
              break;
            }
            case Parsed::Loop::ForMapIn: {
              const std::map<std::string, json> map = eval_expression<std::map<std::string, json>>(element_loop->list, data);
              for (const auto& item: map) {
                json data_loop = data;
                data_loop[element_loop->key] = item.first;
                data_loop[element_loop->value] = item.second;
                result.append( render(Template(*element_loop), data_loop) );
              }
              break;
            }
          }

          break;
        }
        case Parsed::Type::Condition: {
          auto element_condition = std::static_pointer_cast<Parsed::ElementConditionContainer>(element);
          for (const auto& branch: element_condition->children) {
            auto element_branch = std::static_pointer_cast<Parsed::ElementConditionBranch>(branch);
            if (element_branch->condition_type == Parsed::Condition::Else || eval_expression<bool>(element_branch->condition, data)) {
              result.append( render(Template(*element_branch), data) );
              break;
            }
          }
          break;
        }
        default: {
          break;
        }
      }
    }
    return result;
  }
};

}

#endif // PANTOR_INJA_RENDERER_HPP
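The ForListIn branch above injects loop index, index1, is_first and is_last into the data passed to each iteration. A minimal sketch of what that gives template authors, using the dot notation and v2 header path that the rest of this diff switches to (both assumptions here, mirroring the unit tests below):

#include <iostream>
#include <nlohmann/json.hpp>
#include <inja/inja.hpp>  // v2 header layout used elsewhere in this diff

int main() {
  inja::Environment env;
  nlohmann::json data;
  data["names"] = {"Jeff", "Seb"};

  // loop.index and loop.is_last come from the renderer's ForListIn handling.
  std::cout << env.render(
      "{% for name in names %}{{ loop.index }}: {{ name }}"
      "{% if not loop.is_last %}, {% endif %}{% endfor %}",
      data) << std::endl;
  // Expected output, following the loop tests in this diff: 0: Jeff, 1: Seb
  return 0;
}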
@@ -1,19 +0,0 @@
#ifndef PANTOR_INJA_TEMPLATE_HPP
#define PANTOR_INJA_TEMPLATE_HPP


namespace inja {

class Template {
  Parsed::Element _parsed_template;

 public:
  const Parsed::Element parsed_template() { return _parsed_template; }

  explicit Template(): _parsed_template(Parsed::Element()) { }
  explicit Template(const Parsed::Element& parsed_template): _parsed_template(parsed_template) { }
};

}

#endif // PANTOR_INJA_TEMPLATE_HPP
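A Template is just a parsed representation that can be rendered repeatedly. A short sketch of the parse-once, render-many pattern, following the "templates" test case later in this diff (v2 API assumed):

#include <string>
#include <nlohmann/json.hpp>
#include <inja/inja.hpp>  // v2 header layout (assumption)

int main() {
  inja::Environment env;
  nlohmann::json data;
  data["name"] = "Peter";
  data["city"] = "Brunswick";
  data["is_happy"] = true;

  // Parse once, then render as often as needed with different data.
  inja::Template temp = env.parse("{% if is_happy %}{{ name }}{% else %}{{ city }}{% endif %}");
  std::string a = env.render(temp, data);   // "Peter"

  data["is_happy"] = false;
  std::string b = env.render(temp, data);   // "Brunswick"
  return a == "Peter" && b == "Brunswick" ? 0 : 1;
}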
@@ -1,16 +0,0 @@
#ifndef PANTOR_INJA_UTILS_HPP
#define PANTOR_INJA_UTILS_HPP

#include <string>


namespace inja {
/*!
@brief render with default settings
*/
inline std::string render(const std::string& input, const json& data) {
  return Environment().render(input, data);
}
}

#endif // PANTOR_INJA_UTILS_HPP
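For one-off renders, the inja::render free function above simply builds a throwaway Environment. A minimal usage sketch (header path assumed from the v2 tests in this diff):

#include <iostream>
#include <nlohmann/json.hpp>
#include <inja/inja.hpp>

int main() {
  nlohmann::json data;
  data["name"] = "world";

  // Equivalent to Environment().render(...), per the helper above.
  std::cout << inja::render("Hello {{ name }}!", data) << std::endl;  // Hello world!
  return 0;
}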
32  meson.build

@@ -1,22 +1,28 @@
project('inja', 'cpp', default_options: ['cpp_std=c++11'])
project('inja', 'cpp', default_options: ['cpp_std=c++17'])


#option('build_tests', type: 'boolean', value: true)
#option('build_benchmark', type: 'boolean', value: true)


inja_dep = declare_dependency(
include_directories: include_directories('include')
)

inja_single_dep = declare_dependency(
include_directories: include_directories('single_include', 'include')

inja_test = executable(
'inja_test',
'test/unit.cpp',
'test/unit-files.cpp',
'test/unit-renderer.cpp',
dependencies: inja_dep
)

inja_benchmark = executable(
'inja_benchmark',
'test/benchmark.cpp',
dependencies: inja_dep
)


# Amalgamate inja header files
r = run_command('python3', 'amalgamate/amalgamate.py', '-c', 'amalgamate/config.json', '-s', 'include')
if r.returncode() != 0
message(r.stdout().strip())
else
message('Amalgamated inja header files.')
endif


subdir('test')
test('Inja unit test', inja_test)
2018  single_include/inja/inja.hpp  (new file; diff suppressed because it is too large)
@@ -1,92 +0,0 @@
##
## HUNTER
##
option(HUNTER_ENABLED "Use hunter to manage dependencies" OFF)
if(HUNTER_ENABLED)
include("../cmake/HunterGate.cmake")
HunterGate(
URL "https://github.com/ruslo/hunter/archive/v0.19.156.tar.gz"
SHA1 "8d5e4635b137365e0d1ade4d60accf4e2bb41f0d"
)
endif()


##
## TESTS
##
add_executable(inja_test
src/unit-files.cpp
src/unit-renderer.cpp
src/unit-string-helper.cpp
src/unit.cpp
)

add_executable(inja_single_test
src/unit-files.cpp
src/unit-renderer.cpp
src/unit-string-helper.cpp
src/unit.cpp
)


if(HUNTER_ENABLED) # Use Hunter to manage dependencies
# Add Catch framework
hunter_add_package(Catch)
find_package(Catch CONFIG REQUIRED)

# Add JSON package
hunter_add_package(nlohmann_json)
find_package(nlohmann_json CONFIG REQUIRED)

# Add dependencies to target
target_link_libraries(inja_test Catch::Catch nlohmann_json inja)
else() # Manage dependencies manually
# Prepare "Catch" library for other executables
add_library(Catch INTERFACE)
target_include_directories(Catch INTERFACE "src/catch")

# Prepare "hayai" library for other executables
add_library(hayai INTERFACE)
target_include_directories(hayai INTERFACE "src/hayai")

# Add dependencies to targets
target_link_libraries(inja_test Catch inja)
target_link_libraries(inja_single_test Catch inja_single)
endif()


##
## BENCHMARK
##
if(BUILD_BENCHMARK)
add_executable(inja_benchmark
src/benchmark.cpp
)

target_link_libraries(inja_benchmark hayai inja)
endif()


##
## Copy test files to build directory
##
add_custom_command(
TARGET inja_test POST_BUILD
COMMAND ${CMAKE_COMMAND} -E copy_directory
${CMAKE_CURRENT_SOURCE_DIR}/data
${CMAKE_CURRENT_BINARY_DIR}/data
)


##
## Add tests to make
##
add_test(NAME inja_test
COMMAND inja_test
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)

add_test(NAME inja_single_test
COMMAND inja_single_test
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
)
@@ -1,11 +1,11 @@
#include "hayai/hayai.hpp"
#include "inja.hpp"
#include <inja/inja.hpp>


using json = nlohmann::json;


inja::Environment env = inja::Environment();
inja::Environment env;

json data = {{"name", "Peter"}};
11  test/data/html/data.json  (new file)
@@ -0,0 +1,11 @@
{
"author": "Pantor",
"date": "23/12/2018",
"tags": [
"test",
"templates"
],
"views": 123,
"title": "Inja works.",
"content": "Inja is the best and fastest template engine for C++. Period."
}

19  test/data/html/result.txt  (new file)
@@ -0,0 +1,19 @@
<!DOCTYPE html>
<html>
<head>
<title>Inja works.</title>
</head>
<body>
<h1>Inja works.</h1>
<small>Written by Pantor</small>

<p>Inja is the best and fastest template engine for C++. Period.</p>
<small>123 views</small>

<h5>Tags</h5>
<ul>
<li>test</li>
<li>templates</li>
</ul>
</body>
</html>

20  test/data/html/template.txt  (new file)
@@ -0,0 +1,20 @@
<!DOCTYPE html>
<html>
<head>
<title>{{ title }}</title>
</head>
<body>
<h1>{{ title }}</h1>
<small>Written by {{ author }}</small>

<p>{{ content }}</p>
<small>{{ views }} views</small>

<h5>Tags</h5>
<ul>
## for tag in tags
<li>{{ tag }}</li>
## endfor
</ul>
</body>
</html>
0  test/data/include.txt  (Normal file → Executable file)
0  test/data/nested-line/data.json  (Normal file → Executable file)
0  test/data/nested-line/result.txt  (Normal file → Executable file)
1  test/data/nested-line/template.txt  (Normal file → Executable file)
@@ -1,6 +1,5 @@
## for x in xarray
## for y in yarray
{{x}}-{{y}}

## endfor
## endfor

0  test/data/nested/data.json  (Normal file → Executable file)
0  test/data/nested/result.txt  (Normal file → Executable file)
0  test/data/nested/template.txt  (Normal file → Executable file)
0  test/data/simple-file/data.json  (Normal file → Executable file)
0  test/data/simple-file/result.txt  (Normal file → Executable file)
0  test/data/simple-file/template.txt  (Normal file → Executable file)
0  test/data/simple.txt  (Normal file → Executable file)
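The nested-line template above uses inja's line-statement syntax, where a whole line starting with ## is treated as a statement. A small sketch of rendering such a template programmatically (v2 API and header path assumed):

#include <iostream>
#include <nlohmann/json.hpp>
#include <inja/inja.hpp>

int main() {
  inja::Environment env;
  nlohmann::json data;
  data["xarray"] = {1, 2};
  data["yarray"] = {"a", "b"};

  // Each "## ..." line is parsed as a statement, so the loops nest exactly
  // like the test/data/nested-line template above.
  std::cout << env.render(
      "## for x in xarray\n"
      "## for y in yarray\n"
      "{{ x }}-{{ y }}\n"
      "## endfor\n"
      "## endfor\n",
      data);
  return 0;
}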
@@ -256,7 +256,7 @@ namespace hayai
# elif defined(CLOCK_REALTIME)
clock_gettime(CLOCK_REALTIME, &result);
# else
clock_gettime((clockid_t)-1, &result);
clock_gettime((clocId_t)-1, &result);
# endif
return result;
}
@@ -1,26 +0,0 @@
unit_test = executable(
'inja-test',
'src/unit.cpp',
'src/unit-files.cpp',
'src/unit-renderer.cpp',
'src/unit-string-helper.cpp',
dependencies: inja_dep
)

unit_single_test = executable(
'inja-single-test',
'src/unit.cpp',
'src/unit-files.cpp',
'src/unit-renderer.cpp',
'src/unit-string-helper.cpp',
dependencies: inja_single_dep
)

inja_benchmark = executable(
'inja_benchmark',
'src/benchmark.cpp',
dependencies: inja_dep
)

test('Inja unit test', unit_test)
test('Inja single unit test', unit_single_test)
@@ -1,164 +0,0 @@
|
||||
#include "catch/catch.hpp"
|
||||
#include "inja.hpp"
|
||||
|
||||
|
||||
TEST_CASE("dot to pointer") {
|
||||
CHECK( inja::Parser::dot_to_json_pointer_notation("person.names.surname") == "/person/names/surname" );
|
||||
CHECK( inja::Parser::dot_to_json_pointer_notation("guests.2") == "/guests/2" );
|
||||
}
|
||||
|
||||
TEST_CASE("basic-search") {
|
||||
std::string input = "lorem ipsum dolor it";
|
||||
inja::Regex regex("i(.*)m");
|
||||
|
||||
SECTION("from start") {
|
||||
inja::Match match = inja::search(input, regex, 0);
|
||||
CHECK( match.found() == true );
|
||||
CHECK( match.position() == 6 );
|
||||
CHECK( match.length() == 5 );
|
||||
CHECK( match.end_position() == 11 );
|
||||
CHECK( match.str() == "ipsum" );
|
||||
CHECK( match.str(1) == "psu" );
|
||||
}
|
||||
|
||||
SECTION("from position") {
|
||||
inja::Match match = inja::search(input, regex, 8);
|
||||
CHECK( match.found() == false );
|
||||
CHECK( match.length() == 0 );
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("search-multiple-regexes") {
|
||||
std::string input = "lorem ipsum dolor amit estas tronum.";
|
||||
|
||||
SECTION("basic 1") {
|
||||
std::map<int, inja::Regex> regex_patterns = {
|
||||
{0, inja::Regex("tras")},
|
||||
{1, inja::Regex("do(\\w*)or")},
|
||||
{2, inja::Regex("es(\\w*)as")},
|
||||
{3, inja::Regex("ip(\\w*)um")}
|
||||
};
|
||||
inja::MatchType<int> match = inja::search(input, regex_patterns, 0);
|
||||
CHECK( match.type() == 3 );
|
||||
CHECK( match.str() == "ipsum" );
|
||||
CHECK( match.str(1) == "s" );
|
||||
}
|
||||
|
||||
SECTION("basic 2") {
|
||||
std::map<int, inja::Regex> regex_patterns = {
|
||||
{11, inja::Regex("tras")},
|
||||
{21, inja::Regex("ip(\\w*)um")},
|
||||
{31, inja::Regex("do(\\w*)or")},
|
||||
{41, inja::Regex("es(\\w*)as")}
|
||||
};
|
||||
inja::MatchType<int> match = inja::search(input, regex_patterns, 0);
|
||||
CHECK( match.type() == 21 );
|
||||
CHECK( match.str() == "ipsum" );
|
||||
CHECK( match.str(1) == "s" );
|
||||
}
|
||||
|
||||
SECTION("basic 3") {
|
||||
auto map_functions = inja::Parser().regex_map_functions;
|
||||
std::map<int, inja::Regex> regex_patterns = {
|
||||
{0, map_functions.at(inja::Parsed::Function::Upper)},
|
||||
{1, map_functions.at(inja::Parsed::Function::Lower)},
|
||||
{2, map_functions.at(inja::Parsed::Function::ReadJson)}
|
||||
};
|
||||
|
||||
const std::string input_1 = "upper(name)";
|
||||
inja::MatchType<int> match = inja::search(input_1, regex_patterns, 0);
|
||||
CHECK( match.type() == 0 );
|
||||
CHECK( match.str(0) == "upper(name)" );
|
||||
CHECK( match.str(1) == "name" );
|
||||
|
||||
const std::string input_2 = "upper(lower(name))";
|
||||
inja::MatchType<int> match2 = inja::search(input_2, regex_patterns, 0);
|
||||
CHECK( match2.type() == 0 );
|
||||
CHECK( match2.str(0) == "upper(lower(name))" );
|
||||
CHECK( match2.str(1) == "lower(name)" );
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("match-multiple-regexes") {
|
||||
std::string input = "ipsum";
|
||||
|
||||
SECTION("basic 1") {
|
||||
std::map<int, inja::Regex> regex_patterns = {
|
||||
{1, inja::Regex("tras")},
|
||||
{2, inja::Regex("ip(\\w*)um")},
|
||||
{3, inja::Regex("do(\\w*)or")},
|
||||
{4, inja::Regex("es(\\w*)as")}
|
||||
};
|
||||
inja::MatchType<int> match = inja::match(input, regex_patterns);
|
||||
CHECK( match.type() == 2 );
|
||||
CHECK( match.str() == "ipsum" );
|
||||
CHECK( match.str(1) == "s" );
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("search-on-level") {
|
||||
std::string input = "(% up %)(% up %)Test(% N1 %)(% down %)...(% up %)(% N2 %)(% up %)(% N3 %)(% down %)(% N4 %)(% down %)(% N5 %)(% down %)";
|
||||
|
||||
inja::Regex regex_statement("\\(\\% (.*?) \\%\\)");
|
||||
inja::Regex regex_level_up("up");
|
||||
inja::Regex regex_level_down("down");
|
||||
inja::Regex regex_search("N(\\d+)");
|
||||
|
||||
SECTION("first instance") {
|
||||
inja::Match open_match = inja::search(input, regex_statement, 0);
|
||||
CHECK( open_match.position() == 0 );
|
||||
CHECK( open_match.end_position() == 8 );
|
||||
CHECK( open_match.str(1) == "up" );
|
||||
|
||||
inja::MatchClosed match = inja::search_closed_on_level(input, regex_statement, regex_level_up, regex_level_down, regex_search, open_match);
|
||||
CHECK( match.position() == 0 );
|
||||
CHECK( match.end_position() == 109 );
|
||||
}
|
||||
|
||||
SECTION("second instance") {
|
||||
inja::Match open_match = inja::search(input, regex_statement, 4);
|
||||
|
||||
CHECK( open_match.position() == 8 );
|
||||
CHECK( open_match.end_position() == 16 );
|
||||
CHECK( open_match.str(1) == "up" );
|
||||
|
||||
inja::MatchClosed match = inja::search_closed_on_level(input, regex_statement, regex_level_up, regex_level_down, regex_search, open_match);
|
||||
|
||||
CHECK( match.open_match.position() == 8 );
|
||||
CHECK( match.open_match.end_position() == 16 );
|
||||
CHECK( match.close_match.position() == 20 );
|
||||
CHECK( match.close_match.end_position() == 28 );
|
||||
CHECK( match.position() == 8 );
|
||||
CHECK( match.end_position() == 28 );
|
||||
CHECK( match.outer() == "(% up %)Test(% N1 %)" );
|
||||
CHECK( match.inner() == "Test" );
|
||||
}
|
||||
}
|
||||
|
||||
TEST_CASE("match-functions") {
|
||||
auto map_regex = inja::Parser().regex_map_functions;
|
||||
|
||||
CHECK( inja::match("not test", map_regex).type() == inja::Parsed::Function::Not );
|
||||
CHECK( inja::match("not test", map_regex).type() != inja::Parsed::Function::And );
|
||||
CHECK( inja::match("2 == 3", map_regex).type() == inja::Parsed::Function::Equal );
|
||||
CHECK( inja::match("test and test", map_regex).type() == inja::Parsed::Function::And );
|
||||
CHECK( inja::match("test and test", map_regex).type() != inja::Parsed::Function::ReadJson );
|
||||
CHECK( inja::match("test", map_regex).type() == inja::Parsed::Function::ReadJson );
|
||||
CHECK( inja::match("upper", map_regex).type() == inja::Parsed::Function::ReadJson );
|
||||
CHECK( inja::match("upper()", map_regex).type() == inja::Parsed::Function::Upper );
|
||||
CHECK( inja::match("upper(var)", map_regex).type() == inja::Parsed::Function::Upper );
|
||||
CHECK( inja::match("upper( var )", map_regex).type() == inja::Parsed::Function::Upper );
|
||||
CHECK( inja::match("upper(lower())", map_regex).type() == inja::Parsed::Function::Upper );
|
||||
CHECK( inja::match("upper( lower() )", map_regex).type() == inja::Parsed::Function::Upper );
|
||||
CHECK( inja::match(" upper(lower()) ", map_regex).type() == inja::Parsed::Function::Upper );
|
||||
CHECK( inja::match("lower(upper(test))", map_regex).type() == inja::Parsed::Function::Lower );
|
||||
CHECK( inja::match("round(2, 3)", map_regex).type() == inja::Parsed::Function::Round );
|
||||
CHECK( inja::match("exists(\"var\")", map_regex).type() == inja::Parsed::Function::Exists );
|
||||
CHECK( inja::match("existsIn(var, \"othervar\")", map_regex).type() == inja::Parsed::Function::ExistsInObject );
|
||||
}
|
||||
|
||||
TEST_CASE("create-regex-functions") {
|
||||
CHECK( inja::Parser::function_regex("upper", 1).pattern() == "\\s*upper(?:\\((.*)\\))\\s*" );
|
||||
CHECK( inja::Parser::function_regex("upper", 0).pattern() == "\\s*upper(?:\\(\\))?\\s*" );
|
||||
CHECK( inja::Parser::function_regex("lower", 2).pattern() == "\\s*lower(?:\\((.*),(.*)\\))\\s*" );
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
#include "catch/catch.hpp"
|
||||
#include "inja.hpp"
|
||||
#include "inja/inja.hpp"
|
||||
|
||||
|
||||
using json = nlohmann::json;
|
||||
@@ -11,7 +11,7 @@ TEST_CASE("loading") {
|
||||
data["name"] = "Jeff";
|
||||
|
||||
SECTION("Files should be loaded") {
|
||||
CHECK( env.load_global_file("../test/data/simple.txt") == "Hello {{ name }}." );
|
||||
CHECK( env.load_file("../test/data/simple.txt") == "Hello {{ name }}." );
|
||||
}
|
||||
|
||||
SECTION("Files should be rendered") {
|
||||
@@ -26,9 +26,9 @@ TEST_CASE("loading") {
|
||||
TEST_CASE("complete-files") {
|
||||
inja::Environment env = inja::Environment("../test/data/");
|
||||
|
||||
for (std::string test_name : {"simple-file", "nested", "nested-line"}) {
|
||||
for (std::string test_name : {"simple-file", "nested", "nested-line", "html"}) {
|
||||
SECTION(test_name) {
|
||||
CHECK( env.render_file_with_json_file(test_name + "/template.txt", test_name + "/data.json") == env.load_global_file(test_name + "/result.txt") );
|
||||
CHECK( env.render_file_with_json_file(test_name + "/template.txt", test_name + "/data.json") == env.load_file(test_name + "/result.txt") );
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -41,6 +41,6 @@ TEST_CASE("global-path") {
|
||||
|
||||
SECTION("Files should be written") {
|
||||
env.write("simple.txt", data, "global-path-result.txt");
|
||||
CHECK( env_result.load_global_file("global-path-result.txt") == "Hello Jeff." );
|
||||
CHECK( env_result.load_file("global-path-result.txt") == "Hello Jeff." );
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,18 @@
|
||||
#include "catch/catch.hpp"
|
||||
#include "inja.hpp"
|
||||
#include "inja/inja.hpp"
|
||||
|
||||
|
||||
using json = nlohmann::json;
|
||||
|
||||
|
||||
TEST_CASE("dot-to-pointer") {
|
||||
std::string buffer;
|
||||
CHECK( inja::convert_dot_to_json_pointer("person.names.surname", buffer) == "/person/names/surname" );
|
||||
CHECK( inja::convert_dot_to_json_pointer("guests.2", buffer) == "/guests/2" );
|
||||
}
|
||||
|
||||
TEST_CASE("types") {
|
||||
inja::Environment env = inja::Environment();
|
||||
inja::Environment env;
|
||||
json data;
|
||||
data["name"] = "Peter";
|
||||
data["city"] = "Brunswick";
|
||||
@@ -34,11 +40,11 @@ TEST_CASE("types") {
|
||||
CHECK( env.render("{{name}}", data) == "Peter" );
|
||||
CHECK( env.render("{{ name }} is {{ age }} years old.", data) == "Peter is 29 years old." );
|
||||
CHECK( env.render("Hello {{ name }}! I come from {{ city }}.", data) == "Hello Peter! I come from Brunswick." );
|
||||
CHECK( env.render("Hello {{ names/1 }}!", data) == "Hello Seb!" );
|
||||
CHECK( env.render("Hello {{ brother/name }}!", data) == "Hello Chris!" );
|
||||
CHECK( env.render("Hello {{ brother/daughter0/name }}!", data) == "Hello Maria!" );
|
||||
CHECK( env.render("Hello {{ names.1 }}!", data) == "Hello Seb!" );
|
||||
CHECK( env.render("Hello {{ brother.name }}!", data) == "Hello Chris!" );
|
||||
CHECK( env.render("Hello {{ brother.daughter0.name }}!", data) == "Hello Maria!" );
|
||||
|
||||
CHECK_THROWS_WITH( env.render("{{unknown}}", data), "[inja.exception.render_error] variable '/unknown' not found" );
|
||||
CHECK_THROWS_WITH( env.render("{{unknown}}", data), "[inja.exception.render_error] variable 'unknown' not found" );
|
||||
}
|
||||
|
||||
SECTION("comments") {
|
||||
@@ -49,17 +55,17 @@ TEST_CASE("types") {
|
||||
SECTION("loops") {
|
||||
CHECK( env.render("{% for name in names %}a{% endfor %}", data) == "aa" );
|
||||
CHECK( env.render("Hello {% for name in names %}{{ name }} {% endfor %}!", data) == "Hello Jeff Seb !" );
|
||||
CHECK( env.render("Hello {% for name in names %}{{ loop/index }}: {{ name }}, {% endfor %}!", data) == "Hello 0: Jeff, 1: Seb, !" );
|
||||
CHECK( env.render("Hello {% for name in names %}{{ loop.index }}: {{ name }}, {% endfor %}!", data) == "Hello 0: Jeff, 1: Seb, !" );
|
||||
CHECK( env.render("{% for type, name in relatives %}{{ type }}: {{ name }}, {% endfor %}", data) == "brother: Chris, mother: Maria, sister: Jenny, " );
|
||||
CHECK( env.render("{% for v in vars %}{% if v > 0 %}+{% endif %}{% endfor %}", data) == "+++" );
|
||||
CHECK( env.render("{% for name in names %}{{ loop/index }}: {{ name }}{% if not loop/is_last %}, {% endif %}{% endfor %}!", data) == "0: Jeff, 1: Seb!" );
|
||||
CHECK( env.render("{% for name in names %}{{ loop/index }}: {{ name }}{% if loop/is_last == false %}, {% endif %}{% endfor %}!", data) == "0: Jeff, 1: Seb!" );
|
||||
CHECK( env.render("{% for name in names %}{{ loop.index }}: {{ name }}{% if not loop.is_last %}, {% endif %}{% endfor %}!", data) == "0: Jeff, 1: Seb!" );
|
||||
CHECK( env.render("{% for name in names %}{{ loop.index }}: {{ name }}{% if loop.is_last == false %}, {% endif %}{% endfor %}!", data) == "0: Jeff, 1: Seb!" );
|
||||
|
||||
data["empty_loop"] = {};
|
||||
CHECK( env.render("{% for name in empty_loop %}a{% endfor %}", data) == "" );
|
||||
CHECK( env.render("{% for name in {} %}a{% endfor %}", data) == "" );
|
||||
|
||||
CHECK_THROWS_WITH( env.render("{% for name ins names %}a{% endfor %}", data), "[inja.exception.parser_error] unknown loop statement: for name ins names" );
|
||||
CHECK_THROWS_WITH( env.render("{% for name ins names %}a{% endfor %}", data), "[inja.exception.parser_error] expected 'in', got 'ins'" );
|
||||
// CHECK_THROWS_WITH( env.render("{% for name in relatives %}{{ name }}{% endfor %}", data), "[inja.exception.json_error] [json.exception.type_error.302] type must be array, but is object" );
|
||||
}
|
||||
|
||||
@@ -77,25 +83,28 @@ TEST_CASE("types") {
|
||||
CHECK( env.render("{% if age == 26 %}26{% else if age == 27 %}27{% else if age == 28 %}28{% else %}29{% endif %}", data) == "29" );
|
||||
CHECK( env.render("{% if age == 25 %}+{% endif %}{% if age == 29 %}+{% else %}-{% endif %}", data) == "+" );
|
||||
|
||||
CHECK_THROWS_WITH( env.render("{% if is_happy %}{% if is_happy %}{% endif %}", data), "[inja.exception.parser_error] misordered if statement" );
|
||||
CHECK_THROWS_WITH( env.render("{% if is_happy %}{% else if is_happy %}{% end if %}", data), "[inja.exception.parser_error] misordered if statement" );
|
||||
CHECK_THROWS_WITH( env.render("{% if is_happy %}{% if is_happy %}{% endif %}", data), "[inja.exception.parser_error] unmatched if" );
|
||||
CHECK_THROWS_WITH( env.render("{% if is_happy %}{% else if is_happy %}{% end if %}", data), "[inja.exception.parser_error] expected statement, got 'end'" );
|
||||
}
|
||||
|
||||
SECTION("line statements") {
|
||||
CHECK( env.render(R"(## if is_happy
|
||||
Yeah!
|
||||
## endif)", data) == "Yeah!" );
|
||||
## endif)", data) == R"(Yeah!
|
||||
)" );
|
||||
|
||||
CHECK( env.render(R"(## if is_happy
|
||||
## if is_happy
|
||||
Yeah!
|
||||
## endif
|
||||
## endif )", data) == "Yeah!" );
|
||||
## endif )", data) == R"(Yeah!
|
||||
)" );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
TEST_CASE("functions") {
|
||||
inja::Environment env = inja::Environment();
|
||||
inja::Environment env;
|
||||
|
||||
json data;
|
||||
data["name"] = "Peter";
|
||||
@@ -206,7 +215,7 @@ TEST_CASE("functions") {
|
||||
CHECK( env.render("{{ default(nothing, 0) }}", data) == "0" );
|
||||
CHECK( env.render("{{ default(name, \"nobody\") }}", data) == "Peter" );
|
||||
CHECK( env.render("{{ default(surname, \"nobody\") }}", data) == "nobody" );
|
||||
CHECK_THROWS_WITH( env.render("{{ default(surname, lastname) }}", data), "[inja.exception.render_error] variable '/lastname' not found" );
|
||||
CHECK_THROWS_WITH( env.render("{{ default(surname, lastname) }}", data), "[inja.exception.render_error] variable 'lastname' not found" );
|
||||
}
|
||||
|
||||
SECTION("exists") {
|
||||
@@ -221,8 +230,8 @@ TEST_CASE("functions") {
|
||||
CHECK( env.render("{{ existsIn(brother, \"parents\") }}", data) == "false" );
|
||||
CHECK( env.render("{{ existsIn(brother, property) }}", data) == "true" );
|
||||
CHECK( env.render("{{ existsIn(brother, name) }}", data) == "false" );
|
||||
CHECK_THROWS_WITH( env.render("{{ existsIn(sister, \"lastname\") }}", data), "[inja.exception.render_error] variable '/sister' not found" );
|
||||
CHECK_THROWS_WITH( env.render("{{ existsIn(brother, sister) }}", data), "[inja.exception.render_error] variable '/sister' not found" );
|
||||
CHECK_THROWS_WITH( env.render("{{ existsIn(sister, \"lastname\") }}", data), "[inja.exception.render_error] variable 'sister' not found" );
|
||||
CHECK_THROWS_WITH( env.render("{{ existsIn(brother, sister) }}", data), "[inja.exception.render_error] variable 'sister' not found" );
|
||||
}
|
||||
|
||||
SECTION("isType") {
|
||||
@@ -243,40 +252,41 @@ TEST_CASE("functions") {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
TEST_CASE("callbacks") {
|
||||
inja::Environment env = inja::Environment();
|
||||
inja::Environment env;
|
||||
json data;
|
||||
data["age"] = 28;
|
||||
|
||||
env.add_callback("double", 1, [&env](inja::Parsed::Arguments args, json data) {
|
||||
int number = env.get_argument<double>(args, 0, data);
|
||||
env.add_callback("double", 1, [](inja::Arguments& args) {
|
||||
int number = args.at(0)->get<double>();
|
||||
return 2 * number;
|
||||
});
|
||||
|
||||
env.add_callback("half", 1, [&env](inja::Parsed::Arguments args, json data) {
|
||||
int number = env.get_argument<double>(args, 0, data);
|
||||
env.add_callback("half", 1, [](inja::Arguments args) {
|
||||
int number = args.at(0)->get<double>();
|
||||
return number / 2;
|
||||
});
|
||||
|
||||
std::string greet = "Hello";
|
||||
env.add_callback("double-greetings", 0, [greet](inja::Parsed::Arguments args, json data) {
|
||||
env.add_callback("double-greetings", 0, [greet](inja::Arguments args) {
|
||||
return greet + " " + greet + "!";
|
||||
});
|
||||
|
||||
env.add_callback("multiply", 2, [&env](inja::Parsed::Arguments args, json data) {
|
||||
double number1 = env.get_argument(args, 0, data);
|
||||
auto number2 = env.get_argument<double>(args, 1, data);
|
||||
env.add_callback("multiply", 2, [](inja::Arguments args) {
|
||||
double number1 = args.at(0)->get<double>();
|
||||
auto number2 = args.at(1)->get<double>();
|
||||
return number1 * number2;
|
||||
});
|
||||
|
||||
env.add_callback("multiply", 3, [&env](inja::Parsed::Arguments args, json data) {
|
||||
double number1 = env.get_argument(args, 0, data);
|
||||
double number2 = env.get_argument(args, 1, data);
|
||||
double number3 = env.get_argument(args, 2, data);
|
||||
env.add_callback("multiply", 3, [](inja::Arguments args) {
|
||||
double number1 = args.at(0)->get<double>();
|
||||
double number2 = args.at(1)->get<double>();
|
||||
double number3 = args.at(2)->get<double>();
|
||||
return number1 * number2 * number3;
|
||||
});
|
||||
|
||||
env.add_callback("multiply", 0, [](inja::Parsed::Arguments args, json data) {
|
||||
env.add_callback("multiply", 0, [](inja::Arguments args) {
|
||||
return 1.0;
|
||||
});
|
||||
|
||||
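The hunk above captures the v2 callback API change: callbacks now receive inja::Arguments, whose elements are dereferenced with args.at(i)->get<T>(), instead of Parsed::Arguments plus the data object resolved through the Environment. A hedged, self-contained sketch of the new style (header path and the rendered "56" are assumptions based on the surrounding tests):

#include <nlohmann/json.hpp>
#include <inja/inja.hpp>

int main() {
  inja::Environment env;
  nlohmann::json data;
  data["age"] = 28;

  // v2 style: the arguments arrive already evaluated, so the lambda no longer
  // needs to capture the Environment or take the data object (contrast with
  // the v1 lambdas removed in the hunk above).
  env.add_callback("double", 1, [](inja::Arguments& args) {
    int number = args.at(0)->get<double>();
    return 2 * number;
  });

  return env.render("{{ double(age) }}", data) == "56" ? 0 : 1;
}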
@@ -289,8 +299,9 @@ TEST_CASE("callbacks") {
|
||||
CHECK( env.render("{{ multiply }}", data) == "1.0" );
|
||||
}
|
||||
|
||||
|
||||
TEST_CASE("combinations") {
|
||||
inja::Environment env = inja::Environment();
|
||||
inja::Environment env;
|
||||
json data;
|
||||
data["name"] = "Peter";
|
||||
data["city"] = "Brunswick";
|
||||
@@ -303,7 +314,7 @@ TEST_CASE("combinations") {
|
||||
|
||||
CHECK( env.render("{% if upper(\"Peter\") == \"PETER\" %}TRUE{% endif %}", data) == "TRUE" );
|
||||
CHECK( env.render("{% if lower(upper(name)) == \"peter\" %}TRUE{% endif %}", data) == "TRUE" );
|
||||
CHECK( env.render("{% for i in range(4) %}{{ loop/index1 }}{% endfor %}", data) == "1234" );
|
||||
CHECK( env.render("{% for i in range(4) %}{{ loop.index1 }}{% endfor %}", data) == "1234" );
|
||||
}
|
||||
|
||||
TEST_CASE("templates") {
|
||||
@@ -313,26 +324,27 @@ TEST_CASE("templates") {
|
||||
data["is_happy"] = true;
|
||||
|
||||
SECTION("reuse") {
|
||||
inja::Environment env = inja::Environment();
|
||||
inja::Environment env;
|
||||
inja::Template temp = env.parse("{% if is_happy %}{{ name }}{% else %}{{ city }}{% endif %}");
|
||||
|
||||
CHECK( env.render_template(temp, data) == "Peter" );
|
||||
CHECK( env.render(temp, data) == "Peter" );
|
||||
|
||||
data["is_happy"] = false;
|
||||
|
||||
CHECK( env.render_template(temp, data) == "Brunswick" );
|
||||
CHECK( env.render(temp, data) == "Brunswick" );
|
||||
}
|
||||
|
||||
SECTION("include") {
|
||||
inja::Environment env = inja::Environment();
|
||||
inja::Environment env;
|
||||
inja::Template t1 = env.parse("Hello {{ name }}");
|
||||
env.include_template("greeting", t1);
|
||||
|
||||
inja::Template t2 = env.parse("{% include \"greeting\" %}!");
|
||||
CHECK( env.render_template(t2, data) == "Hello Peter!" );
|
||||
CHECK( env.render(t2, data) == "Hello Peter!" );
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
TEST_CASE("other-syntax") {
|
||||
json data;
|
||||
data["name"] = "Peter";
|
||||
@@ -345,31 +357,31 @@ TEST_CASE("other-syntax") {
|
||||
data["is_happy"] = true;
|
||||
|
||||
SECTION("variables") {
|
||||
inja::Environment env = inja::Environment();
|
||||
env.set_element_notation(inja::ElementNotation::Dot);
|
||||
inja::Environment env;
|
||||
env.set_element_notation(inja::ElementNotation::Pointer);
|
||||
|
||||
CHECK( env.render("{{ name }}", data) == "Peter" );
|
||||
CHECK( env.render("Hello {{ names.1 }}!", data) == "Hello Seb!" );
|
||||
CHECK( env.render("Hello {{ brother.name }}!", data) == "Hello Chris!" );
|
||||
CHECK( env.render("Hello {{ brother.daughter0.name }}!", data) == "Hello Maria!" );
|
||||
CHECK( env.render("Hello {{ names/1 }}!", data) == "Hello Seb!" );
|
||||
CHECK( env.render("Hello {{ brother/name }}!", data) == "Hello Chris!" );
|
||||
CHECK( env.render("Hello {{ brother/daughter0/name }}!", data) == "Hello Maria!" );
|
||||
|
||||
CHECK_THROWS_WITH( env.render("{{unknown.name}}", data), "[inja.exception.render_error] variable '/unknown/name' not found" );
|
||||
CHECK_THROWS_WITH( env.render("{{unknown/name}}", data), "[inja.exception.render_error] variable 'unknown/name' not found" );
|
||||
}
|
||||
|
||||
SECTION("other expression syntax") {
|
||||
inja::Environment env = inja::Environment();
|
||||
inja::Environment env;
|
||||
|
||||
CHECK( env.render("Hello {{ name }}!", data) == "Hello Peter!" );
|
||||
|
||||
env.set_expression("\\(&", "&\\)");
|
||||
env.set_expression("(&", "&)");
|
||||
|
||||
CHECK( env.render("Hello {{ name }}!", data) == "Hello {{ name }}!" );
|
||||
CHECK( env.render("Hello (& name &)!", data) == "Hello Peter!" );
|
||||
}
|
||||
|
||||
SECTION("other comment syntax") {
|
||||
inja::Environment env = inja::Environment();
|
||||
env.set_comment("\\(&", "&\\)");
|
||||
inja::Environment env;
|
||||
env.set_comment("(&", "&)");
|
||||
|
||||
CHECK( env.render("Hello {# Test #}", data) == "Hello {# Test #}" );
|
||||
CHECK( env.render("Hello (& Test &)", data) == "Hello " );
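The final hunks above show the v2 way of overriding delimiters: set_expression and set_comment now take the delimiters verbatim instead of escaped regexes. A short sketch based on those test lines (v2 API assumed):

#include <iostream>
#include <nlohmann/json.hpp>
#include <inja/inja.hpp>

int main() {
  inja::Environment env;
  nlohmann::json data;
  data["name"] = "Peter";

  env.set_expression("(&", "&)");  // plain strings in v2, no regex escaping

  // The default {{ ... }} markers are now left untouched, while the new
  // delimiters are expanded:
  std::cout << env.render("Hello {{ name }}!", data) << "\n";   // Hello {{ name }}!
  std::cout << env.render("Hello (& name &)!", data) << "\n";   // Hello Peter!
  return 0;
}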