@@ -5,4 +5,5 @@ __pycache__/ | |||
.mypy_cache/ | |||
*.dsd/ | |||
_prebuilt/ | |||
.dds-repo-lock | |||
.dds-repo-lock | |||
.pytest_cache |
@@ -1,6 +1,5 @@ | |||
Type: Index | |||
Package: taywee-args; external/taywee-args.lmp | |||
Package: spdlog; external/spdlog.lmp | |||
Package: ms-third; external/ms-third.lmp | |||
Package: ranges-v3; external/ranges-v3.lmp |
@@ -8,9 +8,11 @@ jobs: | |||
steps: | |||
- script: | | |||
echo Loading VS environment | |||
call "C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Enterprise\\Common7\\Tools\\vsdevcmd" -arch=x64 || exit 1 | |||
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\vsdevcmd" -arch=x64 || exit 1 | |||
echo Executing Build and Tests | |||
python -u tools/ci.py -B download --cxx cl.exe -T tools\\msvc.dds || exit 1 | |||
reg add HKLM\SYSTEM\CurrentControlSet\Control\FileSystem /v LongPathsEnabled /t REG_DWORD /d 1 /f || exit 1 | |||
python -m pip install pytest pytest-xdist || exit 1 | |||
python -u tools/ci.py -B download --cxx cl.exe -T tools\msvc.dds -T2 tools\msvc.p2.dds || exit 1 | |||
displayName: Full CI | |||
- publish: _build/dds.exe | |||
artifact: DDS Executable - Windows VS2019 | |||
@@ -19,9 +21,13 @@ jobs: | |||
pool: | |||
vmImage: ubuntu-18.04 | |||
steps: | |||
- script: sudo apt update -y && sudo apt install -y python3-minimal g++-8 | |||
- script: | | |||
set -eu | |||
sudo apt update -y | |||
sudo apt install -y python3-minimal g++-8 | |||
python3 -m pip install pytest pytest-xdist | |||
displayName: Prepare System | |||
- script: python3 -u tools/ci.py -B download --cxx g++-8 -T tools/gcc-8.dds | |||
- script: python3 -u tools/ci.py -B download --cxx g++-8 -T tools/gcc-8.dds -T2 tools/gcc-8.p2.dds | |||
displayName: Full CI | |||
- publish: _build/dds | |||
artifact: DDS Executable - Linux | |||
@@ -32,7 +38,10 @@ jobs: | |||
steps: | |||
- script: brew install gcc@8 | |||
displayName: Prepare System | |||
- script: python3 -u tools/ci.py -B download --cxx g++-8 -T tools/gcc-8.dds | |||
- script: | | |||
set -eu | |||
python3 -m pip install pytest pytest-xdist | |||
python3 -u tools/ci.py -B download --cxx g++-8 -T tools/gcc-8.dds -T2 tools/gcc-8.p2.dds | |||
displayName: Build and Run Unit Tests | |||
- publish: _build/dds | |||
artifact: DDS Executable - macOS |
@@ -1,5 +0,0 @@ | |||
Type: Library | |||
Name: args | |||
Include-Path: repo/taywee-args/include |
@@ -1,7 +1,7 @@ | |||
/* | |||
__ _____ _____ _____ | |||
__| | __| | | | JSON for Modern C++ | |||
| | |__ | | | | | | version 3.7.0 | |||
| | |__ | | | | | | version 3.7.1 | |||
|_____|_____|_____|_|___| https://github.com/nlohmann/json | |||
Licensed under the MIT License <http://opensource.org/licenses/MIT>. | |||
@@ -32,7 +32,7 @@ SOFTWARE. | |||
#define NLOHMANN_JSON_VERSION_MAJOR 3 | |||
#define NLOHMANN_JSON_VERSION_MINOR 7 | |||
#define NLOHMANN_JSON_VERSION_PATCH 0 | |||
#define NLOHMANN_JSON_VERSION_PATCH 1 | |||
#include <algorithm> // all_of, find, for_each | |||
#include <cassert> // assert | |||
@@ -122,11 +122,11 @@ struct position_t | |||
* SPDX-License-Identifier: CC0-1.0 | |||
*/ | |||
#if !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < 9) | |||
#if !defined(JSON_HEDLEY_VERSION) || (JSON_HEDLEY_VERSION < 11) | |||
#if defined(JSON_HEDLEY_VERSION) | |||
#undef JSON_HEDLEY_VERSION | |||
#endif | |||
#define JSON_HEDLEY_VERSION 9 | |||
#define JSON_HEDLEY_VERSION 11 | |||
#if defined(JSON_HEDLEY_STRINGIFY_EX) | |||
#undef JSON_HEDLEY_STRINGIFY_EX | |||
@@ -493,12 +493,29 @@ struct position_t | |||
#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE) | |||
#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE | |||
#endif | |||
#if defined(__has_cpp_attribute) && defined(__cplusplus) | |||
#if \ | |||
defined(__has_cpp_attribute) && \ | |||
defined(__cplusplus) && \ | |||
(!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) | |||
#define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) __has_cpp_attribute(attribute) | |||
#else | |||
#define JSON_HEDLEY_HAS_CPP_ATTRIBUTE(attribute) (0) | |||
#endif | |||
#if defined(JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS) | |||
#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS | |||
#endif | |||
#if !defined(__cplusplus) || !defined(__has_cpp_attribute) | |||
#define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) | |||
#elif \ | |||
!defined(JSON_HEDLEY_PGI_VERSION) && \ | |||
(!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) && \ | |||
(!defined(JSON_HEDLEY_MSVC_VERSION) || JSON_HEDLEY_MSVC_VERSION_CHECK(19,20,0)) | |||
#define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) JSON_HEDLEY_HAS_CPP_ATTRIBUTE(ns::attribute) | |||
#else | |||
#define JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(ns,attribute) (0) | |||
#endif | |||
#if defined(JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE) | |||
#undef JSON_HEDLEY_GNUC_HAS_CPP_ATTRIBUTE | |||
#endif | |||
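
The new `JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS` probe is what later lets the fallthrough logic ask about `clang::fallthrough` safely on compilers that mishandle namespaced attribute probes. A minimal usage sketch (the `MY_FALLTHROUGH` name is illustrative, not part of Hedley):

```cpp
// Illustrative only: guard a vendor-namespaced attribute behind the new probe.
#if JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(clang, fallthrough)
    #define MY_FALLTHROUGH [[clang::fallthrough]]
#else
    #define MY_FALLTHROUGH
#endif
```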
@@ -652,6 +669,21 @@ struct position_t | |||
#define JSON_HEDLEY_GCC_HAS_WARNING(warning,major,minor,patch) JSON_HEDLEY_GCC_VERSION_CHECK(major,minor,patch) | |||
#endif | |||
/* JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ is for | |||
HEDLEY INTERNAL USE ONLY. API subject to change without notice. */ | |||
#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_) | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ | |||
#endif | |||
#if defined(__cplusplus) && JSON_HEDLEY_HAS_WARNING("-Wc++98-compat") | |||
# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(xpr) \ | |||
JSON_HEDLEY_DIAGNOSTIC_PUSH \ | |||
_Pragma("clang diagnostic ignored \"-Wc++98-compat\"") \ | |||
xpr \ | |||
JSON_HEDLEY_DIAGNOSTIC_POP | |||
#else | |||
# define JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(x) x | |||
#endif | |||
#if \ | |||
(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L)) || \ | |||
defined(__clang__) || \ | |||
@@ -752,6 +784,27 @@ struct position_t | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS | |||
#endif | |||
#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES) | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES | |||
#endif | |||
#if JSON_HEDLEY_HAS_WARNING("-Wunknown-attributes") | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("clang diagnostic ignored \"-Wunknown-attributes\"") | |||
#elif JSON_HEDLEY_GCC_VERSION_CHECK(4,6,0) | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") | |||
#elif JSON_HEDLEY_INTEL_VERSION_CHECK(17,0,0) | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("warning(disable:1292)") | |||
#elif JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,0) | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES __pragma(warning(disable:5030)) | |||
#elif JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1097") | |||
#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,14,0) && defined(__cplusplus) | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("error_messages(off,attrskipunsup)") | |||
#elif JSON_HEDLEY_TI_VERSION_CHECK(8,0,0) | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES _Pragma("diag_suppress 1173") | |||
#else | |||
#define JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES | |||
#endif | |||
#if defined(JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL) | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL | |||
#endif | |||
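
The added `JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES` is intended to sit inside a push/pop pair around an attribute that some compilers will not recognize. A hedged sketch (the attribute and function shown are hypothetical):

```cpp
// Sketch: silence "unknown attribute" diagnostics around a vendor-specific attribute.
JSON_HEDLEY_DIAGNOSTIC_PUSH
JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES
[[vendor::special_attribute]] void some_function();
JSON_HEDLEY_DIAGNOSTIC_POP
```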
@@ -772,8 +825,8 @@ struct position_t | |||
#undef JSON_HEDLEY_DEPRECATED_FOR | |||
#endif | |||
#if defined(__cplusplus) && (__cplusplus >= 201402L) | |||
#define JSON_HEDLEY_DEPRECATED(since) [[deprecated("Since " #since)]] | |||
#define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) [[deprecated("Since " #since "; use " #replacement)]] | |||
#define JSON_HEDLEY_DEPRECATED(since) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since)]]) | |||
#define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[deprecated("Since " #since "; use " #replacement)]]) | |||
#elif \ | |||
JSON_HEDLEY_HAS_EXTENSION(attribute_deprecated_with_message) || \ | |||
JSON_HEDLEY_GCC_VERSION_CHECK(4,5,0) || \ | |||
@@ -798,7 +851,7 @@ struct position_t | |||
#elif \ | |||
JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) || \ | |||
JSON_HEDLEY_PELLES_VERSION_CHECK(6,50,0) | |||
#define JSON_HEDLEY_DEPRECATED(since) _declspec(deprecated) | |||
#define JSON_HEDLEY_DEPRECATED(since) __declspec(deprecated) | |||
#define JSON_HEDLEY_DEPRECATED_FOR(since, replacement) __declspec(deprecated) | |||
#elif JSON_HEDLEY_IAR_VERSION_CHECK(8,0,0) | |||
#define JSON_HEDLEY_DEPRECATED(since) _Pragma("deprecated") | |||
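
The `_declspec(deprecated)` spelling was a typo for `__declspec(deprecated)`; with the fix, MSVC and Pelles C actually receive the annotation. Typical use of the two macros, with illustrative function names:

```cpp
void new_api();

JSON_HEDLEY_DEPRECATED(3.7.0)
void old_api();  // callers get a "deprecated since 3.7.0" warning

JSON_HEDLEY_DEPRECATED_FOR(3.7.0, new_api)
void older_api();  // warning additionally suggests new_api as the replacement
```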
@@ -824,7 +877,7 @@ struct position_t | |||
#undef JSON_HEDLEY_WARN_UNUSED_RESULT | |||
#endif | |||
#if defined(__cplusplus) && (__cplusplus >= 201703L) | |||
#define JSON_HEDLEY_WARN_UNUSED_RESULT [[nodiscard]] | |||
#define JSON_HEDLEY_WARN_UNUSED_RESULT JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[nodiscard]]) | |||
#elif \ | |||
JSON_HEDLEY_HAS_ATTRIBUTE(warn_unused_result) || \ | |||
JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ | |||
@@ -863,7 +916,7 @@ struct position_t | |||
#elif defined(__STDC_VERSION__) && __STDC_VERSION__ >= 201112L | |||
#define JSON_HEDLEY_NO_RETURN _Noreturn | |||
#elif defined(__cplusplus) && (__cplusplus >= 201103L) | |||
#define JSON_HEDLEY_NO_RETURN [[noreturn]] | |||
#define JSON_HEDLEY_NO_RETURN JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[noreturn]]) | |||
#elif \ | |||
JSON_HEDLEY_HAS_ATTRIBUTE(noreturn) || \ | |||
JSON_HEDLEY_GCC_VERSION_CHECK(3,2,0) || \ | |||
@@ -873,6 +926,8 @@ struct position_t | |||
JSON_HEDLEY_TI_VERSION_CHECK(18,0,0) || \ | |||
(JSON_HEDLEY_TI_VERSION_CHECK(17,3,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) | |||
#define JSON_HEDLEY_NO_RETURN __attribute__((__noreturn__)) | |||
#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) | |||
#define JSON_HEDLEY_NO_RETURN _Pragma("does_not_return") | |||
#elif JSON_HEDLEY_MSVC_VERSION_CHECK(13,10,0) | |||
#define JSON_HEDLEY_NO_RETURN __declspec(noreturn) | |||
#elif JSON_HEDLEY_TI_VERSION_CHECK(6,0,0) && defined(__cplusplus) | |||
@@ -885,6 +940,15 @@ struct position_t | |||
#define JSON_HEDLEY_NO_RETURN | |||
#endif | |||
#if defined(JSON_HEDLEY_NO_ESCAPE) | |||
#undef JSON_HEDLEY_NO_ESCAPE | |||
#endif | |||
#if JSON_HEDLEY_HAS_ATTRIBUTE(noescape) | |||
#define JSON_HEDLEY_NO_ESCAPE __attribute__((__noescape__)) | |||
#else | |||
#define JSON_HEDLEY_NO_ESCAPE | |||
#endif | |||
#if defined(JSON_HEDLEY_UNREACHABLE) | |||
#undef JSON_HEDLEY_UNREACHABLE | |||
#endif | |||
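
The newly added `JSON_HEDLEY_NO_ESCAPE` maps to Clang's `__attribute__((noescape))`, a promise that a pointer parameter is not retained beyond the call; elsewhere it expands to nothing. Illustrative sketch:

```cpp
// Sketch: promise that `buffer` is only used for the duration of the call.
void fill(JSON_HEDLEY_NO_ESCAPE char* buffer, int len);
```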
@@ -941,11 +1005,14 @@ struct position_t | |||
#define JSON_HEDLEY_ASSUME(expr) ((void) (expr)) | |||
#endif | |||
JSON_HEDLEY_DIAGNOSTIC_PUSH | |||
#if \ | |||
JSON_HEDLEY_HAS_WARNING("-Wvariadic-macros") || \ | |||
JSON_HEDLEY_GCC_VERSION_CHECK(4,0,0) | |||
#if JSON_HEDLEY_HAS_WARNING("-Wpedantic") | |||
#pragma clang diagnostic ignored "-Wpedantic" | |||
#endif | |||
#if JSON_HEDLEY_HAS_WARNING("-Wc++98-compat-pedantic") && defined(__cplusplus) | |||
#pragma clang diagnostic ignored "-Wc++98-compat-pedantic" | |||
#endif | |||
#if JSON_HEDLEY_GCC_HAS_WARNING("-Wvariadic-macros",4,0,0) | |||
#if defined(__clang__) | |||
#pragma clang diagnostic ignored "-Wvariadic-macros" | |||
#elif defined(JSON_HEDLEY_GCC_VERSION) | |||
@@ -993,7 +1060,7 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
#endif | |||
#if defined(__cplusplus) | |||
#if __cplusplus >= 201103L | |||
#define JSON_HEDLEY_CONSTEXPR constexpr | |||
#define JSON_HEDLEY_CONSTEXPR JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(constexpr) | |||
#endif | |||
#endif | |||
#if !defined(JSON_HEDLEY_CONSTEXPR) | |||
@@ -1073,6 +1140,8 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
JSON_HEDLEY_TI_VERSION_CHECK(8,0,0) || \ | |||
(JSON_HEDLEY_TI_VERSION_CHECK(7,3,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) | |||
#define JSON_HEDLEY_MALLOC __attribute__((__malloc__)) | |||
#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) | |||
#define JSON_HEDLEY_MALLOC _Pragma("returns_new_memory") | |||
#elif JSON_HEDLEY_MSVC_VERSION_CHECK(14, 0, 0) | |||
#define JSON_HEDLEY_MALLOC __declspec(restrict) | |||
#else | |||
@@ -1093,6 +1162,8 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
(JSON_HEDLEY_TI_VERSION_CHECK(7,3,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ | |||
JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) | |||
#define JSON_HEDLEY_PURE __attribute__((__pure__)) | |||
#elif JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) | |||
#define JSON_HEDLEY_PURE _Pragma("does_not_write_global_data") | |||
#elif JSON_HEDLEY_TI_VERSION_CHECK(6,0,0) && defined(__cplusplus) | |||
#define JSON_HEDLEY_PURE _Pragma("FUNC_IS_PURE;") | |||
#else | |||
@@ -1113,6 +1184,9 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
(JSON_HEDLEY_TI_VERSION_CHECK(7,3,0) && defined(__TI_GNU_ATTRIBUTE_SUPPORT__)) || \ | |||
JSON_HEDLEY_PGI_VERSION_CHECK(17,10,0) | |||
#define JSON_HEDLEY_CONST __attribute__((__const__)) | |||
#elif \ | |||
JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) | |||
#define JSON_HEDLEY_CONST _Pragma("no_side_effect") | |||
#else | |||
#define JSON_HEDLEY_CONST JSON_HEDLEY_PURE | |||
#endif | |||
@@ -1263,28 +1337,16 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
#if defined(JSON_HEDLEY_FALL_THROUGH) | |||
#undef JSON_HEDLEY_FALL_THROUGH | |||
#endif | |||
#if \ | |||
defined(__cplusplus) && \ | |||
(!defined(JSON_HEDLEY_SUNPRO_VERSION) || JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,15,0)) && \ | |||
!defined(JSON_HEDLEY_PGI_VERSION) | |||
#if \ | |||
(__cplusplus >= 201703L) || \ | |||
((__cplusplus >= 201103L) && JSON_HEDLEY_HAS_CPP_ATTRIBUTE(fallthrough)) | |||
#define JSON_HEDLEY_FALL_THROUGH [[fallthrough]] | |||
#elif (__cplusplus >= 201103L) && JSON_HEDLEY_HAS_CPP_ATTRIBUTE(clang::fallthrough) | |||
#define JSON_HEDLEY_FALL_THROUGH [[clang::fallthrough]] | |||
#elif (__cplusplus >= 201103L) && JSON_HEDLEY_GCC_VERSION_CHECK(7,0,0) | |||
#define JSON_HEDLEY_FALL_THROUGH [[gnu::fallthrough]] | |||
#endif | |||
#endif | |||
#if !defined(JSON_HEDLEY_FALL_THROUGH) | |||
#if JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(fallthrough,7,0,0) && !defined(JSON_HEDLEY_PGI_VERSION) | |||
#define JSON_HEDLEY_FALL_THROUGH __attribute__((__fallthrough__)) | |||
#elif defined(__fallthrough) /* SAL */ | |||
#define JSON_HEDLEY_FALL_THROUGH __fallthrough | |||
#else | |||
#define JSON_HEDLEY_FALL_THROUGH | |||
#endif | |||
#if JSON_HEDLEY_GNUC_HAS_ATTRIBUTE(fallthrough,7,0,0) && !defined(JSON_HEDLEY_PGI_VERSION) | |||
#define JSON_HEDLEY_FALL_THROUGH __attribute__((__fallthrough__)) | |||
#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS(clang,fallthrough) | |||
#define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[clang::fallthrough]]) | |||
#elif JSON_HEDLEY_HAS_CPP_ATTRIBUTE(fallthrough) | |||
#define JSON_HEDLEY_FALL_THROUGH JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_([[fallthrough]]) | |||
#elif defined(__fallthrough) /* SAL */ | |||
#define JSON_HEDLEY_FALL_THROUGH __fallthrough | |||
#else | |||
#define JSON_HEDLEY_FALL_THROUGH | |||
#endif | |||
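
The reworked selection now prefers the GNU attribute, then the standard or `clang::` C++ attributes via the new `_NS` probe, then SAL's `__fallthrough`, so the C++98-compat wrapper is applied wherever a C++11 attribute is chosen. Typical use in a switch (function names illustrative):

```cpp
switch (value) {
    case 0:
        handle_zero();
        JSON_HEDLEY_FALL_THROUGH;  // annotate the intentional fallthrough
    case 1:
        handle_small_value();
        break;
    default:
        break;
}
```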
#if defined(JSON_HEDLEY_RETURNS_NON_NULL) | |||
@@ -1320,12 +1382,11 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
#if defined(JSON_HEDLEY_REQUIRE_CONSTEXPR) | |||
#undef JSON_HEDLEY_REQUIRE_CONSTEXPR | |||
#endif | |||
/* Note the double-underscore. For internal use only; no API | |||
* guarantees! */ | |||
#if defined(JSON_HEDLEY__IS_CONSTEXPR) | |||
#undef JSON_HEDLEY__IS_CONSTEXPR | |||
/* JSON_HEDLEY_IS_CONSTEXPR_ is for | |||
HEDLEY INTERNAL USE ONLY. API subject to change without notice. */ | |||
#if defined(JSON_HEDLEY_IS_CONSTEXPR_) | |||
#undef JSON_HEDLEY_IS_CONSTEXPR_ | |||
#endif | |||
#if \ | |||
JSON_HEDLEY_HAS_BUILTIN(__builtin_constant_p) || \ | |||
JSON_HEDLEY_GCC_VERSION_CHECK(3,4,0) || \ | |||
@@ -1334,7 +1395,7 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
JSON_HEDLEY_ARM_VERSION_CHECK(4,1,0) || \ | |||
JSON_HEDLEY_IBM_VERSION_CHECK(13,1,0) || \ | |||
JSON_HEDLEY_TI_VERSION_CHECK(6,1,0) || \ | |||
JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) || \ | |||
(JSON_HEDLEY_SUNPRO_VERSION_CHECK(5,10,0) && !defined(__cplusplus)) || \ | |||
JSON_HEDLEY_CRAY_VERSION_CHECK(8,1,0) | |||
#define JSON_HEDLEY_IS_CONSTANT(expr) __builtin_constant_p(expr) | |||
#endif | |||
@@ -1348,10 +1409,10 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
JSON_HEDLEY_ARM_VERSION_CHECK(5,4,0) || \ | |||
JSON_HEDLEY_TINYC_VERSION_CHECK(0,9,24) | |||
#if defined(__INTPTR_TYPE__) | |||
#define JSON_HEDLEY__IS_CONSTEXPR(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0)), int*) | |||
#define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0)), int*) | |||
#else | |||
#include <stdint.h> | |||
#define JSON_HEDLEY__IS_CONSTEXPR(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((intptr_t) ((expr) * 0)) : (int*) 0)), int*) | |||
#define JSON_HEDLEY_IS_CONSTEXPR_(expr) __builtin_types_compatible_p(__typeof__((1 ? (void*) ((intptr_t) ((expr) * 0)) : (int*) 0)), int*) | |||
#endif | |||
# elif \ | |||
(defined(__STDC_VERSION__) && (__STDC_VERSION__ >= 201112L) && !defined(JSON_HEDLEY_SUNPRO_VERSION) && !defined(JSON_HEDLEY_PGI_VERSION)) || \ | |||
@@ -1361,10 +1422,10 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
JSON_HEDLEY_IBM_VERSION_CHECK(12,1,0) || \ | |||
JSON_HEDLEY_ARM_VERSION_CHECK(5,3,0) | |||
#if defined(__INTPTR_TYPE__) | |||
#define JSON_HEDLEY__IS_CONSTEXPR(expr) _Generic((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0) | |||
#define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((__INTPTR_TYPE__) ((expr) * 0)) : (int*) 0), int*: 1, void*: 0) | |||
#else | |||
#include <stdint.h> | |||
#define JSON_HEDLEY__IS_CONSTEXPR(expr) _Generic((1 ? (void*) ((intptr_t) * 0) : (int*) 0), int*: 1, void*: 0) | |||
#define JSON_HEDLEY_IS_CONSTEXPR_(expr) _Generic((1 ? (void*) ((intptr_t) * 0) : (int*) 0), int*: 1, void*: 0) | |||
#endif | |||
# elif \ | |||
defined(JSON_HEDLEY_GCC_VERSION) || \ | |||
@@ -1372,7 +1433,7 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
defined(JSON_HEDLEY_TINYC_VERSION) || \ | |||
defined(JSON_HEDLEY_TI_VERSION) || \ | |||
defined(__clang__) | |||
# define JSON_HEDLEY__IS_CONSTEXPR(expr) ( \ | |||
# define JSON_HEDLEY_IS_CONSTEXPR_(expr) ( \ | |||
sizeof(void) != \ | |||
sizeof(*( \ | |||
1 ? \ | |||
@@ -1383,11 +1444,11 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
) | |||
# endif | |||
#endif | |||
#if defined(JSON_HEDLEY__IS_CONSTEXPR) | |||
#if defined(JSON_HEDLEY_IS_CONSTEXPR_) | |||
#if !defined(JSON_HEDLEY_IS_CONSTANT) | |||
#define JSON_HEDLEY_IS_CONSTANT(expr) JSON_HEDLEY__IS_CONSTEXPR(expr) | |||
#define JSON_HEDLEY_IS_CONSTANT(expr) JSON_HEDLEY_IS_CONSTEXPR_(expr) | |||
#endif | |||
#define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (JSON_HEDLEY__IS_CONSTEXPR(expr) ? (expr) : (-1)) | |||
#define JSON_HEDLEY_REQUIRE_CONSTEXPR(expr) (JSON_HEDLEY_IS_CONSTEXPR_(expr) ? (expr) : (-1)) | |||
#else | |||
#if !defined(JSON_HEDLEY_IS_CONSTANT) | |||
#define JSON_HEDLEY_IS_CONSTANT(expr) (0) | |||
@@ -1427,12 +1488,10 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
) | |||
# define JSON_HEDLEY_STATIC_ASSERT(expr, message) _Static_assert(expr, message) | |||
#elif \ | |||
(defined(__cplusplus) && (__cplusplus >= 201703L)) || \ | |||
(defined(__cplusplus) && (__cplusplus >= 201103L)) || \ | |||
JSON_HEDLEY_MSVC_VERSION_CHECK(16,0,0) || \ | |||
(defined(__cplusplus) && JSON_HEDLEY_TI_VERSION_CHECK(8,3,0)) | |||
# define JSON_HEDLEY_STATIC_ASSERT(expr, message) static_assert(expr, message) | |||
#elif defined(__cplusplus) && (__cplusplus >= 201103L) | |||
# define JSON_HEDLEY_STATIC_ASSERT(expr, message) static_assert(expr) | |||
# define JSON_HEDLEY_STATIC_ASSERT(expr, message) JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(static_assert(expr, message)) | |||
#else | |||
# define JSON_HEDLEY_STATIC_ASSERT(expr, message) | |||
#endif | |||
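
Under C++11 the message argument is no longer dropped: both the C++11 and C++17 cases now forward `message`, and the C++ branch is wrapped to silence `-Wc++98-compat`. Example:

```cpp
JSON_HEDLEY_STATIC_ASSERT(sizeof(void*) >= 4,
                          "this sketch assumes at least 32-bit pointers");
```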
@@ -1483,6 +1542,23 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
#define JSON_HEDLEY_CPP_CAST(T, expr) (expr) | |||
#endif | |||
#if defined(JSON_HEDLEY_NULL) | |||
#undef JSON_HEDLEY_NULL | |||
#endif | |||
#if defined(__cplusplus) | |||
#if __cplusplus >= 201103L | |||
#define JSON_HEDLEY_NULL JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_(nullptr) | |||
#elif defined(NULL) | |||
#define JSON_HEDLEY_NULL NULL | |||
#else | |||
#define JSON_HEDLEY_NULL JSON_HEDLEY_STATIC_CAST(void*, 0) | |||
#endif | |||
#elif defined(NULL) | |||
#define JSON_HEDLEY_NULL NULL | |||
#else | |||
#define JSON_HEDLEY_NULL ((void*) 0) | |||
#endif | |||
#if defined(JSON_HEDLEY_MESSAGE) | |||
#undef JSON_HEDLEY_MESSAGE | |||
#endif | |||
@@ -1525,28 +1601,33 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
# define JSON_HEDLEY_WARNING(msg) JSON_HEDLEY_MESSAGE(msg) | |||
#endif | |||
#if defined(JSON_HEDLEY_REQUIRE) | |||
#undef JSON_HEDLEY_REQUIRE | |||
#endif | |||
#if defined(JSON_HEDLEY_REQUIRE_MSG) | |||
#undef JSON_HEDLEY_REQUIRE_MSG | |||
#endif | |||
#if JSON_HEDLEY_HAS_ATTRIBUTE(diagnose_if) | |||
# if JSON_HEDLEY_HAS_WARNING("-Wgcc-compat") | |||
# define JSON_HEDLEY_REQUIRE_MSG(expr, msg) \ | |||
# define JSON_HEDLEY_REQUIRE(expr) \ | |||
JSON_HEDLEY_DIAGNOSTIC_PUSH \ | |||
_Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ | |||
__attribute__((diagnose_if(!(expr), #expr, "error"))) \ | |||
JSON_HEDLEY_DIAGNOSTIC_POP | |||
# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) \ | |||
JSON_HEDLEY_DIAGNOSTIC_PUSH \ | |||
_Pragma("clang diagnostic ignored \"-Wgcc-compat\"") \ | |||
__attribute__((__diagnose_if__(!(expr), msg, "error"))) \ | |||
__attribute__((diagnose_if(!(expr), msg, "error"))) \ | |||
JSON_HEDLEY_DIAGNOSTIC_POP | |||
# else | |||
# define JSON_HEDLEY_REQUIRE_MSG(expr, msg) __attribute__((__diagnose_if__(!(expr), msg, "error"))) | |||
# define JSON_HEDLEY_REQUIRE(expr) __attribute__((diagnose_if(!(expr), #expr, "error"))) | |||
# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) __attribute__((diagnose_if(!(expr), msg, "error"))) | |||
# endif | |||
#else | |||
# define JSON_HEDLEY_REQUIRE_MSG(expr, msg) | |||
# define JSON_HEDLEY_REQUIRE(expr) | |||
# define JSON_HEDLEY_REQUIRE_MSG(expr,msg) | |||
#endif | |||
#if defined(JSON_HEDLEY_REQUIRE) | |||
#undef JSON_HEDLEY_REQUIRE | |||
#endif | |||
#define JSON_HEDLEY_REQUIRE(expr) JSON_HEDLEY_REQUIRE_MSG(expr, #expr) | |||
#if defined(JSON_HEDLEY_FLAGS) | |||
#undef JSON_HEDLEY_FLAGS | |||
#endif | |||
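
`JSON_HEDLEY_REQUIRE(expr)` is now defined directly instead of being derived from `JSON_HEDLEY_REQUIRE_MSG`, and both spell the attribute as `diagnose_if` rather than `__diagnose_if__`. The constraint is only enforced by compilers with `diagnose_if` (Clang); elsewhere the macros expand to nothing. A usage sketch with an illustrative function:

```cpp
// Only Clang diagnoses a violated constraint; other compilers compile this silently.
int clamp_percent(int p)
    JSON_HEDLEY_REQUIRE_MSG(p >= 0 && p <= 100, "p must be within [0, 100]");

// clamp_percent(150);  // Clang: error with the message above
```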
@@ -1568,6 +1649,15 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
# define JSON_HEDLEY_FLAGS_CAST(T, expr) JSON_HEDLEY_STATIC_CAST(T, expr) | |||
#endif | |||
#if defined(JSON_HEDLEY_EMPTY_BASES) | |||
#undef JSON_HEDLEY_EMPTY_BASES | |||
#endif | |||
#if JSON_HEDLEY_MSVC_VERSION_CHECK(19,0,23918) && !JSON_HEDLEY_MSVC_VERSION_CHECK(20,0,0) | |||
#define JSON_HEDLEY_EMPTY_BASES __declspec(empty_bases) | |||
#else | |||
#define JSON_HEDLEY_EMPTY_BASES | |||
#endif | |||
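
`JSON_HEDLEY_EMPTY_BASES` exposes MSVC's `__declspec(empty_bases)`, which forces empty-base layout optimization on MSVC 2015 Update 2 and later; on other compilers it expands to nothing. Sketch with hypothetical types:

```cpp
struct empty_a {};
struct empty_b {};

struct JSON_HEDLEY_EMPTY_BASES widget : empty_a, empty_b {
    int value;
};
// With the declspec, MSVC can place both empty bases at offset 0, so sizeof(widget)
// can stay sizeof(int) instead of growing to accommodate the bases.
```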
/* Remaining macros are deprecated. */ | |||
#if defined(JSON_HEDLEY_GCC_NOT_CLANG_VERSION_CHECK) | |||
@@ -1692,30 +1782,30 @@ JSON_HEDLEY_DIAGNOSTIC_POP | |||
@def NLOHMANN_JSON_SERIALIZE_ENUM | |||
@since version 3.4.0 | |||
*/ | |||
#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) \ | |||
template<typename BasicJsonType> \ | |||
inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ | |||
{ \ | |||
static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ | |||
static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ | |||
auto it = std::find_if(std::begin(m), std::end(m), \ | |||
[e](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ | |||
{ \ | |||
return ej_pair.first == e; \ | |||
}); \ | |||
j = ((it != std::end(m)) ? it : std::begin(m))->second; \ | |||
} \ | |||
template<typename BasicJsonType> \ | |||
inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ | |||
{ \ | |||
static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ | |||
static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ | |||
auto it = std::find_if(std::begin(m), std::end(m), \ | |||
[j](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ | |||
{ \ | |||
return ej_pair.second == j; \ | |||
}); \ | |||
e = ((it != std::end(m)) ? it : std::begin(m))->first; \ | |||
#define NLOHMANN_JSON_SERIALIZE_ENUM(ENUM_TYPE, ...) \ | |||
template<typename BasicJsonType> \ | |||
inline void to_json(BasicJsonType& j, const ENUM_TYPE& e) \ | |||
{ \ | |||
static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ | |||
static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ | |||
auto it = std::find_if(std::begin(m), std::end(m), \ | |||
[e](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ | |||
{ \ | |||
return ej_pair.first == e; \ | |||
}); \ | |||
j = ((it != std::end(m)) ? it : std::begin(m))->second; \ | |||
} \ | |||
template<typename BasicJsonType> \ | |||
inline void from_json(const BasicJsonType& j, ENUM_TYPE& e) \ | |||
{ \ | |||
static_assert(std::is_enum<ENUM_TYPE>::value, #ENUM_TYPE " must be an enum!"); \ | |||
static const std::pair<ENUM_TYPE, BasicJsonType> m[] = __VA_ARGS__; \ | |||
auto it = std::find_if(std::begin(m), std::end(m), \ | |||
[&j](const std::pair<ENUM_TYPE, BasicJsonType>& ej_pair) -> bool \ | |||
{ \ | |||
return ej_pair.second == j; \ | |||
}); \ | |||
e = ((it != std::end(m)) ? it : std::begin(m))->first; \ | |||
} | |||
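
Beyond the re-indentation, the functional change is that the `from_json` lambda now captures `j` by reference (`[&j]`) instead of by value, avoiding a copy of the JSON value per comparison. Usage is unchanged; for reference:

```cpp
#include <nlohmann/json.hpp>

enum class Color { red, green, blue, unknown };

// Map enumerators to JSON values; the first entry is also the fallback for unmapped input.
NLOHMANN_JSON_SERIALIZE_ENUM(Color, {
    {Color::unknown, nullptr},
    {Color::red, "red"},
    {Color::green, "green"},
    {Color::blue, "blue"},
})

void demo() {
    nlohmann::json j = Color::green;                  // serializes to "green"
    Color c = j.get<Color>();                         // Color::green
    Color d = nlohmann::json("purple").get<Color>();  // unmapped -> Color::unknown
    (void) c; (void) d;
}
```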
// Ugly macros to avoid uglier copy-paste when specializing basic_json. They | |||
@@ -2704,6 +2794,19 @@ struct is_compatible_type_impl < | |||
template <typename BasicJsonType, typename CompatibleType> | |||
struct is_compatible_type | |||
: is_compatible_type_impl<BasicJsonType, CompatibleType> {}; | |||
// https://en.cppreference.com/w/cpp/types/conjunction | |||
template<class...> struct conjunction : std::true_type { }; | |||
template<class B1> struct conjunction<B1> : B1 { }; | |||
template<class B1, class... Bn> | |||
struct conjunction<B1, Bn...> | |||
: std::conditional<bool(B1::value), conjunction<Bn...>, B1>::type {}; | |||
template <typename T1, typename T2> | |||
struct is_constructible_tuple : std::false_type {}; | |||
template <typename T1, typename... Args> | |||
struct is_constructible_tuple<T1, std::tuple<Args...>> : conjunction<std::is_constructible<T1, Args>...> {}; | |||
} // namespace detail | |||
} // namespace nlohmann | |||
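
These traits back the constrained `to_json` overloads further down: `is_constructible_tuple<BasicJsonType, std::tuple<Args...>>` is true only when every tuple element can construct a JSON value. A small check of what the trait computes (these names live in the internal `detail` namespace, so treat this as illustration only):

```cpp
#include <tuple>
#include <nlohmann/json.hpp>

using nlohmann::detail::is_constructible_tuple;

static_assert(is_constructible_tuple<nlohmann::json,
                                     std::tuple<int, const char*, double>>::value,
              "every element is convertible to json");
static_assert(!is_constructible_tuple<nlohmann::json, int>::value,
              "non-tuple types fall back to the false primary template");
```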
@@ -2922,7 +3025,7 @@ void from_json(const BasicJsonType& j, std::valarray<T>& l) | |||
JSON_THROW(type_error::create(302, "type must be array, but is " + std::string(j.type_name()))); | |||
} | |||
l.resize(j.size()); | |||
std::copy(j.m_value.array->begin(), j.m_value.array->end(), std::begin(l)); | |||
std::copy(j.begin(), j.end(), std::begin(l)); | |||
} | |||
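
The copy into the `valarray` now goes through the JSON value's public `begin()`/`end()` instead of reaching into the internal `m_value.array` pointer. The observable conversion is unchanged:

```cpp
#include <valarray>
#include <nlohmann/json.hpp>

void demo() {
    nlohmann::json j = {1.5, 2.5, 3.5};
    auto v = j.get<std::valarray<double>>();  // v holds {1.5, 2.5, 3.5}
    nlohmann::json round_trip = v;            // back to a JSON array
}
```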
template <typename BasicJsonType, typename T, std::size_t N> | |||
@@ -3186,6 +3289,11 @@ namespace nlohmann | |||
{ | |||
namespace detail | |||
{ | |||
template<typename string_type> | |||
void int_to_string( string_type& target, std::size_t value ) | |||
{ | |||
target = std::to_string(value); | |||
} | |||
template <typename IteratorType> class iteration_proxy_value | |||
{ | |||
public: | |||
@@ -3194,6 +3302,7 @@ template <typename IteratorType> class iteration_proxy_value | |||
using pointer = value_type * ; | |||
using reference = value_type & ; | |||
using iterator_category = std::input_iterator_tag; | |||
using string_type = typename std::remove_cv< typename std::remove_reference<decltype( std::declval<IteratorType>().key() ) >::type >::type; | |||
private: | |||
/// the iterator | |||
@@ -3203,9 +3312,9 @@ template <typename IteratorType> class iteration_proxy_value | |||
/// last stringified array index | |||
mutable std::size_t array_index_last = 0; | |||
/// a string representation of the array index | |||
mutable std::string array_index_str = "0"; | |||
mutable string_type array_index_str = "0"; | |||
/// an empty string (to return a reference for primitive values) | |||
const std::string empty_str = ""; | |||
const string_type empty_str = ""; | |||
public: | |||
explicit iteration_proxy_value(IteratorType it) noexcept : anchor(it) {} | |||
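
The proxy now derives its string type from the iterator's `key()` type instead of hard-coding `std::string`, so iteration works for `basic_json` specializations with a different string type; array indices are stringified through the new `int_to_string` hook. Behavior for the default `nlohmann::json` is unchanged:

```cpp
#include <iostream>
#include <nlohmann/json.hpp>

void demo() {
    nlohmann::json arr = {"a", "b", "c"};
    for (const auto& el : arr.items()) {
        // For arrays, key() is the stringified index: "0", "1", "2".
        std::cout << el.key() << " -> " << el.value() << '\n';
    }
}
```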
@@ -3238,7 +3347,7 @@ template <typename IteratorType> class iteration_proxy_value | |||
} | |||
/// return key of the iterator | |||
const std::string& key() const | |||
const string_type& key() const | |||
{ | |||
assert(anchor.m_object != nullptr); | |||
@@ -3249,7 +3358,7 @@ template <typename IteratorType> class iteration_proxy_value | |||
{ | |||
if (array_index != array_index_last) | |||
{ | |||
array_index_str = std::to_string(array_index); | |||
int_to_string( array_index_str, array_index ); | |||
array_index_last = array_index; | |||
} | |||
return array_index_str; | |||
@@ -3489,7 +3598,10 @@ struct external_constructor<value_t::array> | |||
j.m_type = value_t::array; | |||
j.m_value = value_t::array; | |||
j.m_value.array->resize(arr.size()); | |||
std::copy(std::begin(arr), std::end(arr), j.m_value.array->begin()); | |||
if (arr.size() > 0) | |||
{ | |||
std::copy(std::begin(arr), std::end(arr), j.m_value.array->begin()); | |||
} | |||
j.assert_invariant(); | |||
} | |||
}; | |||
@@ -3634,8 +3746,8 @@ void to_json(BasicJsonType& j, const T(&arr)[N]) | |||
external_constructor<value_t::array>::construct(j, arr); | |||
} | |||
template<typename BasicJsonType, typename... Args> | |||
void to_json(BasicJsonType& j, const std::pair<Args...>& p) | |||
template < typename BasicJsonType, typename T1, typename T2, enable_if_t < std::is_constructible<BasicJsonType, T1>::value&& std::is_constructible<BasicJsonType, T2>::value, int > = 0 > | |||
void to_json(BasicJsonType& j, const std::pair<T1, T2>& p) | |||
{ | |||
j = { p.first, p.second }; | |||
} | |||
@@ -3654,10 +3766,10 @@ void to_json_tuple_impl(BasicJsonType& j, const Tuple& t, index_sequence<Idx...> | |||
j = { std::get<Idx>(t)... }; | |||
} | |||
template<typename BasicJsonType, typename... Args> | |||
void to_json(BasicJsonType& j, const std::tuple<Args...>& t) | |||
template<typename BasicJsonType, typename T, enable_if_t<is_constructible_tuple<BasicJsonType, T>::value, int > = 0> | |||
void to_json(BasicJsonType& j, const T& t) | |||
{ | |||
to_json_tuple_impl(j, t, index_sequence_for<Args...> {}); | |||
to_json_tuple_impl(j, t, make_index_sequence<std::tuple_size<T>::value> {}); | |||
} | |||
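
Both overloads are now SFINAE-constrained: the pair overload requires each member to be constructible into the JSON type, and the tuple overload is gated on the new `is_constructible_tuple` trait, so it drops out of overload resolution instead of being selected and failing inside the body. The resulting conversions stay the same:

```cpp
#include <string>
#include <tuple>
#include <utility>
#include <nlohmann/json.hpp>

void demo() {
    std::pair<std::string, int> p{"answer", 42};
    nlohmann::json jp = p;                  // ["answer", 42]

    std::tuple<int, const char*, double> t{1, "two", 3.0};
    nlohmann::json jt = t;                  // [1, "two", 3.0]
}
```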
struct to_json_fn | |||
@@ -3882,9 +3994,8 @@ class input_stream_adapter : public input_adapter_protocol | |||
class input_buffer_adapter : public input_adapter_protocol | |||
{ | |||
public: | |||
JSON_HEDLEY_NON_NULL(2) | |||
input_buffer_adapter(const char* b, const std::size_t l) noexcept | |||
: cursor(b), limit(b + l) | |||
: cursor(b), limit(b == nullptr ? nullptr : (b + l)) | |||
{} | |||
// delete because of pointer members | |||
@@ -3898,6 +4009,7 @@ class input_buffer_adapter : public input_adapter_protocol | |||
{ | |||
if (JSON_HEDLEY_LIKELY(cursor < limit)) | |||
{ | |||
assert(cursor != nullptr and limit != nullptr); | |||
return std::char_traits<char>::to_int_type(*(cursor++)); | |||
} | |||
@@ -5675,7 +5787,7 @@ class binary_reader | |||
const int exp = (half >> 10u) & 0x1Fu; | |||
const unsigned int mant = half & 0x3FFu; | |||
assert(0 <= exp and exp <= 32); | |||
assert(0 <= mant and mant <= 1024); | |||
assert(mant <= 1024); | |||
switch (exp) | |||
{ | |||
case 0: | |||
@@ -6957,7 +7069,7 @@ class binary_reader | |||
/*! | |||
@param[in] format the current format | |||
@param[in] detail a detailed error message | |||
@param[in] context further contect information | |||
@param[in] context further context information | |||
@return a message string to use in the parse_error exceptions | |||
*/ | |||
std::string exception_message(const input_format_t format, | |||
@@ -9315,8 +9427,8 @@ class iter_impl | |||
/*! | |||
@brief const copy constructor | |||
@param[in] other const iterator to copy from | |||
@note This copy constuctor had to be defined explicitely to circumvent a bug | |||
occuring on msvc v19.0 compiler (VS 2015) debug build. For more | |||
@note This copy constructor had to be defined explicitly to circumvent a bug | |||
occurring on msvc v19.0 compiler (VS 2015) debug build. For more | |||
information refer to: https://github.com/nlohmann/json/issues/1608 | |||
*/ | |||
iter_impl(const iter_impl<const BasicJsonType>& other) noexcept | |||
@@ -10088,7 +10200,7 @@ class json_pointer | |||
/*! | |||
@brief append an array index at the end of this JSON pointer | |||
@param[in] array_index array index ot append | |||
@param[in] array_index array index to append | |||
@return JSON pointer with @a array_index appended | |||
@liveexample{The example shows the usage of `operator/=`.,json_pointer__operator_add} | |||
@@ -10229,7 +10341,7 @@ class json_pointer | |||
@since version 3.6.0 | |||
*/ | |||
const std::string& back() | |||
const std::string& back() const | |||
{ | |||
if (JSON_HEDLEY_UNLIKELY(empty())) | |||
{ | |||
@@ -12041,13 +12153,12 @@ class binary_writer | |||
*/ | |||
static std::size_t calc_bson_array_size(const typename BasicJsonType::array_t& value) | |||
{ | |||
std::size_t embedded_document_size = 0ul; | |||
std::size_t array_index = 0ul; | |||
for (const auto& el : value) | |||
const std::size_t embedded_document_size = std::accumulate(std::begin(value), std::end(value), 0ul, [&array_index](std::size_t result, const typename BasicJsonType::array_t::value_type & el) | |||
{ | |||
embedded_document_size += calc_bson_element_size(std::to_string(array_index++), el); | |||
} | |||
return result + calc_bson_element_size(std::to_string(array_index++), el); | |||
}); | |||
return sizeof(std::int32_t) + embedded_document_size + 1ul; | |||
} | |||
@@ -12868,7 +12979,7 @@ inline cached_power get_cached_power_for_binary_exponent(int e) | |||
// ==> 2^(q - 1 + alpha) <= c * 2^(e + q) | |||
// ==> 2^(alpha - e - 1) <= c | |||
// | |||
// If c were an exakt power of ten, i.e. c = 10^k, one may determine k as | |||
// If c were an exact power of ten, i.e. c = 10^k, one may determine k as | |||
// | |||
// k = ceil( log_10( 2^(alpha - e - 1) ) ) | |||
// = ceil( (alpha - e - 1) * log_10(2) ) | |||
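
The surrounding comment derives `k = ceil((alpha - e - 1) * log_10(2))`; plugging in illustrative numbers (not the library's actual `alpha` or binary exponent) makes the step concrete:

```cpp
#include <cmath>

// Illustration only: with alpha = -60 and e = -80, alpha - e - 1 = 19,
// so k = ceil(19 * log10(2)) = ceil(5.7196...) = 6.
inline int k_for_example() {
    return static_cast<int>(std::ceil((-60 - (-80) - 1) * std::log10(2.0)));
}
```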
@@ -14264,7 +14375,7 @@ class serializer | |||
if (is_negative) | |||
{ | |||
*buffer_ptr = '-'; | |||
abs_value = static_cast<number_unsigned_t>(std::abs(static_cast<std::intmax_t>(x))); | |||
abs_value = remove_sign(x); | |||
// account one more byte for the minus sign | |||
n_chars = 1 + count_digits(abs_value); | |||
@@ -14445,6 +14556,32 @@ class serializer | |||
return state; | |||
} | |||
/* | |||
* Overload to make the compiler happy while it is instantiating | |||
* dump_integer for number_unsigned_t. | |||
* Must never be called. | |||
*/ | |||
number_unsigned_t remove_sign(number_unsigned_t x) | |||
{ | |||
assert(false); // LCOV_EXCL_LINE | |||
return x; // LCOV_EXCL_LINE | |||
} | |||
/* | |||
* Helper function for dump_integer | |||
* | |||
* This function takes a negative signed integer and returns its absolute | |||
* value as unsigned integer. The plus/minus shuffling is necessary as we can | |||
* not directly remove the sign of an arbitrary signed integer as the | |||
* absolute values of INT_MIN and INT_MAX are usually not the same. See | |||
* #1708 for details. | |||
*/ | |||
inline number_unsigned_t remove_sign(number_integer_t x) noexcept | |||
{ | |||
assert(x < 0 and x < (std::numeric_limits<number_integer_t>::max)()); | |||
return static_cast<number_unsigned_t>(-(x + 1)) + 1; | |||
} | |||
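
The rewritten sign removal avoids evaluating `std::abs` on the most negative value, which overflows because that value has no positive counterpart in the signed type. Worked through for a 64-bit `number_integer_t` (two's-complement limits):

```cpp
#include <cstdint>
#include <limits>

// x        == -9223372036854775808   (std::numeric_limits<int64_t>::min())
// x + 1    == -9223372036854775807   (still representable)
// -(x + 1) ==  9223372036854775807   (== int64_t max, representable)
// cast + 1 ==  9223372036854775808   (fits comfortably in uint64_t)
static_assert(static_cast<std::uint64_t>(
                  -(std::numeric_limits<std::int64_t>::min() + 1)) + 1u
                  == 9223372036854775808ull,
              "remove_sign arithmetic for the most negative 64-bit value");
```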
private: | |||
/// the output of the serializer | |||
output_adapter_t<char> o = nullptr; | |||
@@ -15359,7 +15496,7 @@ class basic_json | |||
object = nullptr; // silence warning, see #821 | |||
if (JSON_HEDLEY_UNLIKELY(t == value_t::null)) | |||
{ | |||
JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.7.0")); // LCOV_EXCL_LINE | |||
JSON_THROW(other_error::create(500, "961c151d2e87f2686a955a9be24d316f1362bf21 3.7.1")); // LCOV_EXCL_LINE | |||
} | |||
break; | |||
} | |||
@@ -17039,11 +17176,11 @@ class basic_json | |||
detail::has_non_default_from_json<basic_json_t, ValueType>::value, | |||
int> = 0> | |||
ValueType get() const noexcept(noexcept( | |||
JSONSerializer<ValueTypeCV>::from_json(std::declval<const basic_json_t&>()))) | |||
JSONSerializer<ValueType>::from_json(std::declval<const basic_json_t&>()))) | |||
{ | |||
static_assert(not std::is_reference<ValueTypeCV>::value, | |||
"get() cannot be used with reference types, you might want to use get_ref()"); | |||
return JSONSerializer<ValueTypeCV>::from_json(*this); | |||
return JSONSerializer<ValueType>::from_json(*this); | |||
} | |||
/*! | |||
@@ -18416,7 +18553,7 @@ class basic_json | |||
@since version 3.6.0 | |||
*/ | |||
template<typename KeyT, typename std::enable_if< | |||
not std::is_same<KeyT, json_pointer>::value, int>::type = 0> | |||
not std::is_same<typename std::decay<KeyT>::type, json_pointer>::value, int>::type = 0> | |||
bool contains(KeyT && key) const | |||
{ | |||
return is_object() and m_value.object->find(std::forward<KeyT>(key)) != m_value.object->end(); | |||
@@ -18425,7 +18562,7 @@ class basic_json | |||
/*! | |||
@brief check the existence of an element in a JSON object given a JSON pointer | |||
Check wehther the given JSON pointer @a ptr can be resolved in the current | |||
Check whether the given JSON pointer @a ptr can be resolved in the current | |||
JSON value. | |||
@note This method can be executed on any JSON value type. | |||
@@ -22468,7 +22605,7 @@ struct hash<nlohmann::json> | |||
/// @note: do not remove the space after '<', | |||
/// see https://github.com/nlohmann/json/pull/679 | |||
template<> | |||
struct less< ::nlohmann::detail::value_t> | |||
struct less<::nlohmann::detail::value_t> | |||
{ | |||
/*! | |||
@brief compare two value_t enum values | |||
@@ -22563,6 +22700,7 @@ inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std | |||
#undef JSON_HEDLEY_ARRAY_PARAM | |||
#undef JSON_HEDLEY_ASSUME | |||
#undef JSON_HEDLEY_BEGIN_C_DECLS | |||
#undef JSON_HEDLEY_C_DECL | |||
#undef JSON_HEDLEY_CLANG_HAS_ATTRIBUTE | |||
#undef JSON_HEDLEY_CLANG_HAS_BUILTIN | |||
#undef JSON_HEDLEY_CLANG_HAS_CPP_ATTRIBUTE | |||
@@ -22575,21 +22713,23 @@ inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std | |||
#undef JSON_HEDLEY_CONCAT | |||
#undef JSON_HEDLEY_CONCAT_EX | |||
#undef JSON_HEDLEY_CONST | |||
#undef JSON_HEDLEY_CONSTEXPR | |||
#undef JSON_HEDLEY_CONST_CAST | |||
#undef JSON_HEDLEY_CONSTEXPR | |||
#undef JSON_HEDLEY_CPP_CAST | |||
#undef JSON_HEDLEY_CRAY_VERSION | |||
#undef JSON_HEDLEY_CRAY_VERSION_CHECK | |||
#undef JSON_HEDLEY_C_DECL | |||
#undef JSON_HEDLEY_DEPRECATED | |||
#undef JSON_HEDLEY_DEPRECATED_FOR | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CAST_QUAL | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_CPP98_COMPAT_WRAP_ | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_DEPRECATED | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_CPP_ATTRIBUTES | |||
#undef JSON_HEDLEY_DIAGNOSTIC_DISABLE_UNKNOWN_PRAGMAS | |||
#undef JSON_HEDLEY_DIAGNOSTIC_POP | |||
#undef JSON_HEDLEY_DIAGNOSTIC_PUSH | |||
#undef JSON_HEDLEY_DMC_VERSION | |||
#undef JSON_HEDLEY_DMC_VERSION_CHECK | |||
#undef JSON_HEDLEY_EMPTY_BASES | |||
#undef JSON_HEDLEY_EMSCRIPTEN_VERSION | |||
#undef JSON_HEDLEY_EMSCRIPTEN_VERSION_CHECK | |||
#undef JSON_HEDLEY_END_C_DECLS | |||
@@ -22618,6 +22758,7 @@ inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std | |||
#undef JSON_HEDLEY_HAS_ATTRIBUTE | |||
#undef JSON_HEDLEY_HAS_BUILTIN | |||
#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE | |||
#undef JSON_HEDLEY_HAS_CPP_ATTRIBUTE_NS | |||
#undef JSON_HEDLEY_HAS_DECLSPEC_ATTRIBUTE | |||
#undef JSON_HEDLEY_HAS_EXTENSION | |||
#undef JSON_HEDLEY_HAS_FEATURE | |||
@@ -22631,15 +22772,18 @@ inline nlohmann::json::json_pointer operator "" _json_pointer(const char* s, std | |||
#undef JSON_HEDLEY_INTEL_VERSION | |||
#undef JSON_HEDLEY_INTEL_VERSION_CHECK | |||
#undef JSON_HEDLEY_IS_CONSTANT | |||
#undef JSON_HEDLEY_IS_CONSTEXPR_ | |||
#undef JSON_HEDLEY_LIKELY | |||
#undef JSON_HEDLEY_MALLOC | |||
#undef JSON_HEDLEY_MESSAGE | |||
#undef JSON_HEDLEY_MSVC_VERSION | |||
#undef JSON_HEDLEY_MSVC_VERSION_CHECK | |||
#undef JSON_HEDLEY_NEVER_INLINE | |||
#undef JSON_HEDLEY_NO_ESCAPE | |||
#undef JSON_HEDLEY_NON_NULL | |||
#undef JSON_HEDLEY_NO_RETURN | |||
#undef JSON_HEDLEY_NO_THROW | |||
#undef JSON_HEDLEY_NULL | |||
#undef JSON_HEDLEY_PELLES_VERSION | |||
#undef JSON_HEDLEY_PELLES_VERSION_CHECK | |||
#undef JSON_HEDLEY_PGI_VERSION |
@@ -1,3 +1,3 @@ | |||
Name: nlohmann-json | |||
Namespace: nlohmann | |||
Version: 3.7.0 | |||
Version: 3.7.1 |
@@ -1 +0,0 @@ | |||
Name: args |
@@ -1,3 +0,0 @@ | |||
Name: taywee-args | |||
Version: 0.0.0 | |||
Namespace: taywee |
@@ -1,3 +1,3 @@ | |||
Name: ms-wil | |||
Version: 0.0.0 | |||
Version: 2019.11.10 | |||
Namespace: Microsoft |
@@ -1,6 +0,0 @@ | |||
Type: Package | |||
Name: taywee-args | |||
Namespace: taywee | |||
Library: args.lml |
@@ -1,5 +1,5 @@ | |||
Type: Library | |||
Name: WIL | |||
Name: wil | |||
Include-Path: repo/wil/include |
@@ -1,7 +1,6 @@ | |||
Name: dds | |||
Uses: taywee/args | |||
Uses: spdlog/spdlog | |||
Uses: Microsoft/WIL | |||
Uses: Microsoft/wil | |||
Uses: Niebler/range-v3 | |||
Uses: nlohmann/json |
@@ -2,9 +2,7 @@ Name: dds | |||
Version: 0.1.0 | |||
Depends: neo-buffer 0.1.0 | |||
Depends: taywee-args 0.0.0 | |||
Depends: spdlog 1.4.2 | |||
Depends: ms-wil 0.0.0 | |||
Depends: ms-wil 2019.11.10 | |||
Depends: range-v3 0.9.1 | |||
Depends: nlohmann-json 3.7.0 | |||
Depends: nlohmann-json 3.7.0 | |||
Depends: nlohmann-json 3.7.1 |
@@ -1,9 +1,9 @@ | |||
imports: | |||
spdlog: external/spdlog | |||
taywee-args: external/taywee-args | |||
wil: external/wil | |||
ranges-v3: external/ranges-v3 | |||
nlohmann-json: external/nlohmann-json/include/nlohmann | |||
spdlog: external/repo/spdlog | |||
taywee-args: external/repo/taywee-args | |||
wil: external/repo/wil | |||
ranges-v3: external/repo/range-v3 | |||
nlohmann-json: external/repo/nlohmann-json/include/nlohmann | |||
git module spdlog: | |||
url: https://github.com/gabime/spdlog.git | |||
@@ -28,4 +28,4 @@ git module ranges-v3: | |||
pick: include/ | |||
curl module nlohmann-json: | |||
url: https://raw.githubusercontent.com/nlohmann/json/v3.7.0/single_include/nlohmann/json.hpp | |||
url: https://raw.githubusercontent.com/nlohmann/json/v3.7.1/single_include/nlohmann/json.hpp |
@@ -0,0 +1,11 @@ | |||
Remote-Package: range-v3 0.9.1; git url=https://github.com/ericniebler/range-v3.git ref=0.9.1 auto=Niebler/range-v3 | |||
Remote-Package: spdlog 1.4.2; git url=https://github.com/gabime/spdlog.git ref=v1.4.2 auto=spdlog/spdlog | |||
# Even a shallow clone of nlohmann-json is HUGE. This fork has only the minimal files needed.
Remote-Package: nlohmann-json 3.7.1; git url=https://github.com/vector-of-bool/json.git ref=dds/3.7.1 | |||
# MS never tags anything in this repo, so we'll use a fork that has some tags. | |||
Remote-Package: ms-wil 2019.11.10; git url=https://github.com/vector-of-bool/wil.git ref=dds/2019.11.10 | |||
# XXX: Don't depend on a moving revision! | |||
Remote-Package: neo-buffer 0.1.0; git url=https://github.com/vector-of-bool/neo-buffer.git ref=develop |
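
Each `Remote-Package` line pairs a package identity (`name version`) with a nested key-value list describing how to fetch it (currently only `git`, with `url`, `ref`, and an optional `auto=Namespace/name` that synthesizes `package.dds`/`library.dds` for repositories that lack them). The new `dds deps get` subcommand consumes this file roughly as in the sketch below (paths and names are illustrative):

```cpp
#include <dds/repo/remote.hpp>
#include <semver/version.hpp>

void sketch() {
    // Load the listing shown above and look up one entry.
    auto remotes = dds::remote_directory::load_from_file("remote.dds");
    const auto* listing = remotes.find("spdlog", semver::version::parse("1.4.2"));
    if (listing != nullptr) {
        // Clones the repository and packs it into a temporary source distribution.
        auto tsd = listing->pull_sdist();
        (void) tsd;
    }
}
```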
@@ -112,10 +112,10 @@ load_usage_requirements(path_ref project_root, path_ref build_root, path_ref use | |||
return usage_requirement_map::from_lm_index(idx); | |||
} | |||
void prepare_catch2_driver(library_build_params& lib_params, | |||
test_lib test_driver, | |||
const build_params& params, | |||
const package_manifest& man) { | |||
void prepare_catch2_driver(library_build_params& lib_params, | |||
test_lib test_driver, | |||
const build_params& params, | |||
const package_manifest&) { | |||
fs::path test_include_root = params.out_root / "_test_inc"; | |||
lib_params.test_include_dirs.emplace_back(test_include_root); | |||
@@ -1,5 +1,6 @@ | |||
#include <dds/build.hpp> | |||
#include <dds/logging.hpp> | |||
#include <dds/repo/remote.hpp> | |||
#include <dds/repo/repo.hpp> | |||
#include <dds/sdist.hpp> | |||
#include <dds/toolchain/from_dds.hpp> | |||
@@ -10,7 +11,7 @@ | |||
#include <range/v3/view/group_by.hpp> | |||
#include <range/v3/view/transform.hpp> | |||
#include <args.hxx> | |||
#include <dds/3rd/args.hxx> | |||
#include <filesystem> | |||
#include <iostream> | |||
@@ -120,8 +121,8 @@ struct cli_repo { | |||
auto same_name | |||
= [](auto&& a, auto&& b) { return a.manifest.name == b.manifest.name; }; | |||
auto all_sdists = repo.load_sdists(); | |||
auto grp_by_name = all_sdists // | |||
auto all = repo.iter_sdists(); | |||
auto grp_by_name = all // | |||
| ranges::views::group_by(same_name) // | |||
| ranges::views::transform(ranges::to_vector) // | |||
| ranges::views::transform([](auto&& grp) { | |||
@@ -132,8 +133,7 @@ struct cli_repo { | |||
for (const auto& [name, grp] : grp_by_name) { | |||
spdlog::info("{}:", name); | |||
for (const dds::sdist& sd : grp) { | |||
spdlog::info(" - {}", | |||
sd.manifest.version.to_string()); | |||
spdlog::info(" - {}", sd.manifest.version.to_string()); | |||
} | |||
} | |||
@@ -189,14 +189,14 @@ struct cli_sdist { | |||
common_flags _common{cmd}; | |||
common_project_flags project{cmd}; | |||
args::Group sdist_group{cmd, "`sdist` commands"}; | |||
struct { | |||
cli_sdist& parent; | |||
args::Command cmd{parent.sdist_group, "create", "Create a source distribution"}; | |||
common_project_flags project{cmd}; | |||
path_flag out{cmd, | |||
"out", | |||
"The destination of the source distribution", | |||
@@ -210,7 +210,7 @@ struct cli_sdist { | |||
int run() { | |||
dds::sdist_params params; | |||
params.project_dir = parent.project.root.Get(); | |||
params.project_dir = project.root.Get(); | |||
params.dest_path = out.Get(); | |||
params.force = force.Get(); | |||
dds::create_sdist(params); | |||
@@ -224,6 +224,8 @@ struct cli_sdist { | |||
"export", | |||
"Export a source distribution to a repository"}; | |||
common_project_flags project{cmd}; | |||
repo_where_flag repo_where{cmd}; | |||
args::Flag force{cmd, | |||
"replace-if-exists", | |||
@@ -238,7 +240,7 @@ struct cli_sdist { | |||
dds::fs::remove_all(tmp_sdist); | |||
} | |||
dds::sdist_params params; | |||
params.project_dir = parent.project.root.Get(); | |||
params.project_dir = project.root.Get(); | |||
params.dest_path = tmp_sdist; | |||
params.force = true; | |||
auto sdist = dds::create_sdist(params); | |||
@@ -292,7 +294,7 @@ struct cli_build { | |||
path_flag lm_index{cmd, | |||
"lm_index", | |||
"Path to a libman index (usually INDEX.lmi)", | |||
{"--lm-index", 'I'}, | |||
{"lm-index", 'I'}, | |||
dds::fs::path()}; | |||
args::Flag enable_warnings{cmd, | |||
@@ -381,6 +383,55 @@ struct cli_deps { | |||
} | |||
} ls{*this}; | |||
struct { | |||
cli_deps& parent; | |||
args::Command cmd{parent.deps_group, | |||
"get", | |||
"Ensure we have local copies of the project dependencies"}; | |||
common_flags _common{cmd}; | |||
repo_where_flag repo_where{cmd}; | |||
path_flag remote_listing_file{ | |||
cmd, | |||
"remote-listing", | |||
"Path to a file containing listing of remote sdists and how to obtain them", | |||
{'R', "remote-list"}, | |||
"remote.dds"}; | |||
int run() { | |||
auto man = parent.load_package_manifest(); | |||
auto rd = dds::remote_directory::load_from_file(remote_listing_file.Get()); | |||
bool failed = false; | |||
dds::repository::with_repository( // | |||
repo_where.Get(), | |||
dds::repo_flags::write_lock | dds::repo_flags::create_if_absent, | |||
[&](dds::repository repo) { | |||
for (auto& dep : man.dependencies) { | |||
auto exists = !!repo.find(dep.name, dep.version); | |||
if (!exists) { | |||
spdlog::info("Pull remote: {} {}", dep.name, dep.version.to_string()); | |||
auto opt_remote = rd.find(dep.name, dep.version); | |||
if (opt_remote) { | |||
auto tsd = opt_remote->pull_sdist(); | |||
repo.add_sdist(tsd.sdist, dds::if_exists::ignore); | |||
} else { | |||
spdlog::error("No remote listing for {} {}", | |||
dep.name, | |||
dep.version.to_string()); | |||
failed = true; | |||
} | |||
} else { | |||
spdlog::info("Okay: {} {}", dep.name, dep.version.to_string()); | |||
} | |||
} | |||
}); | |||
if (failed) { | |||
return 1; | |||
} | |||
return 0; | |||
} | |||
} get{*this}; | |||
struct { | |||
cli_deps& parent; | |||
args::Command cmd{parent.deps_group, "build", "Build project dependencies"}; | |||
@@ -435,6 +486,8 @@ struct cli_deps { | |||
return ls.run(); | |||
} else if (build.cmd) { | |||
return build.run(); | |||
} else if (get.cmd) { | |||
return get.run(); | |||
} | |||
std::terminate(); | |||
} |
@@ -43,41 +43,13 @@ dependency dependency::parse_depends_string(std::string_view str) { | |||
} | |||
std::vector<sdist> dds::find_dependencies(const repository& repo, const dependency& dep) { | |||
auto all_dists = repo.load_sdists(); | |||
detail::sort_sdists(all_dists); | |||
std::vector<sdist> acc; | |||
detail::do_find_deps(all_dists, dep, acc); | |||
detail::do_find_deps(repo, dep, acc); | |||
return acc; | |||
} | |||
auto tie_sdist(const sdist& sd) { | |||
return std::tuple(sd.manifest.name, sd.manifest.version.to_string()); | |||
} | |||
auto sdist_compare | |||
= [](const sdist& lhs, const sdist& rhs) { return tie_sdist(lhs) < tie_sdist(rhs); }; | |||
void detail::sort_sdists(std::vector<sdist>& sd) { std::sort(sd.begin(), sd.end(), sdist_compare); } | |||
namespace { | |||
const sdist* | |||
get_sdist(const std::vector<sdist>& sorted_sds, std::string_view name, std::string_view version) { | |||
auto found | |||
= std::partition_point(sorted_sds.begin(), sorted_sds.end(), [&](const auto& candidate) { | |||
return tie_sdist(candidate) < std::tie(name, version); | |||
}); | |||
if (found->manifest.name == name && found->manifest.version.to_string() == version) { | |||
return &*found; | |||
} | |||
return nullptr; | |||
} | |||
} // namespace | |||
void detail::do_find_deps(const std::vector<sdist>& sdists, | |||
const dependency& dep, | |||
std::vector<sdist>& sd) { | |||
auto sdist_opt = get_sdist(sdists, dep.name, dep.version.to_string()); | |||
void detail::do_find_deps(const repository& repo, const dependency& dep, std::vector<sdist>& sd) { | |||
auto sdist_opt = repo.find(dep.name, dep.version); | |||
if (!sdist_opt) { | |||
throw std::runtime_error( | |||
fmt::format("Unable to find dependency to satisfy requirement: {} {}", | |||
@@ -86,7 +58,7 @@ void detail::do_find_deps(const std::vector<sdist>& sdists, | |||
} | |||
const sdist& new_sd = *sdist_opt; | |||
for (const auto& inner_dep : new_sd.manifest.dependencies) { | |||
do_find_deps(sdists, inner_dep, sd); | |||
do_find_deps(repo, inner_dep, sd); | |||
} | |||
auto insert_point = std::partition_point(sd.begin(), sd.end(), [&](const sdist& cand) { | |||
return cand.path < new_sd.path; | |||
@@ -218,7 +190,7 @@ fs::path generate_lml(const library_plan& lib, path_ref libdir, const build_env& | |||
} | |||
auto pub_inc_dir = lib.source_root() / "include"; | |||
auto src_dir = lib.source_root() / "src"; | |||
if (fs::exists(src_dir)) { | |||
if (!fs::exists(pub_inc_dir)) { | |||
pub_inc_dir = src_dir; | |||
} | |||
kvs.emplace_back("Include-Path", pub_inc_dir.string()); |
@@ -1,7 +1,6 @@ | |||
#pragma once | |||
#include <dds/build/plan/full.hpp> | |||
#include <dds/repo/repo.hpp> | |||
#include <semver/version.hpp> | |||
@@ -10,6 +9,7 @@ | |||
namespace dds { | |||
struct sdist; | |||
class repository; | |||
enum class version_strength { | |||
exact, | |||
@@ -27,8 +27,7 @@ struct dependency { | |||
namespace detail { | |||
void do_find_deps(const std::vector<sdist>&, const dependency& dep, std::vector<sdist>& acc); | |||
void sort_sdists(std::vector<sdist>& sds); | |||
void do_find_deps(const repository&, const dependency& dep, std::vector<sdist>& acc); | |||
} // namespace detail | |||
@@ -37,10 +36,8 @@ std::vector<sdist> find_dependencies(const repository& repo, const dependency& d | |||
template <typename Iter, typename Snt> | |||
inline std::vector<sdist> find_dependencies(const repository& repo, Iter it, Snt stop) { | |||
std::vector<sdist> acc; | |||
auto all_sds = repo.load_sdists(); | |||
detail::sort_sdists(all_sds); | |||
while (it != stop) { | |||
detail::do_find_deps(all_sds, *it++, acc); | |||
detail::do_find_deps(repo, *it++, acc); | |||
} | |||
return acc; | |||
} | |||
@@ -49,4 +46,4 @@ build_plan create_deps_build_plan(const std::vector<sdist>& deps, build_env_ref | |||
void write_libman_index(path_ref where, const build_plan& plan, const build_env& env); | |||
} // namespace dds | |||
} // namespace dds |
@@ -0,0 +1,154 @@ | |||
#include "./remote.hpp" | |||
#include <dds/deps.hpp> | |||
#include <dds/proc.hpp> | |||
#include <dds/repo/repo.hpp> | |||
#include <dds/sdist.hpp> | |||
#include <dds/temp.hpp> | |||
#include <dds/toolchain/toolchain.hpp> | |||
#include <spdlog/spdlog.h> | |||
#include <libman/parse.hpp> | |||
#include <algorithm> | |||
using namespace dds; | |||
namespace { | |||
struct read_listing_item { | |||
std::string_view _key; | |||
std::set<remote_listing, remote_listing_compare_t>& out; | |||
bool operator()(std::string_view context, std::string_view key, std::string_view value) { | |||
if (key != _key) { | |||
return false; | |||
} | |||
auto nested = lm::nested_kvlist::parse(value); | |||
auto name_ver_pair = split_shell_string(nested.primary); | |||
if (name_ver_pair.size() != 2) { | |||
throw std::runtime_error( | |||
fmt::format("{}: Invalid Remote-Package identity: '{}'", context, nested.primary)); | |||
} | |||
auto name = name_ver_pair[0]; | |||
auto version = semver::version::parse(name_ver_pair[1]); | |||
put_listing(context, name, version, nested.pairs); | |||
return true; | |||
} | |||
void put_listing(std::string_view context, | |||
std::string name, | |||
semver::version version, | |||
const lm::pair_list& pairs) { | |||
if (pairs.find("git")) { | |||
std::string url; | |||
std::string ref; | |||
std::optional<lm::usage> auto_id; | |||
lm::read(fmt::format("{}: Parsing Git remote listing", context), | |||
pairs, | |||
lm::read_required("url", url), | |||
lm::read_required("ref", ref), | |||
lm::read_check_eq("git", ""), | |||
lm::read_opt("auto", auto_id, &lm::split_usage_string), | |||
lm::reject_unknown()); | |||
auto did_insert = out.emplace(remote_listing{std::move(name), | |||
version, | |||
git_remote_listing{url, ref, auto_id}}) | |||
.second; | |||
if (!did_insert) { | |||
spdlog::warn("Duplicate remote package defintion for {} {}", | |||
name, | |||
version.to_string()); | |||
} | |||
} else { | |||
throw std::runtime_error(fmt::format("Unable to determine remote type of package {} {}", | |||
name, | |||
version.to_string())); | |||
} | |||
} | |||
}; | |||
temporary_sdist do_pull_sdist(const remote_listing& listing, const git_remote_listing& git) { | |||
auto tmpdir = dds::temporary_dir::create(); | |||
using namespace std::literals; | |||
spdlog::info("Cloning repository: {} [{}] ...", git.url, git.ref); | |||
auto command = {"git"s, | |||
"clone"s, | |||
"--depth=1"s, | |||
"--branch"s, | |||
git.ref, | |||
git.url, | |||
tmpdir.path().generic_string()}; | |||
auto git_res = run_proc(command); | |||
if (!git_res.okay()) { | |||
throw std::runtime_error( | |||
fmt::format("Git clone operation failed [Git command: {}] [Exitted {}]:\n{}", | |||
quote_command(command), | |||
git_res.retc, | |||
git_res.output)); | |||
} | |||
spdlog::info("Create sdist from clone ..."); | |||
if (git.auto_lib.has_value()) { | |||
spdlog::info("Generating library data automatically"); | |||
auto pkg_strm = dds::open(tmpdir.path() / "package.dds", std::ios::binary | std::ios::out); | |||
pkg_strm << "Name: " << listing.name << '\n' // | |||
<< "Version: " << listing.version.to_string() << '\n' // | |||
<< "Namespace: " << git.auto_lib->namespace_; | |||
auto lib_strm = dds::open(tmpdir.path() / "library.dds", std::ios::binary | std::ios::out); | |||
lib_strm << "Name: " << git.auto_lib->name; | |||
} | |||
sdist_params params; | |||
params.project_dir = tmpdir.path(); | |||
auto sd_tmp_dir = dds::temporary_dir::create(); | |||
params.dest_path = sd_tmp_dir.path(); | |||
params.force = true; | |||
auto sd = create_sdist(params); | |||
return {sd_tmp_dir, sd}; | |||
} | |||
} // namespace | |||
temporary_sdist remote_listing::pull_sdist() const { | |||
auto tsd = visit([&](auto&& actual) { return do_pull_sdist(*this, actual); }); | |||
if (tsd.sdist.manifest.name != name) { | |||
throw std::runtime_error( | |||
fmt::format("The name in the generated sdist ('{}') does not match the name listed in " | |||
"the remote listing file (expected '{}')", | |||
tsd.sdist.manifest.name, | |||
name)); | |||
} | |||
if (tsd.sdist.manifest.version != version) { | |||
throw std::runtime_error( | |||
fmt::format("The version of the generated sdist is '{}', which does not match the " | |||
"expected version '{}'", | |||
tsd.sdist.manifest.version.to_string(), | |||
version.to_string())); | |||
} | |||
return tsd; | |||
} | |||
remote_directory remote_directory::load_from_file(path_ref filepath) { | |||
auto kvs = lm::parse_file(filepath); | |||
listing_set listings; | |||
lm::read(fmt::format("Loading remote package listing from {}", filepath.string()), | |||
kvs, | |||
read_listing_item{"Remote-Package", listings}, | |||
lm::reject_unknown()); | |||
return {std::move(listings)}; | |||
} | |||
const remote_listing* remote_directory::find(std::string_view name, semver::version ver) const | |||
noexcept { | |||
auto found = _remotes.find(std::tie(name, ver)); | |||
if (found == _remotes.end()) { | |||
return nullptr; | |||
} | |||
return &*found; | |||
} | |||
void remote_directory::ensure_all_local(const repository&) const { | |||
spdlog::critical("Dependency download is not fully implemented!"); | |||
} |
@@ -0,0 +1,75 @@ | |||
#pragma once | |||
#include <dds/util/fs.hpp> | |||
#include <dds/sdist.hpp> | |||
#include <dds/temp.hpp> | |||
#include <semver/version.hpp> | |||
#include <libman/library.hpp> | |||
#include <set> | |||
#include <string> | |||
#include <tuple> | |||
#include <utility> | |||
#include <variant> | |||
namespace dds { | |||
struct temporary_sdist { | |||
temporary_dir tmpdir; | |||
dds::sdist sdist; | |||
}; | |||
struct git_remote_listing { | |||
std::string url; | |||
std::string ref; | |||
std::optional<lm::usage> auto_lib; | |||
void clone(path_ref path) const; | |||
}; | |||
struct remote_listing { | |||
std::string name; | |||
semver::version version; | |||
std::variant<git_remote_listing> remote; | |||
template <typename Func> | |||
decltype(auto) visit(Func&& fn) const { | |||
return std::visit(std::forward<Func>(fn), remote); | |||
} | |||
temporary_sdist pull_sdist() const; | |||
}; | |||
inline constexpr struct remote_listing_compare_t { | |||
using is_transparent = int; | |||
auto tie(const remote_listing& rl) const { return std::tie(rl.name, rl.version); } | |||
bool operator()(const remote_listing& lhs, const remote_listing& rhs) const { | |||
return tie(lhs) < tie(rhs); | |||
} | |||
template <typename Name, typename Version> | |||
bool operator()(const remote_listing& lhs, const std::tuple<Name, Version>& rhs) const { | |||
return tie(lhs) < rhs; | |||
} | |||
template <typename Name, typename Version> | |||
bool operator()(const std::tuple<Name, Version>& lhs, const remote_listing& rhs) const { | |||
return lhs < tie(rhs); | |||
} | |||
} remote_listing_compare; | |||
class remote_directory { | |||
using listing_set = std::set<remote_listing, remote_listing_compare_t>; | |||
listing_set _remotes; | |||
remote_directory(listing_set s) | |||
: _remotes(std::move(s)) {} | |||
public: | |||
static remote_directory load_from_file(path_ref); | |||
void ensure_all_local(const class repository& repo) const; | |||
const remote_listing* find(std::string_view name, semver::version ver) const noexcept; | |||
}; | |||
} // namespace dds |
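// Typical use of this header (illustrative sketch, not a definitive API example):
//
//   auto remotes = dds::remote_directory::load_from_file("remote.dds");
//   auto* listing = remotes.find("spdlog", semver::version::parse("1.4.2"));
//   if (listing) {
//       dds::temporary_sdist tsd = listing->pull_sdist();
//   }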
@@ -6,6 +6,7 @@ | |||
#include <spdlog/spdlog.h> | |||
#include <range/v3/range/conversion.hpp> | |||
#include <range/v3/view/filter.hpp> | |||
#include <range/v3/view/transform.hpp> | |||
@@ -13,26 +14,62 @@ using namespace dds; | |||
using namespace ranges; | |||
namespace { | |||
auto load_sdists(path_ref root) { | |||
using namespace ranges; | |||
using namespace ranges::views; | |||
auto try_read_sdist = [](path_ref p) -> std::optional<sdist> { | |||
if (starts_with(p.filename().string(), ".")) { | |||
return std::nullopt; | |||
} | |||
try { | |||
return sdist::from_directory(p); | |||
} catch (const std::runtime_error& e) { | |||
spdlog::error("Failed to load source distribution from directory '{}': {}", | |||
p.string(), | |||
e.what()); | |||
return std::nullopt; | |||
} | |||
}; | |||
return | |||
// Get the top-level `name-version` dirs | |||
fs::directory_iterator(root) // | |||
// // Convert each dir into an `sdist` object | |||
| transform(try_read_sdist) // | |||
// // Drop items that failed to load | |||
| filter([](auto&& opt) { return opt.has_value(); }) // | |||
| transform([](auto&& opt) { return *opt; }) // | |||
; | |||
} | |||
} // namespace | |||
void repository::_log_blocking(path_ref dirpath) noexcept { | |||
spdlog::warn("Another process has the repository directory locked [{}]", dirpath.string()); | |||
spdlog::warn("Waiting for repository to be released..."); | |||
} | |||
void repository::_init_repo_dir(path_ref dirpath) noexcept { | |||
fs::create_directories(dirpath); | |||
} | |||
void repository::_init_repo_dir(path_ref dirpath) noexcept { fs::create_directories(dirpath); } | |||
fs::path repository::default_local_path() noexcept { return dds_data_dir() / "repo"; } | |||
repository repository::open_for_directory(path_ref dirpath) { | |||
auto dist_dir = dirpath; | |||
auto entries = fs::directory_iterator(dist_dir) | to_vector; | |||
return {dirpath}; | |||
repository repository::_open_for_directory(bool writeable, path_ref dirpath) { | |||
sdist_set entries = load_sdists(dirpath) | to<sdist_set>(); | |||
return {writeable, dirpath, std::move(entries)}; | |||
} | |||
void repository::add_sdist(const sdist& sd, if_exists ife_action) { | |||
auto sd_dest | |||
= _root / fmt::format("{}_{}", sd.manifest.name, sd.manifest.version.to_string()); | |||
if (!_write_enabled) { | |||
spdlog::critical( | |||
"DDS attempted to write into a repository that wasn't opened with a write-lock. This " | |||
"is a hard bug and should be reported. For the safety and integrity of the local " | |||
"repository, we'll hard-exit immediately."); | |||
std::terminate(); | |||
} | |||
auto sd_dest = _root / fmt::format("{}_{}", sd.manifest.name, sd.manifest.version.to_string()); | |||
if (fs::exists(sd_dest)) { | |||
auto msg = fmt::format("Source distribution '{}' is already available in the local repo", | |||
sd.path.string()); | |||
@@ -59,41 +96,10 @@ void repository::add_sdist(const sdist& sd, if_exists ife_action) { | |||
spdlog::info("Source distribution '{}' successfully exported", sd.ident()); | |||
} | |||
std::vector<sdist> repository::load_sdists() const { | |||
using namespace ranges; | |||
using namespace ranges::views; | |||
auto try_read_sdist = [](path_ref p) -> std::optional<sdist> { | |||
if (starts_with(p.filename().string(), ".")) { | |||
return std::nullopt; | |||
} | |||
try { | |||
return sdist::from_directory(p); | |||
} catch (const std::runtime_error& e) { | |||
spdlog::error("Failed to load source distribution from directory '{}': {}", | |||
p.string(), | |||
e.what()); | |||
return std::nullopt; | |||
} | |||
}; | |||
return | |||
// Get the top-level `name-version` dirs | |||
fs::directory_iterator(_root) // | |||
// // Convert each dir into an `sdist` object | |||
| transform(try_read_sdist) // | |||
// // Drop items that failed to load | |||
| filter([](auto&& opt) { return opt.has_value(); }) // | |||
| transform([](auto&& opt) { return *opt; }) // | |||
| to_vector // | |||
; | |||
} | |||
std::optional<sdist> repository::get_sdist(std::string_view name, std::string_view version) const { | |||
auto expect_path = _root / fmt::format("{}_{}", name, version); | |||
if (!fs::is_directory(expect_path)) { | |||
return std::nullopt; | |||
const sdist* repository::find(std::string_view name, semver::version ver) const noexcept { | |||
auto found = _sdists.find(std::tie(name, ver)); | |||
if (found == _sdists.end()) { | |||
return nullptr; | |||
} | |||
return sdist::from_directory(expect_path); | |||
return &*found; | |||
} |
@@ -1,17 +1,17 @@ | |||
#pragma once | |||
#include <dds/sdist.hpp> | |||
#include <dds/util/flock.hpp> | |||
#include <dds/util/fs.hpp> | |||
#include <functional> | |||
#include <optional> | |||
#include <set> | |||
#include <shared_mutex> | |||
#include <vector> | |||
namespace dds { | |||
struct sdist; | |||
enum repo_flags { | |||
none = 0b00, | |||
read = none, | |||
@@ -30,13 +30,20 @@ inline repo_flags operator|(repo_flags a, repo_flags b) { | |||
} | |||
class repository { | |||
fs::path _root; | |||
using sdist_set = std::set<sdist, sdist_compare_t>; | |||
bool _write_enabled = false; | |||
fs::path _root; | |||
sdist_set _sdists; | |||
repository(path_ref p) | |||
: _root(p) {} | |||
repository(bool writeable, path_ref p, sdist_set sds) | |||
: _write_enabled(writeable) | |||
, _root(p) | |||
, _sdists(std::move(sds)) {} | |||
static void _log_blocking(path_ref dir) noexcept; | |||
static void _init_repo_dir(path_ref dir) noexcept; | |||
static void _log_blocking(path_ref dir) noexcept; | |||
static void _init_repo_dir(path_ref dir) noexcept; | |||
static repository _open_for_directory(bool writeable, path_ref); | |||
public: | |||
template <typename Func> | |||
@@ -51,7 +58,9 @@ public: | |||
std::shared_lock shared_lk{mut, std::defer_lock}; | |||
std::unique_lock excl_lk{mut, std::defer_lock}; | |||
if (flags & repo_flags::write_lock) { | |||
bool writeable = (flags & repo_flags::write_lock) != repo_flags::none; | |||
if (writeable) { | |||
if (!excl_lk.try_lock()) { | |||
_log_blocking(dirpath); | |||
excl_lk.lock(); | |||
@@ -63,16 +72,29 @@ public: | |||
} | |||
} | |||
return std::invoke((Func &&) fn, open_for_directory(dirpath)); | |||
auto repo = _open_for_directory(writeable, dirpath); | |||
return std::invoke((Func &&) fn, std::move(repo)); | |||
} | |||
static repository open_for_directory(path_ref); | |||
static fs::path default_local_path() noexcept; | |||
void add_sdist(const sdist&, if_exists = if_exists::throw_exc); | |||
std::optional<sdist> get_sdist(std::string_view name, std::string_view version) const; | |||
std::vector<sdist> load_sdists() const; | |||
void add_sdist(const sdist&, if_exists = if_exists::throw_exc); | |||
const sdist* find(std::string_view name, semver::version ver) const noexcept; | |||
auto iter_sdists() const noexcept { | |||
class ret { | |||
const sdist_set& s; | |||
public: | |||
ret(const sdist_set& s) | |||
: s(s) {} | |||
auto begin() const { return s.cbegin(); } | |||
auto end() const { return s.cend(); } | |||
} r{_sdists}; | |||
return r; | |||
} | |||
}; | |||
} // namespace dds |
@@ -1,5 +1,7 @@ | |||
#pragma once | |||
#include <tuple> | |||
#include <dds/package_manifest.hpp> | |||
#include <dds/util/fs.hpp> | |||
@@ -28,6 +30,22 @@ struct sdist { | |||
} | |||
}; | |||
inline constexpr struct sdist_compare_t { | |||
bool operator()(const sdist& lhs, const sdist& rhs) const { | |||
return std::tie(lhs.manifest.name, lhs.manifest.version) | |||
< std::tie(rhs.manifest.name, rhs.manifest.version); | |||
} | |||
template <typename Name, typename Version> | |||
bool operator()(const sdist& lhs, const std::tuple<Name, Version>& rhs) const { | |||
return std::tie(lhs.manifest.name, lhs.manifest.version) < rhs; | |||
} | |||
template <typename Name, typename Version> | |||
bool operator()(const std::tuple<Name, Version>& lhs, const sdist& rhs) const { | |||
return lhs < std::tie(rhs.manifest.name, rhs.manifest.version); | |||
} | |||
using is_transparent = int; | |||
} sdist_compare; | |||
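// Example (illustrative): the transparent comparator lets a
// std::set<sdist, sdist_compare_t> be searched by (name, version) without
// constructing an sdist, as repository::find does:
//
//   auto found = sdists.find(std::tie(name, version));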
sdist create_sdist(const sdist_params&); | |||
sdist create_sdist_in_dir(path_ref, const sdist_params&); | |||
@@ -3,6 +3,7 @@ | |||
#include <dds/util/string.hpp> | |||
#include <algorithm> | |||
#include <cassert> | |||
#include <optional> | |||
#include <vector> | |||
@@ -10,15 +11,16 @@ using namespace dds; | |||
std::optional<source_kind> dds::infer_source_kind(path_ref p) noexcept { | |||
static std::vector<std::string_view> header_exts = { | |||
".h", | |||
".H", | |||
".H++", | |||
".h", | |||
".h++", | |||
".hh", | |||
".hpp", | |||
".hxx", | |||
".inl", | |||
}; | |||
assert(std::is_sorted(header_exts.begin(), header_exts.end())); | |||
static std::vector<std::string_view> source_exts = { | |||
".C", | |||
".c", | |||
@@ -27,6 +29,7 @@ std::optional<source_kind> dds::infer_source_kind(path_ref p) noexcept { | |||
".cpp", | |||
".cxx", | |||
}; | |||
assert(std::is_sorted(source_exts.begin(), source_exts.end())); | |||
auto leaf = p.filename(); | |||
auto ext_found |
@@ -11,9 +11,6 @@ library library::from_file(path_ref fpath) { | |||
library ret; | |||
std::vector<std::string> uses_strs; | |||
std::vector<std::string> links_strs; | |||
std::string _type_; | |||
read(fmt::format("Reading library manifest file '{}'", fpath.string()), | |||
pairs, | |||
@@ -23,18 +20,10 @@ library library::from_file(path_ref fpath) { | |||
read_opt("Path", ret.linkable_path), | |||
read_accumulate("Include-Path", ret.include_paths), | |||
read_accumulate("Preprocessor-Define", ret.preproc_defs), | |||
read_accumulate("Uses", uses_strs), | |||
read_accumulate("Links", links_strs), | |||
read_accumulate("Uses", ret.uses, &split_usage_string), | |||
read_accumulate("Links", ret.links, &split_usage_string), | |||
read_accumulate("Special-Uses", ret.special_uses)); | |||
for (auto&& uses_str : uses_strs) { | |||
ret.uses.push_back(split_usage_string(uses_str)); | |||
} | |||
for (auto&& links_str : links_strs) { | |||
ret.links.push_back(split_usage_string(links_str)); | |||
} | |||
auto make_absolute = [&](path_ref p) { return fpath.parent_path() / p; }; | |||
std::transform(ret.include_paths.begin(), | |||
ret.include_paths.end(), |
@@ -81,4 +81,33 @@ void lm::write_pairs(fs::path fpath, const std::vector<pair>& pairs) { | |||
for (auto& pair : pairs) { | |||
fstream << pair.key << ": " << pair.value << '\n'; | |||
} | |||
} | |||
nested_kvlist nested_kvlist::parse(const std::string_view line_) { | |||
const auto line = trim_view(line_); | |||
const auto semi_pos = line.find(';'); | |||
const auto primary = trim_view(line.substr(0, semi_pos)); | |||
auto tail = semi_pos == line.npos ? ""sv : trim_view(line.substr(semi_pos + 1)); | |||
std::vector<pair> pairs; | |||
while (!tail.empty()) { | |||
const auto space_pos = tail.find(' '); | |||
const auto item = tail.substr(0, space_pos); | |||
const auto eq_pos = item.find('='); | |||
if (eq_pos == item.npos) { | |||
pairs.emplace_back(item, ""sv); | |||
} else { | |||
const auto key = item.substr(0, eq_pos); | |||
const auto value = item.substr(eq_pos + 1); | |||
pairs.emplace_back(key, value); | |||
} | |||
if (space_pos == tail.npos) { | |||
break; | |||
} | |||
tail = trim_view(tail.substr(space_pos + 1)); | |||
} | |||
return nested_kvlist{std::string(primary), pair_list{std::move(pairs)}}; | |||
} |
@@ -113,6 +113,20 @@ inline void write_pairs(const std::filesystem::path& fpath, const pair_list& pai | |||
write_pairs(fpath, pairs.items()); | |||
} | |||
struct nested_kvlist { | |||
std::string primary; | |||
pair_list pairs; | |||
static nested_kvlist parse(std::string_view s); | |||
}; | |||
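// Example (illustrative, mirrors the unit tests):
//
//   auto kv = nested_kvlist::parse("Foo bar; baz=meow quux");
//   // kv.primary == "Foo bar"
//   // kv.pairs.find("baz")->value == "meow"
//   // kv.pairs.find("quux")->value == ""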
struct unchanged { | |||
template <typename Item> | |||
auto operator()(Item&& item) const { | |||
return item; | |||
} | |||
}; | |||
template <typename What> | |||
class read_required { | |||
std::string_view _key; | |||
@@ -145,16 +159,18 @@ public: | |||
} | |||
}; | |||
template <typename T> | |||
template <typename T, typename Transform = unchanged> | |||
class read_opt { | |||
std::string_view _key; | |||
T& _ref; | |||
bool _did_read = false; | |||
Transform _tr; | |||
public: | |||
read_opt(std::string_view key, T& ref) | |||
read_opt(std::string_view key, T& ref, Transform tr = unchanged()) | |||
: _key(key) | |||
, _ref(ref) {} | |||
, _ref(ref) | |||
, _tr(std::move(tr)) {} | |||
int operator()(std::string_view context, std::string_view key, std::string_view value) { | |||
if (key != _key) { | |||
@@ -164,7 +180,7 @@ public: | |||
throw std::runtime_error(std::string(context) + ": Duplicated key '" + std::string(key) | |||
+ "' is not allowed."); | |||
} | |||
_ref = T(value); | |||
_ref = T(_tr(value)); | |||
return 1; | |||
} | |||
}; | |||
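// Example (illustrative): read an optional key and transform its value during
// parsing, as the remote listing parser does for the `auto` pair:
//
//   std::optional<lm::usage> auto_id;
//   lm::read(context, pairs, lm::read_opt("auto", auto_id, &lm::split_usage_string));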
@@ -202,6 +218,30 @@ public: | |||
} | |||
}; | |||
struct read_empty_set_true { | |||
std::string_view _key; | |||
bool& _bool; | |||
bool _seen = false; | |||
bool operator()(std::string_view context, std::string_view key, std::string_view value) { | |||
if (key != _key) { | |||
return false; | |||
} | |||
if (value != "") { | |||
            throw std::runtime_error(std::string(context) + ": Key '" + std::string(key)
                                     + "' does not expect a value (Got '" + std::string(value)
                                     + "').");
} | |||
if (_seen) { | |||
throw std::runtime_error(std::string(context) + ": Duplicated key '" + std::string(key) | |||
+ "'"); | |||
} | |||
_bool = true; | |||
_seen = true; | |||
return true; | |||
} | |||
}; | |||
class read_check_eq { | |||
std::string_view _key; | |||
std::string_view _expect; | |||
@@ -224,19 +264,26 @@ public: | |||
} | |||
}; | |||
template <typename Container> | |||
template <typename Container, typename Transform = unchanged> | |||
class read_accumulate { | |||
std::string_view _key; | |||
Container& _items; | |||
Transform _tr; | |||
public: | |||
read_accumulate(std::string_view key, Container& c, Transform tr) | |||
: _key(key) | |||
, _items(c) | |||
, _tr(std::move(tr)) {} | |||
read_accumulate(std::string_view key, Container& c) | |||
: _key(key) | |||
, _items(c) {} | |||
, _items(c) | |||
, _tr(unchanged()) {} | |||
int operator()(std::string_view, std::string_view key, std::string_view value) const { | |||
if (key == _key) { | |||
_items.emplace_back(value); | |||
_items.emplace_back(_tr(value)); | |||
return 1; | |||
} | |||
return 0; |
@@ -80,9 +80,52 @@ void test_multi() { | |||
CHECK(!iter); | |||
} | |||
void test_nested_kvlist() { | |||
auto check_1 = [](auto str) { | |||
auto result = nested_kvlist::parse(str); | |||
CHECK(result.primary == "Foo"); | |||
CHECK(result.pairs.size() == 1); | |||
REQUIRE(result.pairs.find("bar")); | |||
CHECK(result.pairs.find("bar")->value == "baz"); | |||
}; | |||
check_1("Foo; bar=baz"); | |||
check_1("Foo ; bar=baz"); | |||
check_1("Foo ; bar=baz"); | |||
check_1("Foo ; bar=baz "); | |||
check_1("Foo;bar=baz "); | |||
check_1("Foo;bar=baz"); | |||
auto check_2 = [](auto str) { | |||
auto result = nested_kvlist::parse(str); | |||
CHECK(result.primary == "Foo"); | |||
CHECK(result.pairs.size() == 0); | |||
}; | |||
check_2("Foo"); | |||
check_2("Foo;"); | |||
check_2("Foo ;"); | |||
check_2("Foo ; "); | |||
check_2("Foo; "); | |||
auto check_3 = [](auto str) { | |||
auto result = nested_kvlist::parse(str); | |||
CHECK(result.primary == "Foo bar"); | |||
CHECK(result.pairs.size() == 2); | |||
REQUIRE(result.pairs.find("baz")); | |||
CHECK(result.pairs.find("baz")->value == "meow"); | |||
REQUIRE(result.pairs.find("quux")); | |||
CHECK(result.pairs.find("quux")->value == ""); | |||
}; | |||
check_3("Foo bar; baz=meow quux"); | |||
check_3("Foo bar ; baz=meow quux="); | |||
check_3("Foo bar ; quux= baz=meow"); | |||
check_3("Foo bar ;quux= baz=meow"); | |||
} | |||
void run_tests() { | |||
test_simple(); | |||
test_multi(); | |||
test_nested_kvlist(); | |||
} | |||
DDS_TEST_MAIN; |
@@ -0,0 +1,5 @@ | |||
import sys | |||
from pathlib import Path | |||
sys.path.append(str(Path(__file__).absolute().parent.parent / 'tools')) | |||
from .dds import DDS, DDSFixtureParams, scoped_dds, dds_fixture_conf, dds_fixture_conf_1 |
@@ -1 +0,0 @@ | |||
Name: app_only |
@@ -1,2 +0,0 @@ | |||
Name: dds-app_only-test | |||
Version: 0.0.0 |
@@ -1 +0,0 @@ | |||
int main() { return 0; } |
@@ -0,0 +1,14 @@ | |||
from contextlib import ExitStack | |||
from tests import DDS | |||
from tests.fileutil import set_contents | |||
def test_lib_with_just_app(dds: DDS, scope: ExitStack): | |||
scope.enter_context( | |||
set_contents( | |||
dds.source_root / 'src/foo.main.cpp', | |||
b'int main() {}', | |||
)) | |||
dds.build() | |||
assert (dds.build_dir / f'foo{dds.exe_suffix}').is_file() |
@@ -0,0 +1,43 @@ | |||
from contextlib import contextmanager | |||
from tests import DDS | |||
from tests.fileutil import ensure_dir, set_contents | |||
def test_build_empty(dds: DDS): | |||
assert not dds.source_root.exists() | |||
dds.scope.enter_context(ensure_dir(dds.source_root)) | |||
dds.build() | |||
def test_build_simple(dds: DDS): | |||
dds.scope.enter_context( | |||
set_contents(dds.source_root / 'src/f.cpp', b'void foo() {}')) | |||
dds.build() | |||
def basic_pkg_dds(dds: DDS): | |||
return set_contents( | |||
dds.source_root / 'package.dds', b''' | |||
Name: test-pkg | |||
Version: 0.2.2 | |||
''') | |||
def test_empty_with_pkg_dds(dds: DDS): | |||
dds.scope.enter_context(basic_pkg_dds(dds)) | |||
dds.build() | |||
def test_empty_with_lib_dds(dds: DDS): | |||
dds.scope.enter_context(basic_pkg_dds(dds)) | |||
dds.build() | |||
def test_empty_sdist_create(dds: DDS): | |||
dds.scope.enter_context(basic_pkg_dds(dds)) | |||
dds.sdist_create() | |||
def test_empty_sdist_export(dds: DDS): | |||
dds.scope.enter_context(basic_pkg_dds(dds)) | |||
dds.sdist_export() |
@@ -0,0 +1,30 @@ | |||
from contextlib import ExitStack | |||
from tests import DDS | |||
from tests.fileutil import set_contents | |||
def test_simple_lib(dds: DDS, scope: ExitStack): | |||
scope.enter_context( | |||
dds.set_contents( | |||
'src/foo.cpp', | |||
b'int the_answer() { return 42; }', | |||
)) | |||
scope.enter_context( | |||
dds.set_contents( | |||
'library.dds', | |||
b'Name: TestLibrary', | |||
)) | |||
scope.enter_context( | |||
dds.set_contents( | |||
'package.dds', | |||
b''' | |||
Name: TestProject | |||
Version: 0.0.0 | |||
''', | |||
)) | |||
dds.build(tests=True, apps=False, warnings=False, export=True) | |||
assert (dds.build_dir / 'compile_commands.json').is_file() | |||
assert list(dds.build_dir.glob('libTestLibrary*')) != [] |
@@ -0,0 +1,14 @@ | |||
from contextlib import ExitStack | |||
from tests import DDS | |||
from tests.fileutil import set_contents | |||
def test_lib_with_just_test(dds: DDS, scope: ExitStack): | |||
scope.enter_context( | |||
set_contents( | |||
dds.source_root / 'src/foo.test.cpp', | |||
b'int main() {}', | |||
)) | |||
dds.build(tests=True, apps=False, warnings=False, export=False) | |||
assert (dds.build_dir / f'test/foo{dds.exe_suffix}').is_file() |
@@ -0,0 +1,38 @@ | |||
from contextlib import ExitStack | |||
from typing import Optional | |||
from pathlib import Path | |||
import shutil | |||
import pytest | |||
from tests import scoped_dds, DDSFixtureParams | |||
@pytest.yield_fixture | |||
def dds(request, tmp_path: Path, worker_id: str, scope: ExitStack): | |||
test_source_dir = Path(request.fspath).absolute().parent | |||
test_root = test_source_dir | |||
# If we are running in parallel, use a unique directory as scratch | |||
# space so that we aren't stomping on anyone else | |||
if worker_id != 'master': | |||
test_root = tmp_path / request.function.__name__ | |||
shutil.copytree(test_source_dir, test_root) | |||
project_dir = test_root / 'project' | |||
# Check if we have a special configuration | |||
if hasattr(request, 'param'): | |||
assert isinstance(request.param, DDSFixtureParams), \ | |||
('Using the `dds` fixture requires passing in indirect ' | |||
'params. Use @dds_fixture_conf to configure the fixture') | |||
params: DDSFixtureParams = request.param | |||
project_dir = test_root / params.subdir | |||
# Create the instance. Auto-clean when we're done | |||
yield scope.enter_context(scoped_dds(test_root, project_dir, request.function.__name__)) | |||
@pytest.fixture | |||
def scope(): | |||
with ExitStack() as scope: | |||
yield scope |
@@ -0,0 +1,173 @@ | |||
import os | |||
import itertools | |||
from contextlib import contextmanager, ExitStack | |||
from pathlib import Path | |||
from typing import Iterable, Union, Any, Dict, NamedTuple, ContextManager | |||
import subprocess | |||
import shutil | |||
import pytest | |||
from dds_ci import proc | |||
from . import fileutil | |||
class DDS: | |||
def __init__(self, dds_exe: Path, test_dir: Path, project_dir: Path, | |||
scope: ExitStack) -> None: | |||
self.dds_exe = dds_exe | |||
self.test_dir = test_dir | |||
self.source_root = project_dir | |||
self.scratch_dir = project_dir / '_test_scratch' | |||
self.scope = scope | |||
self.scope.callback(self.cleanup) | |||
@property | |||
def repo_dir(self) -> Path: | |||
return self.scratch_dir / 'repo' | |||
@property | |||
def deps_build_dir(self) -> Path: | |||
return self.scratch_dir / 'deps-build' | |||
@property | |||
def build_dir(self) -> Path: | |||
return self.scratch_dir / 'build' | |||
@property | |||
def lmi_path(self) -> Path: | |||
return self.scratch_dir / 'INDEX.lmi' | |||
def cleanup(self): | |||
if self.scratch_dir.exists(): | |||
shutil.rmtree(self.scratch_dir) | |||
def run_unchecked(self, cmd: proc.CommandLine, *, | |||
cwd: Path = None) -> subprocess.CompletedProcess: | |||
full_cmd = itertools.chain([self.dds_exe], cmd) | |||
return proc.run(full_cmd, cwd=cwd or self.source_root) | |||
def run(self, cmd: proc.CommandLine, *, | |||
cwd: Path = None) -> subprocess.CompletedProcess: | |||
cmdline = list(proc.flatten_cmd(cmd)) | |||
        res = self.run_unchecked(cmd, cwd=cwd)
if res.returncode != 0: | |||
raise subprocess.CalledProcessError( | |||
res.returncode, [self.dds_exe] + cmdline, res.stdout) | |||
return res | |||
@property | |||
def repo_dir_arg(self) -> str: | |||
return f'--repo-dir={self.repo_dir}' | |||
@property | |||
def project_dir_arg(self) -> str: | |||
return f'--project-dir={self.source_root}' | |||
def deps_ls(self) -> subprocess.CompletedProcess: | |||
return self.run(['deps', 'ls']) | |||
def deps_get(self) -> subprocess.CompletedProcess: | |||
return self.run([ | |||
'deps', | |||
'get', | |||
self.repo_dir_arg, | |||
]) | |||
def deps_build(self, *, | |||
toolchain: str = None) -> subprocess.CompletedProcess: | |||
return self.run([ | |||
'deps', | |||
'build', | |||
f'--toolchain={toolchain or self.default_builtin_toolchain}', | |||
self.repo_dir_arg, | |||
f'--deps-build-dir={self.deps_build_dir}', | |||
f'--lmi-path={self.lmi_path}', | |||
]) | |||
def build(self, | |||
*, | |||
toolchain: str = None, | |||
apps: bool = True, | |||
warnings: bool = True, | |||
tests: bool = True, | |||
export: bool = False) -> subprocess.CompletedProcess: | |||
return self.run([ | |||
'build', | |||
f'--out={self.build_dir}', | |||
['--tests'] if tests else [], | |||
['--apps'] if apps else [], | |||
['--warnings'] if warnings else [], | |||
['--export'] if export else [], | |||
f'--toolchain={toolchain or self.default_builtin_toolchain}', | |||
f'--lm-index={self.lmi_path}', | |||
self.project_dir_arg, | |||
]) | |||
def sdist_create(self) -> subprocess.CompletedProcess: | |||
return self.run([ | |||
'sdist', | |||
'create', | |||
self.project_dir_arg, | |||
f'--out={self.build_dir / "created-sdist.sds"}', | |||
]) | |||
def sdist_export(self) -> subprocess.CompletedProcess: | |||
return self.run([ | |||
'sdist', | |||
'export', | |||
self.project_dir_arg, | |||
self.repo_dir_arg, | |||
]) | |||
@property | |||
def default_builtin_toolchain(self) -> str: | |||
if os.name == 'posix': | |||
return ':gcc-8' | |||
elif os.name == 'nt': | |||
return ':msvc' | |||
else: | |||
raise RuntimeError( | |||
f'No default builtin toolchain defined for tests on platform "{os.name}"' | |||
) | |||
@property | |||
def exe_suffix(self) -> str: | |||
if os.name == 'posix': | |||
return '' | |||
elif os.name == 'nt': | |||
return '.exe' | |||
else: | |||
raise RuntimeError( | |||
f'We don\'t know the executable suffix for the platform "{os.name}"' | |||
) | |||
def set_contents(self, path: Union[str, Path], | |||
content: bytes) -> ContextManager[Path]: | |||
return fileutil.set_contents(self.source_root / path, content) | |||
@contextmanager | |||
def scoped_dds(test_dir: Path, project_dir: Path, name: str): | |||
dds_exe = Path(__file__).absolute().parent.parent / '_build/dds' | |||
if os.name == 'nt': | |||
dds_exe = dds_exe.with_suffix('.exe') | |||
with ExitStack() as scope: | |||
yield DDS(dds_exe, test_dir, project_dir, scope) | |||
class DDSFixtureParams(NamedTuple): | |||
ident: str | |||
subdir: Union[Path, str] | |||
def dds_fixture_conf(*argsets: DDSFixtureParams): | |||
args = list(argsets) | |||
return pytest.mark.parametrize( | |||
'dds', args, indirect=True, ids=[p.ident for p in args]) | |||
def dds_fixture_conf_1(subdir: Union[Path, str]): | |||
params = DDSFixtureParams(ident='only', subdir=subdir) | |||
return pytest.mark.parametrize('dds', [params], indirect=True, ids=['.']) |
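# Illustrative usage of the fixture configuration helpers (test name hypothetical):
#
#   @dds_fixture_conf_1('use-remote')
#   def test_something(dds: DDS):
#       dds.build()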
@@ -0,0 +1,36 @@ | |||
import pytest | |||
import subprocess | |||
from tests import DDS, DDSFixtureParams, dds_fixture_conf, dds_fixture_conf_1 | |||
dds_conf = dds_fixture_conf( | |||
DDSFixtureParams(ident='git-remote', subdir='git-remote'), | |||
DDSFixtureParams(ident='no-deps', subdir='no-deps'), | |||
) | |||
@dds_conf | |||
def test_ls(dds: DDS): | |||
dds.run(['deps', 'ls']) | |||
@dds_conf | |||
def test_deps_build(dds: DDS): | |||
assert not dds.repo_dir.exists() | |||
dds.deps_get() | |||
assert dds.repo_dir.exists(), '`deps get` did not generate a repo directory' | |||
assert not dds.lmi_path.exists() | |||
dds.deps_build() | |||
    assert dds.lmi_path.exists(), '`deps build` did not generate the expected INDEX.lmi'
@dds_fixture_conf_1('use-remote') | |||
def test_use_nlohmann_json_remote(dds: DDS): | |||
dds.deps_get() | |||
dds.deps_build() | |||
dds.build(apps=True) | |||
app_exe = dds.build_dir / f'app{dds.exe_suffix}' | |||
assert app_exe.is_file() | |||
subprocess.check_call([str(app_exe)]) |
@@ -0,0 +1,5 @@ | |||
Name: deps-test | |||
Version: 0.0.0 | |||
Depends: neo-buffer 0.1.0 | |||
Depends: range-v3 0.9.1 |
@@ -0,0 +1,2 @@ | |||
Remote-Package: neo-buffer 0.1.0; git url=https://github.com/vector-of-bool/neo-buffer.git ref=develop | |||
Remote-Package: range-v3 0.9.1; git url=https://github.com/ericniebler/range-v3.git ref=0.9.1 auto=Niebler/range-v3 |
@@ -0,0 +1,2 @@ | |||
Name: deps-test | |||
Version: 0.0.0 |
@@ -0,0 +1,3 @@ | |||
Name: dummy | |||
Uses: nlohmann/json |
@@ -0,0 +1,4 @@ | |||
Name: json-test | |||
Version: 0.0.0 | |||
Depends: nlohmann-json 3.7.1 |
@@ -0,0 +1 @@ | |||
Remote-Package: nlohmann-json 3.7.1; git url=https://github.com/vector-of-bool/json.git ref=dds/3.7.1 |
@@ -0,0 +1,8 @@ | |||
#include <nlohmann/json.hpp> | |||
int main() { | |||
nlohmann::json j = { | |||
{"foo", "bar"}, | |||
}; | |||
return j.size() == 1 ? 0 : 12; | |||
} |
@@ -0,0 +1,4 @@ | |||
Compiler-ID: GNU | |||
C++-Version: C++17 | |||
C++-Compiler: g++-8 | |||
Flags: -D SPDLOG_COMPILED_LIB |
@@ -0,0 +1,2 @@ | |||
Compiler-ID: MSVC | |||
Flags: -D SPDLOG_COMPILED_LIB |
@@ -0,0 +1,3 @@ | |||
Name: spdlog-user | |||
Uses: spdlog/spdlog |
@@ -0,0 +1,4 @@ | |||
Name: test | |||
Version: 0.0.0 | |||
Depends: spdlog 1.4.2 |
@@ -0,0 +1 @@ | |||
Remote-Package: spdlog 1.4.2; git url=https://github.com/gabime/spdlog.git ref=v1.4.2 auto=spdlog/spdlog |
@@ -0,0 +1,6 @@ | |||
#include <spdlog/spdlog.h> | |||
int write_message() { | |||
spdlog::warn("This is a message generated by spdlog in the spdlog-user test library"); | |||
return 42; | |||
} |
@@ -0,0 +1,3 @@ | |||
#pragma once | |||
extern int write_message(); |
@@ -0,0 +1,14 @@ | |||
#include "./spdlog_user.hpp" | |||
#include <spdlog/spdlog.h> | |||
int main() { | |||
auto result = ::write_message(); | |||
if (result != 42) { | |||
spdlog::critical( | |||
"The test library returned the wrong value (This is a REAL dds test failure, and is " | |||
"very unexpected)"); | |||
return 1; | |||
} | |||
return 0; | |||
} |
@@ -0,0 +1,12 @@ | |||
from tests import DDS | |||
from dds_ci import proc | |||
def test_get_build_use_spdlog(dds: DDS): | |||
dds.deps_get() | |||
tc_fname = 'gcc.tc.dds' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.dds' | |||
tc = str(dds.test_dir / tc_fname) | |||
dds.deps_build(toolchain=tc) | |||
dds.build(toolchain=tc, apps=True) | |||
proc.check_run((dds.build_dir / 'use-spdlog').with_suffix(dds.exe_suffix)) |
@@ -0,0 +1,7 @@ | |||
from tests import DDS | |||
from tests.fileutil import ensure_dir | |||
def test_empty_dir(dds: DDS): | |||
with ensure_dir(dds.source_root): | |||
dds.build() |
@@ -0,0 +1,52 @@ | |||
from contextlib import contextmanager, ExitStack | |||
from pathlib import Path | |||
from typing import Iterator, Union, Optional | |||
import shutil | |||
@contextmanager | |||
def ensure_dir(dirpath: Path) -> Iterator[Path]: | |||
""" | |||
    Ensure that the given directory (and any parents) exist. When the context
    exits, removes any directories that were created.
""" | |||
dirpath = dirpath.absolute() | |||
if dirpath.exists(): | |||
assert dirpath.is_dir(), f'Directory {dirpath} is a non-directory file' | |||
yield dirpath | |||
return | |||
# Create the directory and clean it up when we are done | |||
with ensure_dir(dirpath.parent): | |||
dirpath.mkdir() | |||
try: | |||
yield dirpath | |||
finally: | |||
shutil.rmtree(dirpath) | |||
@contextmanager | |||
def auto_delete(fpath: Path) -> Iterator[Path]: | |||
try: | |||
yield fpath | |||
finally: | |||
if fpath.exists(): | |||
fpath.unlink() | |||
@contextmanager | |||
def set_contents(fpath: Path, content: bytes) -> Iterator[Path]: | |||
prev_content: Optional[bytes] = None | |||
if fpath.exists(): | |||
        assert fpath.is_file(), f'File {fpath} exists and is not a regular file'
prev_content = fpath.read_bytes() | |||
with ensure_dir(fpath.parent): | |||
fpath.write_bytes(content) | |||
try: | |||
yield fpath | |||
finally: | |||
if prev_content is None: | |||
fpath.unlink() | |||
else: | |||
fpath.write_bytes(prev_content) |
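# Illustrative usage (as in the test suite): both helpers are context managers
# that restore the previous filesystem state on exit (`some_dir` is a placeholder).
#
#   with set_contents(some_dir / 'package.dds', b'Name: foo\n'):
#       ...  # the file exists with the given bytes here
#   # the prior contents (or absence of the file) are restored afterwards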
@@ -0,0 +1 @@ | |||
Name: foo |
@@ -0,0 +1,2 @@ | |||
Name: foo | |||
Version: 1.2.3 |
@@ -0,0 +1,13 @@ | |||
from tests.dds import DDS, dds_fixture_conf_1 | |||
@dds_fixture_conf_1('create') | |||
def test_create_sdist(dds: DDS): | |||
dds.sdist_create() | |||
sd_dir = dds.build_dir / 'created-sdist.sds' | |||
assert sd_dir.is_dir() | |||
foo_cpp = sd_dir / 'src/foo.cpp' | |||
assert foo_cpp.is_file() | |||
header_hpp = sd_dir / 'include/header.hpp' | |||
assert header_hpp.is_file() | |||
header_h = sd_dir / 'include/header.h' | |||
assert header_h.is_file() |
@@ -1 +0,0 @@ | |||
Name: simple |
@@ -1,2 +0,0 @@ | |||
Name: dds-simple-test | |||
Version: 0.0.0 |
@@ -1 +0,0 @@ | |||
int foo() { return 42; } |
@@ -1 +0,0 @@ | |||
Name: test_only |
@@ -1,2 +0,0 @@ | |||
Name: dds-test_only-test | |||
Version: 0.0.0 |
@@ -1 +0,0 @@ | |||
int main() { return 0; } |
@@ -1,20 +1,23 @@ | |||
import argparse | |||
import os | |||
import sys | |||
import pytest | |||
from pathlib import Path | |||
from typing import Sequence, NamedTuple | |||
import subprocess | |||
import urllib.request | |||
import shutil | |||
HERE = Path(__file__).parent.absolute() | |||
TOOLS_DIR = HERE | |||
PROJECT_ROOT = HERE.parent | |||
PREBUILT_DDS = PROJECT_ROOT / '_prebuilt/dds' | |||
from self_build import self_build | |||
from self_deps_get import self_deps_get | |||
from self_deps_build import self_deps_build | |||
from dds_ci import paths, proc | |||
class CIOptions(NamedTuple): | |||
cxx: Path | |||
toolchain: str | |||
toolchain_2: str | |||
def _do_bootstrap_build(opts: CIOptions) -> None: | |||
@@ -22,7 +25,7 @@ def _do_bootstrap_build(opts: CIOptions) -> None: | |||
subprocess.check_call([ | |||
sys.executable, | |||
'-u', | |||
str(TOOLS_DIR / 'bootstrap.py'), | |||
str(paths.TOOLS_DIR / 'bootstrap.py'), | |||
f'--cxx={opts.cxx}', | |||
]) | |||
@@ -34,13 +37,14 @@ def _do_bootstrap_download() -> None: | |||
'darwin': 'dds-macos-x64', | |||
}.get(sys.platform) | |||
if filename is None: | |||
raise RuntimeError(f'We do not have a prebuilt DDS binary for the "{sys.platform}" platform') | |||
raise RuntimeError(f'We do not have a prebuilt DDS binary for ' | |||
f'the "{sys.platform}" platform') | |||
url = f'https://github.com/vector-of-bool/dds/releases/download/bootstrap-p2/{filename}' | |||
print(f'Downloading prebuilt DDS executable: {url}') | |||
stream = urllib.request.urlopen(url) | |||
PREBUILT_DDS.parent.mkdir(exist_ok=True, parents=True) | |||
with PREBUILT_DDS.open('wb') as fd: | |||
paths.PREBUILT_DDS.parent.mkdir(exist_ok=True, parents=True) | |||
with paths.PREBUILT_DDS.open('wb') as fd: | |||
while True: | |||
buf = stream.read(1024 * 4) | |||
if not buf: | |||
@@ -49,9 +53,9 @@ def _do_bootstrap_download() -> None: | |||
if os.name != 'nt': | |||
# Mark the binary executable. By default it won't be | |||
mode = PREBUILT_DDS.stat().st_mode | |||
mode = paths.PREBUILT_DDS.stat().st_mode | |||
mode |= 0b001_001_001 | |||
PREBUILT_DDS.chmod(mode) | |||
paths.PREBUILT_DDS.chmod(mode) | |||
def main(argv: Sequence[str]) -> int: | |||
@@ -59,9 +63,8 @@ def main(argv: Sequence[str]) -> int: | |||
parser.add_argument( | |||
'-B', | |||
'--bootstrap-with', | |||
help= | |||
'Skip the prebuild-bootstrap step. This requires a _prebuilt/dds to exist!', | |||
choices=('download', 'build'), | |||
help='How are we to obtain a bootstrapped DDS executable?', | |||
choices=('download', 'build', 'skip'), | |||
required=True, | |||
) | |||
parser.add_argument( | |||
@@ -73,43 +76,56 @@ def main(argv: Sequence[str]) -> int: | |||
'-T', | |||
help='The toolchain to use for the CI process', | |||
required=True) | |||
parser.add_argument( | |||
'--toolchain-2', | |||
'-T2', | |||
help='Toolchain for the second-phase self-test', | |||
required=True) | |||
args = parser.parse_args(argv) | |||
opts = CIOptions(cxx=Path(args.cxx), toolchain=args.toolchain) | |||
opts = CIOptions( | |||
cxx=Path(args.cxx), | |||
toolchain=args.toolchain, | |||
toolchain_2=args.toolchain_2) | |||
if args.bootstrap_with == 'build': | |||
_do_bootstrap_build(opts) | |||
elif args.bootstrap_with == 'download': | |||
_do_bootstrap_download() | |||
elif args.bootstrap_with == 'skip': | |||
pass | |||
else: | |||
assert False, 'impossible' | |||
subprocess.check_call([ | |||
str(PREBUILT_DDS), | |||
'deps', | |||
'build', | |||
f'-T{opts.toolchain}', | |||
f'--repo-dir={PROJECT_ROOT / "external/repo"}', | |||
]) | |||
subprocess.check_call([ | |||
str(PREBUILT_DDS), | |||
proc.check_run( | |||
paths.PREBUILT_DDS, | |||
'build', | |||
'--full', | |||
f'-T{opts.toolchain}', | |||
]) | |||
('-T', opts.toolchain), | |||
) | |||
exe_suffix = '.exe' if os.name == 'nt' else '' | |||
subprocess.check_call([ | |||
sys.executable, | |||
'-u', | |||
str(TOOLS_DIR / 'test.py'), | |||
f'--exe={PROJECT_ROOT / f"_build/dds{exe_suffix}"}', | |||
f'-T{opts.toolchain}', | |||
self_build(paths.CUR_BUILT_DDS, toolchain=opts.toolchain) | |||
print('Bootstrap test PASSED!') | |||
if paths.SELF_TEST_REPO_DIR.exists(): | |||
shutil.rmtree(paths.SELF_TEST_REPO_DIR) | |||
self_deps_get(paths.CUR_BUILT_DDS, paths.SELF_TEST_REPO_DIR) | |||
self_deps_build(paths.CUR_BUILT_DDS, opts.toolchain_2, | |||
paths.SELF_TEST_REPO_DIR, | |||
paths.PROJECT_ROOT / 'remote.dds') | |||
self_build( | |||
paths.CUR_BUILT_DDS, | |||
toolchain=opts.toolchain, | |||
lmi_path=paths.BUILD_DIR / 'INDEX.lmi') | |||
return pytest.main([ | |||
'-v', | |||
'--durations=10', | |||
f'--basetemp={paths.BUILD_DIR / "_tmp"}', | |||
'-n4', | |||
]) | |||
return 0 | |||
if __name__ == "__main__": | |||
sys.exit(main(sys.argv[1:])) |
@@ -0,0 +1,19 @@ | |||
from argparse import ArgumentParser | |||
from dds_ci import paths | |||
def add_tc_arg(parser: ArgumentParser, *, required=True) -> None: | |||
parser.add_argument( | |||
'--toolchain', | |||
'-T', | |||
help='The DDS toolchain to use', | |||
required=required) | |||
def add_dds_exe_arg(parser: ArgumentParser, *, required=True) -> None: | |||
parser.add_argument( | |||
'--exe', | |||
'-e', | |||
help='Path to a DDS executable to use', | |||
required=required) |
@@ -0,0 +1,12 @@ | |||
import os | |||
from pathlib import Path | |||
TOOLS_DIR = Path(__file__).absolute().parent.parent | |||
PROJECT_ROOT = TOOLS_DIR.parent | |||
BUILD_DIR = PROJECT_ROOT / '_build' | |||
PREBUILT_DIR = PROJECT_ROOT / '_prebuilt' | |||
EXE_SUFFIX = '.exe' if os.name == 'nt' else '' | |||
PREBUILT_DDS = (PREBUILT_DIR / 'dds').with_suffix(EXE_SUFFIX) | |||
CUR_BUILT_DDS = (BUILD_DIR / 'dds').with_suffix(EXE_SUFFIX) | |||
EMBEDDED_REPO_DIR = PROJECT_ROOT / 'external/repo' | |||
SELF_TEST_REPO_DIR = BUILD_DIR / '_self-repo' |
@@ -0,0 +1,39 @@ | |||
from pathlib import PurePath, Path | |||
from typing import Iterable, Union | |||
import subprocess | |||
CommandLineArg = Union[str, PurePath, int, float] | |||
CommandLineArg1 = Union[CommandLineArg, Iterable[CommandLineArg]] | |||
CommandLineArg2 = Union[CommandLineArg1, Iterable[CommandLineArg1]] | |||
CommandLineArg3 = Union[CommandLineArg2, Iterable[CommandLineArg2]] | |||
CommandLineArg4 = Union[CommandLineArg3, Iterable[CommandLineArg3]] | |||
CommandLine = Union[CommandLineArg4, Iterable[CommandLineArg4]] | |||
def flatten_cmd(cmd: CommandLine) -> Iterable[str]: | |||
if isinstance(cmd, (str, PurePath)): | |||
yield str(cmd) | |||
elif isinstance(cmd, (int, float)): | |||
yield str(cmd) | |||
elif hasattr(cmd, '__iter__'): | |||
each = (flatten_cmd(arg) for arg in cmd) # type: ignore | |||
for item in each: | |||
yield from item | |||
else: | |||
assert False, f'Invalid command line element: {repr(cmd)}' | |||
def run(*cmd: CommandLine, cwd: Path = None) -> subprocess.CompletedProcess: | |||
return subprocess.run( | |||
list(flatten_cmd(cmd)), # type: ignore | |||
cwd=cwd, | |||
) | |||
def check_run(*cmd: CommandLine, | |||
cwd: Path = None) -> subprocess.CompletedProcess: | |||
flat_cmd = list(flatten_cmd(cmd)) # type: ignore | |||
res = run(flat_cmd, cwd=cwd) | |||
if res.returncode != 0: | |||
raise subprocess.CalledProcessError(res.returncode, flat_cmd) | |||
return res |
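# Illustrative: nested command fragments are flattened before execution, so
# callers can group related arguments, e.g.
#
#   check_run(dds_exe, 'build', '--full', ('-T', toolchain))
#   # executes: [str(dds_exe), 'build', '--full', '-T', str(toolchain)]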
@@ -0,0 +1,6 @@ | |||
Compiler-ID: GNU | |||
C++-Version: C++17 | |||
C++-Compiler: g++-8 | |||
Flags: -fconcepts -Werror=return-type | |||
Flags: -D SPDLOG_COMPILED_LIB | |||
Optimize: True |
@@ -0,0 +1,4 @@ | |||
Compiler-ID: MSVC | |||
Flags: /experimental:preprocessor /D SPDLOG_COMPILED_LIB /wd5105 /std:c++latest | |||
Link-Flags: rpcrt4.lib | |||
Optimize: True |
@@ -0,0 +1,41 @@ | |||
#!/usr/bin/env python3 | |||
import argparse | |||
from pathlib import Path | |||
from typing import List, NamedTuple | |||
import shutil | |||
import subprocess | |||
import sys | |||
from dds_ci import cli, proc | |||
ROOT = Path(__file__).parent.parent.absolute() | |||
def self_build(exe: Path, *, toolchain: str, lmi_path: Path = None): | |||
    # Copy the exe to another location, as Windows refuses to let a binary be
# replaced while it is executing | |||
new_exe = ROOT / '_dds.bootstrap-test.exe' | |||
shutil.copy2(exe, new_exe) | |||
try: | |||
proc.check_run( | |||
new_exe, | |||
'build', | |||
'--full', | |||
('--toolchain', toolchain), | |||
('-I', lmi_path) if lmi_path else (), | |||
) | |||
finally: | |||
new_exe.unlink() | |||
def main(argv: List[str]) -> int: | |||
parser = argparse.ArgumentParser() | |||
cli.add_tc_arg(parser) | |||
cli.add_dds_exe_arg(parser) | |||
args = parser.parse_args(argv) | |||
    self_build(Path(args.exe), toolchain=args.toolchain)
return 0 | |||
if __name__ == "__main__": | |||
sys.exit(main(sys.argv[1:])) |
@@ -0,0 +1,30 @@ | |||
import argparse | |||
from pathlib import Path | |||
from dds_ci import cli, proc, paths | |||
def self_deps_build(exe: Path, toolchain: str, repo_dir: Path, | |||
remote_list: Path) -> None: | |||
proc.check_run( | |||
exe, | |||
'deps', | |||
'build', | |||
('--repo-dir', repo_dir), | |||
('-T', toolchain), | |||
) | |||
def main(): | |||
parser = argparse.ArgumentParser() | |||
cli.add_dds_exe_arg(parser) | |||
cli.add_tc_arg(parser) | |||
parser.add_argument('--repo-dir', default=paths.SELF_TEST_REPO_DIR) | |||
args = parser.parse_args() | |||
self_deps_build( | |||
Path(args.exe), args.toolchain, args.repo_dir, | |||
paths.PROJECT_ROOT / 'remote.dds') | |||
if __name__ == "__main__": | |||
main() |
@@ -0,0 +1,19 @@ | |||
from pathlib import Path | |||
from dds_ci import proc, paths | |||
PROJECT_ROOT = Path(__file__).absolute().parent.parent | |||
def self_deps_get(dds_exe: Path, repo_dir: Path) -> None: | |||
proc.check_run( | |||
dds_exe, | |||
'deps', | |||
'get', | |||
('--repo-dir', repo_dir), | |||
('--remote-list', PROJECT_ROOT / 'remote.dds'), | |||
) | |||
if __name__ == "__main__": | |||
self_deps_get(paths.CUR_BUILT_DDS, paths.SELF_TEST_REPO_DIR) |
@@ -1,155 +0,0 @@ | |||
#!/usr/bin/env python3 | |||
import argparse | |||
from pathlib import Path | |||
from typing import List, NamedTuple | |||
import shutil | |||
import subprocess | |||
import sys | |||
ROOT = Path(__file__).parent.parent.absolute() | |||
class TestOptions(NamedTuple): | |||
exe: Path | |||
toolchain: str | |||
def run_test_dir(dir: Path, opts: TestOptions) -> bool: | |||
fails = 0 | |||
fails += _run_subproc_test( | |||
dir, | |||
opts, | |||
'Full Build', | |||
'build', | |||
'--full', | |||
f'--toolchain={opts.toolchain}', | |||
) | |||
fails += _run_subproc_test( | |||
dir, | |||
opts, | |||
'Source Distribution', | |||
'sdist', | |||
'create', | |||
f'--out=_build/{dir.stem}/test.dsd', | |||
'--replace', | |||
) | |||
return fails == 0 | |||
def _run_subproc_test(dir: Path, opts: TestOptions, name: str, | |||
*args: str) -> int: | |||
print(f'Running test: {dir.stem} - {name} ', end='') | |||
out_dir = dir / '_build' | |||
if out_dir.exists(): | |||
shutil.rmtree(out_dir) | |||
res = subprocess.run( | |||
[ | |||
str(opts.exe), | |||
] + list(str(s) for s in args), | |||
cwd=dir, | |||
stdout=subprocess.PIPE, | |||
stderr=subprocess.STDOUT, | |||
) | |||
if res.returncode != 0: | |||
print('- FAILED') | |||
print(f'Test failed with exit code ' | |||
f'[{res.returncode}]:\n{res.stdout.decode()}') | |||
return 1 | |||
print('- PASSED') | |||
return 0 | |||
def _run_build_test(dir: Path, opts: TestOptions) -> int: | |||
print(f'Running test: {dir.stem} - build', end='') | |||
out_dir = dir / '_build' | |||
if out_dir.exists(): | |||
shutil.rmtree(out_dir) | |||
res = subprocess.run( | |||
[ | |||
str(opts.exe), | |||
'build', | |||
'--export', | |||
'--warnings', | |||
'--tests', | |||
'--full', | |||
f'--toolchain={opts.toolchain}', | |||
f'--out={out_dir}', | |||
f'--export-name={dir.stem}', | |||
], | |||
cwd=dir, | |||
stdout=subprocess.PIPE, | |||
stderr=subprocess.STDOUT, | |||
) | |||
if res.returncode != 0: | |||
print('- FAILED') | |||
print(f'Test failed with exit code ' | |||
f'[{res.returncode}]:\n{res.stdout.decode()}') | |||
return 1 | |||
print('- PASSED') | |||
return 0 | |||
def run_tests(opts: TestOptions) -> int: | |||
print('Sanity check...') | |||
subprocess.check_output([str(opts.exe), '--help']) | |||
tests_subdir = ROOT / 'tests' | |||
test_dirs = tests_subdir.glob('*.test') | |||
ret = 0 | |||
for td in test_dirs: | |||
if not run_test_dir(td, opts): | |||
ret = 1 | |||
return ret | |||
def bootstrap_self(opts: TestOptions): | |||
# Copy the exe to another location, as windows refuses to let a binary be | |||
# replaced while it is executing | |||
new_exe = ROOT / '_dds.bootstrap-test.exe' | |||
shutil.copy2(opts.exe, new_exe) | |||
res = subprocess.run([ | |||
str(new_exe), | |||
'build', | |||
f'-FT{opts.toolchain}', | |||
]) | |||
new_exe.unlink() | |||
if res.returncode != 0: | |||
print('The bootstrap test failed!', file=sys.stderr) | |||
return False | |||
print('Bootstrap test PASSED!') | |||
return True | |||
def main(argv: List[str]) -> int: | |||
parser = argparse.ArgumentParser() | |||
parser.add_argument( | |||
'--exe', | |||
'-e', | |||
help='Path to the dds executable to test', | |||
required=True) | |||
parser.add_argument( | |||
'--toolchain', | |||
'-T', | |||
help='The dds toolchain to use while testing', | |||
required=True, | |||
) | |||
parser.add_argument( | |||
'--skip-bootstrap-test', | |||
action='store_true', | |||
help='Skip the self-bootstrap test', | |||
) | |||
args = parser.parse_args(argv) | |||
tc = args.toolchain | |||
if not tc.startswith(':'): | |||
tc = Path(tc).absolute() | |||
opts = TestOptions(exe=Path(args.exe).absolute(), toolchain=tc) | |||
if not args.skip_bootstrap_test and not bootstrap_self(opts): | |||
return 2 | |||
return run_tests(opts) | |||
if __name__ == "__main__": | |||
sys.exit(main(sys.argv[1:])) |