@@ -1,5 +1,16 @@
{
    "packages": {
        "ctre": {
            "2.7.0": {
                "depends": {},
                "description": "A compile-time PCRE (almost) compatible regular expression matcher",
                "git": {
                    "auto-lib": "hanickadot/ctre",
                    "ref": "v2.7",
                    "url": "https://github.com/hanickadot/compile-time-regular-expressions.git"
                }
            }
        },
        "fmt": {
            "0.10.0": {
                "depends": {},
@@ -10,3 +10,4 @@ Uses: semver/semver
Uses: vob/semester
Uses: pubgrub/pubgrub
Uses: vob/json5
Uses: hanickadot/ctre
@@ -12,5 +12,6 @@
        "pubgrub/pubgrub",
        "vob/json5",
        "vob/semester",
        "hanickadot/ctre",
    ]
}
@@ -12,5 +12,6 @@ Depends: semver 0.2.1
Depends: pubgrub 0.2.0
Depends: vob-json5 0.1.5
Depends: vob-semester 0.1.0
Depends: ctre 2.7.0
Test-Driver: Catch-Main
@@ -13,7 +13,8 @@
        "semver": "0.2.1",
        "pubgrub": "0.2.0",
        "vob-json5": "0.1.5",
        "vob-semester": "0.1.0"
        "vob-semester": "0.1.0",
        "ctre": "2.7.0",
    },
    "test_driver": "Catch-Main"
}
@@ -150,9 +150,13 @@ prepare_ureqs(const build_plan& plan, const toolchain& toolchain, path_ref out_r
            lib_reqs.include_paths.push_back(lib.library_().public_include_dir());
            lib_reqs.uses = lib.library_().manifest().uses;
            lib_reqs.links = lib.library_().manifest().links;
            if (const auto& arc = lib.create_archive()) {
            if (const auto& arc = lib.archive_plan()) {
                lib_reqs.linkable_path = out_root / arc->calc_archive_file_path(toolchain);
            }
            auto gen_incdir_opt = lib.generated_include_dir();
            if (gen_incdir_opt) {
                lib_reqs.include_paths.push_back(out_root / *gen_incdir_opt);
            }
        }
    }
    return ureqs;
@@ -170,7 +174,7 @@ void write_lml(build_env_ref env, const library_plan& lib, path_ref lml_path) {
    for (auto&& link : lib.links()) {
        out << "Links: " << link.namespace_ << "/" << link.name << '\n';
    }
    if (auto&& arc = lib.create_archive()) {
    if (auto&& arc = lib.archive_plan()) {
        out << "Path: "
            << (env.output_root / arc->calc_archive_file_path(env.toolchain)).generic_string()
            << '\n';
@@ -225,6 +229,8 @@ void builder::build(const build_params& params) const {
        generate_compdb(plan, env);
    }
    plan.render_all(env);
    dds::stopwatch sw;
    plan.compile_all(env, params.parallel_jobs);
    spdlog::info("Compilation completed in {:n}ms", sw.elapsed_ms().count());
@@ -24,12 +24,14 @@ inline auto iter_libraries(const build_plan& plan) {
 * Return a range iterating over every file compilation defined in the given build plan
 */
inline auto iter_compilations(const build_plan& plan) {
    auto lib_compiles = //
        iter_libraries(plan) //
        | ranges::views::transform(&library_plan::create_archive) //
        | ranges::views::filter([&](auto&& opt) { return bool(opt); }) //
        | ranges::views::transform([&](auto&& opt) -> auto& { return opt->compile_files(); }) //
        | ranges::views::join //
    auto lib_compiles = //
        iter_libraries(plan) //
        | ranges::views::transform(&library_plan::archive_plan) //
        | ranges::views::filter([&](auto&& opt) { return bool(opt); }) //
        | ranges::views::transform([&](auto&& opt) -> auto& {
              return opt->file_compilations();
          }) //
        | ranges::views::join //
        ;
    auto exe_compiles = //
@@ -58,7 +58,7 @@ public:
    /**
     * Get the compilation plans for this library.
     */
    auto& compile_files() const noexcept { return _compile_files; }
    auto& file_compilations() const noexcept { return _compile_files; }
    /**
     * Perform the actual archive generation. Expects all compilations to have
@@ -15,7 +15,13 @@ using namespace dds;
compile_command_info compile_file_plan::generate_compile_command(build_env_ref env) const {
    compile_file_spec spec{_source.path, calc_object_file_path(env)};
    spec.enable_warnings = _rules.enable_warnings();
    extend(spec.include_dirs, _rules.include_dirs());
    for (auto dirpath : _rules.include_dirs()) {
        if (!dirpath.is_absolute()) {
            dirpath = env.output_root / dirpath;
        }
        dirpath = fs::weakly_canonical(dirpath);
        spec.include_dirs.push_back(std::move(dirpath));
    }
    for (const auto& use : _rules.uses()) {
        extend(spec.external_include_dirs, env.ureqs.include_paths(use));
    }
@@ -24,8 +30,7 @@ compile_command_info compile_file_plan::generate_compile_command(build_env_ref e
}
fs::path compile_file_plan::calc_object_file_path(const build_env& env) const noexcept {
    // `relpath` is just the path from the root of the source directory to the source file.
    auto relpath = fs::relative(_source.path, _source.basis_path);
    auto relpath = _source.relative_path();
    // The full output directory is prefixed by `_subdir`
    auto ret = env.output_root / _subdir / relpath;
    ret.replace_filename(relpath.filename().string() + env.toolchain.object_suffix());
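A standalone sketch of the path arithmetic above, not part of the diff (the source root "src", the output subdirectory "my-lib", and the ".o" suffix are made-up stand-ins; the real values come from the source distribution and the active toolchain, and relative_path() uses fs::relative rather than the purely lexical form used here):

    #include <filesystem>
    #include <iostream>
    namespace fs = std::filesystem;

    int main() {
        // e.g. source "src/foo/bar.cpp" under source root "src", out subdir "my-lib"
        fs::path relpath = fs::path("src/foo/bar.cpp").lexically_relative("src");
        fs::path obj = fs::path("_build") / "my-lib" / relpath;
        // replace_filename() keeps the parent directories and swaps in the object file name
        obj.replace_filename(relpath.filename().string() + ".o");
        std::cout << obj.generic_string() << '\n'; // _build/my-lib/foo/bar.cpp.o
    }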
@@ -25,11 +25,11 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons
    for (const lm::usage& links : _links) {
        extend(spec.inputs, env.ureqs.link_paths(links));
    }
    if (lib.create_archive()) {
    if (lib.archive_plan()) {
        // The associated library has compiled components. Add the static library as a linker
        // input
        spec.inputs.push_back(env.output_root
                              / lib.create_archive()->calc_archive_file_path(env.toolchain));
                              / lib.archive_plan()->calc_archive_file_path(env.toolchain));
    }
    // The main object should be a linker input, of course.
@@ -7,7 +7,9 @@
#include <range/v3/view/concat.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/join.hpp>
#include <range/v3/view/repeat.hpp>
#include <range/v3/view/transform.hpp>
#include <range/v3/view/zip.hpp>
#include <spdlog/spdlog.h>
@@ -72,8 +74,25 @@ bool parallel_run(Range&& rng, int n_jobs, Fn&& fn) {
    return exceptions.empty();
}
template <typename T, typename Range>
decltype(auto) pair_up(T& left, Range& right) {
    auto rep = ranges::view::repeat(left);
    return ranges::view::zip(rep, right);
}
} // namespace
void build_plan::render_all(build_env_ref env) const {
    auto templates = _packages //
        | ranges::view::transform(&package_plan::libraries) //
        | ranges::view::join //
        | ranges::view::transform([](const auto& lib) { return pair_up(lib, lib.templates()); }) //
        | ranges::view::join;
    for (const auto& [lib, tmpl] : templates) {
        tmpl.render(env, lib.library_());
    }
}
void build_plan::compile_all(const build_env& env, int njobs) const {
    auto okay = dds::compile_all(iter_compilations(*this), env, njobs);
    if (!okay) {
@@ -83,8 +102,8 @@ void build_plan::compile_all(const build_env& env, int njobs) const {
void build_plan::archive_all(const build_env& env, int njobs) const {
    auto okay = parallel_run(iter_libraries(*this), njobs, [&](const library_plan& lib) {
        if (lib.create_archive()) {
            lib.create_archive()->archive(env);
        if (lib.archive_plan()) {
            lib.archive_plan()->archive(env);
        }
    });
    if (!okay) {
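A minimal sketch of what the pair_up() helper yields, not part of the diff (assumes range-v3 is available; the names below are invented for the example):

    #include <range/v3/view/repeat.hpp>
    #include <range/v3/view/zip.hpp>
    #include <iostream>
    #include <string>
    #include <vector>

    int main() {
        std::string lib = "my-lib";         // stands in for a library_plan
        std::vector<int> tmpls = {1, 2, 3}; // stands in for its templates()
        // zip() stops at the shorter range, so zipping with an infinite repeat()
        // yields one (lib, template) pair per template.
        for (auto&& [l, t] : ranges::views::zip(ranges::views::repeat(lib), tmpls)) {
            std::cout << l << " -> " << t << '\n'; // my-lib -> 1, my-lib -> 2, my-lib -> 3
        }
    }

render_all() uses the same idea to flatten every (library, template) pair in the plan into one range before rendering.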
@@ -28,6 +28,10 @@ public:
     * All of the packages in this plan
     */
    auto& packages() const noexcept { return _packages; }
    /**
     * Render all config templates in the plan.
     */
    void render_all(const build_env& env) const;
    /**
     * Compile all files in the plan.
     */
@@ -7,9 +7,24 @@
#include <range/v3/view/transform.hpp>
#include <cassert>
#include <string>
using namespace dds;
namespace {
const std::string gen_dir_qual = "__dds/gen";
fs::path rebase_gen_incdir(path_ref subdir) { return gen_dir_qual / subdir; }
} // namespace
std::optional<fs::path> library_plan::generated_include_dir() const noexcept {
    if (_templates.empty()) {
        return std::nullopt;
    }
    return rebase_gen_incdir(output_subdirectory());
}
library_plan library_plan::create(const library_root& lib,
                                  const library_build_params& params,
                                  std::optional<std::string_view> qual_name_) {
@@ -17,6 +32,7 @@ library_plan library_plan::create(const library_root& lib,
    std::vector<source_file> app_sources;
    std::vector<source_file> test_sources;
    std::vector<source_file> lib_sources;
    std::vector<source_file> template_sources;
    auto qual_name = std::string(qual_name_.value_or(lib.manifest().name));
@@ -34,6 +50,8 @@ library_plan library_plan::create(const library_root& lib,
            app_sources.push_back(sfile);
        } else if (sfile.kind == source_kind::source) {
            lib_sources.push_back(sfile);
        } else if (sfile.kind == source_kind::header_template) {
            template_sources.push_back(sfile);
        } else {
            assert(sfile.kind == source_kind::header);
        }
@@ -45,6 +63,12 @@ library_plan library_plan::create(const library_root& lib,
    compile_rules.enable_warnings() = params.enable_warnings;
    compile_rules.uses() = lib.manifest().uses;
    const auto codegen_subdir = rebase_gen_incdir(params.out_subdir);
    if (!template_sources.empty()) {
        compile_rules.include_dirs().push_back(codegen_subdir);
    }
    // Convert the library sources into their respective file compilation plans.
    auto lib_compile_files = //
        lib_sources //
@@ -55,12 +79,12 @@ library_plan library_plan::create(const library_root& lib,
    // If we have any compiled library files, generate a static library archive
    // for this library
    std::optional<create_archive_plan> create_archive;
    std::optional<create_archive_plan> archive_plan;
    if (!lib_compile_files.empty()) {
        create_archive.emplace(lib.manifest().name,
                               qual_name,
                               params.out_subdir,
                               std::move(lib_compile_files));
        archive_plan.emplace(lib.manifest().name,
                             qual_name,
                             params.out_subdir,
                             std::move(lib_compile_files));
    }
    // Collect the paths to linker inputs that should be used when generating executables for this
@@ -86,8 +110,7 @@ library_plan library_plan::create(const library_root& lib,
        // Pick a subdir based on app/test
        const auto subdir_base = is_test ? params.out_subdir / "test" : params.out_subdir;
        // Put test/app executables in a further subdirectory based on the source file path
        const auto subdir
            = subdir_base / fs::relative(source.path.parent_path(), lib.src_source_root().path);
        const auto subdir = subdir_base / source.relative_path().parent_path();
        // Pick compile rules based on app/test
        auto rules = is_test ? test_rules : compile_rules;
        // Pick input libs based on app/test
@@ -105,6 +128,16 @@ library_plan library_plan::create(const library_root& lib,
        link_executables.emplace_back(std::move(exe));
    }
    std::vector<render_template_plan> render_templates;
    for (const auto& sf : template_sources) {
        render_templates.emplace_back(sf, codegen_subdir);
    }
    // Done!
    return library_plan{lib, qual_name, std::move(create_archive), std::move(link_executables)};
    return library_plan{lib,
                        qual_name,
                        params.out_subdir,
                        std::move(archive_plan),
                        std::move(link_executables),
                        std::move(render_templates)};
}
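For orientation, a small standalone sketch (not part of the diff) of where generated headers land; "my-lib" is a made-up out_subdir, while the "__dds/gen" prefix comes from gen_dir_qual above:

    #include <filesystem>
    #include <iostream>

    int main() {
        namespace fs = std::filesystem;
        // Mirrors rebase_gen_incdir(): templates for a library render under
        // "<build-root>/__dds/gen/<out_subdir>", and that directory is added to the
        // library's own include path whenever it has header templates.
        fs::path codegen_subdir = fs::path("__dds/gen") / "my-lib";
        std::cout << codegen_subdir.generic_string() << '\n'; // __dds/gen/my-lib
    }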
@@ -2,6 +2,7 @@
#include <dds/build/plan/archive.hpp>
#include <dds/build/plan/exe.hpp>
#include <dds/build/plan/template.hpp>
#include <dds/library/root.hpp>
#include <dds/usage_reqs.hpp>
#include <dds/util/fs.hpp>
@@ -56,10 +57,14 @@ class library_plan {
    library_root _lib;
    /// The qualified name of the library
    std::string _qual_name;
    /// The library's subdirectory within the output directory
    fs::path _subdir;
    /// The `create_archive_plan` for this library, if applicable
    std::optional<create_archive_plan> _create_archive;
    /// The executables that should be linked as part of this library's build
    std::vector<link_executable_plan> _link_exes;
    /// The templates that must be rendered for this library
    std::vector<render_template_plan> _templates;
public:
    /**
@@ -70,12 +75,16 @@
     */
    library_plan(library_root lib,
                 std::string_view qual_name,
                 fs::path subdir,
                 std::optional<create_archive_plan> ar,
                 std::vector<link_executable_plan> exes)
                 std::vector<link_executable_plan> exes,
                 std::vector<render_template_plan> tmpls)
        : _lib(std::move(lib))
        , _qual_name(qual_name)
        , _subdir(std::move(subdir))
        , _create_archive(std::move(ar))
        , _link_exes(std::move(exes)) {}
        , _link_exes(std::move(exes))
        , _templates(std::move(tmpls)) {}
    /**
     * Get the underlying library object
@@ -89,6 +98,10 @@
     * Get the qualified name of the library, as if for a libman usage requirement
     */
    auto& qualified_name() const noexcept { return _qual_name; }
    /**
     * The output subdirectory of this library plan
     */
    path_ref output_subdirectory() const noexcept { return _subdir; }
    /**
     * The directory that defines the source root of the library.
     */
@@ -97,7 +110,11 @@
     * A `create_archive_plan` object, or `nullopt`, depending on if this library has compiled
     * components
     */
    auto& create_archive() const noexcept { return _create_archive; }
    auto& archive_plan() const noexcept { return _create_archive; }
    /**
     * The template rendering plans for this library.
     */
    auto& templates() const noexcept { return _templates; }
    /**
     * The executables that should be created by this library
     */
@@ -110,6 +127,12 @@
     * The library identifiers that are linked by this library
     */
    auto& links() const noexcept { return _lib.manifest().links; }
    /**
     * The path to the directory that should be added to the #include search
     * path for this library, relative to the build root. Returns `nullopt` if
     * this library has no generated headers.
     */
    std::optional<fs::path> generated_include_dir() const noexcept;
    /**
     * Named constructor: Create a new `library_plan` automatically from some build-time parameters.
@@ -129,4 +152,4 @@
                                   std::optional<std::string_view> qual_name);
};
} // namespace dds
} // namespace dds
@@ -0,0 +1,193 @@
#include <dds/build/plan/template.hpp>
#include <dds/error/errors.hpp>
#include <dds/library/root.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/string.hpp>
#include <ctre.hpp>
#include <semester/json.hpp>
#include <string>
#include <string_view>
using namespace dds;
using json_data = semester::basic_data<semester::json_traits<std::allocator<void>>>;
namespace {
static constexpr ctll::fixed_string IDENT_RE{"([_a-zA-Z]\\w*)(.*)"};
std::string_view skip(std::string_view in) {
    auto nspace_pos = in.find_first_not_of(" \t\n\r\f");
    in = in.substr(nspace_pos);
    if (starts_with(in, "/*")) {
        // It's a block comment. Find the block-end marker.
        auto block_end = in.find("*/");
        if (block_end == in.npos) {
            throw_user_error<errc::template_error>("Unterminated block comment");
        }
        in = in.substr(block_end + 2);
        // Recursively skip some more
        return skip(in);
    }
    if (starts_with(in, "//")) {
    more:
        // It's a line comment. Find the next newline that is not a line continuation.
        auto cn_nl = in.find("\\\n");
        auto nl = in.find("\n");
        if (cn_nl < nl) {
            // The next newline is escaped, so the comment continues. Keep looking.
            in = in.substr(nl + 1);
            goto more;
        }
        if (nl == in.npos) {
            // We've reached the end. Okay.
            return in.substr(nl);
        }
    }
    // Not a comment, and not whitespace. Okay.
    return in;
}
std::string stringify(const json_data& dat) {
    if (dat.is_bool()) {
        return dat.as_bool() ? "true" : "false";
    } else if (dat.is_double()) {
        return std::to_string(dat.as_double());
    } else if (dat.is_null()) {
        return "nullptr";
    } else if (dat.is_string()) {
        /// XXX: This probably isn't quite enough sanitization for edge cases.
        auto str = dat.as_string();
        str = replace(str, "\n", "\\n");
        str = replace(str, "\"", "\\\"");
        return "\"" + str + "\"";
    } else {
        throw_user_error<errc::template_error>("Cannot render un-stringable data type");
    }
}
std::pair<std::string, std::string_view> eval_expr_tail(std::string_view in, const json_data& dat) {
    in = skip(in);
    if (starts_with(in, ".")) {
        // Accessing a subproperty of the data
        in.remove_prefix(1);
        in = skip(in);
        // We _must_ see an identifier
        auto [is_ident, ident, tail] = ctre::match<IDENT_RE>(in);
        if (!is_ident) {
            throw_user_error<errc::template_error>("Expected identifier following dot `.`");
        }
        if (!dat.is_mapping()) {
            throw_user_error<errc::template_error>("Cannot use dot `.` on non-mapping object");
        }
        auto& map = dat.as_mapping();
        auto found = map.find(ident.to_view());
        if (found == map.end()) {
            throw_user_error<errc::template_error>("No subproperty '{}'", ident.to_view());
        }
        return eval_expr_tail(tail, found->second);
    }
    return {stringify(dat), in};
}
std::pair<std::string, std::string_view> eval_primary_expr(std::string_view in,
                                                           const json_data& dat) {
    in = skip(in);
    if (in.empty()) {
        throw_user_error<errc::template_error>("Expected primary expression");
    }
    if (in.front() == '(') {
        in = in.substr(1);
        auto [ret, tail] = eval_primary_expr(in, dat);
        if (!starts_with(tail, ")")) {
            throw_user_error<errc::template_error>(
                "Expected closing parenthesis `)` following expression");
        }
        return {ret, tail.substr(1)};
    }
    auto [is_ident, ident, tail_1] = ctre::match<IDENT_RE>(in);
    if (is_ident) {
        auto& map = dat.as_mapping();
        auto found = map.find(ident.to_view());
        if (found == map.end()) {
            throw_user_error<errc::template_error>("Unknown top-level identifier '{}'",
                                                   ident.to_view());
        }
        return eval_expr_tail(tail_1, found->second);
    }
    return {"nope", in};
}
std::string render_template(std::string_view tmpl, const library_root& lib) {
    std::string acc;
    std::string_view MARKER_STRING = "__dds";
    // Fill out a data structure that will be exposed to the template
    json_data dat = json_data::mapping_type({
        {
            "lib",
            json_data::mapping_type{
                {"name", lib.manifest().name},
                {"root", lib.path().string()},
            },
        },
    });
    while (!tmpl.empty()) {
        // Find the next marker in the template string
        auto next_marker = tmpl.find(MARKER_STRING);
        if (next_marker == tmpl.npos) {
            // We've reached the end of the template. Stop
            acc.append(tmpl);
            break;
        }
        // Append the string up to the next marker
        acc.append(tmpl.substr(0, next_marker));
        // Consume up to the next marker
        tmpl = tmpl.substr(next_marker + MARKER_STRING.size());
        auto next_not_space = tmpl.find_first_not_of(" \t");
        if (next_not_space == tmpl.npos || tmpl[next_not_space] != '(') {
            throw_user_error<errc::template_error>(
                "Expected `(` following `__dds` identifier in template file");
        }
        auto [inner, tail] = eval_primary_expr(tmpl, dat);
        acc.append(inner);
        tmpl = tail;
    }
    return acc;
}
} // namespace
void render_template_plan::render(build_env_ref env, const library_root& lib) const {
    auto content = slurp_file(_source.path);
    // Calculate the destination of the template rendering
    auto dest = env.output_root / _subdir / _source.relative_path();
    dest.replace_filename(dest.stem().stem().filename().string() + dest.extension().string());
    fs::create_directories(dest.parent_path());
    auto result = render_template(content, lib);
    if (fs::is_regular_file(dest)) {
        auto existing_content = slurp_file(dest);
        if (result == existing_content) {
            /// The content of the file has not changed. Do not write a file.
            return;
        }
    }
    auto ofile = open(dest, std::ios::binary | std::ios::out);
    ofile << result;
    ofile.close(); // Throw any exceptions while closing the file
}
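To make the marker syntax concrete (a hypothetical example, not part of the diff): a template line such as

    constexpr std::string_view lib_name = __dds(lib.name);

would, for a library named "my-lib", be rendered into the output header as

    constexpr std::string_view lib_name = "my-lib";

since eval_primary_expr() looks up `lib` in the data mapping built above, the `.name` tail selects the manifest name, and stringify() wraps string values in escaped double quotes.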
@@ -0,0 +1,38 @@
#pragma once
#include <dds/build/plan/base.hpp>
#include <dds/source/file.hpp>
#include <utility>
namespace dds {
class library_root;
class render_template_plan {
    /**
     * The source file that defines the config template
     */
    source_file _source;
    /**
     * The subdirectory in which the template should be rendered.
     */
    fs::path _subdir;
public:
    /**
     * Create a new instance
     * @param sf The source file of the template
     * @param subdir The subdirectory into which the template should render
     */
    render_template_plan(source_file sf, path_ref subdir)
        : _source(std::move(sf))
        , _subdir(subdir) {}
    /**
     * Render the template into its output directory
     */
    void render(build_env_ref, const library_root& owning_library) const;
};
} // namespace dds
@@ -43,6 +43,8 @@ enum class errc {
    invalid_lib_filesystem,
    invalid_pkg_filesystem,
    template_error,
};
std::string error_reference_of(errc) noexcept;
@@ -35,6 +35,10 @@ std::optional<source_kind> dds::infer_source_kind(path_ref p) noexcept {
    auto ext_found
        = std::lower_bound(header_exts.begin(), header_exts.end(), p.extension(), std::less<>());
    if (ext_found != header_exts.end() && *ext_found == p.extension()) {
        auto stem = p.stem();
        if (stem.extension() == ".config") {
            return source_kind::header_template;
        }
        return source_kind::header;
    }
@@ -44,11 +48,11 @@ std::optional<source_kind> dds::infer_source_kind(path_ref p) noexcept {
        return std::nullopt;
    }
    if (ends_with(p.stem().string(), ".test")) {
    if (p.stem().extension() == ".test") {
        return source_kind::test;
    }
    if (ends_with(p.stem().string(), ".main")) {
    if (p.stem().extension() == ".main") {
        return source_kind::app;
    }
@@ -9,6 +9,7 @@ namespace dds {
enum class source_kind {
    header,
    header_template,
    source,
    test,
    app,
@@ -17,11 +18,22 @@ enum class source_kind {
std::optional<source_kind> infer_source_kind(path_ref) noexcept;
struct source_file {
    fs::path path;
    fs::path basis_path;
    /**
     * The actual path to the file
     */
    fs::path path;
    /**
     * The path to the source root that contains the file in question
     */
    fs::path basis_path;
    /**
     * The kind of the source file
     */
    source_kind kind;
    static std::optional<source_file> from_path(path_ref path, path_ref base_path) noexcept;
    fs::path relative_path() const noexcept { return fs::relative(path, basis_path); }
};
using source_list = std::vector<source_file>;
@@ -0,0 +1,16 @@
#include <dds/source/file.hpp>
#include <catch2/catch.hpp>
using dds::source_kind;
TEST_CASE("Infer source kind") {
    using dds::infer_source_kind;
    auto k = infer_source_kind("foo.h");
    CHECK(k == source_kind::header);
    CHECK(infer_source_kind("foo.hpp") == source_kind::header);
    CHECK_FALSE(infer_source_kind("foo.txt")); // Not a source file extension
    CHECK(infer_source_kind("foo.hh") == source_kind::header);
    CHECK(infer_source_kind("foo.config.hpp") == source_kind::header_template);
}
@@ -0,0 +1,5 @@
#pragma once
#include <string>
int config_file_value = 42;
@@ -0,0 +1,5 @@
#include <info.hpp>
#include <cassert>
int main() { assert(config_file_value == 42); }
@@ -0,0 +1,3 @@
{
    "name": "test-library"
}
@@ -0,0 +1,5 @@
{
    "name": "test-simple",
    "version": "1.2.3-gamma",
    "namespace": "test"
}
@@ -0,0 +1,5 @@
#pragma once
#include <string_view>
std::string_view lib_name = __dds(lib.name);
@@ -0,0 +1,5 @@
#include <simple/config.hpp>
#include <cassert>
int main() { assert(lib_name == "test-library"); }
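Given the "test-library" name in the library.json5 fixture above, the rendered simple/config.hpp in the build tree should come out roughly as

    std::string_view lib_name = "test-library";

which is what this main() asserts at runtime.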
@@ -0,0 +1,26 @@
import pytest
from time import sleep
from tests import DDS, dds_fixture_conf_1


@dds_fixture_conf_1('copy_only')
def test_config_template(dds: DDS):
    generated_fpath = dds.build_dir / '__dds/gen/info.hpp'
    assert not generated_fpath.is_file()
    dds.build()
    assert generated_fpath.is_file()
    # Check that re-running the build does not rewrite the generated file: its
    # content has not changed, and re-generating it would invalidate the cache
    # and force a spurious rebuild.
    start_time = generated_fpath.stat().st_mtime
    sleep(0.1)  # Wait just long enough to register a new stamp time
    dds.build()
    new_time = generated_fpath.stat().st_mtime
    assert new_time == start_time


@dds_fixture_conf_1('simple')
def test_simple_substitution(dds: DDS):
    dds.build()
@@ -52,11 +52,11 @@ class DDS:
        full_cmd = itertools.chain([self.dds_exe], cmd)
        return proc.run(full_cmd, cwd=cwd or self.source_root)
    def run(self, cmd: proc.CommandLine, *,
            cwd: Path = None) -> subprocess.CompletedProcess:
    def run(self, cmd: proc.CommandLine, *, cwd: Path = None,
            check=True) -> subprocess.CompletedProcess:
        cmdline = list(proc.flatten_cmd(cmd))
        res = self.run_unchecked(cmd, cwd=cwd)
        if res.returncode != 0:
        if res.returncode != 0 and check:
            raise subprocess.CalledProcessError(
                res.returncode, [self.dds_exe] + cmdline, res.stdout)
        return res
@@ -86,18 +86,22 @@ class DDS:
              toolchain: str = None,
              apps: bool = True,
              warnings: bool = True,
              tests: bool = True) -> subprocess.CompletedProcess:
        return self.run([
            'build',
            f'--out={self.build_dir}',
            f'--toolchain={toolchain or self.default_builtin_toolchain}',
            f'--catalog={self.catalog_path}',
            f'--repo-dir={self.repo_dir}',
            ['--no-tests'] if not tests else [],
            ['--no-apps'] if not apps else [],
            ['--no-warnings'] if not warnings else [],
            self.project_dir_arg,
        ])
              tests: bool = True,
              check: bool = True) -> subprocess.CompletedProcess:
        return self.run(
            [
                'build',
                f'--out={self.build_dir}',
                f'--toolchain={toolchain or self.default_builtin_toolchain}',
                f'--catalog={self.catalog_path}',
                f'--repo-dir={self.repo_dir}',
                ['--no-tests'] if not tests else [],
                ['--no-apps'] if not apps else [],
                ['--no-warnings'] if not warnings else [],
                self.project_dir_arg,
            ],
            check=check,
        )
    def sdist_create(self) -> subprocess.CompletedProcess:
        return self.run([
@@ -167,6 +167,17 @@ packages = [
            'neo-concepts': '^0.2.1',
        }),
    ]),
    Package('ctre', [
        Version(
            '2.7.0',
            description=
            'A compile-time PCRE (almost) compatible regular expression matcher',
            remote=Git(
                'https://github.com/hanickadot/compile-time-regular-expressions.git',
                'v2.7',
                auto_lib='hanickadot/ctre',
            ))
    ]),
    many_versions(
        'spdlog',
        (