@@ -2,7 +2,5 @@ _build/ | |||
__pycache__/ | |||
.vscode/ | |||
.mypy_cache/ | |||
*.dsd/ | |||
_prebuilt/ | |||
.dds-repo-lock | |||
.pytest_cache |
@@ -1,4 +1,4 @@ | |||
#include <dds/build.hpp> | |||
#include <dds/build/builder.hpp> | |||
#include <dds/catalog/catalog.hpp> | |||
#include <dds/catalog/get.hpp> | |||
#include <dds/repo/repo.hpp> | |||
@@ -8,6 +8,8 @@ | |||
#include <dds/util/paths.hpp> | |||
#include <dds/util/signal.hpp> | |||
#include <range/v3/action/join.hpp> | |||
#include <range/v3/view/concat.hpp> | |||
#include <range/v3/view/group_by.hpp> | |||
#include <range/v3/view/transform.hpp> | |||
@@ -66,6 +68,15 @@ struct catalog_path_flag : path_flag { | |||
dds::catalog open() { return dds::catalog::open(Get()); } | |||
}; | |||
struct num_jobs_flag : args::ValueFlag<int> { | |||
num_jobs_flag(args::Group& cmd) | |||
: ValueFlag(cmd, | |||
"jobs", | |||
"Set the number of parallel jobs when compiling files", | |||
{"jobs", 'j'}, | |||
0) {} | |||
}; | |||
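A note on the default: the flag's value of 0 is passed through unchanged as `params.parallel_jobs`. A minimal sketch of how a zero job count is conventionally resolved to a concrete thread count (the helper name is hypothetical and not part of this patch):

#include <thread>

// Hypothetical helper: treat 0 (the flag's default) as "choose automatically".
inline int resolve_parallel_jobs(int requested) {
    return requested > 0 ? requested
                         : static_cast<int>(std::thread::hardware_concurrency());
}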
/** | |||
* Base class holds the actual argument parser | |||
*/ | |||
@@ -488,19 +499,13 @@ struct cli_build { | |||
args::Flag no_warnings{cmd, "no-warnings", "Disable build warnings", {"no-warnings"}}; | |||
toolchain_flag tc_filepath{cmd}; | |||
args::Flag export_{cmd, "export", "Generate a library export", {"export", 'E'}}; | |||
path_flag | |||
lm_index{cmd, | |||
"lm_index", | |||
"Path to an existing libman index from which to load deps (usually INDEX.lmi)", | |||
{"lm-index", 'I'}}; | |||
args::ValueFlag<int> num_jobs{cmd, | |||
"jobs", | |||
"Set the number of parallel jobs when compiling files", | |||
{"jobs", 'j'}, | |||
0}; | |||
num_jobs_flag n_jobs{cmd}; | |||
path_flag out{cmd, | |||
"out", | |||
@@ -510,26 +515,29 @@ struct cli_build { | |||
int run() { | |||
dds::build_params params; | |||
params.root = project.root.Get(); | |||
params.out_root = out.Get(); | |||
params.toolchain = tc_filepath.get_toolchain(); | |||
params.do_export = export_.Get(); | |||
params.build_tests = !no_tests.Get(); | |||
params.build_apps = !no_apps.Get(); | |||
params.enable_warnings = !no_warnings.Get(); | |||
params.parallel_jobs = num_jobs.Get(); | |||
params.out_root = out.Get(); | |||
params.toolchain = tc_filepath.get_toolchain(); | |||
params.parallel_jobs = n_jobs.Get(); | |||
dds::package_manifest man; | |||
const auto man_filepath = params.root / "package.dds"; | |||
const auto man_filepath = project.root.Get() / "package.dds"; | |||
if (exists(man_filepath)) { | |||
man = dds::package_manifest::load_from_file(man_filepath); | |||
} | |||
dds::builder bd; | |||
dds::sdist_build_params main_params; | |||
main_params.build_apps = !no_apps.Get(); | |||
main_params.enable_warnings = !no_warnings.Get(); | |||
main_params.run_tests = main_params.build_tests = !no_tests.Get(); | |||
bd.add(dds::sdist{man, project.root.Get()}, main_params); | |||
if (lm_index) { | |||
params.existing_lm_index = lm_index.Get(); | |||
} else { | |||
// Download and build dependencies | |||
// Build the dependencies | |||
auto cat = cat_path.open(); | |||
params.dep_sdists = dds::repository::with_repository( // | |||
auto cat = cat_path.open(); | |||
dds::repository::with_repository( // | |||
this->repo_path.Get(), | |||
dds::repo_flags::write_lock | dds::repo_flags::create_if_absent, | |||
[&](dds::repository repo) { | |||
@@ -544,17 +552,110 @@ struct cli_build { | |||
auto tsd = dds::get_package_sdist(*opt_pkg); | |||
repo.add_sdist(tsd.sdist, dds::if_exists::throw_exc); | |||
} | |||
auto sdist_ptr = repo.find(pk); | |||
assert(sdist_ptr); | |||
dds::sdist_build_params deps_params; | |||
deps_params.subdir | |||
= dds::fs::path("_deps") / sdist_ptr->manifest.pkg_id.to_string(); | |||
bd.add(*sdist_ptr, deps_params); | |||
} | |||
return deps // | |||
| ranges::views::transform([&](auto& id) { | |||
auto ptr = repo.find(id); | |||
assert(ptr); | |||
return *ptr; | |||
}) | |||
| ranges::to_vector; | |||
}); | |||
} | |||
dds::build(params, man); | |||
bd.build(params); | |||
return 0; | |||
} | |||
}; | |||
/* | |||
######## ## ## #### ## ######## ######## ######## ######## ###### | |||
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||
######## ## ## ## ## ## ## ####### ## ## ###### ######## ###### | |||
## ## ## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||
######## ####### #### ######## ######## ######## ######## ## ###### | |||
*/ | |||
struct cli_build_deps { | |||
cli_base& base; | |||
args::Command cmd{base.cmd_group, | |||
"build-deps", | |||
"Build a set of dependencies and emit a libman index"}; | |||
toolchain_flag tc{cmd}; | |||
repo_path_flag repo_path{cmd}; | |||
catalog_path_flag cat_path{cmd}; | |||
num_jobs_flag n_jobs{cmd}; | |||
args::ValueFlagList<dds::fs::path> deps_files{cmd, | |||
"deps-file", | |||
"Install dependencies from the named files", | |||
{"deps", 'd'}}; | |||
path_flag out_path{cmd, | |||
"out-path", | |||
"Directory where build results should be placed", | |||
{"out", 'o'}, | |||
dds::fs::current_path() / "_deps"}; | |||
path_flag lmi_path{cmd, | |||
"lmi-path", | |||
"Path to the output libman index file (INDEX.lmi)", | |||
{"lmi-path"}, | |||
dds::fs::current_path() / "INDEX.lmi"}; | |||
args::PositionalList<std::string> deps{cmd, "deps", "List of dependencies to install"}; | |||
int run() { | |||
dds::build_params params; | |||
params.out_root = out_path.Get(); | |||
params.toolchain = tc.get_toolchain(); | |||
params.parallel_jobs = n_jobs.Get(); | |||
params.emit_lmi = lmi_path.Get(); | |||
dds::builder bd; | |||
dds::sdist_build_params sdist_params; | |||
auto all_file_deps = deps_files.Get() // | |||
| ranges::views::transform([&](auto dep_fpath) { | |||
spdlog::info("Reading deps from {}", dep_fpath.string()); | |||
return dds::dependency_manifest::from_file(dep_fpath).dependencies; | |||
}) | |||
| ranges::actions::join; | |||
auto cmd_deps = ranges::views::transform(deps.Get(), [&](auto dep_str) { | |||
return dds::dependency::parse_depends_string(dep_str); | |||
}); | |||
auto all_deps = ranges::views::concat(all_file_deps, cmd_deps) | ranges::to_vector; | |||
auto cat = cat_path.open(); | |||
dds::repository::with_repository( // | |||
repo_path.Get(), | |||
dds::repo_flags::write_lock | dds::repo_flags::create_if_absent, | |||
[&](dds::repository repo) { | |||
// Download dependencies | |||
spdlog::info("Loading {} dependencies", all_deps.size()); | |||
auto deps = repo.solve(all_deps, cat); | |||
for (const dds::package_id& pk : deps) { | |||
auto exists = !!repo.find(pk); | |||
if (!exists) { | |||
spdlog::info("Download dependency: {}", pk.to_string()); | |||
auto opt_pkg = cat.get(pk); | |||
assert(opt_pkg); | |||
auto tsd = dds::get_package_sdist(*opt_pkg); | |||
repo.add_sdist(tsd.sdist, dds::if_exists::throw_exc); | |||
} | |||
auto sdist_ptr = repo.find(pk); | |||
assert(sdist_ptr); | |||
dds::sdist_build_params deps_params; | |||
deps_params.subdir = sdist_ptr->manifest.pkg_id.to_string(); | |||
spdlog::info("Dependency: {}", sdist_ptr->manifest.pkg_id.to_string()); | |||
bd.add(*sdist_ptr, deps_params); | |||
} | |||
}); | |||
bd.build(params); | |||
return 0; | |||
} | |||
}; | |||
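Putting the flags above together, an invocation of the new subcommand might look like the following (the toolchain spec matches the test defaults later in this diff; the catalog and repository paths are placeholders):

    dds build-deps --toolchain=:c++17:gcc-9 --catalog=<catalog-db> --repo-dir=<repo-dir> -d deps.dds "neo-sqlite3 ^0.2.2" --out=_deps --lmi-path=INDEX.lmi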
@@ -578,11 +679,12 @@ int main(int argc, char** argv) { | |||
spdlog::set_pattern("[%H:%M:%S] [%^%-5l%$] %v"); | |||
args::ArgumentParser parser("DDS - The drop-dead-simple library manager"); | |||
cli_base cli{parser}; | |||
cli_build build{cli}; | |||
cli_sdist sdist{cli}; | |||
cli_repo repo{cli}; | |||
cli_catalog catalog{cli}; | |||
cli_base cli{parser}; | |||
cli_build build{cli}; | |||
cli_sdist sdist{cli}; | |||
cli_repo repo{cli}; | |||
cli_catalog catalog{cli}; | |||
cli_build_deps build_deps{cli}; | |||
try { | |||
parser.ParseCLI(argc, argv); | |||
} catch (const args::Help&) { | |||
@@ -608,6 +710,8 @@ int main(int argc, char** argv) { | |||
return repo.run(); | |||
} else if (catalog.cmd) { | |||
return catalog.run(); | |||
} else if (build_deps.cmd) { | |||
return build_deps.run(); | |||
} else { | |||
assert(false); | |||
std::terminate(); |
@@ -1,343 +0,0 @@ | |||
#include "./build.hpp" | |||
#include <dds/build/plan/compile_exec.hpp> | |||
#include <dds/catch2_embedded.hpp> | |||
#include <dds/compdb.hpp> | |||
#include <dds/usage_reqs.hpp> | |||
#include <dds/util/algo.hpp> | |||
#include <dds/util/time.hpp> | |||
#include <libman/index.hpp> | |||
#include <libman/parse.hpp> | |||
#include <range/v3/algorithm/transform.hpp> | |||
#include <range/v3/range/conversion.hpp> | |||
#include <range/v3/view/transform.hpp> | |||
#include <spdlog/spdlog.h> | |||
#include <array> | |||
#include <map> | |||
#include <set> | |||
#include <stdexcept> | |||
using namespace dds; | |||
namespace { | |||
struct archive_failure : std::runtime_error { | |||
using runtime_error::runtime_error; | |||
}; | |||
void copy_headers(const fs::path& source, const fs::path& dest) { | |||
for (fs::path file : fs::recursive_directory_iterator(source)) { | |||
if (infer_source_kind(file) != source_kind::header) { | |||
continue; | |||
} | |||
auto relpath = fs::relative(file, source); | |||
auto dest_fpath = dest / relpath; | |||
spdlog::info("Export header: {}", relpath.string()); | |||
fs::create_directories(dest_fpath.parent_path()); | |||
fs::copy_file(file, dest_fpath); | |||
} | |||
} | |||
fs::path export_project_library(const library_plan& lib, build_env_ref env, path_ref export_root) { | |||
auto lib_out_root = export_root / lib.name(); | |||
auto header_root = lib.source_root() / "include"; | |||
if (!fs::is_directory(header_root)) { | |||
header_root = lib.source_root() / "src"; | |||
} | |||
auto lml_path = export_root / fmt::format("{}.lml", lib.name()); | |||
auto lml_parent_dir = lml_path.parent_path(); | |||
std::vector<lm::pair> pairs; | |||
pairs.emplace_back("Type", "Library"); | |||
pairs.emplace_back("Name", lib.name()); | |||
if (fs::is_directory(header_root)) { | |||
auto header_dest = lib_out_root / "include"; | |||
copy_headers(header_root, header_dest); | |||
pairs.emplace_back("Include-Path", fs::relative(header_dest, lml_parent_dir).string()); | |||
} | |||
if (lib.create_archive()) { | |||
auto ar_path = lib.create_archive()->calc_archive_file_path(env); | |||
auto ar_dest = lib_out_root / ar_path.filename(); | |||
fs::create_directories(ar_dest.parent_path()); | |||
fs::copy_file(ar_path, ar_dest); | |||
pairs.emplace_back("Path", fs::relative(ar_dest, lml_parent_dir).string()); | |||
} | |||
for (const auto& use : lib.uses()) { | |||
pairs.emplace_back("Uses", fmt::format("{}/{}", use.namespace_, use.name)); | |||
} | |||
for (const auto& links : lib.links()) { | |||
pairs.emplace_back("Links", fmt::format("{}/{}", links.namespace_, links.name)); | |||
} | |||
lm::write_pairs(lml_path, pairs); | |||
return lml_path; | |||
} | |||
void export_project(const package_plan& pkg, build_env_ref env) { | |||
if (pkg.name().empty()) { | |||
throw compile_failure( | |||
fmt::format("Cannot generate an export when the package has no name (Provide a " | |||
"package.dds with a `Name` field)")); | |||
} | |||
const auto export_root = env.output_root / fmt::format("{}.lpk", pkg.name()); | |||
spdlog::info("Generating project export: {}", export_root.string()); | |||
fs::remove_all(export_root); | |||
fs::create_directories(export_root); | |||
std::vector<lm::pair> pairs; | |||
pairs.emplace_back("Type", "Package"); | |||
pairs.emplace_back("Name", pkg.name()); | |||
pairs.emplace_back("Namespace", pkg.namespace_()); | |||
for (const auto& lib : pkg.libraries()) { | |||
export_project_library(lib, env, export_root); | |||
} | |||
lm::write_pairs(export_root / "package.lmp", pairs); | |||
} | |||
usage_requirement_map | |||
load_usage_requirements(path_ref project_root, path_ref build_root, path_ref user_lm_index) { | |||
fs::path lm_index_path = user_lm_index; | |||
for (auto cand : {project_root / "INDEX.lmi", build_root / "INDEX.lmi"}) { | |||
if (fs::exists(lm_index_path) || !user_lm_index.empty()) { | |||
break; | |||
} | |||
lm_index_path = cand; | |||
} | |||
if (!fs::exists(lm_index_path)) { | |||
spdlog::warn("No INDEX.lmi found, so we won't be able to load/use any dependencies"); | |||
return {}; | |||
} | |||
lm::index idx = lm::index::from_file(lm_index_path); | |||
return usage_requirement_map::from_lm_index(idx); | |||
} | |||
void prepare_catch2_driver(library_build_params& lib_params, | |||
test_lib test_driver, | |||
const build_params& params, | |||
build_env_ref env_) { | |||
fs::path test_include_root = params.out_root / "_catch-2.10.2"; | |||
lib_params.test_include_dirs.emplace_back(test_include_root); | |||
auto catch_hpp = test_include_root / "catch2/catch.hpp"; | |||
if (!fs::exists(catch_hpp)) { | |||
fs::create_directories(catch_hpp.parent_path()); | |||
auto hpp_strm = open(catch_hpp, std::ios::out | std::ios::binary); | |||
hpp_strm.write(detail::catch2_embedded_single_header_str, | |||
std::strlen(detail::catch2_embedded_single_header_str)); | |||
hpp_strm.close(); | |||
} | |||
if (test_driver == test_lib::catch_) { | |||
// Don't generate a test library helper | |||
return; | |||
} | |||
std::string fname; | |||
std::string definition; | |||
if (test_driver == test_lib::catch_main) { | |||
fname = "catch-main.cpp"; | |||
definition = "CATCH_CONFIG_MAIN"; | |||
} else { | |||
assert(false && "Impossible: Invalid `test_driver` for catch library"); | |||
std::terminate(); | |||
} | |||
shared_compile_file_rules comp_rules; | |||
comp_rules.defs().push_back(definition); | |||
auto catch_cpp = test_include_root / "catch2" / fname; | |||
auto cpp_strm = open(catch_cpp, std::ios::out | std::ios::binary); | |||
cpp_strm << "#include \"./catch.hpp\"\n"; | |||
cpp_strm.close(); | |||
auto sf = source_file::from_path(catch_cpp, test_include_root); | |||
assert(sf.has_value()); | |||
compile_file_plan plan{comp_rules, std::move(*sf), "Catch2", "v1"}; | |||
build_env env2 = env_; | |||
env2.output_root /= "_test-driver"; | |||
auto obj_file = plan.calc_object_file_path(env2); | |||
if (!fs::exists(obj_file)) { | |||
spdlog::info("Compiling Catch2 test driver (This will only happen once)..."); | |||
compile_all(std::array{plan}, env2, 1); | |||
} | |||
lib_params.test_link_files.push_back(obj_file); | |||
} | |||
void prepare_test_driver(library_build_params& lib_params, | |||
const build_params& params, | |||
const package_manifest& man, | |||
build_env_ref env) { | |||
auto& test_driver = *man.test_driver; | |||
if (test_driver == test_lib::catch_ || test_driver == test_lib::catch_main) { | |||
prepare_catch2_driver(lib_params, test_driver, params, env); | |||
} else { | |||
assert(false && "Unreachable"); | |||
std::terminate(); | |||
} | |||
} | |||
void add_ureqs(usage_requirement_map& ureqs, | |||
const sdist& sd, | |||
const library& lib, | |||
const library_plan& lib_plan, | |||
build_env_ref env) { | |||
lm::library& reqs = ureqs.add(sd.manifest.namespace_, lib.manifest().name); | |||
reqs.include_paths.push_back(lib.public_include_dir()); | |||
reqs.name = lib.manifest().name; | |||
reqs.uses = lib.manifest().uses; | |||
reqs.links = lib.manifest().links; | |||
if (lib_plan.create_archive()) { | |||
reqs.linkable_path = lib_plan.create_archive()->calc_archive_file_path(env); | |||
} | |||
// TODO: preprocessor definitions | |||
} | |||
using sdist_index_type = std::map<std::string, std::reference_wrapper<const sdist>>; | |||
using sdist_names = std::set<std::string>; | |||
void add_sdist_to_build(build_plan& plan, | |||
const sdist& sd, | |||
const sdist_index_type& sd_idx, | |||
build_env_ref env, | |||
usage_requirement_map& ureqs, | |||
sdist_names& already_added) { | |||
if (already_added.find(sd.manifest.pkg_id.name) != already_added.end()) { | |||
// This one has already been added | |||
return; | |||
} | |||
spdlog::debug("Adding dependent build: {}", sd.manifest.pkg_id.name); | |||
// Ensure that every dependency is loaded up first | |||
for (const auto& dep : sd.manifest.dependencies) { | |||
auto other = sd_idx.find(dep.name); | |||
assert(other != sd_idx.end() | |||
&& "Failed to load a transitive dependency shortly after initializing them. What?"); | |||
add_sdist_to_build(plan, other->second, sd_idx, env, ureqs, already_added); | |||
} | |||
// Record that we have been processed | |||
already_added.insert(sd.manifest.pkg_id.name); | |||
// Finally, actually add the package: | |||
auto& pkg = plan.add_package(package_plan(sd.manifest.pkg_id.name, sd.manifest.namespace_)); | |||
auto libs = collect_libraries(sd.path); | |||
for (const auto& lib : libs) { | |||
shared_compile_file_rules comp_rules = lib.base_compile_rules(); | |||
library_build_params lib_params; | |||
lib_params.out_subdir = fs::path("deps") / sd.manifest.pkg_id.name; | |||
auto lib_plan = library_plan::create(lib, lib_params, ureqs); | |||
// Create usage requirements for this library. | |||
add_ureqs(ureqs, sd, lib, lib_plan, env); | |||
// Add it to the plan: | |||
pkg.add_library(std::move(lib_plan)); | |||
} | |||
} | |||
void add_deps_to_build(build_plan& plan, | |||
usage_requirement_map& ureqs, | |||
const build_params& params, | |||
build_env_ref env) { | |||
auto sd_idx = params.dep_sdists // | |||
| ranges::views::transform([](const auto& sd) { | |||
return std::pair(sd.manifest.pkg_id.name, std::cref(sd)); | |||
}) // | |||
| ranges::to<sdist_index_type>(); | |||
sdist_names already_added; | |||
for (const sdist& sd : params.dep_sdists) { | |||
add_sdist_to_build(plan, sd, sd_idx, env, ureqs, already_added); | |||
} | |||
} | |||
} // namespace | |||
void dds::build(const build_params& params, const package_manifest& man) { | |||
fs::create_directories(params.out_root); | |||
auto db = database::open(params.out_root / ".dds.db"); | |||
dds::build_env env{params.toolchain, params.out_root, db}; | |||
// The build plan we will fill out: | |||
build_plan plan; | |||
// Collect libraries for the current project | |||
auto libs = collect_libraries(params.root); | |||
if (!libs.size()) { | |||
spdlog::warn("Nothing found to build!"); | |||
return; | |||
} | |||
usage_requirement_map ureqs; | |||
if (params.existing_lm_index) { | |||
ureqs = load_usage_requirements(params.root, params.out_root, *params.existing_lm_index); | |||
} else { | |||
add_deps_to_build(plan, ureqs, params, env); | |||
} | |||
// Initialize the build plan for this project. | |||
auto& pkg = plan.add_package(package_plan(man.pkg_id.name, man.namespace_)); | |||
// assert(false && "Not ready yet!"); | |||
library_build_params lib_params; | |||
lib_params.build_tests = params.build_tests; | |||
lib_params.build_apps = params.build_apps; | |||
lib_params.enable_warnings = params.enable_warnings; | |||
if (man.test_driver) { | |||
prepare_test_driver(lib_params, params, man, env); | |||
} | |||
for (const library& lib : libs) { | |||
lib_params.out_subdir = fs::relative(lib.path(), params.root); | |||
pkg.add_library(library_plan::create(lib, lib_params, ureqs)); | |||
} | |||
if (params.generate_compdb) { | |||
generate_compdb(plan, env); | |||
} | |||
dds::stopwatch sw; | |||
plan.compile_all(env, params.parallel_jobs); | |||
spdlog::info("Compilation completed in {:n}ms", sw.elapsed_ms().count()); | |||
sw.reset(); | |||
plan.archive_all(env, params.parallel_jobs); | |||
spdlog::info("Archiving completed in {:n}ms", sw.elapsed_ms().count()); | |||
if (params.build_apps || params.build_tests) { | |||
sw.reset(); | |||
plan.link_all(env, params.parallel_jobs); | |||
spdlog::info("Runtime binary linking completed in {:n}ms", sw.elapsed_ms().count()); | |||
} | |||
if (params.build_tests) { | |||
sw.reset(); | |||
auto test_failures = plan.run_all_tests(env, params.parallel_jobs); | |||
spdlog::info("Test execution finished in {:n}ms", sw.elapsed_ms().count()); | |||
for (auto& failures : test_failures) { | |||
spdlog::error("Test {} failed! Output:\n{}[dds - test output end]", | |||
failures.executable_path.string(), | |||
failures.output); | |||
} | |||
if (!test_failures.empty()) { | |||
throw compile_failure("Test failures during the build!"); | |||
} | |||
} | |||
if (params.do_export) { | |||
export_project(pkg, env); | |||
} | |||
} |
@@ -1,10 +0,0 @@ | |||
#pragma once | |||
#include <dds/build/params.hpp> | |||
#include <dds/package/manifest.hpp> | |||
namespace dds { | |||
void build(const build_params&, const package_manifest& man); | |||
} // namespace dds |
@@ -0,0 +1,243 @@ | |||
#include "./builder.hpp" | |||
#include <dds/build/plan/compile_exec.hpp> | |||
#include <dds/build/plan/full.hpp> | |||
#include <dds/catch2_embedded.hpp> | |||
#include <dds/compdb.hpp> | |||
#include <dds/usage_reqs.hpp> | |||
#include <dds/util/time.hpp> | |||
#include <spdlog/spdlog.h> | |||
#include <array> | |||
#include <set> | |||
using namespace dds; | |||
namespace { | |||
struct state { | |||
bool generate_catch2_header = false; | |||
bool generate_catch2_main = false; | |||
}; | |||
lm::library | |||
prepare_catch2_driver(test_lib test_driver, const build_params& params, build_env_ref env_) { | |||
fs::path test_include_root = params.out_root / "_catch-2.10.2"; | |||
lm::library ret_lib; | |||
auto catch_hpp = test_include_root / "catch2/catch.hpp"; | |||
if (!fs::exists(catch_hpp)) { | |||
fs::create_directories(catch_hpp.parent_path()); | |||
auto hpp_strm = open(catch_hpp, std::ios::out | std::ios::binary); | |||
hpp_strm.write(detail::catch2_embedded_single_header_str, | |||
std::strlen(detail::catch2_embedded_single_header_str)); | |||
hpp_strm.close(); | |||
} | |||
ret_lib.include_paths.push_back(test_include_root); | |||
if (test_driver == test_lib::catch_) { | |||
// Don't compile a library helper | |||
return ret_lib; | |||
} | |||
std::string fname; | |||
std::string definition; | |||
if (test_driver == test_lib::catch_main) { | |||
fname = "catch-main.cpp"; | |||
definition = "CATCH_CONFIG_MAIN"; | |||
} else { | |||
assert(false && "Impossible: Invalid `test_driver` for catch library"); | |||
std::terminate(); | |||
} | |||
shared_compile_file_rules comp_rules; | |||
comp_rules.defs().push_back(definition); | |||
auto catch_cpp = test_include_root / "catch2" / fname; | |||
auto cpp_strm = open(catch_cpp, std::ios::out | std::ios::binary); | |||
cpp_strm << "#include \"./catch.hpp\"\n"; | |||
cpp_strm.close(); | |||
auto sf = source_file::from_path(catch_cpp, test_include_root); | |||
assert(sf.has_value()); | |||
compile_file_plan plan{comp_rules, std::move(*sf), "Catch2", "v1"}; | |||
build_env env2 = env_; | |||
env2.output_root /= "_test-driver"; | |||
auto obj_file = plan.calc_object_file_path(env2); | |||
if (!fs::exists(obj_file)) { | |||
spdlog::info("Compiling Catch2 test driver (This will only happen once)..."); | |||
compile_all(std::array{plan}, env2, 1); | |||
} | |||
ret_lib.linkable_path = obj_file; | |||
return ret_lib; | |||
} | |||
lm::library | |||
prepare_test_driver(const build_params& params, test_lib test_driver, build_env_ref env) { | |||
if (test_driver == test_lib::catch_ || test_driver == test_lib::catch_main) { | |||
return prepare_catch2_driver(test_driver, params, env); | |||
} else { | |||
assert(false && "Unreachable"); | |||
std::terminate(); | |||
} | |||
} | |||
library_plan prepare_library(state& st, | |||
const sdist_target& sdt, | |||
const library& lib, | |||
const package_manifest& pkg_man) { | |||
library_build_params lp; | |||
lp.out_subdir = sdt.params.subdir; | |||
lp.build_apps = sdt.params.build_apps; | |||
lp.build_tests = sdt.params.build_tests; | |||
lp.enable_warnings = sdt.params.enable_warnings; | |||
if (lp.build_tests) { | |||
if (pkg_man.test_driver == test_lib::catch_ | |||
|| pkg_man.test_driver == test_lib::catch_main) { | |||
lp.test_uses.push_back({".dds", "Catch"}); | |||
st.generate_catch2_header = true; | |||
if (pkg_man.test_driver == test_lib::catch_main) { | |||
lp.test_uses.push_back({".dds", "Catch-Main"}); | |||
st.generate_catch2_main = true; | |||
} | |||
} | |||
} | |||
return library_plan::create(lib, std::move(lp)); | |||
} | |||
package_plan prepare_one(state& st, const sdist_target& sd) { | |||
package_plan pkg{sd.sd.manifest.pkg_id.name, sd.sd.manifest.namespace_}; | |||
auto libs = collect_libraries(sd.sd.path); | |||
for (const auto& lib : libs) { | |||
pkg.add_library(prepare_library(st, sd, lib, sd.sd.manifest)); | |||
} | |||
return pkg; | |||
} | |||
build_plan prepare_build_plan(state& st, const std::vector<sdist_target>& sdists) { | |||
build_plan plan; | |||
for (const auto& sd_target : sdists) { | |||
plan.add_package(prepare_one(st, sd_target)); | |||
} | |||
return plan; | |||
} | |||
usage_requirement_map | |||
prepare_ureqs(const build_plan& plan, const toolchain& toolchain, path_ref out_root) { | |||
usage_requirement_map ureqs; | |||
for (const auto& pkg : plan.packages()) { | |||
for (const auto& lib : pkg.libraries()) { | |||
auto& lib_reqs = ureqs.add(pkg.namespace_(), lib.name()); | |||
lib_reqs.include_paths.push_back(lib.library_().public_include_dir()); | |||
lib_reqs.uses = lib.library_().manifest().uses; | |||
lib_reqs.links = lib.library_().manifest().links; | |||
if (const auto& arc = lib.create_archive()) { | |||
lib_reqs.linkable_path = out_root / arc->calc_archive_file_path(toolchain); | |||
} | |||
} | |||
} | |||
return ureqs; | |||
} | |||
void write_lml(build_env_ref env, const library_plan& lib, path_ref lml_path) { | |||
fs::create_directories(lml_path.parent_path()); | |||
auto out = open(lml_path, std::ios::binary | std::ios::out); | |||
out << "Type: Library\n" | |||
<< "Name: " << lib.name() << '\n' | |||
<< "Include-Path: " << lib.library_().public_include_dir().generic_string() << '\n'; | |||
for (auto&& use : lib.uses()) { | |||
out << "Uses: " << use.namespace_ << "/" << use.name << '\n'; | |||
} | |||
for (auto&& link : lib.links()) { | |||
out << "Links: " << link.namespace_ << "/" << link.name << '\n'; | |||
} | |||
if (auto&& arc = lib.create_archive()) { | |||
out << "Path: " | |||
<< (env.output_root / arc->calc_archive_file_path(env.toolchain)).generic_string() | |||
<< '\n'; | |||
} | |||
} | |||
void write_lmp(build_env_ref env, const package_plan& pkg, path_ref lmp_path) { | |||
fs::create_directories(lmp_path.parent_path()); | |||
auto out = open(lmp_path, std::ios::binary | std::ios::out); | |||
out << "Type: Package\n" | |||
<< "Name: " << pkg.name() << '\n' | |||
<< "Namespace: " << pkg.namespace_() << '\n'; | |||
for (const auto& lib : pkg.libraries()) { | |||
auto lml_path = lmp_path.parent_path() / pkg.namespace_() / (lib.name() + ".lml"); | |||
write_lml(env, lib, lml_path); | |||
out << "Library: " << lml_path.generic_string() << '\n'; | |||
} | |||
} | |||
void write_lmi(build_env_ref env, const build_plan& plan, path_ref base_dir, path_ref lmi_path) { | |||
fs::create_directories(lmi_path.parent_path()); | |||
auto out = open(lmi_path, std::ios::binary | std::ios::out); | |||
out << "Type: Index\n"; | |||
for (const auto& pkg : plan.packages()) { | |||
auto lmp_path = base_dir / "_libman" / (pkg.name() + ".lmp"); | |||
write_lmp(env, pkg, lmp_path); | |||
out << "Package: " << pkg.name() << "; " << lmp_path.generic_string() << '\n'; | |||
} | |||
} | |||
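For reference, the three writers above emit plain libman key/value files. Using the `neo-sqlite3` package from the test catalog later in this diff as an illustration (the `#` lines are annotations, not file syntax, and concrete paths are abbreviated):

    # INDEX.lmi (written at --lmi-path)
    Type: Index
    Package: neo-sqlite3; <out>/_libman/neo-sqlite3.lmp

    # <out>/_libman/neo-sqlite3.lmp
    Type: Package
    Name: neo-sqlite3
    Namespace: neo
    Library: <out>/_libman/neo/sqlite3.lml

    # <out>/_libman/neo/sqlite3.lml
    Type: Library
    Name: sqlite3
    Include-Path: <public include dir>
    Path: <static archive>    (only written when the library has an archive)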
} // namespace | |||
void builder::build(const build_params& params) const { | |||
fs::create_directories(params.out_root); | |||
auto db = database::open(params.out_root / ".dds.db"); | |||
state st; | |||
auto plan = prepare_build_plan(st, _sdists); | |||
auto ureqs = prepare_ureqs(plan, params.toolchain, params.out_root); | |||
build_env env{params.toolchain, params.out_root, db, ureqs}; | |||
if (st.generate_catch2_main) { | |||
auto catch_lib = prepare_test_driver(params, test_lib::catch_main, env); | |||
ureqs.add(".dds", "Catch-Main") = catch_lib; | |||
} | |||
if (st.generate_catch2_header) { | |||
auto catch_lib = prepare_test_driver(params, test_lib::catch_, env); | |||
ureqs.add(".dds", "Catch") = catch_lib; | |||
} | |||
if (params.generate_compdb) { | |||
generate_compdb(plan, env); | |||
} | |||
dds::stopwatch sw; | |||
plan.compile_all(env, params.parallel_jobs); | |||
spdlog::info("Compilation completed in {:n}ms", sw.elapsed_ms().count()); | |||
sw.reset(); | |||
plan.archive_all(env, params.parallel_jobs); | |||
spdlog::info("Archiving completed in {:n}ms", sw.elapsed_ms().count()); | |||
sw.reset(); | |||
plan.link_all(env, params.parallel_jobs); | |||
spdlog::info("Runtime binary linking completed in {:n}ms", sw.elapsed_ms().count()); | |||
sw.reset(); | |||
auto test_failures = plan.run_all_tests(env, params.parallel_jobs); | |||
spdlog::info("Test execution finished in {:n}ms", sw.elapsed_ms().count()); | |||
for (auto& failures : test_failures) { | |||
spdlog::error("Test {} failed! Output:\n{}[dds - test output end]", | |||
failures.executable_path.string(), | |||
failures.output); | |||
} | |||
if (!test_failures.empty()) { | |||
throw compile_failure("Test failures during the build!"); | |||
} | |||
if (params.emit_lmi) { | |||
write_lmi(env, plan, params.out_root, *params.emit_lmi); | |||
} | |||
} |
@@ -0,0 +1,57 @@ | |||
#pragma once | |||
#include <dds/build/params.hpp> | |||
#include <dds/source/dist.hpp> | |||
#include <cassert> | |||
#include <map> | |||
namespace dds { | |||
/** | |||
* Parameters for building an individual source distribution as part of a larger build plan. | |||
*/ | |||
struct sdist_build_params { | |||
/// The subdirectory of the build output root in which this source distribution should be built | |||
fs::path subdir; | |||
/// Whether to build tests | |||
bool build_tests = false; | |||
/// Whether to run tests | |||
bool run_tests = false; | |||
/// Whether to build applications | |||
bool build_apps = false; | |||
/// Whether to enable build warnings | |||
bool enable_warnings = false; | |||
}; | |||
/** | |||
* Just a pairing of an sdist to the parameters that are used to build it. | |||
*/ | |||
struct sdist_target { | |||
/// The source distribution | |||
sdist sd; | |||
/// The build parameters thereof | |||
sdist_build_params params; | |||
}; | |||
/** | |||
* A builder object. Source distributions are added to the builder, and then they are all built in parallel via `build()`. | |||
*/ | |||
class builder { | |||
/// Source distributions that have been added | |||
std::vector<sdist_target> _sdists; | |||
public: | |||
/// Add more source distributions | |||
void add(sdist sd) { add(std::move(sd), sdist_build_params()); } | |||
void add(sdist sd, sdist_build_params params) { | |||
_sdists.push_back({std::move(sd), std::move(params)}); | |||
} | |||
/** | |||
* Execute the build | |||
*/ | |||
void build(const build_params& params) const; | |||
}; | |||
} // namespace dds |
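A condensed sketch of how this interface is driven, mirroring `cli_build::run()` earlier in this diff (manifest loading, dependency solving, and toolchain setup are elided; the package id used in the subdirectory is a placeholder):

#include <dds/build/builder.hpp>

void build_sketch(dds::sdist main_project, dds::sdist dependency, const dds::build_params& params) {
    dds::builder bd;

    // The project being built: tests, apps, and warnings on.
    dds::sdist_build_params main_params;
    main_params.run_tests = main_params.build_tests = true;
    main_params.build_apps      = true;
    main_params.enable_warnings = true;
    bd.add(std::move(main_project), main_params);

    // A solved dependency: built quietly into a per-package subdirectory.
    dds::sdist_build_params dep_params;
    dep_params.subdir = dds::fs::path("_deps") / "some-pkg@1.0.0";  // placeholder id
    bd.add(std::move(dependency), dep_params);

    bd.build(params);  // compile, archive, link, run tests, and optionally emit an LMI
}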
@@ -9,15 +9,10 @@ | |||
namespace dds { | |||
struct build_params { | |||
fs::path root; | |||
fs::path out_root; | |||
std::optional<fs::path> existing_lm_index; | |||
std::optional<fs::path> emit_lmi; | |||
dds::toolchain toolchain; | |||
std::vector<sdist> dep_sdists; | |||
bool do_export = false; | |||
bool build_tests = false; | |||
bool enable_warnings = false; | |||
bool build_apps = false; | |||
bool generate_compdb = true; | |||
int parallel_jobs = 0; | |||
}; |
@@ -8,9 +8,8 @@ | |||
using namespace dds; | |||
fs::path create_archive_plan::calc_archive_file_path(const build_env& env) const noexcept { | |||
return env.output_root / _subdir | |||
/ fmt::format("{}{}{}", "lib", _name, env.toolchain.archive_suffix()); | |||
fs::path create_archive_plan::calc_archive_file_path(const toolchain& tc) const noexcept { | |||
return _subdir / fmt::format("{}{}{}", "lib", _name, tc.archive_suffix()); | |||
} | |||
void create_archive_plan::archive(const build_env& env) const { | |||
@@ -23,7 +22,7 @@ void create_archive_plan::archive(const build_env& env) const { | |||
// Build up the archive command | |||
archive_spec ar; | |||
ar.input_files = std::move(objects); | |||
ar.out_path = calc_archive_file_path(env); | |||
ar.out_path = env.output_root / calc_archive_file_path(env.toolchain); | |||
auto ar_cmd = env.toolchain.create_archive_command(ar); | |||
// `out_relpath` is purely for the benefit of the user to have a short name |
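The new signature makes the plan report a location relative to the build output root, so the same value can be reused when writing usage requirements; callers that need the on-disk file compose it themselves, as the archive step above now does:

    // Sketch of the call-site pattern:
    fs::path on_disk = env.output_root / plan.calc_archive_file_path(env.toolchain);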
@@ -43,11 +43,11 @@ public: | |||
const std::string& name() const noexcept { return _name; } | |||
/** | |||
* Calculate the absolute path where the generated archive library file will | |||
* be generated after execution. | |||
* @param env The build environment for the archival. | |||
* Calculate the path relative to the build output root where the static library archive will be | |||
* placed upon creation. | |||
* @param tc The toolchain that will be used | |||
*/ | |||
fs::path calc_archive_file_path(build_env_ref env) const noexcept; | |||
fs::path calc_archive_file_path(const toolchain& tc) const noexcept; | |||
/** | |||
* Get the compilation plans for this library. |
@@ -2,6 +2,7 @@ | |||
#include <dds/db/database.hpp> | |||
#include <dds/toolchain/toolchain.hpp> | |||
#include <dds/usage_reqs.hpp> | |||
#include <dds/util/fs.hpp> | |||
namespace dds { | |||
@@ -10,6 +11,8 @@ struct build_env { | |||
dds::toolchain toolchain; | |||
fs::path output_root; | |||
database& db; | |||
const usage_requirement_map& ureqs; | |||
}; | |||
using build_env_ref = const build_env&; |
@@ -16,6 +16,9 @@ compile_command_info compile_file_plan::generate_compile_command(build_env_ref e | |||
compile_file_spec spec{_source.path, calc_object_file_path(env)}; | |||
spec.enable_warnings = _rules.enable_warnings(); | |||
extend(spec.include_dirs, _rules.include_dirs()); | |||
for (const auto& use : _rules.uses()) { | |||
extend(spec.include_dirs, env.ureqs.include_paths(use)); | |||
} | |||
extend(spec.definitions, _rules.defs()); | |||
return env.toolchain.create_compile_command(spec); | |||
} |
@@ -3,6 +3,8 @@ | |||
#include <dds/build/plan/base.hpp> | |||
#include <dds/source/file.hpp> | |||
#include <libman/library.hpp> | |||
#include <memory> | |||
namespace dds { | |||
@@ -25,6 +27,7 @@ class shared_compile_file_rules { | |||
struct rules_impl { | |||
std::vector<fs::path> inc_dirs; | |||
std::vector<std::string> defs; | |||
std::vector<lm::usage> uses; | |||
bool enable_warnings = false; | |||
}; | |||
@@ -53,6 +56,12 @@ public: | |||
auto& defs() noexcept { return _impl->defs; } | |||
auto& defs() const noexcept { return _impl->defs; } | |||
/** | |||
* Access the named usage requirements for this set of rules | |||
*/ | |||
auto& uses() noexcept { return _impl->uses; } | |||
auto& uses() const noexcept { return _impl->uses; } | |||
/** | |||
* A boolean to toggle compile warnings for the associated compiles | |||
*/ |
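These named requirements are consumed at compile time: `compile_file_plan::generate_compile_command` (earlier in this diff) looks each entry up in `env.ureqs` and appends the resulting include directories. A small sketch of the producing side, mirroring `library_plan::create` later in this diff:

// Sketch (not part of the patch): the manifest's `uses` entries now ride on the
// compile rules instead of being expanded into include directories up front.
dds::shared_compile_file_rules rules_for(const dds::library& lib) {
    auto rules = lib.base_compile_rules();
    rules.uses() = lib.manifest().uses;  // named requirements, e.g. {".dds", "Catch"}
    return rules;
}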
@@ -2,6 +2,7 @@ | |||
#include <dds/build/plan/library.hpp> | |||
#include <dds/proc.hpp> | |||
#include <dds/util/algo.hpp> | |||
#include <dds/util/time.hpp> | |||
#include <spdlog/spdlog.h> | |||
@@ -20,10 +21,14 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons | |||
link_exe_spec spec; | |||
spec.output = calc_executable_path(env); | |||
spec.inputs = _input_libs; | |||
for (const lm::usage& links : _links) { | |||
extend(spec.inputs, env.ureqs.link_paths(links)); | |||
} | |||
if (lib.create_archive()) { | |||
// The associated library has compiled components. Add the static library as a linker | |||
// input | |||
spec.inputs.push_back(lib.create_archive()->calc_archive_file_path(env)); | |||
spec.inputs.push_back(env.output_root | |||
/ lib.create_archive()->calc_archive_file_path(env.toolchain)); | |||
} | |||
// The main object should be a linker input, of course. |
@@ -3,6 +3,8 @@ | |||
#include <dds/build/plan/compile_file.hpp> | |||
#include <dds/util/fs.hpp> | |||
#include <libman/library.hpp> | |||
#include <string> | |||
#include <vector> | |||
@@ -26,6 +28,8 @@ struct test_failure { | |||
class link_executable_plan { | |||
/// The linker inputs that should be linked into the executable | |||
std::vector<fs::path> _input_libs; | |||
/// Usage requirements for this executable | |||
std::vector<lm::usage> _links; | |||
/// The compilation plan for the entry-point source file | |||
compile_file_plan _main_compile; | |||
/// The subdirectory in which the executable should be generated | |||
@@ -37,15 +41,18 @@ public: | |||
/** | |||
* Create a new instance | |||
* @param in_libs Linker inputs for the executable | |||
* @param links The library identifiers that the executable should link with | |||
* @param cfp The file compilation that defines the entrypoint of the application | |||
* @param out_subdir The subdirectory of the build root in which the executable should be placed | |||
* @param name_ The name of the executable | |||
*/ | |||
link_executable_plan(std::vector<fs::path> in_libs, | |||
compile_file_plan cfp, | |||
path_ref out_subdir, | |||
std::string name_) | |||
link_executable_plan(std::vector<fs::path> in_libs, | |||
std::vector<lm::usage> links, | |||
compile_file_plan cfp, | |||
path_ref out_subdir, | |||
std::string name_) | |||
: _input_libs(std::move(in_libs)) | |||
, _links(std::move(links)) | |||
, _main_compile(std::move(cfp)) | |||
, _out_subdir(out_subdir) | |||
, _name(std::move(name_)) {} |
@@ -10,9 +10,7 @@ | |||
using namespace dds; | |||
library_plan library_plan::create(const library& lib, | |||
const library_build_params& params, | |||
const usage_requirement_map& ureqs) { | |||
library_plan library_plan::create(const library& lib, const library_build_params& params) { | |||
// Source files are kept in three groups: | |||
std::vector<source_file> app_sources; | |||
std::vector<source_file> test_sources; | |||
@@ -41,12 +39,7 @@ library_plan library_plan::create(const library& lib, | |||
// Load up the compile rules | |||
auto compile_rules = lib.base_compile_rules(); | |||
compile_rules.enable_warnings() = params.enable_warnings; | |||
// Apply our transitive usage requirements. This gives us the search directories for our | |||
// dependencies. | |||
for (const auto& use : lib.manifest().uses) { | |||
ureqs.apply(compile_rules, use.namespace_, use.name); | |||
} | |||
compile_rules.uses() = lib.manifest().uses; | |||
// Convert the library sources into their respective file compilation plans. | |||
auto lib_compile_files = // | |||
@@ -70,21 +63,19 @@ library_plan library_plan::create(const library& lib, | |||
// Collect the paths to linker inputs that should be used when generating executables for this | |||
// library. | |||
std::vector<fs::path> link_libs; | |||
for (auto& use : lib.manifest().uses) { | |||
extend(link_libs, ureqs.link_paths(use.namespace_, use.name)); | |||
} | |||
for (auto& link : lib.manifest().links) { | |||
extend(link_libs, ureqs.link_paths(link.namespace_, link.name)); | |||
} | |||
std::vector<lm::usage> links; | |||
extend(links, lib.manifest().uses); | |||
extend(links, lib.manifest().links); | |||
// Linker inputs for tests may contain additional code for test execution | |||
auto test_link_libs = link_libs; | |||
extend(test_link_libs, params.test_link_files); | |||
std::vector<fs::path> link_libs; | |||
std::vector<fs::path> test_link_libs = params.test_link_files; | |||
// There may also be additional #include paths for test source files | |||
// There may also be additional usage requirements for tests | |||
auto test_rules = compile_rules.clone(); | |||
extend(test_rules.include_dirs(), params.test_include_dirs); | |||
auto test_links = links; | |||
extend(test_rules.uses(), params.test_uses); | |||
extend(test_links, params.test_uses); | |||
// Generate the plans to link any executables for this library | |||
std::vector<link_executable_plan> link_executables; | |||
@@ -99,21 +90,19 @@ library_plan library_plan::create(const library& lib, | |||
auto rules = is_test ? test_rules : compile_rules; | |||
// Pick input libs based on app/test | |||
auto& exe_link_libs = is_test ? test_link_libs : link_libs; | |||
auto& exe_links = is_test ? test_links : links; | |||
// TODO: Apps/tests should only see the _public_ include dir, not both | |||
link_executables.emplace_back(exe_link_libs, | |||
compile_file_plan(rules, | |||
source, | |||
lib.manifest().name, | |||
params.out_subdir / "obj"), | |||
subdir, | |||
source.path.stem().stem().string()); | |||
auto exe = link_executable_plan{exe_link_libs, | |||
exe_links, | |||
compile_file_plan(rules, | |||
source, | |||
lib.manifest().name, | |||
params.out_subdir / "obj"), | |||
subdir, | |||
source.path.stem().stem().string()}; | |||
link_executables.emplace_back(std::move(exe)); | |||
} | |||
// Done! | |||
return library_plan{lib.manifest().name, | |||
lib.path(), | |||
std::move(create_archive), | |||
std::move(link_executables), | |||
lib.manifest().uses, | |||
lib.manifest().links}; | |||
} | |||
return library_plan{lib, std::move(create_archive), std::move(link_executables)}; | |||
} |
@@ -6,6 +6,8 @@ | |||
#include <dds/usage_reqs.hpp> | |||
#include <dds/util/fs.hpp> | |||
#include <libman/library.hpp> | |||
#include <optional> | |||
#include <string> | |||
#include <vector> | |||
@@ -29,6 +31,9 @@ struct library_build_params { | |||
std::vector<fs::path> test_include_dirs; | |||
/// Files that should be added as inputs when linking test executables | |||
std::vector<fs::path> test_link_files; | |||
/// Libraries that are used by tests | |||
std::vector<lm::usage> test_uses; | |||
}; | |||
/** | |||
@@ -47,50 +52,39 @@ struct library_build_params { | |||
* initialize all of the constructor parameters correctly. | |||
*/ | |||
class library_plan { | |||
/// The name of the library | |||
std::string _name; | |||
/// The directory at the root of this library | |||
fs::path _source_root; | |||
/// The underlying library object | |||
library _lib; | |||
/// The `create_archive_plan` for this library, if applicable | |||
std::optional<create_archive_plan> _create_archive; | |||
/// The executables that should be linked as part of this library's build | |||
std::vector<link_executable_plan> _link_exes; | |||
/// The libraries that we use | |||
std::vector<lm::usage> _uses; | |||
/// The libraries that we link | |||
std::vector<lm::usage> _links; | |||
public: | |||
/** | |||
* Construct a new `library_plan` | |||
* @param name The name of the library | |||
* @param source_root The directory that contains this library | |||
* @param lib The `library` object underlying this plan. | |||
* @param ar The `create_archive_plan`, or `nullopt` if this library has no compiled components. | |||
* @param exes The `link_executable_plan` objects for this library. | |||
* @param uses The identities of the libraries that are used by this library | |||
* @param links The identities of the libraries that are linked by this library | |||
*/ | |||
library_plan(std::string_view name, | |||
path_ref source_root, | |||
library_plan(library lib, | |||
std::optional<create_archive_plan> ar, | |||
std::vector<link_executable_plan> exes, | |||
std::vector<lm::usage> uses, | |||
std::vector<lm::usage> links) | |||
: _name(name) | |||
, _source_root(source_root) | |||
std::vector<link_executable_plan> exes) | |||
: _lib(std::move(lib)) | |||
, _create_archive(std::move(ar)) | |||
, _link_exes(std::move(exes)) | |||
, _uses(std::move(uses)) | |||
, _links(std::move(links)) {} | |||
, _link_exes(std::move(exes)) {} | |||
/** | |||
* Get the underlying library object | |||
*/ | |||
auto& library_() const noexcept { return _lib; } | |||
/** | |||
* Get the name of the library | |||
*/ | |||
auto& name() const noexcept { return _name; } | |||
auto& name() const noexcept { return _lib.manifest().name; } | |||
/** | |||
* The directory that defines the source root of the library. | |||
*/ | |||
path_ref source_root() const noexcept { return _source_root; } | |||
path_ref source_root() const noexcept { return _lib.path(); } | |||
/** | |||
* A `create_archive_plan` object, or `nullopt`, depending on if this library has compiled | |||
* components | |||
@@ -103,11 +97,11 @@ public: | |||
/** | |||
* The library identifiers that are used by this library | |||
*/ | |||
auto& uses() const noexcept { return _uses; } | |||
auto& uses() const noexcept { return _lib.manifest().uses; } | |||
/** | |||
* The library identifiers that are linked by this library | |||
*/ | |||
auto& links() const noexcept { return _links; } | |||
auto& links() const noexcept { return _lib.manifest().links; } | |||
/** | |||
* Named constructor: Create a new `library_plan` automatically from some build-time parameters. | |||
@@ -115,14 +109,11 @@ public: | |||
* @param lib The `library` object from which we will inherit several properties. | |||
* @param params Parameters controlling the build of the library. i.e. if we create tests, | |||
* enable warnings, etc. | |||
* @param ureqs The usage requirements map. This should be populated as appropriate. | |||
* | |||
* The `lib` parameter defines the usage requirements of this library, and they are looked up in | |||
* the `ureqs` map. If there are any missing requirements, an exception will be thrown. | |||
*/ | |||
static library_plan create(const library& lib, | |||
const library_build_params& params, | |||
const usage_requirement_map& ureqs); | |||
static library_plan create(const library& lib, const library_build_params& params); | |||
}; | |||
} // namespace dds |
@@ -40,3 +40,20 @@ dependency dependency::parse_depends_string(std::string_view str) { | |||
str)); | |||
} | |||
} | |||
dependency_manifest dependency_manifest::from_file(path_ref fpath) { | |||
auto kvs = lm::parse_file(fpath); | |||
dependency_manifest ret; | |||
lm::read( | |||
fmt::format("Reading dependencies from '{}'", fpath.string()), | |||
kvs, | |||
[&](auto, auto key, auto val) { | |||
if (key == "Depends") { | |||
ret.dependencies.push_back(dependency::parse_depends_string(val)); | |||
return true; | |||
} | |||
return false; | |||
}, | |||
lm::reject_unknown()); | |||
return ret; | |||
} |
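The dependency listing read here uses the same libman key/value syntax as the other manifests; `Depends` is the only recognized key and anything else is rejected. A sketch of loading one (the header path for `dependency_manifest` is assumed), given a file like the `deps.dds` added later in this diff:

// deps.dds contents:
//     Depends: neo-sqlite3 +0.2.2
void load_deps_sketch() {
    auto man = dds::dependency_manifest::from_file("deps.dds");
    for (const dds::dependency& dep : man.dependencies) {
        (void)dep;  // each entry was produced by dependency::parse_depends_string()
    }
}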
@@ -1,6 +1,6 @@ | |||
#pragma once | |||
#include <dds/build/plan/full.hpp> | |||
#include <dds/util/fs.hpp> | |||
#include <pubgrub/interval.hpp> | |||
#include <semver/range.hpp> | |||
@@ -19,4 +19,13 @@ struct dependency { | |||
static dependency parse_depends_string(std::string_view str); | |||
}; | |||
/** | |||
* Represents a dependency listing file, which is a subset of a package manifest | |||
*/ | |||
struct dependency_manifest { | |||
std::vector<dependency> dependencies; | |||
static dependency_manifest from_file(path_ref where); | |||
}; | |||
} // namespace dds |
@@ -1,5 +1,6 @@ | |||
#include "./dist.hpp" | |||
#include <dds/library/library.hpp> | |||
#include <dds/temp.hpp> | |||
#include <dds/util/fs.hpp> | |||
@@ -9,8 +9,8 @@ | |||
using namespace dds; | |||
const lm::library* usage_requirement_map::get(std::string ns, std::string name) const noexcept { | |||
auto found = _reqs.find(library_key{ns, name}); | |||
const lm::library* usage_requirement_map::get(const lm::usage& key) const noexcept { | |||
auto found = _reqs.find(key); | |||
if (found == _reqs.end()) { | |||
return nullptr; | |||
} | |||
@@ -54,21 +54,37 @@ usage_requirement_map usage_requirement_map::from_lm_index(const lm::index& idx) | |||
return ret; | |||
} | |||
std::vector<fs::path> usage_requirement_map::link_paths(std::string ns, std::string name) const { | |||
auto req = get(ns, name); | |||
std::vector<fs::path> usage_requirement_map::link_paths(const lm::usage& key) const { | |||
auto req = get(key); | |||
if (!req) { | |||
throw std::runtime_error( | |||
fmt::format("Unable to find linking requirement '{}/{}'", ns, name)); | |||
fmt::format("Unable to find linking requirement '{}/{}'", key.namespace_, key.name)); | |||
} | |||
std::vector<fs::path> ret; | |||
if (req->linkable_path) { | |||
ret.push_back(*req->linkable_path); | |||
} | |||
for (const auto& dep : req->uses) { | |||
extend(ret, link_paths(dep.namespace_, dep.name)); | |||
extend(ret, link_paths(dep)); | |||
} | |||
for (const auto& link : req->links) { | |||
extend(ret, link_paths(link.namespace_, link.name)); | |||
extend(ret, link_paths(link)); | |||
} | |||
return ret; | |||
} | |||
std::vector<fs::path> usage_requirement_map::include_paths(const lm::usage& usage) const { | |||
std::vector<fs::path> ret; | |||
auto lib = get(usage.namespace_, usage.name); | |||
if (!lib) { | |||
throw std::runtime_error( | |||
fmt::format("Cannot find non-existent usage requirements for '{}/{}'", | |||
usage.namespace_, | |||
usage.name)); | |||
} | |||
extend(ret, lib->include_paths); | |||
for (const auto& transitive : lib->uses) { | |||
extend(ret, include_paths(transitive)); | |||
} | |||
return ret; | |||
} |
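Like `link_paths` above, the new `include_paths` lookup follows `Uses` edges recursively, so compiling against one requirement also picks up the headers of everything it transitively uses. A small sketch (the `acme/widgets` identity is made up):

// Sketch: every include directory needed to compile code that uses acme/widgets,
// including the include paths of its transitive `Uses`.
std::vector<dds::fs::path> includes_for(const dds::usage_requirement_map& ureqs) {
    return ureqs.include_paths(lm::usage{"acme", "widgets"});
}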
@@ -13,10 +13,7 @@ class shared_compile_file_rules; | |||
class usage_requirement_map { | |||
struct library_key { | |||
std::string namespace_; | |||
std::string name; | |||
}; | |||
using library_key = lm::usage; | |||
struct library_key_compare { | |||
bool operator()(const library_key& lhs, const library_key& rhs) const noexcept { | |||
@@ -36,12 +33,16 @@ class usage_requirement_map { | |||
std::map<library_key, lm::library, library_key_compare> _reqs; | |||
public: | |||
const lm::library* get(std::string ns, std::string name) const noexcept; | |||
const lm::library* get(const lm::usage& key) const noexcept; | |||
const lm::library* get(std::string ns, std::string name) const noexcept { | |||
return get({ns, name}); | |||
} | |||
lm::library& add(std::string ns, std::string name); | |||
void add(std::string ns, std::string name, lm::library lib) { add(ns, name) = lib; } | |||
void apply(shared_compile_file_rules, std::string ns, std::string name) const; | |||
std::vector<fs::path> link_paths(std::string ns, std::string name) const; | |||
std::vector<fs::path> link_paths(const lm::usage&) const; | |||
std::vector<fs::path> include_paths(const lm::usage& req) const; | |||
static usage_requirement_map from_lm_index(const lm::index&) noexcept; | |||
}; |
@@ -25,6 +25,6 @@ def test_simple_lib(dds: DDS, scope: ExitStack): | |||
''', | |||
)) | |||
dds.build(tests=True, apps=False, warnings=False, export=True) | |||
dds.build(tests=True, apps=False, warnings=False) | |||
assert (dds.build_dir / 'compile_commands.json').is_file() | |||
assert list(dds.build_dir.glob('libTestLibrary*')) != [] |
@@ -10,5 +10,5 @@ def test_lib_with_just_test(dds: DDS, scope: ExitStack): | |||
b'int main() {}', | |||
)) | |||
dds.build(tests=True, apps=False, warnings=False, export=False) | |||
dds.build(tests=True, apps=False, warnings=False) | |||
assert (dds.build_dir / f'test/foo{dds.exe_suffix}').is_file() |
@@ -69,15 +69,16 @@ class DDS: | |||
def project_dir_arg(self) -> str: | |||
return f'--project-dir={self.source_root}' | |||
def deps_build(self, *, | |||
def build_deps(self, args: proc.CommandLine, *, | |||
toolchain: str = None) -> subprocess.CompletedProcess: | |||
return self.run([ | |||
'deps', | |||
'build', | |||
'build-deps', | |||
f'--toolchain={toolchain or self.default_builtin_toolchain}', | |||
self.repo_dir_arg, | |||
f'--deps-build-dir={self.deps_build_dir}', | |||
f'--catalog={self.catalog_path}', | |||
f'--repo-dir={self.repo_dir}', | |||
f'--out={self.deps_build_dir}', | |||
f'--lmi-path={self.lmi_path}', | |||
args, | |||
]) | |||
def build(self, | |||
@@ -85,8 +86,7 @@ class DDS: | |||
toolchain: str = None, | |||
apps: bool = True, | |||
warnings: bool = True, | |||
tests: bool = True, | |||
export: bool = False) -> subprocess.CompletedProcess: | |||
tests: bool = True) -> subprocess.CompletedProcess: | |||
return self.run([ | |||
'build', | |||
f'--out={self.build_dir}', | |||
@@ -96,7 +96,6 @@ class DDS: | |||
['--no-tests'] if not tests else [], | |||
['--no-apps'] if not apps else [], | |||
['--no-warnings'] if not warnings else [], | |||
['--export'] if export else [], | |||
self.project_dir_arg, | |||
]) | |||
@@ -119,9 +118,9 @@ class DDS: | |||
@property | |||
def default_builtin_toolchain(self) -> str: | |||
if os.name == 'posix': | |||
return ':gcc-9' | |||
return ':c++17:gcc-9' | |||
elif os.name == 'nt': | |||
return ':msvc' | |||
return ':c++17:msvc' | |||
else: | |||
raise RuntimeError( | |||
f'No default builtin toolchain defined for tests on platform "{os.name}"' |
@@ -0,0 +1,21 @@ | |||
{ | |||
"version": 1, | |||
"packages": { | |||
"neo-sqlite3": { | |||
"0.1.0": { | |||
"git": { | |||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||
"ref": "0.1.0" | |||
}, | |||
"depends": {} | |||
}, | |||
"0.2.2": { | |||
"git": { | |||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||
"ref": "0.2.2" | |||
}, | |||
"depends": {} | |||
} | |||
} | |||
} | |||
} |
@@ -0,0 +1,2 @@ | |||
Depends: neo-sqlite3 +0.2.2 |
@@ -0,0 +1,31 @@ | |||
from tests import dds, DDS | |||
def test_build_deps_from_file(dds: DDS): | |||
assert not dds.deps_build_dir.is_dir() | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
dds.build_deps(['-d', 'deps.dds']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.2.2').is_dir() | |||
assert (dds.scratch_dir / 'INDEX.lmi').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() | |||
def test_build_deps_from_cmd(dds: DDS): | |||
assert not dds.deps_build_dir.is_dir() | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
dds.build_deps(['neo-sqlite3 =0.2.2']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.2.2').is_dir() | |||
assert (dds.scratch_dir / 'INDEX.lmi').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() | |||
def test_multiple_deps(dds: DDS): | |||
assert not dds.deps_build_dir.is_dir() | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
dds.build_deps(['neo-sqlite3 ^0.2.2', 'neo-sqlite3 ~0.2.0']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.2.2').is_dir() | |||
assert (dds.scratch_dir / 'INDEX.lmi').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() |