@@ -8,6 +8,10 @@ _invalid: | |||
echo "Specify a target name to execute" | |||
exit 1 | |||
clean: | |||
rm -f -r -- $(shell find -name __pycache__ -type d) | |||
rm -f -r -- _build/ _prebuilt/ | |||
docs: | |||
sphinx-build -b html \ | |||
docs \ | |||
@@ -31,8 +35,17 @@ docs-sync-server: | |||
--reload-delay 300 \ | |||
--watch **/*.html | |||
macos-ci: nix-ci | |||
linux-ci: nix-ci | |||
macos-ci: | |||
python3 -u tools/ci.py \ | |||
-B download \ | |||
-T tools/gcc-9.jsonc \ | |||
-T2 tools/gcc-9.next.jsonc \ | |||
linux-ci: | |||
python3 -u tools/ci.py \ | |||
-B download \ | |||
-T tools/gcc-9.jsonc \ | |||
-T2 tools/gcc-9-static.jsonc | |||
nix-ci: | |||
python3 -u tools/ci.py \ | |||
@@ -46,7 +59,8 @@ vagrant-freebsd-ci: | |||
cd /vagrant && \ | |||
python3.7 tools/ci.py \ | |||
-B download \ | |||
-T tools/freebsd-gcc-9.jsonc \ | |||
-T tools/freebsd-gcc-9.jsonc \ | |||
-T2 tools/freebsd-gcc-9.next.jsonc \ | |||
' | |||
vagrant scp freebsd11:/vagrant/_build/dds _build/dds-freebsd-x64 | |||
vagrant halt |
@@ -12,7 +12,7 @@ jobs: | |||
echo Executing Build and Tests | |||
reg add HKLM\SYSTEM\CurrentControlSet\Control\FileSystem /v LongPathsEnabled /t REG_DWORD /d 1 /f || exit 1 | |||
python -m pip install pytest pytest-xdist || exit 1 | |||
python -u tools/ci.py -B download -T tools\msvc.jsonc || exit 1 | |||
python -u tools/ci.py -B download -T tools\msvc.jsonc -T2 tools\msvc.next.jsonc || exit 1 | |||
displayName: Full CI | |||
- publish: _build/dds.exe | |||
artifact: DDS Executable - Windows VS2019 |
@@ -210,7 +210,7 @@ dependencies we want to install:: | |||
You can also list your dependencies as an inline string in your CMakeLists.txt | |||
instead of a separate file:: | |||
pmm(DDS DEPENDS "neo-sqlite3 ^0.2.2") | |||
pmm(DDS DEPENDS neo-sqlite3^0.2.2) | |||
Since you'll probably want to be using ``libman.cmake`` at the same time, the | |||
calls for ``CMakeCM`` and ``DDS`` can simply be combined. This is how our new | |||
@@ -225,7 +225,7 @@ CMake project might look: | |||
include(pmm.cmake) | |||
pmm(CMakeCM ROLLING | |||
DDS DEPENDS "neo-sqlite3 ^0.2.2" | |||
DDS DEPENDS neo-sqlite3^0.2.2 | |||
) | |||
include(libman) |
@@ -14,7 +14,7 @@ | |||
"pubgrub": "0.2.1", | |||
"vob-json5": "0.1.5", | |||
"vob-semester": "0.2.1", | |||
"ctre": "2.7.0", | |||
"ctre": "2.8.1", | |||
}, | |||
"test_driver": "Catch-Main" | |||
} |
@@ -170,6 +170,7 @@ struct cli_catalog { | |||
catalog_path_flag cat_path{cmd}; | |||
args::Flag import_stdin{cmd, "stdin", "Import JSON from stdin", {"stdin"}}; | |||
args::Flag init{cmd, "initial", "Re-import the initial catalog contents", {"initial"}}; | |||
args::ValueFlagList<std::string> | |||
json_paths{cmd, | |||
"json", | |||
@@ -178,6 +179,9 @@ struct cli_catalog { | |||
int run() { | |||
auto cat = cat_path.open(); | |||
if (init.Get()) { | |||
cat.import_initial(); | |||
} | |||
for (const auto& json_fpath : json_paths.Get()) { | |||
cat.import_json_file(json_fpath); | |||
} | |||
@@ -619,15 +623,8 @@ struct cli_build { | |||
[&](dds::repository repo) { | |||
// Download dependencies | |||
auto deps = repo.solve(man.dependencies, cat); | |||
dds::get_all(deps, repo, cat); | |||
for (const dds::package_id& pk : deps) { | |||
auto exists = !!repo.find(pk); | |||
if (!exists) { | |||
spdlog::info("Download dependency: {}", pk.to_string()); | |||
auto opt_pkg = cat.get(pk); | |||
assert(opt_pkg); | |||
auto tsd = dds::get_package_sdist(*opt_pkg); | |||
repo.add_sdist(tsd.sdist, dds::if_exists::throw_exc); | |||
} | |||
auto sdist_ptr = repo.find(pk); | |||
assert(sdist_ptr); | |||
dds::sdist_build_params deps_params; | |||
@@ -713,15 +710,8 @@ struct cli_build_deps { | |||
// Download dependencies | |||
spdlog::info("Loading {} dependencies", all_deps.size()); | |||
auto deps = repo.solve(all_deps, cat); | |||
dds::get_all(deps, repo, cat); | |||
for (const dds::package_id& pk : deps) { | |||
auto exists = !!repo.find(pk); | |||
if (!exists) { | |||
spdlog::info("Download dependency: {}", pk.to_string()); | |||
auto opt_pkg = cat.get(pk); | |||
assert(opt_pkg); | |||
auto tsd = dds::get_package_sdist(*opt_pkg); | |||
repo.add_sdist(tsd.sdist, dds::if_exists::throw_exc); | |||
} | |||
auto sdist_ptr = repo.find(pk); | |||
assert(sdist_ptr); | |||
dds::sdist_build_params deps_params; |
@@ -24,7 +24,7 @@ struct state { | |||
}; | |||
void log_failure(const test_failure& fail) { | |||
spdlog::error("Test '{}' failed! [exitted {}]", fail.executable_path.string(), fail.retc); | |||
spdlog::error("Test '{}' failed! [exited {}]", fail.executable_path.string(), fail.retc); | |||
if (fail.signal) { | |||
spdlog::error("Test execution received signal {}", fail.signal); | |||
} |
@@ -3,6 +3,7 @@ | |||
#include <dds/build/file_deps.hpp> | |||
#include <dds/error/errors.hpp> | |||
#include <dds/proc.hpp> | |||
#include <dds/util/parallel.hpp> | |||
#include <dds/util/string.hpp> | |||
#include <dds/util/time.hpp> | |||
@@ -20,59 +21,6 @@ using namespace ranges; | |||
namespace { | |||
template <typename Range, typename Fn> | |||
bool parallel_run(Range&& rng, int n_jobs, Fn&& fn) { | |||
// We don't bother with a nice thread pool, as the overhead of most build | |||
// tasks dwarf the cost of interlocking. | |||
std::mutex mut; | |||
auto iter = rng.begin(); | |||
const auto stop = rng.end(); | |||
std::vector<std::exception_ptr> exceptions; | |||
auto run_one = [&]() mutable { | |||
while (true) { | |||
std::unique_lock lk{mut}; | |||
if (!exceptions.empty()) { | |||
break; | |||
} | |||
if (iter == stop) { | |||
break; | |||
} | |||
auto&& item = *iter; | |||
++iter; | |||
lk.unlock(); | |||
try { | |||
fn(item); | |||
} catch (...) { | |||
lk.lock(); | |||
exceptions.push_back(std::current_exception()); | |||
break; | |||
} | |||
} | |||
}; | |||
std::unique_lock lk{mut}; | |||
std::vector<std::thread> threads; | |||
if (n_jobs < 1) { | |||
n_jobs = std::thread::hardware_concurrency() + 2; | |||
} | |||
std::generate_n(std::back_inserter(threads), n_jobs, [&] { return std::thread(run_one); }); | |||
lk.unlock(); | |||
for (auto& t : threads) { | |||
t.join(); | |||
} | |||
for (auto eptr : exceptions) { | |||
try { | |||
std::rethrow_exception(eptr); | |||
} catch (const std::exception& e) { | |||
spdlog::error(e.what()); | |||
} | |||
} | |||
return exceptions.empty(); | |||
} | |||
/// The actual "real" information that we need to perform a compilation. | |||
struct compile_file_full { | |||
const compile_file_plan& plan; |
@@ -78,13 +78,18 @@ std::optional<test_failure> link_executable_plan::run_test(build_env_ref env) co | |||
auto exe_path = calc_executable_path(env); | |||
auto msg = fmt::format("Run test: {:30}", fs::relative(exe_path, env.output_root).string()); | |||
spdlog::info(msg); | |||
auto&& [dur, res] | |||
= timed<std::chrono::microseconds>([&] { return run_proc({exe_path.string()}); }); | |||
using namespace std::chrono_literals; | |||
auto&& [dur, res] = timed<std::chrono::microseconds>( | |||
[&] { return run_proc({.command = {exe_path.string()}, .timeout = 10s}); }); | |||
if (res.okay()) { | |||
spdlog::info("{} - PASSED - {:>9n}μs", msg, dur.count()); | |||
return std::nullopt; | |||
} else { | |||
spdlog::error("{} - FAILED - {:>9n}μs [exited {}]", msg, dur.count(), res.retc); | |||
auto exit_msg = fmt::format(res.signal ? "signalled {}" : "exited {}", | |||
res.signal ? res.signal : res.retc); | |||
auto fail_str = res.timed_out ? "TIMEOUT" : "FAILED "; | |||
spdlog::error("{} - {} - {:>9n}μs [{}]", msg, fail_str, dur.count(), exit_msg); | |||
test_failure f; | |||
f.executable_path = exe_path; | |||
f.output = res.output; |
@@ -3,6 +3,7 @@ | |||
#include <dds/build/iter_compilations.hpp> | |||
#include <dds/build/plan/compile_exec.hpp> | |||
#include <dds/error/errors.hpp> | |||
#include <dds/util/parallel.hpp> | |||
#include <range/v3/view/concat.hpp> | |||
#include <range/v3/view/filter.hpp> | |||
@@ -20,60 +21,6 @@ using namespace dds; | |||
namespace { | |||
/// XXX: Duplicated in compile_exec.cpp !! | |||
template <typename Range, typename Fn> | |||
bool parallel_run(Range&& rng, int n_jobs, Fn&& fn) { | |||
// We don't bother with a nice thread pool, as the overhead of most build | |||
// tasks dwarf the cost of interlocking. | |||
std::mutex mut; | |||
auto iter = rng.begin(); | |||
const auto stop = rng.end(); | |||
std::vector<std::exception_ptr> exceptions; | |||
auto run_one = [&]() mutable { | |||
while (true) { | |||
std::unique_lock lk{mut}; | |||
if (!exceptions.empty()) { | |||
break; | |||
} | |||
if (iter == stop) { | |||
break; | |||
} | |||
auto&& item = *iter; | |||
++iter; | |||
lk.unlock(); | |||
try { | |||
fn(item); | |||
} catch (...) { | |||
lk.lock(); | |||
exceptions.push_back(std::current_exception()); | |||
break; | |||
} | |||
} | |||
}; | |||
std::unique_lock lk{mut}; | |||
std::vector<std::thread> threads; | |||
if (n_jobs < 1) { | |||
n_jobs = std::thread::hardware_concurrency() + 2; | |||
} | |||
std::generate_n(std::back_inserter(threads), n_jobs, [&] { return std::thread(run_one); }); | |||
lk.unlock(); | |||
for (auto& t : threads) { | |||
t.join(); | |||
} | |||
for (auto eptr : exceptions) { | |||
try { | |||
std::rethrow_exception(eptr); | |||
} catch (const std::exception& e) { | |||
spdlog::error(e.what()); | |||
} | |||
} | |||
return exceptions.empty(); | |||
} | |||
template <typename T, typename Range> | |||
decltype(auto) pair_up(T& left, Range& right) { | |||
auto rep = ranges::view::repeat(left); |
@@ -2,6 +2,7 @@ | |||
#include "./import.hpp" | |||
#include <dds/catalog/init_catalog.hpp> | |||
#include <dds/dym.hpp> | |||
#include <dds/error/errors.hpp> | |||
#include <dds/solve/solve.hpp> | |||
@@ -77,6 +78,102 @@ void migrate_repodb_2(sqlite3::database& db) { | |||
)"); | |||
} | |||
std::string transforms_to_json(const std::vector<fs_transformation>& trs) { | |||
std::string acc = "["; | |||
for (auto it = trs.begin(); it != trs.end(); ++it) { | |||
acc += it->as_json(); | |||
if (std::next(it) != trs.end()) { | |||
acc += ", "; | |||
} | |||
} | |||
return acc + "]"; | |||
} | |||
void store_with_remote(const neo::sqlite3::statement_cache&, | |||
const package_info& pkg, | |||
std::monostate) { | |||
neo_assert_always( | |||
invariant, | |||
false, | |||
"There was an attempt to insert a package listing into the database where that package " | |||
"listing does not have a remote listing. If you see this message, it is a dds bug.", | |||
pkg.ident.to_string()); | |||
} | |||
void store_with_remote(neo::sqlite3::statement_cache& stmts, | |||
const package_info& pkg, | |||
const git_remote_listing& git) { | |||
auto lm_usage = git.auto_lib.value_or(lm::usage{}); | |||
sqlite3::exec( // | |||
stmts, | |||
R"( | |||
INSERT OR REPLACE INTO dds_cat_pkgs ( | |||
name, | |||
version, | |||
git_url, | |||
git_ref, | |||
lm_name, | |||
lm_namespace, | |||
description, | |||
repo_transform | |||
) VALUES ( | |||
?1, | |||
?2, | |||
?3, | |||
?4, | |||
CASE WHEN ?5 = '' THEN NULL ELSE ?5 END, | |||
CASE WHEN ?6 = '' THEN NULL ELSE ?6 END, | |||
?7, | |||
?8 | |||
) | |||
)"_sql, | |||
std::forward_as_tuple( // | |||
pkg.ident.name, | |||
pkg.ident.version.to_string(), | |||
git.url, | |||
git.ref, | |||
lm_usage.name, | |||
lm_usage.namespace_, | |||
pkg.description, | |||
transforms_to_json(git.transforms))); | |||
} | |||
void do_store_pkg(neo::sqlite3::database& db, | |||
neo::sqlite3::statement_cache& st_cache, | |||
const package_info& pkg) { | |||
std::visit([&](auto&& remote) { store_with_remote(st_cache, pkg, remote); }, pkg.remote); | |||
auto db_pkg_id = db.last_insert_rowid(); | |||
auto& new_dep_st = st_cache(R"( | |||
INSERT INTO dds_cat_pkg_deps ( | |||
pkg_id, | |||
dep_name, | |||
low, | |||
high | |||
) VALUES ( | |||
?, | |||
?, | |||
?, | |||
? | |||
) | |||
)"_sql); | |||
for (const auto& dep : pkg.deps) { | |||
new_dep_st.reset(); | |||
assert(dep.versions.num_intervals() == 1); | |||
auto iv_1 = *dep.versions.iter_intervals().begin(); | |||
sqlite3::exec(new_dep_st, | |||
std::forward_as_tuple(db_pkg_id, | |||
dep.name, | |||
iv_1.low.to_string(), | |||
iv_1.high.to_string())); | |||
} | |||
} | |||
void store_init_packages(sqlite3::database& db, sqlite3::statement_cache& st_cache) { | |||
for (auto& pkg : init_catalog_packages()) { | |||
do_store_pkg(db, st_cache, pkg); | |||
} | |||
} | |||
void ensure_migrated(sqlite3::database& db) { | |||
sqlite3::transaction_guard tr{db}; | |||
db.exec(R"( | |||
@@ -102,6 +199,11 @@ void ensure_migrated(sqlite3::database& db) { | |||
constexpr int current_database_version = 2; | |||
int version = version_; | |||
// If this is the first time we're working here, import the initial | |||
// catalog with some useful tidbits. | |||
bool import_init_packages = version == 0; | |||
if (version > current_database_version) { | |||
throw_external_error<errc::catalog_too_new>(); | |||
} | |||
@@ -114,6 +216,14 @@ void ensure_migrated(sqlite3::database& db) { | |||
} | |||
meta["version"] = 2; | |||
exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump())); | |||
if (import_init_packages) { | |||
spdlog::info( | |||
"A new catalog database case been created, and has been populated with some initial " | |||
"contents."); | |||
neo::sqlite3::statement_cache stmts{db}; | |||
store_init_packages(db, stmts); | |||
} | |||
} | |||
void check_json(bool b, std::string_view what) { | |||
@@ -144,95 +254,10 @@ catalog catalog::open(const std::string& db_path) { | |||
catalog::catalog(sqlite3::database db) | |||
: _db(std::move(db)) {} | |||
void catalog::_store_pkg(const package_info& pkg, std::monostate) { | |||
neo_assert_always( | |||
invariant, | |||
false, | |||
"There was an attempt to insert a package listing into the database where that package " | |||
"listing does not have a remote listing. If you see this message, it is a dds bug.", | |||
pkg.ident.to_string()); | |||
} | |||
namespace { | |||
std::string transforms_to_json(const std::vector<fs_transformation>& trs) { | |||
std::string acc = "["; | |||
for (auto it = trs.begin(); it != trs.end(); ++it) { | |||
acc += it->as_json(); | |||
if (std::next(it) != trs.end()) { | |||
acc += ", "; | |||
} | |||
} | |||
return acc + "]"; | |||
} | |||
} // namespace | |||
void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git) { | |||
auto lm_usage = git.auto_lib.value_or(lm::usage{}); | |||
sqlite3::exec( // | |||
_stmt_cache, | |||
R"( | |||
INSERT OR REPLACE INTO dds_cat_pkgs ( | |||
name, | |||
version, | |||
git_url, | |||
git_ref, | |||
lm_name, | |||
lm_namespace, | |||
description, | |||
repo_transform | |||
) VALUES ( | |||
?1, | |||
?2, | |||
?3, | |||
?4, | |||
CASE WHEN ?5 = '' THEN NULL ELSE ?5 END, | |||
CASE WHEN ?6 = '' THEN NULL ELSE ?6 END, | |||
?7, | |||
?8 | |||
) | |||
)"_sql, | |||
std::forward_as_tuple( // | |||
pkg.ident.name, | |||
pkg.ident.version.to_string(), | |||
git.url, | |||
git.ref, | |||
lm_usage.name, | |||
lm_usage.namespace_, | |||
pkg.description, | |||
transforms_to_json(git.transforms))); | |||
} | |||
void catalog::store(const package_info& pkg) { | |||
sqlite3::transaction_guard tr{_db}; | |||
std::visit([&](auto&& remote) { _store_pkg(pkg, remote); }, pkg.remote); | |||
auto db_pkg_id = _db.last_insert_rowid(); | |||
auto& new_dep_st = _stmt_cache(R"( | |||
INSERT INTO dds_cat_pkg_deps ( | |||
pkg_id, | |||
dep_name, | |||
low, | |||
high | |||
) VALUES ( | |||
?, | |||
?, | |||
?, | |||
? | |||
) | |||
)"_sql); | |||
for (const auto& dep : pkg.deps) { | |||
new_dep_st.reset(); | |||
assert(dep.versions.num_intervals() == 1); | |||
auto iv_1 = *dep.versions.iter_intervals().begin(); | |||
sqlite3::exec(new_dep_st, | |||
std::forward_as_tuple(db_pkg_id, | |||
dep.name, | |||
iv_1.low.to_string(), | |||
iv_1.high.to_string())); | |||
} | |||
do_store_pkg(_db, _stmt_cache, pkg); | |||
} | |||
std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept { | |||
@@ -384,3 +409,9 @@ void catalog::import_json_str(std::string_view content) { | |||
store(pkg); | |||
} | |||
} | |||
void catalog::import_initial() { | |||
sqlite3::transaction_guard tr{_db}; | |||
spdlog::info("Restoring built-in initial catalog contents"); | |||
store_init_packages(_db, _stmt_cache); | |||
} |
@@ -25,9 +25,6 @@ class catalog { | |||
explicit catalog(neo::sqlite3::database db); | |||
catalog(const catalog&) = delete; | |||
void _store_pkg(const package_info&, const git_remote_listing&); | |||
void _store_pkg(const package_info&, std::monostate); | |||
public: | |||
catalog(catalog&&) = default; | |||
catalog& operator=(catalog&&) = default; | |||
@@ -42,6 +39,7 @@ public: | |||
std::vector<package_id> by_name(std::string_view sv) const noexcept; | |||
std::vector<dependency> dependencies_of(const package_id& pkg) const noexcept; | |||
void import_initial(); | |||
void import_json_str(std::string_view json_str); | |||
void import_json_file(path_ref json_path) { | |||
auto content = dds::slurp_file(json_path); |
@@ -71,9 +71,9 @@ TEST_CASE_METHOD(catalog_test_case, "Parse JSON repo") { | |||
"packages": { | |||
"foo": { | |||
"1.2.3": { | |||
"depends": { | |||
"bar": "~4.2.1" | |||
}, | |||
"depends": [ | |||
"bar~4.2.1" | |||
], | |||
"git": { | |||
"url": "http://example.com", | |||
"ref": "master" |
@@ -2,6 +2,8 @@ | |||
#include <dds/catalog/catalog.hpp> | |||
#include <dds/error/errors.hpp> | |||
#include <dds/repo/repo.hpp> | |||
#include <dds/util/parallel.hpp> | |||
#include <neo/assert.hpp> | |||
#include <nlohmann/json.hpp> | |||
@@ -9,6 +11,8 @@ | |||
#include <range/v3/algorithm/any_of.hpp> | |||
#include <range/v3/distance.hpp> | |||
#include <range/v3/numeric/accumulate.hpp> | |||
#include <range/v3/view/filter.hpp> | |||
#include <range/v3/view/transform.hpp> | |||
#include <spdlog/spdlog.h> | |||
using namespace dds; | |||
@@ -75,4 +79,33 @@ temporary_sdist dds::get_package_sdist(const package_info& pkg) { | |||
tsd.sdist.manifest.pkg_id.to_string()); | |||
} | |||
return tsd; | |||
} | |||
} | |||
void dds::get_all(const std::vector<package_id>& pkgs, repository& repo, const catalog& cat) { | |||
std::mutex repo_mut; | |||
auto absent_pkg_infos = pkgs // | |||
| ranges::views::filter([&](auto pk) { | |||
std::scoped_lock lk{repo_mut}; | |||
return !repo.find(pk); | |||
}) | |||
| ranges::views::transform([&](auto id) { | |||
auto info = cat.get(id); | |||
neo_assert(invariant, | |||
info.has_value(), | |||
"No catalog entry for package id?", | |||
id.to_string()); | |||
return *info; | |||
}); | |||
auto okay = parallel_run(absent_pkg_infos, 8, [&](package_info inf) { | |||
spdlog::info("Download package: {}", inf.ident.to_string()); | |||
auto tsd = get_package_sdist(inf); | |||
std::scoped_lock lk{repo_mut}; | |||
repo.add_sdist(tsd.sdist, if_exists::throw_exc); | |||
}); | |||
if (!okay) { | |||
throw_external_error<errc::dependency_resolve_failure>("Downloading of packages failed."); | |||
} | |||
} |
@@ -5,6 +5,8 @@ | |||
namespace dds { | |||
class repository; | |||
class catalog; | |||
struct package_info; | |||
struct temporary_sdist { | |||
@@ -14,4 +16,6 @@ struct temporary_sdist { | |||
temporary_sdist get_package_sdist(const package_info&); | |||
} // namespace dds | |||
void get_all(const std::vector<package_id>& pkgs, dds::repository& repo, const catalog& cat); | |||
} // namespace dds |
@@ -89,18 +89,13 @@ parse_pkg_json_v1(std::string_view name, semver::version version, const json5::d | |||
using namespace semester::walk_ops; | |||
std::string dep_name; | |||
auto dep_range = semver::range::everything(); | |||
auto parse_dep_range = [&](const std::string& s) { | |||
auto make_dep = [&](std::string const& str) { | |||
try { | |||
return semver::range::parse_restricted(s); | |||
} catch (const semver::invalid_range& e) { | |||
return dependency::parse_depends_string(str); | |||
} catch (std::runtime_error const& e) { | |||
import_error(std::string(walk.path()) + e.what()); | |||
} | |||
}; | |||
auto make_dep = [&](auto&&) { | |||
return dependency{dep_name, {dep_range.low(), dep_range.high()}}; | |||
}; | |||
auto check_one_remote = [&](auto&&) { | |||
if (!semester::holds_alternative<std::monostate>(ret.remote)) { | |||
@@ -109,18 +104,14 @@ parse_pkg_json_v1(std::string_view name, semver::version version, const json5::d | |||
return walk.pass; | |||
}; | |||
auto add_dep = any_key{put_into(dep_name), | |||
require_str{"Dependency should specify a version range string"}, | |||
put_into_pass{dep_range, parse_dep_range}, | |||
put_into{std::back_inserter(ret.deps), make_dep}}; | |||
walk(data, | |||
mapping{if_key{"description", | |||
require_str{"'description' should be a string"}, | |||
put_into{ret.description}}, | |||
if_key{"depends", | |||
require_obj{"'depends' must be a JSON object"}, | |||
mapping{add_dep}}, | |||
require_array{"'depends' must be an array of dependency strings"}, | |||
for_each{require_str{"Each dependency should be a string"}, | |||
put_into{std::back_inserter(ret.deps), make_dep}}}, | |||
if_key{ | |||
"git", | |||
check_one_remote, |
@@ -0,0 +1,11 @@ | |||
#pragma once | |||
#include "./package_info.hpp" | |||
#include <vector> | |||
namespace dds { | |||
const std::vector<package_info>& init_catalog_packages() noexcept; | |||
} // namespace dds |
@@ -14,23 +14,24 @@ | |||
using namespace dds; | |||
dependency dependency::parse_depends_string(std::string_view str) { | |||
const auto str_begin = str.data(); | |||
auto str_iter = str_begin; | |||
const auto str_end = str_iter + str.size(); | |||
while (str_iter != str_end && !std::isspace(*str_iter)) { | |||
++str_iter; | |||
auto sep_pos = str.find_first_of("=@^~+"); | |||
if (sep_pos == str.npos) { | |||
throw_user_error<errc::invalid_version_range_string>("Invalid dependency string '{}'", str); | |||
} | |||
auto name = trim_view(std::string_view(str_begin, str_iter - str_begin)); | |||
auto version_str = trim_view(std::string_view(str_iter, str_end - str_iter)); | |||
auto name = str.substr(0, sep_pos); | |||
if (str[sep_pos] == '@') { | |||
++sep_pos; | |||
} | |||
auto range_str = str.substr(sep_pos); | |||
try { | |||
auto rng = semver::range::parse_restricted(version_str); | |||
auto rng = semver::range::parse_restricted(range_str); | |||
return dependency{std::string(name), {rng.low(), rng.high()}}; | |||
} catch (const semver::invalid_range&) { | |||
throw_user_error<errc::invalid_version_range_string>( | |||
"Invalid version range string '{}' in dependency declaration '{}'", version_str, str); | |||
"Invalid version range string '{}' in dependency string '{}'", range_str, str); | |||
} | |||
} | |||
@@ -0,0 +1,27 @@ | |||
#include <dds/deps.hpp> | |||
#include <catch2/catch.hpp> | |||
TEST_CASE("Parse dependency strings") { | |||
struct case_ { | |||
std::string depstr; | |||
std::string name; | |||
std::string low; | |||
std::string high; | |||
}; | |||
auto cur = GENERATE(Catch::Generators::values<case_>({ | |||
{"foo@1.2.3", "foo", "1.2.3", "1.2.4"}, | |||
{"foo=1.2.3", "foo", "1.2.3", "1.2.4"}, | |||
{"foo^1.2.3", "foo", "1.2.3", "2.0.0"}, | |||
{"foo~1.2.3", "foo", "1.2.3", "1.3.0"}, | |||
{"foo+1.2.3", "foo", "1.2.3", semver::version::max_version().to_string()}, | |||
})); | |||
auto dep = dds::dependency::parse_depends_string(cur.depstr); | |||
CHECK(dep.name == cur.name); | |||
CHECK(dep.versions.num_intervals() == 1); | |||
auto ver_iv = *dep.versions.iter_intervals().begin(); | |||
CHECK(ver_iv.low == semver::version::parse(cur.low)); | |||
CHECK(ver_iv.high == semver::version::parse(cur.high)); | |||
} |
@@ -3,7 +3,6 @@ | |||
#include <dds/dym.hpp> | |||
#include <dds/error/errors.hpp> | |||
#include <dds/util/algo.hpp> | |||
#include <libman/parse.hpp> | |||
#include <json5/parse_data.hpp> | |||
#include <range/v3/view/transform.hpp> | |||
@@ -12,27 +11,6 @@ | |||
using namespace dds; | |||
library_manifest library_manifest::load_from_dds_file(path_ref fpath) { | |||
spdlog::warn( | |||
"Using deprecated library.dds parsing (on file {}). This will be removed soon. Migrate!", | |||
fpath.string()); | |||
auto kvs = lm::parse_file(fpath); | |||
library_manifest ret; | |||
ret.name = fpath.parent_path().filename().string(); | |||
std::vector<std::string> uses_strings; | |||
std::vector<std::string> links_strings; | |||
lm::read(fmt::format("Reading library manifest {}", fpath.string()), | |||
kvs, | |||
lm::read_accumulate("Uses", uses_strings), | |||
lm::read_accumulate("Links", links_strings), | |||
lm::read_required("Name", ret.name), | |||
lm_reject_dym{{"Uses", "Links", "Name"}}); | |||
extend(ret.uses, ranges::views::transform(uses_strings, lm::split_usage_string)); | |||
extend(ret.links, ranges::views::transform(links_strings, lm::split_usage_string)); | |||
return ret; | |||
} | |||
library_manifest library_manifest::load_from_file(path_ref fpath) { | |||
auto content = slurp_file(fpath); | |||
auto data = json5::parse_data(content); | |||
@@ -76,24 +54,6 @@ library_manifest library_manifest::load_from_file(path_ref fpath) { | |||
if (rej) { | |||
throw_user_error<errc::invalid_lib_manifest>(rej->message); | |||
} | |||
// using namespace json_read::ops; | |||
// json_read::decompose( // | |||
// data.as_object(), | |||
// object(key("name", require_string(put_into{lib.name}, "`name` must be a string")), | |||
// key("uses", | |||
// array_each{require_string( | |||
// [&](auto&& uses) { | |||
// lib.uses.push_back(lm::split_usage_string(uses.as_string())); | |||
// return json_read::accept_t{}; | |||
// }, | |||
// "All `uses` items must be strings")}), | |||
// key("links", | |||
// array_each{require_string( | |||
// [&](auto&& links) { | |||
// lib.links.push_back(lm::split_usage_string(links.as_string())); | |||
// return json_read::accept_t{}; | |||
// }, | |||
// "All `links` items must be strings")}))); | |||
if (lib.name.empty()) { | |||
throw_user_error<errc::invalid_lib_manifest>( | |||
@@ -116,11 +76,6 @@ std::optional<fs::path> library_manifest::find_in_directory(path_ref dirpath) { | |||
} | |||
} | |||
auto dds_file = dirpath / "library.dds"; | |||
if (fs::is_regular_file(dds_file)) { | |||
return dds_file; | |||
} | |||
return std::nullopt; | |||
} | |||
@@ -130,9 +85,5 @@ std::optional<library_manifest> library_manifest::load_from_directory(path_ref d | |||
return std::nullopt; | |||
} | |||
if (found->extension() == ".dds") { | |||
return load_from_dds_file(*found); | |||
} else { | |||
return load_from_file(*found); | |||
} | |||
return load_from_file(*found); | |||
} |
@@ -25,7 +25,6 @@ struct library_manifest { | |||
* Load the library manifest from an existing file | |||
*/ | |||
static library_manifest load_from_file(path_ref); | |||
static library_manifest load_from_dds_file(path_ref); | |||
/** | |||
* Find a library manifest within a directory. This will search for a few |
@@ -59,11 +59,7 @@ library_root library_root::from_directory(path_ref lib_dir) { | |||
man.name = lib_dir.filename().string(); | |||
auto found = library_manifest::find_in_directory(lib_dir); | |||
if (found) { | |||
if (found->extension() == ".dds") { | |||
man = library_manifest::load_from_dds_file(*found); | |||
} else { | |||
man = library_manifest::load_from_file(*found); | |||
} | |||
man = library_manifest::load_from_file(*found); | |||
} | |||
auto lib = library_root(lib_dir, std::move(sources), std::move(man)); |
@@ -3,167 +3,112 @@ | |||
#include <dds/dym.hpp> | |||
#include <dds/error/errors.hpp> | |||
#include <dds/util/string.hpp> | |||
#include <libman/parse.hpp> | |||
#include <range/v3/view/split.hpp> | |||
#include <range/v3/view/split_when.hpp> | |||
#include <range/v3/view/transform.hpp> | |||
#include <semester/decomp.hpp> | |||
#include <semester/walk.hpp> | |||
#include <spdlog/spdlog.h> | |||
#include <json5/parse_data.hpp> | |||
using namespace dds; | |||
package_manifest package_manifest::load_from_dds_file(const fs::path& fpath) { | |||
spdlog::warn( | |||
"Using deprecated package.dds parsing (on file {}). This will be removed soon. Migrate!", | |||
fpath.string()); | |||
auto kvs = lm::parse_file(fpath); | |||
package_manifest ret; | |||
std::string version_str; | |||
std::vector<std::string> depends_strs; | |||
std::optional<std::string> opt_test_driver; | |||
lm::read(fmt::format("Reading package manifest '{}'", fpath.string()), | |||
kvs, | |||
lm::read_required("Name", ret.pkg_id.name), | |||
lm::read_opt("Namespace", ret.namespace_), | |||
lm::read_required("Version", version_str), | |||
lm::read_accumulate("Depends", depends_strs), | |||
lm::read_opt("Test-Driver", opt_test_driver), | |||
lm_reject_dym{{"Name", "Namespace", "Version", "Depends", "Test-Driver"}}); | |||
namespace { | |||
if (ret.pkg_id.name.empty()) { | |||
throw_user_error<errc::invalid_pkg_name>("'Name' field in [{}] may not be an empty string", | |||
fpath.string()); | |||
} | |||
if (version_str.empty()) { | |||
throw_user_error< | |||
errc::invalid_version_string>("'Version' field in [{}] may not be an empty string", | |||
fpath.string()); | |||
} | |||
if (opt_test_driver) { | |||
auto& test_driver_str = *opt_test_driver; | |||
if (test_driver_str == "Catch-Main") { | |||
ret.test_driver = test_lib::catch_main; | |||
} else if (test_driver_str == "Catch") { | |||
ret.test_driver = test_lib::catch_; | |||
} else { | |||
auto dym = *did_you_mean(test_driver_str, {"Catch-Main", "Catch"}); | |||
throw_user_error< | |||
errc::unknown_test_driver>("Unknown 'test_driver' '{}' (Did you mean '{}'?)", | |||
test_driver_str, | |||
dym); | |||
} | |||
} | |||
using require_obj = semester::require_type<json5::data::mapping_type>; | |||
using require_array = semester::require_type<json5::data::array_type>; | |||
using require_str = semester::require_type<std::string>; | |||
if (ret.namespace_.empty()) { | |||
ret.namespace_ = ret.pkg_id.name; | |||
} | |||
ret.pkg_id.version = semver::version::parse(version_str); | |||
package_manifest parse_json(const json5::data& data, std::string_view fpath) { | |||
package_manifest ret; | |||
ret.dependencies = depends_strs // | |||
| ranges::views::transform(dependency::parse_depends_string) // | |||
| ranges::to_vector; | |||
using namespace semester::walk_ops; | |||
auto push_depends_obj_kv = [&](std::string key, auto&& dat) { | |||
dependency pending_dep; | |||
if (!dat.is_string()) { | |||
return walk.reject("Dependency object values should be strings"); | |||
} | |||
try { | |||
auto rng = semver::range::parse_restricted(dat.as_string()); | |||
dependency dep{std::move(key), {rng.low(), rng.high()}}; | |||
ret.dependencies.push_back(std::move(dep)); | |||
} catch (const semver::invalid_range&) { | |||
throw_user_error<errc::invalid_version_range_string>( | |||
"Invalid version range string '{}' in dependency declaration for " | |||
"'{}'", | |||
dat.as_string(), | |||
key); | |||
} | |||
return walk.accept; | |||
}; | |||
walk(data, | |||
require_obj{"Root of package manifest should be a JSON object"}, | |||
mapping{ | |||
if_key{"$schema", just_accept}, | |||
required_key{"name", | |||
"A string 'name' is required", | |||
require_str{"'name' must be a string"}, | |||
put_into{ret.pkg_id.name}}, | |||
required_key{"namespace", | |||
"A string 'namespace' is a required ", | |||
require_str{"'namespace' must be a string"}, | |||
put_into{ret.namespace_}}, | |||
required_key{"version", | |||
"A 'version' string is requried", | |||
require_str{"'version' must be a string"}, | |||
put_into{ret.pkg_id.version, | |||
[](std::string s) { return semver::version::parse(s); }}}, | |||
if_key{"depends", | |||
[&](auto&& dat) { | |||
if (dat.is_object()) { | |||
spdlog::warn( | |||
"{}: Using a JSON object for 'depends' is deprecated. Use an " | |||
"array of strings instead.", | |||
fpath); | |||
return mapping{push_depends_obj_kv}(dat); | |||
} else if (dat.is_array()) { | |||
return for_each{put_into{std::back_inserter(ret.dependencies), | |||
[](const std::string& depstr) { | |||
return dependency::parse_depends_string( | |||
depstr); | |||
}}}(dat); | |||
} else { | |||
return walk.reject( | |||
"'depends' should be an array of dependency strings"); | |||
} | |||
}}, | |||
if_key{"test_driver", | |||
require_str{"'test_driver' must be a string"}, | |||
put_into{ret.test_driver, | |||
[](std::string const& td_str) { | |||
if (td_str == "Catch-Main") { | |||
return test_lib::catch_main; | |||
} else if (td_str == "Catch") { | |||
return test_lib::catch_; | |||
} else { | |||
auto dym = *did_you_mean(td_str, {"Catch-Main", "Catch"}); | |||
throw_user_error<errc::unknown_test_driver>( | |||
"Unknown 'test_driver' '{}' (Did you mean '{}'?)", | |||
td_str, | |||
dym); | |||
} | |||
}}}, | |||
}); | |||
return ret; | |||
} | |||
} // namespace | |||
package_manifest package_manifest::load_from_file(const fs::path& fpath) { | |||
auto content = slurp_file(fpath); | |||
auto data = json5::parse_data(content); | |||
if (!data.is_object()) { | |||
throw_user_error<errc::invalid_pkg_manifest>("Root value must be an object"); | |||
try { | |||
return parse_json(data, fpath.string()); | |||
} catch (const semester::walk_error& e) { | |||
throw_user_error<errc::invalid_pkg_manifest>(e.what()); | |||
} | |||
package_manifest ret; | |||
using namespace semester::decompose_ops; | |||
auto res = semester::decompose( // | |||
data, | |||
try_seq{ | |||
require_type<json5::data::mapping_type>{ | |||
"The root of a package manifest must be an object (mapping)"}, | |||
mapping{ | |||
if_key{"$schema", just_accept}, | |||
if_key{ | |||
"name", | |||
require_type<std::string>{"`name` must be a string"}, | |||
put_into{ret.pkg_id.name}, | |||
}, | |||
if_key{ | |||
"namespace", | |||
require_type<std::string>{"`namespace` must be a string"}, | |||
put_into{ret.namespace_}, | |||
}, | |||
if_key{ | |||
"version", | |||
require_type<std::string>{"`version` must be a string"}, | |||
[&](auto&& version_str_) { | |||
auto& version = version_str_.as_string(); | |||
ret.pkg_id.version = semver::version::parse(version); | |||
return semester::dc_accept; | |||
}, | |||
}, | |||
if_key{ | |||
"depends", | |||
require_type<json5::data::mapping_type>{ | |||
"`depends` must be a mapping between package names and version ranges"}, | |||
mapping{[&](auto pkg_name, auto&& range_str_) { | |||
if (!range_str_.is_string()) { | |||
throw_user_error<errc::invalid_pkg_manifest>( | |||
"Dependency for '{}' must be a range string", pkg_name); | |||
} | |||
try { | |||
auto rng = semver::range::parse_restricted(range_str_.as_string()); | |||
dependency dep{std::string(pkg_name), {rng.low(), rng.high()}}; | |||
ret.dependencies.push_back(std::move(dep)); | |||
} catch (const semver::invalid_range&) { | |||
throw_user_error<errc::invalid_version_range_string>( | |||
"Invalid version range string '{}' in dependency declaration for " | |||
"'{}'", | |||
range_str_.as_string(), | |||
pkg_name); | |||
} | |||
return semester::dc_accept; | |||
}}, | |||
}, | |||
if_key{"test_driver", | |||
require_type<std::string>{"`test_driver` must be a string"}, | |||
[&](auto&& test_driver_str_) { | |||
auto& test_driver = test_driver_str_.as_string(); | |||
if (test_driver == "Catch-Main") { | |||
ret.test_driver = test_lib::catch_main; | |||
} else if (test_driver == "Catch") { | |||
ret.test_driver = test_lib::catch_; | |||
} else { | |||
auto dym = *did_you_mean(test_driver, {"Catch-Main", "Catch"}); | |||
throw_user_error<errc::unknown_test_driver>( | |||
"Unknown 'test_driver' '{}' (Did you mean '{}'?)", | |||
test_driver, | |||
dym); | |||
} | |||
return semester::dc_accept; | |||
}}, | |||
[&](auto key, auto&&) { | |||
return semester::dc_reject_t{ | |||
fmt::format("Unknown key `{}` in package manifest", key)}; | |||
}}}); | |||
auto rej = std::get_if<semester::dc_reject_t>(&res); | |||
if (rej) { | |||
throw_user_error<errc::invalid_pkg_manifest>(rej->message); | |||
} | |||
if (ret.pkg_id.name.empty()) { | |||
throw_user_error<errc::invalid_pkg_manifest>("The 'name' field is required."); | |||
} | |||
if (ret.namespace_.empty()) { | |||
throw_user_error<errc::invalid_pkg_manifest>("The 'namespace'` field is required."); | |||
} | |||
return ret; | |||
} | |||
std::optional<fs::path> package_manifest::find_in_directory(path_ref dirpath) { | |||
@@ -179,10 +124,6 @@ std::optional<fs::path> package_manifest::find_in_directory(path_ref dirpath) { | |||
} | |||
} | |||
auto dds_fname = dirpath / "package.dds"; | |||
if (fs::is_regular_file(dds_fname)) { | |||
return dds_fname; | |||
} | |||
return std::nullopt; | |||
} | |||
@@ -191,9 +132,5 @@ std::optional<package_manifest> package_manifest::load_from_directory(path_ref d | |||
if (!found.has_value()) { | |||
return std::nullopt; | |||
} | |||
if (found->extension() == ".dds") { | |||
return load_from_dds_file(*found); | |||
} else { | |||
return load_from_file(*found); | |||
} | |||
} | |||
return load_from_file(*found); | |||
} |
@@ -35,7 +35,6 @@ struct package_manifest { | |||
* Load a package manifest from a file on disk. | |||
*/ | |||
static package_manifest load_from_file(path_ref); | |||
static package_manifest load_from_dds_file(path_ref); | |||
/** | |||
* Find a package manifest contained within a directory. This will search |
@@ -1,5 +1,7 @@ | |||
#pragma once | |||
#include <chrono> | |||
#include <optional> | |||
#include <string> | |||
#include <string_view> | |||
#include <vector> | |||
@@ -25,11 +27,25 @@ std::string quote_command(const Container& c) { | |||
/**
 * The result of running a subprocess to completion via run_proc().
 */
struct proc_result {
    // Signal associated with the child's termination, if any (zero otherwise).
    // NOTE(review): not set in the visible code paths — presumably filled in by
    // the platform-specific run_proc() implementations; confirm there.
    int signal = 0;
    // The child's exit/return code
    int retc = 0;
    // Set when the child exceeded proc_options::timeout and was interrupted
    bool timed_out = false;
    // Output captured from the child's stdio pipe
    std::string output;
    // The run is "okay" only if the child exited with code zero and no signal
    bool okay() const noexcept { return retc == 0 && signal == 0; }
};
proc_result run_proc(const std::vector<std::string>& args); | |||
/**
 * Options controlling how a subprocess is spawned and awaited.
 */
struct proc_options {
    // The command line to execute: the program followed by its arguments
    std::vector<std::string> command;
    /**
     * Timeout for the subprocess. If unset (std::nullopt, the default), waits
     * forever. When the timeout elapses the child is interrupted (SIGINT /
     * Ctrl-C event) and `proc_result::timed_out` is set.
     */
    std::optional<std::chrono::milliseconds> timeout = std::nullopt;
};

// Run a subprocess as described by `opts` and wait for it to finish,
// capturing its output. Implemented per-platform.
proc_result run_proc(const proc_options& opts);

// Convenience overload: run `args` with default options (no timeout)
inline proc_result run_proc(std::vector<std::string> args) {
    return run_proc(proc_options{.command = std::move(args)});
}
} // namespace dds |
@@ -6,6 +6,7 @@ | |||
#include <spdlog/spdlog.h> | |||
#include <poll.h> | |||
#include <signal.h> | |||
#include <sys/wait.h> | |||
#include <unistd.h> | |||
@@ -61,8 +62,8 @@ spawn_child(const std::vector<std::string>& command, int stdout_pipe, int close_ | |||
} // namespace | |||
proc_result dds::run_proc(const std::vector<std::string>& command) { | |||
spdlog::debug("Spawning subprocess: {}", quote_command(command)); | |||
proc_result dds::run_proc(const proc_options& opts) { | |||
spdlog::debug("Spawning subprocess: {}", quote_command(opts.command)); | |||
int stdio_pipe[2] = {}; | |||
auto rc = ::pipe(stdio_pipe); | |||
check_rc(rc == 0, "Create stdio pipe for subprocess"); | |||
@@ -70,7 +71,7 @@ proc_result dds::run_proc(const std::vector<std::string>& command) { | |||
int read_pipe = stdio_pipe[0]; | |||
int write_pipe = stdio_pipe[1]; | |||
auto child = spawn_child(command, write_pipe, read_pipe); | |||
auto child = spawn_child(opts.command, write_pipe, read_pipe); | |||
::close(write_pipe); | |||
@@ -80,13 +81,23 @@ proc_result dds::run_proc(const std::vector<std::string>& command) { | |||
proc_result res; | |||
using namespace std::chrono_literals; | |||
auto timeout = opts.timeout; | |||
while (true) { | |||
rc = ::poll(&stdio_fd, 1, -1); | |||
rc = ::poll(&stdio_fd, 1, static_cast<int>(timeout.value_or(-1ms).count())); | |||
if (rc && errno == EINTR) { | |||
errno = 0; | |||
continue; | |||
} | |||
check_rc(rc > 0, "Failed in poll()"); | |||
if (rc == 0) { | |||
// Timeout! | |||
::kill(child, SIGINT); | |||
timeout = std::nullopt; | |||
res.timed_out = true; | |||
spdlog::debug("Subprocess [{}] timed out", quote_command(opts.command)); | |||
continue; | |||
} | |||
std::string buffer; | |||
buffer.resize(1024); | |||
auto nread = ::read(stdio_fd.fd, buffer.data(), buffer.size()); |
@@ -1,6 +1,7 @@ | |||
#ifdef _WIN32 | |||
#include "./proc.hpp" | |||
#include <neo/assert.hpp> | |||
#include <spdlog/spdlog.h> | |||
#include <wil/resource.h> | |||
@@ -12,6 +13,7 @@ | |||
#include <stdexcept> | |||
using namespace dds; | |||
using namespace std::chrono_literals; | |||
namespace { | |||
@@ -21,8 +23,8 @@ namespace { | |||
} // namespace | |||
proc_result dds::run_proc(const std::vector<std::string>& cmd) { | |||
auto cmd_str = quote_command(cmd); | |||
proc_result dds::run_proc(const proc_options& opts) { | |||
auto cmd_str = quote_command(opts.command); | |||
::SECURITY_ATTRIBUTES security = {}; | |||
security.bInheritHandle = TRUE; | |||
@@ -36,6 +38,8 @@ proc_result dds::run_proc(const std::vector<std::string>& cmd) { | |||
} | |||
::SetHandleInformation(reader.get(), HANDLE_FLAG_INHERIT, 0); | |||
::COMMTIMEOUTS timeouts; | |||
::GetCommTimeouts(reader.get(), &timeouts); | |||
wil::unique_process_information proc_info; | |||
@@ -50,7 +54,7 @@ proc_result dds::run_proc(const std::vector<std::string>& cmd) { | |||
nullptr, | |||
nullptr, | |||
true, | |||
0, | |||
CREATE_NEW_PROCESS_GROUP, | |||
nullptr, | |||
nullptr, | |||
&startup_info, | |||
@@ -62,11 +66,30 @@ proc_result dds::run_proc(const std::vector<std::string>& cmd) { | |||
writer.reset(); | |||
std::string output; | |||
proc_result res; | |||
auto timeout = opts.timeout; | |||
while (true) { | |||
const int buffer_size = 256; | |||
char buffer[buffer_size]; | |||
DWORD nread = 0; | |||
okay = ::ReadFile(reader.get(), buffer, buffer_size, &nread, nullptr); | |||
// Reload the timeout on the pipe | |||
timeouts.ReadTotalTimeoutConstant = static_cast<DWORD>(timeout.value_or(0ms).count()); | |||
::SetCommTimeouts(reader.get(), &timeouts); | |||
// Read some bytes from the process | |||
okay = ::ReadFile(reader.get(), buffer, buffer_size, &nread, nullptr); | |||
if (!okay && ::GetLastError() == ERROR_TIMEOUT) { | |||
// We didn't read any bytes. Hit the timeout | |||
neo_assert_always(invariant, | |||
nread == 0, | |||
"Didn't expect to read bytes when a timeout was reached", | |||
nread, | |||
timeout->count()); | |||
res.timed_out = true; | |||
timeout = std::nullopt; | |||
::GenerateConsoleCtrlEvent(CTRL_C_EVENT, proc_info.dwProcessId); | |||
continue; | |||
} | |||
if (!okay && ::GetLastError() != ERROR_BROKEN_PIPE) { | |||
throw_system_error("Failed while reading from the stdio pipe"); | |||
} | |||
@@ -85,7 +108,6 @@ proc_result dds::run_proc(const std::vector<std::string>& cmd) { | |||
throw_system_error("Failed reading exit code of process"); | |||
} | |||
proc_result res; | |||
res.retc = rc; | |||
res.output = std::move(output); | |||
return res; |
@@ -18,7 +18,7 @@ namespace { | |||
void sdist_export_file(path_ref out_root, path_ref in_root, path_ref filepath) { | |||
auto relpath = fs::relative(filepath, in_root); | |||
spdlog::info("Export file {}", relpath.string()); | |||
spdlog::debug("Export file {}", relpath.string()); | |||
auto dest = out_root / relpath; | |||
fs::create_directories(dest.parent_path()); | |||
fs::copy(filepath, dest); | |||
@@ -96,8 +96,7 @@ sdist dds::create_sdist_in_dir(path_ref out, const sdist_params& params) { | |||
params.project_dir.string()); | |||
} | |||
auto pkg_man = man_path->extension() == ".dds" ? package_manifest::load_from_dds_file(*man_path) | |||
: package_manifest::load_from_file(*man_path); | |||
auto pkg_man = package_manifest::load_from_file(*man_path); | |||
sdist_export_file(out, params.project_dir, *man_path); | |||
spdlog::info("Generated export as {}", pkg_man.pkg_id.to_string()); | |||
return sdist::from_directory(out); |
@@ -18,6 +18,7 @@ std::optional<source_kind> dds::infer_source_kind(path_ref p) noexcept { | |||
".hh", | |||
".hpp", | |||
".hxx", | |||
".inc", | |||
".inl", | |||
".ipp", | |||
}; |
@@ -85,6 +85,9 @@ dds::fs_transformation dds::fs_transformation::from_json(const json5::data& data | |||
}; | |||
}; | |||
struct fs_transformation::edit pending_edit; | |||
fs_transformation::one_edit pending_edit_item; | |||
walk(data, | |||
require_obj{"Each transform must be a JSON object"}, | |||
mapping{ | |||
@@ -115,6 +118,66 @@ dds::fs_transformation dds::fs_transformation::from_json(const json5::data& data | |||
require_str{"'content' must be a string"}, | |||
put_into(ret.write->content)}, | |||
}}, | |||
if_key{ | |||
"edit", | |||
require_obj{"'edit' should be a JSON object"}, | |||
prep_optional(ret.edit), | |||
mapping{ | |||
required_key{"path", | |||
"'path' is required", | |||
require_str{"'path' should be a string path"}, | |||
put_into(ret.edit->path, str_to_path)}, | |||
required_key{ | |||
"edits", | |||
"An 'edits' array is required", | |||
require_array{"'edits' should be an array"}, | |||
for_each{ | |||
require_obj{"Each edit should be a JSON object"}, | |||
[&](auto&&) { | |||
ret.edit->edits.emplace_back(); | |||
return walk.pass; | |||
}, | |||
[&](auto&& dat) { | |||
return mapping{ | |||
required_key{ | |||
"kind", | |||
"Edit 'kind' is required", | |||
require_str{"'kind' should be a string"}, | |||
[&](std::string s) { | |||
auto& ed = ret.edit->edits.back(); | |||
if (s == "delete") { | |||
ed.kind = ed.delete_; | |||
} else if (s == "insert") { | |||
ed.kind = ed.insert; | |||
} else { | |||
return walk.reject("Invalid edit kind"); | |||
} | |||
return walk.accept; | |||
}, | |||
}, | |||
required_key{ | |||
"line", | |||
"Edit 'line' number is required", | |||
require_type<double>{"'line' should be an integer"}, | |||
[&](double d) { | |||
ret.edit->edits.back().line = int(d); | |||
return walk.accept; | |||
}, | |||
}, | |||
if_key{ | |||
"content", | |||
require_str{"'content' should be a string"}, | |||
[&](std::string s) { | |||
ret.edit->edits.back().content = s; | |||
return walk.accept; | |||
}, | |||
}, | |||
}(dat); | |||
}, | |||
}, | |||
}, | |||
}, | |||
}, | |||
}); | |||
return ret; | |||
@@ -224,7 +287,8 @@ void do_remove(const struct fs_transformation::remove& oper, path_ref root) { | |||
if (child.is_directory()) { | |||
continue; | |||
} | |||
if (!oper.only_matching.empty() && !matches_any(child, oper.only_matching)) { | |||
auto relpath = child.path().lexically_proximate(from); | |||
if (!oper.only_matching.empty() && !matches_any(relpath, oper.only_matching)) { | |||
continue; | |||
} | |||
fs::remove_all(child); | |||
@@ -244,12 +308,36 @@ void do_write(const struct fs_transformation::write& oper, path_ref root) { | |||
root.string()); | |||
} | |||
std::cout << "Write content: " << oper.content; | |||
auto of = dds::open(dest, std::ios::binary | std::ios::out); | |||
of << oper.content; | |||
} | |||
/**
 * Apply a single line-oriented edit to the file at `filepath`.
 *
 * The file is read line-by-line; at the 1-based line number `edit.line` the
 * edit either drops the line (`delete_`) or inserts `edit.content` before it
 * (`insert`). The rewritten content is then written back over the same file.
 * Lines beyond the end of the file are silently ignored.
 */
void do_edit(path_ref filepath, const fs_transformation::one_edit& edit) {
    auto fstm = open(filepath, std::ios::in | std::ios::binary);
    fstm.exceptions(std::ios::badbit);
    std::string rewritten;
    std::string cur_line;
    int cur_line_no = 0;
    while (std::getline(fstm, cur_line, '\n')) {
        ++cur_line_no;
        if (cur_line_no != edit.line) {
            // Not the target line: keep it verbatim
            rewritten += cur_line + "\n";
        } else if (edit.kind == edit.insert) {
            // Insert the new content just before the target line
            rewritten += edit.content + "\n";
            rewritten += cur_line + "\n";
        }
        // (delete_: drop the target line entirely)
    }
    // Reopen for writing and replace the file's content
    fstm = open(filepath, std::ios::out | std::ios::binary);
    fstm << rewritten;
}
} // namespace | |||
void dds::fs_transformation::apply_to(dds::path_ref root_) const { | |||
@@ -266,6 +354,19 @@ void dds::fs_transformation::apply_to(dds::path_ref root_) const { | |||
if (write) { | |||
do_write(*write, root); | |||
} | |||
if (edit) { | |||
auto fpath = root / edit->path; | |||
if (!parent_dir_of(root, fpath)) { | |||
throw_external_error<errc::invalid_repo_transform>( | |||
"Filesystem transformation wants to edit a file outside of the root. Attempted to " | |||
"modify [{}]. Writing is restricted to [{}].", | |||
fpath.string(), | |||
root.string()); | |||
} | |||
for (auto&& ed : edit->edits) { | |||
do_edit(fpath, ed); | |||
} | |||
} | |||
} | |||
namespace { | |||
@@ -315,7 +416,7 @@ std::string fs_transformation::as_json() const noexcept { | |||
for (auto&& gl : remove->only_matching) { | |||
if_arr.push_back(gl.string()); | |||
} | |||
rm["only-matching"] = rm; | |||
rm["only-matching"] = if_arr; | |||
} | |||
obj["remove"] = rm; | |||
} | |||
@@ -325,6 +426,20 @@ std::string fs_transformation::as_json() const noexcept { | |||
wr["content"] = write->content; | |||
obj["write"] = wr; | |||
} | |||
if (edit) { | |||
auto ed = nlohmann::json::object(); | |||
ed["path"] = edit->path.string(); | |||
auto edits = nlohmann::json::array(); | |||
for (auto&& one : edit->edits) { | |||
auto one_ed = nlohmann::json::object(); | |||
one_ed["kind"] = one.kind == one.delete_ ? "delete" : "insert"; | |||
one_ed["line"] = one.line; | |||
one_ed["content"] = one.content; | |||
edits.push_back(std::move(one_ed)); | |||
} | |||
ed["edits"] = edits; | |||
obj["edit"] = ed; | |||
} | |||
return to_string(obj); | |||
} |
@@ -34,10 +34,26 @@ struct fs_transformation { | |||
std::string content; | |||
}; | |||
std::optional<struct copy> copy; | |||
std::optional<struct move> move; | |||
std::optional<remove> remove; | |||
std::optional<struct write> write; | |||
    /// A single line-oriented edit to apply to a file.
    struct one_edit {
        // 1-based line number the edit applies to
        int line = 0;
        // For `insert`: text placed on a new line before `line`.
        // Ignored for `delete_`.
        std::string content;
        enum kind_t {
            delete_,  // remove the target line
            insert,   // insert `content` before the target line
        } kind
        = delete_;
    };

    /// An "edit" transformation: a target file and the edits to apply to it.
    struct edit {
        // Path of the file to edit, resolved relative to the transform root
        fs::path path;
        // Edits applied one at a time, in order
        std::vector<one_edit> edits;
    };
std::optional<struct copy> copy; | |||
std::optional<struct move> move; | |||
std::optional<struct remove> remove; | |||
std::optional<struct write> write; | |||
std::optional<struct edit> edit; | |||
void apply_to(path_ref root) const; | |||
@@ -221,6 +221,10 @@ bool check_matches(path_iter elem_it, | |||
} else { | |||
// An rglob pattern "**". Check by peeling of individual path elements | |||
const auto next_pat = std::next(pat_it); | |||
if (next_pat == pat_stop) { | |||
// The "**" is at the end of the glob. This matches everything. | |||
return true; | |||
} | |||
for (; elem_it != elem_stop; ++elem_it) { | |||
if (check_matches(elem_it, elem_stop, next_pat, pat_stop)) { | |||
return true; |
@@ -66,4 +66,7 @@ TEST_CASE("Check globs") { | |||
CHECK(glob.match("foo/thing/bar/thing/baz.txt")); | |||
CHECK(glob.match("foo/bar/thing/baz.txt")); | |||
CHECK(glob.match("foo/bar/baz/baz.txt")); | |||
glob = dds::glob::compile("doc/**"); | |||
CHECK(glob.match("doc/something.txt")); | |||
} |
@@ -0,0 +1,13 @@ | |||
#include "./parallel.hpp" | |||
#include <spdlog/spdlog.h> | |||
using namespace dds; | |||
void dds::log_exception(std::exception_ptr eptr) noexcept { | |||
try { | |||
std::rethrow_exception(eptr); | |||
} catch (const std::exception& e) { | |||
spdlog::error(e.what()); | |||
} | |||
} |
@@ -0,0 +1,62 @@ | |||
#pragma once | |||
#include <algorithm> | |||
#include <mutex> | |||
#include <stdexcept> | |||
#include <thread> | |||
#include <vector> | |||
namespace dds { | |||
void log_exception(std::exception_ptr) noexcept; | |||
template <typename Range, typename Func> | |||
bool parallel_run(Range&& rng, int n_jobs, Func&& fn) { | |||
// We don't bother with a nice thread pool, as the overhead of most build | |||
// tasks dwarf the cost of interlocking. | |||
std::mutex mut; | |||
auto iter = rng.begin(); | |||
const auto stop = rng.end(); | |||
std::vector<std::exception_ptr> exceptions; | |||
auto run_one = [&]() mutable { | |||
while (true) { | |||
std::unique_lock lk{mut}; | |||
if (!exceptions.empty()) { | |||
break; | |||
} | |||
if (iter == stop) { | |||
break; | |||
} | |||
auto&& item = *iter; | |||
++iter; | |||
lk.unlock(); | |||
try { | |||
fn(item); | |||
} catch (...) { | |||
lk.lock(); | |||
exceptions.push_back(std::current_exception()); | |||
break; | |||
} | |||
} | |||
}; | |||
std::unique_lock lk{mut}; | |||
std::vector<std::thread> threads; | |||
if (n_jobs < 1) { | |||
n_jobs = std::thread::hardware_concurrency() + 2; | |||
} | |||
std::generate_n(std::back_inserter(threads), n_jobs, [&] { return std::thread(run_one); }); | |||
lk.unlock(); | |||
for (auto& t : threads) { | |||
t.join(); | |||
} | |||
for (auto eptr : exceptions) { | |||
log_exception(eptr); | |||
} | |||
return exceptions.empty(); | |||
} | |||
} // namespace dds |
@@ -12,17 +12,20 @@ def test_simple_lib(dds: DDS, scope: ExitStack): | |||
scope.enter_context( | |||
dds.set_contents( | |||
'library.dds', | |||
b'Name: TestLibrary', | |||
'library.json5', | |||
b'''{ | |||
name: 'TestLibrary', | |||
}''', | |||
)) | |||
scope.enter_context( | |||
dds.set_contents( | |||
'package.dds', | |||
b''' | |||
Name: TestProject | |||
Version: 0.0.0 | |||
''', | |||
'package.json5', | |||
b'''{ | |||
name: 'TestProject', | |||
version: '0.0.0', | |||
namespace: 'test', | |||
}''', | |||
)) | |||
dds.build(tests=True, apps=False, warnings=False) |
@@ -15,13 +15,12 @@ def test_get(dds: DDS): | |||
'version': 1, | |||
'packages': { | |||
'neo-sqlite3': { | |||
'0.2.2': { | |||
'depends': {}, | |||
'0.3.0': { | |||
'git': { | |||
'url': | |||
'https://github.com/vector-of-bool/neo-sqlite3.git', | |||
'ref': | |||
'0.2.2', | |||
'0.3.0', | |||
}, | |||
}, | |||
}, | |||
@@ -33,6 +32,6 @@ def test_get(dds: DDS): | |||
dds.catalog_import(json_path) | |||
dds.catalog_get('neo-sqlite3@0.2.2') | |||
assert (dds.source_root / 'neo-sqlite3@0.2.2').is_dir() | |||
assert (dds.source_root / 'neo-sqlite3@0.2.2/package.dds').is_file() | |||
dds.catalog_get('neo-sqlite3@0.3.0') | |||
assert (dds.source_root / 'neo-sqlite3@0.3.0').is_dir() | |||
assert (dds.source_root / 'neo-sqlite3@0.3.0/package.jsonc').is_file() |
@@ -18,7 +18,7 @@ def test_import_json(dds: DDS): | |||
'url': 'http://example.com', | |||
'ref': 'master', | |||
}, | |||
'depends': {}, | |||
'depends': [], | |||
}, | |||
'1.2.5': { | |||
'git': { |
@@ -29,10 +29,37 @@ def dds(request, tmp_path: Path, worker_id: str, scope: ExitStack): | |||
project_dir = test_root / params.subdir | |||
# Create the instance. Auto-clean when we're done | |||
yield scope.enter_context(scoped_dds(test_root, project_dir, request.function.__name__)) | |||
yield scope.enter_context( | |||
scoped_dds(test_root, project_dir, request.function.__name__)) | |||
@pytest.fixture | |||
def scope(): | |||
with ExitStack() as scope: | |||
yield scope | |||
yield scope | |||
def pytest_addoption(parser):
    """Register the ``--test-deps`` flag that opts in to the slow deps tests."""
    opts = dict(
        action='store_true',
        default=False,
        help='Run the exhaustive and intensive dds-deps tests')
    parser.addoption('--test-deps', **opts)
def pytest_configure(config):
    """Declare the ``deps_test`` marker so pytest does not warn about it."""
    marker_line = 'deps_test: Deps tests are slow. Enable with --test-deps'
    config.addinivalue_line('markers', marker_line)
def pytest_collection_modifyitems(config, items):
    """Unless ``--test-deps`` was given, mark every ``deps_test`` item skipped."""
    if config.getoption('--test-deps'):
        # The user explicitly opted in: run everything as collected
        return
    for item in items:
        if 'deps_test' in item.keywords:
            item.add_marker(
                pytest.mark.skip(
                    reason=
                    'Exhaustive deps tests are slow and perform many Git clones. Use --test-deps to run them.'
                ))
@@ -7,14 +7,18 @@ | |||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||
"ref": "0.1.0" | |||
}, | |||
"depends": {} | |||
}, | |||
"0.2.2": { | |||
"git": { | |||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||
"ref": "0.2.2" | |||
}, | |||
"depends": {} | |||
}, | |||
"0.3.0": { | |||
"git": { | |||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||
"ref": "0.3.0" | |||
}, | |||
} | |||
} | |||
} |
@@ -1,5 +1,5 @@ | |||
{ | |||
depends: { | |||
'neo-sqlite3': '+0.2.2', | |||
'neo-sqlite3': '+0.3.0', | |||
}, | |||
} |
@@ -5,7 +5,7 @@ def test_build_deps_from_file(dds: DDS): | |||
assert not dds.deps_build_dir.is_dir() | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
dds.build_deps(['-d', 'deps.json5']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.2.2').is_dir() | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.3.0').is_dir() | |||
assert (dds.scratch_dir / 'INDEX.lmi').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() | |||
@@ -14,8 +14,8 @@ def test_build_deps_from_file(dds: DDS): | |||
def test_build_deps_from_cmd(dds: DDS): | |||
assert not dds.deps_build_dir.is_dir() | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
dds.build_deps(['neo-sqlite3 =0.2.2']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.2.2').is_dir() | |||
dds.build_deps(['neo-sqlite3=0.3.0']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.3.0').is_dir() | |||
assert (dds.scratch_dir / 'INDEX.lmi').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() | |||
@@ -24,8 +24,8 @@ def test_build_deps_from_cmd(dds: DDS): | |||
def test_multiple_deps(dds: DDS): | |||
assert not dds.deps_build_dir.is_dir() | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
dds.build_deps(['neo-sqlite3 ^0.2.2', 'neo-sqlite3 ~0.2.0']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.2.2').is_dir() | |||
dds.build_deps(['neo-sqlite3^0.2.0', 'neo-sqlite3~0.3.0']) | |||
assert (dds.deps_build_dir / 'neo-sqlite3@0.3.0').is_dir() | |||
assert (dds.scratch_dir / 'INDEX.lmi').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | |||
assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() |
@@ -0,0 +1,563 @@ | |||
import json | |||
from pathlib import Path | |||
from typing import NamedTuple, Sequence, List | |||
import pytest | |||
from tests import DDS, fileutil | |||
class DepsCase(NamedTuple):
    """One dependency-integration case: a dependency statement (``dep``), the
    library usage key (``usage``), and a C++ source file that exercises it."""
    dep: str
    usage: str
    source: str

    def setup_root(self, dds: DDS):
        """Write package.json, library.json, and the test source into the
        project root of ``dds``; files are removed via the DDS exit scope."""
        pkg_json = json.dumps({
            'name': 'test-project',
            'namespace': 'test',
            'version': '0.0.0',
            'depends': [self.dep],
        })
        lib_json = json.dumps({
            'name': 'test',
            'uses': [self.usage],
        })
        dds.scope.enter_context(
            fileutil.set_contents(dds.source_root / 'package.json',
                                  pkg_json.encode()))
        dds.scope.enter_context(
            fileutil.set_contents(dds.source_root / 'library.json',
                                  lib_json.encode()))
        dds.scope.enter_context(
            fileutil.set_contents(dds.source_root / 'src/test.test.cpp',
                                  self.source.encode()))
CASES: List[DepsCase] = [] | |||
def get_default_pkg_versions(pkg: str) -> Sequence[str]:
    """Return every version of ``pkg`` declared in the repository-root
    catalog.json (three directories above this file)."""
    catalog_path = Path(__file__).resolve().parents[2] / 'catalog.json'
    catalog = json.loads(catalog_path.read_text())
    return list(catalog['packages'][pkg].keys())
def add_cases(pkg: str, uses: str, versions: Sequence[str], source: str):
    """Register one DepsCase per version in the module-level CASES list.

    A ``versions`` value of ``['auto']`` expands to every version listed for
    ``pkg`` in the repository catalog.json."""
    if versions == ['auto']:
        versions = get_default_pkg_versions(pkg)
    CASES.extend(
        DepsCase(f'{pkg}@{ver}', uses, source) for ver in versions)
# magic_enum tests | |||
""" | |||
## ## ### ###### #### ###### ######## ## ## ## ## ## ## | |||
### ### ## ## ## ## ## ## ## ## ### ## ## ## ### ### | |||
#### #### ## ## ## ## ## ## #### ## ## ## #### #### | |||
## ### ## ## ## ## #### ## ## ###### ## ## ## ## ## ## ### ## | |||
## ## ######### ## ## ## ## ## ## #### ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## | |||
## ## ## ## ###### #### ###### ####### ######## ## ## ####### ## ## | |||
""" | |||
add_cases( | |||
'magic_enum', 'neargye/magic_enum', ['auto'], r''' | |||
#include <magic_enum.hpp> | |||
#include <string_view> | |||
enum my_enum { | |||
foo, | |||
bar, | |||
}; | |||
int main() { | |||
if (magic_enum::enum_name(my_enum::foo) != "foo") { | |||
return 1; | |||
} | |||
} | |||
''') | |||
# Range-v3 tests | |||
""" | |||
######## ### ## ## ###### ######## ## ## ####### | |||
## ## ## ## ### ## ## ## ## ## ## ## ## | |||
## ## ## ## #### ## ## ## ## ## ## | |||
######## ## ## ## ## ## ## #### ###### ####### ## ## ####### | |||
## ## ######### ## #### ## ## ## ## ## ## | |||
## ## ## ## ## ### ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ###### ######## ### ####### | |||
""" | |||
add_cases( | |||
'range-v3', 'range-v3/range-v3', ['auto'], r''' | |||
#include <range/v3/algorithm/remove_if.hpp> | |||
#include <vector> | |||
#include <algorithm> | |||
int main() { | |||
std::vector<int> nums = {1, 2, 3, 5, 1, 4, 2, 7, 8, 0, 9}; | |||
auto end = ranges::remove_if(nums, [](auto i) { return i % 2; }); | |||
return std::distance(nums.begin(), end) != 5; | |||
} | |||
''') | |||
# nlohmann-json | |||
""" | |||
## ## ## ####### ## ## ## ## ### ## ## ## ## ## ###### ####### ## ## | |||
### ## ## ## ## ## ## ### ### ## ## ### ## ### ## ## ## ## ## ## ### ## | |||
#### ## ## ## ## ## ## #### #### ## ## #### ## #### ## ## ## ## ## #### ## | |||
## ## ## ## ## ## ######### ## ### ## ## ## ## ## ## ## ## ## ####### ## ###### ## ## ## ## ## | |||
## #### ## ## ## ## ## ## ## ######### ## #### ## #### ## ## ## ## ## ## #### | |||
## ### ## ## ## ## ## ## ## ## ## ## ### ## ### ## ## ## ## ## ## ## ### | |||
## ## ######## ####### ## ## ## ## ## ## ## ## ## ## ###### ###### ####### ## ## | |||
""" | |||
add_cases('nlohmann-json', 'nlohmann/json', ['auto'], r''' | |||
#include <nlohmann/json.hpp> | |||
int main() {} | |||
''') | |||
# ctre | |||
""" | |||
###### ######## ######## ######## | |||
## ## ## ## ## ## | |||
## ## ## ## ## | |||
## ## ######## ###### | |||
## ## ## ## ## | |||
## ## ## ## ## ## | |||
###### ## ## ## ######## | |||
""" | |||
add_cases( | |||
'ctre', 'hanickadot/ctre', ['auto'], r''' | |||
#include <ctre.hpp> | |||
constexpr ctll::fixed_string MY_REGEX{"\\w+-[0-9]+"}; | |||
int main() { | |||
auto [did_match] = ctre::match<MY_REGEX>("foo-44"); | |||
if (!did_match) { | |||
return 1; | |||
} | |||
auto [did_match_2] = ctre::match<MY_REGEX>("bar-1ff"); | |||
if (did_match_2) { | |||
return 2; | |||
} | |||
} | |||
''') | |||
# fmt | |||
""" | |||
######## ## ## ######## | |||
## ### ### ## | |||
## #### #### ## | |||
###### ## ### ## ## | |||
## ## ## ## | |||
## ## ## ## | |||
## ## ## ## | |||
""" | |||
add_cases( | |||
'fmt', 'fmt/fmt', ['auto'], r''' | |||
#include <fmt/core.h> | |||
int main() { | |||
fmt::print("Hello!"); | |||
} | |||
''') | |||
# Catch2 | |||
""" | |||
###### ### ######## ###### ## ## ####### | |||
## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## | |||
## ## ## ## ## ######### ####### | |||
## ######### ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## | |||
###### ## ## ## ###### ## ## ######### | |||
""" | |||
add_cases( | |||
'catch2', 'catch2/catch2', ['auto'], r''' | |||
#include <catch2/catch_with_main.hpp> | |||
TEST_CASE("I am a test case") { | |||
CHECK((2 + 2) == 4); | |||
CHECK_FALSE((2 + 2) == 5); | |||
} | |||
''') | |||
# Asio | |||
""" | |||
### ###### #### ####### | |||
## ## ## ## ## ## ## | |||
## ## ## ## ## ## | |||
## ## ###### ## ## ## | |||
######### ## ## ## ## | |||
## ## ## ## ## ## ## | |||
## ## ###### #### ####### | |||
""" | |||
add_cases( | |||
'asio', 'asio/asio', ['auto'], r''' | |||
#include <asio.hpp> | |||
int main() { | |||
asio::io_context ioc; | |||
int retcode = 12; | |||
ioc.post([&] { | |||
retcode = 0; | |||
}); | |||
ioc.run(); | |||
return retcode; | |||
} | |||
''') | |||
# Abseil | |||
""" | |||
### ######## ###### ######## #### ## | |||
## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## | |||
## ## ######## ###### ###### ## ## | |||
######### ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## | |||
## ## ######## ###### ######## #### ######## | |||
""" | |||
add_cases( | |||
'abseil', 'abseil/abseil', ['auto'], r''' | |||
#include <absl/strings/str_cat.h> | |||
int main() { | |||
std::string_view foo = "foo"; | |||
std::string_view bar = "bar"; | |||
auto cat = absl::StrCat(foo, bar); | |||
return cat != "foobar"; | |||
} | |||
''') | |||
# Zlib | |||
""" | |||
######## ## #### ######## | |||
## ## ## ## ## | |||
## ## ## ## ## | |||
## ## ## ######## | |||
## ## ## ## ## | |||
## ## ## ## ## | |||
######## ######## #### ######## | |||
""" | |||
# Zlib: compresses a small buffer with deflate() and asserts (via <cassert>)
# that the stream finished, the input was fully consumed, and output was
# produced.
add_cases(
    'zlib', 'zlib/zlib', ['auto'], r'''
    #include <zlib.h>
    #include <cassert>
    int main() {
        ::z_stream strm = {};
        deflateInit(&strm, 6);
        const char buffer[] = "foo bar baz";
        strm.next_in = (Bytef*)buffer;
        strm.avail_in = sizeof buffer;
        char dest[256] = {};
        strm.next_out = (Bytef*)dest;
        strm.avail_out = sizeof dest;
        auto ret = deflate(&strm, Z_FINISH);
        deflateEnd(&strm);
        assert(ret == Z_STREAM_END);
        assert(strm.avail_in == 0);
        assert(strm.avail_out != sizeof dest);
    }
''')
# sol2 | |||
""" | |||
###### ####### ## ####### | |||
## ## ## ## ## ## ## | |||
## ## ## ## ## | |||
###### ## ## ## ####### | |||
## ## ## ## ## | |||
## ## ## ## ## ## | |||
###### ####### ######## ######### | |||
""" | |||
# sol2 (Lua bindings): pinned to explicit versions rather than 'auto'. Runs a
# Lua script that calls back into a C++ lambda and returns 0 iff the side
# effect happened exactly once.
add_cases(
    'sol2', 'sol2/sol2', ['3.2.1', '3.2.0', '3.0.3', '3.0.2'], r'''
    #include <sol/sol.hpp>
    int main() {
        sol::state lua;
        int x = 0;
        lua.set_function("beepboop", [&]{ ++x; });
        lua.script("beepboop()");
        return x != 1;
    }
''')
# pegtl | |||
""" | |||
######## ######## ###### ######## ## | |||
## ## ## ## ## ## ## | |||
## ## ## ## ## ## | |||
######## ###### ## #### ## ## | |||
## ## ## ## ## ## | |||
## ## ## ## ## ## | |||
## ######## ###### ## ######## | |||
""" | |||
# PEGTL: parses "+44" with a minimal signed-integer grammar; success is simply
# running to completion (parse errors would throw).
add_cases(
    'pegtl', 'tao/pegtl', ['auto'], r'''
    #include <tao/pegtl.hpp>
    using namespace tao::pegtl;
    struct sign : one<'+', '-'> {};
    struct integer : seq<opt<sign>, plus<digit>> {};
    int main() {
        tao::pegtl::string_input str{"+44", "[test string]"};
        tao::pegtl::parse<integer>(str);
    }
''')
# Boost.PFR | |||
""" | |||
######## ####### ####### ###### ######## ######## ######## ######## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## | |||
######## ## ## ## ## ###### ## ######## ###### ######## | |||
## ## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ### ## ## ## ## | |||
######## ####### ####### ###### ## ### ## ## ## ## | |||
""" | |||
# Boost.PFR: reads aggregate members by index with boost::pfr::get<N> (no
# macros/registration) and returns 0 iff field 0 round-trips.
add_cases(
    'boost.pfr', 'boost/pfr', ['auto'], r'''
    #include <iostream>
    #include <string>
    #include <boost/pfr/precise.hpp>
    struct some_person {
        std::string name;
        unsigned birth_year;
    };
    int main() {
        some_person val{"Edgar Allan Poe", 1809};
        std::cout << boost::pfr::get<0>(val) // No macro!
            << " was born in " << boost::pfr::get<1>(val); // Works with any aggregate initializables!
        return boost::pfr::get<0>(val) != "Edgar Allan Poe";
    }
''')
# Boost.LEAF | |||
""" | |||
## ######## ### ######## | |||
## ## ## ## ## | |||
## ## ## ## ## | |||
## ###### ## ## ###### | |||
## ## ######### ## | |||
## ## ## ## ## | |||
######## ######## ## ## ## | |||
""" | |||
# Boost.LEAF: try_handle_all with a try-block that succeeds, so the error
# handler (32) is never invoked and the program returns 0.
add_cases(
    'boost.leaf', 'boost/leaf', ['auto'], r'''
    #include <boost/leaf/all.hpp>
    namespace leaf = boost::leaf;
    int main() {
        return leaf::try_handle_all(
            [&]() -> leaf::result<int> {
                return 0;
            },
            [](leaf::error_info const&) {
                return 32;
            }
        );
    }
''')
# Boost.mp11 | |||
""" | |||
######## ####### ####### ###### ######## ## ## ######## ## ## | |||
## ## ## ## ## ## ## ## ## ### ### ## ## #### #### | |||
## ## ## ## ## ## ## ## #### #### ## ## ## ## | |||
######## ## ## ## ## ###### ## ## ### ## ######## ## ## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ### ## ## ## ## ## | |||
######## ####### ####### ###### ## ### ## ## ## ###### ###### | |||
""" | |||
# Boost.mp11: mp_false and mp_true compare unequal, so the comparison is
# false and main returns 0.
add_cases(
    'boost.mp11', 'boost/mp11', ['auto'], r'''
    #include <boost/mp11.hpp>
    int main() {
        return boost::mp11::mp_false() == boost::mp11::mp_true();
    }
''')
# libsodium | |||
""" | |||
## #### ######## ###### ####### ######## #### ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## ### ### | |||
## ## ## ## ## ## ## ## ## ## ## ## #### #### | |||
## ## ######## ###### ## ## ## ## ## ## ## ## ### ## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||
######## #### ######## ###### ####### ######## #### ####### ## ## | |||
""" | |||
# libsodium: fills a buffer with randombytes_buf and returns 0 as soon as any
# nonzero byte is seen; an (astronomically unlikely) all-zero buffer returns 1.
add_cases(
    'libsodium', 'sodium/sodium', ['auto'], r'''
    #include <sodium.h>
    #include <algorithm>
    int main() {
        char arr[256] = {};
        ::randombytes_buf(arr, sizeof arr);
        for (auto b : arr) {
            if (b != '\x00') {
                return 0;
            }
        }
        return 1;
    }
''')
# toml++ | |||
""" | |||
######## ####### ## ## ## | |||
## ## ## ### ### ## ## ## | |||
## ## ## #### #### ## ## ## | |||
## ## ## ## ### ## ## ###### ###### | |||
## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## | |||
## ####### ## ## ######## | |||
""" | |||
# toml++: parses an inline TOML document and returns 0 iff the looked-up
# value matches.
add_cases(
    'tomlpp', 'tomlpp/tomlpp', ['auto'], r'''
    #include <toml++/toml.h>
    #include <string_view>
    int main() {
        std::string_view sv = R"(
            [library]
            something = "cats"
            person = "Joe"
        )";
        toml::table tbl = toml::parse(sv);
        return tbl["library"]["person"] != "Joe";
    }
''')
# Inja | |||
""" | |||
#### ## ## ## ### | |||
## ### ## ## ## ## | |||
## #### ## ## ## ## | |||
## ## ## ## ## ## ## | |||
## ## #### ## ## ######### | |||
## ## ### ## ## ## ## | |||
#### ## ## ###### ## ## | |||
""" | |||
# Inja: pinned versions. Renders a template against nlohmann::json data and
# returns 0 iff substitution produced the expected string. NOTE(review): the
# snippet also includes <nlohmann/json.hpp> — presumably inja's own dependency
# resolution pulls in nlohmann-json; confirm against the catalog.
add_cases(
    'inja', 'inja/inja', ['2.0.0', '2.0.1', '2.1.0', '2.2.0'], r'''
    #include <inja/inja.hpp>
    #include <nlohmann/json.hpp>
    int main() {
        nlohmann::json data;
        data["foo"] = "bar";
        auto result = inja::render("foo {{foo}}", data);
        return result != "foo bar";
    }
''')
# Cereal | |||
""" | |||
###### ######## ######## ######## ### ## | |||
## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## | |||
## ###### ######## ###### ## ## ## | |||
## ## ## ## ## ######### ## | |||
## ## ## ## ## ## ## ## ## | |||
###### ######## ## ## ######## ## ## ######## | |||
""" | |||
# Cereal: serializes a small struct (with an intrusive serialize() member)
# into a binary archive backed by a stringstream; success is running cleanly.
add_cases(
    'cereal', 'cereal/cereal', ['auto'], r'''
    #include <cereal/types/memory.hpp>
    #include <cereal/types/string.hpp>
    #include <cereal/archives/binary.hpp>
    #include <sstream>
    struct something {
        int a, b, c;
        std::string str;
        template <typename Ar>
        void serialize(Ar& ar) {
            ar(a, b, c, str);
        }
    };
    int main() {
        std::stringstream strm;
        cereal::BinaryOutputArchive ar{strm};
        something s;
        ar(s);
        return 0;
    }
''')
# pcg | |||
""" | |||
######## ###### ###### | |||
## ## ## ## ## ## | |||
## ## ## ## | |||
######## ## ## #### | |||
## ## ## ## | |||
## ## ## ## ## | |||
## ###### ###### | |||
""" | |||
# pcg: seeds a pcg64 with a fixed value and returns 0 iff the first draw
# equals the expected 64-bit constant. The 'u' suffix is required for the
# snippet to be well-formed C++: the value exceeds LLONG_MAX, and an
# unsuffixed decimal literal with no fitting signed type is ill-formed
# (GCC/Clang merely warn "integer constant is so large that it is unsigned").
add_cases(
    'pcg-cpp', 'pcg/pcg-cpp', ['auto'], r'''
    #include <pcg_random.hpp>
    #include <iostream>
    int main() {
        pcg64 rng{1729};
        return rng() != 14925250045015479985u;
    }
''')
# spdlog | |||
""" | |||
###### ######## ######## ## ####### ###### | |||
## ## ## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## | |||
###### ######## ## ## ## ## ## ## #### | |||
## ## ## ## ## ## ## ## ## | |||
## ## ## ## ## ## ## ## ## ## | |||
###### ## ######## ######## ####### ###### | |||
""" | |||
# spdlog: emits a single log line; success is compiling/linking against the
# library and running cleanly (implicit return 0).
add_cases(
    'spdlog', 'spdlog/spdlog', ['auto'], r'''
    #include <spdlog/spdlog.h>
    int main() {
        spdlog::info("Howdy!");
    }
''')
@pytest.mark.deps_test
@pytest.mark.parametrize('case', CASES, ids=[c.dep for c in CASES])
def test_dep(case: DepsCase, dds: DDS) -> None:
    """Run one registered dependency case: materialize the test project for
    this case, then build it with dds (which fetches and builds the
    dependency as part of the build)."""
    case.setup_root(dds)
    dds.build()
@@ -1,13 +1,12 @@ | |||
{ | |||
"version": 1, | |||
"packages": { | |||
"neo-buffer": { | |||
"0.1.0": { | |||
"neo-fun": { | |||
"0.3.2": { | |||
"git": { | |||
"url": "https://github.com/vector-of-bool/neo-buffer.git", | |||
"ref": "0.1.0" | |||
}, | |||
"depends": {} | |||
"url": "https://github.com/vector-of-bool/neo-fun.git", | |||
"ref": "0.3.2" | |||
} | |||
} | |||
}, | |||
"range-v3": { | |||
@@ -16,8 +15,7 @@ | |||
"url": "https://github.com/ericniebler/range-v3.git", | |||
"ref": "0.9.1", | |||
"auto-lib": "Niebler/range-v3" | |||
}, | |||
"depends": {} | |||
} | |||
} | |||
} | |||
} |
@@ -2,8 +2,8 @@ | |||
name: 'deps-test', | |||
"namespace": "test", | |||
version: '0.0.0', | |||
depends: { | |||
'neo-buffer': '0.1.0', | |||
'range-v3': '0.9.1', | |||
} | |||
depends: [ | |||
'neo-fun@0.3.2', | |||
'range-v3@0.9.1', | |||
] | |||
} |
@@ -1,5 +0,0 @@ | |||
{ | |||
"compiler_id": 'gnu', | |||
"cxx_version": 'c++17', | |||
"cxx_compiler": 'g++-9', | |||
} |
@@ -1,3 +0,0 @@ | |||
{ | |||
"compiler_id": 'msvc', | |||
} |
@@ -1,43 +0,0 @@ | |||
{ | |||
"version": 1, | |||
"packages": { | |||
"catch2": { | |||
"2.12.4": { | |||
"git": { | |||
"url": "https://github.com/catchorg/Catch2.git", | |||
"ref": "v2.12.4", | |||
"auto-lib": "catch2/catch2", | |||
"transform": [ | |||
{ | |||
"move": { | |||
"from": "include", | |||
"to": "include/catch2", | |||
} | |||
}, | |||
{ | |||
"copy": { | |||
"from": "include", | |||
"to": "src" | |||
}, | |||
write: { | |||
path: 'include/catch2/catch_with_main.hpp', | |||
content: '\ | |||
#pragma once \n\ | |||
\n\ | |||
#define CATCH_CONFIG_MAIN \n\ | |||
#include "./catch.hpp" \n\ | |||
\n\ | |||
namespace Catch { \n\ | |||
\n\ | |||
CATCH_REGISTER_REPORTER("console", ConsoleReporter) \n\ | |||
\n\ | |||
} // namespace Catch \n\ | |||
' | |||
} | |||
} | |||
] | |||
} | |||
} | |||
} | |||
} | |||
} |
@@ -1,4 +0,0 @@ | |||
{ | |||
name: 'use-catch2', | |||
uses: ['catch2/catch2'] | |||
} |
@@ -1,8 +0,0 @@ | |||
{ | |||
name: 'use-catch2', | |||
version: '1.0.0', | |||
namespace: 'test', | |||
depends: { | |||
'catch2': '2.12.4' | |||
} | |||
} |
@@ -1,6 +0,0 @@ | |||
#include <catch2/catch_with_main.hpp> | |||
TEST_CASE("I am a simple test case") { | |||
CHECK((2 + 2) == 4); | |||
CHECK_FALSE((2 + 2) == 5); | |||
} |
@@ -1,11 +0,0 @@ | |||
from tests import DDS | |||
from dds_ci import proc | |||
def test_get_build_use_catch2(dds: DDS): | |||
dds.catalog_import(dds.source_root / 'catalog.json5') | |||
tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc' | |||
tc = str(dds.test_dir / tc_fname) | |||
dds.build(toolchain=tc) | |||
proc.check_run((dds.build_dir / 'use-catch2').with_suffix(dds.exe_suffix)) |
@@ -2,7 +2,7 @@ | |||
name: 'use-cryptopp', | |||
version: '1.0.0', | |||
namespace: 'test', | |||
depends: { | |||
'cryptopp': '8.2.0' | |||
} | |||
depends: [ | |||
'cryptopp@8.2.0' | |||
] | |||
} |
@@ -1,8 +1,14 @@ | |||
from tests import DDS | |||
import platform | |||
import pytest | |||
from dds_ci import proc | |||
@pytest.mark.skipif( | |||
platform.system() == 'FreeBSD', | |||
reason='This one has trouble running on FreeBSD') | |||
def test_get_build_use_cryptopp(dds: DDS): | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc' |
@@ -1,7 +0,0 @@ | |||
{ | |||
"compiler_id": 'gnu', | |||
"cxx_version": 'c++17', | |||
"cxx_compiler": 'g++-9', | |||
"flags": '-DSODIUM_STATIC', | |||
"link_flags": '-static-libgcc -static-libstdc++' | |||
} |
@@ -1,4 +0,0 @@ | |||
{ | |||
"compiler_id": 'msvc', | |||
"flags": '-DSODIUM_STATIC', | |||
} |
@@ -1,44 +0,0 @@ | |||
{ | |||
"version": 1, | |||
"packages": { | |||
"libsodium": { | |||
"1.0.18": { | |||
"git": { | |||
"url": "https://github.com/jedisct1/libsodium.git", | |||
"ref": "1.0.18", | |||
"auto-lib": "sodium/sodium", | |||
"transform": [ | |||
{ | |||
"move": { | |||
"from": "src/libsodium/include", | |||
"to": "include/" | |||
} | |||
}, | |||
{ | |||
"copy": { | |||
"from": "builds/msvc/version.h", | |||
"to": "include/sodium/version.h" | |||
} | |||
}, | |||
{ | |||
"move": { | |||
"from": "src/libsodium", | |||
"to": "src/" | |||
}, | |||
"remove": { | |||
"path": "src/libsodium" | |||
} | |||
}, | |||
{ | |||
"copy": { | |||
"from": "include/", | |||
"to": "src/", | |||
"strip-components": 1, | |||
} | |||
} | |||
] | |||
} | |||
} | |||
} | |||
} | |||
} |
@@ -1,4 +0,0 @@ | |||
{ | |||
name: 'use-libsodium', | |||
uses: ['sodium/sodium'] | |||
} |
@@ -1,8 +0,0 @@ | |||
{ | |||
name: 'use-libsodium', | |||
version: '1.0.0', | |||
namespace: 'test', | |||
depends: { | |||
'libsodium': '1.0.18' | |||
} | |||
} |
@@ -1,14 +0,0 @@ | |||
#include <sodium.h> | |||
#include <algorithm> | |||
int main() { | |||
char arr[256] = {}; | |||
::randombytes_buf(arr, sizeof arr); | |||
for (auto b : arr) { | |||
if (b != '\x00') { | |||
return 0; | |||
} | |||
} | |||
return 1; | |||
} |
@@ -1,12 +0,0 @@ | |||
from tests import DDS | |||
from dds_ci import proc | |||
def test_get_build_use_libsodium(dds: DDS): | |||
dds.catalog_import(dds.source_root / 'catalog.json') | |||
tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc' | |||
tc = str(dds.test_dir / tc_fname) | |||
dds.build(toolchain=tc) | |||
proc.check_run( | |||
(dds.build_dir / 'use-libsodium').with_suffix(dds.exe_suffix)) |
@@ -7,7 +7,7 @@ | |||
"url": "https://github.com/vector-of-bool/json.git", | |||
"ref": "dds/3.7.1" | |||
}, | |||
"depends": {} | |||
"depends": [] | |||
} | |||
} | |||
} |
@@ -2,7 +2,7 @@ | |||
"name": "json-test", | |||
"version": "0.0.0", | |||
"namespace": "test", | |||
"depends": { | |||
"nlohmann-json": "3.7.1" | |||
} | |||
"depends": [ | |||
"nlohmann-json@3.7.1" | |||
] | |||
} |
@@ -8,7 +8,7 @@ | |||
"ref": "v1.4.2", | |||
"auto-lib": "spdlog/spdlog" | |||
}, | |||
"depends": {} | |||
"depends": [] | |||
} | |||
} | |||
} |
@@ -2,7 +2,7 @@ | |||
name: 'test', | |||
version: '0.0.0', | |||
"namespace": "test", | |||
depends: { | |||
'spdlog': '1.4.2', | |||
}, | |||
depends: [ | |||
'spdlog@1.4.2', | |||
], | |||
} |
@@ -4,6 +4,7 @@ import sys | |||
import pytest | |||
from pathlib import Path | |||
from typing import Sequence, NamedTuple | |||
import multiprocessing | |||
import subprocess | |||
import urllib.request | |||
import shutil | |||
@@ -14,6 +15,7 @@ from dds_ci import paths, proc | |||
class CIOptions(NamedTuple): | |||
toolchain: str | |||
toolchain_2: str | |||
def _do_bootstrap_build(opts: CIOptions) -> None: | |||
@@ -70,13 +72,19 @@ def main(argv: Sequence[str]) -> int: | |||
help='The toolchain to use for the CI process', | |||
required=True, | |||
) | |||
parser.add_argument( | |||
'--toolchain-2', | |||
'-T2', | |||
help='The toolchain to use for the self-build', | |||
required=True, | |||
) | |||
parser.add_argument( | |||
'--build-only', | |||
action='store_true', | |||
help='Only build the `dds` executable. Skip second-phase and tests.') | |||
args = parser.parse_args(argv) | |||
opts = CIOptions(toolchain=args.toolchain) | |||
opts = CIOptions(toolchain=args.toolchain, toolchain_2=args.toolchain_2) | |||
if args.bootstrap_with == 'build': | |||
_do_bootstrap_build(opts) | |||
@@ -91,21 +99,16 @@ def main(argv: Sequence[str]) -> int: | |||
if old_cat_path.is_file(): | |||
old_cat_path.unlink() | |||
ci_repo_dir = paths.PREBUILT_DIR / '_ci-repo' | |||
ci_repo_dir = paths.PREBUILT_DIR / 'ci-repo' | |||
if ci_repo_dir.exists(): | |||
shutil.rmtree(ci_repo_dir) | |||
proc.check_run([ | |||
self_build( | |||
paths.PREBUILT_DDS, | |||
'catalog', | |||
'import', | |||
('--catalog', old_cat_path), | |||
('--json', paths.PROJECT_ROOT / 'catalog.json'), | |||
]) | |||
self_build(paths.PREBUILT_DDS, | |||
toolchain=opts.toolchain, | |||
cat_path=old_cat_path, | |||
dds_flags=[('--repo-dir', ci_repo_dir)]) | |||
toolchain=opts.toolchain, | |||
cat_path=old_cat_path, | |||
cat_json_path=Path('catalog.old.json'), | |||
dds_flags=[('--repo-dir', ci_repo_dir)]) | |||
print('Main build PASSED!') | |||
print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}') | |||
@@ -115,25 +118,22 @@ def main(argv: Sequence[str]) -> int: | |||
) | |||
return 0 | |||
print('Bootstrapping myself:') | |||
new_cat_path = paths.BUILD_DIR / 'catalog.db' | |||
proc.check_run([ | |||
new_repo_dir = paths.BUILD_DIR / 'ci-repo' | |||
self_build( | |||
paths.CUR_BUILT_DDS, | |||
'catalog', | |||
'import', | |||
('--catalog', new_cat_path), | |||
('--json', paths.PROJECT_ROOT / 'catalog.json'), | |||
]) | |||
self_build(paths.CUR_BUILT_DDS, | |||
toolchain=opts.toolchain, | |||
cat_path=new_cat_path, | |||
dds_flags=[f'--repo-dir={ci_repo_dir}']) | |||
toolchain=opts.toolchain_2, | |||
cat_path=new_cat_path, | |||
dds_flags=[f'--repo-dir={new_repo_dir}']) | |||
print('Bootstrap test PASSED!') | |||
return pytest.main([ | |||
'-v', | |||
'--durations=10', | |||
f'--basetemp={paths.BUILD_DIR / "_tmp"}', | |||
'-n4', | |||
'-n', | |||
str(multiprocessing.cpu_count() + 2), | |||
'tests/', | |||
]) | |||
@@ -3,7 +3,6 @@ | |||
"compiler_id": "gnu", | |||
"c_compiler": "gcc9", | |||
"cxx_compiler": "g++9", | |||
// "cxx_version": "c++17", | |||
"flags": [ | |||
"-DSPDLOG_COMPILED_LIB", // Required to use a compiled spdlog | |||
"-Werror=return-type", |
@@ -0,0 +1,20 @@ | |||
{ | |||
"$schema": "../res/toolchain-schema.json", | |||
"compiler_id": "gnu", | |||
"c_compiler": "gcc9", | |||
"cxx_compiler": "g++9", | |||
"flags": [ | |||
"-Werror=return-type", | |||
], | |||
"cxx_flags": [ | |||
"-fconcepts", | |||
"-std=c++2a", | |||
], | |||
"link_flags": [ | |||
"-static-libgcc", | |||
"-static-libstdc++", | |||
], | |||
// "debug": true, | |||
"optimize": true, | |||
"compiler_launcher": "ccache" | |||
} |
@@ -0,0 +1,19 @@ | |||
{ | |||
"$schema": "../res/toolchain-schema.json", | |||
"compiler_id": "gnu", | |||
"c_compiler": "gcc-9", | |||
"cxx_compiler": "g++-9", | |||
"flags": [ | |||
"-Werror=return-type", | |||
], | |||
"cxx_flags": [ | |||
"-fconcepts", | |||
"-std=c++2a", | |||
], | |||
"link_flags": [ | |||
"-static", | |||
], | |||
// "debug": true, | |||
"optimize": true, | |||
"compiler_launcher": "ccache" | |||
} |
@@ -3,10 +3,10 @@ | |||
"compiler_id": "gnu", | |||
"c_compiler": "gcc-9", | |||
"cxx_compiler": "g++-9", | |||
// "cxx_version": "c++17", | |||
"flags": [ | |||
"-DSPDLOG_COMPILED_LIB", // Required to use a compiled spdlog | |||
"-Werror=return-type", | |||
// "-fsanitize=address", | |||
], | |||
"cxx_flags": [ | |||
"-fconcepts", | |||
@@ -15,6 +15,8 @@ | |||
"link_flags": [ | |||
"-static-libgcc", | |||
"-static-libstdc++" | |||
// "-fsanitize=address", | |||
// "-fuse-ld=lld", | |||
], | |||
// "debug": true, | |||
"optimize": true, |
@@ -0,0 +1,23 @@ | |||
{ | |||
"$schema": "../res/toolchain-schema.json", | |||
"compiler_id": "gnu", | |||
"c_compiler": "gcc-9", | |||
"cxx_compiler": "g++-9", | |||
"flags": [ | |||
"-Werror=return-type", | |||
// "-fsanitize=address", | |||
], | |||
"cxx_flags": [ | |||
"-fconcepts", | |||
"-std=c++2a", | |||
], | |||
"link_flags": [ | |||
"-static-libgcc", | |||
"-static-libstdc++" | |||
// "-fsanitize=address", | |||
// "-fuse-ld=lld", | |||
], | |||
"debug": true, | |||
// "optimize": true, | |||
"compiler_launcher": "ccache" | |||
} |
@@ -1,18 +1,110 @@ | |||
import argparse | |||
import json | |||
import itertools | |||
from typing import NamedTuple, Tuple, List, Sequence, Union, Optional, Mapping | |||
from pathlib import Path | |||
import sys | |||
import textwrap | |||
class CopyMoveTransform(NamedTuple):
    """Parameters of a 'copy' or 'move' filesystem transform.

    ``frm``/``to`` are the source and destination paths within the package
    root (``frm`` because ``from`` is a Python keyword). ``include`` and
    ``exclude`` are pattern lists restricting which files are affected, and
    ``strip_components`` drops leading path components during the operation.
    """
    frm: str
    to: str
    strip_components: int = 0
    # Immutable tuple defaults: NamedTuple defaults are shared by every
    # instance, so a mutable [] default would be a latent aliasing bug.
    include: Sequence[str] = ()
    exclude: Sequence[str] = ()

    def to_dict(self):
        """Serialize to the JSON-shaped dict used by the catalog.

        Pattern sequences are normalized to lists so the output is uniform
        regardless of whether callers passed tuples or lists.
        """
        return {
            'from': self.frm,
            'to': self.to,
            'include': list(self.include),
            'exclude': list(self.exclude),
            'strip-components': self.strip_components,
        }
class OneEdit(NamedTuple):
    """One line-edit of an 'edit' transform: ``kind`` (e.g. 'insert'), the
    ``line`` number to edit, and the ``content`` for edits that carry text."""
    kind: str
    line: int
    content: Optional[str] = None

    def to_dict(self):
        """Serialize, omitting 'content' only when it was not provided.

        Uses an explicit ``is not None`` test so an intentionally empty
        string is still emitted (a plain truthiness test would drop it).
        """
        d = {
            'kind': self.kind,
            'line': self.line,
        }
        if self.content is not None:
            d['content'] = self.content
        return d
class EditTransform(NamedTuple):
    """An 'edit' transform: a file ``path`` plus the line edits to apply."""
    path: str
    # Immutable default: NamedTuple defaults are shared by every instance,
    # so use () rather than a mutable [] (matches RemoveTransform).
    edits: Sequence[OneEdit] = ()

    def to_dict(self):
        """Serialize to the catalog's JSON shape, serializing each edit."""
        return {
            'path': self.path,
            'edits': [e.to_dict() for e in self.edits],
        }
class WriteTransform(NamedTuple):
    """A 'write' transform: create or overwrite ``path`` with ``content``."""
    path: str
    content: str

    def to_dict(self):
        # Keys match the catalog JSON schema verbatim.
        return dict(path=self.path, content=self.content)
class RemoveTransform(NamedTuple):
    """A 'remove' transform: delete ``path``, optionally restricted to
    entries matching the ``only_matching`` patterns."""
    path: str
    only_matching: Sequence[str] = ()

    def to_dict(self):
        # The schema spells the key with a dash: 'only-matching'.
        serialized = {'path': self.path}
        serialized['only-matching'] = self.only_matching
        return serialized
class FSTransform(NamedTuple):
    """One filesystem-transform entry. Any subset of the members may be set;
    only the members that were provided appear in the serialized dict, each
    under its own key."""
    copy: Optional[CopyMoveTransform] = None
    move: Optional[CopyMoveTransform] = None
    remove: Optional[RemoveTransform] = None
    write: Optional[WriteTransform] = None
    edit: Optional[EditTransform] = None

    def to_dict(self):
        # _asdict() preserves field declaration order, so the emitted keys
        # appear in the same order as the original explicit if-chain.
        return {
            name: member.to_dict()
            for name, member in self._asdict().items()
            if member is not None
        }
class Git(NamedTuple): | |||
url: str | |||
ref: str | |||
auto_lib: Optional[str] = None | |||
transforms: Sequence[FSTransform] = [] | |||
def to_dict(self) -> dict: | |||
d = { | |||
'url': self.url, | |||
'ref': self.ref, | |||
'transform': [f.to_dict() for f in self.transforms], | |||
} | |||
if self.auto_lib: | |||
d['auto-lib'] = self.auto_lib | |||
@@ -31,36 +123,94 @@ class Version(NamedTuple): | |||
def to_dict(self) -> dict: | |||
ret: dict = { | |||
'description': self.description, | |||
'depends': [k + v for k, v in self.depends.items()], | |||
} | |||
ret['depends'] = self.depends | |||
if isinstance(self.remote, Git): | |||
ret['git'] = self.remote.to_dict() | |||
return ret | |||
class VersionSet(NamedTuple): | |||
version: str | |||
depends: Sequence[Tuple[str, str]] | |||
class Package(NamedTuple): | |||
name: str | |||
versions: List[Version] | |||
def simple_packages(name: str, | |||
description: str, | |||
git_url: str, | |||
versions: Sequence[VersionSet], | |||
auto_lib: Optional[str] = None, | |||
*, | |||
tag_fmt: str = '{}') -> Package: | |||
return Package(name, [ | |||
Version( | |||
ver.version, | |||
description=description, | |||
remote=Git( | |||
git_url, tag_fmt.format(ver.version), auto_lib=auto_lib), | |||
depends={dep_name: dep_rng | |||
for dep_name, dep_rng in ver.depends}) for ver in versions | |||
]) | |||
def many_versions(name: str, | |||
versions: Sequence[str], | |||
*, | |||
tag_fmt: str = '{}', | |||
git_url: str, | |||
auto_lib: str = None, | |||
transforms: Sequence[FSTransform] = (), | |||
description='(No description was provided)') -> Package: | |||
return Package(name, [ | |||
Version( | |||
ver, | |||
description='\n'.join(textwrap.wrap(description)), | |||
remote=Git( | |||
url=git_url, ref=tag_fmt.format(ver), auto_lib=auto_lib)) | |||
for ver in versions | |||
url=git_url, | |||
ref=tag_fmt.format(ver), | |||
auto_lib=auto_lib, | |||
transforms=transforms)) for ver in versions | |||
]) | |||
packages = [ | |||
PACKAGES = [ | |||
many_versions( | |||
'magic_enum', | |||
( | |||
'0.5.0', | |||
'0.6.0', | |||
'0.6.1', | |||
'0.6.2', | |||
'0.6.3', | |||
'0.6.4', | |||
'0.6.5', | |||
'0.6.6', | |||
), | |||
description='Static reflection for enums', | |||
tag_fmt='v{}', | |||
git_url='https://github.com/Neargye/magic_enum.git', | |||
auto_lib='neargye/magic_enum', | |||
), | |||
many_versions( | |||
'nameof', | |||
[ | |||
'0.8.3', | |||
'0.9.0', | |||
'0.9.1', | |||
'0.9.2', | |||
'0.9.3', | |||
'0.9.4', | |||
], | |||
description='Nameof operator for modern C++', | |||
tag_fmt='v{}', | |||
git_url='https://github.com/Neargye/nameof.git', | |||
auto_lib='neargye/nameof', | |||
), | |||
many_versions( | |||
'range-v3', | |||
( | |||
@@ -77,21 +227,21 @@ packages = [ | |||
many_versions( | |||
'nlohmann-json', | |||
( | |||
'3.0.0', | |||
'3.0.1', | |||
'3.1.0', | |||
'3.1.1', | |||
'3.1.2', | |||
'3.2.0', | |||
'3.3.0', | |||
'3.4.0', | |||
'3.5.0', | |||
'3.6.0', | |||
'3.6.1', | |||
'3.7.0', | |||
'3.7.1', | |||
'3.7.2', | |||
'3.7.3', | |||
# '3.0.0', | |||
# '3.0.1', | |||
# '3.1.0', | |||
# '3.1.1', | |||
# '3.1.2', | |||
# '3.2.0', | |||
# '3.3.0', | |||
# '3.4.0', | |||
# '3.5.0', | |||
# '3.6.0', | |||
# '3.6.1', | |||
# '3.7.0', | |||
'3.7.1', # Only this version has the dds forked branch | |||
# '3.7.2', | |||
# '3.7.3', | |||
), | |||
git_url='https://github.com/vector-of-bool/json.git', | |||
tag_fmt='dds/{}', | |||
@@ -112,6 +262,7 @@ packages = [ | |||
'0.2.1', | |||
'0.2.2', | |||
'0.2.3', | |||
'0.3.0', | |||
), | |||
description='A modern and low-level C++ SQLite API', | |||
git_url='https://github.com/vector-of-bool/neo-sqlite3.git', | |||
@@ -178,124 +329,637 @@ packages = [ | |||
description='A C++ implementation of a JSON5 parser', | |||
git_url='https://github.com/vector-of-bool/json5.git', | |||
), | |||
Package('vob-semester', [ | |||
Version( | |||
'0.1.0', | |||
description='A C++ library to process recursive dynamic data', | |||
remote=Git('https://github.com/vector-of-bool/semester.git', | |||
'0.1.0'), | |||
depends={ | |||
'neo-fun': '^0.1.0', | |||
'neo-concepts': '^0.2.1', | |||
}), | |||
simple_packages( | |||
'vob-semester', | |||
description='A C++ library to process recursive dynamic data', | |||
git_url='https://github.com/vector-of-bool/semester.git', | |||
versions=[ | |||
VersionSet('0.1.0', [ | |||
('neo-fun', '^0.1.0'), | |||
('neo-concepts', '^0.2.1'), | |||
]), | |||
VersionSet('0.1.1', [ | |||
('neo-fun', '^0.1.1'), | |||
('neo-concepts', '^0.2.2'), | |||
]), | |||
VersionSet('0.2.0', [ | |||
('neo-fun', '^0.3.2'), | |||
('neo-concepts', '^0.3.2'), | |||
]), | |||
VersionSet('0.2.1', [ | |||
('neo-fun', '^0.3.2'), | |||
('neo-concepts', '^0.3.2'), | |||
]), | |||
], | |||
), | |||
many_versions( | |||
'ctre', | |||
( | |||
'2.8.1', | |||
'2.8.2', | |||
'2.8.3', | |||
'2.8.4', | |||
), | |||
git_url= | |||
'https://github.com/hanickadot/compile-time-regular-expressions.git', | |||
tag_fmt='v{}', | |||
auto_lib='hanickadot/ctre', | |||
description= | |||
'A compile-time PCRE (almost) compatible regular expression matcher', | |||
), | |||
Package( | |||
'spdlog', | |||
[ | |||
Version( | |||
ver, | |||
description='Fast C++ logging library', | |||
depends={'fmt': '+6.0.0'}, | |||
remote=Git( | |||
url='https://github.com/gabime/spdlog.git', | |||
ref=f'v{ver}', | |||
transforms=[ | |||
FSTransform( | |||
write=WriteTransform( | |||
path='package.json', | |||
content=json.dumps({ | |||
'name': 'spdlog', | |||
'namespace': 'spdlog', | |||
'version': ver, | |||
'depends': ['fmt+6.0.0'], | |||
}))), | |||
FSTransform( | |||
write=WriteTransform( | |||
path='library.json', | |||
content=json.dumps({ | |||
'name': 'spdlog', | |||
'uses': ['fmt/fmt'] | |||
}))), | |||
FSTransform( | |||
# It's all just template instantiations. | |||
remove=RemoveTransform(path='src/'), | |||
# Tell spdlog to use the external fmt library | |||
edit=EditTransform( | |||
path='include/spdlog/tweakme.h', | |||
edits=[ | |||
OneEdit( | |||
kind='insert', | |||
content='#define SPDLOG_FMT_EXTERNAL 1', | |||
line=13, | |||
), | |||
])), | |||
], | |||
), | |||
) for ver in ( | |||
'1.4.0', | |||
'1.4.1', | |||
'1.4.2', | |||
'1.5.0', | |||
'1.6.0', | |||
'1.6.1', | |||
'1.7.0', | |||
) | |||
]), | |||
many_versions( | |||
'fmt', | |||
( | |||
'6.0.0', | |||
'6.1.0', | |||
'6.1.1', | |||
'6.1.2', | |||
'6.2.0', | |||
'6.2.1', | |||
'7.0.0', | |||
'7.0.1', | |||
), | |||
git_url='https://github.com/fmtlib/fmt.git', | |||
auto_lib='fmt/fmt', | |||
description='A modern formatting library : https://fmt.dev/', | |||
), | |||
Package('catch2', [ | |||
Version( | |||
'0.1.1', | |||
description='A C++ library to process recursive dynamic data', | |||
remote=Git('https://github.com/vector-of-bool/semester.git', | |||
'0.1.1'), | |||
depends={ | |||
'neo-fun': '^0.1.1', | |||
'neo-concepts': '^0.2.2', | |||
}), | |||
'2.12.4', | |||
description='A modern C++ unit testing library', | |||
remote=Git( | |||
'https://github.com/catchorg/Catch2.git', | |||
'v2.12.4', | |||
auto_lib='catch2/catch2', | |||
transforms=[ | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='include', to='include/catch2')), | |||
FSTransform( | |||
copy=CopyMoveTransform(frm='include', to='src'), | |||
write=WriteTransform( | |||
path='include/catch2/catch_with_main.hpp', | |||
content=''' | |||
#pragma once | |||
#define CATCH_CONFIG_MAIN | |||
#include "./catch.hpp" | |||
namespace Catch { | |||
CATCH_REGISTER_REPORTER("console", ConsoleReporter) | |||
} | |||
''')), | |||
])) | |||
]), | |||
Package('asio', [ | |||
Version( | |||
'0.2.0', | |||
description='A C++ library to process recursive dynamic data', | |||
remote=Git('https://github.com/vector-of-bool/semester.git', | |||
'0.2.0'), | |||
depends={ | |||
'neo-fun': '^0.3.2', | |||
'neo-concepts': '^0.3.2', | |||
}), | |||
ver, | |||
description='Asio asynchronous I/O C++ library', | |||
remote=Git( | |||
'https://github.com/chriskohlhoff/asio.git', | |||
f'asio-{ver.replace(".", "-")}', | |||
auto_lib='asio/asio', | |||
transforms=[ | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='asio/src', | |||
to='src/', | |||
), | |||
remove=RemoveTransform( | |||
path='src/', | |||
only_matching=[ | |||
'doc/**', | |||
'examples/**', | |||
'tests/**', | |||
'tools/**', | |||
], | |||
), | |||
), | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='asio/include/', | |||
to='include/', | |||
), | |||
edit=EditTransform( | |||
path='include/asio/detail/config.hpp', | |||
edits=[ | |||
OneEdit( | |||
line=13, | |||
kind='insert', | |||
content='#define ASIO_STANDALONE 1'), | |||
OneEdit( | |||
line=14, | |||
kind='insert', | |||
content= | |||
'#define ASIO_SEPARATE_COMPILATION 1') | |||
]), | |||
), | |||
]), | |||
) for ver in [ | |||
'1.12.0', | |||
'1.12.1', | |||
'1.12.2', | |||
'1.13.0', | |||
'1.14.0', | |||
'1.14.1', | |||
'1.16.0', | |||
'1.16.1', | |||
] | |||
]), | |||
Package( | |||
'abseil', | |||
[ | |||
Version( | |||
ver, | |||
description='Abseil Common Libraries', | |||
remote=Git( | |||
'https://github.com/abseil/abseil-cpp.git', | |||
tag, | |||
auto_lib='abseil/abseil', | |||
transforms=[ | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='absl', | |||
to='src/absl/', | |||
), | |||
remove=RemoveTransform( | |||
path='src/', | |||
only_matching=[ | |||
'**/*_test.c*', | |||
'**/*_testing.c*', | |||
'**/*_benchmark.c*', | |||
'**/benchmarks.c*', | |||
'**/*_test_common.c*', | |||
'**/mocking_*.c*', | |||
# Misc files that should be removed: | |||
'**/test_util.cc', | |||
'**/mutex_nonprod.cc', | |||
'**/named_generator.cc', | |||
'**/print_hash_of.cc', | |||
'**/*_gentables.cc', | |||
]), | |||
) | |||
]), | |||
) for ver, tag in [ | |||
('2018.6.0', '20180600'), | |||
('2019.8.8', '20190808'), | |||
('2020.2.25', '20200225.2'), | |||
] | |||
]), | |||
Package( | |||
'zlib', | |||
[ | |||
Version( | |||
ver, | |||
description= | |||
'A massively spiffy yet delicately unobtrusive compression library', | |||
remote=Git( | |||
'https://github.com/madler/zlib.git', | |||
tag or f'v{ver}', | |||
auto_lib='zlib/zlib', | |||
transforms=[ | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='.', | |||
to='src/', | |||
include=[ | |||
'*.c', | |||
'*.h', | |||
], | |||
)), | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='src/', | |||
to='include/', | |||
include=['zlib.h', 'zconf.h'], | |||
)), | |||
]), | |||
) for ver, tag in [ | |||
('1.2.11', None), | |||
('1.2.10', None), | |||
('1.2.9', None), | |||
('1.2.8', None), | |||
('1.2.7', 'v1.2.7.3'), | |||
('1.2.6', 'v1.2.6.1'), | |||
('1.2.5', 'v1.2.5.3'), | |||
('1.2.4', 'v1.2.4.5'), | |||
('1.2.3', 'v1.2.3.8'), | |||
('1.2.2', 'v1.2.2.4'), | |||
('1.2.1', 'v1.2.1.2'), | |||
('1.2.0', 'v1.2.0.8'), | |||
('1.1.4', None), | |||
('1.1.3', None), | |||
('1.1.2', None), | |||
('1.1.1', None), | |||
('1.1.0', None), | |||
('1.0.9', None), | |||
('1.0.8', None), | |||
('1.0.7', None), | |||
# ('1.0.6', None), # Does not exist | |||
('1.0.5', None), | |||
('1.0.4', None), | |||
# ('1.0.3', None), # Does not exist | |||
('1.0.2', None), | |||
('1.0.1', None), | |||
] | |||
]), | |||
Package('sol2', [ | |||
Version( | |||
'0.2.1', | |||
description='A C++ library to process recursive dynamic data', | |||
remote=Git('https://github.com/vector-of-bool/semester.git', | |||
'0.2.1'), | |||
depends={ | |||
'neo-fun': '^0.3.2', | |||
'neo-concepts': '^0.3.2', | |||
}), | |||
ver, | |||
description= | |||
'A C++ <-> Lua API wrapper with advanced features and top notch performance', | |||
depends={'lua': '+0.0.0'}, | |||
remote=Git( | |||
'https://github.com/ThePhD/sol2.git', | |||
f'v{ver}', | |||
transforms=[ | |||
FSTransform( | |||
write=WriteTransform( | |||
path='package.json', | |||
content=json.dumps( | |||
{ | |||
'name': 'sol2', | |||
'namespace': 'sol2', | |||
'version': ver, | |||
'depends': [f'lua+0.0.0'], | |||
}, | |||
indent=2, | |||
)), | |||
move=(None | |||
if ver.startswith('3.') else CopyMoveTransform( | |||
frm='sol', | |||
to='src/sol', | |||
)), | |||
), | |||
FSTransform( | |||
write=WriteTransform( | |||
path='library.json', | |||
content=json.dumps( | |||
{ | |||
'name': 'sol2', | |||
'uses': ['lua/lua'], | |||
}, | |||
indent=2, | |||
))), | |||
]), | |||
) for ver in [ | |||
'3.2.1', | |||
'3.2.0', | |||
'3.0.3', | |||
'3.0.2', | |||
'2.20.6', | |||
'2.20.5', | |||
'2.20.4', | |||
'2.20.3', | |||
'2.20.2', | |||
'2.20.1', | |||
'2.20.0', | |||
] | |||
]), | |||
Package('ctre', [ | |||
Package('lua', [ | |||
Version( | |||
'2.7.0', | |||
ver, | |||
description= | |||
'A compile-time PCRE (almost) compatible regular expression matcher', | |||
'Lua is a powerful and fast programming language that is easy to learn and use and to embed into your application.', | |||
remote=Git( | |||
'https://github.com/hanickadot/compile-time-regular-expressions.git', | |||
'v2.7', | |||
auto_lib='hanickadot/ctre', | |||
)) | |||
'https://github.com/lua/lua.git', | |||
f'v{ver}', | |||
auto_lib='lua/lua', | |||
transforms=[ | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='.', | |||
to='src/', | |||
include=['*.c', '*.h'], | |||
)) | |||
]), | |||
) for ver in [ | |||
'5.4.0', | |||
'5.3.5', | |||
'5.3.4', | |||
'5.3.3', | |||
'5.3.2', | |||
'5.3.1', | |||
'5.3.0', | |||
'5.2.3', | |||
'5.2.2', | |||
'5.2.1', | |||
'5.2.0', | |||
'5.1.1', | |||
] | |||
]), | |||
Package('pegtl', [ | |||
Version( | |||
ver, | |||
description='Parsing Expression Grammar Template Library', | |||
remote=Git( | |||
'https://github.com/taocpp/PEGTL.git', | |||
ver, | |||
auto_lib='tao/pegtl', | |||
transforms=[FSTransform(remove=RemoveTransform(path='src/'))], | |||
)) for ver in [ | |||
'2.8.3', | |||
'2.8.2', | |||
'2.8.1', | |||
'2.8.0', | |||
'2.7.1', | |||
'2.7.0', | |||
'2.6.1', | |||
'2.6.0', | |||
] | |||
]), | |||
many_versions( | |||
'spdlog', | |||
( | |||
'0.9.0', | |||
'0.10.0', | |||
'0.11.0', | |||
'0.12.0', | |||
'0.13.0', | |||
'0.14.0', | |||
'0.16.0', | |||
'0.16.1', | |||
'0.16.2', | |||
'0.17.0', | |||
'boost.pfr', ['1.0.0', '1.0.1'], | |||
auto_lib='boost/pfr', | |||
git_url='https://github.com/apolukhin/magic_get.git'), | |||
many_versions( | |||
'boost.leaf', | |||
[ | |||
'0.1.0', | |||
'0.2.0', | |||
'0.2.1', | |||
'0.2.2', | |||
'0.2.3', | |||
'0.2.4', | |||
'0.2.5', | |||
'0.3.0', | |||
], | |||
auto_lib='boost/leaf', | |||
git_url='https://github.com/zajo/leaf.git', | |||
), | |||
many_versions( | |||
'boost.mp11', | |||
['1.70.0', '1.71.0', '1.72.0', '1.73.0'], | |||
tag_fmt='boost-{}', | |||
git_url='https://github.com/boostorg/mp11.git', | |||
auto_lib='boost/mp11', | |||
), | |||
many_versions( | |||
'libsodium', [ | |||
'1.0.10', | |||
'1.0.11', | |||
'1.0.12', | |||
'1.0.13', | |||
'1.0.14', | |||
'1.0.15', | |||
'1.0.16', | |||
'1.0.17', | |||
'1.0.18', | |||
], | |||
git_url='https://github.com/jedisct1/libsodium.git', | |||
auto_lib='sodium/sodium', | |||
description='Sodium is a new, easy-to-use software library ' | |||
'for encryption, decryption, signatures, password hashing and more.', | |||
transforms=[ | |||
FSTransform( | |||
move=CopyMoveTransform( | |||
frm='src/libsodium/include', to='include/'), | |||
edit=EditTransform( | |||
path='include/sodium/export.h', | |||
edits=[ | |||
OneEdit( | |||
line=8, | |||
kind='insert', | |||
content='#define SODIUM_STATIC 1') | |||
])), | |||
FSTransform( | |||
edit=EditTransform( | |||
path='include/sodium/private/common.h', | |||
edits=[ | |||
OneEdit( | |||
kind='insert', | |||
line=1, | |||
content=Path(__file__).parent.joinpath( | |||
'libsodium-config.h').read_text(), | |||
) | |||
])), | |||
FSTransform( | |||
copy=CopyMoveTransform( | |||
frm='builds/msvc/version.h', | |||
to='include/sodium/version.h', | |||
), | |||
move=CopyMoveTransform( | |||
frm='src/libsodium', | |||
to='src/', | |||
), | |||
remove=RemoveTransform(path='src/libsodium'), | |||
), | |||
FSTransform( | |||
copy=CopyMoveTransform( | |||
frm='include', to='src/', strip_components=1)), | |||
]), | |||
many_versions( | |||
'tomlpp', | |||
[ | |||
'1.0.0', | |||
'1.1.0', | |||
'1.2.0', | |||
'1.2.1', | |||
'1.2.3', | |||
'1.2.4', | |||
'1.2.5', | |||
'1.3.0', | |||
'1.3.1', | |||
'1.4.0', | |||
'1.4.1', | |||
'1.4.2', | |||
), | |||
git_url='https://github.com/gabime/spdlog.git', | |||
# '1.3.2', # Wrong tag name in upstream | |||
'1.3.3', | |||
], | |||
tag_fmt='v{}', | |||
auto_lib='spdlog/spdlog', | |||
description='Fast C++ logging library', | |||
), | |||
git_url='https://github.com/marzer/tomlplusplus.git', | |||
auto_lib='tomlpp/tomlpp', | |||
description= | |||
'Header-only TOML config file parser and serializer for modern C++'), | |||
Package('inja', [ | |||
*(Version( | |||
ver, | |||
description='A Template Engine for Modern C++', | |||
remote=Git( | |||
'https://github.com/pantor/inja.git', | |||
f'v{ver}', | |||
auto_lib='inja/inja')) for ver in ('1.0.0', '2.0.0', '2.0.1')), | |||
*(Version( | |||
ver, | |||
description='A Template Engine for Modern C++', | |||
depends={'nlohmann-json': '+0.0.0'}, | |||
remote=Git( | |||
'https://github.com/pantor/inja.git', | |||
f'v{ver}', | |||
transforms=[ | |||
FSTransform( | |||
write=WriteTransform( | |||
path='package.json', | |||
content=json.dumps({ | |||
'name': | |||
'inja', | |||
'namespace': | |||
'inja', | |||
'version': | |||
ver, | |||
'depends': [ | |||
'nlohmann-json+0.0.0', | |||
] | |||
}))), | |||
FSTransform( | |||
write=WriteTransform( | |||
path='library.json', | |||
content=json.dumps({ | |||
'name': 'inja', | |||
'uses': ['nlohmann/json'] | |||
}))), | |||
], | |||
)) for ver in ('2.1.0', '2.2.0')), | |||
]), | |||
many_versions( | |||
'fmt', | |||
( | |||
'0.8.0', | |||
'cereal', | |||
[ | |||
'0.9.0', | |||
'0.10.0', | |||
'0.12.0', | |||
'0.9.1', | |||
'1.0.0', | |||
'1.1.0', | |||
'1.1.1', | |||
'1.1.2', | |||
'1.2.0', | |||
'1.2.1', | |||
'1.2.2', | |||
'1.3.0', | |||
], | |||
auto_lib='cereal/cereal', | |||
git_url='https://github.com/USCiLab/cereal.git', | |||
tag_fmt='v{}', | |||
description='A C++11 library for serialization', | |||
), | |||
many_versions( | |||
'pybind11', | |||
[ | |||
'2.0.0', | |||
'2.0.1', | |||
'2.1.0', | |||
'2.1.1', | |||
'3.0.0', | |||
'3.0.1', | |||
'3.0.2', | |||
'4.0.0', | |||
'4.1.0', | |||
'5.0.0', | |||
'5.1.0', | |||
'5.2.0', | |||
'5.2.1', | |||
'5.3.0', | |||
'6.0.0', | |||
'6.1.0', | |||
'6.1.1', | |||
'6.1.2', | |||
), | |||
git_url='https://github.com/fmtlib/fmt.git', | |||
auto_lib='fmt/fmt', | |||
description='A modern formatting library : https://fmt.dev/', | |||
'2.2.0', | |||
'2.2.1', | |||
'2.2.2', | |||
'2.2.3', | |||
'2.2.4', | |||
'2.3.0', | |||
'2.4.0', | |||
'2.4.1', | |||
'2.4.2', | |||
'2.4.3', | |||
'2.5.0', | |||
], | |||
git_url='https://github.com/pybind/pybind11.git', | |||
description='Seamless operability between C++11 and Python', | |||
auto_lib='pybind/pybind11', | |||
tag_fmt='v{}', | |||
), | |||
Package('pcg-cpp', [ | |||
Version( | |||
'0.98.1', | |||
description='PCG Randum Number Generation, C++ Edition', | |||
remote=Git( | |||
url='https://github.com/imneme/pcg-cpp.git', | |||
ref='v0.98.1', | |||
auto_lib='pcg/pcg-cpp')) | |||
]), | |||
] | |||
if __name__ == "__main__":
    # No command-line options are accepted (yet); argparse still provides
    # --help and rejects unknown arguments.
    parser = argparse.ArgumentParser()
    args = parser.parse_args()

    # Serialize every package into the catalog structure: package name maps
    # to {version-string: serialized-version}.
    data = {
        'version': 1,
        'packages': {
            pkg.name: {ver.version: ver.to_dict()
                       for ver in pkg.versions}
            for pkg in PACKAGES
        }
    }

    # Write the pretty-printed JSON catalog for humans and tooling.
    json_str = json.dumps(data, indent=2, sort_keys=True)
    Path('catalog.json').write_text(json_str)

    # Template for the generated C++ translation unit that embeds the
    # catalog. See the comment inside the template for why a char array is
    # used instead of a string literal.
    cpp_template = textwrap.dedent(r'''
#include <dds/catalog/package_info.hpp>
#include <dds/catalog/init_catalog.hpp>
#include <dds/catalog/import.hpp>
/**
 * The following array of integers is generated and contains the JSON
 * encoded initial catalog. MSVC can't handle string literals over
 * 64k large, so we have to resort to using a regular char array:
 */
static constexpr const char INIT_PACKAGES_CONTENT[] = {
@JSON@
};
static constexpr int INIT_PACKAGES_STR_LEN = @JSON_LEN@;
const std::vector<dds::package_info>&
dds::init_catalog_packages() noexcept {
using std::nullopt;
static auto pkgs = dds::parse_packages_json(
std::string_view(INIT_PACKAGES_CONTENT, INIT_PACKAGES_STR_LEN));
return pkgs;
}
''')

    # Compact (minified) JSON rendered as comma-separated character codes,
    # wrapped to 120 columns and indented to keep the generated file
    # readable.
    json_small = json.dumps(data, sort_keys=True)
    json_small_arr = ', '.join(str(ord(c)) for c in json_small)
    json_small_arr = '\n'.join(textwrap.wrap(json_small_arr, width=120))
    json_small_arr = textwrap.indent(json_small_arr, prefix=' ' * 4)

    # Echo the catalog to stdout for inspection/logging.
    print(json.dumps(data, indent=2, sort_keys=True))

    # Substitute the character array and its length into the template and
    # write the generated source file.
    cpp_content = cpp_template.replace('@JSON@', json_small_arr).replace(
        '@JSON_LEN@', str(len(json_small)))
    Path('src/dds/catalog/init_catalog.cpp').write_text(cpp_content)
@@ -0,0 +1,141 @@ | |||
#pragma once
// clang-format off
/**
 * Compile-time platform configuration for building libsodium without its
 * autotools configure step. Each HAVE_* macro mirrors what configure would
 * have detected, derived here from __has_include and compiler-predefined
 * architecture macros.
 */
/**
 * Header checks
 */
#if __has_include(<sys/mman.h>)
#define HAVE_SYS_MMAN_H 1
#endif
#if __has_include(<sys/random.h>)
#define HAVE_SYS_RANDOM_H 1
#endif
#if __has_include(<intrin.h>)
#define HAVE_INTRIN_H 1
#endif
#if __has_include(<sys/auxv.h>)
#define HAVE_SYS_AUXV_H 1
#endif
/**
 * Architectural checks for intrinsics
 */
#if __has_include(<mmintrin.h>) && __MMX__
#define HAVE_MMINTRIN_H 1
#endif
#if __has_include(<emmintrin.h>) && __SSE2__
#define HAVE_EMMINTRIN_H 1
#endif
#if __SSE3__
#if __has_include(<pmmintrin.h>)
#define HAVE_PMMINTRIN_H 1
#endif
#if __has_include(<tmmintrin.h>)
#define HAVE_TMMINTRIN_H 1
#endif
#endif
#if __has_include(<smmintrin.h>) && __SSE4_1__
#define HAVE_SMMINTRIN_H
#endif
#if __has_include(<immintrin.h>)
#if __AVX__
#define HAVE_AVXINTRIN_H
#endif
#if __AVX2__
#define HAVE_AVX2INTRIN_H
#endif
#if __AVX512F__
#if defined(__clang__) && __clang_major__ < 4
// AVX512 may be broken in Clang < 4 — leave it disabled
#elif defined(__GNUC__) && __GNUC__ < 6
// Likewise for GCC < 6
#else
#define HAVE_AVX512FINTRIN_H
#endif
#endif
#endif
#if __has_include(<wmmintrin.h>) && __AES__
#define HAVE_WMMINTRIN_H 1
#endif
#if __RDRND__
#define HAVE_RDRAND
#endif
/**
 * Detect mman APIs
 */
#if __has_include(<sys/mman.h>)
#define HAVE_MMAP 1
#define HAVE_MPROTECT 1
#define HAVE_MLOCK 1
// madvise() is only declared under BSD/default feature-test macros
#if defined(_DEFAULT_SOURCE) || defined(_BSD_SOURCE)
#define HAVE_MADVISE 1
#endif
#endif
#if __has_include(<sys/random.h>)
#define HAVE_GETRANDOM 1
#endif
/**
 * POSIX-Only stuff
 */
#if __has_include(<unistd.h>)
#if defined(_DEFAULT_SOURCE)
#define HAVE_GETENTROPY 1
#endif
/**
 * Default POSIX APIs
 */
#define HAVE_POSIX_MEMALIGN 1
#define HAVE_GETPID 1
#define HAVE_NANOSLEEP 1
/**
 * Language/library features from C11
 */
#if __STDC_VERSION__ >= 201112L
#define HAVE_MEMSET_S 1
#endif
#if __linux__
#define HAVE_EXPLICIT_BZERO 1
#endif
#endif
/**
 * Miscellaneous
 */
#if __has_include(<pthread.h>)
#define HAVE_PTHREAD 1
#endif
#if __has_include(<sys/param.h>)
#include <sys/param.h>
#if __BYTE_ORDER == __BIG_ENDIAN
#define NATIVE_BIG_ENDIAN 1
#elif __BYTE_ORDER == __LITTLE_ENDIAN
#define NATIVE_LITTLE_ENDIAN 1
#else
#error "Unknown endianness for this platform."
#endif
#elif defined(_MSC_VER)
// NOTE: was `defined(_MSVC)` — no compiler defines `_MSVC`; the MSVC
// predefined macro is `_MSC_VER`, so the old check always failed and MSVC
// builds fell into the #error below.
// At time of writing, MSVC only targets little-endian.
#define NATIVE_LITTLE_ENDIAN 1
#else
#error "Unknown endianness for this platform."
#endif
#define CONFIGURED 1
@@ -0,0 +1,13 @@ | |||
{ | |||
"$schema": "../res/toolchain-schema.json", | |||
"compiler_id": "msvc", | |||
"flags": [ | |||
"/Zc:preprocessor", // Required for range-v3 | |||
"/std:c++latest", | |||
], | |||
"link_flags": [ | |||
"rpcrt4.lib", | |||
], | |||
// "debug": true, | |||
"optimize": true | |||
} |
@@ -16,6 +16,7 @@ def self_build(exe: Path, | |||
toolchain: str, | |||
lmi_path: Path = None, | |||
cat_path: Path = Path('_build/catalog.db'), | |||
cat_json_path: Path = Path('catalog.json'), | |||
dds_flags: proc.CommandLine = ()): | |||
# Copy the exe to another location, as windows refuses to let a binary be | |||
# replaced while it is executing | |||
@@ -27,7 +28,7 @@ def self_build(exe: Path, | |||
'catalog', | |||
'import', | |||
f'--catalog={cat_path}', | |||
f'--json=catalog.json', | |||
f'--json={cat_json_path}', | |||
) | |||
proc.check_run( | |||
new_exe, |