| echo "Specify a target name to execute" | echo "Specify a target name to execute" | ||||
| exit 1 | exit 1 | ||||
| clean: | |||||
| rm -f -r -- $(shell find -name __pycache__ -type d) | |||||
| rm -f -r -- _build/ _prebuilt/ | |||||
| docs: | docs: | ||||
| sphinx-build -b html \ | sphinx-build -b html \ | ||||
| docs \ | docs \ | ||||
| --reload-delay 300 \ | --reload-delay 300 \ | ||||
| --watch **/*.html | --watch **/*.html | ||||
| macos-ci: nix-ci | |||||
| linux-ci: nix-ci | |||||
| macos-ci: | |||||
| python3 -u tools/ci.py \ | |||||
| -B download \ | |||||
| -T tools/gcc-9.jsonc \ | |||||
| -T2 tools/gcc-9.next.jsonc \ | |||||
| linux-ci: | |||||
| python3 -u tools/ci.py \ | |||||
| -B download \ | |||||
| -T tools/gcc-9.jsonc \ | |||||
| -T2 tools/gcc-9-static.jsonc | |||||
| nix-ci: | nix-ci: | ||||
| python3 -u tools/ci.py \ | python3 -u tools/ci.py \ | ||||
| cd /vagrant && \ | cd /vagrant && \ | ||||
| python3.7 tools/ci.py \ | python3.7 tools/ci.py \ | ||||
| -B download \ | -B download \ | ||||
| -T tools/freebsd-gcc-9.jsonc \ | |||||
| -T tools/freebsd-gcc-9.jsonc \ | |||||
| -T2 tools/freebsd-gcc-9.next.jsonc \ | |||||
| ' | ' | ||||
| vagrant scp freebsd11:/vagrant/_build/dds _build/dds-freebsd-x64 | vagrant scp freebsd11:/vagrant/_build/dds _build/dds-freebsd-x64 | ||||
| vagrant halt | vagrant halt |
| echo Executing Build and Tests | echo Executing Build and Tests | ||||
| reg add HKLM\SYSTEM\CurrentControlSet\Control\FileSystem /v LongPathsEnabled /t REG_DWORD /d 1 /f || exit 1 | reg add HKLM\SYSTEM\CurrentControlSet\Control\FileSystem /v LongPathsEnabled /t REG_DWORD /d 1 /f || exit 1 | ||||
| python -m pip install pytest pytest-xdist || exit 1 | python -m pip install pytest pytest-xdist || exit 1 | ||||
| python -u tools/ci.py -B download -T tools\msvc.jsonc || exit 1 | |||||
| python -u tools/ci.py -B download -T tools\msvc.jsonc -T2 tools\msvc.next.jsonc || exit 1 | |||||
| displayName: Full CI | displayName: Full CI | ||||
| - publish: _build/dds.exe | - publish: _build/dds.exe | ||||
| artifact: DDS Executable - Windows VS2019 | artifact: DDS Executable - Windows VS2019 |
| You can also list your dependencies as an inline string in your CMakeLists.txt | You can also list your dependencies as an inline string in your CMakeLists.txt | ||||
| instead of a separate file:: | instead of a separate file:: | ||||
| pmm(DDS DEPENDS "neo-sqlite3 ^0.2.2") | |||||
| pmm(DDS DEPENDS neo-sqlite3^0.2.2) | |||||
| Since you'll probably want to be using ``libman.cmake`` at the same time, the | Since you'll probably want to be using ``libman.cmake`` at the same time, the | ||||
| calls for ``CMakeCM`` and ``DDS`` can simply be combined. This is how our new | calls for ``CMakeCM`` and ``DDS`` can simply be combined. This is how our new | ||||
| include(pmm.cmake) | include(pmm.cmake) | ||||
| pmm(CMakeCM ROLLING | pmm(CMakeCM ROLLING | ||||
| DDS DEPENDS "neo-sqlite3 ^0.2.2" | |||||
| DDS DEPENDS neo-sqlite3^0.2.2 | |||||
| ) | ) | ||||
| include(libman) | include(libman) |
| "pubgrub": "0.2.1", | "pubgrub": "0.2.1", | ||||
| "vob-json5": "0.1.5", | "vob-json5": "0.1.5", | ||||
| "vob-semester": "0.2.1", | "vob-semester": "0.2.1", | ||||
| "ctre": "2.7.0", | |||||
| "ctre": "2.8.1", | |||||
| }, | }, | ||||
| "test_driver": "Catch-Main" | "test_driver": "Catch-Main" | ||||
| } | } |
| catalog_path_flag cat_path{cmd}; | catalog_path_flag cat_path{cmd}; | ||||
| args::Flag import_stdin{cmd, "stdin", "Import JSON from stdin", {"stdin"}}; | args::Flag import_stdin{cmd, "stdin", "Import JSON from stdin", {"stdin"}}; | ||||
| args::Flag init{cmd, "initial", "Re-import the initial catalog contents", {"initial"}}; | |||||
| args::ValueFlagList<std::string> | args::ValueFlagList<std::string> | ||||
| json_paths{cmd, | json_paths{cmd, | ||||
| "json", | "json", | ||||
| int run() { | int run() { | ||||
| auto cat = cat_path.open(); | auto cat = cat_path.open(); | ||||
| if (init.Get()) { | |||||
| cat.import_initial(); | |||||
| } | |||||
| for (const auto& json_fpath : json_paths.Get()) { | for (const auto& json_fpath : json_paths.Get()) { | ||||
| cat.import_json_file(json_fpath); | cat.import_json_file(json_fpath); | ||||
| } | } | ||||
| [&](dds::repository repo) { | [&](dds::repository repo) { | ||||
| // Download dependencies | // Download dependencies | ||||
| auto deps = repo.solve(man.dependencies, cat); | auto deps = repo.solve(man.dependencies, cat); | ||||
| dds::get_all(deps, repo, cat); | |||||
| for (const dds::package_id& pk : deps) { | for (const dds::package_id& pk : deps) { | ||||
| auto exists = !!repo.find(pk); | |||||
| if (!exists) { | |||||
| spdlog::info("Download dependency: {}", pk.to_string()); | |||||
| auto opt_pkg = cat.get(pk); | |||||
| assert(opt_pkg); | |||||
| auto tsd = dds::get_package_sdist(*opt_pkg); | |||||
| repo.add_sdist(tsd.sdist, dds::if_exists::throw_exc); | |||||
| } | |||||
| auto sdist_ptr = repo.find(pk); | auto sdist_ptr = repo.find(pk); | ||||
| assert(sdist_ptr); | assert(sdist_ptr); | ||||
| dds::sdist_build_params deps_params; | dds::sdist_build_params deps_params; | ||||
| // Download dependencies | // Download dependencies | ||||
| spdlog::info("Loading {} dependencies", all_deps.size()); | spdlog::info("Loading {} dependencies", all_deps.size()); | ||||
| auto deps = repo.solve(all_deps, cat); | auto deps = repo.solve(all_deps, cat); | ||||
| dds::get_all(deps, repo, cat); | |||||
| for (const dds::package_id& pk : deps) { | for (const dds::package_id& pk : deps) { | ||||
| auto exists = !!repo.find(pk); | |||||
| if (!exists) { | |||||
| spdlog::info("Download dependency: {}", pk.to_string()); | |||||
| auto opt_pkg = cat.get(pk); | |||||
| assert(opt_pkg); | |||||
| auto tsd = dds::get_package_sdist(*opt_pkg); | |||||
| repo.add_sdist(tsd.sdist, dds::if_exists::throw_exc); | |||||
| } | |||||
| auto sdist_ptr = repo.find(pk); | auto sdist_ptr = repo.find(pk); | ||||
| assert(sdist_ptr); | assert(sdist_ptr); | ||||
| dds::sdist_build_params deps_params; | dds::sdist_build_params deps_params; |
| }; | }; | ||||
| void log_failure(const test_failure& fail) { | void log_failure(const test_failure& fail) { | ||||
| spdlog::error("Test '{}' failed! [exitted {}]", fail.executable_path.string(), fail.retc); | |||||
| spdlog::error("Test '{}' failed! [exited {}]", fail.executable_path.string(), fail.retc); | |||||
| if (fail.signal) { | if (fail.signal) { | ||||
| spdlog::error("Test execution received signal {}", fail.signal); | spdlog::error("Test execution received signal {}", fail.signal); | ||||
| } | } |
| #include <dds/build/file_deps.hpp> | #include <dds/build/file_deps.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/proc.hpp> | #include <dds/proc.hpp> | ||||
| #include <dds/util/parallel.hpp> | |||||
| #include <dds/util/string.hpp> | #include <dds/util/string.hpp> | ||||
| #include <dds/util/time.hpp> | #include <dds/util/time.hpp> | ||||
| namespace { | namespace { | ||||
| template <typename Range, typename Fn> | |||||
| bool parallel_run(Range&& rng, int n_jobs, Fn&& fn) { | |||||
| // We don't bother with a nice thread pool, as the overhead of most build | |||||
| // tasks dwarf the cost of interlocking. | |||||
| std::mutex mut; | |||||
| auto iter = rng.begin(); | |||||
| const auto stop = rng.end(); | |||||
| std::vector<std::exception_ptr> exceptions; | |||||
| auto run_one = [&]() mutable { | |||||
| while (true) { | |||||
| std::unique_lock lk{mut}; | |||||
| if (!exceptions.empty()) { | |||||
| break; | |||||
| } | |||||
| if (iter == stop) { | |||||
| break; | |||||
| } | |||||
| auto&& item = *iter; | |||||
| ++iter; | |||||
| lk.unlock(); | |||||
| try { | |||||
| fn(item); | |||||
| } catch (...) { | |||||
| lk.lock(); | |||||
| exceptions.push_back(std::current_exception()); | |||||
| break; | |||||
| } | |||||
| } | |||||
| }; | |||||
| std::unique_lock lk{mut}; | |||||
| std::vector<std::thread> threads; | |||||
| if (n_jobs < 1) { | |||||
| n_jobs = std::thread::hardware_concurrency() + 2; | |||||
| } | |||||
| std::generate_n(std::back_inserter(threads), n_jobs, [&] { return std::thread(run_one); }); | |||||
| lk.unlock(); | |||||
| for (auto& t : threads) { | |||||
| t.join(); | |||||
| } | |||||
| for (auto eptr : exceptions) { | |||||
| try { | |||||
| std::rethrow_exception(eptr); | |||||
| } catch (const std::exception& e) { | |||||
| spdlog::error(e.what()); | |||||
| } | |||||
| } | |||||
| return exceptions.empty(); | |||||
| } | |||||
| /// The actual "real" information that we need to perform a compilation. | /// The actual "real" information that we need to perform a compilation. | ||||
| struct compile_file_full { | struct compile_file_full { | ||||
| const compile_file_plan& plan; | const compile_file_plan& plan; |
| auto exe_path = calc_executable_path(env); | auto exe_path = calc_executable_path(env); | ||||
| auto msg = fmt::format("Run test: {:30}", fs::relative(exe_path, env.output_root).string()); | auto msg = fmt::format("Run test: {:30}", fs::relative(exe_path, env.output_root).string()); | ||||
| spdlog::info(msg); | spdlog::info(msg); | ||||
| auto&& [dur, res] | |||||
| = timed<std::chrono::microseconds>([&] { return run_proc({exe_path.string()}); }); | |||||
| using namespace std::chrono_literals; | |||||
| auto&& [dur, res] = timed<std::chrono::microseconds>( | |||||
| [&] { return run_proc({.command = {exe_path.string()}, .timeout = 10s}); }); | |||||
| if (res.okay()) { | if (res.okay()) { | ||||
| spdlog::info("{} - PASSED - {:>9n}μs", msg, dur.count()); | spdlog::info("{} - PASSED - {:>9n}μs", msg, dur.count()); | ||||
| return std::nullopt; | return std::nullopt; | ||||
| } else { | } else { | ||||
| spdlog::error("{} - FAILED - {:>9n}μs [exited {}]", msg, dur.count(), res.retc); | |||||
| auto exit_msg = fmt::format(res.signal ? "signalled {}" : "exited {}", | |||||
| res.signal ? res.signal : res.retc); | |||||
| auto fail_str = res.timed_out ? "TIMEOUT" : "FAILED "; | |||||
| spdlog::error("{} - {} - {:>9n}μs [{}]", msg, fail_str, dur.count(), exit_msg); | |||||
| test_failure f; | test_failure f; | ||||
| f.executable_path = exe_path; | f.executable_path = exe_path; | ||||
| f.output = res.output; | f.output = res.output; |
| #include <dds/build/iter_compilations.hpp> | #include <dds/build/iter_compilations.hpp> | ||||
| #include <dds/build/plan/compile_exec.hpp> | #include <dds/build/plan/compile_exec.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/util/parallel.hpp> | |||||
| #include <range/v3/view/concat.hpp> | #include <range/v3/view/concat.hpp> | ||||
| #include <range/v3/view/filter.hpp> | #include <range/v3/view/filter.hpp> | ||||
| namespace { | namespace { | ||||
| /// XXX: Duplicated in compile_exec.cpp !! | |||||
| template <typename Range, typename Fn> | |||||
| bool parallel_run(Range&& rng, int n_jobs, Fn&& fn) { | |||||
| // We don't bother with a nice thread pool, as the overhead of most build | |||||
| // tasks dwarf the cost of interlocking. | |||||
| std::mutex mut; | |||||
| auto iter = rng.begin(); | |||||
| const auto stop = rng.end(); | |||||
| std::vector<std::exception_ptr> exceptions; | |||||
| auto run_one = [&]() mutable { | |||||
| while (true) { | |||||
| std::unique_lock lk{mut}; | |||||
| if (!exceptions.empty()) { | |||||
| break; | |||||
| } | |||||
| if (iter == stop) { | |||||
| break; | |||||
| } | |||||
| auto&& item = *iter; | |||||
| ++iter; | |||||
| lk.unlock(); | |||||
| try { | |||||
| fn(item); | |||||
| } catch (...) { | |||||
| lk.lock(); | |||||
| exceptions.push_back(std::current_exception()); | |||||
| break; | |||||
| } | |||||
| } | |||||
| }; | |||||
| std::unique_lock lk{mut}; | |||||
| std::vector<std::thread> threads; | |||||
| if (n_jobs < 1) { | |||||
| n_jobs = std::thread::hardware_concurrency() + 2; | |||||
| } | |||||
| std::generate_n(std::back_inserter(threads), n_jobs, [&] { return std::thread(run_one); }); | |||||
| lk.unlock(); | |||||
| for (auto& t : threads) { | |||||
| t.join(); | |||||
| } | |||||
| for (auto eptr : exceptions) { | |||||
| try { | |||||
| std::rethrow_exception(eptr); | |||||
| } catch (const std::exception& e) { | |||||
| spdlog::error(e.what()); | |||||
| } | |||||
| } | |||||
| return exceptions.empty(); | |||||
| } | |||||
| template <typename T, typename Range> | template <typename T, typename Range> | ||||
| decltype(auto) pair_up(T& left, Range& right) { | decltype(auto) pair_up(T& left, Range& right) { | ||||
| auto rep = ranges::view::repeat(left); | auto rep = ranges::view::repeat(left); |
| #include "./import.hpp" | #include "./import.hpp" | ||||
| #include <dds/catalog/init_catalog.hpp> | |||||
| #include <dds/dym.hpp> | #include <dds/dym.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/solve/solve.hpp> | #include <dds/solve/solve.hpp> | ||||
| )"); | )"); | ||||
| } | } | ||||
| std::string transforms_to_json(const std::vector<fs_transformation>& trs) { | |||||
| std::string acc = "["; | |||||
| for (auto it = trs.begin(); it != trs.end(); ++it) { | |||||
| acc += it->as_json(); | |||||
| if (std::next(it) != trs.end()) { | |||||
| acc += ", "; | |||||
| } | |||||
| } | |||||
| return acc + "]"; | |||||
| } | |||||
// Overload selected when pkg.remote holds std::monostate, i.e. the package
// carries no remote listing at all. Reaching this is a programming error, so
// it unconditionally fires an always-on assertion (even in release builds).
void store_with_remote(const neo::sqlite3::statement_cache&,
                       const package_info& pkg,
                       std::monostate) {
    neo_assert_always(
        invariant,
        false,
        "There was an attempt to insert a package listing into the database where that package "
        "listing does not have a remote listing. If you see this message, it is a dds bug.",
        pkg.ident.to_string());
}
// Store a package whose remote listing is a git repository. Writes one row
// into dds_cat_pkgs; INSERT OR REPLACE makes re-imports idempotent. Empty
// lm_name/lm_namespace strings are normalized to NULL by the CASE expressions
// in the SQL itself.
void store_with_remote(neo::sqlite3::statement_cache& stmts,
                       const package_info&            pkg,
                       const git_remote_listing&      git) {
    // Packages without an auto-lib get empty usage fields (mapped to NULL below).
    auto lm_usage = git.auto_lib.value_or(lm::usage{});
    sqlite3::exec(  //
        stmts,
        R"(
            INSERT OR REPLACE INTO dds_cat_pkgs (
                name,
                version,
                git_url,
                git_ref,
                lm_name,
                lm_namespace,
                description,
                repo_transform
            ) VALUES (
                ?1,
                ?2,
                ?3,
                ?4,
                CASE WHEN ?5 = '' THEN NULL ELSE ?5 END,
                CASE WHEN ?6 = '' THEN NULL ELSE ?6 END,
                ?7,
                ?8
            )
        )"_sql,
        std::forward_as_tuple(  //
            pkg.ident.name,
            pkg.ident.version.to_string(),
            git.url,
            git.ref,
            lm_usage.name,
            lm_usage.namespace_,
            pkg.description,
            // Transformations are stored as a JSON array string.
            transforms_to_json(git.transforms)));
}
// Insert (or replace) one package row plus its dependency rows. The
// remote-specific columns are written by whichever store_with_remote overload
// matches the variant held in pkg.remote.
void do_store_pkg(neo::sqlite3::database&        db,
                  neo::sqlite3::statement_cache& st_cache,
                  const package_info&            pkg) {
    // Dispatch on the concrete remote-listing type (git, monostate, ...).
    std::visit([&](auto&& remote) { store_with_remote(st_cache, pkg, remote); }, pkg.remote);
    // Must be read immediately after the insert above: the rowid of the new
    // package row is the foreign key for its dependency rows.
    auto  db_pkg_id  = db.last_insert_rowid();
    auto& new_dep_st = st_cache(R"(
        INSERT INTO dds_cat_pkg_deps (
            pkg_id,
            dep_name,
            low,
            high
        ) VALUES (
            ?,
            ?,
            ?,
            ?
        )
    )"_sql);
    for (const auto& dep : pkg.deps) {
        new_dep_st.reset();
        // Each dependency is expected to be a single contiguous version
        // interval. NOTE(review): this is only checked in debug builds.
        assert(dep.versions.num_intervals() == 1);
        auto iv_1 = *dep.versions.iter_intervals().begin();
        sqlite3::exec(new_dep_st,
                      std::forward_as_tuple(db_pkg_id,
                                            dep.name,
                                            iv_1.low.to_string(),
                                            iv_1.high.to_string()));
    }
}
| void store_init_packages(sqlite3::database& db, sqlite3::statement_cache& st_cache) { | |||||
| for (auto& pkg : init_catalog_packages()) { | |||||
| do_store_pkg(db, st_cache, pkg); | |||||
| } | |||||
| } | |||||
| void ensure_migrated(sqlite3::database& db) { | void ensure_migrated(sqlite3::database& db) { | ||||
| sqlite3::transaction_guard tr{db}; | sqlite3::transaction_guard tr{db}; | ||||
| db.exec(R"( | db.exec(R"( | ||||
| constexpr int current_database_version = 2; | constexpr int current_database_version = 2; | ||||
| int version = version_; | int version = version_; | ||||
| // If this is the first time we're working here, import the initial | |||||
| // catalog with some useful tidbits. | |||||
| bool import_init_packages = version == 0; | |||||
| if (version > current_database_version) { | if (version > current_database_version) { | ||||
| throw_external_error<errc::catalog_too_new>(); | throw_external_error<errc::catalog_too_new>(); | ||||
| } | } | ||||
| } | } | ||||
| meta["version"] = 2; | meta["version"] = 2; | ||||
| exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump())); | exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump())); | ||||
| if (import_init_packages) { | |||||
| spdlog::info( | |||||
| "A new catalog database case been created, and has been populated with some initial " | |||||
| "contents."); | |||||
| neo::sqlite3::statement_cache stmts{db}; | |||||
| store_init_packages(db, stmts); | |||||
| } | |||||
| } | } | ||||
| void check_json(bool b, std::string_view what) { | void check_json(bool b, std::string_view what) { | ||||
| catalog::catalog(sqlite3::database db) | catalog::catalog(sqlite3::database db) | ||||
| : _db(std::move(db)) {} | : _db(std::move(db)) {} | ||||
// Overload selected when pkg.remote holds std::monostate (no remote listing).
// Storing such a package is a programming error, so this unconditionally
// fires an always-on assertion.
void catalog::_store_pkg(const package_info& pkg, std::monostate) {
    neo_assert_always(
        invariant,
        false,
        "There was an attempt to insert a package listing into the database where that package "
        "listing does not have a remote listing. If you see this message, it is a dds bug.",
        pkg.ident.to_string());
}
| namespace { | |||||
| std::string transforms_to_json(const std::vector<fs_transformation>& trs) { | |||||
| std::string acc = "["; | |||||
| for (auto it = trs.begin(); it != trs.end(); ++it) { | |||||
| acc += it->as_json(); | |||||
| if (std::next(it) != trs.end()) { | |||||
| acc += ", "; | |||||
| } | |||||
| } | |||||
| return acc + "]"; | |||||
| } | |||||
| } // namespace | |||||
// Store a package whose remote listing is a git repository. Writes one row
// into dds_cat_pkgs; INSERT OR REPLACE makes re-imports idempotent. Empty
// lm_name/lm_namespace strings are normalized to NULL by the SQL itself.
void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git) {
    // Packages without an auto-lib get empty usage fields (mapped to NULL below).
    auto lm_usage = git.auto_lib.value_or(lm::usage{});
    sqlite3::exec(  //
        _stmt_cache,
        R"(
            INSERT OR REPLACE INTO dds_cat_pkgs (
                name,
                version,
                git_url,
                git_ref,
                lm_name,
                lm_namespace,
                description,
                repo_transform
            ) VALUES (
                ?1,
                ?2,
                ?3,
                ?4,
                CASE WHEN ?5 = '' THEN NULL ELSE ?5 END,
                CASE WHEN ?6 = '' THEN NULL ELSE ?6 END,
                ?7,
                ?8
            )
        )"_sql,
        std::forward_as_tuple(  //
            pkg.ident.name,
            pkg.ident.version.to_string(),
            git.url,
            git.ref,
            lm_usage.name,
            lm_usage.namespace_,
            pkg.description,
            // Transformations are stored as a JSON array string.
            transforms_to_json(git.transforms)));
}
| void catalog::store(const package_info& pkg) { | void catalog::store(const package_info& pkg) { | ||||
| sqlite3::transaction_guard tr{_db}; | sqlite3::transaction_guard tr{_db}; | ||||
| std::visit([&](auto&& remote) { _store_pkg(pkg, remote); }, pkg.remote); | |||||
| auto db_pkg_id = _db.last_insert_rowid(); | |||||
| auto& new_dep_st = _stmt_cache(R"( | |||||
| INSERT INTO dds_cat_pkg_deps ( | |||||
| pkg_id, | |||||
| dep_name, | |||||
| low, | |||||
| high | |||||
| ) VALUES ( | |||||
| ?, | |||||
| ?, | |||||
| ?, | |||||
| ? | |||||
| ) | |||||
| )"_sql); | |||||
| for (const auto& dep : pkg.deps) { | |||||
| new_dep_st.reset(); | |||||
| assert(dep.versions.num_intervals() == 1); | |||||
| auto iv_1 = *dep.versions.iter_intervals().begin(); | |||||
| sqlite3::exec(new_dep_st, | |||||
| std::forward_as_tuple(db_pkg_id, | |||||
| dep.name, | |||||
| iv_1.low.to_string(), | |||||
| iv_1.high.to_string())); | |||||
| } | |||||
| do_store_pkg(_db, _stmt_cache, pkg); | |||||
| } | } | ||||
| std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept { | std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept { | ||||
| store(pkg); | store(pkg); | ||||
| } | } | ||||
| } | } | ||||
// Re-import the catalog contents that ship built into the dds binary.
// Wrapped in a transaction so a failure leaves the database untouched.
void catalog::import_initial() {
    sqlite3::transaction_guard tr{_db};
    spdlog::info("Restoring built-in initial catalog contents");
    store_init_packages(_db, _stmt_cache);
}
| explicit catalog(neo::sqlite3::database db); | explicit catalog(neo::sqlite3::database db); | ||||
| catalog(const catalog&) = delete; | catalog(const catalog&) = delete; | ||||
| void _store_pkg(const package_info&, const git_remote_listing&); | |||||
| void _store_pkg(const package_info&, std::monostate); | |||||
| public: | public: | ||||
| catalog(catalog&&) = default; | catalog(catalog&&) = default; | ||||
| catalog& operator=(catalog&&) = default; | catalog& operator=(catalog&&) = default; | ||||
| std::vector<package_id> by_name(std::string_view sv) const noexcept; | std::vector<package_id> by_name(std::string_view sv) const noexcept; | ||||
| std::vector<dependency> dependencies_of(const package_id& pkg) const noexcept; | std::vector<dependency> dependencies_of(const package_id& pkg) const noexcept; | ||||
| void import_initial(); | |||||
| void import_json_str(std::string_view json_str); | void import_json_str(std::string_view json_str); | ||||
| void import_json_file(path_ref json_path) { | void import_json_file(path_ref json_path) { | ||||
| auto content = dds::slurp_file(json_path); | auto content = dds::slurp_file(json_path); |
| "packages": { | "packages": { | ||||
| "foo": { | "foo": { | ||||
| "1.2.3": { | "1.2.3": { | ||||
| "depends": { | |||||
| "bar": "~4.2.1" | |||||
| }, | |||||
| "depends": [ | |||||
| "bar~4.2.1" | |||||
| ], | |||||
| "git": { | "git": { | ||||
| "url": "http://example.com", | "url": "http://example.com", | ||||
| "ref": "master" | "ref": "master" |
| #include <dds/catalog/catalog.hpp> | #include <dds/catalog/catalog.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/repo/repo.hpp> | |||||
| #include <dds/util/parallel.hpp> | |||||
| #include <neo/assert.hpp> | #include <neo/assert.hpp> | ||||
| #include <nlohmann/json.hpp> | #include <nlohmann/json.hpp> | ||||
| #include <range/v3/algorithm/any_of.hpp> | #include <range/v3/algorithm/any_of.hpp> | ||||
| #include <range/v3/distance.hpp> | #include <range/v3/distance.hpp> | ||||
| #include <range/v3/numeric/accumulate.hpp> | #include <range/v3/numeric/accumulate.hpp> | ||||
| #include <range/v3/view/filter.hpp> | |||||
| #include <range/v3/view/transform.hpp> | |||||
| #include <spdlog/spdlog.h> | #include <spdlog/spdlog.h> | ||||
| using namespace dds; | using namespace dds; | ||||
| tsd.sdist.manifest.pkg_id.to_string()); | tsd.sdist.manifest.pkg_id.to_string()); | ||||
| } | } | ||||
| return tsd; | return tsd; | ||||
| } | |||||
| } | |||||
| void dds::get_all(const std::vector<package_id>& pkgs, repository& repo, const catalog& cat) { | |||||
| std::mutex repo_mut; | |||||
| auto absent_pkg_infos = pkgs // | |||||
| | ranges::views::filter([&](auto pk) { | |||||
| std::scoped_lock lk{repo_mut}; | |||||
| return !repo.find(pk); | |||||
| }) | |||||
| | ranges::views::transform([&](auto id) { | |||||
| auto info = cat.get(id); | |||||
| neo_assert(invariant, | |||||
| info.has_value(), | |||||
| "No catalog entry for package id?", | |||||
| id.to_string()); | |||||
| return *info; | |||||
| }); | |||||
| auto okay = parallel_run(absent_pkg_infos, 8, [&](package_info inf) { | |||||
| spdlog::info("Download package: {}", inf.ident.to_string()); | |||||
| auto tsd = get_package_sdist(inf); | |||||
| std::scoped_lock lk{repo_mut}; | |||||
| repo.add_sdist(tsd.sdist, if_exists::throw_exc); | |||||
| }); | |||||
| if (!okay) { | |||||
| throw_external_error<errc::dependency_resolve_failure>("Downloading of packages failed."); | |||||
| } | |||||
| } |
| namespace dds { | namespace dds { | ||||
| class repository; | |||||
| class catalog; | |||||
| struct package_info; | struct package_info; | ||||
| struct temporary_sdist { | struct temporary_sdist { | ||||
| temporary_sdist get_package_sdist(const package_info&); | temporary_sdist get_package_sdist(const package_info&); | ||||
| } // namespace dds | |||||
| void get_all(const std::vector<package_id>& pkgs, dds::repository& repo, const catalog& cat); | |||||
| } // namespace dds |
| using namespace semester::walk_ops; | using namespace semester::walk_ops; | ||||
| std::string dep_name; | |||||
| auto dep_range = semver::range::everything(); | |||||
| auto parse_dep_range = [&](const std::string& s) { | |||||
| auto make_dep = [&](std::string const& str) { | |||||
| try { | try { | ||||
| return semver::range::parse_restricted(s); | |||||
| } catch (const semver::invalid_range& e) { | |||||
| return dependency::parse_depends_string(str); | |||||
| } catch (std::runtime_error const& e) { | |||||
| import_error(std::string(walk.path()) + e.what()); | import_error(std::string(walk.path()) + e.what()); | ||||
| } | } | ||||
| }; | }; | ||||
| auto make_dep = [&](auto&&) { | |||||
| return dependency{dep_name, {dep_range.low(), dep_range.high()}}; | |||||
| }; | |||||
| auto check_one_remote = [&](auto&&) { | auto check_one_remote = [&](auto&&) { | ||||
| if (!semester::holds_alternative<std::monostate>(ret.remote)) { | if (!semester::holds_alternative<std::monostate>(ret.remote)) { | ||||
| return walk.pass; | return walk.pass; | ||||
| }; | }; | ||||
| auto add_dep = any_key{put_into(dep_name), | |||||
| require_str{"Dependency should specify a version range string"}, | |||||
| put_into_pass{dep_range, parse_dep_range}, | |||||
| put_into{std::back_inserter(ret.deps), make_dep}}; | |||||
| walk(data, | walk(data, | ||||
| mapping{if_key{"description", | mapping{if_key{"description", | ||||
| require_str{"'description' should be a string"}, | require_str{"'description' should be a string"}, | ||||
| put_into{ret.description}}, | put_into{ret.description}}, | ||||
| if_key{"depends", | if_key{"depends", | ||||
| require_obj{"'depends' must be a JSON object"}, | |||||
| mapping{add_dep}}, | |||||
| require_array{"'depends' must be an array of dependency strings"}, | |||||
| for_each{require_str{"Each dependency should be a string"}, | |||||
| put_into{std::back_inserter(ret.deps), make_dep}}}, | |||||
| if_key{ | if_key{ | ||||
| "git", | "git", | ||||
| check_one_remote, | check_one_remote, |
#pragma once

#include "./package_info.hpp"

#include <vector>

namespace dds {

/// The package listings that ship built into the dds executable, used to
/// populate a fresh catalog database. Returned by const reference; callers
/// must not mutate it.
const std::vector<package_info>& init_catalog_packages() noexcept;

}  // namespace dds
| using namespace dds; | using namespace dds; | ||||
| dependency dependency::parse_depends_string(std::string_view str) { | dependency dependency::parse_depends_string(std::string_view str) { | ||||
| const auto str_begin = str.data(); | |||||
| auto str_iter = str_begin; | |||||
| const auto str_end = str_iter + str.size(); | |||||
| while (str_iter != str_end && !std::isspace(*str_iter)) { | |||||
| ++str_iter; | |||||
| auto sep_pos = str.find_first_of("=@^~+"); | |||||
| if (sep_pos == str.npos) { | |||||
| throw_user_error<errc::invalid_version_range_string>("Invalid dependency string '{}'", str); | |||||
| } | } | ||||
| auto name = trim_view(std::string_view(str_begin, str_iter - str_begin)); | |||||
| auto version_str = trim_view(std::string_view(str_iter, str_end - str_iter)); | |||||
| auto name = str.substr(0, sep_pos); | |||||
| if (str[sep_pos] == '@') { | |||||
| ++sep_pos; | |||||
| } | |||||
| auto range_str = str.substr(sep_pos); | |||||
| try { | try { | ||||
| auto rng = semver::range::parse_restricted(version_str); | |||||
| auto rng = semver::range::parse_restricted(range_str); | |||||
| return dependency{std::string(name), {rng.low(), rng.high()}}; | return dependency{std::string(name), {rng.low(), rng.high()}}; | ||||
| } catch (const semver::invalid_range&) { | } catch (const semver::invalid_range&) { | ||||
| throw_user_error<errc::invalid_version_range_string>( | throw_user_error<errc::invalid_version_range_string>( | ||||
| "Invalid version range string '{}' in dependency declaration '{}'", version_str, str); | |||||
| "Invalid version range string '{}' in dependency string '{}'", range_str, str); | |||||
| } | } | ||||
| } | } | ||||
| #include <dds/deps.hpp> | |||||
| #include <catch2/catch.hpp> | |||||
TEST_CASE("Parse dependency strings") {
    // One table row per supported shorthand operator.
    struct case_ {
        std::string depstr;  // input dependency shorthand
        std::string name;    // expected package name
        std::string low;     // expected inclusive lower version bound
        std::string high;    // expected exclusive upper version bound
    };
    // '@' and '=' pin an exact version; '^' allows compatible (same-major)
    // versions; '~' allows same-minor; '+' allows anything at or above.
    auto cur = GENERATE(Catch::Generators::values<case_>({
        {"foo@1.2.3", "foo", "1.2.3", "1.2.4"},
        {"foo=1.2.3", "foo", "1.2.3", "1.2.4"},
        {"foo^1.2.3", "foo", "1.2.3", "2.0.0"},
        {"foo~1.2.3", "foo", "1.2.3", "1.3.0"},
        {"foo+1.2.3", "foo", "1.2.3", semver::version::max_version().to_string()},
    }));

    auto dep = dds::dependency::parse_depends_string(cur.depstr);
    CHECK(dep.name == cur.name);
    // The parsed range must be a single contiguous interval [low, high).
    CHECK(dep.versions.num_intervals() == 1);
    auto ver_iv = *dep.versions.iter_intervals().begin();
    CHECK(ver_iv.low == semver::version::parse(cur.low));
    CHECK(ver_iv.high == semver::version::parse(cur.high));
}
| #include <dds/dym.hpp> | #include <dds/dym.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/util/algo.hpp> | #include <dds/util/algo.hpp> | ||||
| #include <libman/parse.hpp> | |||||
| #include <json5/parse_data.hpp> | #include <json5/parse_data.hpp> | ||||
| #include <range/v3/view/transform.hpp> | #include <range/v3/view/transform.hpp> | ||||
| using namespace dds; | using namespace dds; | ||||
// Load a library manifest from the deprecated `library.dds` key-value format.
// Emits a warning on every use; this code path is slated for removal.
library_manifest library_manifest::load_from_dds_file(path_ref fpath) {
    spdlog::warn(
        "Using deprecated library.dds parsing (on file {}). This will be removed soon. Migrate!",
        fpath.string());
    auto             kvs = lm::parse_file(fpath);
    library_manifest ret;
    // Seed the name from the containing directory; the `Name` key read below
    // presumably overwrites it (read_required suggests it is mandatory) —
    // TODO(review): confirm against lm::read's semantics.
    ret.name = fpath.parent_path().filename().string();
    std::vector<std::string> uses_strings;
    std::vector<std::string> links_strings;
    lm::read(fmt::format("Reading library manifest {}", fpath.string()),
             kvs,
             lm::read_accumulate("Uses", uses_strings),
             lm::read_accumulate("Links", links_strings),
             lm::read_required("Name", ret.name),
             // Reject unknown keys, with did-you-mean suggestions.
             lm_reject_dym{{"Uses", "Links", "Name"}});
    // Convert the raw usage strings into structured lm usage entries.
    extend(ret.uses, ranges::views::transform(uses_strings, lm::split_usage_string));
    extend(ret.links, ranges::views::transform(links_strings, lm::split_usage_string));
    return ret;
}
| library_manifest library_manifest::load_from_file(path_ref fpath) { | library_manifest library_manifest::load_from_file(path_ref fpath) { | ||||
| auto content = slurp_file(fpath); | auto content = slurp_file(fpath); | ||||
| auto data = json5::parse_data(content); | auto data = json5::parse_data(content); | ||||
| if (rej) { | if (rej) { | ||||
| throw_user_error<errc::invalid_lib_manifest>(rej->message); | throw_user_error<errc::invalid_lib_manifest>(rej->message); | ||||
| } | } | ||||
| // using namespace json_read::ops; | |||||
| // json_read::decompose( // | |||||
| // data.as_object(), | |||||
| // object(key("name", require_string(put_into{lib.name}, "`name` must be a string")), | |||||
| // key("uses", | |||||
| // array_each{require_string( | |||||
| // [&](auto&& uses) { | |||||
| // lib.uses.push_back(lm::split_usage_string(uses.as_string())); | |||||
| // return json_read::accept_t{}; | |||||
| // }, | |||||
| // "All `uses` items must be strings")}), | |||||
| // key("links", | |||||
| // array_each{require_string( | |||||
| // [&](auto&& links) { | |||||
| // lib.links.push_back(lm::split_usage_string(links.as_string())); | |||||
| // return json_read::accept_t{}; | |||||
| // }, | |||||
| // "All `links` items must be strings")}))); | |||||
| if (lib.name.empty()) { | if (lib.name.empty()) { | ||||
| throw_user_error<errc::invalid_lib_manifest>( | throw_user_error<errc::invalid_lib_manifest>( | ||||
| } | } | ||||
| } | } | ||||
| auto dds_file = dirpath / "library.dds"; | |||||
| if (fs::is_regular_file(dds_file)) { | |||||
| return dds_file; | |||||
| } | |||||
| return std::nullopt; | return std::nullopt; | ||||
| } | } | ||||
| return std::nullopt; | return std::nullopt; | ||||
| } | } | ||||
| if (found->extension() == ".dds") { | |||||
| return load_from_dds_file(*found); | |||||
| } else { | |||||
| return load_from_file(*found); | |||||
| } | |||||
| return load_from_file(*found); | |||||
| } | } |
| * Load the library manifest from an existing file | * Load the library manifest from an existing file | ||||
| */ | */ | ||||
| static library_manifest load_from_file(path_ref); | static library_manifest load_from_file(path_ref); | ||||
| static library_manifest load_from_dds_file(path_ref); | |||||
| /** | /** | ||||
| * Find a library manifest within a directory. This will search for a few | * Find a library manifest within a directory. This will search for a few |
| man.name = lib_dir.filename().string(); | man.name = lib_dir.filename().string(); | ||||
| auto found = library_manifest::find_in_directory(lib_dir); | auto found = library_manifest::find_in_directory(lib_dir); | ||||
| if (found) { | if (found) { | ||||
| if (found->extension() == ".dds") { | |||||
| man = library_manifest::load_from_dds_file(*found); | |||||
| } else { | |||||
| man = library_manifest::load_from_file(*found); | |||||
| } | |||||
| man = library_manifest::load_from_file(*found); | |||||
| } | } | ||||
| auto lib = library_root(lib_dir, std::move(sources), std::move(man)); | auto lib = library_root(lib_dir, std::move(sources), std::move(man)); |
| #include <dds/dym.hpp> | #include <dds/dym.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/util/string.hpp> | #include <dds/util/string.hpp> | ||||
| #include <libman/parse.hpp> | |||||
| #include <range/v3/view/split.hpp> | #include <range/v3/view/split.hpp> | ||||
| #include <range/v3/view/split_when.hpp> | #include <range/v3/view/split_when.hpp> | ||||
| #include <range/v3/view/transform.hpp> | #include <range/v3/view/transform.hpp> | ||||
| #include <semester/decomp.hpp> | |||||
| #include <semester/walk.hpp> | |||||
| #include <spdlog/spdlog.h> | #include <spdlog/spdlog.h> | ||||
| #include <json5/parse_data.hpp> | #include <json5/parse_data.hpp> | ||||
| using namespace dds; | using namespace dds; | ||||
| package_manifest package_manifest::load_from_dds_file(const fs::path& fpath) { | |||||
| spdlog::warn( | |||||
| "Using deprecated package.dds parsing (on file {}). This will be removed soon. Migrate!", | |||||
| fpath.string()); | |||||
| auto kvs = lm::parse_file(fpath); | |||||
| package_manifest ret; | |||||
| std::string version_str; | |||||
| std::vector<std::string> depends_strs; | |||||
| std::optional<std::string> opt_test_driver; | |||||
| lm::read(fmt::format("Reading package manifest '{}'", fpath.string()), | |||||
| kvs, | |||||
| lm::read_required("Name", ret.pkg_id.name), | |||||
| lm::read_opt("Namespace", ret.namespace_), | |||||
| lm::read_required("Version", version_str), | |||||
| lm::read_accumulate("Depends", depends_strs), | |||||
| lm::read_opt("Test-Driver", opt_test_driver), | |||||
| lm_reject_dym{{"Name", "Namespace", "Version", "Depends", "Test-Driver"}}); | |||||
| namespace { | |||||
| if (ret.pkg_id.name.empty()) { | |||||
| throw_user_error<errc::invalid_pkg_name>("'Name' field in [{}] may not be an empty string", | |||||
| fpath.string()); | |||||
| } | |||||
| if (version_str.empty()) { | |||||
| throw_user_error< | |||||
| errc::invalid_version_string>("'Version' field in [{}] may not be an empty string", | |||||
| fpath.string()); | |||||
| } | |||||
| if (opt_test_driver) { | |||||
| auto& test_driver_str = *opt_test_driver; | |||||
| if (test_driver_str == "Catch-Main") { | |||||
| ret.test_driver = test_lib::catch_main; | |||||
| } else if (test_driver_str == "Catch") { | |||||
| ret.test_driver = test_lib::catch_; | |||||
| } else { | |||||
| auto dym = *did_you_mean(test_driver_str, {"Catch-Main", "Catch"}); | |||||
| throw_user_error< | |||||
| errc::unknown_test_driver>("Unknown 'test_driver' '{}' (Did you mean '{}'?)", | |||||
| test_driver_str, | |||||
| dym); | |||||
| } | |||||
| } | |||||
| using require_obj = semester::require_type<json5::data::mapping_type>; | |||||
| using require_array = semester::require_type<json5::data::array_type>; | |||||
| using require_str = semester::require_type<std::string>; | |||||
| if (ret.namespace_.empty()) { | |||||
| ret.namespace_ = ret.pkg_id.name; | |||||
| } | |||||
| ret.pkg_id.version = semver::version::parse(version_str); | |||||
| package_manifest parse_json(const json5::data& data, std::string_view fpath) { | |||||
| package_manifest ret; | |||||
| ret.dependencies = depends_strs // | |||||
| | ranges::views::transform(dependency::parse_depends_string) // | |||||
| | ranges::to_vector; | |||||
| using namespace semester::walk_ops; | |||||
| auto push_depends_obj_kv = [&](std::string key, auto&& dat) { | |||||
| dependency pending_dep; | |||||
| if (!dat.is_string()) { | |||||
| return walk.reject("Dependency object values should be strings"); | |||||
| } | |||||
| try { | |||||
| auto rng = semver::range::parse_restricted(dat.as_string()); | |||||
| dependency dep{std::move(key), {rng.low(), rng.high()}}; | |||||
| ret.dependencies.push_back(std::move(dep)); | |||||
| } catch (const semver::invalid_range&) { | |||||
| throw_user_error<errc::invalid_version_range_string>( | |||||
| "Invalid version range string '{}' in dependency declaration for " | |||||
| "'{}'", | |||||
| dat.as_string(), | |||||
| key); | |||||
| } | |||||
| return walk.accept; | |||||
| }; | |||||
| walk(data, | |||||
| require_obj{"Root of package manifest should be a JSON object"}, | |||||
| mapping{ | |||||
| if_key{"$schema", just_accept}, | |||||
| required_key{"name", | |||||
| "A string 'name' is required", | |||||
| require_str{"'name' must be a string"}, | |||||
| put_into{ret.pkg_id.name}}, | |||||
| required_key{"namespace", | |||||
| "A string 'namespace' is a required ", | |||||
| require_str{"'namespace' must be a string"}, | |||||
| put_into{ret.namespace_}}, | |||||
| required_key{"version", | |||||
| "A 'version' string is requried", | |||||
| require_str{"'version' must be a string"}, | |||||
| put_into{ret.pkg_id.version, | |||||
| [](std::string s) { return semver::version::parse(s); }}}, | |||||
| if_key{"depends", | |||||
| [&](auto&& dat) { | |||||
| if (dat.is_object()) { | |||||
| spdlog::warn( | |||||
| "{}: Using a JSON object for 'depends' is deprecated. Use an " | |||||
| "array of strings instead.", | |||||
| fpath); | |||||
| return mapping{push_depends_obj_kv}(dat); | |||||
| } else if (dat.is_array()) { | |||||
| return for_each{put_into{std::back_inserter(ret.dependencies), | |||||
| [](const std::string& depstr) { | |||||
| return dependency::parse_depends_string( | |||||
| depstr); | |||||
| }}}(dat); | |||||
| } else { | |||||
| return walk.reject( | |||||
| "'depends' should be an array of dependency strings"); | |||||
| } | |||||
| }}, | |||||
| if_key{"test_driver", | |||||
| require_str{"'test_driver' must be a string"}, | |||||
| put_into{ret.test_driver, | |||||
| [](std::string const& td_str) { | |||||
| if (td_str == "Catch-Main") { | |||||
| return test_lib::catch_main; | |||||
| } else if (td_str == "Catch") { | |||||
| return test_lib::catch_; | |||||
| } else { | |||||
| auto dym = *did_you_mean(td_str, {"Catch-Main", "Catch"}); | |||||
| throw_user_error<errc::unknown_test_driver>( | |||||
| "Unknown 'test_driver' '{}' (Did you mean '{}'?)", | |||||
| td_str, | |||||
| dym); | |||||
| } | |||||
| }}}, | |||||
| }); | |||||
| return ret; | return ret; | ||||
| } | } | ||||
| } // namespace | |||||
| package_manifest package_manifest::load_from_file(const fs::path& fpath) { | package_manifest package_manifest::load_from_file(const fs::path& fpath) { | ||||
| auto content = slurp_file(fpath); | auto content = slurp_file(fpath); | ||||
| auto data = json5::parse_data(content); | auto data = json5::parse_data(content); | ||||
| if (!data.is_object()) { | |||||
| throw_user_error<errc::invalid_pkg_manifest>("Root value must be an object"); | |||||
| try { | |||||
| return parse_json(data, fpath.string()); | |||||
| } catch (const semester::walk_error& e) { | |||||
| throw_user_error<errc::invalid_pkg_manifest>(e.what()); | |||||
| } | } | ||||
| package_manifest ret; | |||||
| using namespace semester::decompose_ops; | |||||
| auto res = semester::decompose( // | |||||
| data, | |||||
| try_seq{ | |||||
| require_type<json5::data::mapping_type>{ | |||||
| "The root of a package manifest must be an object (mapping)"}, | |||||
| mapping{ | |||||
| if_key{"$schema", just_accept}, | |||||
| if_key{ | |||||
| "name", | |||||
| require_type<std::string>{"`name` must be a string"}, | |||||
| put_into{ret.pkg_id.name}, | |||||
| }, | |||||
| if_key{ | |||||
| "namespace", | |||||
| require_type<std::string>{"`namespace` must be a string"}, | |||||
| put_into{ret.namespace_}, | |||||
| }, | |||||
| if_key{ | |||||
| "version", | |||||
| require_type<std::string>{"`version` must be a string"}, | |||||
| [&](auto&& version_str_) { | |||||
| auto& version = version_str_.as_string(); | |||||
| ret.pkg_id.version = semver::version::parse(version); | |||||
| return semester::dc_accept; | |||||
| }, | |||||
| }, | |||||
| if_key{ | |||||
| "depends", | |||||
| require_type<json5::data::mapping_type>{ | |||||
| "`depends` must be a mapping between package names and version ranges"}, | |||||
| mapping{[&](auto pkg_name, auto&& range_str_) { | |||||
| if (!range_str_.is_string()) { | |||||
| throw_user_error<errc::invalid_pkg_manifest>( | |||||
| "Dependency for '{}' must be a range string", pkg_name); | |||||
| } | |||||
| try { | |||||
| auto rng = semver::range::parse_restricted(range_str_.as_string()); | |||||
| dependency dep{std::string(pkg_name), {rng.low(), rng.high()}}; | |||||
| ret.dependencies.push_back(std::move(dep)); | |||||
| } catch (const semver::invalid_range&) { | |||||
| throw_user_error<errc::invalid_version_range_string>( | |||||
| "Invalid version range string '{}' in dependency declaration for " | |||||
| "'{}'", | |||||
| range_str_.as_string(), | |||||
| pkg_name); | |||||
| } | |||||
| return semester::dc_accept; | |||||
| }}, | |||||
| }, | |||||
| if_key{"test_driver", | |||||
| require_type<std::string>{"`test_driver` must be a string"}, | |||||
| [&](auto&& test_driver_str_) { | |||||
| auto& test_driver = test_driver_str_.as_string(); | |||||
| if (test_driver == "Catch-Main") { | |||||
| ret.test_driver = test_lib::catch_main; | |||||
| } else if (test_driver == "Catch") { | |||||
| ret.test_driver = test_lib::catch_; | |||||
| } else { | |||||
| auto dym = *did_you_mean(test_driver, {"Catch-Main", "Catch"}); | |||||
| throw_user_error<errc::unknown_test_driver>( | |||||
| "Unknown 'test_driver' '{}' (Did you mean '{}'?)", | |||||
| test_driver, | |||||
| dym); | |||||
| } | |||||
| return semester::dc_accept; | |||||
| }}, | |||||
| [&](auto key, auto&&) { | |||||
| return semester::dc_reject_t{ | |||||
| fmt::format("Unknown key `{}` in package manifest", key)}; | |||||
| }}}); | |||||
| auto rej = std::get_if<semester::dc_reject_t>(&res); | |||||
| if (rej) { | |||||
| throw_user_error<errc::invalid_pkg_manifest>(rej->message); | |||||
| } | |||||
| if (ret.pkg_id.name.empty()) { | |||||
| throw_user_error<errc::invalid_pkg_manifest>("The 'name' field is required."); | |||||
| } | |||||
| if (ret.namespace_.empty()) { | |||||
| throw_user_error<errc::invalid_pkg_manifest>("The 'namespace'` field is required."); | |||||
| } | |||||
| return ret; | |||||
| } | } | ||||
| std::optional<fs::path> package_manifest::find_in_directory(path_ref dirpath) { | std::optional<fs::path> package_manifest::find_in_directory(path_ref dirpath) { | ||||
| } | } | ||||
| } | } | ||||
| auto dds_fname = dirpath / "package.dds"; | |||||
| if (fs::is_regular_file(dds_fname)) { | |||||
| return dds_fname; | |||||
| } | |||||
| return std::nullopt; | return std::nullopt; | ||||
| } | } | ||||
| if (!found.has_value()) { | if (!found.has_value()) { | ||||
| return std::nullopt; | return std::nullopt; | ||||
| } | } | ||||
| if (found->extension() == ".dds") { | |||||
| return load_from_dds_file(*found); | |||||
| } else { | |||||
| return load_from_file(*found); | |||||
| } | |||||
| } | |||||
| return load_from_file(*found); | |||||
| } |
| * Load a package manifest from a file on disk. | * Load a package manifest from a file on disk. | ||||
| */ | */ | ||||
| static package_manifest load_from_file(path_ref); | static package_manifest load_from_file(path_ref); | ||||
| static package_manifest load_from_dds_file(path_ref); | |||||
| /** | /** | ||||
| * Find a package manifest contained within a directory. This will search | * Find a package manifest contained within a directory. This will search |
| #pragma once | #pragma once | ||||
| #include <chrono> | |||||
| #include <optional> | |||||
| #include <string> | #include <string> | ||||
| #include <string_view> | #include <string_view> | ||||
| #include <vector> | #include <vector> | ||||
| struct proc_result { | struct proc_result { | ||||
| int signal = 0; | int signal = 0; | ||||
| int retc = 0; | int retc = 0; | ||||
| bool timed_out = false; | |||||
| std::string output; | std::string output; | ||||
| bool okay() const noexcept { return retc == 0 && signal == 0; } | bool okay() const noexcept { return retc == 0 && signal == 0; } | ||||
| }; | }; | ||||
| proc_result run_proc(const std::vector<std::string>& args); | |||||
| struct proc_options { | |||||
| std::vector<std::string> command; | |||||
| /** | |||||
| * Timeout for the subprocess, in milliseconds. If zero, will wait forever | |||||
| */ | |||||
| std::optional<std::chrono::milliseconds> timeout = std::nullopt; | |||||
| }; | |||||
| proc_result run_proc(const proc_options& opts); | |||||
| inline proc_result run_proc(std::vector<std::string> args) { | |||||
| return run_proc(proc_options{.command = std::move(args)}); | |||||
| } | |||||
| } // namespace dds | } // namespace dds |
| #include <spdlog/spdlog.h> | #include <spdlog/spdlog.h> | ||||
| #include <poll.h> | #include <poll.h> | ||||
| #include <signal.h> | |||||
| #include <sys/wait.h> | #include <sys/wait.h> | ||||
| #include <unistd.h> | #include <unistd.h> | ||||
| } // namespace | } // namespace | ||||
| proc_result dds::run_proc(const std::vector<std::string>& command) { | |||||
| spdlog::debug("Spawning subprocess: {}", quote_command(command)); | |||||
| proc_result dds::run_proc(const proc_options& opts) { | |||||
| spdlog::debug("Spawning subprocess: {}", quote_command(opts.command)); | |||||
| int stdio_pipe[2] = {}; | int stdio_pipe[2] = {}; | ||||
| auto rc = ::pipe(stdio_pipe); | auto rc = ::pipe(stdio_pipe); | ||||
| check_rc(rc == 0, "Create stdio pipe for subprocess"); | check_rc(rc == 0, "Create stdio pipe for subprocess"); | ||||
| int read_pipe = stdio_pipe[0]; | int read_pipe = stdio_pipe[0]; | ||||
| int write_pipe = stdio_pipe[1]; | int write_pipe = stdio_pipe[1]; | ||||
| auto child = spawn_child(command, write_pipe, read_pipe); | |||||
| auto child = spawn_child(opts.command, write_pipe, read_pipe); | |||||
| ::close(write_pipe); | ::close(write_pipe); | ||||
| proc_result res; | proc_result res; | ||||
| using namespace std::chrono_literals; | |||||
| auto timeout = opts.timeout; | |||||
| while (true) { | while (true) { | ||||
| rc = ::poll(&stdio_fd, 1, -1); | |||||
| rc = ::poll(&stdio_fd, 1, static_cast<int>(timeout.value_or(-1ms).count())); | |||||
| if (rc && errno == EINTR) { | if (rc && errno == EINTR) { | ||||
| errno = 0; | errno = 0; | ||||
| continue; | continue; | ||||
| } | } | ||||
| check_rc(rc > 0, "Failed in poll()"); | |||||
| if (rc == 0) { | |||||
| // Timeout! | |||||
| ::kill(child, SIGINT); | |||||
| timeout = std::nullopt; | |||||
| res.timed_out = true; | |||||
| spdlog::debug("Subprocess [{}] timed out", quote_command(opts.command)); | |||||
| continue; | |||||
| } | |||||
| std::string buffer; | std::string buffer; | ||||
| buffer.resize(1024); | buffer.resize(1024); | ||||
| auto nread = ::read(stdio_fd.fd, buffer.data(), buffer.size()); | auto nread = ::read(stdio_fd.fd, buffer.data(), buffer.size()); |
| #ifdef _WIN32 | #ifdef _WIN32 | ||||
| #include "./proc.hpp" | #include "./proc.hpp" | ||||
| #include <neo/assert.hpp> | |||||
| #include <spdlog/spdlog.h> | #include <spdlog/spdlog.h> | ||||
| #include <wil/resource.h> | #include <wil/resource.h> | ||||
| #include <stdexcept> | #include <stdexcept> | ||||
| using namespace dds; | using namespace dds; | ||||
| using namespace std::chrono_literals; | |||||
| namespace { | namespace { | ||||
| } // namespace | } // namespace | ||||
| proc_result dds::run_proc(const std::vector<std::string>& cmd) { | |||||
| auto cmd_str = quote_command(cmd); | |||||
| proc_result dds::run_proc(const proc_options& opts) { | |||||
| auto cmd_str = quote_command(opts.command); | |||||
| ::SECURITY_ATTRIBUTES security = {}; | ::SECURITY_ATTRIBUTES security = {}; | ||||
| security.bInheritHandle = TRUE; | security.bInheritHandle = TRUE; | ||||
| } | } | ||||
| ::SetHandleInformation(reader.get(), HANDLE_FLAG_INHERIT, 0); | ::SetHandleInformation(reader.get(), HANDLE_FLAG_INHERIT, 0); | ||||
| ::COMMTIMEOUTS timeouts; | |||||
| ::GetCommTimeouts(reader.get(), &timeouts); | |||||
| wil::unique_process_information proc_info; | wil::unique_process_information proc_info; | ||||
| nullptr, | nullptr, | ||||
| nullptr, | nullptr, | ||||
| true, | true, | ||||
| 0, | |||||
| CREATE_NEW_PROCESS_GROUP, | |||||
| nullptr, | nullptr, | ||||
| nullptr, | nullptr, | ||||
| &startup_info, | &startup_info, | ||||
| writer.reset(); | writer.reset(); | ||||
| std::string output; | std::string output; | ||||
| proc_result res; | |||||
| auto timeout = opts.timeout; | |||||
| while (true) { | while (true) { | ||||
| const int buffer_size = 256; | const int buffer_size = 256; | ||||
| char buffer[buffer_size]; | char buffer[buffer_size]; | ||||
| DWORD nread = 0; | DWORD nread = 0; | ||||
| okay = ::ReadFile(reader.get(), buffer, buffer_size, &nread, nullptr); | |||||
| // Reload the timeout on the pipe | |||||
| timeouts.ReadTotalTimeoutConstant = static_cast<DWORD>(timeout.value_or(0ms).count()); | |||||
| ::SetCommTimeouts(reader.get(), &timeouts); | |||||
| // Read some bytes from the process | |||||
| okay = ::ReadFile(reader.get(), buffer, buffer_size, &nread, nullptr); | |||||
| if (!okay && ::GetLastError() == ERROR_TIMEOUT) { | |||||
| // We didn't read any bytes. Hit the timeout | |||||
| neo_assert_always(invariant, | |||||
| nread == 0, | |||||
| "Didn't expect to read bytes when a timeout was reached", | |||||
| nread, | |||||
| timeout->count()); | |||||
| res.timed_out = true; | |||||
| timeout = std::nullopt; | |||||
| ::GenerateConsoleCtrlEvent(CTRL_C_EVENT, proc_info.dwProcessId); | |||||
| continue; | |||||
| } | |||||
| if (!okay && ::GetLastError() != ERROR_BROKEN_PIPE) { | if (!okay && ::GetLastError() != ERROR_BROKEN_PIPE) { | ||||
| throw_system_error("Failed while reading from the stdio pipe"); | throw_system_error("Failed while reading from the stdio pipe"); | ||||
| } | } | ||||
| throw_system_error("Failed reading exit code of process"); | throw_system_error("Failed reading exit code of process"); | ||||
| } | } | ||||
| proc_result res; | |||||
| res.retc = rc; | res.retc = rc; | ||||
| res.output = std::move(output); | res.output = std::move(output); | ||||
| return res; | return res; |
| void sdist_export_file(path_ref out_root, path_ref in_root, path_ref filepath) { | void sdist_export_file(path_ref out_root, path_ref in_root, path_ref filepath) { | ||||
| auto relpath = fs::relative(filepath, in_root); | auto relpath = fs::relative(filepath, in_root); | ||||
| spdlog::info("Export file {}", relpath.string()); | |||||
| spdlog::debug("Export file {}", relpath.string()); | |||||
| auto dest = out_root / relpath; | auto dest = out_root / relpath; | ||||
| fs::create_directories(dest.parent_path()); | fs::create_directories(dest.parent_path()); | ||||
| fs::copy(filepath, dest); | fs::copy(filepath, dest); | ||||
| params.project_dir.string()); | params.project_dir.string()); | ||||
| } | } | ||||
| auto pkg_man = man_path->extension() == ".dds" ? package_manifest::load_from_dds_file(*man_path) | |||||
| : package_manifest::load_from_file(*man_path); | |||||
| auto pkg_man = package_manifest::load_from_file(*man_path); | |||||
| sdist_export_file(out, params.project_dir, *man_path); | sdist_export_file(out, params.project_dir, *man_path); | ||||
| spdlog::info("Generated export as {}", pkg_man.pkg_id.to_string()); | spdlog::info("Generated export as {}", pkg_man.pkg_id.to_string()); | ||||
| return sdist::from_directory(out); | return sdist::from_directory(out); |
| ".hh", | ".hh", | ||||
| ".hpp", | ".hpp", | ||||
| ".hxx", | ".hxx", | ||||
| ".inc", | |||||
| ".inl", | ".inl", | ||||
| ".ipp", | ".ipp", | ||||
| }; | }; |
| }; | }; | ||||
| }; | }; | ||||
| struct fs_transformation::edit pending_edit; | |||||
| fs_transformation::one_edit pending_edit_item; | |||||
| walk(data, | walk(data, | ||||
| require_obj{"Each transform must be a JSON object"}, | require_obj{"Each transform must be a JSON object"}, | ||||
| mapping{ | mapping{ | ||||
| require_str{"'content' must be a string"}, | require_str{"'content' must be a string"}, | ||||
| put_into(ret.write->content)}, | put_into(ret.write->content)}, | ||||
| }}, | }}, | ||||
| if_key{ | |||||
| "edit", | |||||
| require_obj{"'edit' should be a JSON object"}, | |||||
| prep_optional(ret.edit), | |||||
| mapping{ | |||||
| required_key{"path", | |||||
| "'path' is required", | |||||
| require_str{"'path' should be a string path"}, | |||||
| put_into(ret.edit->path, str_to_path)}, | |||||
| required_key{ | |||||
| "edits", | |||||
| "An 'edits' array is required", | |||||
| require_array{"'edits' should be an array"}, | |||||
| for_each{ | |||||
| require_obj{"Each edit should be a JSON object"}, | |||||
| [&](auto&&) { | |||||
| ret.edit->edits.emplace_back(); | |||||
| return walk.pass; | |||||
| }, | |||||
| [&](auto&& dat) { | |||||
| return mapping{ | |||||
| required_key{ | |||||
| "kind", | |||||
| "Edit 'kind' is required", | |||||
| require_str{"'kind' should be a string"}, | |||||
| [&](std::string s) { | |||||
| auto& ed = ret.edit->edits.back(); | |||||
| if (s == "delete") { | |||||
| ed.kind = ed.delete_; | |||||
| } else if (s == "insert") { | |||||
| ed.kind = ed.insert; | |||||
| } else { | |||||
| return walk.reject("Invalid edit kind"); | |||||
| } | |||||
| return walk.accept; | |||||
| }, | |||||
| }, | |||||
| required_key{ | |||||
| "line", | |||||
| "Edit 'line' number is required", | |||||
| require_type<double>{"'line' should be an integer"}, | |||||
| [&](double d) { | |||||
| ret.edit->edits.back().line = int(d); | |||||
| return walk.accept; | |||||
| }, | |||||
| }, | |||||
| if_key{ | |||||
| "content", | |||||
| require_str{"'content' should be a string"}, | |||||
| [&](std::string s) { | |||||
| ret.edit->edits.back().content = s; | |||||
| return walk.accept; | |||||
| }, | |||||
| }, | |||||
| }(dat); | |||||
| }, | |||||
| }, | |||||
| }, | |||||
| }, | |||||
| }, | |||||
| }); | }); | ||||
| return ret; | return ret; | ||||
| if (child.is_directory()) { | if (child.is_directory()) { | ||||
| continue; | continue; | ||||
| } | } | ||||
| if (!oper.only_matching.empty() && !matches_any(child, oper.only_matching)) { | |||||
| auto relpath = child.path().lexically_proximate(from); | |||||
| if (!oper.only_matching.empty() && !matches_any(relpath, oper.only_matching)) { | |||||
| continue; | continue; | ||||
| } | } | ||||
| fs::remove_all(child); | fs::remove_all(child); | ||||
| root.string()); | root.string()); | ||||
| } | } | ||||
| std::cout << "Write content: " << oper.content; | |||||
| auto of = dds::open(dest, std::ios::binary | std::ios::out); | auto of = dds::open(dest, std::ios::binary | std::ios::out); | ||||
| of << oper.content; | of << oper.content; | ||||
| } | } | ||||
| void do_edit(path_ref filepath, const fs_transformation::one_edit& edit) { | |||||
| auto file = open(filepath, std::ios::in | std::ios::binary); | |||||
| file.exceptions(std::ios::badbit); | |||||
| std::string lines; | |||||
| std::string line; | |||||
| int line_n = 1; | |||||
| for (; std::getline(file, line, '\n'); ++line_n) { | |||||
| if (line_n != edit.line) { | |||||
| lines += line + "\n"; | |||||
| continue; | |||||
| } | |||||
| switch (edit.kind) { | |||||
| case edit.delete_: | |||||
| // Just delete the line. Ignore it. | |||||
| continue; | |||||
| case edit.insert: | |||||
| // Insert some new content | |||||
| lines += edit.content + "\n"; | |||||
| lines += line + "\n"; | |||||
| continue; | |||||
| } | |||||
| } | |||||
| file = open(filepath, std::ios::out | std::ios::binary); | |||||
| file << lines; | |||||
| } | |||||
| } // namespace | } // namespace | ||||
| void dds::fs_transformation::apply_to(dds::path_ref root_) const { | void dds::fs_transformation::apply_to(dds::path_ref root_) const { | ||||
| if (write) { | if (write) { | ||||
| do_write(*write, root); | do_write(*write, root); | ||||
| } | } | ||||
| if (edit) { | |||||
| auto fpath = root / edit->path; | |||||
| if (!parent_dir_of(root, fpath)) { | |||||
| throw_external_error<errc::invalid_repo_transform>( | |||||
| "Filesystem transformation wants to edit a file outside of the root. Attempted to " | |||||
| "modify [{}]. Writing is restricted to [{}].", | |||||
| fpath.string(), | |||||
| root.string()); | |||||
| } | |||||
| for (auto&& ed : edit->edits) { | |||||
| do_edit(fpath, ed); | |||||
| } | |||||
| } | |||||
| } | } | ||||
| namespace { | namespace { | ||||
| for (auto&& gl : remove->only_matching) { | for (auto&& gl : remove->only_matching) { | ||||
| if_arr.push_back(gl.string()); | if_arr.push_back(gl.string()); | ||||
| } | } | ||||
| rm["only-matching"] = rm; | |||||
| rm["only-matching"] = if_arr; | |||||
| } | } | ||||
| obj["remove"] = rm; | obj["remove"] = rm; | ||||
| } | } | ||||
| wr["content"] = write->content; | wr["content"] = write->content; | ||||
| obj["write"] = wr; | obj["write"] = wr; | ||||
| } | } | ||||
| if (edit) { | |||||
| auto ed = nlohmann::json::object(); | |||||
| ed["path"] = edit->path.string(); | |||||
| auto edits = nlohmann::json::array(); | |||||
| for (auto&& one : edit->edits) { | |||||
| auto one_ed = nlohmann::json::object(); | |||||
| one_ed["kind"] = one.kind == one.delete_ ? "delete" : "insert"; | |||||
| one_ed["line"] = one.line; | |||||
| one_ed["content"] = one.content; | |||||
| edits.push_back(std::move(one_ed)); | |||||
| } | |||||
| ed["edits"] = edits; | |||||
| obj["edit"] = ed; | |||||
| } | |||||
| return to_string(obj); | return to_string(obj); | ||||
| } | } |
| std::string content; | std::string content; | ||||
| }; | }; | ||||
| std::optional<struct copy> copy; | |||||
| std::optional<struct move> move; | |||||
| std::optional<remove> remove; | |||||
| std::optional<struct write> write; | |||||
| struct one_edit { | |||||
| int line = 0; | |||||
| std::string content; | |||||
| enum kind_t { | |||||
| delete_, | |||||
| insert, | |||||
| } kind | |||||
| = delete_; | |||||
| }; | |||||
| struct edit { | |||||
| fs::path path; | |||||
| std::vector<one_edit> edits; | |||||
| }; | |||||
| std::optional<struct copy> copy; | |||||
| std::optional<struct move> move; | |||||
| std::optional<struct remove> remove; | |||||
| std::optional<struct write> write; | |||||
| std::optional<struct edit> edit; | |||||
| void apply_to(path_ref root) const; | void apply_to(path_ref root) const; | ||||
| } else { | } else { | ||||
| // An rglob pattern "**". Check by peeling of individual path elements | // An rglob pattern "**". Check by peeling of individual path elements | ||||
| const auto next_pat = std::next(pat_it); | const auto next_pat = std::next(pat_it); | ||||
| if (next_pat == pat_stop) { | |||||
| // The "**" is at the end of the glob. This matches everything. | |||||
| return true; | |||||
| } | |||||
| for (; elem_it != elem_stop; ++elem_it) { | for (; elem_it != elem_stop; ++elem_it) { | ||||
| if (check_matches(elem_it, elem_stop, next_pat, pat_stop)) { | if (check_matches(elem_it, elem_stop, next_pat, pat_stop)) { | ||||
| return true; | return true; |
| CHECK(glob.match("foo/thing/bar/thing/baz.txt")); | CHECK(glob.match("foo/thing/bar/thing/baz.txt")); | ||||
| CHECK(glob.match("foo/bar/thing/baz.txt")); | CHECK(glob.match("foo/bar/thing/baz.txt")); | ||||
| CHECK(glob.match("foo/bar/baz/baz.txt")); | CHECK(glob.match("foo/bar/baz/baz.txt")); | ||||
| glob = dds::glob::compile("doc/**"); | |||||
| CHECK(glob.match("doc/something.txt")); | |||||
| } | } |
| #include "./parallel.hpp" | |||||
| #include <spdlog/spdlog.h> | |||||
| using namespace dds; | |||||
| void dds::log_exception(std::exception_ptr eptr) noexcept { | |||||
| try { | |||||
| std::rethrow_exception(eptr); | |||||
| } catch (const std::exception& e) { | |||||
| spdlog::error(e.what()); | |||||
| } | |||||
| } |
| #pragma once | |||||
| #include <algorithm> | |||||
| #include <mutex> | |||||
| #include <stdexcept> | |||||
| #include <thread> | |||||
| #include <vector> | |||||
| namespace dds { | |||||
| void log_exception(std::exception_ptr) noexcept; | |||||
| template <typename Range, typename Func> | |||||
| bool parallel_run(Range&& rng, int n_jobs, Func&& fn) { | |||||
| // We don't bother with a nice thread pool, as the overhead of most build | |||||
| // tasks dwarf the cost of interlocking. | |||||
| std::mutex mut; | |||||
| auto iter = rng.begin(); | |||||
| const auto stop = rng.end(); | |||||
| std::vector<std::exception_ptr> exceptions; | |||||
| auto run_one = [&]() mutable { | |||||
| while (true) { | |||||
| std::unique_lock lk{mut}; | |||||
| if (!exceptions.empty()) { | |||||
| break; | |||||
| } | |||||
| if (iter == stop) { | |||||
| break; | |||||
| } | |||||
| auto&& item = *iter; | |||||
| ++iter; | |||||
| lk.unlock(); | |||||
| try { | |||||
| fn(item); | |||||
| } catch (...) { | |||||
| lk.lock(); | |||||
| exceptions.push_back(std::current_exception()); | |||||
| break; | |||||
| } | |||||
| } | |||||
| }; | |||||
| std::unique_lock lk{mut}; | |||||
| std::vector<std::thread> threads; | |||||
| if (n_jobs < 1) { | |||||
| n_jobs = std::thread::hardware_concurrency() + 2; | |||||
| } | |||||
| std::generate_n(std::back_inserter(threads), n_jobs, [&] { return std::thread(run_one); }); | |||||
| lk.unlock(); | |||||
| for (auto& t : threads) { | |||||
| t.join(); | |||||
| } | |||||
| for (auto eptr : exceptions) { | |||||
| log_exception(eptr); | |||||
| } | |||||
| return exceptions.empty(); | |||||
| } | |||||
| } // namespace dds |
| scope.enter_context( | scope.enter_context( | ||||
| dds.set_contents( | dds.set_contents( | ||||
| 'library.dds', | |||||
| b'Name: TestLibrary', | |||||
| 'library.json5', | |||||
| b'''{ | |||||
| name: 'TestLibrary', | |||||
| }''', | |||||
| )) | )) | ||||
| scope.enter_context( | scope.enter_context( | ||||
| dds.set_contents( | dds.set_contents( | ||||
| 'package.dds', | |||||
| b''' | |||||
| Name: TestProject | |||||
| Version: 0.0.0 | |||||
| ''', | |||||
| 'package.json5', | |||||
| b'''{ | |||||
| name: 'TestProject', | |||||
| version: '0.0.0', | |||||
| namespace: 'test', | |||||
| }''', | |||||
| )) | )) | ||||
| dds.build(tests=True, apps=False, warnings=False) | dds.build(tests=True, apps=False, warnings=False) |
| 'version': 1, | 'version': 1, | ||||
| 'packages': { | 'packages': { | ||||
| 'neo-sqlite3': { | 'neo-sqlite3': { | ||||
| '0.2.2': { | |||||
| 'depends': {}, | |||||
| '0.3.0': { | |||||
| 'git': { | 'git': { | ||||
| 'url': | 'url': | ||||
| 'https://github.com/vector-of-bool/neo-sqlite3.git', | 'https://github.com/vector-of-bool/neo-sqlite3.git', | ||||
| 'ref': | 'ref': | ||||
| '0.2.2', | |||||
| '0.3.0', | |||||
| }, | }, | ||||
| }, | }, | ||||
| }, | }, | ||||
| dds.catalog_import(json_path) | dds.catalog_import(json_path) | ||||
| dds.catalog_get('neo-sqlite3@0.2.2') | |||||
| assert (dds.source_root / 'neo-sqlite3@0.2.2').is_dir() | |||||
| assert (dds.source_root / 'neo-sqlite3@0.2.2/package.dds').is_file() | |||||
| dds.catalog_get('neo-sqlite3@0.3.0') | |||||
| assert (dds.source_root / 'neo-sqlite3@0.3.0').is_dir() | |||||
| assert (dds.source_root / 'neo-sqlite3@0.3.0/package.jsonc').is_file() |
| 'url': 'http://example.com', | 'url': 'http://example.com', | ||||
| 'ref': 'master', | 'ref': 'master', | ||||
| }, | }, | ||||
| 'depends': {}, | |||||
| 'depends': [], | |||||
| }, | }, | ||||
| '1.2.5': { | '1.2.5': { | ||||
| 'git': { | 'git': { |
| project_dir = test_root / params.subdir | project_dir = test_root / params.subdir | ||||
| # Create the instance. Auto-clean when we're done | # Create the instance. Auto-clean when we're done | ||||
| yield scope.enter_context(scoped_dds(test_root, project_dir, request.function.__name__)) | |||||
| yield scope.enter_context( | |||||
| scoped_dds(test_root, project_dir, request.function.__name__)) | |||||
@pytest.fixture
def scope():
    """Yield a fresh ExitStack whose contexts unwind after the test ends."""
    with ExitStack() as stack:
        yield stack
def pytest_addoption(parser):
    """Register the ``--test-deps`` flag that opts in to the slow deps tests."""
    flag_opts = {
        'action': 'store_true',
        'default': False,
        'help': 'Run the exhaustive and intensive dds-deps tests',
    }
    parser.addoption('--test-deps', **flag_opts)
def pytest_configure(config):
    """Declare the custom ``deps_test`` marker so pytest does not warn on it."""
    marker_doc = 'deps_test: Deps tests are slow. Enable with --test-deps'
    config.addinivalue_line('markers', marker_doc)
def pytest_collection_modifyitems(config, items):
    """Skip every ``deps_test``-marked item unless ``--test-deps`` was given."""
    if config.getoption('--test-deps'):
        # The user opted in: leave the collection untouched.
        return
    for item in items:
        if 'deps_test' in item.keywords:
            item.add_marker(
                pytest.mark.skip(
                    reason=
                    'Exhaustive deps tests are slow and perform many Git clones. Use --test-deps to run them.'
                ))
| "url": "https://github.com/vector-of-bool/neo-sqlite3.git", | "url": "https://github.com/vector-of-bool/neo-sqlite3.git", | ||||
| "ref": "0.1.0" | "ref": "0.1.0" | ||||
| }, | }, | ||||
| "depends": {} | |||||
| }, | }, | ||||
| "0.2.2": { | "0.2.2": { | ||||
| "git": { | "git": { | ||||
| "url": "https://github.com/vector-of-bool/neo-sqlite3.git", | "url": "https://github.com/vector-of-bool/neo-sqlite3.git", | ||||
| "ref": "0.2.2" | "ref": "0.2.2" | ||||
| }, | }, | ||||
| "depends": {} | |||||
| }, | |||||
| "0.3.0": { | |||||
| "git": { | |||||
| "url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||||
| "ref": "0.3.0" | |||||
| }, | |||||
| } | } | ||||
| } | } | ||||
| } | } |
| { | { | ||||
| depends: { | depends: { | ||||
'neo-sqlite3': '+0.3.0',
| }, | }, | ||||
| } | } |
| assert not dds.deps_build_dir.is_dir() | assert not dds.deps_build_dir.is_dir() | ||||
| dds.catalog_import(dds.source_root / 'catalog.json') | dds.catalog_import(dds.source_root / 'catalog.json') | ||||
| dds.build_deps(['-d', 'deps.json5']) | dds.build_deps(['-d', 'deps.json5']) | ||||
| assert (dds.deps_build_dir / 'neo-sqlite3@0.2.2').is_dir() | |||||
| assert (dds.deps_build_dir / 'neo-sqlite3@0.3.0').is_dir() | |||||
| assert (dds.scratch_dir / 'INDEX.lmi').is_file() | assert (dds.scratch_dir / 'INDEX.lmi').is_file() | ||||
| assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file() | ||||
| assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() | assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file() | ||||
def test_build_deps_from_cmd(dds: DDS):
    """Build a single dependency given on the command line and verify that
    the dependency tree and libman index/package/library files appear."""
    # Precondition: nothing has been built yet.
    assert not dds.deps_build_dir.is_dir()
    dds.catalog_import(dds.source_root / 'catalog.json')
    # The catalog in this test only provides neo-sqlite3 0.3.0; the stale
    # duplicate 0.2.2 request/assert pair from an earlier revision is removed.
    dds.build_deps(['neo-sqlite3=0.3.0'])
    assert (dds.deps_build_dir / 'neo-sqlite3@0.3.0').is_dir()
    assert (dds.scratch_dir / 'INDEX.lmi').is_file()
    assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file()
    assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file()
def test_multiple_deps(dds: DDS):
    """Build with two overlapping version ranges for the same package and
    verify they resolve to a single satisfying version (0.3.0)."""
    # Precondition: nothing has been built yet.
    assert not dds.deps_build_dir.is_dir()
    dds.catalog_import(dds.source_root / 'catalog.json')
    # The catalog provides 0.3.0, which satisfies both '^0.2.0' and '~0.3.0';
    # the stale duplicate 0.2.x request/assert pair is removed.
    dds.build_deps(['neo-sqlite3^0.2.0', 'neo-sqlite3~0.3.0'])
    assert (dds.deps_build_dir / 'neo-sqlite3@0.3.0').is_dir()
    assert (dds.scratch_dir / 'INDEX.lmi').is_file()
    assert (dds.deps_build_dir / '_libman/neo-sqlite3.lmp').is_file()
    assert (dds.deps_build_dir / '_libman/neo/sqlite3.lml').is_file()
| import json | |||||
| from pathlib import Path | |||||
| from typing import NamedTuple, Sequence, List | |||||
| import pytest | |||||
| from tests import DDS, fileutil | |||||
class DepsCase(NamedTuple):
    """A single dependency smoke-test case: which package to depend on, the
    library usage key, and a C++ source file that exercises the package."""
    dep: str
    usage: str
    source: str

    def setup_root(self, dds: DDS):
        """Write package.json, library.json, and the test source into the
        project root of ``dds``; all files are auto-removed via ``dds.scope``."""
        pkg_doc = {
            'name': 'test-project',
            'namespace': 'test',
            'version': '0.0.0',
            'depends': [self.dep],
        }
        lib_doc = {
            'name': 'test',
            'uses': [self.usage],
        }
        dds.scope.enter_context(
            fileutil.set_contents(dds.source_root / 'package.json',
                                  json.dumps(pkg_doc).encode()))
        dds.scope.enter_context(
            fileutil.set_contents(dds.source_root / 'library.json',
                                  json.dumps(lib_doc).encode()))
        dds.scope.enter_context(
            fileutil.set_contents(dds.source_root / 'src/test.test.cpp',
                                  self.source.encode()))
| CASES: List[DepsCase] = [] | |||||
def get_default_pkg_versions(pkg: str) -> Sequence[str]:
    """Return every version listed for ``pkg`` in the repo-root catalog.json."""
    repo_root = Path(__file__).resolve().parents[2]
    catalog = json.loads((repo_root / 'catalog.json').read_text())
    return list(catalog['packages'][pkg].keys())
def add_cases(pkg: str, uses: str, versions: Sequence[str], source: str):
    """Append a DepsCase to CASES for every requested version of ``pkg``.

    The sentinel ``['auto']`` expands to all versions the repository catalog
    lists for the package.
    """
    if versions == ['auto']:
        versions = get_default_pkg_versions(pkg)
    CASES.extend(DepsCase(f'{pkg}@{ver}', uses, source) for ver in versions)
| # magic_enum tests | |||||
| """ | |||||
| ## ## ### ###### #### ###### ######## ## ## ## ## ## ## | |||||
| ### ### ## ## ## ## ## ## ## ## ### ## ## ## ### ### | |||||
| #### #### ## ## ## ## ## ## #### ## ## ## #### #### | |||||
| ## ### ## ## ## ## #### ## ## ###### ## ## ## ## ## ## ### ## | |||||
| ## ## ######### ## ## ## ## ## ## #### ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ### ## ## ## ## | |||||
| ## ## ## ## ###### #### ###### ####### ######## ## ## ####### ## ## | |||||
| """ | |||||
| add_cases( | |||||
| 'magic_enum', 'neargye/magic_enum', ['auto'], r''' | |||||
| #include <magic_enum.hpp> | |||||
| #include <string_view> | |||||
| enum my_enum { | |||||
| foo, | |||||
| bar, | |||||
| }; | |||||
| int main() { | |||||
| if (magic_enum::enum_name(my_enum::foo) != "foo") { | |||||
| return 1; | |||||
| } | |||||
| } | |||||
| ''') | |||||
| # Range-v3 tests | |||||
| """ | |||||
| ######## ### ## ## ###### ######## ## ## ####### | |||||
| ## ## ## ## ### ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## #### ## ## ## ## ## ## | |||||
| ######## ## ## ## ## ## ## #### ###### ####### ## ## ####### | |||||
| ## ## ######### ## #### ## ## ## ## ## ## | |||||
| ## ## ## ## ## ### ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ###### ######## ### ####### | |||||
| """ | |||||
| add_cases( | |||||
| 'range-v3', 'range-v3/range-v3', ['auto'], r''' | |||||
| #include <range/v3/algorithm/remove_if.hpp> | |||||
| #include <vector> | |||||
| #include <algorithm> | |||||
| int main() { | |||||
| std::vector<int> nums = {1, 2, 3, 5, 1, 4, 2, 7, 8, 0, 9}; | |||||
| auto end = ranges::remove_if(nums, [](auto i) { return i % 2; }); | |||||
| return std::distance(nums.begin(), end) != 5; | |||||
| } | |||||
| ''') | |||||
| # nlohmann-json | |||||
| """ | |||||
| ## ## ## ####### ## ## ## ## ### ## ## ## ## ## ###### ####### ## ## | |||||
| ### ## ## ## ## ## ## ### ### ## ## ### ## ### ## ## ## ## ## ## ### ## | |||||
| #### ## ## ## ## ## ## #### #### ## ## #### ## #### ## ## ## ## ## #### ## | |||||
| ## ## ## ## ## ## ######### ## ### ## ## ## ## ## ## ## ## ## ####### ## ###### ## ## ## ## ## | |||||
| ## #### ## ## ## ## ## ## ## ######### ## #### ## #### ## ## ## ## ## ## #### | |||||
| ## ### ## ## ## ## ## ## ## ## ## ## ### ## ### ## ## ## ## ## ## ## ### | |||||
| ## ## ######## ####### ## ## ## ## ## ## ## ## ## ## ###### ###### ####### ## ## | |||||
| """ | |||||
| add_cases('nlohmann-json', 'nlohmann/json', ['auto'], r''' | |||||
| #include <nlohmann/json.hpp> | |||||
| int main() {} | |||||
| ''') | |||||
| # ctre | |||||
| """ | |||||
| ###### ######## ######## ######## | |||||
| ## ## ## ## ## ## | |||||
| ## ## ## ## ## | |||||
| ## ## ######## ###### | |||||
| ## ## ## ## ## | |||||
| ## ## ## ## ## ## | |||||
| ###### ## ## ## ######## | |||||
| """ | |||||
| add_cases( | |||||
| 'ctre', 'hanickadot/ctre', ['auto'], r''' | |||||
| #include <ctre.hpp> | |||||
| constexpr ctll::fixed_string MY_REGEX{"\\w+-[0-9]+"}; | |||||
| int main() { | |||||
| auto [did_match] = ctre::match<MY_REGEX>("foo-44"); | |||||
| if (!did_match) { | |||||
| return 1; | |||||
| } | |||||
| auto [did_match_2] = ctre::match<MY_REGEX>("bar-1ff"); | |||||
| if (did_match_2) { | |||||
| return 2; | |||||
| } | |||||
| } | |||||
| ''') | |||||
| # fmt | |||||
| """ | |||||
| ######## ## ## ######## | |||||
| ## ### ### ## | |||||
| ## #### #### ## | |||||
| ###### ## ### ## ## | |||||
| ## ## ## ## | |||||
| ## ## ## ## | |||||
| ## ## ## ## | |||||
| """ | |||||
| add_cases( | |||||
| 'fmt', 'fmt/fmt', ['auto'], r''' | |||||
| #include <fmt/core.h> | |||||
| int main() { | |||||
| fmt::print("Hello!"); | |||||
| } | |||||
| ''') | |||||
| # Catch2 | |||||
| """ | |||||
| ###### ### ######## ###### ## ## ####### | |||||
| ## ## ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ######### ####### | |||||
| ## ######### ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ## | |||||
| ###### ## ## ## ###### ## ## ######### | |||||
| """ | |||||
| add_cases( | |||||
| 'catch2', 'catch2/catch2', ['auto'], r''' | |||||
| #include <catch2/catch_with_main.hpp> | |||||
| TEST_CASE("I am a test case") { | |||||
| CHECK((2 + 2) == 4); | |||||
| CHECK_FALSE((2 + 2) == 5); | |||||
| } | |||||
| ''') | |||||
| # Asio | |||||
| """ | |||||
| ### ###### #### ####### | |||||
| ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## | |||||
| ## ## ###### ## ## ## | |||||
| ######### ## ## ## ## | |||||
| ## ## ## ## ## ## ## | |||||
| ## ## ###### #### ####### | |||||
| """ | |||||
| add_cases( | |||||
| 'asio', 'asio/asio', ['auto'], r''' | |||||
| #include <asio.hpp> | |||||
| int main() { | |||||
| asio::io_context ioc; | |||||
| int retcode = 12; | |||||
| ioc.post([&] { | |||||
| retcode = 0; | |||||
| }); | |||||
| ioc.run(); | |||||
| return retcode; | |||||
| } | |||||
| ''') | |||||
| # Abseil | |||||
| """ | |||||
| ### ######## ###### ######## #### ## | |||||
| ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## | |||||
| ## ## ######## ###### ###### ## ## | |||||
| ######### ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## | |||||
| ## ## ######## ###### ######## #### ######## | |||||
| """ | |||||
| add_cases( | |||||
| 'abseil', 'abseil/abseil', ['auto'], r''' | |||||
| #include <absl/strings/str_cat.h> | |||||
| int main() { | |||||
| std::string_view foo = "foo"; | |||||
| std::string_view bar = "bar"; | |||||
| auto cat = absl::StrCat(foo, bar); | |||||
| return cat != "foobar"; | |||||
| } | |||||
| ''') | |||||
| # Zlib | |||||
| """ | |||||
| ######## ## #### ######## | |||||
| ## ## ## ## ## | |||||
| ## ## ## ## ## | |||||
| ## ## ## ######## | |||||
| ## ## ## ## ## | |||||
| ## ## ## ## ## | |||||
| ######## ######## #### ######## | |||||
| """ | |||||
| add_cases( | |||||
| 'zlib', 'zlib/zlib', ['auto'], r''' | |||||
| #include <zlib.h> | |||||
| #include <cassert> | |||||
| int main() { | |||||
| ::z_stream strm = {}; | |||||
| deflateInit(&strm, 6); | |||||
| const char buffer[] = "foo bar baz"; | |||||
| strm.next_in = (Bytef*)buffer; | |||||
| strm.avail_in = sizeof buffer; | |||||
| char dest[256] = {}; | |||||
| strm.next_out = (Bytef*)dest; | |||||
| strm.avail_out = sizeof dest; | |||||
| auto ret = deflate(&strm, Z_FINISH); | |||||
| deflateEnd(&strm); | |||||
| assert(ret == Z_STREAM_END); | |||||
| assert(strm.avail_in == 0); | |||||
| assert(strm.avail_out != sizeof dest); | |||||
| } | |||||
| ''') | |||||
| # sol2 | |||||
| """ | |||||
| ###### ####### ## ####### | |||||
| ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## | |||||
| ###### ## ## ## ####### | |||||
| ## ## ## ## ## | |||||
| ## ## ## ## ## ## | |||||
| ###### ####### ######## ######### | |||||
| """ | |||||
| add_cases( | |||||
| 'sol2', 'sol2/sol2', ['3.2.1', '3.2.0', '3.0.3', '3.0.2'], r''' | |||||
| #include <sol/sol.hpp> | |||||
| int main() { | |||||
| sol::state lua; | |||||
| int x = 0; | |||||
| lua.set_function("beepboop", [&]{ ++x; }); | |||||
| lua.script("beepboop()"); | |||||
| return x != 1; | |||||
| } | |||||
| ''') | |||||
| # pegtl | |||||
| """ | |||||
| ######## ######## ###### ######## ## | |||||
| ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## | |||||
| ######## ###### ## #### ## ## | |||||
| ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## | |||||
| ## ######## ###### ## ######## | |||||
| """ | |||||
| add_cases( | |||||
| 'pegtl', 'tao/pegtl', ['auto'], r''' | |||||
| #include <tao/pegtl.hpp> | |||||
| using namespace tao::pegtl; | |||||
| struct sign : one<'+', '-'> {}; | |||||
| struct integer : seq<opt<sign>, plus<digit>> {}; | |||||
| int main() { | |||||
| tao::pegtl::string_input str{"+44", "[test string]"}; | |||||
| tao::pegtl::parse<integer>(str); | |||||
| } | |||||
| ''') | |||||
| # Boost.PFR | |||||
| """ | |||||
| ######## ####### ####### ###### ######## ######## ######## ######## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## ## | |||||
| ######## ## ## ## ## ###### ## ######## ###### ######## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ### ## ## ## ## | |||||
| ######## ####### ####### ###### ## ### ## ## ## ## | |||||
| """ | |||||
| add_cases( | |||||
| 'boost.pfr', 'boost/pfr', ['auto'], r''' | |||||
| #include <iostream> | |||||
| #include <string> | |||||
| #include <boost/pfr/precise.hpp> | |||||
| struct some_person { | |||||
| std::string name; | |||||
| unsigned birth_year; | |||||
| }; | |||||
| int main() { | |||||
| some_person val{"Edgar Allan Poe", 1809}; | |||||
| std::cout << boost::pfr::get<0>(val) // No macro! | |||||
| << " was born in " << boost::pfr::get<1>(val); // Works with any aggregate initializables! | |||||
| return boost::pfr::get<0>(val) != "Edgar Allan Poe"; | |||||
| } | |||||
| ''') | |||||
| # Boost.LEAF | |||||
| """ | |||||
| ## ######## ### ######## | |||||
| ## ## ## ## ## | |||||
| ## ## ## ## ## | |||||
| ## ###### ## ## ###### | |||||
| ## ## ######### ## | |||||
| ## ## ## ## ## | |||||
| ######## ######## ## ## ## | |||||
| """ | |||||
| add_cases( | |||||
| 'boost.leaf', 'boost/leaf', ['auto'], r''' | |||||
| #include <boost/leaf/all.hpp> | |||||
| namespace leaf = boost::leaf; | |||||
| int main() { | |||||
| return leaf::try_handle_all( | |||||
| [&]() -> leaf::result<int> { | |||||
| return 0; | |||||
| }, | |||||
| [](leaf::error_info const&) { | |||||
| return 32; | |||||
| } | |||||
| ); | |||||
| } | |||||
| ''') | |||||
| # Boost.mp11 | |||||
| """ | |||||
| ######## ####### ####### ###### ######## ## ## ######## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ### ### ## ## #### #### | |||||
| ## ## ## ## ## ## ## ## #### #### ## ## ## ## | |||||
| ######## ## ## ## ## ###### ## ## ### ## ######## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ### ## ## ## ## ## | |||||
| ######## ####### ####### ###### ## ### ## ## ## ###### ###### | |||||
| """ | |||||
| add_cases( | |||||
| 'boost.mp11', 'boost/mp11', ['auto'], r''' | |||||
| #include <boost/mp11.hpp> | |||||
| int main() { | |||||
| return boost::mp11::mp_false() == boost::mp11::mp_true(); | |||||
| } | |||||
| ''') | |||||
| # libsodium | |||||
| """ | |||||
| ## #### ######## ###### ####### ######## #### ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## ## ### ### | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## #### #### | |||||
| ## ## ######## ###### ## ## ## ## ## ## ## ## ### ## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## | |||||
| ######## #### ######## ###### ####### ######## #### ####### ## ## | |||||
| """ | |||||
| add_cases( | |||||
| 'libsodium', 'sodium/sodium', ['auto'], r''' | |||||
| #include <sodium.h> | |||||
| #include <algorithm> | |||||
| int main() { | |||||
| char arr[256] = {}; | |||||
| ::randombytes_buf(arr, sizeof arr); | |||||
| for (auto b : arr) { | |||||
| if (b != '\x00') { | |||||
| return 0; | |||||
| } | |||||
| } | |||||
| return 1; | |||||
| } | |||||
| ''') | |||||
| # toml++ | |||||
| """ | |||||
| ######## ####### ## ## ## | |||||
| ## ## ## ### ### ## ## ## | |||||
| ## ## ## #### #### ## ## ## | |||||
| ## ## ## ## ### ## ## ###### ###### | |||||
| ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## | |||||
| ## ####### ## ## ######## | |||||
| """ | |||||
| add_cases( | |||||
| 'tomlpp', 'tomlpp/tomlpp', ['auto'], r''' | |||||
| #include <toml++/toml.h> | |||||
| #include <string_view> | |||||
| int main() { | |||||
| std::string_view sv = R"( | |||||
| [library] | |||||
| something = "cats" | |||||
| person = "Joe" | |||||
| )"; | |||||
| toml::table tbl = toml::parse(sv); | |||||
| return tbl["library"]["person"] != "Joe"; | |||||
| } | |||||
| ''') | |||||
| # Inja | |||||
| """ | |||||
| #### ## ## ## ### | |||||
| ## ### ## ## ## ## | |||||
| ## #### ## ## ## ## | |||||
| ## ## ## ## ## ## ## | |||||
| ## ## #### ## ## ######### | |||||
| ## ## ### ## ## ## ## | |||||
| #### ## ## ###### ## ## | |||||
| """ | |||||
| add_cases( | |||||
| 'inja', 'inja/inja', ['2.0.0', '2.0.1', '2.1.0', '2.2.0'], r''' | |||||
| #include <inja/inja.hpp> | |||||
| #include <nlohmann/json.hpp> | |||||
| int main() { | |||||
| nlohmann::json data; | |||||
| data["foo"] = "bar"; | |||||
| auto result = inja::render("foo {{foo}}", data); | |||||
| return result != "foo bar"; | |||||
| } | |||||
| ''') | |||||
| # Cereal | |||||
| """ | |||||
| ###### ######## ######## ######## ### ## | |||||
| ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## | |||||
| ## ###### ######## ###### ## ## ## | |||||
| ## ## ## ## ## ######### ## | |||||
| ## ## ## ## ## ## ## ## ## | |||||
| ###### ######## ## ## ######## ## ## ######## | |||||
| """ | |||||
| add_cases( | |||||
| 'cereal', 'cereal/cereal', ['auto'], r''' | |||||
| #include <cereal/types/memory.hpp> | |||||
| #include <cereal/types/string.hpp> | |||||
| #include <cereal/archives/binary.hpp> | |||||
| #include <sstream> | |||||
| struct something { | |||||
| int a, b, c; | |||||
| std::string str; | |||||
| template <typename Ar> | |||||
| void serialize(Ar& ar) { | |||||
| ar(a, b, c, str); | |||||
| } | |||||
| }; | |||||
| int main() { | |||||
| std::stringstream strm; | |||||
| cereal::BinaryOutputArchive ar{strm}; | |||||
| something s; | |||||
| ar(s); | |||||
| return 0; | |||||
| } | |||||
| ''') | |||||
| # pcg | |||||
| """ | |||||
| ######## ###### ###### | |||||
| ## ## ## ## ## ## | |||||
| ## ## ## ## | |||||
| ######## ## ## #### | |||||
| ## ## ## ## | |||||
| ## ## ## ## ## | |||||
| ## ###### ###### | |||||
| """ | |||||
| add_cases( | |||||
| 'pcg-cpp', 'pcg/pcg-cpp', ['auto'], r''' | |||||
| #include <pcg_random.hpp> | |||||
| #include <iostream> | |||||
| int main() { | |||||
| pcg64 rng{1729}; | |||||
| return rng() != 14925250045015479985; | |||||
| } | |||||
| ''') | |||||
| # spdlog | |||||
| """ | |||||
| ###### ######## ######## ## ####### ###### | |||||
| ## ## ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## | |||||
| ###### ######## ## ## ## ## ## ## #### | |||||
| ## ## ## ## ## ## ## ## ## | |||||
| ## ## ## ## ## ## ## ## ## ## | |||||
| ###### ## ######## ######## ####### ###### | |||||
| """ | |||||
| add_cases( | |||||
| 'spdlog', 'spdlog/spdlog', ['auto'], r''' | |||||
| #include <spdlog/spdlog.h> | |||||
| int main() { | |||||
| spdlog::info("Howdy!"); | |||||
| } | |||||
| ''') | |||||
@pytest.mark.deps_test
@pytest.mark.parametrize('case', CASES, ids=[c.dep for c in CASES])
def test_dep(case: DepsCase, dds: DDS) -> None:
    # One full fetch-and-build per registered case: write the project files
    # for this case into the dds sandbox, then build (pulls the dependency).
    case.setup_root(dds)
    dds.build()
| { | { | ||||
| "version": 1, | "version": 1, | ||||
| "packages": { | "packages": { | ||||
"neo-fun": {
    "0.3.2": {
        "git": {
            "url": "https://github.com/vector-of-bool/neo-fun.git",
            "ref": "0.3.2"
        }
| } | } | ||||
| }, | }, | ||||
| "range-v3": { | "range-v3": { | ||||
| "url": "https://github.com/ericniebler/range-v3.git", | "url": "https://github.com/ericniebler/range-v3.git", | ||||
| "ref": "0.9.1", | "ref": "0.9.1", | ||||
| "auto-lib": "Niebler/range-v3" | "auto-lib": "Niebler/range-v3" | ||||
| }, | |||||
| "depends": {} | |||||
| } | |||||
| } | } | ||||
| } | } | ||||
| } | } |
| name: 'deps-test', | name: 'deps-test', | ||||
| "namespace": "test", | "namespace": "test", | ||||
| version: '0.0.0', | version: '0.0.0', | ||||
depends: [
    'neo-fun@0.3.2',
    'range-v3@0.9.1',
]
| } | } |
| { | |||||
| "compiler_id": 'gnu', | |||||
| "cxx_version": 'c++17', | |||||
| "cxx_compiler": 'g++-9', | |||||
| } |
| { | |||||
| "compiler_id": 'msvc', | |||||
| } |
| { | |||||
| "version": 1, | |||||
| "packages": { | |||||
| "catch2": { | |||||
| "2.12.4": { | |||||
| "git": { | |||||
| "url": "https://github.com/catchorg/Catch2.git", | |||||
| "ref": "v2.12.4", | |||||
| "auto-lib": "catch2/catch2", | |||||
| "transform": [ | |||||
| { | |||||
| "move": { | |||||
| "from": "include", | |||||
| "to": "include/catch2", | |||||
| } | |||||
| }, | |||||
| { | |||||
| "copy": { | |||||
| "from": "include", | |||||
| "to": "src" | |||||
| }, | |||||
| write: { | |||||
| path: 'include/catch2/catch_with_main.hpp', | |||||
| content: '\ | |||||
| #pragma once \n\ | |||||
| \n\ | |||||
| #define CATCH_CONFIG_MAIN \n\ | |||||
| #include "./catch.hpp" \n\ | |||||
| \n\ | |||||
| namespace Catch { \n\ | |||||
| \n\ | |||||
| CATCH_REGISTER_REPORTER("console", ConsoleReporter) \n\ | |||||
| \n\ | |||||
| } // namespace Catch \n\ | |||||
| ' | |||||
| } | |||||
| } | |||||
| ] | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| } |
| { | |||||
| name: 'use-catch2', | |||||
| uses: ['catch2/catch2'] | |||||
| } |
| { | |||||
| name: 'use-catch2', | |||||
| version: '1.0.0', | |||||
| namespace: 'test', | |||||
depends: [
    'catch2@2.12.4'
]
| } |
// Smoke test for the transformed Catch2 package: <catch2/catch_with_main.hpp>
// is the header the catalog transform writes, providing a main() via
// CATCH_CONFIG_MAIN, so this file needs no main of its own.
#include <catch2/catch_with_main.hpp>

TEST_CASE("I am a simple test case") {
    CHECK((2 + 2) == 4);
    CHECK_FALSE((2 + 2) == 5);
}
| from tests import DDS | |||||
| from dds_ci import proc | |||||
def test_get_build_use_catch2(dds: DDS):
    """Import the Catch2 catalog, build with the matching toolchain, and run
    the produced ``use-catch2`` executable."""
    dds.catalog_import(dds.source_root / 'catalog.json5')
    use_gcc = 'gcc' in dds.default_builtin_toolchain
    tc_file = 'gcc.tc.jsonc' if use_gcc else 'msvc.tc.jsonc'
    dds.build(toolchain=str(dds.test_dir / tc_file))
    exe = (dds.build_dir / 'use-catch2').with_suffix(dds.exe_suffix)
    proc.check_run(exe)
| name: 'use-cryptopp', | name: 'use-cryptopp', | ||||
| version: '1.0.0', | version: '1.0.0', | ||||
| namespace: 'test', | namespace: 'test', | ||||
| depends: { | |||||
| 'cryptopp': '8.2.0' | |||||
| } | |||||
| depends: [ | |||||
| 'cryptopp@8.2.0' | |||||
| ] | |||||
| } | } |
| from tests import DDS | from tests import DDS | ||||
| import platform | |||||
| import pytest | |||||
| from dds_ci import proc | from dds_ci import proc | ||||
| @pytest.mark.skipif( | |||||
| platform.system() == 'FreeBSD', | |||||
| reason='This one has trouble running on FreeBSD') | |||||
| def test_get_build_use_cryptopp(dds: DDS): | def test_get_build_use_cryptopp(dds: DDS): | ||||
| dds.catalog_import(dds.source_root / 'catalog.json') | dds.catalog_import(dds.source_root / 'catalog.json') | ||||
| tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc' | tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc' |
| { | |||||
| "compiler_id": 'gnu', | |||||
| "cxx_version": 'c++17', | |||||
| "cxx_compiler": 'g++-9', | |||||
| "flags": '-DSODIUM_STATIC', | |||||
| "link_flags": '-static-libgcc -static-libstdc++' | |||||
| } |
| { | |||||
| "compiler_id": 'msvc', | |||||
| "flags": '-DSODIUM_STATIC', | |||||
| } |
| { | |||||
| "version": 1, | |||||
| "packages": { | |||||
| "libsodium": { | |||||
| "1.0.18": { | |||||
| "git": { | |||||
| "url": "https://github.com/jedisct1/libsodium.git", | |||||
| "ref": "1.0.18", | |||||
| "auto-lib": "sodium/sodium", | |||||
| "transform": [ | |||||
| { | |||||
| "move": { | |||||
| "from": "src/libsodium/include", | |||||
| "to": "include/" | |||||
| } | |||||
| }, | |||||
| { | |||||
| "copy": { | |||||
| "from": "builds/msvc/version.h", | |||||
| "to": "include/sodium/version.h" | |||||
| } | |||||
| }, | |||||
| { | |||||
| "move": { | |||||
| "from": "src/libsodium", | |||||
| "to": "src/" | |||||
| }, | |||||
| "remove": { | |||||
| "path": "src/libsodium" | |||||
| } | |||||
| }, | |||||
| { | |||||
| "copy": { | |||||
| "from": "include/", | |||||
| "to": "src/", | |||||
| "strip-components": 1, | |||||
| } | |||||
| } | |||||
| ] | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| } |
| { | |||||
| name: 'use-libsodium', | |||||
| uses: ['sodium/sodium'] | |||||
| } |
| { | |||||
| name: 'use-libsodium', | |||||
| version: '1.0.0', | |||||
| namespace: 'test', | |||||
depends: [
    'libsodium@1.0.18'
]
| } |
| #include <sodium.h> | |||||
| #include <algorithm> | |||||
| int main() { | |||||
| char arr[256] = {}; | |||||
| ::randombytes_buf(arr, sizeof arr); | |||||
| for (auto b : arr) { | |||||
| if (b != '\x00') { | |||||
| return 0; | |||||
| } | |||||
| } | |||||
| return 1; | |||||
| } |
| from tests import DDS | |||||
| from dds_ci import proc | |||||
def test_get_build_use_libsodium(dds: DDS):
    """Import the libsodium catalog, build with the matching toolchain, and
    run the produced ``use-libsodium`` executable."""
    dds.catalog_import(dds.source_root / 'catalog.json')
    use_gcc = 'gcc' in dds.default_builtin_toolchain
    tc_file = 'gcc.tc.jsonc' if use_gcc else 'msvc.tc.jsonc'
    dds.build(toolchain=str(dds.test_dir / tc_file))
    exe = (dds.build_dir / 'use-libsodium').with_suffix(dds.exe_suffix)
    proc.check_run(exe)
| "url": "https://github.com/vector-of-bool/json.git", | "url": "https://github.com/vector-of-bool/json.git", | ||||
| "ref": "dds/3.7.1" | "ref": "dds/3.7.1" | ||||
| }, | }, | ||||
| "depends": {} | |||||
| "depends": [] | |||||
| } | } | ||||
| } | } | ||||
| } | } |
| "name": "json-test", | "name": "json-test", | ||||
| "version": "0.0.0", | "version": "0.0.0", | ||||
| "namespace": "test", | "namespace": "test", | ||||
| "depends": { | |||||
| "nlohmann-json": "3.7.1" | |||||
| } | |||||
| "depends": [ | |||||
| "nlohmann-json@3.7.1" | |||||
| ] | |||||
| } | } |
| "ref": "v1.4.2", | "ref": "v1.4.2", | ||||
| "auto-lib": "spdlog/spdlog" | "auto-lib": "spdlog/spdlog" | ||||
| }, | }, | ||||
| "depends": {} | |||||
| "depends": [] | |||||
| } | } | ||||
| } | } | ||||
| } | } |
| name: 'test', | name: 'test', | ||||
| version: '0.0.0', | version: '0.0.0', | ||||
| "namespace": "test", | "namespace": "test", | ||||
| depends: { | |||||
| 'spdlog': '1.4.2', | |||||
| }, | |||||
| depends: [ | |||||
| 'spdlog@1.4.2', | |||||
| ], | |||||
| } | } |
| import pytest | import pytest | ||||
| from pathlib import Path | from pathlib import Path | ||||
| from typing import Sequence, NamedTuple | from typing import Sequence, NamedTuple | ||||
| import multiprocessing | |||||
| import subprocess | import subprocess | ||||
| import urllib.request | import urllib.request | ||||
| import shutil | import shutil | ||||
| class CIOptions(NamedTuple): | class CIOptions(NamedTuple): | ||||
| toolchain: str | toolchain: str | ||||
| toolchain_2: str | |||||
| def _do_bootstrap_build(opts: CIOptions) -> None: | def _do_bootstrap_build(opts: CIOptions) -> None: | ||||
| help='The toolchain to use for the CI process', | help='The toolchain to use for the CI process', | ||||
| required=True, | required=True, | ||||
| ) | ) | ||||
| parser.add_argument( | |||||
| '--toolchain-2', | |||||
| '-T2', | |||||
| help='The toolchain to use for the self-build', | |||||
| required=True, | |||||
| ) | |||||
| parser.add_argument( | parser.add_argument( | ||||
| '--build-only', | '--build-only', | ||||
| action='store_true', | action='store_true', | ||||
| help='Only build the `dds` executable. Skip second-phase and tests.') | help='Only build the `dds` executable. Skip second-phase and tests.') | ||||
| args = parser.parse_args(argv) | args = parser.parse_args(argv) | ||||
| opts = CIOptions(toolchain=args.toolchain) | |||||
| opts = CIOptions(toolchain=args.toolchain, toolchain_2=args.toolchain_2) | |||||
| if args.bootstrap_with == 'build': | if args.bootstrap_with == 'build': | ||||
| _do_bootstrap_build(opts) | _do_bootstrap_build(opts) | ||||
| if old_cat_path.is_file(): | if old_cat_path.is_file(): | ||||
| old_cat_path.unlink() | old_cat_path.unlink() | ||||
| ci_repo_dir = paths.PREBUILT_DIR / '_ci-repo' | |||||
| ci_repo_dir = paths.PREBUILT_DIR / 'ci-repo' | |||||
| if ci_repo_dir.exists(): | if ci_repo_dir.exists(): | ||||
| shutil.rmtree(ci_repo_dir) | shutil.rmtree(ci_repo_dir) | ||||
| proc.check_run([ | |||||
| self_build( | |||||
| paths.PREBUILT_DDS, | paths.PREBUILT_DDS, | ||||
| 'catalog', | |||||
| 'import', | |||||
| ('--catalog', old_cat_path), | |||||
| ('--json', paths.PROJECT_ROOT / 'catalog.json'), | |||||
| ]) | |||||
| self_build(paths.PREBUILT_DDS, | |||||
| toolchain=opts.toolchain, | |||||
| cat_path=old_cat_path, | |||||
| dds_flags=[('--repo-dir', ci_repo_dir)]) | |||||
| toolchain=opts.toolchain, | |||||
| cat_path=old_cat_path, | |||||
| cat_json_path=Path('catalog.old.json'), | |||||
| dds_flags=[('--repo-dir', ci_repo_dir)]) | |||||
| print('Main build PASSED!') | print('Main build PASSED!') | ||||
| print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}') | print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}') | ||||
| ) | ) | ||||
| return 0 | return 0 | ||||
| print('Bootstrapping myself:') | |||||
| new_cat_path = paths.BUILD_DIR / 'catalog.db' | new_cat_path = paths.BUILD_DIR / 'catalog.db' | ||||
| proc.check_run([ | |||||
| new_repo_dir = paths.BUILD_DIR / 'ci-repo' | |||||
| self_build( | |||||
| paths.CUR_BUILT_DDS, | paths.CUR_BUILT_DDS, | ||||
| 'catalog', | |||||
| 'import', | |||||
| ('--catalog', new_cat_path), | |||||
| ('--json', paths.PROJECT_ROOT / 'catalog.json'), | |||||
| ]) | |||||
| self_build(paths.CUR_BUILT_DDS, | |||||
| toolchain=opts.toolchain, | |||||
| cat_path=new_cat_path, | |||||
| dds_flags=[f'--repo-dir={ci_repo_dir}']) | |||||
| toolchain=opts.toolchain_2, | |||||
| cat_path=new_cat_path, | |||||
| dds_flags=[f'--repo-dir={new_repo_dir}']) | |||||
| print('Bootstrap test PASSED!') | print('Bootstrap test PASSED!') | ||||
| return pytest.main([ | return pytest.main([ | ||||
| '-v', | '-v', | ||||
| '--durations=10', | '--durations=10', | ||||
| f'--basetemp={paths.BUILD_DIR / "_tmp"}', | f'--basetemp={paths.BUILD_DIR / "_tmp"}', | ||||
| '-n4', | |||||
| '-n', | |||||
| str(multiprocessing.cpu_count() + 2), | |||||
| 'tests/', | 'tests/', | ||||
| ]) | ]) | ||||
| "compiler_id": "gnu", | "compiler_id": "gnu", | ||||
| "c_compiler": "gcc9", | "c_compiler": "gcc9", | ||||
| "cxx_compiler": "g++9", | "cxx_compiler": "g++9", | ||||
| // "cxx_version": "c++17", | |||||
| "flags": [ | "flags": [ | ||||
| "-DSPDLOG_COMPILED_LIB", // Required to use a compiled spdlog | "-DSPDLOG_COMPILED_LIB", // Required to use a compiled spdlog | ||||
| "-Werror=return-type", | "-Werror=return-type", |
| { | |||||
| "$schema": "../res/toolchain-schema.json", | |||||
| "compiler_id": "gnu", | |||||
| "c_compiler": "gcc9", | |||||
| "cxx_compiler": "g++9", | |||||
| "flags": [ | |||||
| "-Werror=return-type", | |||||
| ], | |||||
| "cxx_flags": [ | |||||
| "-fconcepts", | |||||
| "-std=c++2a", | |||||
| ], | |||||
| "link_flags": [ | |||||
| "-static-libgcc", | |||||
| "-static-libstdc++", | |||||
| ], | |||||
| // "debug": true, | |||||
| "optimize": true, | |||||
| "compiler_launcher": "ccache" | |||||
| } |
| { | |||||
| "$schema": "../res/toolchain-schema.json", | |||||
| "compiler_id": "gnu", | |||||
| "c_compiler": "gcc-9", | |||||
| "cxx_compiler": "g++-9", | |||||
| "flags": [ | |||||
| "-Werror=return-type", | |||||
| ], | |||||
| "cxx_flags": [ | |||||
| "-fconcepts", | |||||
| "-std=c++2a", | |||||
| ], | |||||
| "link_flags": [ | |||||
| "-static", | |||||
| ], | |||||
| // "debug": true, | |||||
| "optimize": true, | |||||
| "compiler_launcher": "ccache" | |||||
| } |
| "compiler_id": "gnu", | "compiler_id": "gnu", | ||||
| "c_compiler": "gcc-9", | "c_compiler": "gcc-9", | ||||
| "cxx_compiler": "g++-9", | "cxx_compiler": "g++-9", | ||||
| // "cxx_version": "c++17", | |||||
| "flags": [ | "flags": [ | ||||
| "-DSPDLOG_COMPILED_LIB", // Required to use a compiled spdlog | "-DSPDLOG_COMPILED_LIB", // Required to use a compiled spdlog | ||||
| "-Werror=return-type", | "-Werror=return-type", | ||||
| // "-fsanitize=address", | |||||
| ], | ], | ||||
| "cxx_flags": [ | "cxx_flags": [ | ||||
| "-fconcepts", | "-fconcepts", | ||||
| "link_flags": [ | "link_flags": [ | ||||
| "-static-libgcc", | "-static-libgcc", | ||||
| "-static-libstdc++" | "-static-libstdc++" | ||||
| // "-fsanitize=address", | |||||
| // "-fuse-ld=lld", | |||||
| ], | ], | ||||
| // "debug": true, | // "debug": true, | ||||
| "optimize": true, | "optimize": true, |
| { | |||||
| "$schema": "../res/toolchain-schema.json", | |||||
| "compiler_id": "gnu", | |||||
| "c_compiler": "gcc-9", | |||||
| "cxx_compiler": "g++-9", | |||||
| "flags": [ | |||||
| "-Werror=return-type", | |||||
| // "-fsanitize=address", | |||||
| ], | |||||
| "cxx_flags": [ | |||||
| "-fconcepts", | |||||
| "-std=c++2a", | |||||
| ], | |||||
| "link_flags": [ | |||||
| "-static-libgcc", | |||||
| "-static-libstdc++" | |||||
| // "-fsanitize=address", | |||||
| // "-fuse-ld=lld", | |||||
| ], | |||||
| "debug": true, | |||||
| // "optimize": true, | |||||
| "compiler_launcher": "ccache" | |||||
| } |
| import argparse | |||||
| import json | import json | ||||
| import itertools | |||||
| from typing import NamedTuple, Tuple, List, Sequence, Union, Optional, Mapping | from typing import NamedTuple, Tuple, List, Sequence, Union, Optional, Mapping | ||||
| from pathlib import Path | |||||
| import sys | import sys | ||||
| import textwrap | import textwrap | ||||
| class CopyMoveTransform(NamedTuple): | |||||
| frm: str | |||||
| to: str | |||||
| strip_components: int = 0 | |||||
| include: Sequence[str] = [] | |||||
| exclude: Sequence[str] = [] | |||||
| def to_dict(self): | |||||
| return { | |||||
| 'from': self.frm, | |||||
| 'to': self.to, | |||||
| 'include': self.include, | |||||
| 'exclude': self.exclude, | |||||
| 'strip-components': self.strip_components, | |||||
| } | |||||
| class OneEdit(NamedTuple): | |||||
| kind: str | |||||
| line: int | |||||
| content: Optional[str] = None | |||||
| def to_dict(self): | |||||
| d = { | |||||
| 'kind': self.kind, | |||||
| 'line': self.line, | |||||
| } | |||||
| if self.content: | |||||
| d['content'] = self.content | |||||
| return d | |||||
| class EditTransform(NamedTuple): | |||||
| path: str | |||||
| edits: Sequence[OneEdit] = [] | |||||
| def to_dict(self): | |||||
| return { | |||||
| 'path': self.path, | |||||
| 'edits': [e.to_dict() for e in self.edits], | |||||
| } | |||||
| class WriteTransform(NamedTuple): | |||||
| path: str | |||||
| content: str | |||||
| def to_dict(self): | |||||
| return { | |||||
| 'path': self.path, | |||||
| 'content': self.content, | |||||
| } | |||||
| class RemoveTransform(NamedTuple): | |||||
| path: str | |||||
| only_matching: Sequence[str] = () | |||||
| def to_dict(self): | |||||
| return { | |||||
| 'path': self.path, | |||||
| 'only-matching': self.only_matching, | |||||
| } | |||||
| class FSTransform(NamedTuple): | |||||
| copy: Optional[CopyMoveTransform] = None | |||||
| move: Optional[CopyMoveTransform] = None | |||||
| remove: Optional[RemoveTransform] = None | |||||
| write: Optional[WriteTransform] = None | |||||
| edit: Optional[EditTransform] = None | |||||
| def to_dict(self): | |||||
| d = {} | |||||
| if self.copy: | |||||
| d['copy'] = self.copy.to_dict() | |||||
| if self.move: | |||||
| d['move'] = self.move.to_dict() | |||||
| if self.remove: | |||||
| d['remove'] = self.remove.to_dict() | |||||
| if self.write: | |||||
| d['write'] = self.write.to_dict() | |||||
| if self.edit: | |||||
| d['edit'] = self.edit.to_dict() | |||||
| return d | |||||
| class Git(NamedTuple): | class Git(NamedTuple): | ||||
| url: str | url: str | ||||
| ref: str | ref: str | ||||
| auto_lib: Optional[str] = None | auto_lib: Optional[str] = None | ||||
| transforms: Sequence[FSTransform] = [] | |||||
| def to_dict(self) -> dict: | def to_dict(self) -> dict: | ||||
| d = { | d = { | ||||
| 'url': self.url, | 'url': self.url, | ||||
| 'ref': self.ref, | 'ref': self.ref, | ||||
| 'transform': [f.to_dict() for f in self.transforms], | |||||
| } | } | ||||
| if self.auto_lib: | if self.auto_lib: | ||||
| d['auto-lib'] = self.auto_lib | d['auto-lib'] = self.auto_lib | ||||
| def to_dict(self) -> dict: | def to_dict(self) -> dict: | ||||
| ret: dict = { | ret: dict = { | ||||
| 'description': self.description, | 'description': self.description, | ||||
| 'depends': [k + v for k, v in self.depends.items()], | |||||
| } | } | ||||
| ret['depends'] = self.depends | |||||
| if isinstance(self.remote, Git): | if isinstance(self.remote, Git): | ||||
| ret['git'] = self.remote.to_dict() | ret['git'] = self.remote.to_dict() | ||||
| return ret | return ret | ||||
| class VersionSet(NamedTuple): | |||||
| version: str | |||||
| depends: Sequence[Tuple[str, str]] | |||||
| class Package(NamedTuple): | class Package(NamedTuple): | ||||
| name: str | name: str | ||||
| versions: List[Version] | versions: List[Version] | ||||
| def simple_packages(name: str, | |||||
| description: str, | |||||
| git_url: str, | |||||
| versions: Sequence[VersionSet], | |||||
| auto_lib: Optional[str] = None, | |||||
| *, | |||||
| tag_fmt: str = '{}') -> Package: | |||||
| return Package(name, [ | |||||
| Version( | |||||
| ver.version, | |||||
| description=description, | |||||
| remote=Git( | |||||
| git_url, tag_fmt.format(ver.version), auto_lib=auto_lib), | |||||
| depends={dep_name: dep_rng | |||||
| for dep_name, dep_rng in ver.depends}) for ver in versions | |||||
| ]) | |||||
| def many_versions(name: str, | def many_versions(name: str, | ||||
| versions: Sequence[str], | versions: Sequence[str], | ||||
| *, | *, | ||||
| tag_fmt: str = '{}', | tag_fmt: str = '{}', | ||||
| git_url: str, | git_url: str, | ||||
| auto_lib: str = None, | auto_lib: str = None, | ||||
| transforms: Sequence[FSTransform] = (), | |||||
| description='(No description was provided)') -> Package: | description='(No description was provided)') -> Package: | ||||
| return Package(name, [ | return Package(name, [ | ||||
| Version( | Version( | ||||
| ver, | ver, | ||||
| description='\n'.join(textwrap.wrap(description)), | description='\n'.join(textwrap.wrap(description)), | ||||
| remote=Git( | remote=Git( | ||||
| url=git_url, ref=tag_fmt.format(ver), auto_lib=auto_lib)) | |||||
| for ver in versions | |||||
| url=git_url, | |||||
| ref=tag_fmt.format(ver), | |||||
| auto_lib=auto_lib, | |||||
| transforms=transforms)) for ver in versions | |||||
| ]) | ]) | ||||
| packages = [ | |||||
| PACKAGES = [ | |||||
| many_versions( | |||||
| 'magic_enum', | |||||
| ( | |||||
| '0.5.0', | |||||
| '0.6.0', | |||||
| '0.6.1', | |||||
| '0.6.2', | |||||
| '0.6.3', | |||||
| '0.6.4', | |||||
| '0.6.5', | |||||
| '0.6.6', | |||||
| ), | |||||
| description='Static reflection for enums', | |||||
| tag_fmt='v{}', | |||||
| git_url='https://github.com/Neargye/magic_enum.git', | |||||
| auto_lib='neargye/magic_enum', | |||||
| ), | |||||
| many_versions( | |||||
| 'nameof', | |||||
| [ | |||||
| '0.8.3', | |||||
| '0.9.0', | |||||
| '0.9.1', | |||||
| '0.9.2', | |||||
| '0.9.3', | |||||
| '0.9.4', | |||||
| ], | |||||
| description='Nameof operator for modern C++', | |||||
| tag_fmt='v{}', | |||||
| git_url='https://github.com/Neargye/nameof.git', | |||||
| auto_lib='neargye/nameof', | |||||
| ), | |||||
| many_versions( | many_versions( | ||||
| 'range-v3', | 'range-v3', | ||||
| ( | ( | ||||
| many_versions( | many_versions( | ||||
| 'nlohmann-json', | 'nlohmann-json', | ||||
| ( | ( | ||||
| '3.0.0', | |||||
| '3.0.1', | |||||
| '3.1.0', | |||||
| '3.1.1', | |||||
| '3.1.2', | |||||
| '3.2.0', | |||||
| '3.3.0', | |||||
| '3.4.0', | |||||
| '3.5.0', | |||||
| '3.6.0', | |||||
| '3.6.1', | |||||
| '3.7.0', | |||||
| '3.7.1', | |||||
| '3.7.2', | |||||
| '3.7.3', | |||||
| # '3.0.0', | |||||
| # '3.0.1', | |||||
| # '3.1.0', | |||||
| # '3.1.1', | |||||
| # '3.1.2', | |||||
| # '3.2.0', | |||||
| # '3.3.0', | |||||
| # '3.4.0', | |||||
| # '3.5.0', | |||||
| # '3.6.0', | |||||
| # '3.6.1', | |||||
| # '3.7.0', | |||||
| '3.7.1', # Only this version has the dds forked branch | |||||
| # '3.7.2', | |||||
| # '3.7.3', | |||||
| ), | ), | ||||
| git_url='https://github.com/vector-of-bool/json.git', | git_url='https://github.com/vector-of-bool/json.git', | ||||
| tag_fmt='dds/{}', | tag_fmt='dds/{}', | ||||
| '0.2.1', | '0.2.1', | ||||
| '0.2.2', | '0.2.2', | ||||
| '0.2.3', | '0.2.3', | ||||
| '0.3.0', | |||||
| ), | ), | ||||
| description='A modern and low-level C++ SQLite API', | description='A modern and low-level C++ SQLite API', | ||||
| git_url='https://github.com/vector-of-bool/neo-sqlite3.git', | git_url='https://github.com/vector-of-bool/neo-sqlite3.git', | ||||
| description='A C++ implementation of a JSON5 parser', | description='A C++ implementation of a JSON5 parser', | ||||
| git_url='https://github.com/vector-of-bool/json5.git', | git_url='https://github.com/vector-of-bool/json5.git', | ||||
| ), | ), | ||||
| Package('vob-semester', [ | |||||
| Version( | |||||
| '0.1.0', | |||||
| description='A C++ library to process recursive dynamic data', | |||||
| remote=Git('https://github.com/vector-of-bool/semester.git', | |||||
| '0.1.0'), | |||||
| depends={ | |||||
| 'neo-fun': '^0.1.0', | |||||
| 'neo-concepts': '^0.2.1', | |||||
| }), | |||||
| simple_packages( | |||||
| 'vob-semester', | |||||
| description='A C++ library to process recursive dynamic data', | |||||
| git_url='https://github.com/vector-of-bool/semester.git', | |||||
| versions=[ | |||||
| VersionSet('0.1.0', [ | |||||
| ('neo-fun', '^0.1.0'), | |||||
| ('neo-concepts', '^0.2.1'), | |||||
| ]), | |||||
| VersionSet('0.1.1', [ | |||||
| ('neo-fun', '^0.1.1'), | |||||
| ('neo-concepts', '^0.2.2'), | |||||
| ]), | |||||
| VersionSet('0.2.0', [ | |||||
| ('neo-fun', '^0.3.2'), | |||||
| ('neo-concepts', '^0.3.2'), | |||||
| ]), | |||||
| VersionSet('0.2.1', [ | |||||
| ('neo-fun', '^0.3.2'), | |||||
| ('neo-concepts', '^0.3.2'), | |||||
| ]), | |||||
| ], | |||||
| ), | |||||
| many_versions( | |||||
| 'ctre', | |||||
| ( | |||||
| '2.8.1', | |||||
| '2.8.2', | |||||
| '2.8.3', | |||||
| '2.8.4', | |||||
| ), | |||||
| git_url= | |||||
| 'https://github.com/hanickadot/compile-time-regular-expressions.git', | |||||
| tag_fmt='v{}', | |||||
| auto_lib='hanickadot/ctre', | |||||
| description= | |||||
| 'A compile-time PCRE (almost) compatible regular expression matcher', | |||||
| ), | |||||
| Package( | |||||
| 'spdlog', | |||||
| [ | |||||
| Version( | |||||
| ver, | |||||
| description='Fast C++ logging library', | |||||
| depends={'fmt': '+6.0.0'}, | |||||
| remote=Git( | |||||
| url='https://github.com/gabime/spdlog.git', | |||||
| ref=f'v{ver}', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| write=WriteTransform( | |||||
| path='package.json', | |||||
| content=json.dumps({ | |||||
| 'name': 'spdlog', | |||||
| 'namespace': 'spdlog', | |||||
| 'version': ver, | |||||
| 'depends': ['fmt+6.0.0'], | |||||
| }))), | |||||
| FSTransform( | |||||
| write=WriteTransform( | |||||
| path='library.json', | |||||
| content=json.dumps({ | |||||
| 'name': 'spdlog', | |||||
| 'uses': ['fmt/fmt'] | |||||
| }))), | |||||
| FSTransform( | |||||
| # It's all just template instantiations. | |||||
| remove=RemoveTransform(path='src/'), | |||||
| # Tell spdlog to use the external fmt library | |||||
| edit=EditTransform( | |||||
| path='include/spdlog/tweakme.h', | |||||
| edits=[ | |||||
| OneEdit( | |||||
| kind='insert', | |||||
| content='#define SPDLOG_FMT_EXTERNAL 1', | |||||
| line=13, | |||||
| ), | |||||
| ])), | |||||
| ], | |||||
| ), | |||||
| ) for ver in ( | |||||
| '1.4.0', | |||||
| '1.4.1', | |||||
| '1.4.2', | |||||
| '1.5.0', | |||||
| '1.6.0', | |||||
| '1.6.1', | |||||
| '1.7.0', | |||||
| ) | |||||
| ]), | |||||
| many_versions( | |||||
| 'fmt', | |||||
| ( | |||||
| '6.0.0', | |||||
| '6.1.0', | |||||
| '6.1.1', | |||||
| '6.1.2', | |||||
| '6.2.0', | |||||
| '6.2.1', | |||||
| '7.0.0', | |||||
| '7.0.1', | |||||
| ), | |||||
| git_url='https://github.com/fmtlib/fmt.git', | |||||
| auto_lib='fmt/fmt', | |||||
| description='A modern formatting library : https://fmt.dev/', | |||||
| ), | |||||
| Package('catch2', [ | |||||
| Version( | Version( | ||||
| '0.1.1', | |||||
| description='A C++ library to process recursive dynamic data', | |||||
| remote=Git('https://github.com/vector-of-bool/semester.git', | |||||
| '0.1.1'), | |||||
| depends={ | |||||
| 'neo-fun': '^0.1.1', | |||||
| 'neo-concepts': '^0.2.2', | |||||
| }), | |||||
| '2.12.4', | |||||
| description='A modern C++ unit testing library', | |||||
| remote=Git( | |||||
| 'https://github.com/catchorg/Catch2.git', | |||||
| 'v2.12.4', | |||||
| auto_lib='catch2/catch2', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='include', to='include/catch2')), | |||||
| FSTransform( | |||||
| copy=CopyMoveTransform(frm='include', to='src'), | |||||
| write=WriteTransform( | |||||
| path='include/catch2/catch_with_main.hpp', | |||||
| content=''' | |||||
| #pragma once | |||||
| #define CATCH_CONFIG_MAIN | |||||
| #include "./catch.hpp" | |||||
| namespace Catch { | |||||
| CATCH_REGISTER_REPORTER("console", ConsoleReporter) | |||||
| } | |||||
| ''')), | |||||
| ])) | |||||
| ]), | |||||
| Package('asio', [ | |||||
| Version( | Version( | ||||
| '0.2.0', | |||||
| description='A C++ library to process recursive dynamic data', | |||||
| remote=Git('https://github.com/vector-of-bool/semester.git', | |||||
| '0.2.0'), | |||||
| depends={ | |||||
| 'neo-fun': '^0.3.2', | |||||
| 'neo-concepts': '^0.3.2', | |||||
| }), | |||||
| ver, | |||||
| description='Asio asynchronous I/O C++ library', | |||||
| remote=Git( | |||||
| 'https://github.com/chriskohlhoff/asio.git', | |||||
| f'asio-{ver.replace(".", "-")}', | |||||
| auto_lib='asio/asio', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='asio/src', | |||||
| to='src/', | |||||
| ), | |||||
| remove=RemoveTransform( | |||||
| path='src/', | |||||
| only_matching=[ | |||||
| 'doc/**', | |||||
| 'examples/**', | |||||
| 'tests/**', | |||||
| 'tools/**', | |||||
| ], | |||||
| ), | |||||
| ), | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='asio/include/', | |||||
| to='include/', | |||||
| ), | |||||
| edit=EditTransform( | |||||
| path='include/asio/detail/config.hpp', | |||||
| edits=[ | |||||
| OneEdit( | |||||
| line=13, | |||||
| kind='insert', | |||||
| content='#define ASIO_STANDALONE 1'), | |||||
| OneEdit( | |||||
| line=14, | |||||
| kind='insert', | |||||
| content= | |||||
| '#define ASIO_SEPARATE_COMPILATION 1') | |||||
| ]), | |||||
| ), | |||||
| ]), | |||||
| ) for ver in [ | |||||
| '1.12.0', | |||||
| '1.12.1', | |||||
| '1.12.2', | |||||
| '1.13.0', | |||||
| '1.14.0', | |||||
| '1.14.1', | |||||
| '1.16.0', | |||||
| '1.16.1', | |||||
| ] | |||||
| ]), | |||||
| Package( | |||||
| 'abseil', | |||||
| [ | |||||
| Version( | |||||
| ver, | |||||
| description='Abseil Common Libraries', | |||||
| remote=Git( | |||||
| 'https://github.com/abseil/abseil-cpp.git', | |||||
| tag, | |||||
| auto_lib='abseil/abseil', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='absl', | |||||
| to='src/absl/', | |||||
| ), | |||||
| remove=RemoveTransform( | |||||
| path='src/', | |||||
| only_matching=[ | |||||
| '**/*_test.c*', | |||||
| '**/*_testing.c*', | |||||
| '**/*_benchmark.c*', | |||||
| '**/benchmarks.c*', | |||||
| '**/*_test_common.c*', | |||||
| '**/mocking_*.c*', | |||||
| # Misc files that should be removed: | |||||
| '**/test_util.cc', | |||||
| '**/mutex_nonprod.cc', | |||||
| '**/named_generator.cc', | |||||
| '**/print_hash_of.cc', | |||||
| '**/*_gentables.cc', | |||||
| ]), | |||||
| ) | |||||
| ]), | |||||
| ) for ver, tag in [ | |||||
| ('2018.6.0', '20180600'), | |||||
| ('2019.8.8', '20190808'), | |||||
| ('2020.2.25', '20200225.2'), | |||||
| ] | |||||
| ]), | |||||
| Package( | |||||
| 'zlib', | |||||
| [ | |||||
| Version( | |||||
| ver, | |||||
| description= | |||||
| 'A massively spiffy yet delicately unobtrusive compression library', | |||||
| remote=Git( | |||||
| 'https://github.com/madler/zlib.git', | |||||
| tag or f'v{ver}', | |||||
| auto_lib='zlib/zlib', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='.', | |||||
| to='src/', | |||||
| include=[ | |||||
| '*.c', | |||||
| '*.h', | |||||
| ], | |||||
| )), | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='src/', | |||||
| to='include/', | |||||
| include=['zlib.h', 'zconf.h'], | |||||
| )), | |||||
| ]), | |||||
| ) for ver, tag in [ | |||||
| ('1.2.11', None), | |||||
| ('1.2.10', None), | |||||
| ('1.2.9', None), | |||||
| ('1.2.8', None), | |||||
| ('1.2.7', 'v1.2.7.3'), | |||||
| ('1.2.6', 'v1.2.6.1'), | |||||
| ('1.2.5', 'v1.2.5.3'), | |||||
| ('1.2.4', 'v1.2.4.5'), | |||||
| ('1.2.3', 'v1.2.3.8'), | |||||
| ('1.2.2', 'v1.2.2.4'), | |||||
| ('1.2.1', 'v1.2.1.2'), | |||||
| ('1.2.0', 'v1.2.0.8'), | |||||
| ('1.1.4', None), | |||||
| ('1.1.3', None), | |||||
| ('1.1.2', None), | |||||
| ('1.1.1', None), | |||||
| ('1.1.0', None), | |||||
| ('1.0.9', None), | |||||
| ('1.0.8', None), | |||||
| ('1.0.7', None), | |||||
| # ('1.0.6', None), # Does not exist | |||||
| ('1.0.5', None), | |||||
| ('1.0.4', None), | |||||
| # ('1.0.3', None), # Does not exist | |||||
| ('1.0.2', None), | |||||
| ('1.0.1', None), | |||||
| ] | |||||
| ]), | |||||
| Package('sol2', [ | |||||
| Version( | Version( | ||||
| '0.2.1', | |||||
| description='A C++ library to process recursive dynamic data', | |||||
| remote=Git('https://github.com/vector-of-bool/semester.git', | |||||
| '0.2.1'), | |||||
| depends={ | |||||
| 'neo-fun': '^0.3.2', | |||||
| 'neo-concepts': '^0.3.2', | |||||
| }), | |||||
| ver, | |||||
| description= | |||||
| 'A C++ <-> Lua API wrapper with advanced features and top notch performance', | |||||
| depends={'lua': '+0.0.0'}, | |||||
| remote=Git( | |||||
| 'https://github.com/ThePhD/sol2.git', | |||||
| f'v{ver}', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| write=WriteTransform( | |||||
| path='package.json', | |||||
| content=json.dumps( | |||||
| { | |||||
| 'name': 'sol2', | |||||
| 'namespace': 'sol2', | |||||
| 'version': ver, | |||||
| 'depends': [f'lua+0.0.0'], | |||||
| }, | |||||
| indent=2, | |||||
| )), | |||||
| move=(None | |||||
| if ver.startswith('3.') else CopyMoveTransform( | |||||
| frm='sol', | |||||
| to='src/sol', | |||||
| )), | |||||
| ), | |||||
| FSTransform( | |||||
| write=WriteTransform( | |||||
| path='library.json', | |||||
| content=json.dumps( | |||||
| { | |||||
| 'name': 'sol2', | |||||
| 'uses': ['lua/lua'], | |||||
| }, | |||||
| indent=2, | |||||
| ))), | |||||
| ]), | |||||
| ) for ver in [ | |||||
| '3.2.1', | |||||
| '3.2.0', | |||||
| '3.0.3', | |||||
| '3.0.2', | |||||
| '2.20.6', | |||||
| '2.20.5', | |||||
| '2.20.4', | |||||
| '2.20.3', | |||||
| '2.20.2', | |||||
| '2.20.1', | |||||
| '2.20.0', | |||||
| ] | |||||
| ]), | ]), | ||||
| Package('ctre', [ | |||||
| Package('lua', [ | |||||
| Version( | Version( | ||||
| '2.7.0', | |||||
| ver, | |||||
| description= | description= | ||||
| 'A compile-time PCRE (almost) compatible regular expression matcher', | |||||
| 'Lua is a powerful and fast programming language that is easy to learn and use and to embed into your application.', | |||||
| remote=Git( | remote=Git( | ||||
| 'https://github.com/hanickadot/compile-time-regular-expressions.git', | |||||
| 'v2.7', | |||||
| auto_lib='hanickadot/ctre', | |||||
| )) | |||||
| 'https://github.com/lua/lua.git', | |||||
| f'v{ver}', | |||||
| auto_lib='lua/lua', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='.', | |||||
| to='src/', | |||||
| include=['*.c', '*.h'], | |||||
| )) | |||||
| ]), | |||||
| ) for ver in [ | |||||
| '5.4.0', | |||||
| '5.3.5', | |||||
| '5.3.4', | |||||
| '5.3.3', | |||||
| '5.3.2', | |||||
| '5.3.1', | |||||
| '5.3.0', | |||||
| '5.2.3', | |||||
| '5.2.2', | |||||
| '5.2.1', | |||||
| '5.2.0', | |||||
| '5.1.1', | |||||
| ] | |||||
| ]), | |||||
| Package('pegtl', [ | |||||
| Version( | |||||
| ver, | |||||
| description='Parsing Expression Grammar Template Library', | |||||
| remote=Git( | |||||
| 'https://github.com/taocpp/PEGTL.git', | |||||
| ver, | |||||
| auto_lib='tao/pegtl', | |||||
| transforms=[FSTransform(remove=RemoveTransform(path='src/'))], | |||||
| )) for ver in [ | |||||
| '2.8.3', | |||||
| '2.8.2', | |||||
| '2.8.1', | |||||
| '2.8.0', | |||||
| '2.7.1', | |||||
| '2.7.0', | |||||
| '2.6.1', | |||||
| '2.6.0', | |||||
| ] | |||||
| ]), | ]), | ||||
| many_versions( | many_versions( | ||||
| 'spdlog', | |||||
| ( | |||||
| '0.9.0', | |||||
| '0.10.0', | |||||
| '0.11.0', | |||||
| '0.12.0', | |||||
| '0.13.0', | |||||
| '0.14.0', | |||||
| '0.16.0', | |||||
| '0.16.1', | |||||
| '0.16.2', | |||||
| '0.17.0', | |||||
| 'boost.pfr', ['1.0.0', '1.0.1'], | |||||
| auto_lib='boost/pfr', | |||||
| git_url='https://github.com/apolukhin/magic_get.git'), | |||||
| many_versions( | |||||
| 'boost.leaf', | |||||
| [ | |||||
| '0.1.0', | |||||
| '0.2.0', | |||||
| '0.2.1', | |||||
| '0.2.2', | |||||
| '0.2.3', | |||||
| '0.2.4', | |||||
| '0.2.5', | |||||
| '0.3.0', | |||||
| ], | |||||
| auto_lib='boost/leaf', | |||||
| git_url='https://github.com/zajo/leaf.git', | |||||
| ), | |||||
| many_versions( | |||||
| 'boost.mp11', | |||||
| ['1.70.0', '1.71.0', '1.72.0', '1.73.0'], | |||||
| tag_fmt='boost-{}', | |||||
| git_url='https://github.com/boostorg/mp11.git', | |||||
| auto_lib='boost/mp11', | |||||
| ), | |||||
| many_versions( | |||||
| 'libsodium', [ | |||||
| '1.0.10', | |||||
| '1.0.11', | |||||
| '1.0.12', | |||||
| '1.0.13', | |||||
| '1.0.14', | |||||
| '1.0.15', | |||||
| '1.0.16', | |||||
| '1.0.17', | |||||
| '1.0.18', | |||||
| ], | |||||
| git_url='https://github.com/jedisct1/libsodium.git', | |||||
| auto_lib='sodium/sodium', | |||||
| description='Sodium is a new, easy-to-use software library ' | |||||
| 'for encryption, decryption, signatures, password hashing and more.', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| move=CopyMoveTransform( | |||||
| frm='src/libsodium/include', to='include/'), | |||||
| edit=EditTransform( | |||||
| path='include/sodium/export.h', | |||||
| edits=[ | |||||
| OneEdit( | |||||
| line=8, | |||||
| kind='insert', | |||||
| content='#define SODIUM_STATIC 1') | |||||
| ])), | |||||
| FSTransform( | |||||
| edit=EditTransform( | |||||
| path='include/sodium/private/common.h', | |||||
| edits=[ | |||||
| OneEdit( | |||||
| kind='insert', | |||||
| line=1, | |||||
| content=Path(__file__).parent.joinpath( | |||||
| 'libsodium-config.h').read_text(), | |||||
| ) | |||||
| ])), | |||||
| FSTransform( | |||||
| copy=CopyMoveTransform( | |||||
| frm='builds/msvc/version.h', | |||||
| to='include/sodium/version.h', | |||||
| ), | |||||
| move=CopyMoveTransform( | |||||
| frm='src/libsodium', | |||||
| to='src/', | |||||
| ), | |||||
| remove=RemoveTransform(path='src/libsodium'), | |||||
| ), | |||||
| FSTransform( | |||||
| copy=CopyMoveTransform( | |||||
| frm='include', to='src/', strip_components=1)), | |||||
| ]), | |||||
| many_versions( | |||||
| 'tomlpp', | |||||
| [ | |||||
| '1.0.0', | '1.0.0', | ||||
| '1.1.0', | '1.1.0', | ||||
| '1.2.0', | '1.2.0', | ||||
| '1.2.1', | |||||
| '1.2.3', | |||||
| '1.2.4', | |||||
| '1.2.5', | |||||
| '1.3.0', | '1.3.0', | ||||
| '1.3.1', | |||||
| '1.4.0', | |||||
| '1.4.1', | |||||
| '1.4.2', | |||||
| ), | |||||
| git_url='https://github.com/gabime/spdlog.git', | |||||
| # '1.3.2', # Wrong tag name in upstream | |||||
| '1.3.3', | |||||
| ], | |||||
| tag_fmt='v{}', | tag_fmt='v{}', | ||||
| auto_lib='spdlog/spdlog', | |||||
| description='Fast C++ logging library', | |||||
| ), | |||||
| git_url='https://github.com/marzer/tomlplusplus.git', | |||||
| auto_lib='tomlpp/tomlpp', | |||||
| description= | |||||
| 'Header-only TOML config file parser and serializer for modern C++'), | |||||
| Package('inja', [ | |||||
| *(Version( | |||||
| ver, | |||||
| description='A Template Engine for Modern C++', | |||||
| remote=Git( | |||||
| 'https://github.com/pantor/inja.git', | |||||
| f'v{ver}', | |||||
| auto_lib='inja/inja')) for ver in ('1.0.0', '2.0.0', '2.0.1')), | |||||
| *(Version( | |||||
| ver, | |||||
| description='A Template Engine for Modern C++', | |||||
| depends={'nlohmann-json': '+0.0.0'}, | |||||
| remote=Git( | |||||
| 'https://github.com/pantor/inja.git', | |||||
| f'v{ver}', | |||||
| transforms=[ | |||||
| FSTransform( | |||||
| write=WriteTransform( | |||||
| path='package.json', | |||||
| content=json.dumps({ | |||||
| 'name': | |||||
| 'inja', | |||||
| 'namespace': | |||||
| 'inja', | |||||
| 'version': | |||||
| ver, | |||||
| 'depends': [ | |||||
| 'nlohmann-json+0.0.0', | |||||
| ] | |||||
| }))), | |||||
| FSTransform( | |||||
| write=WriteTransform( | |||||
| path='library.json', | |||||
| content=json.dumps({ | |||||
| 'name': 'inja', | |||||
| 'uses': ['nlohmann/json'] | |||||
| }))), | |||||
| ], | |||||
| )) for ver in ('2.1.0', '2.2.0')), | |||||
| ]), | |||||
| many_versions( | many_versions( | ||||
| 'fmt', | |||||
| ( | |||||
| '0.8.0', | |||||
| 'cereal', | |||||
| [ | |||||
| '0.9.0', | '0.9.0', | ||||
| '0.10.0', | |||||
| '0.12.0', | |||||
| '0.9.1', | |||||
| '1.0.0', | '1.0.0', | ||||
| '1.1.0', | '1.1.0', | ||||
| '1.1.1', | |||||
| '1.1.2', | |||||
| '1.2.0', | |||||
| '1.2.1', | |||||
| '1.2.2', | |||||
| '1.3.0', | |||||
| ], | |||||
| auto_lib='cereal/cereal', | |||||
| git_url='https://github.com/USCiLab/cereal.git', | |||||
| tag_fmt='v{}', | |||||
| description='A C++11 library for serialization', | |||||
| ), | |||||
| many_versions( | |||||
| 'pybind11', | |||||
| [ | |||||
| '2.0.0', | '2.0.0', | ||||
| '2.0.1', | '2.0.1', | ||||
| '2.1.0', | '2.1.0', | ||||
| '2.1.1', | '2.1.1', | ||||
| '3.0.0', | |||||
| '3.0.1', | |||||
| '3.0.2', | |||||
| '4.0.0', | |||||
| '4.1.0', | |||||
| '5.0.0', | |||||
| '5.1.0', | |||||
| '5.2.0', | |||||
| '5.2.1', | |||||
| '5.3.0', | |||||
| '6.0.0', | |||||
| '6.1.0', | |||||
| '6.1.1', | |||||
| '6.1.2', | |||||
| ), | |||||
| git_url='https://github.com/fmtlib/fmt.git', | |||||
| auto_lib='fmt/fmt', | |||||
| description='A modern formatting library : https://fmt.dev/', | |||||
| '2.2.0', | |||||
| '2.2.1', | |||||
| '2.2.2', | |||||
| '2.2.3', | |||||
| '2.2.4', | |||||
| '2.3.0', | |||||
| '2.4.0', | |||||
| '2.4.1', | |||||
| '2.4.2', | |||||
| '2.4.3', | |||||
| '2.5.0', | |||||
| ], | |||||
| git_url='https://github.com/pybind/pybind11.git', | |||||
| description='Seamless operability between C++11 and Python', | |||||
| auto_lib='pybind/pybind11', | |||||
| tag_fmt='v{}', | |||||
| ), | ), | ||||
| Package('pcg-cpp', [ | |||||
| Version( | |||||
| '0.98.1', | |||||
| description='PCG Randum Number Generation, C++ Edition', | |||||
| remote=Git( | |||||
| url='https://github.com/imneme/pcg-cpp.git', | |||||
| ref='v0.98.1', | |||||
| auto_lib='pcg/pcg-cpp')) | |||||
| ]), | |||||
| ] | ] | ||||
if __name__ == "__main__":
    # No options yet; parsing still validates that no stray arguments were
    # passed and gives us a --help for free.
    parser = argparse.ArgumentParser()
    args = parser.parse_args()

    # Serializable catalog document generated from the PACKAGES list.
    # (An earlier copy of this dict that referenced a lowercase `packages`
    # name was dead pre-refactor residue and has been removed.)
    data = {
        'version': 1,
        'packages': {
            pkg.name: {ver.version: ver.to_dict()
                       for ver in pkg.versions}
            for pkg in PACKAGES
        }
    }

    # Write the human-readable JSON catalog at the repository root.
    json_str = json.dumps(data, indent=2, sort_keys=True)
    Path('catalog.json').write_text(json_str)

    # C++ source template for the compiled-in initial catalog. The JSON is
    # embedded as a char array because MSVC rejects string literals > 64k.
    cpp_template = textwrap.dedent(r'''
        #include <dds/catalog/package_info.hpp>
        #include <dds/catalog/init_catalog.hpp>
        #include <dds/catalog/import.hpp>
        /**
         * The following array of integers is generated and contains the JSON
         * encoded initial catalog. MSVC can't handle string literals over
         * 64k large, so we have to resort to using a regular char array:
         */
        static constexpr const char INIT_PACKAGES_CONTENT[] = {
        @JSON@
        };
        static constexpr int INIT_PACKAGES_STR_LEN = @JSON_LEN@;
        const std::vector<dds::package_info>&
        dds::init_catalog_packages() noexcept {
            using std::nullopt;
            static auto pkgs = dds::parse_packages_json(
                std::string_view(INIT_PACKAGES_CONTENT, INIT_PACKAGES_STR_LEN));
            return pkgs;
        }
        ''')

    # Compact (no-indent) encoding keeps the generated char array small;
    # each character becomes its integer code point, wrapped and indented
    # so the generated file stays readable.
    json_small = json.dumps(data, sort_keys=True)
    json_small_arr = ', '.join(str(ord(c)) for c in json_small)
    json_small_arr = '\n'.join(textwrap.wrap(json_small_arr, width=120))
    json_small_arr = textwrap.indent(json_small_arr, prefix=' ' * 4)

    # Echo the pretty catalog to stdout for CI logs / manual inspection.
    print(json.dumps(data, indent=2, sort_keys=True))

    cpp_content = cpp_template.replace('@JSON@', json_small_arr).replace(
        '@JSON_LEN@', str(len(json_small)))
    Path('src/dds/catalog/init_catalog.cpp').write_text(cpp_content)
#pragma once
// clang-format off

/**
 * Feature-detection configuration header: probes available system headers,
 * architectural intrinsics, and platform APIs entirely with the
 * preprocessor (no configure step).
 */

/**
 * Header checks
 */
#if __has_include(<sys/mman.h>)
#define HAVE_SYS_MMAN_H 1
#endif
#if __has_include(<sys/random.h>)
#define HAVE_SYS_RANDOM_H 1
#endif
#if __has_include(<intrin.h>)
#define HAVE_INTRIN_H 1
#endif
#if __has_include(<sys/auxv.h>)
#define HAVE_SYS_AUXV_H 1
#endif

/**
 * Architectural checks for intrinsics
 */
#if __has_include(<mmintrin.h>) && __MMX__
#define HAVE_MMINTRIN_H 1
#endif
#if __has_include(<emmintrin.h>) && __SSE2__
#define HAVE_EMMINTRIN_H 1
#endif
#if __SSE3__
#if __has_include(<pmmintrin.h>)
#define HAVE_PMMINTRIN_H 1
#endif
#if __has_include(<tmmintrin.h>)
#define HAVE_TMMINTRIN_H 1
#endif
#endif
// NOTE: the HAVE_* macros below were previously defined with no value,
// which breaks `#if HAVE_X` style checks (empty expansion is a
// preprocessor error). Normalized to 1, matching the rest of this file.
#if __has_include(<smmintrin.h>) && __SSE4_1__
#define HAVE_SMMINTRIN_H 1
#endif
#if __has_include(<immintrin.h>)
#if __AVX__
#define HAVE_AVXINTRIN_H 1
#endif
#if __AVX2__
#define HAVE_AVX2INTRIN_H 1
#endif
#if __AVX512F__
#if defined(__clang__) && __clang_major__ < 4
// AVX512 intrinsics may be broken on Clang < 4
#elif defined(__GNUC__) && __GNUC__ < 6
// AVX512 intrinsics may be broken on GCC < 6 as well
#else
#define HAVE_AVX512FINTRIN_H 1
#endif
#endif
#endif
#if __has_include(<wmmintrin.h>) && __AES__
#define HAVE_WMMINTRIN_H 1
#endif
#if __RDRND__
#define HAVE_RDRAND 1
#endif

/**
 * Detect mman APIs
 */
#if __has_include(<sys/mman.h>)
#define HAVE_MMAP 1
#define HAVE_MPROTECT 1
#define HAVE_MLOCK 1
// madvise() is not in POSIX; glibc exposes it under these feature macros.
#if defined(_DEFAULT_SOURCE) || defined(_BSD_SOURCE)
#define HAVE_MADVISE 1
#endif
#endif
#if __has_include(<sys/random.h>)
#define HAVE_GETRANDOM 1
#endif

/**
 * POSIX-Only stuff
 */
#if __has_include(<unistd.h>)
#if defined(_DEFAULT_SOURCE)
#define HAVE_GETENTROPY 1
#endif
/**
 * Default POSIX APIs
 */
#define HAVE_POSIX_MEMALIGN 1
#define HAVE_GETPID 1
#define HAVE_NANOSLEEP 1
/**
 * Language/library features from C11
 */
#if __STDC_VERSION__ >= 201112L
#define HAVE_MEMSET_S 1
#endif
#if __linux__
#define HAVE_EXPLICIT_BZERO 1
#endif
#endif

/**
 * Miscellaneous
 */
#if __has_include(<pthread.h>)
#define HAVE_PTHREAD 1
#endif
#if __has_include(<sys/param.h>)
#include <sys/param.h>
// Fix: guard with defined() — when <sys/param.h> does not provide these
// macros, the bare comparison `__BYTE_ORDER == __BIG_ENDIAN` evaluates as
// `0 == 0` and silently (and wrongly) selected big-endian.
#if defined(__BYTE_ORDER) && defined(__BIG_ENDIAN) && (__BYTE_ORDER == __BIG_ENDIAN)
#define NATIVE_BIG_ENDIAN 1
#elif defined(__BYTE_ORDER) && defined(__LITTLE_ENDIAN) && (__BYTE_ORDER == __LITTLE_ENDIAN)
#define NATIVE_LITTLE_ENDIAN 1
#else
#error "Unknown endianness for this platform."
#endif
// Fix: the MSVC predefined macro is _MSC_VER; nothing defines `_MSVC`, so
// this branch never fired and MSVC builds fell through to the #error.
#elif defined(_MSC_VER)
// At time of writing, MSVC only targets little-endian.
#define NATIVE_LITTLE_ENDIAN 1
#else
#error "Unknown endianness for this platform."
#endif

#define CONFIGURED 1
// Toolchain definition for building with MSVC (consumed via tools/ci.py).
// Schema: ../res/toolchain-schema.json
{
    "$schema": "../res/toolchain-schema.json",
    "compiler_id": "msvc",
    "flags": [
        "/Zc:preprocessor", // Required for range-v3
        "/std:c++latest",
    ],
    "link_flags": [
        // NOTE(review): presumably needed for the Windows RPC/UUID APIs —
        // confirm against the sources that link this toolchain.
        "rpcrt4.lib",
    ],
    // Uncomment to produce a debug build:
    // "debug": true,
    "optimize": true
}
| toolchain: str, | toolchain: str, | ||||
| lmi_path: Path = None, | lmi_path: Path = None, | ||||
| cat_path: Path = Path('_build/catalog.db'), | cat_path: Path = Path('_build/catalog.db'), | ||||
| cat_json_path: Path = Path('catalog.json'), | |||||
| dds_flags: proc.CommandLine = ()): | dds_flags: proc.CommandLine = ()): | ||||
| # Copy the exe to another location, as windows refuses to let a binary be | # Copy the exe to another location, as windows refuses to let a binary be | ||||
| # replaced while it is executing | # replaced while it is executing | ||||
| 'catalog', | 'catalog', | ||||
| 'import', | 'import', | ||||
| f'--catalog={cat_path}', | f'--catalog={cat_path}', | ||||
| f'--json=catalog.json', | |||||
| f'--json={cat_json_path}', | |||||
| ) | ) | ||||
| proc.check_run( | proc.check_run( | ||||
| new_exe, | new_exe, |