
revive pkg-transform with a better semester loader

default_compile_flags
vector-of-bool 4 years ago
parent
commit
a1ad20fb7e
29 changed files with 887 additions and 290 deletions
  1. +46 -40 catalog.json
  2. +2 -2 package.jsonc
  3. +2 -2 src/dds.main.cpp
  4. +41 -24 src/dds/catalog/catalog.cpp
  5. +3 -4 src/dds/catalog/get.cpp
  6. +149 -180 src/dds/catalog/import.cpp
  7. +77 -2 src/dds/catalog/import.test.cpp
  8. +0 -2 src/dds/catalog/package_info.hpp
  9. +3 -0 src/dds/catalog/remote/git.hpp
  10. +1 -0 src/dds/error/errors.hpp
  11. +329 -0 src/dds/util/fs_transform.cpp
  12. +16 -4 src/dds/util/fs_transform.hpp
  13. +1 -1 src/dds/util/glob.cpp
  14. +5 -0 tests/deps/use-catch2/gcc.tc.jsonc
  15. +3 -0 tests/deps/use-catch2/msvc.tc.jsonc
  16. +43 -0 tests/deps/use-catch2/project/catalog.json5
  17. +4 -0 tests/deps/use-catch2/project/library.json5
  18. +8 -0 tests/deps/use-catch2/project/package.json5
  19. +6 -0 tests/deps/use-catch2/project/src/use-catch2.main.cpp
  20. +11 -0 tests/deps/use-catch2/test_use_catch2.py
  21. +7 -0 tests/deps/use-cryptopp/gcc.tc.jsonc
  22. +3 -0 tests/deps/use-cryptopp/msvc.tc.jsonc
  23. +27 -0 tests/deps/use-cryptopp/project/catalog.json
  24. +4 -0 tests/deps/use-cryptopp/project/library.json5
  25. +8 -0 tests/deps/use-cryptopp/project/package.json5
  26. +17 -0 tests/deps/use-cryptopp/project/src/use-cryptopp.main.cpp
  27. +12 -0 tests/deps/use-cryptopp/test_use_cryptopp.py
  28. +34 -27 tests/deps/use-libsodium/project/catalog.json
  29. +25 -2 tools/gen-catalog-json.py
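
For orientation while reading the diffs below: the revived 'transform' support is parsed by the new src/dds/util/fs_transform.cpp and is attached to a package's 'git' listing, accepting copy, move, remove, and write operations. The entry below is only an illustrative sketch of that schema (the package name, URL, and paths are hypothetical); the real, working fixtures are the tests/deps/use-catch2 and tests/deps/use-cryptopp catalogs added by this commit:

    {
        version: 1,
        packages: {
            'example-lib': {
                '1.0.0': {
                    description: 'Illustrative entry only',
                    depends: {},
                    git: {
                        url: 'https://example.com/example-lib.git',
                        ref: 'v1.0.0',
                        'auto-lib': 'example/lib',
                        transform: [
                            // relocate public headers under a namespaced include dir
                            {move: {from: 'include', to: 'include/example'}},
                            // mirror sources into src/, keeping only matching files
                            {copy: {from: '.', to: 'src/example', include: ['*.c', '*.cpp', '*.h']}},
                            // drop a directory that should not land in the source distribution
                            {remove: {path: 'docs'}},
                            // generate a small header in the cloned tree
                            {write: {path: 'include/example/version.h', content: '#define EXAMPLE_VERSION 100\n'}},
                        ],
                    },
                },
            },
        },
    }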

+46 -40  catalog.json

@@ -234,7 +234,6 @@
"depends": {},
"description": "The Windows Implementation Library",
"git": {
"auto-lib": null,
"ref": "dds/2020.03.16",
"url": "https://github.com/vector-of-bool/wil.git"
}
@@ -245,7 +244,6 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
@@ -254,7 +252,6 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
@@ -263,7 +260,6 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
@@ -272,10 +268,33 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.2.2",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
},
"0.3.0": {
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"ref": "0.3.0",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
},
"0.3.1": {
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"ref": "0.3.1",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
},
"0.3.2": {
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"ref": "0.3.2",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
}
},
"neo-fun": {
@@ -283,7 +302,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -292,7 +310,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.1.1",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -301,7 +318,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -310,7 +326,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -319,7 +334,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.3.0",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -328,7 +342,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.3.1",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -337,7 +350,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.3.2",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -348,7 +360,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -357,7 +368,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -366,7 +376,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -375,7 +384,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.2",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -384,7 +392,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.3",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -395,7 +402,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.0.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -404,7 +410,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.0.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -413,7 +418,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.1.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -422,7 +426,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.1.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -431,7 +434,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.1.2",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -440,7 +442,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.2.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -449,7 +450,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.3.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -458,7 +458,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.4.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -467,7 +466,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.5.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -476,7 +474,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.6.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -485,7 +482,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.6.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -494,7 +490,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -503,7 +498,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -512,7 +506,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.2",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -521,7 +514,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.3",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -532,7 +524,6 @@
"depends": {},
"description": "A C++ implementation of the Pubgrub version solving algorithm",
"git": {
"auto-lib": null,
"ref": "0.1.2",
"url": "https://github.com/vector-of-bool/pubgrub.git"
}
@@ -541,7 +532,6 @@
"depends": {},
"description": "A C++ implementation of the Pubgrub version solving algorithm",
"git": {
"auto-lib": null,
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/pubgrub.git"
}
@@ -550,7 +540,6 @@
"depends": {},
"description": "A C++ implementation of the Pubgrub version solving algorithm",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/pubgrub.git"
}
@@ -599,7 +588,6 @@
"depends": {},
"description": "A C++ library that implements Semantic Versioning parsing, emitting, types, ordering, and operations. See https://semver.org/",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/semver.git"
}
@@ -608,7 +596,6 @@
"depends": {},
"description": "A C++ library that implements Semantic Versioning parsing, emitting, types, ordering, and operations. See https://semver.org/",
"git": {
"auto-lib": null,
"ref": "0.2.2",
"url": "https://github.com/vector-of-bool/semver.git"
}
@@ -792,7 +779,6 @@
"depends": {},
"description": "A C++ implementation of a JSON5 parser",
"git": {
"auto-lib": null,
"ref": "0.1.5",
"url": "https://github.com/vector-of-bool/json5.git"
}
@@ -806,7 +792,6 @@
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/semester.git"
}
@@ -818,10 +803,31 @@
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"auto-lib": null,
"ref": "0.1.1",
"url": "https://github.com/vector-of-bool/semester.git"
}
},
"0.2.0": {
"depends": {
"neo-concepts": "^0.3.2",
"neo-fun": "^0.3.2"
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/semester.git"
}
},
"0.2.1": {
"depends": {
"neo-concepts": "^0.3.2",
"neo-fun": "^0.3.2"
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/semester.git"
}
}
}
},

+2 -2  package.jsonc

@@ -9,11 +9,11 @@
"range-v3": "0.10.0",
"nlohmann-json": "3.7.1",
"neo-sqlite3": "0.2.3",
"neo-fun": "0.3.0",
"neo-fun": "0.3.2",
"semver": "0.2.2",
"pubgrub": "0.2.1",
"vob-json5": "0.1.5",
"vob-semester": "0.1.1",
"vob-semester": "0.2.1",
"ctre": "2.7.0",
},
"test_driver": "Catch-Main"

+2 -2  src/dds.main.cpp

@@ -270,13 +270,13 @@ struct cli_catalog {
// deps.push_back({dep_id.name, dep_id.version});
}

dds::package_info info{ident, std::move(deps), description.Get(), {}, {}};
dds::package_info info{ident, std::move(deps), description.Get(), {}};

if (git_url) {
if (!git_ref) {
dds::throw_user_error<dds::errc::git_url_ref_mutual_req>();
}
auto git = dds::git_remote_listing{git_url.Get(), git_ref.Get(), std::nullopt};
auto git = dds::git_remote_listing{git_url.Get(), git_ref.Get(), std::nullopt, {}};
if (auto_lib) {
git.auto_lib = lm::split_usage_string(auto_lib.Get());
}

+41 -24  src/dds/catalog/catalog.cpp

@@ -6,7 +6,9 @@
#include <dds/error/errors.hpp>
#include <dds/solve/solve.hpp>

#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/concepts.hpp>
#include <neo/sqlite3/exec.hpp>
#include <neo/sqlite3/iter_tuples.hpp>
#include <neo/sqlite3/single.hpp>
@@ -120,20 +122,6 @@ void check_json(bool b, std::string_view what) {
}
}

std::vector<dds::glob> parse_glob_list(const nlohmann::json& data, std::string_view what) {
std::vector<dds::glob> ret;

if (!data.is_null()) {
check_json(data.is_array(), fmt::format("'{}' must be an array of strings", what));
for (nlohmann::json const& glob : data) {
check_json(glob.is_string(), fmt::format("'{}[.]' must be strings", what));
ret.emplace_back(dds::glob::compile(std::string(glob)));
}
}

return ret;
}

} // namespace

catalog catalog::open(const std::string& db_path) {
@@ -165,6 +153,21 @@ void catalog::_store_pkg(const package_info& pkg, std::monostate) {
pkg.ident.to_string());
}

namespace {

std::string transforms_to_json(const std::vector<fs_transformation>& trs) {
std::string acc = "[";
for (auto it = trs.begin(); it != trs.end(); ++it) {
acc += it->as_json();
if (std::next(it) != trs.end()) {
acc += ", ";
}
}
return acc + "]";
}

} // namespace

void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git) {
auto lm_usage = git.auto_lib.value_or(lm::usage{});
sqlite3::exec( //
@@ -197,9 +200,8 @@ void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git)
git.ref,
lm_usage.name,
lm_usage.namespace_,
pkg.description
//, transform_to_json(pkg.transforms))
));
pkg.description,
transforms_to_json(git.transforms)));
}

void catalog::store(const package_info& pkg) {
@@ -293,18 +295,33 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
*git_url,
*git_ref,
lm_name ? std::make_optional(lm::usage{*lm_namespace, *lm_name}) : std::nullopt,
{},
},
{},
};

auto append_transform = [](auto transform) {
return [transform = std::move(transform)](auto& remote) {
if constexpr (neo::alike<decltype(remote), std::monostate>) {
// Do nothing
} else {
remote.transforms.push_back(std::move(transform));
}
};
};

if (!repo_transform.empty()) {
auto tr_json = nlohmann::json::parse(repo_transform);
auto tr_json = json5::parse_data(repo_transform);
check_json(tr_json.is_array(),
fmt::format("Database record for {} has an invalid 'repo_transform' field",
fmt::format("Database record for {} has an invalid 'repo_transform' field [1]",
pkg_id));
/// XXX:
// for (const auto& el : tr_json) {
// info.transforms.push_back(parse_transform(el));
// }
for (const auto& el : tr_json.as_array()) {
check_json(
el.is_object(),
fmt::format("Database record for {} has an invalid 'repo_transform' field [2]",
pkg_id));
auto tr = fs_transformation::from_json(el);
std::visit(append_transform(tr), info.remote);
}
}
return info;
}

+3 -4  src/dds/catalog/get.cpp

@@ -31,10 +31,9 @@ temporary_sdist do_pull_sdist(const package_info& listing, const git_remote_list
spdlog::info("Cloning Git repository: {} [{}] ...", git.url, git.ref);
git.clone(tmpdir.path());

/// XXX:
// for (const auto& tr : listing.transforms) {
// tr.apply_to(tmpdir.path());
// }
for (const auto& tr : git.transforms) {
tr.apply_to(tmpdir.path());
}

spdlog::info("Create sdist from clone ...");
if (git.auto_lib.has_value()) {

+149 -180  src/dds/catalog/import.cpp

@@ -4,213 +4,177 @@

#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <semester/decomp.hpp>
#include <semester/walk.hpp>
#include <spdlog/fmt/fmt.h>

#include <optional>

using namespace dds;

template <typename... Args>
template <typename KeyFunc, typename... Args>
struct any_key {
semester::try_seq<Args...> _seq;
std::string_view& _key;
KeyFunc _key_fn;
semester::walk_seq<Args...> _seq;

any_key(std::string_view& key_var, Args&&... args)
: _seq(NEO_FWD(args)...)
, _key{key_var} {}
any_key(KeyFunc&& kf, Args&&... args)
: _key_fn(kf)
, _seq(NEO_FWD(args)...) {}

template <typename Data>
semester::dc_result_t operator()(std::string_view key, Data&& dat) const {
_key = key;
return _seq.invoke(dat);
semester::walk_result operator()(std::string_view key, Data&& dat) {
auto res = _key_fn(key);
if (res.rejected()) {
return res;
}
return _seq.invoke(NEO_FWD(dat));
}
};

template <typename... Args>
any_key(std::string_view, Args&&...) -> any_key<Args&&...>;
template <typename KF, typename... Args>
any_key(KF&&, Args&&...) -> any_key<KF, Args...>;

namespace {

semester::dc_result_t reject(std::string s) { return semester::dc_reject_t{s}; }
semester::dc_result_t pass = semester::dc_pass;
semester::dc_result_t accept = semester::dc_accept;
using require_obj = semester::require_type<json5::data::mapping_type>;
using require_obj = semester::require_type<json5::data::mapping_type>;
using require_array = semester::require_type<json5::data::array_type>;
using require_str = semester::require_type<std::string>;

auto reject_unknown_key(std::string_view path) {
return [path = std::string(path)](auto key, auto&&) { //
return reject(fmt::format("{}: unknown key '{}'", path, key));
};
template <typename... Args>
[[noreturn]] void import_error(Args&&... args) {
throw_user_error<dds::errc::invalid_catalog_json>(NEO_FWD(args)...);
}

std::vector<dependency> parse_deps_json_v1(const json5::data& deps, std::string_view path) {
std::vector<dependency> acc_deps;
std::string_view dep_name;
std::string_view dep_version_range_str;
using namespace semester::decompose_ops;
auto result = semester::decompose( //
deps,
mapping{any_key{
dep_name,
[&](auto&& range_str) {
if (!range_str.is_string()) {
throw_user_error<
errc::invalid_catalog_json>("{}/{} should be a string version range",
path,
dep_name);
}
try {
auto rng = semver::range::parse_restricted(range_str.as_string());
acc_deps.push_back(dependency{std::string{dep_name}, {rng.low(), rng.high()}});
return accept;
} catch (const semver::invalid_range&) {
throw_user_error<errc::invalid_version_range_string>(
"Invalid version range string '{}' at {}/{}",
range_str.as_string(),
path,
dep_name);
}
},
}});
neo_assert(invariant,
std::holds_alternative<semester::dc_accept_t>(result),
"Parsing dependency object did not accept??");
return acc_deps;
git_remote_listing parse_git_remote(const json5::data& data) {
git_remote_listing git;
using namespace semester::walk_ops;
walk(data,
require_obj{"Git remote should be an object"},
mapping{required_key{"url",
"A git 'url' string is required",
require_str("Git URL should be a string"),
put_into(git.url)},
required_key{"ref",
"A git 'ref' is required, and must be a tag or branch name",
require_str("Git ref should be a string"),
put_into(git.ref)},
if_key{"auto-lib",
require_str("'auto-lib' should be a string"),
put_into(git.auto_lib,
[](std::string const& str) {
try {
return lm::split_usage_string(str);
} catch (const std::runtime_error& e) {
import_error("{}: {}", walk.path(), e.what());
}
})},
if_key{"transform",
require_array{"Expect an array of transforms"},
for_each{put_into(std::back_inserter(git.transforms), [](auto&& dat) {
try {
return fs_transformation::from_json(dat);
} catch (const semester::walk_error& e) {
import_error(e.what());
}
})}}});
return git;
}

package_info parse_pkg_json_v1(std::string_view name,
semver::version version,
std::string_view path,
const json5::data& pkg) {
using namespace semester::decompose_ops;
package_info
parse_pkg_json_v1(std::string_view name, semver::version version, const json5::data& data) {
package_info ret;
ret.ident = package_id{std::string{name}, version};

auto result = semester::decompose( //
pkg,
mapping{if_key{"description",
require_type<std::string>{
fmt::format("{}/description should be a string", path)},
put_into{ret.description}},
if_key{"depends",
require_obj{fmt::format("{}/depends must be a JSON object", path)},
[&](auto&& dep_obj) {
ret.deps = parse_deps_json_v1(dep_obj, fmt::format("{}/depends", path));
return accept;
}},
if_key{
"git",
require_obj{fmt::format("{}/git must be a JSON object", path)},
[&](auto&& git_obj) {
git_remote_listing git_remote;

auto r = semester::decompose(
git_obj,
mapping{
if_key{"url", put_into{git_remote.url}},
if_key{"ref", put_into{git_remote.ref}},
if_key{"auto-lib",
require_type<std::string>{
fmt::format("{}/git/auto-lib must be a string", path)},
[&](auto&& al) {
git_remote.auto_lib
= lm::split_usage_string(al.as_string());
return accept;
}},
reject_unknown_key(std::string(path) + "/git"),
});

if (git_remote.url.empty() || git_remote.ref.empty()) {
throw_user_error<errc::invalid_catalog_json>(
"{}/git requires both 'url' and 'ref' non-empty string properties",
path);
}

ret.remote = git_remote;
return r;
},
},
reject_unknown_key(path)});

if (std::holds_alternative<std::monostate>(ret.remote)) {
throw_user_error<
errc::invalid_catalog_json>("{}: Requires a remote listing (e.g. a 'git' proprety).",
path);
}
auto rej = std::get_if<semester::dc_reject_t>(&result);
if (rej) {
throw_user_error<errc::invalid_catalog_json>("{}: {}", path, rej->message);
using namespace semester::walk_ops;

std::string dep_name;
auto dep_range = semver::range::everything();
auto parse_dep_range = [&](const std::string& s) {
try {
return semver::range::parse_restricted(s);
} catch (const semver::invalid_range& e) {
import_error(std::string(walk.path()) + e.what());
}
};
auto make_dep = [&](auto&&) {
return dependency{dep_name, {dep_range.low(), dep_range.high()}};
};

auto check_one_remote = [&](auto&&) {
if (!semester::holds_alternative<std::monostate>(ret.remote)) {
return walk.reject("Cannot specify multiple remotes for a package");
}
return walk.pass;
};

auto add_dep = any_key{put_into(dep_name),
require_str{"Dependency should specify a version range string"},
put_into_pass{dep_range, parse_dep_range},
put_into{std::back_inserter(ret.deps), make_dep}};

walk(data,
mapping{if_key{"description",
require_str{"'description' should be a string"},
put_into{ret.description}},
if_key{"depends",
require_obj{"'depends' must be a JSON object"},
mapping{add_dep}},
if_key{
"git",
check_one_remote,
put_into(ret.remote, parse_git_remote),
}});

if (semester::holds_alternative<std::monostate>(ret.remote)) {
import_error("{}: Package listing for {} does not have any remote information",
walk.path(),
ret.ident.to_string());
}

return ret;
}

std::vector<package_info> parse_json_v1(const json5::data& data) {
using namespace semester::decompose_ops;
auto packages_it = data.as_object().find("packages");
if (packages_it == data.as_object().end() || !packages_it->second.is_object()) {
throw_user_error<errc::invalid_catalog_json>(
"Root JSON object requires a 'packages' property");
}

std::vector<package_info> acc_pkgs;

std::string_view pkg_name;
std::string_view pkg_version_str;

auto result = semester::decompose(
data,
mapping{
// Ignore the "version" key at this level
if_key{"version", just_accept},
if_key{
"packages",
mapping{any_key{
pkg_name,
[&](auto&& entry) {
if (!entry.is_object()) {
return reject(
fmt::format("/packages/{} must be a JSON object", pkg_name));
}
return pass;
},
mapping{any_key{
pkg_version_str,
[&](auto&& pkg_def) {
semver::version version;
try {
version = semver::version::parse(pkg_version_str);
} catch (const semver::invalid_version& e) {
throw_user_error<errc::invalid_catalog_json>(
"/packages/{} version string '{}' is invalid: {}",
pkg_name,
pkg_version_str,
e.what());
}
if (!pkg_def.is_object()) {
return reject(fmt::format("/packages/{}/{} must be a JSON object"));
}
auto pkg = parse_pkg_json_v1(pkg_name,
version,
fmt::format("/packages/{}/{}",
pkg_name,
pkg_version_str),
pkg_def);
acc_pkgs.emplace_back(std::move(pkg));
return accept;
},
}},
}},
},
reject_unknown_key("/"),
});

auto rej = std::get_if<semester::dc_reject_t>(&result);
if (rej) {
throw_user_error<errc::invalid_catalog_json>(rej->message);
}
std::string pkg_name;
semver::version pkg_version;
package_info dummy;

using namespace semester::walk_ops;

auto convert_pkg_obj
= [&](auto&& dat) { return parse_pkg_json_v1(pkg_name, pkg_version, dat); };

auto convert_version_str = [&](std::string_view str) {
try {
return semver::version::parse(str);
} catch (const semver::invalid_version& e) {
throw_user_error<errc::invalid_catalog_json>("{}: version string '{}' is invalid: {}",
walk.path(),
pkg_name,
str,
e.what());
}
};

auto import_pkg_versions
= walk_seq{require_obj{"Package entries must be JSON objects"},
mapping{any_key{put_into(pkg_version, convert_version_str),
require_obj{"Package+version entries must be JSON"},
put_into{std::back_inserter(acc_pkgs), convert_pkg_obj}}}};

auto import_pkgs = walk_seq{require_obj{"'packages' should be a JSON object"},
mapping{any_key{put_into(pkg_name), import_pkg_versions}}};

walk(data,
mapping{
if_key{"version", just_accept},
required_key{"packages", "'packages' should be an object of packages", import_pkgs},
});

return acc_pkgs;
}

@@ -237,9 +201,14 @@ std::vector<package_info> dds::parse_packages_json(std::string_view content) {

double version = version_it->second.as_number();

if (version == 1.0) {
return parse_json_v1(data);
} else {
throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'", version);
try {
if (version == 1.0) {
return parse_json_v1(data);
} else {
throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'",
version);
}
} catch (const semester::walk_error& e) {
throw_user_error<errc::invalid_catalog_json>(e.what());
}
}

+77 -2  src/dds/catalog/import.test.cpp

@@ -37,6 +37,30 @@ TEST_CASE("Valid/invalid package JSON5") {
// Objects in 'packages' should have version strings
"{version:1, packages:{foo:{'lol':{}}}}",
"{version:1, packages:{foo:{'1.2':{}}}}",
// No remote
"{version:1, packages:{foo:{'1.2.3':{}}}}",
// Bad empty git
"{version:1, packages:{foo:{'1.2.3':{git:{}}}}}",
// Git `url` and `ref` should be a string
"{version:1, packages:{foo:{'1.2.3':{git:{url:2, ref:''}}}}}",
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:2}}}}}",
// 'auto-lib' should be a usage string
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':3}}}}}",
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'ffasdf'}}}}}",
// 'transform' should be an array
R"(
{
version: 1,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: 'lol hi',
}
}}}
}
)",
};

for (auto bad : bads) {
@@ -50,6 +74,49 @@ TEST_CASE("Valid/invalid package JSON5") {
"{version:1, packages:{}}",
// No versions for 'foo' is weird, but okay
"{version:1, packages:{foo:{}}}",
// Basic package with minimum info:
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:''}}}}}",
// Minimal auto-lib:
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'a/b'}}}}}",
// Empty transforms:
R"(
{
version: 1,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: [],
}
}}}
}
)",
// Basic transform:
R"(
{
version: 1,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: [{
copy: {
from: 'here',
to: 'there',
include: [
"*.c",
"*.cpp",
"*.h",
'*.txt'
]
}
}],
}
}}}
}
)",
};
for (auto good : goods) {
INFO("Parse: " << good);
@@ -66,14 +133,22 @@ TEST_CASE("Check a single object") {
'1.2.3': {
git: {
url: 'foo',
ref: 'fasdf'
ref: 'fasdf',
'auto-lib': 'a/b',
}
}
}
}
})");
CHECK(pkgs.size() == 1);
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].ident.name == "foo");
CHECK(pkgs[0].ident.to_string() == "foo@1.2.3");
CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote));

auto git = std::get<dds::git_remote_listing>(pkgs[0].remote);
CHECK(git.url == "foo");
CHECK(git.ref == "fasdf");
REQUIRE(git.auto_lib);
CHECK(git.auto_lib->namespace_ == "a");
CHECK(git.auto_lib->name == "b");
}

+0 -2  src/dds/catalog/package_info.hpp

@@ -20,8 +20,6 @@ struct package_info {
std::string description;

std::variant<std::monostate, git_remote_listing> remote;

std::vector<fs_transformation> transforms;
};

} // namespace dds

+3 -0  src/dds/catalog/remote/git.hpp

@@ -2,6 +2,7 @@

#include <dds/catalog/get.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/fs_transform.hpp>

#include <libman/package.hpp>

@@ -15,6 +16,8 @@ struct git_remote_listing {
std::string ref;
std::optional<lm::usage> auto_lib;

std::vector<fs_transformation> transforms;

void clone(path_ref path) const;
};


+1 -0  src/dds/error/errors.hpp

@@ -24,6 +24,7 @@ enum class errc {
no_catalog_remote_info,

git_clone_failure,
invalid_repo_transform,
sdist_ident_mismatch,
sdist_exists,


+329 -0  src/dds/util/fs_transform.cpp

@@ -0,0 +1,329 @@
#include "./fs_transform.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/fs.hpp>

#include <range/v3/algorithm/any_of.hpp>
#include <range/v3/distance.hpp>
#include <range/v3/numeric/accumulate.hpp>
#include <semester/walk.hpp>

#include <nlohmann/json.hpp>

#include <iostream>

using namespace dds;

using require_obj = semester::require_type<json5::data::mapping_type>;
using require_array = semester::require_type<json5::data::array_type>;
using require_str = semester::require_type<std::string>;

dds::fs_transformation dds::fs_transformation::from_json(const json5::data& data) {
fs_transformation ret;
using namespace semester::walk_ops;

auto prep_optional = [](auto& opt) {
return [&](auto&&) {
opt.emplace();
return walk.pass;
};
};

auto str_to_path = [](std::string const& s) {
auto p = fs::path(s);
if (p.is_absolute()) {
throw semester::walk_error(std::string(walk.path())
+ ": Only relative paths are accepted");
}
return p;
};

auto get_strip_components = [](double d) {
if (d != double(int(d)) || d < 0) {
throw semester::walk_error(std::string(walk.path()) + ": "
+ "'strip-components' should be a positive whole number");
}
return int(d);
};

auto populate_globs = [&](std::vector<dds::glob>& globs) {
return for_each{
require_str{"Include/exclude list should be a list of globs"},
put_into(std::back_inserter(globs),
[](const std::string& glob) {
try {
return dds::glob::compile(glob);
} catch (const std::runtime_error& e) {
throw semester::walk_error{std::string(walk.path()) + ": " + e.what()};
}
}),
};
};

auto populate_reloc = [&](auto& op) {
return [&](auto&& dat) {
op.emplace();
return mapping{
required_key{"from",
"a 'from' path is required",
require_str{"'from' should be a path string"},
put_into(op->from, str_to_path)},
required_key{"to",
"a 'to' path is required",
require_str{"'to' should be a path string"},
put_into(op->to, str_to_path)},
if_key{"strip-components",
require_type<double>{"'strip-components' should be an integer"},
put_into(op->strip_components, get_strip_components)},
if_key{"include",
require_array{"'include' should be an array"},
populate_globs(op->include)},
if_key{"exclude",
require_array{"'exclude' should be an array"},
populate_globs(op->exclude)},
}(dat);
};
};

walk(data,
require_obj{"Each transform must be a JSON object"},
mapping{
if_key{"copy", populate_reloc(ret.copy)},
if_key{"move", populate_reloc(ret.move)},
if_key{"remove",
require_obj{"'remove' should be a JSON object"},
prep_optional(ret.remove),
mapping{
required_key{"path",
"'path' is required",
require_str{"'path' should be a string path to remove"},
put_into(ret.remove->path, str_to_path)},
if_key{"only-matching",
require_array{"'only-matching' should be an array of globs"},
populate_globs(ret.remove->only_matching)},
}},
if_key{"write",
require_obj{"'write' should be a JSON object"},
prep_optional(ret.write),
mapping{
required_key{"path",
"'path' is required",
require_str{"'path' should be a string path to write to"},
put_into(ret.write->path, str_to_path)},
required_key{"content",
"'content' is required",
require_str{"'content' must be a string"},
put_into(ret.write->content)},
}},
});

return ret;
}

namespace {

bool matches_any(path_ref path, const std::vector<glob>& globs) {
return std::any_of(globs.begin(), globs.end(), [&](auto&& gl) { return gl.match(path); });
}

bool parent_dir_of(fs::path root, fs::path child) {
auto root_str = (root += "/").lexically_normal().generic_string();
auto child_str = (child += "/").lexically_normal().generic_string();
return child_str.find(root_str) == 0;
}

void do_relocate(const dds::fs_transformation::copy_move_base& oper,
dds::path_ref root,
bool is_copy) {
auto from = fs::weakly_canonical(root / oper.from);
auto to = fs::weakly_canonical(root / oper.to);
if (!parent_dir_of(root, from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to copy/move a file/directory from outside of the "
"root [{}] into the root [{}].",
from.string(),
root.string());
}
if (!parent_dir_of(root, to)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to copy/move a file/directory [{}] to a "
"destination outside of the restricted root [{}].",
to.string(),
root.string());
}

if (!fs::exists(from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempting to copy/move a non-existint file/directory [{}] "
"to [{}].",
from.string(),
to.string());
}

fs::create_directories(to.parent_path());

if (fs::is_regular_file(from)) {
if (is_copy) {
fs::copy_file(from, to, fs::copy_options::overwrite_existing);
} else {
safe_rename(from, to);
}
return;
}

for (auto item : fs::recursive_directory_iterator(from)) {
auto relpath = fs::relative(item, from);
auto matches_glob = [&](auto glob) { return glob.match(relpath.string()); };
auto included = oper.include.empty() || ranges::any_of(oper.include, matches_glob);
auto excluded = ranges::any_of(oper.exclude, matches_glob);
if (!included || excluded) {
continue;
}

auto n_components = ranges::distance(relpath);
if (n_components <= oper.strip_components) {
continue;
}

auto it = relpath.begin();
std::advance(it, oper.strip_components);
relpath = ranges::accumulate(it, relpath.end(), fs::path(), std::divides<>());

auto dest = to / relpath;
fs::create_directories(dest.parent_path());
if (item.is_directory()) {
fs::create_directories(dest);
} else {
if (is_copy) {
fs::copy_file(item, dest, fs::copy_options::overwrite_existing);
} else {
safe_rename(item, dest);
}
}
}
}

void do_remove(const struct fs_transformation::remove& oper, path_ref root) {
auto from = fs::weakly_canonical(root / oper.path);
if (!parent_dir_of(root, from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to deletes files/directories outside of the "
"root. Attempted to remove [{}]. Removal is restricted to [{}].",
from.string(),
root.string());
}

if (!fs::exists(from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to delete a non-existint file/directory [{}].",
from.string());
}

if (fs::is_directory(from)) {
for (auto child : fs::recursive_directory_iterator{from}) {
if (child.is_directory()) {
continue;
}
if (!oper.only_matching.empty() && !matches_any(child, oper.only_matching)) {
continue;
}
fs::remove_all(child);
}
} else {
fs::remove_all(from);
}
}

void do_write(const struct fs_transformation::write& oper, path_ref root) {
auto dest = fs::weakly_canonical(root / oper.path);
if (!parent_dir_of(root, dest)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation is trying to write outside of the root. Attempted to write "
"to [{}]. Writing is restricted to [{}].",
dest.string(),
root.string());
}

std::cout << "Write content: " << oper.content;

auto of = dds::open(dest, std::ios::binary | std::ios::out);
of << oper.content;
}

} // namespace

void dds::fs_transformation::apply_to(dds::path_ref root) const {
if (copy) {
do_relocate(*copy, root, true);
}
if (move) {
do_relocate(*move, root, false);
}
if (remove) {
do_remove(*remove, root);
}
if (write) {
do_write(*write, root);
}
}

namespace {

nlohmann::json reloc_as_json(const fs_transformation::copy_move_base& oper) {
auto obj = nlohmann::json::object();
obj["from"] = oper.from.string();
obj["to"] = oper.to.string();

obj["strip-components"] = oper.strip_components;

auto inc_list = nlohmann::json::array();
for (auto& inc : oper.include) {
inc_list.push_back(inc.string());
}

auto exc_list = nlohmann::json::array();
for (auto& exc : oper.exclude) {
exc_list.push_back(exc.string());
}

if (!inc_list.empty()) {
obj["include"] = inc_list;
}
if (!exc_list.empty()) {
obj["exclude"] = exc_list;
}

return obj;
}

} // namespace

std::string fs_transformation::as_json() const noexcept {
auto obj = nlohmann::json::object();
if (copy) {
obj["copy"] = reloc_as_json(*copy);
}
if (move) {
obj["move"] = reloc_as_json(*move);
}
if (remove) {
auto rm = nlohmann::json::object();
rm["path"] = remove->path.string();
if (!remove->only_matching.empty()) {
auto if_arr = nlohmann::json::array();
for (auto&& gl : remove->only_matching) {
if_arr.push_back(gl.string());
}
rm["only-matching"] = rm;
}
obj["remove"] = rm;
}
if (write) {
auto wr = nlohmann::json::object();
wr["path"] = write->path.string();
wr["content"] = write->content;
obj["write"] = wr;
}

return to_string(obj);
}

+16 -4  src/dds/util/fs_transform.hpp

@@ -3,12 +3,14 @@
#include "./fs.hpp"
#include "./glob.hpp"

#include <json5/data.hpp>

#include <optional>
#include <variant>

namespace dds {

class fs_transformation {
struct fs_transformation {
struct copy_move_base {
fs::path from;
fs::path to;
@@ -27,11 +29,21 @@ class fs_transformation {
std::vector<dds::glob> only_matching;
};

std::optional<struct copy> copy;
std::optional<struct move> move;
std::optional<remove> remove;
struct write {
fs::path path;
std::string content;
};

std::optional<struct copy> copy;
std::optional<struct move> move;
std::optional<remove> remove;
std::optional<struct write> write;

void apply_to(path_ref root) const;

static fs_transformation from_json(const json5::data&);

std::string as_json() const noexcept;
};

} // namespace dds

+1 -1  src/dds/util/glob.cpp

@@ -144,6 +144,7 @@ dds::detail::glob_impl compile_glob_expr(std::string_view pattern) {
using namespace dds::detail;

glob_impl acc{};
acc.spelling = std::string(pattern);

while (!pattern.empty()) {
const auto next_slash = pattern.find('/');
@@ -165,7 +166,6 @@ dds::detail::glob_impl compile_glob_expr(std::string_view pattern) {
throw std::runtime_error("Invalid path glob expression (Must not be empty!)");
}

acc.spelling = std::string(pattern);
return acc;
}


+5 -0  tests/deps/use-catch2/gcc.tc.jsonc

@@ -0,0 +1,5 @@
{
"compiler_id": 'gnu',
"cxx_version": 'c++17',
"cxx_compiler": 'g++-9',
}

+3 -0  tests/deps/use-catch2/msvc.tc.jsonc

@@ -0,0 +1,3 @@
{
"compiler_id": 'msvc',
}

+43 -0  tests/deps/use-catch2/project/catalog.json5

@@ -0,0 +1,43 @@
{
"version": 1,
"packages": {
"catch2": {
"2.12.4": {
"git": {
"url": "https://github.com/catchorg/Catch2.git",
"ref": "v2.12.4",
"auto-lib": "catch2/catch2",
"transform": [
{
"move": {
"from": "include",
"to": "include/catch2",
}
},
{
"copy": {
"from": "include",
"to": "src"
},
write: {
path: 'include/catch2/catch_with_main.hpp',
content: '\
#pragma once \n\
\n\
#define CATCH_CONFIG_MAIN \n\
#include "./catch.hpp" \n\
\n\
namespace Catch { \n\
\n\
CATCH_REGISTER_REPORTER("console", ConsoleReporter) \n\
\n\
} // namespace Catch \n\
'
}
}
]
}
}
}
}
}

+4 -0  tests/deps/use-catch2/project/library.json5

@@ -0,0 +1,4 @@
{
name: 'use-catch2',
uses: ['catch2/catch2']
}

+8 -0  tests/deps/use-catch2/project/package.json5

@@ -0,0 +1,8 @@
{
name: 'use-catch2',
version: '1.0.0',
namespace: 'test',
depends: {
'catch2': '2.12.4'
}
}

+6 -0  tests/deps/use-catch2/project/src/use-catch2.main.cpp

@@ -0,0 +1,6 @@
#include <catch2/catch_with_main.hpp>

TEST_CASE("I am a simple test case") {
CHECK((2 + 2) == 4);
CHECK_FALSE((2 + 2) == 5);
}

+11 -0  tests/deps/use-catch2/test_use_catch2.py

@@ -0,0 +1,11 @@
from tests import DDS

from dds_ci import proc


def test_get_build_use_catch2(dds: DDS):
dds.catalog_import(dds.source_root / 'catalog.json5')
tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc'
tc = str(dds.test_dir / tc_fname)
dds.build(toolchain=tc)
proc.check_run((dds.build_dir / 'use-catch2').with_suffix(dds.exe_suffix))

+7 -0  tests/deps/use-cryptopp/gcc.tc.jsonc

@@ -0,0 +1,7 @@
{
"compiler_id": 'gnu',
"cxx_version": 'c++17',
"cxx_compiler": 'g++-9',
"flags": '-march=native',
"link_flags": '-static-libgcc -static-libstdc++'
}

+3 -0  tests/deps/use-cryptopp/msvc.tc.jsonc

@@ -0,0 +1,3 @@
{
"compiler_id": 'msvc',
}

+27 -0  tests/deps/use-cryptopp/project/catalog.json

@@ -0,0 +1,27 @@
{
"version": 1,
"packages": {
"cryptopp": {
"8.2.0": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0",
"auto-lib": "cryptopp/cryptopp",
"transform": [
{
"copy": {
"from": ".",
"to": "src/cryptopp",
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
}
]
}
}
}
}
}

+4 -0  tests/deps/use-cryptopp/project/library.json5

@@ -0,0 +1,4 @@
{
name: 'use-cryptopp',
uses: ['cryptopp/cryptopp']
}

+8 -0  tests/deps/use-cryptopp/project/package.json5

@@ -0,0 +1,8 @@
{
name: 'use-cryptopp',
version: '1.0.0',
namespace: 'test',
depends: {
'cryptopp': '8.2.0'
}
}

+17 -0  tests/deps/use-cryptopp/project/src/use-cryptopp.main.cpp

@@ -0,0 +1,17 @@
#include <cryptopp/osrng.h>

#include <string>

int main() {
std::string arr;
arr.resize(256);
CryptoPP::OS_GenerateRandomBlock(false,
reinterpret_cast<CryptoPP::byte*>(arr.data()),
arr.size());
for (auto b : arr) {
if (b != '\x00') {
return 0;
}
}
return 1;
}

+12 -0  tests/deps/use-cryptopp/test_use_cryptopp.py

@@ -0,0 +1,12 @@
from tests import DDS

from dds_ci import proc


def test_get_build_use_cryptopp(dds: DDS):
dds.catalog_import(dds.source_root / 'catalog.json')
tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc'
tc = str(dds.test_dir / tc_fname)
dds.build(toolchain=tc)
proc.check_run(
(dds.build_dir / 'use-cryptopp').with_suffix(dds.exe_suffix))

+34 -27  tests/deps/use-libsodium/project/catalog.json

@@ -6,34 +6,41 @@
"git": {
"url": "https://github.com/jedisct1/libsodium.git",
"ref": "1.0.18",
"auto-lib": "sodium/sodium"
},
"transform": [
{
"move": {
"from": "src/libsodium/include",
"to": "include/"
"auto-lib": "sodium/sodium",
"transform": [
{
"move": {
"from": "src/libsodium/include",
"to": "include/"
}
},
{
"copy": {
"from": "builds/msvc/version.h",
"to": "include/sodium/version.h"
}
},
{
"copy": {
"from": "include/sodium",
"to": "src/libsodium"
},
"move": {
"from": "src/libsodium",
"to": "src_root"
},
"remove": {
"path": "src"
}
},
{
"move": {
"from": "src_root",
"to": "src"
}
}
},
{
"copy": {
"from": "builds/msvc/version.h",
"to": "include/sodium/version.h"
}
},
{
"copy": {
"from": "include/sodium",
"to": "src/libsodium"
}
},
{
"move": {
"from": "src/libsodium",
"to": "src"
}
}
]
]
}
}
}
}

+25 -2  tools/gen-catalog-json.py

@@ -10,11 +10,13 @@ class Git(NamedTuple):
auto_lib: Optional[str] = None

def to_dict(self) -> dict:
return {
d = {
'url': self.url,
'ref': self.ref,
'auto-lib': self.auto_lib,
}
if self.auto_lib:
d['auto-lib'] = self.auto_lib
return d


RemoteInfo = Union[Git]
@@ -135,6 +137,9 @@ packages = [
'0.2.0',
'0.2.1',
'0.2.2',
'0.3.0',
'0.3.1',
'0.3.2',
),
description=
'Minimal C++ concepts library. Contains many definitions from C++20.',
@@ -192,6 +197,24 @@ packages = [
'neo-fun': '^0.1.1',
'neo-concepts': '^0.2.2',
}),
Version(
'0.2.0',
description='A C++ library to process recursive dynamic data',
remote=Git('https://github.com/vector-of-bool/semester.git',
'0.2.0'),
depends={
'neo-fun': '^0.3.2',
'neo-concepts': '^0.3.2',
}),
Version(
'0.2.1',
description='A C++ library to process recursive dynamic data',
remote=Git('https://github.com/vector-of-bool/semester.git',
'0.2.1'),
depends={
'neo-fun': '^0.3.2',
'neo-concepts': '^0.3.2',
}),
]),
Package('ctre', [
Version(
