
Remove JSON catalog and FS transforms.

This significantly simplifies some code and removes a lot of hacks.
The proper way to obtain packages is now from an HTTP repository.
The new mkrepo.py script uses the contents of the old catalog.json to
populate a dds repository. It is also used by the test suite to spawn
repositories as test fixtures.
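
A minimal sketch of the new workflow, using only the commands exercised by the test fixtures in this commit (the repository directory, port, and URL are illustrative):

    # Initialize an empty dds repository, populate it from an old-style
    # catalog.json via mkrepo.py, then register the served repository.
    import subprocess, sys

    subprocess.check_call(['dds', 'repoman', 'init', 'my-repo'])
    subprocess.check_call([
        sys.executable, 'tools/mkrepo.py',
        '--dir=my-repo',        # repository directory to populate
        '--spec=catalog.json',  # old catalog.json contents
    ])
    # After serving 'my-repo' over HTTP (e.g. with tests/http.py):
    subprocess.check_call(['dds', 'repo', 'add', 'http://localhost:8000', '--update'])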
Branch: default_compile_flags
vector-of-bool, 4 years ago
Commit: 74826991a1
29 changed files with 787 additions and 1335 deletions

 1. src/dds.main.cpp (+0 -30)
 2. src/dds/catalog/catalog.cpp (+11 -76)
 3. src/dds/catalog/catalog.hpp (+0 -6)
 4. src/dds/catalog/catalog.test.cpp (+3 -29)
 5. src/dds/catalog/get.cpp (+0 -1)
 6. src/dds/catalog/import.cpp (+0 -212)
 7. src/dds/catalog/import.test.cpp (+0 -136)
 8. src/dds/catalog/package_info.hpp (+0 -1)
 9. src/dds/catalog/remote/base.cpp (+0 -6)
10. src/dds/catalog/remote/base.hpp (+1 -5)
11. src/dds/util/fs_transform.cpp (+0 -445)
12. src/dds/util/fs_transform.hpp (+0 -65)
13. tests/__init__.py (+2 -1)
14. tests/catalog/get_test.py (+38 -42)
15. tests/catalog/import_test.py (+0 -89)
16. tests/conftest.py (+1 -0)
17. tests/dds.py (+1 -10)
18. tests/deps/build-deps/project/catalog.json (+6 -1)
19. tests/deps/build-deps/test_build_deps.py (+10 -6)
20. tests/deps/do_test.py (+7 -4)
21. tests/deps/git-remote/catalog.json (+13 -2)
22. tests/deps/use-cryptopp/project/catalog.json (+19 -13)
23. tests/deps/use-cryptopp/test_use_cryptopp.py (+6 -7)
24. tests/deps/use-remote/catalog.json (+6 -1)
25. tests/deps/use-spdlog/project/catalog.json (+7 -2)
26. tests/deps/use-spdlog/use_spdlog_test.py (+4 -2)
27. tests/http.py (+105 -0)
28. tools/gen-catalog-json.py (+125 -143)
29. tools/mkrepo.py (+422 -0)

src/dds.main.cpp (+0 -30)

      }
  } create{*this};

- struct {
-     cli_catalog& parent;
-     args::Command cmd{parent.cat_group, "import", "Import entries into a catalog"};
-     common_flags _common{cmd};
-
-     catalog_path_flag cat_path{cmd};
-
-     args::Flag import_stdin{cmd, "stdin", "Import JSON from stdin", {"stdin"}};
-     args::ValueFlagList<std::string>
-         json_paths{cmd,
-                    "json",
-                    "Import catalog entries from the given JSON files",
-                    {"json", 'j'}};
-
-     int run() {
-         auto cat = cat_path.open();
-         for (const auto& json_fpath : json_paths.Get()) {
-             cat.import_json_file(json_fpath);
-         }
-         if (import_stdin.Get()) {
-             std::ostringstream strm;
-             strm << std::cin.rdbuf();
-             cat.import_json_str(strm.str());
-         }
-         return 0;
-     }
- } import{*this};
-
  struct {
      cli_catalog& parent;
      args::Command cmd{parent.cat_group, "get", "Obtain an sdist from a catalog listing"};

  int run() {
      if (create.cmd) {
          return create.run();
-     } else if (import.cmd) {
-         return import.run();
      } else if (get.cmd) {
          return get.run();
      } else if (add.cmd) {

src/dds/catalog/catalog.cpp (+11 -76)

      remote_id INTEGER
          REFERENCES dds_cat_remotes
          ON DELETE CASCADE,
-     repo_transform TEXT NOT NULL DEFAULT '[]',
      UNIQUE (name, version, remote_id)
  );

      name,
      version,
      description,
-     remote_url,
-     repo_transform)
+     remote_url)
  SELECT pkg_id,
         name,
         version,
      WHEN lm_name ISNULL THEN ''
      ELSE ('?lm=' || lm_namespace || '/' || lm_name)
      END
-     ) || '#' || git_ref,
-     repo_transform
+     ) || '#' || git_ref
  FROM dds_cat_pkgs;

  CREATE TABLE dds_cat_pkg_deps_new (
  )");
  }

- std::string transforms_to_json(const std::vector<fs_transformation>& trs) {
-     std::string acc = "[";
-     for (auto it = trs.begin(); it != trs.end(); ++it) {
-         acc += it->as_json();
-         if (std::next(it) != trs.end()) {
-             acc += ", ";
-         }
-     }
-     return acc + "]";
- }
-
  void store_with_remote(const neo::sqlite3::statement_cache&,
                         const package_info& pkg,
                         std::monostate) {
      name,
      version,
      remote_url,
-     description,
-     repo_transform
- ) VALUES (?1, ?2, ?3, ?4, ?5)
+     description
+ ) VALUES (?1, ?2, ?3, ?4)
  )"_sql),
  pkg.ident.name,
  pkg.ident.version.to_string(),
  http.url,
- pkg.description,
- transforms_to_json(http.transforms));
+ pkg.description);
  }

  void store_with_remote(neo::sqlite3::statement_cache& stmts,
      name,
      version,
      remote_url,
-     description,
-     repo_transform
+     description
  ) VALUES (
      ?1,
      ?2,
      ?3,
-     ?4,
-     ?5
+     ?4
  )
  )"_sql),
  pkg.ident.name,
  pkg.ident.version.to_string(),
  url,
- pkg.description,
- transforms_to_json(git.transforms));
+ pkg.description);
  }

  void do_store_pkg(neo::sqlite3::database& db,
      exec(db.prepare("UPDATE dds_cat_meta SET meta=?"), meta.dump());
  }

- void check_json(bool b, std::string_view what) {
-     if (!b) {
-         throw_user_error<errc::invalid_catalog_json>("Catalog JSON is invalid: {}", what);
-     }
- }

  } // namespace

  catalog catalog::open(const std::string& db_path) {
      name,
      version,
      remote_url,
-     description,
-     repo_transform
+     description
  FROM dds_cat_pkgs
  WHERE name = ?1 AND version = ?2
  ORDER BY pkg_id DESC
  pk_id.to_string(),
  nsql::error_category().message(int(ec)));

- const auto& [pkg_id, name, version, remote_url, description, repo_transform]
-     = st.row()
-           .unpack<std::int64_t,
-                   std::string,
-                   std::string,
-                   std::string,
-                   std::string,
-                   std::string>();
+ const auto& [pkg_id, name, version, remote_url, description]
+     = st.row().unpack<std::int64_t, std::string, std::string, std::string, std::string>();

  ec = st.step(std::nothrow);
  if (ec == nsql::errc::row) {
  parse_remote_url(remote_url),
  };

- if (!repo_transform.empty()) {
-     // Transforms are stored in the DB as JSON strings. Convert them back to real objects.
-     auto tr_data = json5::parse_data(repo_transform);
-     check_json(tr_data.is_array(),
-                fmt::format("Database record for {} has an invalid 'repo_transform' field [1]",
-                            pkg_id));
-     for (const auto& el : tr_data.as_array()) {
-         check_json(
-             el.is_object(),
-             fmt::format("Database record for {} has an invalid 'repo_transform' field [2]",
-                         pkg_id));
-         auto tr = fs_transformation::from_json(el);
-         std::visit(
-             [&](auto& remote) {
-                 if constexpr (neo::alike<decltype(remote), std::monostate>) {
-                     // Do nothing
-                 } else {
-                     remote.transforms.push_back(std::move(tr));
-                 }
-             },
-             info.remote);
-     }
- }
  return info;
  }

  }) //
  | ranges::to_vector;
  }

- void catalog::import_json_str(std::string_view content) {
-     dds_log(trace, "Importing JSON string into catalog");
-     auto pkgs = parse_packages_json(content);
-
-     nsql::transaction_guard tr{_db};
-     for (const auto& pkg : pkgs) {
-         do_store_pkg(_db, _stmt_cache, pkg);
-     }
- }

src/dds/catalog/catalog.hpp (+0 -6)

  std::vector<package_id> by_name(std::string_view sv) const noexcept;
  std::vector<dependency> dependencies_of(const package_id& pkg) const noexcept;

- void import_json_str(std::string_view json_str);
- void import_json_file(path_ref json_path) {
-     auto content = dds::slurp_file(json_path);
-     import_json_str(content);
- }

  auto& database() noexcept { return _db; }
  auto& database() const noexcept { return _db; }
  };

src/dds/catalog/catalog.test.cpp (+3 -29)

      dds::package_id("foo", semver::version::parse("1.2.3")),
      {},
      "example",
-     dds::git_remote_listing{std::nullopt, {}, "git+http://example.com", "master"},
+     dds::git_remote_listing{std::nullopt, "git+http://example.com", "master"},
  });

  auto pkgs = db.by_name("foo");
      dds::package_id("foo", semver::version::parse("1.2.3")),
      {},
      "example",
-     dds::git_remote_listing{std::nullopt, {}, "git+http://example.com", "develop"},
+     dds::git_remote_listing{std::nullopt, "git+http://example.com", "develop"},
  }));
  // The previous pkg_id is still a valid lookup key
  info = db.get(pkgs[0]);
      {"baz", {semver::version::parse("5.3.0"), semver::version::parse("6.0.0")}},
      },
      "example",
-     dds::git_remote_listing{std::nullopt, {}, "git+http://example.com", "master"},
+     dds::git_remote_listing{std::nullopt, "git+http://example.com", "master"},
  });
  auto pkgs = db.by_name("foo");
  REQUIRE(pkgs.size() == 1);
  CHECK(deps[0].name == "bar");
  CHECK(deps[1].name == "baz");
  }

- TEST_CASE_METHOD(catalog_test_case, "Parse JSON repo") {
-     db.import_json_str(R"({
-         "version": 2,
-         "packages": {
-             "foo": {
-                 "1.2.3": {
-                     "depends": [
-                         "bar~4.2.1"
-                     ],
-                     url: "git+http://example.com#master"
-                 }
-             }
-         }
-     })");
-     auto pkgs = db.by_name("foo");
-     REQUIRE(pkgs.size() == 1);
-     CHECK(pkgs[0].name == "foo");
-     CHECK(pkgs[0].version == semver::version::parse("1.2.3"));
-     auto deps = db.dependencies_of(pkgs[0]);
-     REQUIRE(deps.size() == 1);
-     CHECK(deps[0].name == "bar");
-     CHECK(deps[0].versions
-           == dds::version_range_set{semver::version::parse("4.2.1"),
-                                     semver::version::parse("4.3.0")});
- }

src/dds/catalog/get.cpp (+0 -1)

  auto tmpdir = dds::temporary_dir::create();

  remote.pull_source(tmpdir.path());
- remote.apply_transforms(tmpdir.path());
  remote.generate_auto_lib_files(listing.ident, tmpdir.path());

  dds_log(info, "Create sdist ...");

src/dds/catalog/import.cpp (+0 -212)

#include "./import.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/log.hpp>

#include <fmt/core.h>
#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/url.hpp>
#include <semester/walk.hpp>

#include <optional>

using namespace dds;

template <typename KeyFunc, typename... Args>
struct any_key {
KeyFunc _key_fn;
semester::walk_seq<Args...> _seq;

any_key(KeyFunc&& kf, Args&&... args)
: _key_fn(kf)
, _seq(NEO_FWD(args)...) {}

template <typename Data>
semester::walk_result operator()(std::string_view key, Data&& dat) {
auto res = _key_fn(key);
if (res.rejected()) {
return res;
}
return _seq.invoke(NEO_FWD(dat));
}
};

template <typename KF, typename... Args>
any_key(KF&&, Args&&...) -> any_key<KF, Args...>;

namespace {

using require_obj = semester::require_type<json5::data::mapping_type>;
using require_array = semester::require_type<json5::data::array_type>;
using require_str = semester::require_type<std::string>;

template <typename... Args>
[[noreturn]] void import_error(Args&&... args) {
throw_user_error<dds::errc::invalid_catalog_json>(NEO_FWD(args)...);
}

auto make_dep = [](std::string const& str) {
using namespace semester::walk_ops;
try {
return dependency::parse_depends_string(str);
} catch (std::runtime_error const& e) {
import_error(std::string(walk.path()) + e.what());
}
};

auto convert_version_str = [](std::string_view str) {
using namespace semester::walk_ops;
try {
return semver::version::parse(str);
} catch (const semver::invalid_version& e) {
import_error("{}: version string '{}' is invalid: {}", walk.path(), str, e.what());
}
};

auto parse_remote = [](const std::string& str) {
using namespace semester::walk_ops;
try {
return parse_remote_url(str);
} catch (const neo::url_validation_error& e) {
import_error("{}: Invalid URL [{}]: {}", walk.path(), str, e.what());
} catch (const user_error<errc::invalid_remote_url>& e) {
import_error("{}: Invalid URL: {}", walk.path(), e.what());
}
};

auto parse_fs_transforms = [](auto&& tr_vec) {
using namespace semester::walk_ops;
return walk_seq{
require_array{"Expect an array of transforms"},
for_each{
put_into(std::back_inserter(tr_vec),
[&](auto&& dat) {
try {
return fs_transformation::from_json(dat);
} catch (const semester::walk_error& e) {
import_error(e.what());
}
}),
},
};
};

package_info
parse_pkg_json_v2(std::string_view name, semver::version version, const json5::data& data) {
package_info ret;
ret.ident = package_id{std::string{name}, version};
std::vector<fs_transformation> fs_trs;

using namespace semester::walk_ops;

auto check_one_remote = [&](auto&&) {
if (!semester::holds_alternative<std::monostate>(ret.remote)) {
return walk.reject("Cannot specify multiple remotes for a package");
}
return walk.pass;
};

walk(data,
mapping{if_key{"description",
require_str{"'description' should be a string"},
put_into{ret.description}},
if_key{"depends",
require_array{"'depends' must be an array of dependency strings"},
for_each{require_str{"Each dependency should be a string"},
put_into{std::back_inserter(ret.deps), make_dep}}},
if_key{
"url",
require_str{"Remote URL should be a string"},
check_one_remote,
put_into(ret.remote, parse_remote),
},
if_key{"transform", parse_fs_transforms(fs_trs)}});

if (semester::holds_alternative<std::monostate>(ret.remote)) {
import_error("{}: Package listing for {} does not have any remote information",
walk.path(),
ret.ident.to_string());
}

if (semester::holds_alternative<git_remote_listing>(ret.remote)) {
semester::get<git_remote_listing>(ret.remote).transforms = std::move(fs_trs);
} else {
if (!fs_trs.empty()) {
throw_user_error<errc::invalid_catalog_json>(
"{}: Filesystem transforms are not supported for this remote type", walk.path());
}
}

return ret;
}

std::vector<package_info> parse_json_v2(const json5::data& data) {
std::vector<package_info> acc_pkgs;

std::string pkg_name;
semver::version pkg_version;
package_info dummy;

using namespace semester::walk_ops;

auto convert_pkg_obj
= [&](auto&& dat) { return parse_pkg_json_v2(pkg_name, pkg_version, dat); };

auto import_pkg_versions
= walk_seq{require_obj{"Package entries must be JSON objects"},
mapping{any_key{put_into(pkg_version, convert_version_str),
require_obj{"Package+version entries must be JSON"},
put_into{std::back_inserter(acc_pkgs), convert_pkg_obj}}}};

auto import_pkgs = walk_seq{require_obj{"'packages' should be a JSON object"},
mapping{any_key{put_into(pkg_name), import_pkg_versions}}};

walk(data,
mapping{
if_key{"version", just_accept},
required_key{"packages", "'packages' should be an object of packages", import_pkgs},
});

return acc_pkgs;
}

} // namespace

std::vector<package_info> dds::parse_packages_json(std::string_view content) {
json5::data data;
try {
dds_log(trace, "Parsing packages JSON data: {}", content);
data = json5::parse_data(content);
} catch (const json5::parse_error& e) {
throw_user_error<errc::invalid_catalog_json>("JSON5 syntax error: {}", e.what());
}

if (!data.is_object()) {
throw_user_error<errc::invalid_catalog_json>("Root of import JSON must be a JSON object");
}

auto& data_obj = data.as_object();
auto version_it = data_obj.find("version");
if (version_it == data_obj.end() || !version_it->second.is_number()) {
throw_user_error<errc::invalid_catalog_json>(
"Root JSON import requires a 'version' property");
}

double version = version_it->second.as_number();

try {
if (version == 1.0) {
throw_user_error<errc::invalid_catalog_json>(
"Support for catalog JSON v1 has been removed");
} else if (version == 2.0) {
dds_log(trace, "Processing JSON data as v2 data");
return parse_json_v2(data);
} else {
throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'",
version);
}
} catch (const semester::walk_error& e) {
throw_user_error<errc::invalid_catalog_json>(e.what());
}
}

src/dds/catalog/import.test.cpp (+0 -136)

#include "./import.hpp"

#include <dds/error/errors.hpp>

#include <catch2/catch.hpp>

TEST_CASE("An empty import is okay") {
// An empty JSON with no packages in it
auto pkgs = dds::parse_packages_json("{version: 2, packages: {}}");
CHECK(pkgs.empty());
}

TEST_CASE("Valid/invalid package JSON5") {
std::string_view bads[] = {
// Invalid JSON:
"",
// Should be an object
"[]",
// Missing keys
"{}",
// Missing "packages"
"{version: 2}",
// Bad version
"{version: 2.7, packages: {}}",
"{version: [], packages: {}}",
"{version: null, packages: {}}",
// 'packages' should be an object
"{version: 2, packages: []}",
"{version: 2, packages: null}",
"{version: 2, packages: 4}",
"{version: 2, packages: 'lol'}",
// Objects in 'packages' should be objects
"{version:2, packages:{foo:null}}",
"{version:2, packages:{foo:[]}}",
"{version:2, packages:{foo:9}}",
"{version:2, packages:{foo:'lol'}}",
// Objects in 'packages' should have version strings
"{version:2, packages:{foo:{'lol':{}}}}",
"{version:2, packages:{foo:{'1.2':{}}}}",
// No remote
"{version:2, packages:{foo:{'1.2.3':{}}}}",
// Bad empty URL
"{version:2, packages:{foo:{'1.2.3':{url: ''}}}}",
// Git URL must have a fragment
"{version:2, packages:{foo:{'1.2.3':{url:'git+http://example.com'}}}}",
// 'auto-lib' should be a usage string
"{version:2, packages:{foo:{'1.2.3':{url:'git+http://example.com?lm=lol#1.0}}}}",
// 'transform' should be an array
R"(
{
version: 2,
packages: {foo: {'1.2.3': {
url: 'git+http://example.com#master,
transform: 'lol hi'
}}}
}
)",
};

for (auto bad : bads) {
INFO("Bad: " << bad);
CHECK_THROWS_AS(dds::parse_packages_json(bad),
dds::user_error<dds::errc::invalid_catalog_json>);
}

std::string_view goods[] = {
// Basic empty:
"{version:2, packages:{}}",
// No versions for 'foo' is weird, but okay
"{version:2, packages:{foo:{}}}",
// Basic package with minimum info:
"{version:2, packages:{foo:{'1.2.3':{url: 'git+http://example.com#master'}}}}",
// Minimal auto-lib:
"{version:2, packages:{foo:{'1.2.3':{url: 'git+http://example.com?lm=a/b#master'}}}}",
// Empty transforms:
R"(
{
version: 2,
packages: {foo: {'1.2.3': {
url: 'git+http://example.com#master',
transform: [],
}}}
}
)",
// Basic transform:
R"(
{
version: 2,
packages: {foo: {'1.2.3': {
url: 'git+http://example.com#master',
transform: [{
copy: {
from: 'here',
to: 'there',
include: [
"*.c",
"*.cpp",
"*.h",
'*.txt'
]
}
}],
}}}
}
)",
};
for (auto good : goods) {
INFO("Parse: " << good);
CHECK_NOTHROW(dds::parse_packages_json(good));
}
}

TEST_CASE("Check a single object") {
// An empty JSON with no packages in it
auto pkgs = dds::parse_packages_json(R"({
version: 2,
packages: {
foo: {
'1.2.3': {
url: 'git+http://example.com?lm=a/b#master',
}
}
}
})");
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].ident.name == "foo");
CHECK(pkgs[0].ident.to_string() == "foo@1.2.3");
CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote));

auto git = std::get<dds::git_remote_listing>(pkgs[0].remote);
CHECK(git.url == "http://example.com");
CHECK(git.ref == "master");
REQUIRE(git.auto_lib);
CHECK(git.auto_lib->namespace_ == "a");
CHECK(git.auto_lib->name == "b");
}

src/dds/catalog/package_info.hpp (+0 -1)



  #include <dds/deps.hpp>
  #include <dds/package/id.hpp>
- #include <dds/util/fs_transform.hpp>
  #include <dds/util/glob.hpp>

  #include <optional>

src/dds/catalog/remote/base.cpp (+0 -6)



  using namespace dds;

- void remote_listing_base::apply_transforms(path_ref root) const {
-     for (const auto& tr : transforms) {
-         tr.apply_to(root);
-     }
- }

  void remote_listing_base::generate_auto_lib_files(const package_id& pid, path_ref root) const {
      if (auto_lib.has_value()) {
          dds_log(info, "Generating library data automatically");

src/dds/catalog/remote/base.hpp (+1 -5)

  #pragma once

- #include <dds/util/fs_transform.hpp>
-
  #include <libman/package.hpp>
  #include <neo/concepts.hpp>

  struct package_id;

  struct remote_listing_base {
-     std::optional<lm::usage>       auto_lib{};
-     std::vector<fs_transformation> transforms{};
+     std::optional<lm::usage> auto_lib{};

-     void apply_transforms(path_ref root) const;
      void generate_auto_lib_files(const package_id& pid, path_ref root) const;
  };



src/dds/util/fs_transform.cpp (+0 -445)

#include "./fs_transform.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/fs.hpp>

#include <range/v3/algorithm/any_of.hpp>
#include <range/v3/distance.hpp>
#include <range/v3/numeric/accumulate.hpp>
#include <semester/walk.hpp>

#include <nlohmann/json.hpp>

#include <iostream>

using namespace dds;

using require_obj = semester::require_type<json5::data::mapping_type>;
using require_array = semester::require_type<json5::data::array_type>;
using require_str = semester::require_type<std::string>;

dds::fs_transformation dds::fs_transformation::from_json(const json5::data& data) {
fs_transformation ret;
using namespace semester::walk_ops;

auto prep_optional = [](auto& opt) {
return [&](auto&&) {
opt.emplace();
return walk.pass;
};
};

auto str_to_path = [](std::string const& s) {
auto p = fs::path(s);
if (p.is_absolute()) {
throw semester::walk_error(std::string(walk.path())
+ ": Only relative paths are accepted");
}
return p;
};

auto get_strip_components = [](double d) {
if (d != double(int(d)) || d < 0) {
throw semester::walk_error(std::string(walk.path()) + ": "
+ "'strip-components' should be a positive whole number");
}
return int(d);
};

auto populate_globs = [&](std::vector<dds::glob>& globs) {
return for_each{
require_str{"Include/exclude list should be a list of globs"},
put_into(std::back_inserter(globs),
[](const std::string& glob) {
try {
return dds::glob::compile(glob);
} catch (const std::runtime_error& e) {
throw semester::walk_error{std::string(walk.path()) + ": " + e.what()};
}
}),
};
};

auto populate_reloc = [&](auto& op) {
return [&](auto&& dat) {
op.emplace();
return mapping{
required_key{"from",
"a 'from' path is required",
require_str{"'from' should be a path string"},
put_into(op->from, str_to_path)},
required_key{"to",
"a 'to' path is required",
require_str{"'to' should be a path string"},
put_into(op->to, str_to_path)},
if_key{"strip-components",
require_type<double>{"'strip-components' should be an integer"},
put_into(op->strip_components, get_strip_components)},
if_key{"include",
require_array{"'include' should be an array"},
populate_globs(op->include)},
if_key{"exclude",
require_array{"'exclude' should be an array"},
populate_globs(op->exclude)},
}(dat);
};
};

struct fs_transformation::edit pending_edit;
fs_transformation::one_edit pending_edit_item;

walk(data,
require_obj{"Each transform must be a JSON object"},
mapping{
if_key{"copy", populate_reloc(ret.copy)},
if_key{"move", populate_reloc(ret.move)},
if_key{"remove",
require_obj{"'remove' should be a JSON object"},
prep_optional(ret.remove),
mapping{
required_key{"path",
"'path' is required",
require_str{"'path' should be a string path to remove"},
put_into(ret.remove->path, str_to_path)},
if_key{"only-matching",
require_array{"'only-matching' should be an array of globs"},
populate_globs(ret.remove->only_matching)},
}},
if_key{"write",
require_obj{"'write' should be a JSON object"},
prep_optional(ret.write),
mapping{
required_key{"path",
"'path' is required",
require_str{"'path' should be a string path to write to"},
put_into(ret.write->path, str_to_path)},
required_key{"content",
"'content' is required",
require_str{"'content' must be a string"},
put_into(ret.write->content)},
}},
if_key{
"edit",
require_obj{"'edit' should be a JSON object"},
prep_optional(ret.edit),
mapping{
required_key{"path",
"'path' is required",
require_str{"'path' should be a string path"},
put_into(ret.edit->path, str_to_path)},
required_key{
"edits",
"An 'edits' array is required",
require_array{"'edits' should be an array"},
for_each{
require_obj{"Each edit should be a JSON object"},
[&](auto&&) {
ret.edit->edits.emplace_back();
return walk.pass;
},
[&](auto&& dat) {
return mapping{
required_key{
"kind",
"Edit 'kind' is required",
require_str{"'kind' should be a string"},
[&](std::string s) {
auto& ed = ret.edit->edits.back();
if (s == "delete") {
ed.kind = ed.delete_;
} else if (s == "insert") {
ed.kind = ed.insert;
} else {
return walk.reject("Invalid edit kind");
}
return walk.accept;
},
},
required_key{
"line",
"Edit 'line' number is required",
require_type<double>{"'line' should be an integer"},
[&](double d) {
ret.edit->edits.back().line = int(d);
return walk.accept;
},
},
if_key{
"content",
require_str{"'content' should be a string"},
[&](std::string s) {
ret.edit->edits.back().content = s;
return walk.accept;
},
},
}(dat);
},
},
},
},
},
});

return ret;
}

namespace {

bool matches_any(path_ref path, const std::vector<glob>& globs) {
return std::any_of(globs.begin(), globs.end(), [&](auto&& gl) { return gl.match(path); });
}

bool parent_dir_of(fs::path root, fs::path child) {
auto root_str = (root += "/").lexically_normal().generic_string();
auto child_str = (child += "/").lexically_normal().generic_string();
return child_str.find(root_str) == 0;
}

void do_relocate(const dds::fs_transformation::copy_move_base& oper,
dds::path_ref root,
bool is_copy) {
auto from = fs::weakly_canonical(root / oper.from);
auto to = fs::weakly_canonical(root / oper.to);
if (!parent_dir_of(root, from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to copy/move a file/directory from outside of the "
"root [{}] into the root [{}].",
from.string(),
root.string());
}
if (!parent_dir_of(root, to)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to copy/move a file/directory [{}] to a "
"destination outside of the restricted root [{}].",
to.string(),
root.string());
}

if (!fs::exists(from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempting to copy/move a non-existint file/directory [{}] "
"to [{}].",
from.string(),
to.string());
}

fs::create_directories(to.parent_path());

if (fs::is_regular_file(from)) {
if (is_copy) {
fs::copy_file(from, to, fs::copy_options::overwrite_existing);
} else {
safe_rename(from, to);
}
return;
}

for (auto item : fs::recursive_directory_iterator(from)) {
auto relpath = fs::relative(item, from);
auto matches_glob = [&](auto glob) { return glob.match(relpath.string()); };
auto included = oper.include.empty() || ranges::any_of(oper.include, matches_glob);
auto excluded = ranges::any_of(oper.exclude, matches_glob);
if (!included || excluded) {
continue;
}

auto n_components = ranges::distance(relpath);
if (n_components <= oper.strip_components) {
continue;
}

auto it = relpath.begin();
std::advance(it, oper.strip_components);
relpath = ranges::accumulate(it, relpath.end(), fs::path(), std::divides<>());

auto dest = to / relpath;
fs::create_directories(dest.parent_path());
if (item.is_directory()) {
fs::create_directories(dest);
} else {
if (is_copy) {
fs::copy_file(item, dest, fs::copy_options::overwrite_existing);
} else {
safe_rename(item, dest);
}
}
}
}

void do_remove(const struct fs_transformation::remove& oper, path_ref root) {
auto from = fs::weakly_canonical(root / oper.path);
if (!parent_dir_of(root, from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to deletes files/directories outside of the "
"root. Attempted to remove [{}]. Removal is restricted to [{}].",
from.string(),
root.string());
}

if (!fs::exists(from)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation attempts to delete a non-existint file/directory [{}].",
from.string());
}

if (fs::is_directory(from)) {
for (auto child : fs::recursive_directory_iterator{from}) {
if (child.is_directory()) {
continue;
}
auto relpath = child.path().lexically_proximate(from);
if (!oper.only_matching.empty() && !matches_any(relpath, oper.only_matching)) {
continue;
}
fs::remove_all(child);
}
} else {
fs::remove_all(from);
}
}

void do_write(const struct fs_transformation::write& oper, path_ref root) {
auto dest = fs::weakly_canonical(root / oper.path);
if (!parent_dir_of(root, dest)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation is trying to write outside of the root. Attempted to write "
"to [{}]. Writing is restricted to [{}].",
dest.string(),
root.string());
}

auto of = dds::open(dest, std::ios::binary | std::ios::out);
of << oper.content;
}

void do_edit(path_ref filepath, const fs_transformation::one_edit& edit) {
auto file = open(filepath, std::ios::in | std::ios::binary);
file.exceptions(std::ios::badbit);
std::string lines;
std::string line;
int line_n = 1;
for (; std::getline(file, line, '\n'); ++line_n) {
if (line_n != edit.line) {
lines += line + "\n";
continue;
}
switch (edit.kind) {
case edit.delete_:
// Just delete the line. Ignore it.
continue;
case edit.insert:
// Insert some new content
lines += edit.content + "\n";
lines += line + "\n";
continue;
}
}
file = open(filepath, std::ios::out | std::ios::binary);
file << lines;
}

} // namespace

void dds::fs_transformation::apply_to(dds::path_ref root_) const {
auto root = fs::weakly_canonical(root_);
if (copy) {
do_relocate(*copy, root, true);
}
if (move) {
do_relocate(*move, root, false);
}
if (remove) {
do_remove(*remove, root);
}
if (write) {
do_write(*write, root);
}
if (edit) {
auto fpath = root / edit->path;
if (!parent_dir_of(root, fpath)) {
throw_external_error<errc::invalid_repo_transform>(
"Filesystem transformation wants to edit a file outside of the root. Attempted to "
"modify [{}]. Writing is restricted to [{}].",
fpath.string(),
root.string());
}
for (auto&& ed : edit->edits) {
do_edit(fpath, ed);
}
}
}

namespace {

nlohmann::json reloc_as_json(const fs_transformation::copy_move_base& oper) {
auto obj = nlohmann::json::object();
obj["from"] = oper.from.string();
obj["to"] = oper.to.string();

obj["strip-components"] = oper.strip_components;

auto inc_list = nlohmann::json::array();
for (auto& inc : oper.include) {
inc_list.push_back(inc.string());
}

auto exc_list = nlohmann::json::array();
for (auto& exc : oper.exclude) {
exc_list.push_back(exc.string());
}

if (!inc_list.empty()) {
obj["include"] = inc_list;
}
if (!exc_list.empty()) {
obj["exclude"] = exc_list;
}

return obj;
}

} // namespace

std::string fs_transformation::as_json() const noexcept {
auto obj = nlohmann::json::object();
if (copy) {
obj["copy"] = reloc_as_json(*copy);
}
if (move) {
obj["move"] = reloc_as_json(*move);
}
if (remove) {
auto rm = nlohmann::json::object();
rm["path"] = remove->path.string();
if (!remove->only_matching.empty()) {
auto if_arr = nlohmann::json::array();
for (auto&& gl : remove->only_matching) {
if_arr.push_back(gl.string());
}
rm["only-matching"] = if_arr;
}
obj["remove"] = rm;
}
if (write) {
auto wr = nlohmann::json::object();
wr["path"] = write->path.string();
wr["content"] = write->content;
obj["write"] = wr;
}
if (edit) {
auto ed = nlohmann::json::object();
ed["path"] = edit->path.string();
auto edits = nlohmann::json::array();
for (auto&& one : edit->edits) {
auto one_ed = nlohmann::json::object();
one_ed["kind"] = one.kind == one.delete_ ? "delete" : "insert";
one_ed["line"] = one.line;
one_ed["content"] = one.content;
edits.push_back(std::move(one_ed));
}
ed["edits"] = edits;
obj["edit"] = ed;
}

return to_string(obj);
}

src/dds/util/fs_transform.hpp (+0 -65)

#pragma once

#include "./fs.hpp"
#include "./glob.hpp"

#include <json5/data.hpp>

#include <optional>
#include <variant>

namespace dds {

struct fs_transformation {
struct copy_move_base {
fs::path from;
fs::path to;

int strip_components = 0;
std::vector<dds::glob> include;
std::vector<dds::glob> exclude;
};

struct copy : copy_move_base {};
struct move : copy_move_base {};

struct remove {
fs::path path;

std::vector<dds::glob> only_matching;
};

struct write {
fs::path path;
std::string content;
};

struct one_edit {
int line = 0;
std::string content;
enum kind_t {
delete_,
insert,
} kind
= delete_;
};

struct edit {
fs::path path;
std::vector<one_edit> edits;
};

std::optional<struct copy> copy;
std::optional<struct move> move;
std::optional<struct remove> remove;
std::optional<struct write> write;
std::optional<struct edit> edit;

void apply_to(path_ref root) const;

static fs_transformation from_json(const json5::data&);

std::string as_json() const noexcept;
};

} // namespace dds

tests/__init__.py (+2 -1)

  from pathlib import Path
  sys.path.append(str(Path(__file__).absolute().parent.parent / 'tools'))

- from .dds import DDS, DDSFixtureParams, scoped_dds, dds_fixture_conf, dds_fixture_conf_1
+ from .dds import DDS, DDSFixtureParams, scoped_dds, dds_fixture_conf, dds_fixture_conf_1
+ from .http import http_repo, RepoFixture

tests/catalog/get_test.py (+38 -42)

  import json
- from contextlib import contextmanager
-
- from tests import dds, DDS
  from tests.fileutil import ensure_dir
-
- import pytest
-
-
- def load_catalog(dds: DDS, data):
-     dds.scope.enter_context(ensure_dir(dds.build_dir))
-     dds.catalog_create()
-
-     json_path = dds.build_dir / 'catalog.json'
-     dds.scope.enter_context(
-         dds.set_contents(json_path,
-                          json.dumps(data).encode()))
-     dds.catalog_import(json_path)
-
-
- def test_get(dds: DDS):
-     load_catalog(
-         dds, {
-             'version': 2,
-             'packages': {
-                 'neo-sqlite3': {
-                     '0.3.0': {
-                         'url':
-                         'git+https://github.com/vector-of-bool/neo-sqlite3.git#0.3.0',
-                     },
-                 },
-             },
-         })
-
+ from tests import dds, DDS
+ from tests.http import RepoFixture
+
+
+ def test_get(dds: DDS, http_repo: RepoFixture):
+     http_repo.import_json_data({
+         'version': 2,
+         'packages': {
+             'neo-sqlite3': {
+                 '0.3.0': {
+                     'remote': {
+                         'git': {
+                             'url': 'https://github.com/vector-of-bool/neo-sqlite3.git',
+                             'ref': '0.3.0',
+                         }
+                     }
+                 }
+             }
+         }
+     })
+
+     dds.repo_add(http_repo.url)
      dds.catalog_get('neo-sqlite3@0.3.0')
      assert (dds.scratch_dir / 'neo-sqlite3@0.3.0').is_dir()
      assert (dds.scratch_dir / 'neo-sqlite3@0.3.0/package.jsonc').is_file()


- def test_get_http(dds: DDS):
-     load_catalog(
-         dds, {
-             'version': 2,
-             'packages': {
-                 'cmcstl2': {
-                     '2020.2.24': {
-                         'url':
-                         'https://github.com/CaseyCarter/cmcstl2/archive/684a96d527e4dc733897255c0177b784dc280980.tar.gz?dds_lm=cmc/stl2;',
-                     },
-                 },
-             },
-         })
+ def test_get_http(dds: DDS, http_repo: RepoFixture):
+     http_repo.import_json_data({
+         'packages': {
+             'cmcstl2': {
+                 '2020.2.24': {
+                     'remote': {
+                         'http': {
+                             'url':
+                             'https://github.com/CaseyCarter/cmcstl2/archive/684a96d527e4dc733897255c0177b784dc280980.tar.gz?dds_lm=cmc/stl2;',
+                         },
+                         'auto-lib': 'cmc/stl2',
+                     }
+                 },
+             },
+         },
+     })
      dds.scope.enter_context(ensure_dir(dds.source_root))
+     dds.repo_add(http_repo.url)
      dds.catalog_get('cmcstl2@2020.2.24')
      assert dds.scratch_dir.joinpath('cmcstl2@2020.2.24/include').is_dir()

tests/catalog/import_test.py (+0 -89)

import json
from pathlib import Path
from functools import partial
from concurrent.futures import ThreadPoolExecutor
from http.server import SimpleHTTPRequestHandler, HTTPServer
import time

import pytest

from tests import dds, DDS
from tests.fileutil import ensure_dir


class DirectoryServingHTTPRequestHandler(SimpleHTTPRequestHandler):
def __init__(self, *args, **kwargs) -> None:
self.dir = kwargs.pop('dir')
super().__init__(*args, **kwargs)

def translate_path(self, path) -> str:
abspath = Path(super().translate_path(path))
relpath = abspath.relative_to(Path.cwd())
return self.dir / relpath


def test_import_json(dds: DDS):
dds.scope.enter_context(ensure_dir(dds.build_dir))
dds.catalog_create()

json_fpath = dds.build_dir / 'data.json'
import_data = {
'version': 2,
'packages': {
'foo': {
'1.2.4': {
'url': 'git+http://example.com#master',
'depends': [],
},
'1.2.5': {
'url': 'git+http://example.com#master',
},
},
'bar': {
'1.5.1': {
'url': 'http://example.com/bar-1.5.2.tgz'
},
}
},
}
dds.scope.enter_context(dds.set_contents(json_fpath, json.dumps(import_data).encode()))
dds.catalog_import(json_fpath)


@pytest.yield_fixture
def http_import_server():
handler = partial(DirectoryServingHTTPRequestHandler, dir=Path.cwd() / 'data/http-test-1')
addr = ('0.0.0.0', 8000)
pool = ThreadPoolExecutor()
with HTTPServer(addr, handler) as httpd:
pool.submit(lambda: httpd.serve_forever(poll_interval=0.1))
try:
yield
finally:
httpd.shutdown()


@pytest.yield_fixture
def http_repo_server():
handler = partial(DirectoryServingHTTPRequestHandler, dir=Path.cwd() / 'data/test-repo-1')
addr = ('0.0.0.0', 4646)
pool = ThreadPoolExecutor()
with HTTPServer(addr, handler) as httpd:
pool.submit(lambda: httpd.serve_forever(poll_interval=0.1))
try:
yield 'http://localhost:4646'
finally:
httpd.shutdown()


def test_repo_add(dds: DDS, http_repo_server):
dds.repo_dir.mkdir(parents=True, exist_ok=True)
dds.run([
'repo',
dds.repo_dir_arg,
'add',
dds.catalog_path_arg,
http_repo_server,
'--update',
])
dds.build_deps(['neo-fun@0.6.0'])

tests/conftest.py (+1 -0)

  import pytest

  from tests import scoped_dds, DDSFixtureParams
+ from .http import *  # Exposes the HTTP fixtures


  @pytest.fixture(scope='session')

tests/dds.py (+1 -10)

  ])

  def repo_add(self, url: str) -> None:
-     return self.run(['repo', 'add', url, '--update', self.catalog_path_arg])
+     self.run(['repo', 'add', url, '--update', self.catalog_path_arg])

  def build(self,
            *,
      self.scratch_dir.mkdir(parents=True, exist_ok=True)
      return self.run(['catalog', 'create', f'--catalog={self.catalog_path}'], cwd=self.test_dir)

- def catalog_import(self, json_path: Path) -> subprocess.CompletedProcess:
-     self.scratch_dir.mkdir(parents=True, exist_ok=True)
-     return self.run([
-         'catalog',
-         'import',
-         f'--catalog={self.catalog_path}',
-         f'--json={json_path}',
-     ])

  def catalog_get(self, req: str) -> subprocess.CompletedProcess:
      return self.run([
          'catalog',

tests/deps/build-deps/project/catalog.json (+6 -1)

"packages": { "packages": {
"neo-fun": { "neo-fun": {
"0.3.0": { "0.3.0": {
"url": "git+https://github.com/vector-of-bool/neo-fun.git#0.3.0"
"remote": {
"git": {
"url": "https://github.com/vector-of-bool/neo-fun.git",
"ref": "0.3.0"
}
}
} }
} }
} }

tests/deps/build-deps/test_build_deps.py (+10 -6)

  from tests import dds, DDS
+ from tests.http import RepoFixture


- def test_build_deps_from_file(dds: DDS):
+ def test_build_deps_from_file(dds: DDS, http_repo: RepoFixture):
      assert not dds.deps_build_dir.is_dir()
-     dds.catalog_import(dds.source_root / 'catalog.json')
+     http_repo.import_json_file(dds.source_root / 'catalog.json')
+     dds.repo_add(http_repo.url)
      dds.build_deps(['-d', 'deps.json5'])
      assert (dds.deps_build_dir / 'neo-fun@0.3.0').is_dir()
      assert (dds.scratch_dir / 'INDEX.lmi').is_file()
      assert (dds.deps_build_dir / '_libman/neo/fun.lml').is_file()


- def test_build_deps_from_cmd(dds: DDS):
+ def test_build_deps_from_cmd(dds: DDS, http_repo: RepoFixture):
      assert not dds.deps_build_dir.is_dir()
-     dds.catalog_import(dds.source_root / 'catalog.json')
+     http_repo.import_json_file(dds.source_root / 'catalog.json')
+     dds.repo_add(http_repo.url)
      dds.build_deps(['neo-fun=0.3.0'])
      assert (dds.deps_build_dir / 'neo-fun@0.3.0').is_dir()
      assert (dds.scratch_dir / 'INDEX.lmi').is_file()
      assert (dds.deps_build_dir / '_libman/neo/fun.lml').is_file()


- def test_multiple_deps(dds: DDS):
+ def test_multiple_deps(dds: DDS, http_repo: RepoFixture):
      assert not dds.deps_build_dir.is_dir()
-     dds.catalog_import(dds.source_root / 'catalog.json')
+     http_repo.import_json_file(dds.source_root / 'catalog.json')
+     dds.repo_add(http_repo.url)
      dds.build_deps(['neo-fun^0.2.0', 'neo-fun~0.3.0'])
      assert (dds.deps_build_dir / 'neo-fun@0.3.0').is_dir()
      assert (dds.scratch_dir / 'INDEX.lmi').is_file()

tests/deps/do_test.py (+7 -4)

  import subprocess

  from tests import DDS, DDSFixtureParams, dds_fixture_conf, dds_fixture_conf_1
+ from tests.http import RepoFixture

  dds_conf = dds_fixture_conf(
      DDSFixtureParams(ident='git-remote', subdir='git-remote'),


  @dds_conf
- def test_deps_build(dds: DDS):
-     dds.catalog_import(dds.source_root / 'catalog.json')
+ def test_deps_build(dds: DDS, http_repo: RepoFixture):
+     http_repo.import_json_file(dds.source_root / 'catalog.json')
+     dds.repo_add(http_repo.url)
      assert not dds.repo_dir.exists()
      dds.build()
      assert dds.repo_dir.exists(), '`Building` did not generate a repo directory'


  @dds_fixture_conf_1('use-remote')
- def test_use_nlohmann_json_remote(dds: DDS):
-     dds.catalog_import(dds.source_root / 'catalog.json')
+ def test_use_nlohmann_json_remote(dds: DDS, http_repo: RepoFixture):
+     http_repo.import_json_file(dds.source_root / 'catalog.json')
+     dds.repo_add(http_repo.url)
      dds.build(apps=True)

      app_exe = dds.build_dir / f'app{dds.exe_suffix}'

tests/deps/git-remote/catalog.json (+13 -2)

"packages": { "packages": {
"neo-fun": { "neo-fun": {
"0.3.2": { "0.3.2": {
"url": "git+https://github.com/vector-of-bool/neo-fun.git#0.3.2"
"remote": {
"git": {
"url": "https://github.com/vector-of-bool/neo-fun.git",
"ref": "0.3.2"
}
}
} }
}, },
"range-v3": { "range-v3": {
"0.9.1": { "0.9.1": {
"url": "git+https://github.com/ericniebler/range-v3.git?lm=Niebler/range-v3#0.9.1"
"remote": {
"auto-lib": "Niebler/range-v3",
"git": {
"url": "https://github.com/ericniebler/range-v3.git",
"ref": "0.9.1"
}
}
} }
} }
} }

tests/deps/use-cryptopp/project/catalog.json (+19 -13)

"packages": { "packages": {
"cryptopp": { "cryptopp": {
"8.2.0": { "8.2.0": {
"url": "git+https://github.com/weidai11/cryptopp.git?lm=cryptopp/cryptopp#CRYPTOPP_8_2_0",
"transform": [
{
"move": {
"from": ".",
"to": "src/cryptopp",
"include": [
"*.c",
"*.cpp",
"*.h"
]
"remote": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0"
},
"auto-lib": "cryptopp/cryptopp",
"transform": [
{
"move": {
"from": ".",
"to": "src/cryptopp",
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
} }
}
]
]
}
} }
} }
} }

tests/deps/use-cryptopp/test_use_cryptopp.py (+6 -7)

  from tests import DDS
+ from tests.http import RepoFixture
  import platform

  import pytest
  from dds_ci import proc


- @pytest.mark.skipif(
-     platform.system() == 'FreeBSD',
-     reason='This one has trouble running on FreeBSD')
- def test_get_build_use_cryptopp(dds: DDS):
-     dds.catalog_import(dds.source_root / 'catalog.json')
+ @pytest.mark.skipif(platform.system() == 'FreeBSD', reason='This one has trouble running on FreeBSD')
+ def test_get_build_use_cryptopp(dds: DDS, http_repo: RepoFixture):
+     http_repo.import_json_file(dds.source_root / 'catalog.json')
+     dds.repo_add(http_repo.url)
      tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc'
      tc = str(dds.test_dir / tc_fname)
      dds.build(toolchain=tc)
-     proc.check_run(
-         (dds.build_dir / 'use-cryptopp').with_suffix(dds.exe_suffix))
+     proc.check_run((dds.build_dir / 'use-cryptopp').with_suffix(dds.exe_suffix))

tests/deps/use-remote/catalog.json (+6 -1)

"packages": { "packages": {
"nlohmann-json": { "nlohmann-json": {
"3.7.1": { "3.7.1": {
"url": "git+https://github.com/vector-of-bool/json.git#dds/3.7.1",
"remote": {
"git": {
"url": "https://github.com/vector-of-bool/json.git",
"ref": "dds/3.7.1"
}
},
"depends": [] "depends": []
} }
} }

tests/deps/use-spdlog/project/catalog.json (+7 -2)

"packages": { "packages": {
"spdlog": { "spdlog": {
"1.4.2": { "1.4.2": {
"url": "git+https://github.com/gabime/spdlog.git?lm=spdlog/spdlog#v1.4.2",
"depends": []
"remote": {
"git": {
"url": "https://github.com/gabime/spdlog.git",
"ref": "v1.4.2"
},
"auto-lib": "spdlog/spdlog"
}
} }
} }
} }

tests/deps/use-spdlog/use_spdlog_test.py (+4 -2)

  from tests import DDS
+ from tests.http import RepoFixture

  from dds_ci import proc


- def test_get_build_use_spdlog(dds: DDS):
-     dds.catalog_import(dds.source_root / 'catalog.json')
+ def test_get_build_use_spdlog(dds: DDS, http_repo: RepoFixture):
+     http_repo.import_json_file(dds.source_root / 'catalog.json')
+     dds.repo_add(http_repo.url)
      tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc'
      tc = str(dds.test_dir / tc_fname)
      dds.build(toolchain=tc, apps=True)

tests/http.py (+105 -0)

from pathlib import Path
from contextlib import contextmanager
import json
from http.server import SimpleHTTPRequestHandler, HTTPServer
from typing import NamedTuple
from concurrent.futures import ThreadPoolExecutor
from functools import partial
import tempfile
import sys
import subprocess

import pytest


class DirectoryServingHTTPRequestHandler(SimpleHTTPRequestHandler):
"""
A simple HTTP request handler that serves files from the directory given to the constructor.
"""

def __init__(self, *args, **kwargs) -> None:
self.dir = kwargs.pop('dir')
super().__init__(*args, **kwargs)

def translate_path(self, path) -> str:
# Convert the given URL path to a path relative to the directory we are serving
abspath = Path(super().translate_path(path))
relpath = abspath.relative_to(Path.cwd())
return self.dir / relpath


class ServerInfo(NamedTuple):
"""
Information about an HTTP server fixture
"""
base_url: str
root: Path


@contextmanager
def run_http_server(dirpath: Path, port: int):
"""
Context manager that spawns an HTTP server that serves the given directory on
the given TCP port.
"""
handler = partial(DirectoryServingHTTPRequestHandler, dir=dirpath)
addr = ('localhost', port)
pool = ThreadPoolExecutor()
with HTTPServer(addr, handler) as httpd:
pool.submit(lambda: httpd.serve_forever(poll_interval=0.1))
try:
yield ServerInfo(f'http://localhost:{port}', dirpath)
finally:
httpd.shutdown()


@pytest.yield_fixture()
def http_tmp_dir_server(tmp_path: Path, unused_tcp_port: int):
"""
Creates an HTTP server that serves the contents of a new
temporary directory.
"""
with run_http_server(tmp_path, unused_tcp_port) as s:
yield s


class RepoFixture:
"""
A fixture handle to a dds HTTP repository, including a path and URL.
"""

def __init__(self, dds_exe: Path, info: ServerInfo) -> None:
self.server = info
self.url = info.base_url
self.dds_exe = dds_exe

def import_json_data(self, data) -> None:
"""
Import packages into the repo from the given JSON data. Uses
mkrepo.py
"""
with tempfile.NamedTemporaryFile() as f:
f.write(json.dumps(data).encode())
f.flush()
self.import_json_file(Path(f.name))

def import_json_file(self, fpath: Path) -> None:
"""
Import packages into the repo from the given JSON file. Uses mkrepo.py
"""
subprocess.check_call([
sys.executable,
str(Path.cwd() / 'tools/mkrepo.py'),
f'--dir={self.server.root}',
f'--spec={fpath}',
])


@pytest.yield_fixture()
def http_repo(dds_exe: Path, http_tmp_dir_server: ServerInfo):
"""
Fixture that creates a new empty dds repository and an HTTP server to serve
it.
"""
subprocess.check_call([dds_exe, 'repoman', 'init', str(http_tmp_dir_server.root)])
yield RepoFixture(dds_exe, http_tmp_dir_server)
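
A usage sketch of these fixtures, mirroring the pattern the updated tests below follow (the test name and the catalog.json path are illustrative):

    from tests import DDS
    from tests.http import RepoFixture

    def test_example(dds: DDS, http_repo: RepoFixture):
        # Populate the fixture repository from a catalog.json spec,
        # register the served repository, then build against it.
        http_repo.import_json_file(dds.source_root / 'catalog.json')
        dds.repo_add(http_repo.url)
        dds.build()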

tools/gen-catalog-json.py (+125 -143)

from pathlib import Path from pathlib import Path
import sys import sys
import textwrap import textwrap
import requests
from threading import local
from concurrent.futures import ThreadPoolExecutor from concurrent.futures import ThreadPoolExecutor




class Git(NamedTuple): class Git(NamedTuple):
url: str url: str
ref: str ref: str
auto_lib: Optional[str] = None
transforms: Sequence[FSTransform] = []


def to_dict(self) -> dict: def to_dict(self) -> dict:
d = { d = {
'url': self.url, 'url': self.url,
'ref': self.ref, 'ref': self.ref,
'transform': [f.to_dict() for f in self.transforms],
} }
if self.auto_lib:
d['auto-lib'] = self.auto_lib
return d return d




RemoteInfo = Union[Git] RemoteInfo = Union[Git]




class ForeignInfo(NamedTuple):
remote: RemoteInfo
auto_lib: Optional[str] = None
transforms: Sequence[FSTransform] = []

def to_dict(self) -> dict:
d = {
'transform': [tr.to_dict() for tr in self.transforms],
}
if isinstance(self.remote, Git):
d['git'] = self.remote.to_dict()
if self.auto_lib:
d['auto-lib'] = self.auto_lib
return d


class Version(NamedTuple): class Version(NamedTuple):
version: str version: str
remote: RemoteInfo
remote: ForeignInfo
depends: Sequence[str] = [] depends: Sequence[str] = []
description: str = '(No description provided)' description: str = '(No description provided)'


ret: dict = { ret: dict = {
'description': self.description, 'description': self.description,
'depends': list(self.depends), 'depends': list(self.depends),
'remote': self.remote.to_dict(),
} }
if isinstance(self.remote, Git):
ret['git'] = self.remote.to_dict()
return ret return ret






HTTP_POOL = ThreadPoolExecutor(10) HTTP_POOL = ThreadPoolExecutor(10)


HTTP_SESSION = requests.Session()



def github_http_get(url: str): def github_http_get(url: str):
url_dat = url_parse.urlparse(url) url_dat = url_parse.urlparse(url)
req.add_header('Authorization', f'token {os.environ["GITHUB_API_TOKEN"]}') req.add_header('Authorization', f'token {os.environ["GITHUB_API_TOKEN"]}')
if url_dat.hostname != 'api.github.com': if url_dat.hostname != 'api.github.com':
raise RuntimeError(f'Request is outside of api.github.com [{url}]') raise RuntimeError(f'Request is outside of api.github.com [{url}]')
resp = request.urlopen(req)
if resp.status != 200:
raise RuntimeError(f'Request to [{url}] failed [{resp.status} {resp.reason}]')
return json5.loads(resp.read())
print(f'Request {url}')
resp = HTTP_SESSION.get(url, headers=req.headers)
# resp = request.urlopen(req)
resp.raise_for_status()
# if resp.status != 200:
# raise RuntimeError(f'Request to [{url}] failed [{resp.status} {resp.reason}]')
return json5.loads(resp.text)




def _get_github_tree_file_content(url: str) -> bytes: def _get_github_tree_file_content(url: str) -> bytes:
raise RuntimeError(f'Unknown "depends" object from json file: {depends!r}') raise RuntimeError(f'Unknown "depends" object from json file: {depends!r}')


remote = Git(url=clone_url, ref=tag['name']) remote = Git(url=clone_url, ref=tag['name'])
return Version(version, description=desc, depends=list(pairs), remote=remote)
return Version(version, description=desc, depends=list(pairs), remote=ForeignInfo(remote))




def github_package(name: str, repo: str, want_tags: Iterable[str]) -> Package: def github_package(name: str, repo: str, want_tags: Iterable[str]) -> Package:
Version( Version(
ver.version, ver.version,
description=description, description=description,
remote=Git(git_url, tag_fmt.format(ver.version), auto_lib=auto_lib),
remote=ForeignInfo(remote=Git(git_url, tag_fmt.format(ver.version)), auto_lib=auto_lib),
depends=ver.depends) for ver in versions depends=ver.depends) for ver in versions
]) ])


Version( Version(
ver, ver,
description='\n'.join(textwrap.wrap(description)), description='\n'.join(textwrap.wrap(description)),
remote=Git(url=git_url, ref=tag_fmt.format(ver), auto_lib=auto_lib, transforms=transforms))
remote=ForeignInfo(
remote=Git(url=git_url, ref=tag_fmt.format(ver)), auto_lib=auto_lib, transforms=transforms))
for ver in versions for ver in versions
]) ])




# yapf: disable # yapf: disable
PACKAGES = [ PACKAGES = [
github_package('neo-buffer', 'vector-of-bool/neo-buffer',
['0.2.1', '0.3.0', '0.4.0', '0.4.1', '0.4.2']),
github_package('neo-buffer', 'vector-of-bool/neo-buffer', ['0.2.1', '0.3.0', '0.4.0', '0.4.1', '0.4.2']),
github_package('neo-compress', 'vector-of-bool/neo-compress', ['0.1.0', '0.1.1', '0.2.0']), github_package('neo-compress', 'vector-of-bool/neo-compress', ['0.1.0', '0.1.1', '0.2.0']),
github_package('neo-url', 'vector-of-bool/neo-url',
['0.1.0', '0.1.1', '0.1.2', '0.2.0', '0.2.1', '0.2.2']),
github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3',
['0.2.3', '0.3.0', '0.4.0', '0.4.1']),
github_package('neo-url', 'vector-of-bool/neo-url', ['0.1.0', '0.1.1', '0.1.2', '0.2.0', '0.2.1', '0.2.2']),
github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3', ['0.2.3', '0.3.0', '0.4.0', '0.4.1']),
github_package('neo-fun', 'vector-of-bool/neo-fun', [ github_package('neo-fun', 'vector-of-bool/neo-fun', [
'0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0', '0.4.1',
'0.4.2', '0.5.0', '0.5.1', '0.5.2', '0.5.3', '0.5.4', '0.5.5', '0.6.0',
'0.1.1',
'0.2.0',
'0.2.1',
'0.3.0',
'0.3.1',
'0.3.2',
'0.4.0',
'0.4.1',
'0.4.2',
'0.5.0',
'0.5.1',
'0.5.2',
'0.5.3',
'0.5.4',
'0.5.5',
'0.6.0',
]), ]),
github_package('neo-io', 'vector-of-bool/neo-io', ['0.1.0', '0.1.1']), github_package('neo-io', 'vector-of-bool/neo-io', ['0.1.0', '0.1.1']),
github_package('neo-http', 'vector-of-bool/neo-http', ['0.1.0']), github_package('neo-http', 'vector-of-bool/neo-http', ['0.1.0']),
github_package('semver', 'vector-of-bool/semver', ['0.2.2']), github_package('semver', 'vector-of-bool/semver', ['0.2.2']),
github_package('pubgrub', 'vector-of-bool/pubgrub', ['0.2.1']), github_package('pubgrub', 'vector-of-bool/pubgrub', ['0.2.1']),
github_package('vob-json5', 'vector-of-bool/json5', ['0.1.5']), github_package('vob-json5', 'vector-of-bool/json5', ['0.1.5']),
github_package('vob-semester', 'vector-of-bool/semester',
['0.1.0', '0.1.1', '0.2.0', '0.2.1', '0.2.2']),
github_package('vob-semester', 'vector-of-bool/semester', ['0.1.0', '0.1.1', '0.2.0', '0.2.1', '0.2.2']),
many_versions( many_versions(
'magic_enum', 'magic_enum',
( (
), ),
git_url='https://github.com/ericniebler/range-v3.git', git_url='https://github.com/ericniebler/range-v3.git',
auto_lib='range-v3/range-v3', auto_lib='range-v3/range-v3',
description=
'Range library for C++14/17/20, basis for C++20\'s std::ranges',
description='Range library for C++14/17/20, basis for C++20\'s std::ranges',
    ),
    many_versions(
        'nlohmann-json',
    ),
    Package('ms-wil', [
        Version(
'2020.03.16',
'2020.3.16',
            description='The Windows Implementation Library',
remote=Git('https://github.com/vector-of-bool/wil.git',
'dds/2020.03.16'))
remote=ForeignInfo(Git('https://github.com/vector-of-bool/wil.git', 'dds/2020.03.16')))
]),
Package('p-ranav.argparse', [
Version(
'2.1.0',
description='Argument Parser for Modern C++',
remote=ForeignInfo(Git('https://github.com/p-ranav/argparse.git', 'v2.1'), auto_lib='p-ranav/argparse'))
    ]),
    many_versions(
        'ctre',
            '2.8.3',
            '2.8.4',
        ),
git_url=
'https://github.com/hanickadot/compile-time-regular-expressions.git',
git_url='https://github.com/hanickadot/compile-time-regular-expressions.git',
        tag_fmt='v{}',
        auto_lib='hanickadot/ctre',
description=
'A compile-time PCRE (almost) compatible regular expression matcher',
description='A compile-time PCRE (almost) compatible regular expression matcher',
    ),
    Package(
        'spdlog',
            ver,
            description='Fast C++ logging library',
            depends=['fmt+6.0.0'],
remote=Git(
url='https://github.com/gabime/spdlog.git',
ref=f'v{ver}',
remote=ForeignInfo(
Git(url='https://github.com/gabime/spdlog.git', ref=f'v{ver}'),
                transforms=[
                    FSTransform(
                        write=WriteTransform(
                            }))),
                    FSTransform(
                        write=WriteTransform(
path='library.json',
content=json.dumps({
path='library.json', content=json.dumps({
                                'name': 'spdlog',
                                'uses': ['fmt/fmt']
                            }))),
        Version(
            '2.12.4',
            description='A modern C++ unit testing library',
remote=Git(
'https://github.com/catchorg/Catch2.git',
'v2.12.4',
remote=ForeignInfo(
Git('https://github.com/catchorg/Catch2.git', 'v2.12.4'),
                auto_lib='catch2/catch2',
                transforms=[
FSTransform(
move=CopyMoveTransform(
frm='include', to='include/catch2')),
FSTransform(move=CopyMoveTransform(frm='include', to='include/catch2')),
                    FSTransform(
                        copy=CopyMoveTransform(frm='include', to='src'),
                        write=WriteTransform(
        Version(
            ver,
            description='Asio asynchronous I/O C++ library',
remote=Git(
'https://github.com/chriskohlhoff/asio.git',
f'asio-{ver.replace(".", "-")}',
remote=ForeignInfo(
Git('https://github.com/chriskohlhoff/asio.git', f'asio-{ver.replace(".", "-")}'),
                auto_lib='asio/asio',
                transforms=[
                    FSTransform(
                        edit=EditTransform(
                            path='include/asio/detail/config.hpp',
                            edits=[
OneEdit(
line=13,
kind='insert',
content='#define ASIO_STANDALONE 1'),
OneEdit(
line=14,
kind='insert',
content=
'#define ASIO_SEPARATE_COMPILATION 1')
OneEdit(line=13, kind='insert', content='#define ASIO_STANDALONE 1'),
OneEdit(line=14, kind='insert', content='#define ASIO_SEPARATE_COMPILATION 1')
                            ]),
                    ),
                ]),
        Version(
            ver,
            description='Abseil Common Libraries',
remote=Git(
'https://github.com/abseil/abseil-cpp.git',
tag,
remote=ForeignInfo(
Git('https://github.com/abseil/abseil-cpp.git', tag),
                auto_lib='abseil/abseil',
                transforms=[
                    FSTransform(
    Package('zlib', [
        Version(
            ver,
description=
'A massively spiffy yet delicately unobtrusive compression library',
remote=Git(
'https://github.com/madler/zlib.git',
tag or f'v{ver}',
description='A massively spiffy yet delicately unobtrusive compression library',
remote=ForeignInfo(
Git('https://github.com/madler/zlib.git', tag or f'v{ver}'),
                auto_lib='zlib/zlib',
                transforms=[
FSTransform(
move=CopyMoveTransform(
frm='.',
to='src/',
include=[
'*.c',
'*.h',
],
)),
FSTransform(
move=CopyMoveTransform(
frm='src/',
to='include/',
include=['zlib.h', 'zconf.h'],
)),
FSTransform(move=CopyMoveTransform(
frm='.',
to='src/',
include=[
'*.c',
'*.h',
],
)),
FSTransform(move=CopyMoveTransform(
frm='src/',
to='include/',
include=['zlib.h', 'zconf.h'],
)),
                ]),
        ) for ver, tag in [
            ('1.2.11', None),
    Package('sol2', [
        Version(
            ver,
description=
'A C++ <-> Lua API wrapper with advanced features and top notch performance',
description='A C++ <-> Lua API wrapper with advanced features and top notch performance',
            depends=['lua+0.0.0'],
remote=Git(
'https://github.com/ThePhD/sol2.git',
f'v{ver}',
remote=ForeignInfo(
Git('https://github.com/ThePhD/sol2.git', f'v{ver}'),
                transforms=[
                    FSTransform(
                        write=WriteTransform(
                            },
                            indent=2,
                        )),
move=(None
if ver.startswith('3.') else CopyMoveTransform(
frm='sol',
to='src/sol',
)),
move=(None if ver.startswith('3.') else CopyMoveTransform(
frm='sol',
to='src/sol',
)),
                    ),
                    FSTransform(
                        write=WriteTransform(
            ver,
            description=
            'Lua is a powerful and fast programming language that is easy to learn and use and to embed into your application.',
remote=Git(
'https://github.com/lua/lua.git',
f'v{ver}',
remote=ForeignInfo(
Git('https://github.com/lua/lua.git', f'v{ver}'),
                auto_lib='lua/lua',
transforms=[
FSTransform(
move=CopyMoveTransform(
frm='.',
to='src/',
include=['*.c', '*.h'],
))
]),
transforms=[FSTransform(move=CopyMoveTransform(
frm='.',
to='src/',
include=['*.c', '*.h'],
))]),
        ) for ver in [
            '5.4.0',
            '5.3.5',
        Version(
            ver,
            description='Parsing Expression Grammar Template Library',
remote=Git(
'https://github.com/taocpp/PEGTL.git',
ver,
remote=ForeignInfo(
Git('https://github.com/taocpp/PEGTL.git', ver),
                auto_lib='tao/pegtl',
                transforms=[FSTransform(remove=RemoveTransform(path='src/'))],
            )) for ver in [
        ]
    ]),
    many_versions(
'boost.pfr', ['1.0.0', '1.0.1'],
auto_lib='boost/pfr',
git_url='https://github.com/apolukhin/magic_get.git'),
'boost.pfr', ['1.0.0', '1.0.1'], auto_lib='boost/pfr', git_url='https://github.com/apolukhin/magic_get.git'),
    many_versions(
        'boost.leaf',
        [
        'for encryption, decryption, signatures, password hashing and more.',
        transforms=[
            FSTransform(
move=CopyMoveTransform(
frm='src/libsodium/include', to='include/'),
move=CopyMoveTransform(frm='src/libsodium/include', to='include/'),
                edit=EditTransform(
                    path='include/sodium/export.h',
edits=[
OneEdit(
line=8,
kind='insert',
content='#define SODIUM_STATIC 1')
])),
edits=[OneEdit(line=8, kind='insert', content='#define SODIUM_STATIC 1')])),
            FSTransform(
                edit=EditTransform(
                    path='include/sodium/private/common.h',
                        OneEdit(
                            kind='insert',
                            line=1,
content=Path(__file__).parent.joinpath(
'libsodium-config.h').read_text(),
content=Path(__file__).parent.joinpath('libsodium-config.h').read_text(),
                        )
                    ])),
            FSTransform(
                ),
                remove=RemoveTransform(path='src/libsodium'),
            ),
FSTransform(
copy=CopyMoveTransform(
frm='include', to='src/', strip_components=1)),
FSTransform(copy=CopyMoveTransform(frm='include', to='src/', strip_components=1)),
        ]),
    many_versions(
        'tomlpp',
        tag_fmt='v{}',
        git_url='https://github.com/marzer/tomlplusplus.git',
        auto_lib='tomlpp/tomlpp',
description=
'Header-only TOML config file parser and serializer for modern C++'),
description='Header-only TOML config file parser and serializer for modern C++'),
    Package('inja', [
        *(Version(
            ver,
            description='A Template Engine for Modern C++',
remote=Git(
'https://github.com/pantor/inja.git',
f'v{ver}',
auto_lib='inja/inja')) for ver in ('1.0.0', '2.0.0', '2.0.1')),
remote=ForeignInfo(Git('https://github.com/pantor/inja.git', f'v{ver}'), auto_lib='inja/inja'))
for ver in ('1.0.0', '2.0.0', '2.0.1')),
        *(Version(
            ver,
            description='A Template Engine for Modern C++',
            depends=['nlohmann-json+0.0.0'],
remote=Git(
'https://github.com/pantor/inja.git',
f'v{ver}',
remote=ForeignInfo(
Git('https://github.com/pantor/inja.git', f'v{ver}'),
                transforms=[
                    FSTransform(
                        write=WriteTransform(
                            path='package.json',
                            content=json.dumps({
'name':
'inja',
'namespace':
'inja',
'version':
ver,
'name': 'inja',
'namespace': 'inja',
'version': ver,
                                'depends': [
                                    'nlohmann-json+0.0.0',
                                ]
                            }))),
                    FSTransform(
                        write=WriteTransform(
path='library.json',
content=json.dumps({
path='library.json', content=json.dumps({
                                'name': 'inja',
                                'uses': ['nlohmann/json']
                            }))),
                ],
auto_lib='inja/inja',
        )) for ver in ('2.1.0', '2.2.0')),
    ]),
    many_versions(
        Version(
            '0.98.1',
            description='PCG Random Number Generation, C++ Edition',
remote=Git(
url='https://github.com/imneme/pcg-cpp.git',
ref='v0.98.1',
auto_lib='pcg/pcg-cpp'))
remote=ForeignInfo(Git(url='https://github.com/imneme/pcg-cpp.git', ref='v0.98.1'), auto_lib='pcg/pcg-cpp'))
    ]),
    many_versions(
        'hinnant-date',
        ['2.4.1', '3.0.0'],
description=
'A date and time library based on the C++11/14/17 <chrono> header',
description='A date and time library based on the C++11/14/17 <chrono> header',
        auto_lib='hinnant/date',
        git_url='https://github.com/HowardHinnant/date.git',
        tag_fmt='v{}',

+ 422
- 0
tools/mkrepo.py View File

"""
Script for populating a repository with packages declaratively.
"""

import argparse
import itertools
import json
import tarfile
import re
import shutil
import sys
import tempfile
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from pathlib import Path
from subprocess import check_call
from threading import Lock
from urllib import request
from typing import (Any, Dict, Iterable, Iterator, NamedTuple, NoReturn, Optional, Sequence, Tuple, TypeVar, Type,
Union)

from semver import VersionInfo
from typing_extensions import Protocol

T = TypeVar('T')

# One less than the maximum unsigned 32-bit value; used as an effectively
# infinite upper bound for '+' ("this version or anything newer") ranges.
I32_MAX = 0xffff_ffff - 1
MAX_VERSION = VersionInfo(I32_MAX, I32_MAX, I32_MAX)


class Dependency(NamedTuple):
name: str
low: VersionInfo
high: VersionInfo

@classmethod
def parse(cls: Type[T], depstr: str) -> T:
mat = re.match(r'(.+?)([\^~\+@])(.+?)$', depstr)
if not mat:
raise ValueError(f'Invalid dependency string "{depstr}"')
name, kind, version_str = mat.groups()
version = VersionInfo.parse(version_str)
high = {
'^': version.bump_major,
'~': version.bump_minor,
'@': version.bump_patch,
'+': lambda: MAX_VERSION,
}[kind]()
return cls(name, version, high)

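# A sketch of how Dependency.parse expands the range shorthand (the
# package/version strings here are just illustrative):
#
#   Dependency.parse('fmt^6.0.0')     # ('fmt', 6.0.0, 7.0.0): '^' bumps the major version
#   Dependency.parse('spdlog~1.4.0')  # ('spdlog', 1.4.0, 1.5.0): '~' bumps the minor version
#   Dependency.parse('zlib+1.2.11')   # ('zlib', 1.2.11, MAX_VERSION): '+' means "anything newer"
#
# ('@' works the same way, bumping only the patch version.)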

def glob_if_exists(path: Path, pat: str) -> Iterable[Path]:
try:
yield from path.glob(pat)
except FileNotFoundError:
yield from ()


class MoveTransform(NamedTuple):
frm: str
to: str
strip_components: int = 0
include: Sequence[str] = []
exclude: Sequence[str] = []

@classmethod
def parse_data(cls: Type[T], data: Any) -> T:
return cls(
frm=data.pop('from'),
to=data.pop('to'),
include=data.pop('include', []),
strip_components=data.pop('strip-components', 0),
exclude=data.pop('exclude', []))

def apply_to(self, p: Path) -> None:
src = p / self.frm
dest = p / self.to
if src.is_file():
self.do_reloc_file(src, dest)
return

inc_pats = self.include or ['**/*']
include = set(itertools.chain.from_iterable(glob_if_exists(src, pat) for pat in inc_pats))
exclude = set(itertools.chain.from_iterable(glob_if_exists(src, pat) for pat in self.exclude))
to_reloc = sorted(include - exclude)
for source_file in to_reloc:
relpath = source_file.relative_to(src)
strip_relpath = Path('/'.join(relpath.parts[self.strip_components:]))
dest_file = dest / strip_relpath
self.do_reloc_file(source_file, dest_file)

def do_reloc_file(self, src: Path, dest: Path) -> None:
if src.is_dir():
dest.mkdir(exist_ok=True, parents=True)
else:
dest.parent.mkdir(exist_ok=True, parents=True)
src.rename(dest)

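# A minimal example of the JSON object parse_data expects (keys as popped
# above; the paths are illustrative, echoing the old libsodium entry):
#
#   MoveTransform.parse_data({'from': 'src/libsodium/include', 'to': 'include/'})
#
# 'include', 'exclude', and 'strip-components' are optional and default to
# "everything", "nothing", and 0 respectively.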

class CopyTransform(MoveTransform):
def do_reloc_file(self, src: Path, dest: Path) -> None:
if src.is_dir():
dest.mkdir(exist_ok=True, parents=True)
else:
shutil.copy2(src, dest)


class OneEdit(NamedTuple):
kind: str
line: int
content: Optional[str] = None

@classmethod
def parse_data(cls, data: Dict) -> 'OneEdit':
return OneEdit(data.pop('kind'), data.pop('line'), data.pop('content', None))

def apply_to(self, fpath: Path) -> None:
fn = {
'insert': self._insert,
# 'delete': self._delete,
}[self.kind]
fn(fpath)

def _insert(self, fpath: Path) -> None:
content = fpath.read_bytes()
lines = content.split(b'\n')
assert self.content
lines.insert(self.line, self.content.encode())
fpath.write_bytes(b'\n'.join(lines))

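# Example edit data, mirroring the asio entry in the old catalog. Note that
# the content is inserted at zero-based list index `line` (list.insert
# semantics), not strictly "after line N":
#
#   OneEdit.parse_data({'kind': 'insert', 'line': 13,
#                       'content': '#define ASIO_STANDALONE 1'})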

class EditTransform(NamedTuple):
path: str
edits: Sequence[OneEdit] = []

@classmethod
def parse_data(cls, data: Dict) -> 'EditTransform':
return EditTransform(data.pop('path'), [OneEdit.parse_data(ed) for ed in data.pop('edits')])

def apply_to(self, p: Path) -> None:
fpath = p / self.path
for ed in self.edits:
ed.apply_to(fpath)


class WriteTransform(NamedTuple):
path: str
content: str

@classmethod
    def parse_data(cls, data: Dict) -> 'WriteTransform':
return WriteTransform(data.pop('path'), data.pop('content'))

def apply_to(self, p: Path) -> None:
fpath = p / self.path
        print('Writing to file', fpath, self.content)
fpath.write_text(self.content)


class RemoveTransform(NamedTuple):
path: Path
only_matching: Sequence[str] = ()

@classmethod
    def parse_data(cls, d: Any) -> 'RemoveTransform':
        p = d.pop('path')
        pat = d.pop('only-matching', ())
return RemoveTransform(Path(p), pat)

def apply_to(self, p: Path) -> None:
if p.is_dir():
self._apply_dir(p)
else:
p.unlink()

def _apply_dir(self, p: Path) -> None:
abspath = p / self.path
if not self.only_matching:
# Remove everything
if abspath.is_dir():
shutil.rmtree(abspath)
else:
abspath.unlink()
return

for pat in self.only_matching:
items = glob_if_exists(abspath, pat)
for f in items:
if f.is_dir():
shutil.rmtree(f)
else:
f.unlink()


class FSTransform(NamedTuple):
copy: Optional[CopyTransform] = None
move: Optional[MoveTransform] = None
remove: Optional[RemoveTransform] = None
write: Optional[WriteTransform] = None
edit: Optional[EditTransform] = None

def apply_to(self, p: Path) -> None:
for tr in (self.copy, self.move, self.remove, self.write, self.edit):
if tr:
tr.apply_to(p)

@classmethod
    def parse_data(cls, data: Any) -> 'FSTransform':
move = data.pop('move', None)
copy = data.pop('copy', None)
remove = data.pop('remove', None)
write = data.pop('write', None)
edit = data.pop('edit', None)
return FSTransform(
copy=None if copy is None else CopyTransform.parse_data(copy),
move=None if move is None else MoveTransform.parse_data(move),
remove=None if remove is None else RemoveTransform.parse_data(remove),
write=None if write is None else WriteTransform.parse_data(write),
edit=None if edit is None else EditTransform.parse_data(edit),
)

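# Putting the pieces together: one transform object may carry any of the five
# keys, and apply_to runs them in the fixed order copy, move, remove, write,
# edit. A hypothetical example:
#
#   FSTransform.parse_data({
#       'move': {'from': 'include', 'to': 'src', 'strip-components': 1},
#       'write': {'path': 'library.json', 'content': '{"name": "foo"}'},
#   })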

class HTTPRemoteSpec(NamedTuple):
url: str
transform: Sequence[FSTransform]

@classmethod
def parse_data(cls, data: Dict[str, Any]) -> 'HTTPRemoteSpec':
url = data.pop('url')
trs = [FSTransform.parse_data(tr) for tr in data.pop('transforms', [])]
return HTTPRemoteSpec(url, trs)

def make_local_dir(self):
return http_dl_unpack(self.url)


class GitSpec(NamedTuple):
url: str
ref: str
transform: Sequence[FSTransform]

@classmethod
def parse_data(cls, data: Dict[str, Any]) -> 'GitSpec':
ref = data.pop('ref')
url = data.pop('url')
trs = [FSTransform.parse_data(tr) for tr in data.pop('transform', [])]
return GitSpec(url=url, ref=ref, transform=trs)

@contextmanager
def make_local_dir(self) -> Iterator[Path]:
tdir = Path(tempfile.mkdtemp())
try:
            check_call(['git', 'clone', '--quiet', self.url, '--depth=1', f'--branch={self.ref}', str(tdir)])
yield tdir
finally:
shutil.rmtree(tdir)


class ForeignPackage(NamedTuple):
remote: Union[HTTPRemoteSpec, GitSpec]
transform: Sequence[FSTransform]
auto_lib: Optional[Tuple]

@classmethod
def parse_data(cls, data: Dict[str, Any]) -> 'ForeignPackage':
git = data.pop('git', None)
http = data.pop('http', None)
chosen = git or http
assert chosen, data
trs = data.pop('transform', [])
al = data.pop('auto-lib', None)
return ForeignPackage(
remote=GitSpec.parse_data(git) if git else HTTPRemoteSpec.parse_data(http),
transform=[FSTransform.parse_data(tr) for tr in trs],
auto_lib=al.split('/') if al else None,
)

@contextmanager
def make_local_dir(self, name: str, ver: VersionInfo) -> Iterator[Path]:
with self.remote.make_local_dir() as tdir:
for tr in self.transform:
tr.apply_to(tdir)
if self.auto_lib:
pkg_json = {
'name': name,
'version': str(ver),
'namespace': self.auto_lib[0],
}
lib_json = {'name': self.auto_lib[1]}
tdir.joinpath('package.jsonc').write_text(json.dumps(pkg_json))
tdir.joinpath('library.jsonc').write_text(json.dumps(lib_json))
yield tdir

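# A sketch of the 'remote' JSON object this parses (a Git remote here; an
# 'http' remote with a 'url' key works the same way). Values echo the old
# zlib catalog entry:
#
#   ForeignPackage.parse_data({
#       'git': {'url': 'https://github.com/madler/zlib.git', 'ref': 'v1.2.11'},
#       'auto-lib': 'zlib/zlib',
#       'transform': [],
#   })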

class SpecPackage(NamedTuple):
name: str
version: VersionInfo
depends: Sequence[Dependency]
description: str
remote: ForeignPackage

@classmethod
def parse_data(cls, name: str, version: str, data: Any) -> 'SpecPackage':
deps = data.pop('depends', [])
desc = data.pop('description', '[No description]')
remote = ForeignPackage.parse_data(data.pop('remote'))
return SpecPackage(
name,
VersionInfo.parse(version),
description=desc,
depends=[Dependency.parse(d) for d in deps],
remote=remote)


def iter_spec(path: Path) -> Iterable[SpecPackage]:
data = json.loads(path.read_text())
pkgs = data['packages']
return iter_spec_packages(pkgs)


def iter_spec_packages(data: Dict[str, Any]) -> Iterable[SpecPackage]:
for name, versions in data.items():
for version, defin in versions.items():
yield SpecPackage.parse_data(name, version, defin)

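# The spec document is keyed by package name, then by version. A minimal
# example (values illustrative):
#
#   {
#     "packages": {
#       "zlib": {
#         "1.2.11": {
#           "description": "...",
#           "depends": [],
#           "remote": {
#             "git": {"url": "https://github.com/madler/zlib.git", "ref": "v1.2.11"},
#             "auto-lib": "zlib/zlib"
#           }
#         }
#       }
#     }
#   }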

@contextmanager
def http_dl_unpack(url: str) -> Iterator[Path]:
req = request.urlopen(url)
tdir = Path(tempfile.mkdtemp())
ofile = tdir / '.dl-archive'
try:
with ofile.open('wb') as fd:
fd.write(req.read())
tf = tarfile.open(ofile)
tf.extractall(tdir)
tf.close()
ofile.unlink()
subdir = next(iter(Path(tdir).iterdir()))
yield subdir
finally:
shutil.rmtree(tdir)

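# Usage sketch (the URL is hypothetical): yields the single top-level
# directory of the unpacked archive, then cleans everything up on exit:
#
#   with http_dl_unpack('https://example.com/pkg-1.0.0.tar.gz') as src:
#       ...  # read files under `src`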

@contextmanager
def spec_as_local_tgz(spec: SpecPackage) -> Iterator[Path]:
with spec.remote.make_local_dir(spec.name, spec.version) as clone_dir:
out_tgz = clone_dir / 'sdist.tgz'
check_call(['dds', 'sdist', 'create', f'--project-dir={clone_dir}', f'--out={out_tgz}'])
yield out_tgz


class Repository:
def __init__(self, path: Path) -> None:
self._path = path
self._import_lock = Lock()

@property
def pkg_dir(self) -> Path:
return self._path / 'pkg'

@classmethod
def create(cls, dirpath: Path, name: str) -> 'Repository':
check_call(['dds', 'repoman', 'init', str(dirpath), f'--name={name}'])
return Repository(dirpath)

@classmethod
def open(cls, dirpath: Path) -> 'Repository':
return Repository(dirpath)

def import_tgz(self, path: Path) -> None:
check_call(['dds', 'repoman', 'import', str(self._path), str(path)])

def remove(self, name: str) -> None:
check_call(['dds', 'repoman', 'remove', str(self._path), name])

    def spec_import(self, spec: Path) -> None:
        all_specs = iter_spec(spec)
        want_import = (s for s in all_specs if self._should_import(s))
        pool = ThreadPoolExecutor(10)
        results = pool.map(self._get_and_import, want_import)
        for _ in results:
            # Drain the iterator so that exceptions raised in the workers propagate.
            pass

    def _should_import(self, spec: SpecPackage) -> bool:
expect_file = self.pkg_dir / spec.name / str(spec.version) / 'sdist.tar.gz'
return not expect_file.is_file()

def _get_and_import(self, spec: SpecPackage) -> None:
print(f'Import: {spec.name}@{spec.version}')
with spec_as_local_tgz(spec) as tgz:
with self._import_lock:
self.import_tgz(tgz)

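# Typical use, assuming `dds` is on PATH (create wraps `dds repoman init`,
# and spec_import pulls in every package the repository does not yet have):
#
#   repo = Repository.create(Path('my-repo'), 'my-repo-name')
#   repo.spec_import(Path('catalog.json'))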

class Arguments(Protocol):
dir: Path
spec: Path


def main(argv: Sequence[str]) -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--dir', '-d', help='Path to a repository to manage', required=True, type=Path)
parser.add_argument(
'--spec',
metavar='<spec-path>',
type=Path,
required=True,
        help='Provide a JSON document specifying how to obtain and import some packages')
args: Arguments = parser.parse_args(argv)
repo = Repository.open(args.dir)
repo.spec_import(args.spec)

return 0


def start() -> NoReturn:
sys.exit(main(sys.argv[1:]))

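# Example invocation (paths are illustrative):
#
#   python tools/mkrepo.py --dir=my-repo --spec=catalog.json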

if __name__ == "__main__":
start()
