
Update neo-sqlite3, generalize package remotes via URLs

This is a large changeset that reworks the way we store package
remote info. With these changes, a package remote is entirely
encoded in a single URL. This will help reduce complexity
down the road when multiple different remote types are supported.

The kind of a remote is specified by the URL's scheme, and the
URL parsing differs based on the scheme. For now, only git+http
and git+https are supported.
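
As a rough illustration (not part of this changeset), the sketch below shows
how one of the new URLs decomposes, mirroring what git_remote_listing::from_url
in this diff does; the package URL and lm usage pair are hypothetical examples:

    // Hedged sketch: decomposition of a v2 remote URL (illustrative values only).
    auto git = dds::git_remote_listing::from_url(
        "git+https://github.com/vector-of-bool/neo-sqlite3.git?lm=neo/sqlite3#0.4.1");
    // git.url      == "https://github.com/vector-of-bool/neo-sqlite3.git"  ("git+" prefix stripped)
    // git.ref      == "0.4.1"                      (taken from the URL fragment)
    // git.auto_lib == lm::usage{"neo", "sqlite3"}  (taken from the "?lm=" query parameter)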

This comes along with a change to the format of the catalog JSON.
Remote information is now entirely encoded in a URL string.
default_compile_flags
vector-of-bool, 4 years ago
Commit c160953106
31 files changed, 3695 additions and 4162 deletions
 1. catalog.json (+2403, -3032)
 2. catalog.old.json (+845, -772)
 3. docs/err/invalid-remote-url.rst (+14, -0)
 4. library.jsonc (+2, -1)
 5. package.jsonc (+3, -2)
 6. src/dds/catalog/catalog.cpp (+128, -92)
 7. src/dds/catalog/catalog.test.cpp (+5, -8)
 8. src/dds/catalog/import.cpp (+66, -62)
 9. src/dds/catalog/import.test.cpp (+47, -65)
10. src/dds/catalog/init_catalog.cpp (+2, -2)
11. src/dds/catalog/package_info.cpp (+20, -0)
12. src/dds/catalog/package_info.hpp (+5, -1)
13. src/dds/catalog/remote/git.cpp (+44, -3)
14. src/dds/catalog/remote/git.hpp (+2, -0)
15. src/dds/db/database.cpp (+26, -29)
16. src/dds/error/errors.cpp (+6, -0)
17. src/dds/error/errors.hpp (+1, -0)
18. src/dds/library/root.cpp (+4, -3)
19. src/dds/repo/repo.cpp (+5, -4)
20. src/dds/source/root.cpp (+3, -3)
21. src/dds/util/ranges.hpp (+0, -10)
22. tests/catalog/get_test.py (+3, -7)
23. tests/catalog/import_test.py (+3, -9)
24. tests/deps/build-deps/project/catalog.json (+4, -13)
25. tests/deps/git-remote/catalog.json (+3, -10)
26. tests/deps/no-deps/catalog.json (+1, -1)
27. tests/deps/use-cryptopp/project/catalog.json (+14, -18)
28. tests/deps/use-remote/catalog.json (+2, -5)
29. tests/deps/use-spdlog/project/catalog.json (+2, -6)
30. tools/ci.py (+1, -1)
31. tools/gen-catalog-json.py (+31, -3)

catalog.json (+2403, -3032)
Diff not shown: the file diff is too large to display.


catalog.old.json (+845, -772)
Diff not shown: the file diff is too large to display.


docs/err/invalid-remote-url.rst (+14, -0)

@@ -0,0 +1,14 @@
Error: Invalid Remote/Package URL
#################################

``dds`` encodes a lot of information about remote repositories and remote
packages in URLs. If you received this error, it may be because:

1. The URL syntax is invalid. Make sure that you have spelled it correctly.
2. The URL scheme (the part at the beginning, before the ``://``) is unsupported
by ``dds``. ``dds`` only supports a subset of possible URL schemes in
different contexts. Check the output carefully and read the documentation
about the task you are trying to solve.
3. There are missing URL components that the task is expecting. For example,
``git`` remote URLs require that the URL have a URL fragment specifying the
tag/branch to clone. (The fragment is the final ``#`` component.)
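
(Editorial example, not part of the new document or the diff: the snippet below
exercises the fragment requirement from item 3, using parse_remote_url and the
user_error/errc machinery this changeset extends; the URL is hypothetical.)

    // Hedged sketch: a git remote URL without a '#<ref>' fragment is rejected.
    try {
        dds::parse_remote_url("git+https://example.com/repo.git");  // missing '#<tag-or-branch>'
    } catch (const dds::user_error<dds::errc::invalid_remote_url>& e) {
        // "Git URL requires a fragment specifying the Git ref to clone"
    }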

library.jsonc (+2, -1)

@@ -6,14 +6,15 @@
"microsoft/wil",
"range-v3/range-v3",
"nlohmann/json",
"neo/sqlite3",
"neo/fun",
"neo/sqlite3",
"vob/semver",
"vob/pubgrub",
"vob/json5",
"vob/semester",
"hanickadot/ctre",
// "neo/io",
"neo/url",
// Explicit zlib link is required due to linker input order bug.
// Can be removed after alpha.5
"zlib/zlib",

package.jsonc (+3, -2)

@@ -8,9 +8,10 @@
"ms-wil@2020.3.16",
"range-v3@0.11.0",
"nlohmann-json@3.7.1",
"neo-sqlite3@0.2.3",
"neo-fun^0.3.2",
"neo-sqlite3@0.4.1",
"neo-fun~0.5.3",
"neo-compress^0.1.0",
"neo-url~0.1.2",
"semver@0.2.2",
"pubgrub@0.2.1",
"vob-json5@0.1.5",

src/dds/catalog/catalog.cpp (+128, -92)

@@ -7,7 +7,6 @@
#include <dds/error/errors.hpp>
#include <dds/solve/solve.hpp>
#include <dds/util/log.hpp>
#include <dds/util/ranges.hpp>

#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
@@ -22,12 +21,12 @@

using namespace dds;

namespace sqlite3 = neo::sqlite3;
using namespace sqlite3::literals;
namespace nsql = neo::sqlite3;
using namespace neo::sqlite3::literals;

namespace {

void migrate_repodb_1(sqlite3::database& db) {
void migrate_repodb_1(nsql::database& db) {
db.exec(R"(
CREATE TABLE dds_cat_pkgs (
pkg_id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -71,13 +70,70 @@ void migrate_repodb_1(sqlite3::database& db) {
)");
}

void migrate_repodb_2(sqlite3::database& db) {
void migrate_repodb_2(nsql::database& db) {
db.exec(R"(
ALTER TABLE dds_cat_pkgs
ADD COLUMN repo_transform TEXT NOT NULL DEFAULT '[]'
)");
}

void migrate_repodb_3(nsql::database& db) {
db.exec(R"(
CREATE TABLE dds_cat_remotes (
remote_id INTEGER PRIMARY KEY AUTOINCREMENT,
ident TEXT NOT NULL UNIQUE,
gen_ident TEXT NOT NULL,
remote_url TEXT NOT NULL
);

CREATE TABLE dds_cat_pkgs_new (
pkg_id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
version TEXT NOT NULL,
description TEXT NOT NULL,
remote_url TEXT NOT NULL,
remote_id INTEGER REFERENCES dds_cat_remotes DEFAULT NULL,
repo_transform TEXT NOT NULL DEFAULT '[]',
UNIQUE (name, version)
);

INSERT INTO dds_cat_pkgs_new(pkg_id,
name,
version,
description,
remote_url,
repo_transform)
SELECT pkg_id,
name,
version,
description,
'git+' || git_url || (
CASE
WHEN lm_name ISNULL THEN ''
ELSE ('?lm=' || lm_namespace || '/' || lm_name)
END
) || '#' || git_ref,
repo_transform
FROM dds_cat_pkgs;

CREATE TABLE dds_cat_pkg_deps_new (
dep_id INTEGER PRIMARY KEY AUTOINCREMENT,
pkg_id INTEGER NOT NULL REFERENCES dds_cat_pkgs_new(pkg_id),
dep_name TEXT NOT NULL,
low TEXT NOT NULL,
high TEXT NOT NULL,
UNIQUE(pkg_id, dep_name)
);

INSERT INTO dds_cat_pkg_deps_new SELECT * FROM dds_cat_pkg_deps;

DROP TABLE dds_cat_pkg_deps;
DROP TABLE dds_cat_pkgs;
ALTER TABLE dds_cat_pkgs_new RENAME TO dds_cat_pkgs;
ALTER TABLE dds_cat_pkg_deps_new RENAME TO dds_cat_pkg_deps;
)");
}

std::string transforms_to_json(const std::vector<fs_transformation>& trs) {
std::string acc = "[";
for (auto it = trs.begin(); it != trs.end(); ++it) {
@@ -104,16 +160,22 @@ void store_with_remote(neo::sqlite3::statement_cache& stmts,
const package_info& pkg,
const git_remote_listing& git) {
auto lm_usage = git.auto_lib.value_or(lm::usage{});
sqlite3::exec( //
stmts,
R"(

std::string url = git.url;
if (url.starts_with("https://") || url.starts_with("http://")) {
url = "git+" + url;
}
if (git.auto_lib.has_value()) {
url += "?lm=" + git.auto_lib->namespace_ + "/" + git.auto_lib->name;
}
url += "#" + git.ref;

nsql::exec( //
stmts(R"(
INSERT OR REPLACE INTO dds_cat_pkgs (
name,
version,
git_url,
git_ref,
lm_name,
lm_namespace,
remote_url,
description,
repo_transform
) VALUES (
@@ -121,21 +183,14 @@ void store_with_remote(neo::sqlite3::statement_cache& stmts,
?2,
?3,
?4,
CASE WHEN ?5 = '' THEN NULL ELSE ?5 END,
CASE WHEN ?6 = '' THEN NULL ELSE ?6 END,
?7,
?8
?5
)
)"_sql,
std::forward_as_tuple( //
pkg.ident.name,
pkg.ident.version.to_string(),
git.url,
git.ref,
lm_usage.name,
lm_usage.namespace_,
pkg.description,
transforms_to_json(git.transforms)));
)"_sql),
pkg.ident.name,
pkg.ident.version.to_string(),
url,
pkg.description,
transforms_to_json(git.transforms));
}

void do_store_pkg(neo::sqlite3::database& db,
@@ -162,23 +217,19 @@ void do_store_pkg(neo::sqlite3::database& db,
assert(dep.versions.num_intervals() == 1);
auto iv_1 = *dep.versions.iter_intervals().begin();
dds_log(trace, " Depends on: {}", dep.to_string());
sqlite3::exec(new_dep_st,
std::forward_as_tuple(db_pkg_id,
dep.name,
iv_1.low.to_string(),
iv_1.high.to_string()));
nsql::exec(new_dep_st, db_pkg_id, dep.name, iv_1.low.to_string(), iv_1.high.to_string());
}
}

void store_init_packages(sqlite3::database& db, sqlite3::statement_cache& st_cache) {
void store_init_packages(nsql::database& db, nsql::statement_cache& st_cache) {
dds_log(debug, "Restoring initial package data");
for (auto& pkg : init_catalog_packages()) {
do_store_pkg(db, st_cache, pkg);
}
}

void ensure_migrated(sqlite3::database& db) {
sqlite3::transaction_guard tr{db};
void ensure_migrated(nsql::database& db) {
nsql::transaction_guard tr{db};
db.exec(R"(
PRAGMA foreign_keys = 1;
CREATE TABLE IF NOT EXISTS dds_cat_meta AS
@@ -186,7 +237,7 @@ void ensure_migrated(sqlite3::database& db) {
SELECT * FROM init;
)");
auto meta_st = db.prepare("SELECT meta FROM dds_cat_meta");
auto [meta_json] = sqlite3::unpack_single<std::string>(meta_st);
auto [meta_json] = nsql::unpack_single<std::string>(meta_st);

auto meta = nlohmann::json::parse(meta_json);
if (!meta.is_object()) {
@@ -201,7 +252,7 @@ void ensure_migrated(sqlite3::database& db) {
"The catalog database metadata is invalid [bad dds_meta.version]");
}

constexpr int current_database_version = 2;
constexpr int current_database_version = 3;

int version = version_;

@@ -225,8 +276,12 @@ void ensure_migrated(sqlite3::database& db) {
dds_log(debug, "Applying catalog migration 2");
migrate_repodb_2(db);
}
meta["version"] = 2;
exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump()));
if (version < 3) {
dds_log(debug, "Applying catalog migration 3");
migrate_repodb_3(db);
}
meta["version"] = current_database_version;
exec(db.prepare("UPDATE dds_cat_meta SET meta=?"), meta.dump());

if (import_init_packages) {
dds_log(
@@ -253,10 +308,10 @@ catalog catalog::open(const std::string& db_path) {
fs::create_directories(pardir);
}
dds_log(debug, "Opening package catalog [{}]", db_path);
auto db = sqlite3::database::open(db_path);
auto db = nsql::database::open(db_path);
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
} catch (const nsql::sqlite3_error& e) {
dds_log(critical,
"Failed to load the repository database. It appears to be invalid/corrupted. The "
"exception message is: {}",
@@ -267,11 +322,11 @@ catalog catalog::open(const std::string& db_path) {
return catalog(std::move(db));
}

catalog::catalog(sqlite3::database db)
catalog::catalog(nsql::database db)
: _db(std::move(db)) {}

void catalog::store(const package_info& pkg) {
sqlite3::transaction_guard tr{_db};
nsql::transaction_guard tr{_db};
do_store_pkg(_db, _stmt_cache, pkg);
}

@@ -283,26 +338,20 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
pkg_id,
name,
version,
git_url,
git_ref,
lm_name,
lm_namespace,
remote_url,
description,
repo_transform
FROM dds_cat_pkgs
WHERE name = ? AND version = ?
)"_sql);
st.reset();
st.bindings = std::forward_as_tuple(pk_id.name, ver_str);
auto opt_tup = sqlite3::unpack_single_opt<std::int64_t,
std::string,
std::string,
std::optional<std::string>,
std::optional<std::string>,
std::optional<std::string>,
std::optional<std::string>,
std::string,
std::string>(st);
st.bindings() = std::forward_as_tuple(pk_id.name, ver_str);
auto opt_tup = nsql::unpack_single_opt<std::int64_t,
std::string,
std::string,
std::string,
std::string,
std::string>(st);
if (!opt_tup) {
dym_target::fill([&] {
auto all_ids = this->all();
@@ -312,20 +361,9 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
});
return std::nullopt;
}
const auto& [pkg_id,
name,
version,
git_url,
git_ref,
lm_name,
lm_namespace,
description,
repo_transform]
= *opt_tup;
const auto& [pkg_id, name, version, remote_url, description, repo_transform] = *opt_tup;
assert(pk_id.name == name);
assert(pk_id.version == semver::version::parse(version));
assert(git_url);
assert(git_ref);

auto deps = dependencies_of(pk_id);

@@ -333,12 +371,7 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
pk_id,
std::move(deps),
std::move(description),
git_remote_listing{
*git_url,
*git_ref,
lm_name ? std::make_optional(lm::usage{*lm_namespace, *lm_name}) : std::nullopt,
{},
},
parse_remote_url(remote_url),
};

if (!repo_transform.empty()) {
@@ -373,33 +406,34 @@ auto pair_to_pkg_id = [](auto&& pair) {
};

std::vector<package_id> catalog::all() const noexcept {
return view_safe(sqlite3::exec_iter<std::string, std::string>( //
_stmt_cache,
"SELECT name, version FROM dds_cat_pkgs"_sql))
return nsql::exec_tuples<std::string, std::string>(
_stmt_cache("SELECT name, version FROM dds_cat_pkgs"_sql))
| neo::lref //
| ranges::views::transform(pair_to_pkg_id) //
| ranges::to_vector;
}

std::vector<package_id> catalog::by_name(std::string_view sv) const noexcept {
return view_safe(sqlite3::exec_iter<std::string, std::string>( //
_stmt_cache,
R"(
return nsql::exec_tuples<std::string, std::string>( //
_stmt_cache(
R"(
SELECT name, version
FROM dds_cat_pkgs
WHERE name = ?
)"_sql,
std::tie(sv))) //
)"_sql),
sv) //
| neo::lref //
| ranges::views::transform(pair_to_pkg_id) //
| ranges::to_vector;
}

std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const noexcept {
dds_log(trace, "Lookup dependencies of {}@{}", pkg.name, pkg.version.to_string());
return view_safe(sqlite3::exec_iter<std::string,
std::string,
std::string>( //
_stmt_cache,
R"(
return nsql::exec_tuples<std::string,
std::string,
std::string>( //
_stmt_cache(
R"(
WITH this_pkg_id AS (
SELECT pkg_id
FROM dds_cat_pkgs
@@ -409,8 +443,10 @@ std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const no
FROM dds_cat_pkg_deps
WHERE pkg_id IN this_pkg_id
ORDER BY dep_name
)"_sql,
std::forward_as_tuple(pkg.name, pkg.version.to_string()))) //
)"_sql),
pkg.name,
pkg.version.to_string()) //
| neo::lref //
| ranges::views::transform([](auto&& pair) {
auto& [name, low, high] = pair;
auto dep
@@ -425,14 +461,14 @@ void catalog::import_json_str(std::string_view content) {
dds_log(trace, "Importing JSON string into catalog");
auto pkgs = parse_packages_json(content);

sqlite3::transaction_guard tr{_db};
nsql::transaction_guard tr{_db};
for (const auto& pkg : pkgs) {
store(pkg);
do_store_pkg(_db, _stmt_cache, pkg);
}
}

void catalog::import_initial() {
sqlite3::transaction_guard tr{_db};
nsql::transaction_guard tr{_db};
dds_log(info, "Restoring built-in initial catalog contents");
store_init_packages(_db, _stmt_cache);
}
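
(Editorial note: to make the migrate_repodb_3 step earlier in this file's diff
concrete, here is a hedged C++ rendering of the remote_url that the SQL CASE
expression synthesizes for one pre-existing row; the row values are made up.)

    // Hedged sketch of the URL produced by the catalog migration (illustrative values).
    std::string git_url = "https://github.com/vector-of-bool/neo-fun.git";
    std::string git_ref = "0.3.2";
    std::optional<std::string> lm_namespace = "neo", lm_name = "fun";
    std::string remote_url = "git+" + git_url
        + (lm_name ? "?lm=" + *lm_namespace + "/" + *lm_name : std::string{})
        + "#" + git_ref;
    // remote_url == "git+https://github.com/vector-of-bool/neo-fun.git?lm=neo/fun#0.3.2"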

src/dds/catalog/catalog.test.cpp (+5, -8)

@@ -30,7 +30,7 @@ TEST_CASE_METHOD(catalog_test_case, "Store a simple package") {
dds::package_id("foo", semver::version::parse("1.2.3")),
{},
"example",
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}},
dds::git_remote_listing{"git+http://example.com", "master", std::nullopt, {}},
});

auto pkgs = db.by_name("foo");
@@ -49,7 +49,7 @@ TEST_CASE_METHOD(catalog_test_case, "Store a simple package") {
dds::package_id("foo", semver::version::parse("1.2.3")),
{},
"example",
dds::git_remote_listing{"http://example.com", "develop", std::nullopt, {}},
dds::git_remote_listing{"git+http://example.com", "develop", std::nullopt, {}},
}));
// The previous pkg_id is still a valid lookup key
info = db.get(pkgs[0]);
@@ -65,7 +65,7 @@ TEST_CASE_METHOD(catalog_test_case, "Package requirements") {
{"baz", {semver::version::parse("5.3.0"), semver::version::parse("6.0.0")}},
},
"example",
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}},
dds::git_remote_listing{"git+http://example.com", "master", std::nullopt, {}},
});
auto pkgs = db.by_name("foo");
REQUIRE(pkgs.size() == 1);
@@ -78,17 +78,14 @@ TEST_CASE_METHOD(catalog_test_case, "Package requirements") {

TEST_CASE_METHOD(catalog_test_case, "Parse JSON repo") {
db.import_json_str(R"({
"version": 1,
"version": 2,
"packages": {
"foo": {
"1.2.3": {
"depends": [
"bar~4.2.1"
],
"git": {
"url": "http://example.com",
"ref": "master"
}
url: "git+http://example.com#master"
}
}
}

src/dds/catalog/import.cpp (+66, -62)

@@ -6,6 +6,7 @@
#include <fmt/core.h>
#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/url.hpp>
#include <semester/walk.hpp>

#include <optional>
@@ -45,59 +46,60 @@ template <typename... Args>
throw_user_error<dds::errc::invalid_catalog_json>(NEO_FWD(args)...);
}

git_remote_listing parse_git_remote(const json5::data& data) {
git_remote_listing git;
auto make_dep = [](std::string const& str) {
using namespace semester::walk_ops;
try {
return dependency::parse_depends_string(str);
} catch (std::runtime_error const& e) {
import_error(std::string(walk.path()) + e.what());
}
};

auto convert_version_str = [](std::string_view str) {
using namespace semester::walk_ops;
try {
return semver::version::parse(str);
} catch (const semver::invalid_version& e) {
import_error("{}: version string '{}' is invalid: {}", walk.path(), str, e.what());
}
};

walk(data,
require_obj{"Git remote should be an object"},
mapping{required_key{"url",
"A git 'url' string is required",
require_str("Git URL should be a string"),
put_into(git.url)},
required_key{"ref",
"A git 'ref' is required, and must be a tag or branch name",
require_str("Git ref should be a string"),
put_into(git.ref)},
if_key{"auto-lib",
require_str("'auto-lib' should be a string"),
put_into(git.auto_lib,
[](std::string const& str) {
try {
return lm::split_usage_string(str);
} catch (const std::runtime_error& e) {
import_error("{}: {}", walk.path(), e.what());
}
})},
if_key{"transform",
require_array{"Expect an array of transforms"},
for_each{put_into(std::back_inserter(git.transforms), [](auto&& dat) {
try {
return fs_transformation::from_json(dat);
} catch (const semester::walk_error& e) {
import_error(e.what());
}
})}}});

return git;
}
auto parse_remote = [](const std::string& str) {
using namespace semester::walk_ops;
try {
return parse_remote_url(str);
} catch (const neo::url_validation_error& e) {
import_error("{}: Invalid URL: {}", walk.path(), str);
} catch (const user_error<errc::invalid_remote_url>& e) {
import_error("{}: Invalid URL: {}", walk.path(), e.what());
}
};

auto parse_fs_transforms = [](auto&& tr_vec) {
using namespace semester::walk_ops;
return walk_seq{
require_array{"Expect an array of transforms"},
for_each{
put_into(std::back_inserter(tr_vec),
[&](auto&& dat) {
try {
return fs_transformation::from_json(dat);
} catch (const semester::walk_error& e) {
import_error(e.what());
}
}),
},
};
};

package_info
parse_pkg_json_v1(std::string_view name, semver::version version, const json5::data& data) {
parse_pkg_json_v2(std::string_view name, semver::version version, const json5::data& data) {
package_info ret;
ret.ident = package_id{std::string{name}, version};
std::vector<fs_transformation> fs_trs;

using namespace semester::walk_ops;

auto make_dep = [&](std::string const& str) {
try {
return dependency::parse_depends_string(str);
} catch (std::runtime_error const& e) {
import_error(std::string(walk.path()) + e.what());
}
};

auto check_one_remote = [&](auto&&) {
if (!semester::holds_alternative<std::monostate>(ret.remote)) {
return walk.reject("Cannot specify multiple remotes for a package");
@@ -114,10 +116,12 @@ parse_pkg_json_v1(std::string_view name, semver::version version, const json5::d
for_each{require_str{"Each dependency should be a string"},
put_into{std::back_inserter(ret.deps), make_dep}}},
if_key{
"git",
"url",
require_str{"Remote URL should be a string"},
check_one_remote,
put_into(ret.remote, parse_git_remote),
}});
put_into(ret.remote, parse_remote),
},
if_key{"transform", parse_fs_transforms(fs_trs)}});

if (semester::holds_alternative<std::monostate>(ret.remote)) {
import_error("{}: Package listing for {} does not have any remote information",
@@ -125,10 +129,19 @@ parse_pkg_json_v1(std::string_view name, semver::version version, const json5::d
ret.ident.to_string());
}

if (semester::holds_alternative<git_remote_listing>(ret.remote)) {
semester::get<git_remote_listing>(ret.remote).transforms = std::move(fs_trs);
} else {
if (!fs_trs.empty()) {
throw_user_error<errc::invalid_catalog_json>(
"{}: Filesystem transforms are not supported for this remote type", walk.path());
}
}

return ret;
}

std::vector<package_info> parse_json_v1(const json5::data& data) {
std::vector<package_info> parse_json_v2(const json5::data& data) {
std::vector<package_info> acc_pkgs;

std::string pkg_name;
@@ -138,19 +151,7 @@ std::vector<package_info> parse_json_v1(const json5::data& data) {
using namespace semester::walk_ops;

auto convert_pkg_obj
= [&](auto&& dat) { return parse_pkg_json_v1(pkg_name, pkg_version, dat); };

auto convert_version_str = [&](std::string_view str) {
try {
return semver::version::parse(str);
} catch (const semver::invalid_version& e) {
throw_user_error<errc::invalid_catalog_json>("{}: version string '{}' is invalid: {}",
walk.path(),
pkg_name,
str,
e.what());
}
};
= [&](auto&& dat) { return parse_pkg_json_v2(pkg_name, pkg_version, dat); };

auto import_pkg_versions
= walk_seq{require_obj{"Package entries must be JSON objects"},
@@ -196,8 +197,11 @@ std::vector<package_info> dds::parse_packages_json(std::string_view content) {

try {
if (version == 1.0) {
dds_log(trace, "Processing JSON data as v1 data");
return parse_json_v1(data);
throw_user_error<errc::invalid_catalog_json>(
"Support for catalog JSON v1 has been removed");
} else if (version == 2.0) {
dds_log(trace, "Processing JSON data as v2 data");
return parse_json_v2(data);
} else {
throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'",
version);

src/dds/catalog/import.test.cpp (+47, -65)

@@ -6,7 +6,7 @@

TEST_CASE("An empty import is okay") {
// An empty JSON with no packages in it
auto pkgs = dds::parse_packages_json("{version: 1, packages: {}}");
auto pkgs = dds::parse_packages_json("{version: 2, packages: {}}");
CHECK(pkgs.empty());
}

@@ -19,45 +19,39 @@ TEST_CASE("Valid/invalid package JSON5") {
// Missing keys
"{}",
// Missing "packages"
"{version: 1}",
"{version: 2}",
// Bad version
"{version: 1.7, packages: {}}",
"{version: 2.7, packages: {}}",
"{version: [], packages: {}}",
"{version: null, packages: {}}",
// 'packages' should be an object
"{version: 1, packages: []}",
"{version: 1, packages: null}",
"{version: 1, packages: 4}",
"{version: 1, packages: 'lol'}",
"{version: 2, packages: []}",
"{version: 2, packages: null}",
"{version: 2, packages: 4}",
"{version: 2, packages: 'lol'}",
// Objects in 'packages' should be objects
"{version:1, packages:{foo:null}}",
"{version:1, packages:{foo:[]}}",
"{version:1, packages:{foo:9}}",
"{version:1, packages:{foo:'lol'}}",
"{version:2, packages:{foo:null}}",
"{version:2, packages:{foo:[]}}",
"{version:2, packages:{foo:9}}",
"{version:2, packages:{foo:'lol'}}",
// Objects in 'packages' shuold have version strings
"{version:1, packages:{foo:{'lol':{}}}}",
"{version:1, packages:{foo:{'1.2':{}}}}",
"{version:2, packages:{foo:{'lol':{}}}}",
"{version:2, packages:{foo:{'1.2':{}}}}",
// No remote
"{version:1, packages:{foo:{'1.2.3':{}}}}",
// Bad empty git
"{version:1, packages:{foo:{'1.2.3':{git:{}}}}}",
// Git `url` and `ref` should be a string
"{version:1, packages:{foo:{'1.2.3':{git:{url:2, ref:''}}}}}",
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:2}}}}}",
"{version:2, packages:{foo:{'1.2.3':{}}}}",
// Bad empty URL
"{version:2, packages:{foo:{'1.2.3':{url: ''}}}}",
// Git URL must have a fragment
"{version:2, packages:{foo:{'1.2.3':{url:'git+http://example.com'}}}}",
// 'auto-lib' should be a usage string
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':3}}}}}",
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'ffasdf'}}}}}",
"{version:2, packages:{foo:{'1.2.3':{url:'git+http://example.com?lm=lol#1.0}}}}",
// 'transform' should be an array
R"(
{
version: 1,
version: 2,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: 'lol hi',
}
url: 'git+http://example.com#master,
transform: 'lol hi'
}}}
}
)",
@@ -71,49 +65,41 @@ TEST_CASE("Valid/invalid package JSON5") {

std::string_view goods[] = {
// Basic empty:
"{version:1, packages:{}}",
"{version:2, packages:{}}",
// No versions for 'foo' is weird, but okay
"{version:1, packages:{foo:{}}}",
"{version:2, packages:{foo:{}}}",
// Basic package with minimum info:
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:''}}}}}",
"{version:2, packages:{foo:{'1.2.3':{url: 'git+http://example.com#master'}}}}",
// Minimal auto-lib:
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'a/b'}}}}}",
"{version:2, packages:{foo:{'1.2.3':{url: 'git+http://example.com?lm=a/b#master'}}}}",
// Empty transforms:
R"(
{
version: 1,
version: 2,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: [],
}
url: 'git+http://example.com#master',
transform: [],
}}}
}
)",
// Basic transform:
R"(
{
version: 1,
version: 2,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: [{
copy: {
from: 'here',
to: 'there',
include: [
"*.c",
"*.cpp",
"*.h",
'*.txt'
]
}
}],
}
url: 'git+http://example.com#master',
transform: [{
copy: {
from: 'here',
to: 'there',
include: [
"*.c",
"*.cpp",
"*.h",
'*.txt'
]
}
}],
}}}
}
)",
@@ -127,15 +113,11 @@ TEST_CASE("Valid/invalid package JSON5") {
TEST_CASE("Check a single object") {
// An empty JSON with no packages in it
auto pkgs = dds::parse_packages_json(R"({
version: 1,
version: 2,
packages: {
foo: {
'1.2.3': {
git: {
url: 'foo',
ref: 'fasdf',
'auto-lib': 'a/b',
}
url: 'git+http://example.com?lm=a/b#master',
}
}
}
@@ -146,8 +128,8 @@ TEST_CASE("Check a single object") {
CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote));

auto git = std::get<dds::git_remote_listing>(pkgs[0].remote);
CHECK(git.url == "foo");
CHECK(git.ref == "fasdf");
CHECK(git.url == "http://example.com");
CHECK(git.ref == "master");
REQUIRE(git.auto_lib);
CHECK(git.auto_lib->namespace_ == "a");
CHECK(git.auto_lib->name == "b");

src/dds/catalog/init_catalog.cpp (+2, -2)
Diff not shown: the file diff is too large to display.


src/dds/catalog/package_info.cpp (+20, -0)

@@ -0,0 +1,20 @@
#include "./package_info.hpp"

#include <dds/error/errors.hpp>

#include <neo/url.hpp>

using namespace dds;

dds::remote_listing_var dds::parse_remote_url(std::string_view sv) {
auto url = neo::url::parse(sv);
if (url.scheme == "git+https" || url.scheme == "git+http" || url.scheme == "https+git"
|| url.scheme == "http+git" || url.scheme == "git") {
return git_remote_listing::from_url(sv);
} else {
throw_user_error<
errc::invalid_remote_url>("Unknown scheme '{}' for remote package URL '{}'",
url.scheme,
sv);
}
}

src/dds/catalog/package_info.hpp (+5, -1)

@@ -14,12 +14,16 @@

namespace dds {

using remote_listing_var = std::variant<std::monostate, git_remote_listing>;

remote_listing_var parse_remote_url(std::string_view url);

struct package_info {
package_id ident;
std::vector<dependency> deps;
std::string description;

std::variant<std::monostate, git_remote_listing> remote;
remote_listing_var remote;
};

} // namespace dds
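
(Editorial usage sketch, not in the diff: the variant returned by
parse_remote_url can be inspected the same way the updated import tests above
do; the expected values match the "Check a single object" test case.)

    // Hedged sketch: consuming the remote_listing_var produced by parse_remote_url.
    auto remote = dds::parse_remote_url("git+http://example.com?lm=a/b#master");
    if (std::holds_alternative<dds::git_remote_listing>(remote)) {
        auto& git = std::get<dds::git_remote_listing>(remote);
        // git.url == "http://example.com", git.ref == "master"
        // git.auto_lib->namespace_ == "a", git.auto_lib->name == "b"
    }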

src/dds/catalog/remote/git.cpp (+44, -3)

@@ -4,9 +4,13 @@
#include <dds/proc.hpp>
#include <dds/util/log.hpp>

#include <neo/url.hpp>
#include <neo/url/query.hpp>
#include <nlohmann/json.hpp>

void dds::git_remote_listing::pull_to(const dds::package_id& pid, dds::path_ref dest) const {
using namespace dds;

void git_remote_listing::pull_to(const package_id& pid, path_ref dest) const {
fs::remove_all(dest);
using namespace std::literals;
dds_log(info, "Clone Git repository [{}] (at {}) to [{}]", url, ref, dest.string());
@@ -27,16 +31,53 @@ void dds::git_remote_listing::pull_to(const dds::package_id& pid, dds::path_ref
if (auto_lib.has_value()) {
dds_log(info, "Generating library data automatically");

auto pkg_strm = dds::open(dest / "package.json5", std::ios::binary | std::ios::out);
auto pkg_strm = open(dest / "package.json5", std::ios::binary | std::ios::out);
auto man_json = nlohmann::json::object();
man_json["name"] = pid.name;
man_json["version"] = pid.version.to_string();
man_json["namespace"] = auto_lib->namespace_;
pkg_strm << nlohmann::to_string(man_json);

auto lib_strm = dds::open(dest / "library.json5", std::ios::binary | std::ios::out);
auto lib_strm = open(dest / "library.json5", std::ios::binary | std::ios::out);
auto lib_json = nlohmann::json::object();
lib_json["name"] = auto_lib->name;
lib_strm << nlohmann::to_string(lib_json);
}
}

git_remote_listing git_remote_listing::from_url(std::string_view sv) {
auto url = neo::url::parse(sv);
dds_log(trace, "Create Git remote listing from URL '{}'", sv);

auto ref = url.fragment;
url.fragment = {};
auto q = url.query;
url.query = {};

std::optional<lm::usage> auto_lib;

if (url.scheme.starts_with("git+")) {
url.scheme = url.scheme.substr(4);
} else if (url.scheme.ends_with("+git")) {
url.scheme = url.scheme.substr(0, url.scheme.size() - 4);
} else {
// Leave the URL as-is
}

if (q) {
neo::basic_query_string_view qsv{*q};
for (auto qstr : qsv) {
if (qstr.key_raw() != "lm") {
dds_log(warn, "Unknown query string parameter in package url: '{}'", qstr.string());
} else {
auto_lib = lm::split_usage_string(qstr.value_decoded());
}
}
}

if (!ref) {
throw_user_error<errc::invalid_remote_url>(
"Git URL requires a fragment specifying the Git ref to clone");
}
return {.url = url.to_string(), .ref = *ref, .auto_lib = auto_lib, .transforms = {}};
}
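
(Editorial sketch, not in the diff: the "git+" prefix and "+git" suffix handling
above means several scheme spellings normalize to the same clone URL; the
example URLs here are hypothetical.)

    // Hedged sketch: both accepted scheme spellings yield the same listing fields.
    auto a = dds::git_remote_listing::from_url("git+https://example.com/repo.git#main");
    auto b = dds::git_remote_listing::from_url("https+git://example.com/repo.git#main");
    // a.url == b.url == "https://example.com/repo.git";  a.ref == b.ref == "main"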

src/dds/catalog/remote/git.hpp (+2, -0)

@@ -19,6 +19,8 @@ struct git_remote_listing {
std::vector<fs_transformation> transforms;

void pull_to(const package_id& pid, path_ref path) const;

static git_remote_listing from_url(std::string_view sv);
};

} // namespace dds

src/dds/db/database.cpp (+26, -29)

@@ -14,13 +14,13 @@

using namespace dds;

namespace sqlite3 = neo::sqlite3;
using sqlite3::exec;
using namespace sqlite3::literals;
namespace nsql = neo::sqlite3;
using nsql::exec;
using namespace nsql::literals;

namespace {

void migrate_1(sqlite3::database& db) {
void migrate_1(nsql::database& db) {
db.exec(R"(
CREATE TABLE dds_files (
file_id INTEGER PRIMARY KEY,
@@ -51,8 +51,8 @@ void migrate_1(sqlite3::database& db) {
)");
}

void ensure_migrated(sqlite3::database& db) {
sqlite3::transaction_guard tr{db};
void ensure_migrated(nsql::database& db) {
nsql::transaction_guard tr{db};
db.exec(R"(
PRAGMA foreign_keys = 1;
CREATE TABLE IF NOT EXISTS dds_meta AS
@@ -60,7 +60,7 @@ void ensure_migrated(sqlite3::database& db) {
SELECT * FROM init;
)");
auto meta_st = db.prepare("SELECT meta FROM dds_meta");
auto [meta_json] = sqlite3::unpack_single<std::string>(meta_st);
auto [meta_json] = nsql::unpack_single<std::string>(meta_st);

auto meta = nlohmann::json::parse(meta_json);
if (!meta.is_object()) {
@@ -77,26 +77,26 @@ void ensure_migrated(sqlite3::database& db) {
migrate_1(db);
}
meta["version"] = 1;
exec(db, "UPDATE dds_meta SET meta=?", std::forward_as_tuple(meta.dump()));
exec(db.prepare("UPDATE dds_meta SET meta=?"), meta.dump());
}

} // namespace

database database::open(const std::string& db_path) {
auto db = sqlite3::database::open(db_path);
auto db = nsql::database::open(db_path);
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
} catch (const nsql::sqlite3_error& e) {
dds_log(
error,
"Failed to load the databsae. It appears to be invalid/corrupted. We'll delete it and "
"create a new one. The exception message is: {}",
e.what());
fs::remove(db_path);
db = sqlite3::database::open(db_path);
db = nsql::database::open(db_path);
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
} catch (const nsql::sqlite3_error& e) {
dds_log(critical,
"Failed to apply database migrations to recovery database. This is a critical "
"error. The exception message is: {}",
@@ -107,25 +107,25 @@ database database::open(const std::string& db_path) {
return database(std::move(db));
}

database::database(sqlite3::database db)
database::database(nsql::database db)
: _db(std::move(db)) {}

std::int64_t database::_record_file(path_ref path_) {
auto path = fs::weakly_canonical(path_);
sqlite3::exec(_stmt_cache(R"(
nsql::exec(_stmt_cache(R"(
INSERT OR IGNORE INTO dds_files (path)
VALUES (?)
)"_sql),
std::forward_as_tuple(path.generic_string()));
path.generic_string());
auto& st = _stmt_cache(R"(
SELECT file_id
FROM dds_files
WHERE path = ?1
)"_sql);
st.reset();
auto str = path.generic_string();
st.bindings[1] = str;
auto [rowid] = sqlite3::unpack_single<std::int64_t>(st);
auto str = path.generic_string();
st.bindings()[1] = str;
auto [rowid] = nsql::unpack_single<std::int64_t>(st);
return rowid;
}

@@ -136,7 +136,7 @@ void database::record_dep(path_ref input, path_ref output, fs::file_time_type in
INSERT OR REPLACE INTO dds_deps (input_file_id, output_file_id, input_mtime)
VALUES (?, ?, ?)
)"_sql);
sqlite3::exec(st, std::forward_as_tuple(in_id, out_id, input_mtime.time_since_epoch().count()));
nsql::exec(st, in_id, out_id, input_mtime.time_since_epoch().count());
}

void database::store_file_command(path_ref file, const command_info& cmd) {
@@ -147,10 +147,7 @@ void database::store_file_command(path_ref file, const command_info& cmd) {
INTO dds_file_commands(file_id, command, output)
VALUES (?1, ?2, ?3)
)"_sql);
sqlite3::exec(st,
std::forward_as_tuple(file_id,
std::string_view(cmd.command),
std::string_view(cmd.output)));
nsql::exec(st, file_id, std::string_view(cmd.command), std::string_view(cmd.output));
}

void database::forget_inputs_of(path_ref file) {
@@ -163,7 +160,7 @@ void database::forget_inputs_of(path_ref file) {
DELETE FROM dds_deps
WHERE output_file_id IN id_to_delete
)"_sql);
sqlite3::exec(st, std::forward_as_tuple(fs::weakly_canonical(file).generic_string()));
nsql::exec(st, fs::weakly_canonical(file).generic_string());
}

std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_) const {
@@ -180,11 +177,11 @@ std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_)
WHERE output_file_id IN file
)"_sql);
st.reset();
st.bindings[1] = file.generic_string();
auto tup_iter = sqlite3::iter_tuples<std::string, std::int64_t>(st);
st.bindings()[1] = file.generic_string();
auto tup_iter = nsql::iter_tuples<std::string, std::int64_t>(st);

std::vector<input_file_info> ret;
for (auto& [path, mtime] : tup_iter) {
for (auto [path, mtime] : tup_iter) {
ret.emplace_back(
input_file_info{path, fs::file_time_type(fs::file_time_type::duration(mtime))});
}
@@ -208,8 +205,8 @@ std::optional<command_info> database::command_of(path_ref file_) const {
WHERE file_id IN file
)"_sql);
st.reset();
st.bindings[1] = file.generic_string();
auto opt_res = sqlite3::unpack_single_opt<std::string, std::string>(st);
st.bindings()[1] = file.generic_string();
auto opt_res = nsql::unpack_single_opt<std::string, std::string>(st);
if (!opt_res) {
return std::nullopt;
}

src/dds/error/errors.cpp (+6, -0)

@@ -37,6 +37,8 @@ std::string error_url_suffix(dds::errc ec) noexcept {
return "no-catalog-remote-info.html";
case errc::git_clone_failure:
return "git-clone-failure.html";
case errc::invalid_remote_url:
return "invalid-remote-url.html";
case errc::invalid_repo_transform:
return "invalid-repo-transform.html";
case errc::sdist_ident_mismatch:
@@ -172,6 +174,8 @@ dds tried to clone a repository using Git, but the clone operation failed.
There are a variety of possible causes. It is best to check the output from
Git in diagnosing this failure.
)";
case errc::invalid_remote_url:
return R"(The given package/remote URL is invalid)";
case errc::invalid_repo_transform:
return R"(
A 'transform' property in a catalog entry contains an invalid transformation.
@@ -284,6 +288,8 @@ std::string_view dds::default_error_string(dds::errc ec) noexcept {
"packages";
case errc::git_clone_failure:
return "A git-clone operation failed.";
case errc::invalid_remote_url:
return "The given package/remote URL is not valid";
case errc::invalid_repo_transform:
return "A repository filesystem transformation is invalid";
case errc::sdist_ident_mismatch:

src/dds/error/errors.hpp (+1, -0)

@@ -24,6 +24,7 @@ enum class errc {
no_catalog_remote_info,

git_clone_failure,
invalid_remote_url,
invalid_repo_transform,
sdist_ident_mismatch,
sdist_exists,

src/dds/library/root.cpp (+4, -3)

@@ -5,8 +5,8 @@
#include <dds/source/root.hpp>
#include <dds/util/algo.hpp>
#include <dds/util/log.hpp>
#include <dds/util/ranges.hpp>

#include <neo/ref.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/transform.hpp>

@@ -105,8 +105,9 @@ std::vector<library_root> dds::collect_libraries(path_ref root) {

if (fs::is_directory(pf_libs_dir)) {
extend(ret,
view_safe(fs::directory_iterator(pf_libs_dir)) //
| ranges::views::filter(has_library_dirs) //
fs::directory_iterator(pf_libs_dir) //
| neo::lref //
| ranges::views::filter(has_library_dirs) //
| ranges::views::transform(
[&](auto p) { return library_root::from_directory(fs::canonical(p)); }));
}

src/dds/repo/repo.cpp (+5, -4)

@@ -6,9 +6,9 @@
#include <dds/source/dist.hpp>
#include <dds/util/log.hpp>
#include <dds/util/paths.hpp>
#include <dds/util/ranges.hpp>
#include <dds/util/string.hpp>

#include <neo/ref.hpp>
#include <range/v3/action/sort.hpp>
#include <range/v3/action/unique.hpp>
#include <range/v3/range/conversion.hpp>
@@ -47,10 +47,11 @@ repository repository::_open_for_directory(bool writeable, path_ref dirpath) {

auto entries =
// Get the top-level `name-version` dirs
view_safe(fs::directory_iterator(dirpath)) //
// // Convert each dir into an `sdist` object
fs::directory_iterator(dirpath) //
| neo::lref //
// Convert each dir into an `sdist` object
| ranges::views::transform(try_read_sdist) //
// // Drop items that failed to load
// Drop items that failed to load
| ranges::views::filter([](auto&& opt) { return opt.has_value(); }) //
| ranges::views::transform([](auto&& opt) { return *opt; }) //
| to<sdist_set>();

src/dds/source/root.cpp (+3, -3)

@@ -1,7 +1,6 @@
#include "./root.hpp"

#include <dds/util/ranges.hpp>

#include <neo/ref.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/transform.hpp>
@@ -12,7 +11,8 @@ std::vector<source_file> source_root::collect_sources() const {
using namespace ranges::views;
// Collect all source files from the directory
return //
view_safe(fs::recursive_directory_iterator(path)) //
fs::recursive_directory_iterator(path) //
| neo::lref //
| filter([](auto&& entry) { return entry.is_regular_file(); }) //
| transform([&](auto&& entry) { return source_file::from_path(entry, path); }) //
// source_file::from_path returns an optional. Drop nulls

src/dds/util/ranges.hpp (+0, -10)

@@ -1,10 +0,0 @@
#pragma once

namespace dds {

template <typename T>
constexpr auto& view_safe(T&& t) {
return t;
}

} // namespace dds

tests/catalog/get_test.py (+3, -7)

@@ -12,16 +12,12 @@ def test_get(dds: DDS):

json_path = dds.build_dir / 'catalog.json'
import_data = {
'version': 1,
'version': 2,
'packages': {
'neo-sqlite3': {
'0.3.0': {
'git': {
'url':
'https://github.com/vector-of-bool/neo-sqlite3.git',
'ref':
'0.3.0',
},
'url':
'git+https://github.com/vector-of-bool/neo-sqlite3.git#0.3.0',
},
},
},

tests/catalog/import_test.py (+3, -9)

@@ -10,21 +10,15 @@ def test_import_json(dds: DDS):

json_fpath = dds.build_dir / 'data.json'
import_data = {
'version': 1,
'version': 2,
'packages': {
'foo': {
'1.2.4': {
'git': {
'url': 'http://example.com',
'ref': 'master',
},
'url': 'git+http://example.com#master',
'depends': [],
},
'1.2.5': {
'git': {
'url': 'http://example.com',
'ref': 'master',
},
'url': 'git+http://example.com#master',
},
},
},

tests/deps/build-deps/project/catalog.json (+4, -13)

@@ -1,24 +1,15 @@
{
"version": 1,
"version": 2,
"packages": {
"neo-sqlite3": {
"0.1.0": {
"git": {
"url": "https://github.com/vector-of-bool/neo-sqlite3.git",
"ref": "0.1.0"
},
"url": "git+https://github.com/vector-of-bool/neo-sqlite3.git#0.1.0"
},
"0.2.2": {
"git": {
"url": "https://github.com/vector-of-bool/neo-sqlite3.git",
"ref": "0.2.2"
},
"url": "git+https://github.com/vector-of-bool/neo-sqlite3.git#0.2.2"
},
"0.3.0": {
"git": {
"url": "https://github.com/vector-of-bool/neo-sqlite3.git",
"ref": "0.3.0"
},
"url": "git+https://github.com/vector-of-bool/neo-sqlite3.git#0.3.0"
}
}
}

tests/deps/git-remote/catalog.json (+3, -10)

@@ -1,21 +1,14 @@
{
"version": 1,
"version": 2,
"packages": {
"neo-fun": {
"0.3.2": {
"git": {
"url": "https://github.com/vector-of-bool/neo-fun.git",
"ref": "0.3.2"
}
"url": "git+https://github.com/vector-of-bool/neo-fun.git#0.3.2"
}
},
"range-v3": {
"0.9.1": {
"git": {
"url": "https://github.com/ericniebler/range-v3.git",
"ref": "0.9.1",
"auto-lib": "Niebler/range-v3"
}
"url": "git+https://github.com/ericniebler/range-v3.git?lm=Niebler/range-v3#0.9.1"
}
}
}

tests/deps/no-deps/catalog.json (+1, -1)

@@ -1,4 +1,4 @@
{
"version": 1,
"version": 2,
"packages": {}
}

tests/deps/use-cryptopp/project/catalog.json (+14, -18)

@@ -1,26 +1,22 @@
{
"version": 1,
"version": 2,
"packages": {
"cryptopp": {
"8.2.0": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0",
"auto-lib": "cryptopp/cryptopp",
"transform": [
{
"move": {
"from": ".",
"to": "src/cryptopp",
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
"url": "git+https://github.com/weidai11/cryptopp.git?lm=cryptopp/cryptopp#CRYPTOPP_8_2_0",
"transform": [
{
"move": {
"from": ".",
"to": "src/cryptopp",
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
]
}
}
]
}
}
}

tests/deps/use-remote/catalog.json (+2, -5)

@@ -1,12 +1,9 @@
{
"version": 1,
"version": 2,
"packages": {
"nlohmann-json": {
"3.7.1": {
"git": {
"url": "https://github.com/vector-of-bool/json.git",
"ref": "dds/3.7.1"
},
"url": "git+https://github.com/vector-of-bool/json.git#dds/3.7.1",
"depends": []
}
}

tests/deps/use-spdlog/project/catalog.json (+2, -6)

@@ -1,13 +1,9 @@
{
"version": 1,
"version": 2,
"packages": {
"spdlog": {
"1.4.2": {
"git": {
"url": "https://github.com/gabime/spdlog.git",
"ref": "v1.4.2",
"auto-lib": "spdlog/spdlog"
},
"url": "git+https://github.com/gabime/spdlog.git?lm=spdlog/spdlog#v1.4.2",
"depends": []
}
}

tools/ci.py (+1, -1)

@@ -106,7 +106,7 @@ def main(argv: Sequence[str]) -> int:
paths.PREBUILT_DDS,
toolchain=opts.toolchain,
cat_path=old_cat_path,
cat_json_path=Path('catalog.json'),
cat_json_path=Path('catalog.old.json'),
dds_flags=[('--repo-dir', ci_repo_dir)])
print('Main build PASSED!')
print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}')

tools/gen-catalog-json.py (+31, -3)

@@ -117,6 +117,13 @@ class Git(NamedTuple):
d['auto-lib'] = self.auto_lib
return d

def to_dict_2(self) -> str:
url = f'git+{self.url}'
if self.auto_lib:
url += f'?lm={self.auto_lib}'
url += f'#{self.ref}'
return url


RemoteInfo = Union[Git]

@@ -136,6 +143,15 @@ class Version(NamedTuple):
ret['git'] = self.remote.to_dict()
return ret

def to_dict_2(self) -> dict:
ret: dict = {
'description': self.description,
'depends': list(self.depends),
'transform': [f.to_dict() for f in self.remote.transforms],
}
ret['url'] = self.remote.to_dict_2()
return ret


class VersionSet(NamedTuple):
version: str
@@ -274,12 +290,14 @@ def many_versions(name: str,
# yapf: disable
PACKAGES = [
github_package('neo-buffer', 'vector-of-bool/neo-buffer',
['0.2.1', '0.3.0', '0.4.0', '0.4.1']),
['0.2.1', '0.3.0', '0.4.0', '0.4.1', '0.4.2']),
github_package('neo-compress', 'vector-of-bool/neo-compress', ['0.1.0']),
github_package('neo-url', 'vector-of-bool/neo-url', ['0.1.0', '0.1.1', '0.1.2']),
github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3',
['0.2.3', '0.3.0']),
['0.2.3', '0.3.0', '0.4.0', '0.4.1']),
github_package('neo-fun', 'vector-of-bool/neo-fun', [
'0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0', '0.4.1'
'0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0', '0.4.1',
'0.4.2', '0.5.0', '0.5.1', '0.5.2', '0.5.3',
]),
github_package('neo-concepts', 'vector-of-bool/neo-concepts', (
'0.2.2',
@@ -934,6 +952,14 @@ if __name__ == "__main__":
args = parser.parse_args()

data = {
'version': 2,
'packages': {
pkg.name: {ver.version: ver.to_dict_2()
for ver in pkg.versions}
for pkg in PACKAGES
}
}
old_data = {
'version': 1,
'packages': {
pkg.name: {ver.version: ver.to_dict()
@@ -943,6 +969,8 @@ if __name__ == "__main__":
}
json_str = json.dumps(data, indent=2, sort_keys=True)
Path('catalog.json').write_text(json_str)
Path('catalog.old.json').write_text(
json.dumps(old_data, indent=2, sort_keys=True))

cpp_template = textwrap.dedent(r'''
#include <dds/catalog/package_info.hpp>
