This is a large changeset that changes the way we store package remote info. In these changes, package remotes are entirely encoded in a single URL. This will help reduce complexity down the road when multiple different remote types are supported. The kind of a remote is specified by the URL's scheme, and the URL parsing differs based on the scheme. For now, only git+http and git+https are supported. This comes along with a change to the format of the catalog JSON. Remote information is now entirely encoded in a URL string.
Error: Invalid Remote/Package URL | |||||
################################# | |||||
``dds`` encodes a lot of information about remote repositories and remote | |||||
packages in URLs. If you received this error, it may be because: | |||||
1. The URL syntax is invalid. Make sure that you have spelled it correctly. | |||||
2. The URL scheme (the part at the beginning, before the ``://``) is unsupported | |||||
by ``dds``. ``dds`` only supports a subset of possible URL schemes in | |||||
different contexts. Check the output carefully and read the documentation | |||||
about the task you are trying to solve. | |||||
3. There are missing URL components that the task is expecting. For example, | |||||
``git`` remote URLs require that the URL have a URL fragment specifying the | |||||
tag/branch to clone. (The fragment is the final ``#`` component.) |
"microsoft/wil", | "microsoft/wil", | ||||
"range-v3/range-v3", | "range-v3/range-v3", | ||||
"nlohmann/json", | "nlohmann/json", | ||||
"neo/sqlite3", | |||||
"neo/fun", | "neo/fun", | ||||
"neo/sqlite3", | |||||
"vob/semver", | "vob/semver", | ||||
"vob/pubgrub", | "vob/pubgrub", | ||||
"vob/json5", | "vob/json5", | ||||
"vob/semester", | "vob/semester", | ||||
"hanickadot/ctre", | "hanickadot/ctre", | ||||
// "neo/io", | // "neo/io", | ||||
"neo/url", | |||||
// Explicit zlib link is required due to linker input order bug. | // Explicit zlib link is required due to linker input order bug. | ||||
// Can be removed after alpha.5 | // Can be removed after alpha.5 | ||||
"zlib/zlib", | "zlib/zlib", |
"ms-wil@2020.3.16", | "ms-wil@2020.3.16", | ||||
"range-v3@0.11.0", | "range-v3@0.11.0", | ||||
"nlohmann-json@3.7.1", | "nlohmann-json@3.7.1", | ||||
"neo-sqlite3@0.2.3", | |||||
"neo-fun^0.3.2", | |||||
"neo-sqlite3@0.4.1", | |||||
"neo-fun~0.5.3", | |||||
"neo-compress^0.1.0", | "neo-compress^0.1.0", | ||||
"neo-url~0.1.2", | |||||
"semver@0.2.2", | "semver@0.2.2", | ||||
"pubgrub@0.2.1", | "pubgrub@0.2.1", | ||||
"vob-json5@0.1.5", | "vob-json5@0.1.5", |
#include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
#include <dds/solve/solve.hpp> | #include <dds/solve/solve.hpp> | ||||
#include <dds/util/log.hpp> | #include <dds/util/log.hpp> | ||||
#include <dds/util/ranges.hpp> | |||||
#include <json5/parse_data.hpp> | #include <json5/parse_data.hpp> | ||||
#include <neo/assert.hpp> | #include <neo/assert.hpp> | ||||
using namespace dds; | using namespace dds; | ||||
namespace sqlite3 = neo::sqlite3; | |||||
using namespace sqlite3::literals; | |||||
namespace nsql = neo::sqlite3; | |||||
using namespace neo::sqlite3::literals; | |||||
namespace { | namespace { | ||||
void migrate_repodb_1(sqlite3::database& db) { | |||||
// Migration to schema v1: create the initial dds_cat_pkgs table.
// NOTE(review): the CREATE TABLE body appears truncated in this diff view
// (only the pkg_id column is visible) — confirm the full column list
// against the complete source before relying on this definition.
void migrate_repodb_1(nsql::database& db) { | |||||
db.exec(R"( | db.exec(R"( | ||||
CREATE TABLE dds_cat_pkgs ( | CREATE TABLE dds_cat_pkgs ( | ||||
pkg_id INTEGER PRIMARY KEY AUTOINCREMENT, | pkg_id INTEGER PRIMARY KEY AUTOINCREMENT, | ||||
)"); | )"); | ||||
} | } | ||||
void migrate_repodb_2(sqlite3::database& db) { | |||||
// Migration to schema v2: add the repo_transform column to dds_cat_pkgs.
// The column stores a JSON array of filesystem transforms (see
// transforms_to_json); '[]' default keeps existing rows valid.
void migrate_repodb_2(nsql::database& db) { | |||||
db.exec(R"( | db.exec(R"( | ||||
ALTER TABLE dds_cat_pkgs | ALTER TABLE dds_cat_pkgs | ||||
ADD COLUMN repo_transform TEXT NOT NULL DEFAULT '[]' | ADD COLUMN repo_transform TEXT NOT NULL DEFAULT '[]' | ||||
)"); | )"); | ||||
} | } | ||||
// Migration to schema v3: collapse the per-kind remote columns
// (git_url, git_ref, lm_name, lm_namespace) into a single remote_url
// column of the form 'git+<url>[?lm=<namespace>/<name>]#<ref>'.
// Also introduces the dds_cat_remotes table for named remotes.
// SQLite cannot drop columns via ALTER TABLE, so dds_cat_pkgs (and its
// dependent dds_cat_pkg_deps, whose FK references it) are rebuilt via
// the create-new / copy / drop-old / rename dance below.
// remote_id on the new pkgs table is NULL for packages not associated
// with a named remote.
void migrate_repodb_3(nsql::database& db) { | |||||
db.exec(R"( | |||||
CREATE TABLE dds_cat_remotes ( | |||||
remote_id INTEGER PRIMARY KEY AUTOINCREMENT, | |||||
ident TEXT NOT NULL UNIQUE, | |||||
gen_ident TEXT NOT NULL, | |||||
remote_url TEXT NOT NULL | |||||
); | |||||
CREATE TABLE dds_cat_pkgs_new ( | |||||
pkg_id INTEGER PRIMARY KEY AUTOINCREMENT, | |||||
name TEXT NOT NULL, | |||||
version TEXT NOT NULL, | |||||
description TEXT NOT NULL, | |||||
remote_url TEXT NOT NULL, | |||||
remote_id INTEGER REFERENCES dds_cat_remotes DEFAULT NULL, | |||||
repo_transform TEXT NOT NULL DEFAULT '[]', | |||||
UNIQUE (name, version) | |||||
); | |||||
INSERT INTO dds_cat_pkgs_new(pkg_id, | |||||
name, | |||||
version, | |||||
description, | |||||
remote_url, | |||||
repo_transform) | |||||
SELECT pkg_id, | |||||
name, | |||||
version, | |||||
description, | |||||
'git+' || git_url || ( | |||||
CASE | |||||
WHEN lm_name ISNULL THEN '' | |||||
ELSE ('?lm=' || lm_namespace || '/' || lm_name) | |||||
END | |||||
) || '#' || git_ref, | |||||
repo_transform | |||||
FROM dds_cat_pkgs; | |||||
CREATE TABLE dds_cat_pkg_deps_new ( | |||||
dep_id INTEGER PRIMARY KEY AUTOINCREMENT, | |||||
pkg_id INTEGER NOT NULL REFERENCES dds_cat_pkgs_new(pkg_id), | |||||
dep_name TEXT NOT NULL, | |||||
low TEXT NOT NULL, | |||||
high TEXT NOT NULL, | |||||
UNIQUE(pkg_id, dep_name) | |||||
); | |||||
INSERT INTO dds_cat_pkg_deps_new SELECT * FROM dds_cat_pkg_deps; | |||||
DROP TABLE dds_cat_pkg_deps; | |||||
DROP TABLE dds_cat_pkgs; | |||||
ALTER TABLE dds_cat_pkgs_new RENAME TO dds_cat_pkgs; | |||||
ALTER TABLE dds_cat_pkg_deps_new RENAME TO dds_cat_pkg_deps; | |||||
)"); | |||||
} | |||||
std::string transforms_to_json(const std::vector<fs_transformation>& trs) { | std::string transforms_to_json(const std::vector<fs_transformation>& trs) { | ||||
std::string acc = "["; | std::string acc = "["; | ||||
for (auto it = trs.begin(); it != trs.end(); ++it) { | for (auto it = trs.begin(); it != trs.end(); ++it) { | ||||
const package_info& pkg, | const package_info& pkg, | ||||
const git_remote_listing& git) { | const git_remote_listing& git) { | ||||
auto lm_usage = git.auto_lib.value_or(lm::usage{}); | auto lm_usage = git.auto_lib.value_or(lm::usage{}); | ||||
sqlite3::exec( // | |||||
stmts, | |||||
R"( | |||||
std::string url = git.url; | |||||
if (url.starts_with("https://") || url.starts_with("http://")) { | |||||
url = "git+" + url; | |||||
} | |||||
if (git.auto_lib.has_value()) { | |||||
url += "?lm=" + git.auto_lib->namespace_ + "/" + git.auto_lib->name; | |||||
} | |||||
url += "#" + git.ref; | |||||
nsql::exec( // | |||||
stmts(R"( | |||||
INSERT OR REPLACE INTO dds_cat_pkgs ( | INSERT OR REPLACE INTO dds_cat_pkgs ( | ||||
name, | name, | ||||
version, | version, | ||||
git_url, | |||||
git_ref, | |||||
lm_name, | |||||
lm_namespace, | |||||
remote_url, | |||||
description, | description, | ||||
repo_transform | repo_transform | ||||
) VALUES ( | ) VALUES ( | ||||
?2, | ?2, | ||||
?3, | ?3, | ||||
?4, | ?4, | ||||
CASE WHEN ?5 = '' THEN NULL ELSE ?5 END, | |||||
CASE WHEN ?6 = '' THEN NULL ELSE ?6 END, | |||||
?7, | |||||
?8 | |||||
?5 | |||||
) | ) | ||||
)"_sql, | |||||
std::forward_as_tuple( // | |||||
pkg.ident.name, | |||||
pkg.ident.version.to_string(), | |||||
git.url, | |||||
git.ref, | |||||
lm_usage.name, | |||||
lm_usage.namespace_, | |||||
pkg.description, | |||||
transforms_to_json(git.transforms))); | |||||
)"_sql), | |||||
pkg.ident.name, | |||||
pkg.ident.version.to_string(), | |||||
url, | |||||
pkg.description, | |||||
transforms_to_json(git.transforms)); | |||||
} | } | ||||
void do_store_pkg(neo::sqlite3::database& db, | void do_store_pkg(neo::sqlite3::database& db, | ||||
assert(dep.versions.num_intervals() == 1); | assert(dep.versions.num_intervals() == 1); | ||||
auto iv_1 = *dep.versions.iter_intervals().begin(); | auto iv_1 = *dep.versions.iter_intervals().begin(); | ||||
dds_log(trace, " Depends on: {}", dep.to_string()); | dds_log(trace, " Depends on: {}", dep.to_string()); | ||||
sqlite3::exec(new_dep_st, | |||||
std::forward_as_tuple(db_pkg_id, | |||||
dep.name, | |||||
iv_1.low.to_string(), | |||||
iv_1.high.to_string())); | |||||
nsql::exec(new_dep_st, db_pkg_id, dep.name, iv_1.low.to_string(), iv_1.high.to_string()); | |||||
} | } | ||||
} | } | ||||
void store_init_packages(sqlite3::database& db, sqlite3::statement_cache& st_cache) { | |||||
// Insert every built-in package listing (from init_catalog_packages())
// into the catalog database via do_store_pkg. Caller is responsible for
// wrapping this in a transaction (see import_initial / ensure_migrated).
void store_init_packages(nsql::database& db, nsql::statement_cache& st_cache) { | |||||
dds_log(debug, "Restoring initial package data"); | dds_log(debug, "Restoring initial package data"); | ||||
for (auto& pkg : init_catalog_packages()) { | for (auto& pkg : init_catalog_packages()) { | ||||
do_store_pkg(db, st_cache, pkg); | do_store_pkg(db, st_cache, pkg); | ||||
} | } | ||||
} | } | ||||
void ensure_migrated(sqlite3::database& db) { | |||||
sqlite3::transaction_guard tr{db}; | |||||
void ensure_migrated(nsql::database& db) { | |||||
nsql::transaction_guard tr{db}; | |||||
db.exec(R"( | db.exec(R"( | ||||
PRAGMA foreign_keys = 1; | PRAGMA foreign_keys = 1; | ||||
CREATE TABLE IF NOT EXISTS dds_cat_meta AS | CREATE TABLE IF NOT EXISTS dds_cat_meta AS | ||||
SELECT * FROM init; | SELECT * FROM init; | ||||
)"); | )"); | ||||
auto meta_st = db.prepare("SELECT meta FROM dds_cat_meta"); | auto meta_st = db.prepare("SELECT meta FROM dds_cat_meta"); | ||||
auto [meta_json] = sqlite3::unpack_single<std::string>(meta_st); | |||||
auto [meta_json] = nsql::unpack_single<std::string>(meta_st); | |||||
auto meta = nlohmann::json::parse(meta_json); | auto meta = nlohmann::json::parse(meta_json); | ||||
if (!meta.is_object()) { | if (!meta.is_object()) { | ||||
"The catalog database metadata is invalid [bad dds_meta.version]"); | "The catalog database metadata is invalid [bad dds_meta.version]"); | ||||
} | } | ||||
constexpr int current_database_version = 2; | |||||
constexpr int current_database_version = 3; | |||||
int version = version_; | int version = version_; | ||||
dds_log(debug, "Applying catalog migration 2"); | dds_log(debug, "Applying catalog migration 2"); | ||||
migrate_repodb_2(db); | migrate_repodb_2(db); | ||||
} | } | ||||
meta["version"] = 2; | |||||
exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump())); | |||||
if (version < 3) { | |||||
dds_log(debug, "Applying catalog migration 3"); | |||||
migrate_repodb_3(db); | |||||
} | |||||
meta["version"] = current_database_version; | |||||
exec(db.prepare("UPDATE dds_cat_meta SET meta=?"), meta.dump()); | |||||
if (import_init_packages) { | if (import_init_packages) { | ||||
dds_log( | dds_log( | ||||
fs::create_directories(pardir); | fs::create_directories(pardir); | ||||
} | } | ||||
dds_log(debug, "Opening package catalog [{}]", db_path); | dds_log(debug, "Opening package catalog [{}]", db_path); | ||||
auto db = sqlite3::database::open(db_path); | |||||
auto db = nsql::database::open(db_path); | |||||
try { | try { | ||||
ensure_migrated(db); | ensure_migrated(db); | ||||
} catch (const sqlite3::sqlite3_error& e) { | |||||
} catch (const nsql::sqlite3_error& e) { | |||||
dds_log(critical, | dds_log(critical, | ||||
"Failed to load the repository database. It appears to be invalid/corrupted. The " | "Failed to load the repository database. It appears to be invalid/corrupted. The " | ||||
"exception message is: {}", | "exception message is: {}", | ||||
return catalog(std::move(db)); | return catalog(std::move(db)); | ||||
} | } | ||||
catalog::catalog(sqlite3::database db) | |||||
// Take ownership of an (already migrated — see catalog::open/ensure_migrated)
// database handle.
catalog::catalog(nsql::database db) | |||||
: _db(std::move(db)) {} | : _db(std::move(db)) {} | ||||
// Store (insert-or-replace) a single package listing inside its own
// transaction; the transaction_guard commits on scope exit, or rolls
// back if do_store_pkg throws.
void catalog::store(const package_info& pkg) { | void catalog::store(const package_info& pkg) { | ||||
sqlite3::transaction_guard tr{_db}; | |||||
nsql::transaction_guard tr{_db}; | |||||
do_store_pkg(_db, _stmt_cache, pkg); | do_store_pkg(_db, _stmt_cache, pkg); | ||||
} | } | ||||
pkg_id, | pkg_id, | ||||
name, | name, | ||||
version, | version, | ||||
git_url, | |||||
git_ref, | |||||
lm_name, | |||||
lm_namespace, | |||||
remote_url, | |||||
description, | description, | ||||
repo_transform | repo_transform | ||||
FROM dds_cat_pkgs | FROM dds_cat_pkgs | ||||
WHERE name = ? AND version = ? | WHERE name = ? AND version = ? | ||||
)"_sql); | )"_sql); | ||||
st.reset(); | st.reset(); | ||||
st.bindings = std::forward_as_tuple(pk_id.name, ver_str); | |||||
auto opt_tup = sqlite3::unpack_single_opt<std::int64_t, | |||||
std::string, | |||||
std::string, | |||||
std::optional<std::string>, | |||||
std::optional<std::string>, | |||||
std::optional<std::string>, | |||||
std::optional<std::string>, | |||||
std::string, | |||||
std::string>(st); | |||||
st.bindings() = std::forward_as_tuple(pk_id.name, ver_str); | |||||
auto opt_tup = nsql::unpack_single_opt<std::int64_t, | |||||
std::string, | |||||
std::string, | |||||
std::string, | |||||
std::string, | |||||
std::string>(st); | |||||
if (!opt_tup) { | if (!opt_tup) { | ||||
dym_target::fill([&] { | dym_target::fill([&] { | ||||
auto all_ids = this->all(); | auto all_ids = this->all(); | ||||
}); | }); | ||||
return std::nullopt; | return std::nullopt; | ||||
} | } | ||||
const auto& [pkg_id, | |||||
name, | |||||
version, | |||||
git_url, | |||||
git_ref, | |||||
lm_name, | |||||
lm_namespace, | |||||
description, | |||||
repo_transform] | |||||
= *opt_tup; | |||||
const auto& [pkg_id, name, version, remote_url, description, repo_transform] = *opt_tup; | |||||
assert(pk_id.name == name); | assert(pk_id.name == name); | ||||
assert(pk_id.version == semver::version::parse(version)); | assert(pk_id.version == semver::version::parse(version)); | ||||
assert(git_url); | |||||
assert(git_ref); | |||||
auto deps = dependencies_of(pk_id); | auto deps = dependencies_of(pk_id); | ||||
pk_id, | pk_id, | ||||
std::move(deps), | std::move(deps), | ||||
std::move(description), | std::move(description), | ||||
git_remote_listing{ | |||||
*git_url, | |||||
*git_ref, | |||||
lm_name ? std::make_optional(lm::usage{*lm_namespace, *lm_name}) : std::nullopt, | |||||
{}, | |||||
}, | |||||
parse_remote_url(remote_url), | |||||
}; | }; | ||||
if (!repo_transform.empty()) { | if (!repo_transform.empty()) { | ||||
}; | }; | ||||
// List every package in the catalog as a package_id (name + version),
// mapping each (name, version) row through pair_to_pkg_id.
// NOTE(review): this span interleaves the removed (sqlite3::exec_iter/
// view_safe) and added (nsql::exec_tuples/neo::lref) diff lines.
std::vector<package_id> catalog::all() const noexcept { | std::vector<package_id> catalog::all() const noexcept { | ||||
return view_safe(sqlite3::exec_iter<std::string, std::string>( // | |||||
_stmt_cache, | |||||
"SELECT name, version FROM dds_cat_pkgs"_sql)) | |||||
return nsql::exec_tuples<std::string, std::string>( | |||||
_stmt_cache("SELECT name, version FROM dds_cat_pkgs"_sql)) | |||||
| neo::lref // | |||||
| ranges::views::transform(pair_to_pkg_id) // | | ranges::views::transform(pair_to_pkg_id) // | ||||
| ranges::to_vector; | | ranges::to_vector; | ||||
} | } | ||||
// List every version of the package named `sv` as package_ids.
// The name is bound as the single `?` parameter of the cached statement.
// NOTE(review): this span interleaves the removed (sqlite3::exec_iter/
// std::tie binding) and added (nsql::exec_tuples, direct `sv` argument)
// diff lines.
std::vector<package_id> catalog::by_name(std::string_view sv) const noexcept { | std::vector<package_id> catalog::by_name(std::string_view sv) const noexcept { | ||||
return view_safe(sqlite3::exec_iter<std::string, std::string>( // | |||||
_stmt_cache, | |||||
R"( | |||||
return nsql::exec_tuples<std::string, std::string>( // | |||||
_stmt_cache( | |||||
R"( | |||||
SELECT name, version | SELECT name, version | ||||
FROM dds_cat_pkgs | FROM dds_cat_pkgs | ||||
WHERE name = ? | WHERE name = ? | ||||
)"_sql, | |||||
std::tie(sv))) // | |||||
)"_sql), | |||||
sv) // | |||||
| neo::lref // | |||||
| ranges::views::transform(pair_to_pkg_id) // | | ranges::views::transform(pair_to_pkg_id) // | ||||
| ranges::to_vector; | | ranges::to_vector; | ||||
} | } | ||||
std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const noexcept { | std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const noexcept { | ||||
dds_log(trace, "Lookup dependencies of {}@{}", pkg.name, pkg.version.to_string()); | dds_log(trace, "Lookup dependencies of {}@{}", pkg.name, pkg.version.to_string()); | ||||
return view_safe(sqlite3::exec_iter<std::string, | |||||
std::string, | |||||
std::string>( // | |||||
_stmt_cache, | |||||
R"( | |||||
return nsql::exec_tuples<std::string, | |||||
std::string, | |||||
std::string>( // | |||||
_stmt_cache( | |||||
R"( | |||||
WITH this_pkg_id AS ( | WITH this_pkg_id AS ( | ||||
SELECT pkg_id | SELECT pkg_id | ||||
FROM dds_cat_pkgs | FROM dds_cat_pkgs | ||||
FROM dds_cat_pkg_deps | FROM dds_cat_pkg_deps | ||||
WHERE pkg_id IN this_pkg_id | WHERE pkg_id IN this_pkg_id | ||||
ORDER BY dep_name | ORDER BY dep_name | ||||
)"_sql, | |||||
std::forward_as_tuple(pkg.name, pkg.version.to_string()))) // | |||||
)"_sql), | |||||
pkg.name, | |||||
pkg.version.to_string()) // | |||||
| neo::lref // | |||||
| ranges::views::transform([](auto&& pair) { | | ranges::views::transform([](auto&& pair) { | ||||
auto& [name, low, high] = pair; | auto& [name, low, high] = pair; | ||||
auto dep | auto dep | ||||
dds_log(trace, "Importing JSON string into catalog"); | dds_log(trace, "Importing JSON string into catalog"); | ||||
auto pkgs = parse_packages_json(content); | auto pkgs = parse_packages_json(content); | ||||
sqlite3::transaction_guard tr{_db}; | |||||
nsql::transaction_guard tr{_db}; | |||||
for (const auto& pkg : pkgs) { | for (const auto& pkg : pkgs) { | ||||
store(pkg); | |||||
do_store_pkg(_db, _stmt_cache, pkg); | |||||
} | } | ||||
} | } | ||||
// Re-import the built-in initial catalog packages in one transaction
// (committed on scope exit, rolled back on exception).
void catalog::import_initial() { | void catalog::import_initial() { | ||||
sqlite3::transaction_guard tr{_db}; | |||||
nsql::transaction_guard tr{_db}; | |||||
dds_log(info, "Restoring built-in initial catalog contents"); | dds_log(info, "Restoring built-in initial catalog contents"); | ||||
store_init_packages(_db, _stmt_cache); | store_init_packages(_db, _stmt_cache); | ||||
} | } | ||||
dds::package_id("foo", semver::version::parse("1.2.3")), | dds::package_id("foo", semver::version::parse("1.2.3")), | ||||
{}, | {}, | ||||
"example", | "example", | ||||
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}}, | |||||
dds::git_remote_listing{"git+http://example.com", "master", std::nullopt, {}}, | |||||
}); | }); | ||||
auto pkgs = db.by_name("foo"); | auto pkgs = db.by_name("foo"); | ||||
dds::package_id("foo", semver::version::parse("1.2.3")), | dds::package_id("foo", semver::version::parse("1.2.3")), | ||||
{}, | {}, | ||||
"example", | "example", | ||||
dds::git_remote_listing{"http://example.com", "develop", std::nullopt, {}}, | |||||
dds::git_remote_listing{"git+http://example.com", "develop", std::nullopt, {}}, | |||||
})); | })); | ||||
// The previous pkg_id is still a valid lookup key | // The previous pkg_id is still a valid lookup key | ||||
info = db.get(pkgs[0]); | info = db.get(pkgs[0]); | ||||
{"baz", {semver::version::parse("5.3.0"), semver::version::parse("6.0.0")}}, | {"baz", {semver::version::parse("5.3.0"), semver::version::parse("6.0.0")}}, | ||||
}, | }, | ||||
"example", | "example", | ||||
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}}, | |||||
dds::git_remote_listing{"git+http://example.com", "master", std::nullopt, {}}, | |||||
}); | }); | ||||
auto pkgs = db.by_name("foo"); | auto pkgs = db.by_name("foo"); | ||||
REQUIRE(pkgs.size() == 1); | REQUIRE(pkgs.size() == 1); | ||||
TEST_CASE_METHOD(catalog_test_case, "Parse JSON repo") { | TEST_CASE_METHOD(catalog_test_case, "Parse JSON repo") { | ||||
db.import_json_str(R"({ | db.import_json_str(R"({ | ||||
"version": 1, | |||||
"version": 2, | |||||
"packages": { | "packages": { | ||||
"foo": { | "foo": { | ||||
"1.2.3": { | "1.2.3": { | ||||
"depends": [ | "depends": [ | ||||
"bar~4.2.1" | "bar~4.2.1" | ||||
], | ], | ||||
"git": { | |||||
"url": "http://example.com", | |||||
"ref": "master" | |||||
} | |||||
url: "git+http://example.com#master" | |||||
} | } | ||||
} | } | ||||
} | } |
#include <fmt/core.h> | #include <fmt/core.h> | ||||
#include <json5/parse_data.hpp> | #include <json5/parse_data.hpp> | ||||
#include <neo/assert.hpp> | #include <neo/assert.hpp> | ||||
#include <neo/url.hpp> | |||||
#include <semester/walk.hpp> | #include <semester/walk.hpp> | ||||
#include <optional> | #include <optional> | ||||
throw_user_error<dds::errc::invalid_catalog_json>(NEO_FWD(args)...); | throw_user_error<dds::errc::invalid_catalog_json>(NEO_FWD(args)...); | ||||
} | } | ||||
git_remote_listing parse_git_remote(const json5::data& data) { | |||||
git_remote_listing git; | |||||
auto make_dep = [](std::string const& str) { | |||||
using namespace semester::walk_ops; | |||||
try { | |||||
return dependency::parse_depends_string(str); | |||||
} catch (std::runtime_error const& e) { | |||||
import_error(std::string(walk.path()) + e.what()); | |||||
} | |||||
}; | |||||
auto convert_version_str = [](std::string_view str) { | |||||
using namespace semester::walk_ops; | using namespace semester::walk_ops; | ||||
try { | |||||
return semver::version::parse(str); | |||||
} catch (const semver::invalid_version& e) { | |||||
import_error("{}: version string '{}' is invalid: {}", walk.path(), str, e.what()); | |||||
} | |||||
}; | |||||
walk(data, | |||||
require_obj{"Git remote should be an object"}, | |||||
mapping{required_key{"url", | |||||
"A git 'url' string is required", | |||||
require_str("Git URL should be a string"), | |||||
put_into(git.url)}, | |||||
required_key{"ref", | |||||
"A git 'ref' is required, and must be a tag or branch name", | |||||
require_str("Git ref should be a string"), | |||||
put_into(git.ref)}, | |||||
if_key{"auto-lib", | |||||
require_str("'auto-lib' should be a string"), | |||||
put_into(git.auto_lib, | |||||
[](std::string const& str) { | |||||
try { | |||||
return lm::split_usage_string(str); | |||||
} catch (const std::runtime_error& e) { | |||||
import_error("{}: {}", walk.path(), e.what()); | |||||
} | |||||
})}, | |||||
if_key{"transform", | |||||
require_array{"Expect an array of transforms"}, | |||||
for_each{put_into(std::back_inserter(git.transforms), [](auto&& dat) { | |||||
try { | |||||
return fs_transformation::from_json(dat); | |||||
} catch (const semester::walk_error& e) { | |||||
import_error(e.what()); | |||||
} | |||||
})}}}); | |||||
return git; | |||||
} | |||||
// Convert a remote URL string from the catalog JSON into a remote
// listing via parse_remote_url. Both URL-syntax errors
// (neo::url_validation_error) and unsupported/invalid remote URLs
// (user_error<errc::invalid_remote_url>) are reported as catalog import
// errors tagged with the current JSON walk path.
auto parse_remote = [](const std::string& str) { | |||||
using namespace semester::walk_ops; | |||||
try { | |||||
return parse_remote_url(str); | |||||
} catch (const neo::url_validation_error& e) { | |||||
import_error("{}: Invalid URL: {}", walk.path(), str); | |||||
} catch (const user_error<errc::invalid_remote_url>& e) { | |||||
import_error("{}: Invalid URL: {}", walk.path(), e.what()); | |||||
} | |||||
}; | |||||
// Build a walk_seq that validates a JSON 'transform' array and appends
// each parsed fs_transformation into the caller-supplied vector.
// Invalid transform objects are reported as import errors.
// NOTE(review): the inner lambda captures tr_vec by reference — the
// returned walk_seq must be consumed within the caller's walk() call,
// while tr_vec is still alive.
auto parse_fs_transforms = [](auto&& tr_vec) { | |||||
using namespace semester::walk_ops; | |||||
return walk_seq{ | |||||
require_array{"Expect an array of transforms"}, | |||||
for_each{ | |||||
put_into(std::back_inserter(tr_vec), | |||||
[&](auto&& dat) { | |||||
try { | |||||
return fs_transformation::from_json(dat); | |||||
} catch (const semester::walk_error& e) { | |||||
import_error(e.what()); | |||||
} | |||||
}), | |||||
}, | |||||
}; | |||||
}; | |||||
package_info | package_info | ||||
parse_pkg_json_v1(std::string_view name, semver::version version, const json5::data& data) { | |||||
parse_pkg_json_v2(std::string_view name, semver::version version, const json5::data& data) { | |||||
package_info ret; | package_info ret; | ||||
ret.ident = package_id{std::string{name}, version}; | ret.ident = package_id{std::string{name}, version}; | ||||
std::vector<fs_transformation> fs_trs; | |||||
using namespace semester::walk_ops; | using namespace semester::walk_ops; | ||||
auto make_dep = [&](std::string const& str) { | |||||
try { | |||||
return dependency::parse_depends_string(str); | |||||
} catch (std::runtime_error const& e) { | |||||
import_error(std::string(walk.path()) + e.what()); | |||||
} | |||||
}; | |||||
auto check_one_remote = [&](auto&&) { | auto check_one_remote = [&](auto&&) { | ||||
if (!semester::holds_alternative<std::monostate>(ret.remote)) { | if (!semester::holds_alternative<std::monostate>(ret.remote)) { | ||||
return walk.reject("Cannot specify multiple remotes for a package"); | return walk.reject("Cannot specify multiple remotes for a package"); | ||||
for_each{require_str{"Each dependency should be a string"}, | for_each{require_str{"Each dependency should be a string"}, | ||||
put_into{std::back_inserter(ret.deps), make_dep}}}, | put_into{std::back_inserter(ret.deps), make_dep}}}, | ||||
if_key{ | if_key{ | ||||
"git", | |||||
"url", | |||||
require_str{"Remote URL should be a string"}, | |||||
check_one_remote, | check_one_remote, | ||||
put_into(ret.remote, parse_git_remote), | |||||
}}); | |||||
put_into(ret.remote, parse_remote), | |||||
}, | |||||
if_key{"transform", parse_fs_transforms(fs_trs)}}); | |||||
if (semester::holds_alternative<std::monostate>(ret.remote)) { | if (semester::holds_alternative<std::monostate>(ret.remote)) { | ||||
import_error("{}: Package listing for {} does not have any remote information", | import_error("{}: Package listing for {} does not have any remote information", | ||||
ret.ident.to_string()); | ret.ident.to_string()); | ||||
} | } | ||||
if (semester::holds_alternative<git_remote_listing>(ret.remote)) { | |||||
semester::get<git_remote_listing>(ret.remote).transforms = std::move(fs_trs); | |||||
} else { | |||||
if (!fs_trs.empty()) { | |||||
throw_user_error<errc::invalid_catalog_json>( | |||||
"{}: Filesystem transforms are not supported for this remote type", walk.path()); | |||||
} | |||||
} | |||||
return ret; | return ret; | ||||
} | } | ||||
std::vector<package_info> parse_json_v1(const json5::data& data) { | |||||
std::vector<package_info> parse_json_v2(const json5::data& data) { | |||||
std::vector<package_info> acc_pkgs; | std::vector<package_info> acc_pkgs; | ||||
std::string pkg_name; | std::string pkg_name; | ||||
using namespace semester::walk_ops; | using namespace semester::walk_ops; | ||||
auto convert_pkg_obj | auto convert_pkg_obj | ||||
= [&](auto&& dat) { return parse_pkg_json_v1(pkg_name, pkg_version, dat); }; | |||||
auto convert_version_str = [&](std::string_view str) { | |||||
try { | |||||
return semver::version::parse(str); | |||||
} catch (const semver::invalid_version& e) { | |||||
throw_user_error<errc::invalid_catalog_json>("{}: version string '{}' is invalid: {}", | |||||
walk.path(), | |||||
pkg_name, | |||||
str, | |||||
e.what()); | |||||
} | |||||
}; | |||||
= [&](auto&& dat) { return parse_pkg_json_v2(pkg_name, pkg_version, dat); }; | |||||
auto import_pkg_versions | auto import_pkg_versions | ||||
= walk_seq{require_obj{"Package entries must be JSON objects"}, | = walk_seq{require_obj{"Package entries must be JSON objects"}, | ||||
try { | try { | ||||
if (version == 1.0) { | if (version == 1.0) { | ||||
dds_log(trace, "Processing JSON data as v1 data"); | |||||
return parse_json_v1(data); | |||||
throw_user_error<errc::invalid_catalog_json>( | |||||
"Support for catalog JSON v1 has been removed"); | |||||
} else if (version == 2.0) { | |||||
dds_log(trace, "Processing JSON data as v2 data"); | |||||
return parse_json_v2(data); | |||||
} else { | } else { | ||||
throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'", | throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'", | ||||
version); | version); |
TEST_CASE("An empty import is okay") { | TEST_CASE("An empty import is okay") { | ||||
// An empty JSON with no packages in it | // An empty JSON with no packages in it | ||||
auto pkgs = dds::parse_packages_json("{version: 1, packages: {}}"); | |||||
auto pkgs = dds::parse_packages_json("{version: 2, packages: {}}"); | |||||
CHECK(pkgs.empty()); | CHECK(pkgs.empty()); | ||||
} | } | ||||
// Missing keys | // Missing keys | ||||
"{}", | "{}", | ||||
// Missing "packages" | // Missing "packages" | ||||
"{version: 1}", | |||||
"{version: 2}", | |||||
// Bad version | // Bad version | ||||
"{version: 1.7, packages: {}}", | |||||
"{version: 2.7, packages: {}}", | |||||
"{version: [], packages: {}}", | "{version: [], packages: {}}", | ||||
"{version: null, packages: {}}", | "{version: null, packages: {}}", | ||||
// 'packages' should be an object | // 'packages' should be an object | ||||
"{version: 1, packages: []}", | |||||
"{version: 1, packages: null}", | |||||
"{version: 1, packages: 4}", | |||||
"{version: 1, packages: 'lol'}", | |||||
"{version: 2, packages: []}", | |||||
"{version: 2, packages: null}", | |||||
"{version: 2, packages: 4}", | |||||
"{version: 2, packages: 'lol'}", | |||||
// Objects in 'packages' should be objects | // Objects in 'packages' should be objects | ||||
"{version:1, packages:{foo:null}}", | |||||
"{version:1, packages:{foo:[]}}", | |||||
"{version:1, packages:{foo:9}}", | |||||
"{version:1, packages:{foo:'lol'}}", | |||||
"{version:2, packages:{foo:null}}", | |||||
"{version:2, packages:{foo:[]}}", | |||||
"{version:2, packages:{foo:9}}", | |||||
"{version:2, packages:{foo:'lol'}}", | |||||
// Objects in 'packages' should have version strings | // Objects in 'packages' should have version strings | ||||
"{version:1, packages:{foo:{'lol':{}}}}", | |||||
"{version:1, packages:{foo:{'1.2':{}}}}", | |||||
"{version:2, packages:{foo:{'lol':{}}}}", | |||||
"{version:2, packages:{foo:{'1.2':{}}}}", | |||||
// No remote | // No remote | ||||
"{version:1, packages:{foo:{'1.2.3':{}}}}", | |||||
// Bad empty git | |||||
"{version:1, packages:{foo:{'1.2.3':{git:{}}}}}", | |||||
// Git `url` and `ref` should be a string | |||||
"{version:1, packages:{foo:{'1.2.3':{git:{url:2, ref:''}}}}}", | |||||
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:2}}}}}", | |||||
"{version:2, packages:{foo:{'1.2.3':{}}}}", | |||||
// Bad empty URL | |||||
"{version:2, packages:{foo:{'1.2.3':{url: ''}}}}", | |||||
// Git URL must have a fragment | |||||
"{version:2, packages:{foo:{'1.2.3':{url:'git+http://example.com'}}}}", | |||||
// 'auto-lib' should be a usage string | // 'auto-lib' should be a usage string | ||||
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':3}}}}}", | |||||
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'ffasdf'}}}}}", | |||||
"{version:2, packages:{foo:{'1.2.3':{url:'git+http://example.com?lm=lol#1.0}}}}", | |||||
// 'transform' should be an array | // 'transform' should be an array | ||||
R"( | R"( | ||||
{ | { | ||||
version: 1, | |||||
version: 2, | |||||
packages: {foo: {'1.2.3': { | packages: {foo: {'1.2.3': { | ||||
git: { | |||||
url: '', | |||||
ref: '', | |||||
'auto-lib': 'a/b', | |||||
transform: 'lol hi', | |||||
} | |||||
url: 'git+http://example.com#master', | |||||
transform: 'lol hi' | |||||
}}} | }}} | ||||
} | } | ||||
)", | )", | ||||
std::string_view goods[] = { | std::string_view goods[] = { | ||||
// Basic empty: | // Basic empty: | ||||
"{version:1, packages:{}}", | |||||
"{version:2, packages:{}}", | |||||
// No versions for 'foo' is weird, but okay | // No versions for 'foo' is weird, but okay | ||||
"{version:1, packages:{foo:{}}}", | |||||
"{version:2, packages:{foo:{}}}", | |||||
// Basic package with minimum info: | // Basic package with minimum info: | ||||
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:''}}}}}", | |||||
"{version:2, packages:{foo:{'1.2.3':{url: 'git+http://example.com#master'}}}}", | |||||
// Minimal auto-lib: | // Minimal auto-lib: | ||||
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'a/b'}}}}}", | |||||
"{version:2, packages:{foo:{'1.2.3':{url: 'git+http://example.com?lm=a/b#master'}}}}", | |||||
// Empty transforms: | // Empty transforms: | ||||
R"( | R"( | ||||
{ | { | ||||
version: 1, | |||||
version: 2, | |||||
packages: {foo: {'1.2.3': { | packages: {foo: {'1.2.3': { | ||||
git: { | |||||
url: '', | |||||
ref: '', | |||||
'auto-lib': 'a/b', | |||||
transform: [], | |||||
} | |||||
url: 'git+http://example.com#master', | |||||
transform: [], | |||||
}}} | }}} | ||||
} | } | ||||
)", | )", | ||||
// Basic transform: | // Basic transform: | ||||
R"( | R"( | ||||
{ | { | ||||
version: 1, | |||||
version: 2, | |||||
packages: {foo: {'1.2.3': { | packages: {foo: {'1.2.3': { | ||||
git: { | |||||
url: '', | |||||
ref: '', | |||||
'auto-lib': 'a/b', | |||||
transform: [{ | |||||
copy: { | |||||
from: 'here', | |||||
to: 'there', | |||||
include: [ | |||||
"*.c", | |||||
"*.cpp", | |||||
"*.h", | |||||
'*.txt' | |||||
] | |||||
} | |||||
}], | |||||
} | |||||
url: 'git+http://example.com#master', | |||||
transform: [{ | |||||
copy: { | |||||
from: 'here', | |||||
to: 'there', | |||||
include: [ | |||||
"*.c", | |||||
"*.cpp", | |||||
"*.h", | |||||
'*.txt' | |||||
] | |||||
} | |||||
}], | |||||
}}} | }}} | ||||
} | } | ||||
)", | )", | ||||
TEST_CASE("Check a single object") { | TEST_CASE("Check a single object") { | ||||
// An empty JSON with no packages in it | // An empty JSON with no packages in it | ||||
auto pkgs = dds::parse_packages_json(R"({ | auto pkgs = dds::parse_packages_json(R"({ | ||||
version: 1, | |||||
version: 2, | |||||
packages: { | packages: { | ||||
foo: { | foo: { | ||||
'1.2.3': { | '1.2.3': { | ||||
git: { | |||||
url: 'foo', | |||||
ref: 'fasdf', | |||||
'auto-lib': 'a/b', | |||||
} | |||||
url: 'git+http://example.com?lm=a/b#master', | |||||
} | } | ||||
} | } | ||||
} | } | ||||
CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote)); | CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote)); | ||||
auto git = std::get<dds::git_remote_listing>(pkgs[0].remote); | auto git = std::get<dds::git_remote_listing>(pkgs[0].remote); | ||||
CHECK(git.url == "foo"); | |||||
CHECK(git.ref == "fasdf"); | |||||
CHECK(git.url == "http://example.com"); | |||||
CHECK(git.ref == "master"); | |||||
REQUIRE(git.auto_lib); | REQUIRE(git.auto_lib); | ||||
CHECK(git.auto_lib->namespace_ == "a"); | CHECK(git.auto_lib->namespace_ == "a"); | ||||
CHECK(git.auto_lib->name == "b"); | CHECK(git.auto_lib->name == "b"); |
#include "./package_info.hpp" | |||||
#include <dds/error/errors.hpp> | |||||
#include <neo/url.hpp> | |||||
using namespace dds; | |||||
// Dispatch a remote-package URL string to the appropriate remote-listing
// parser based on its URL scheme. Only Git-flavored schemes are currently
// understood; anything else raises errc::invalid_remote_url.
dds::remote_listing_var dds::parse_remote_url(std::string_view sv) {
    const auto url = neo::url::parse(sv);
    // Accept both 'git+<proto>' and '<proto>+git' spellings, plus bare 'git'.
    const bool is_git_scheme = url.scheme == "git+https" || url.scheme == "git+http"
        || url.scheme == "https+git" || url.scheme == "http+git" || url.scheme == "git";
    if (!is_git_scheme) {
        throw_user_error<
            errc::invalid_remote_url>("Unknown scheme '{}' for remote package URL '{}'",
                                      url.scheme,
                                      sv);
    }
    return git_remote_listing::from_url(sv);
}
namespace dds { | namespace dds { | ||||
using remote_listing_var = std::variant<std::monostate, git_remote_listing>; | |||||
remote_listing_var parse_remote_url(std::string_view url); | |||||
struct package_info { | struct package_info { | ||||
package_id ident; | package_id ident; | ||||
std::vector<dependency> deps; | std::vector<dependency> deps; | ||||
std::string description; | std::string description; | ||||
std::variant<std::monostate, git_remote_listing> remote; | |||||
remote_listing_var remote; | |||||
}; | }; | ||||
} // namespace dds | } // namespace dds |
#include <dds/proc.hpp> | #include <dds/proc.hpp> | ||||
#include <dds/util/log.hpp> | #include <dds/util/log.hpp> | ||||
#include <neo/url.hpp> | |||||
#include <neo/url/query.hpp> | |||||
#include <nlohmann/json.hpp> | #include <nlohmann/json.hpp> | ||||
void dds::git_remote_listing::pull_to(const dds::package_id& pid, dds::path_ref dest) const { | |||||
using namespace dds; | |||||
void git_remote_listing::pull_to(const package_id& pid, path_ref dest) const { | |||||
fs::remove_all(dest); | fs::remove_all(dest); | ||||
using namespace std::literals; | using namespace std::literals; | ||||
dds_log(info, "Clone Git repository [{}] (at {}) to [{}]", url, ref, dest.string()); | dds_log(info, "Clone Git repository [{}] (at {}) to [{}]", url, ref, dest.string()); | ||||
if (auto_lib.has_value()) { | if (auto_lib.has_value()) { | ||||
dds_log(info, "Generating library data automatically"); | dds_log(info, "Generating library data automatically"); | ||||
auto pkg_strm = dds::open(dest / "package.json5", std::ios::binary | std::ios::out); | |||||
auto pkg_strm = open(dest / "package.json5", std::ios::binary | std::ios::out); | |||||
auto man_json = nlohmann::json::object(); | auto man_json = nlohmann::json::object(); | ||||
man_json["name"] = pid.name; | man_json["name"] = pid.name; | ||||
man_json["version"] = pid.version.to_string(); | man_json["version"] = pid.version.to_string(); | ||||
man_json["namespace"] = auto_lib->namespace_; | man_json["namespace"] = auto_lib->namespace_; | ||||
pkg_strm << nlohmann::to_string(man_json); | pkg_strm << nlohmann::to_string(man_json); | ||||
auto lib_strm = dds::open(dest / "library.json5", std::ios::binary | std::ios::out); | |||||
auto lib_strm = open(dest / "library.json5", std::ios::binary | std::ios::out); | |||||
auto lib_json = nlohmann::json::object(); | auto lib_json = nlohmann::json::object(); | ||||
lib_json["name"] = auto_lib->name; | lib_json["name"] = auto_lib->name; | ||||
lib_strm << nlohmann::to_string(lib_json); | lib_strm << nlohmann::to_string(lib_json); | ||||
} | } | ||||
} | } | ||||
// Build a git_remote_listing from a single package remote URL, e.g.
// 'git+https://host/repo.git?lm=ns/name#tag'. The URL encodes:
//   - scheme:   'git+<proto>' or '<proto>+git' (the 'git' marker is stripped)
//   - query:    optional 'lm=<namespace>/<name>' enabling auto-lib generation
//   - fragment: the Git ref (tag/branch) to clone — required
// Throws errc::invalid_remote_url when the fragment is missing.
git_remote_listing git_remote_listing::from_url(std::string_view sv) {
    auto url = neo::url::parse(sv);
    dds_log(trace, "Create Git remote listing from URL '{}'", sv);
    // Capture the ref and query, then blank them on the URL object so that
    // url.to_string() below yields only the bare clone URL.
    auto ref = url.fragment;
    url.fragment = {};
    auto q = url.query;
    url.query = {};

    std::optional<lm::usage> auto_lib;

    // Strip the 'git' marker from the scheme, recovering the transport
    // scheme that will be used for the actual clone.
    if (url.scheme.starts_with("git+")) {
        url.scheme = url.scheme.substr(4);
    } else if (url.scheme.ends_with("+git")) {
        url.scheme = url.scheme.substr(0, url.scheme.size() - 4);
    } else {
        // Leave the URL as-is (e.g. a plain 'git' scheme)
    }

    if (q) {
        // Walk the query string; only 'lm' is recognized. Unknown parameters
        // are warned about but otherwise ignored rather than rejected.
        neo::basic_query_string_view qsv{*q};
        for (auto qstr : qsv) {
            if (qstr.key_raw() != "lm") {
                dds_log(warn, "Unknown query string parameter in package url: '{}'", qstr.string());
            } else {
                auto_lib = lm::split_usage_string(qstr.value_decoded());
            }
        }
    }

    if (!ref) {
        // A Git remote listing is unusable without a ref to check out.
        throw_user_error<errc::invalid_remote_url>(
            "Git URL requires a fragment specifying the Git ref to clone");
    }

    return {.url = url.to_string(), .ref = *ref, .auto_lib = auto_lib, .transforms = {}};
}
std::vector<fs_transformation> transforms; | std::vector<fs_transformation> transforms; | ||||
void pull_to(const package_id& pid, path_ref path) const; | void pull_to(const package_id& pid, path_ref path) const; | ||||
static git_remote_listing from_url(std::string_view sv); | |||||
}; | }; | ||||
} // namespace dds | } // namespace dds |
using namespace dds; | using namespace dds; | ||||
namespace sqlite3 = neo::sqlite3; | |||||
using sqlite3::exec; | |||||
using namespace sqlite3::literals; | |||||
namespace nsql = neo::sqlite3; | |||||
using nsql::exec; | |||||
using namespace nsql::literals; | |||||
namespace { | namespace { | ||||
void migrate_1(sqlite3::database& db) { | |||||
void migrate_1(nsql::database& db) { | |||||
db.exec(R"( | db.exec(R"( | ||||
CREATE TABLE dds_files ( | CREATE TABLE dds_files ( | ||||
file_id INTEGER PRIMARY KEY, | file_id INTEGER PRIMARY KEY, | ||||
)"); | )"); | ||||
} | } | ||||
void ensure_migrated(sqlite3::database& db) { | |||||
sqlite3::transaction_guard tr{db}; | |||||
void ensure_migrated(nsql::database& db) { | |||||
nsql::transaction_guard tr{db}; | |||||
db.exec(R"( | db.exec(R"( | ||||
PRAGMA foreign_keys = 1; | PRAGMA foreign_keys = 1; | ||||
CREATE TABLE IF NOT EXISTS dds_meta AS | CREATE TABLE IF NOT EXISTS dds_meta AS | ||||
SELECT * FROM init; | SELECT * FROM init; | ||||
)"); | )"); | ||||
auto meta_st = db.prepare("SELECT meta FROM dds_meta"); | auto meta_st = db.prepare("SELECT meta FROM dds_meta"); | ||||
auto [meta_json] = sqlite3::unpack_single<std::string>(meta_st); | |||||
auto [meta_json] = nsql::unpack_single<std::string>(meta_st); | |||||
auto meta = nlohmann::json::parse(meta_json); | auto meta = nlohmann::json::parse(meta_json); | ||||
if (!meta.is_object()) { | if (!meta.is_object()) { | ||||
migrate_1(db); | migrate_1(db); | ||||
} | } | ||||
meta["version"] = 1; | meta["version"] = 1; | ||||
exec(db, "UPDATE dds_meta SET meta=?", std::forward_as_tuple(meta.dump())); | |||||
exec(db.prepare("UPDATE dds_meta SET meta=?"), meta.dump()); | |||||
} | } | ||||
} // namespace | } // namespace | ||||
database database::open(const std::string& db_path) { | database database::open(const std::string& db_path) { | ||||
auto db = sqlite3::database::open(db_path); | |||||
auto db = nsql::database::open(db_path); | |||||
try { | try { | ||||
ensure_migrated(db); | ensure_migrated(db); | ||||
} catch (const sqlite3::sqlite3_error& e) { | |||||
} catch (const nsql::sqlite3_error& e) { | |||||
dds_log( | dds_log( | ||||
error, | error, | ||||
"Failed to load the databsae. It appears to be invalid/corrupted. We'll delete it and " | "Failed to load the databsae. It appears to be invalid/corrupted. We'll delete it and " | ||||
"create a new one. The exception message is: {}", | "create a new one. The exception message is: {}", | ||||
e.what()); | e.what()); | ||||
fs::remove(db_path); | fs::remove(db_path); | ||||
db = sqlite3::database::open(db_path); | |||||
db = nsql::database::open(db_path); | |||||
try { | try { | ||||
ensure_migrated(db); | ensure_migrated(db); | ||||
} catch (const sqlite3::sqlite3_error& e) { | |||||
} catch (const nsql::sqlite3_error& e) { | |||||
dds_log(critical, | dds_log(critical, | ||||
"Failed to apply database migrations to recovery database. This is a critical " | "Failed to apply database migrations to recovery database. This is a critical " | ||||
"error. The exception message is: {}", | "error. The exception message is: {}", | ||||
return database(std::move(db)); | return database(std::move(db)); | ||||
} | } | ||||
database::database(sqlite3::database db) | |||||
database::database(nsql::database db) | |||||
: _db(std::move(db)) {} | : _db(std::move(db)) {} | ||||
std::int64_t database::_record_file(path_ref path_) { | std::int64_t database::_record_file(path_ref path_) { | ||||
auto path = fs::weakly_canonical(path_); | auto path = fs::weakly_canonical(path_); | ||||
sqlite3::exec(_stmt_cache(R"( | |||||
nsql::exec(_stmt_cache(R"( | |||||
INSERT OR IGNORE INTO dds_files (path) | INSERT OR IGNORE INTO dds_files (path) | ||||
VALUES (?) | VALUES (?) | ||||
)"_sql), | )"_sql), | ||||
std::forward_as_tuple(path.generic_string())); | |||||
path.generic_string()); | |||||
auto& st = _stmt_cache(R"( | auto& st = _stmt_cache(R"( | ||||
SELECT file_id | SELECT file_id | ||||
FROM dds_files | FROM dds_files | ||||
WHERE path = ?1 | WHERE path = ?1 | ||||
)"_sql); | )"_sql); | ||||
st.reset(); | st.reset(); | ||||
auto str = path.generic_string(); | |||||
st.bindings[1] = str; | |||||
auto [rowid] = sqlite3::unpack_single<std::int64_t>(st); | |||||
auto str = path.generic_string(); | |||||
st.bindings()[1] = str; | |||||
auto [rowid] = nsql::unpack_single<std::int64_t>(st); | |||||
return rowid; | return rowid; | ||||
} | } | ||||
INSERT OR REPLACE INTO dds_deps (input_file_id, output_file_id, input_mtime) | INSERT OR REPLACE INTO dds_deps (input_file_id, output_file_id, input_mtime) | ||||
VALUES (?, ?, ?) | VALUES (?, ?, ?) | ||||
)"_sql); | )"_sql); | ||||
sqlite3::exec(st, std::forward_as_tuple(in_id, out_id, input_mtime.time_since_epoch().count())); | |||||
nsql::exec(st, in_id, out_id, input_mtime.time_since_epoch().count()); | |||||
} | } | ||||
void database::store_file_command(path_ref file, const command_info& cmd) { | void database::store_file_command(path_ref file, const command_info& cmd) { | ||||
INTO dds_file_commands(file_id, command, output) | INTO dds_file_commands(file_id, command, output) | ||||
VALUES (?1, ?2, ?3) | VALUES (?1, ?2, ?3) | ||||
)"_sql); | )"_sql); | ||||
sqlite3::exec(st, | |||||
std::forward_as_tuple(file_id, | |||||
std::string_view(cmd.command), | |||||
std::string_view(cmd.output))); | |||||
nsql::exec(st, file_id, std::string_view(cmd.command), std::string_view(cmd.output)); | |||||
} | } | ||||
void database::forget_inputs_of(path_ref file) { | void database::forget_inputs_of(path_ref file) { | ||||
DELETE FROM dds_deps | DELETE FROM dds_deps | ||||
WHERE output_file_id IN id_to_delete | WHERE output_file_id IN id_to_delete | ||||
)"_sql); | )"_sql); | ||||
sqlite3::exec(st, std::forward_as_tuple(fs::weakly_canonical(file).generic_string())); | |||||
nsql::exec(st, fs::weakly_canonical(file).generic_string()); | |||||
} | } | ||||
std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_) const { | std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_) const { | ||||
WHERE output_file_id IN file | WHERE output_file_id IN file | ||||
)"_sql); | )"_sql); | ||||
st.reset(); | st.reset(); | ||||
st.bindings[1] = file.generic_string(); | |||||
auto tup_iter = sqlite3::iter_tuples<std::string, std::int64_t>(st); | |||||
st.bindings()[1] = file.generic_string(); | |||||
auto tup_iter = nsql::iter_tuples<std::string, std::int64_t>(st); | |||||
std::vector<input_file_info> ret; | std::vector<input_file_info> ret; | ||||
for (auto& [path, mtime] : tup_iter) { | |||||
for (auto [path, mtime] : tup_iter) { | |||||
ret.emplace_back( | ret.emplace_back( | ||||
input_file_info{path, fs::file_time_type(fs::file_time_type::duration(mtime))}); | input_file_info{path, fs::file_time_type(fs::file_time_type::duration(mtime))}); | ||||
} | } | ||||
WHERE file_id IN file | WHERE file_id IN file | ||||
)"_sql); | )"_sql); | ||||
st.reset(); | st.reset(); | ||||
st.bindings[1] = file.generic_string(); | |||||
auto opt_res = sqlite3::unpack_single_opt<std::string, std::string>(st); | |||||
st.bindings()[1] = file.generic_string(); | |||||
auto opt_res = nsql::unpack_single_opt<std::string, std::string>(st); | |||||
if (!opt_res) { | if (!opt_res) { | ||||
return std::nullopt; | return std::nullopt; | ||||
} | } |
return "no-catalog-remote-info.html"; | return "no-catalog-remote-info.html"; | ||||
case errc::git_clone_failure: | case errc::git_clone_failure: | ||||
return "git-clone-failure.html"; | return "git-clone-failure.html"; | ||||
case errc::invalid_remote_url: | |||||
return "invalid-remote-url.html"; | |||||
case errc::invalid_repo_transform: | case errc::invalid_repo_transform: | ||||
return "invalid-repo-transform.html"; | return "invalid-repo-transform.html"; | ||||
case errc::sdist_ident_mismatch: | case errc::sdist_ident_mismatch: | ||||
There are a variety of possible causes. It is best to check the output from | There are a variety of possible causes. It is best to check the output from | ||||
Git in diagnosing this failure. | Git in diagnosing this failure. | ||||
)"; | )"; | ||||
case errc::invalid_remote_url: | |||||
return R"(The given package/remote URL is invalid)"; | |||||
case errc::invalid_repo_transform: | case errc::invalid_repo_transform: | ||||
return R"( | return R"( | ||||
A 'transform' property in a catalog entry contains an invalid transformation. | A 'transform' property in a catalog entry contains an invalid transformation. | ||||
"packages"; | "packages"; | ||||
case errc::git_clone_failure: | case errc::git_clone_failure: | ||||
return "A git-clone operation failed."; | return "A git-clone operation failed."; | ||||
case errc::invalid_remote_url: | |||||
return "The given package/remote URL is not valid"; | |||||
case errc::invalid_repo_transform: | case errc::invalid_repo_transform: | ||||
return "A repository filesystem transformation is invalid"; | return "A repository filesystem transformation is invalid"; | ||||
case errc::sdist_ident_mismatch: | case errc::sdist_ident_mismatch: |
no_catalog_remote_info, | no_catalog_remote_info, | ||||
git_clone_failure, | git_clone_failure, | ||||
invalid_remote_url, | |||||
invalid_repo_transform, | invalid_repo_transform, | ||||
sdist_ident_mismatch, | sdist_ident_mismatch, | ||||
sdist_exists, | sdist_exists, |
#include <dds/source/root.hpp> | #include <dds/source/root.hpp> | ||||
#include <dds/util/algo.hpp> | #include <dds/util/algo.hpp> | ||||
#include <dds/util/log.hpp> | #include <dds/util/log.hpp> | ||||
#include <dds/util/ranges.hpp> | |||||
#include <neo/ref.hpp> | |||||
#include <range/v3/view/filter.hpp> | #include <range/v3/view/filter.hpp> | ||||
#include <range/v3/view/transform.hpp> | #include <range/v3/view/transform.hpp> | ||||
if (fs::is_directory(pf_libs_dir)) { | if (fs::is_directory(pf_libs_dir)) { | ||||
extend(ret, | extend(ret, | ||||
view_safe(fs::directory_iterator(pf_libs_dir)) // | |||||
| ranges::views::filter(has_library_dirs) // | |||||
fs::directory_iterator(pf_libs_dir) // | |||||
| neo::lref // | |||||
| ranges::views::filter(has_library_dirs) // | |||||
| ranges::views::transform( | | ranges::views::transform( | ||||
[&](auto p) { return library_root::from_directory(fs::canonical(p)); })); | [&](auto p) { return library_root::from_directory(fs::canonical(p)); })); | ||||
} | } |
#include <dds/source/dist.hpp> | #include <dds/source/dist.hpp> | ||||
#include <dds/util/log.hpp> | #include <dds/util/log.hpp> | ||||
#include <dds/util/paths.hpp> | #include <dds/util/paths.hpp> | ||||
#include <dds/util/ranges.hpp> | |||||
#include <dds/util/string.hpp> | #include <dds/util/string.hpp> | ||||
#include <neo/ref.hpp> | |||||
#include <range/v3/action/sort.hpp> | #include <range/v3/action/sort.hpp> | ||||
#include <range/v3/action/unique.hpp> | #include <range/v3/action/unique.hpp> | ||||
#include <range/v3/range/conversion.hpp> | #include <range/v3/range/conversion.hpp> | ||||
auto entries = | auto entries = | ||||
// Get the top-level `name-version` dirs | // Get the top-level `name-version` dirs | ||||
view_safe(fs::directory_iterator(dirpath)) // | |||||
// // Convert each dir into an `sdist` object | |||||
fs::directory_iterator(dirpath) // | |||||
| neo::lref // | |||||
// Convert each dir into an `sdist` object | |||||
| ranges::views::transform(try_read_sdist) // | | ranges::views::transform(try_read_sdist) // | ||||
// // Drop items that failed to load | |||||
// Drop items that failed to load | |||||
| ranges::views::filter([](auto&& opt) { return opt.has_value(); }) // | | ranges::views::filter([](auto&& opt) { return opt.has_value(); }) // | ||||
| ranges::views::transform([](auto&& opt) { return *opt; }) // | | ranges::views::transform([](auto&& opt) { return *opt; }) // | ||||
| to<sdist_set>(); | | to<sdist_set>(); |
#include "./root.hpp" | #include "./root.hpp" | ||||
#include <dds/util/ranges.hpp> | |||||
#include <neo/ref.hpp> | |||||
#include <range/v3/range/conversion.hpp> | #include <range/v3/range/conversion.hpp> | ||||
#include <range/v3/view/filter.hpp> | #include <range/v3/view/filter.hpp> | ||||
#include <range/v3/view/transform.hpp> | #include <range/v3/view/transform.hpp> | ||||
using namespace ranges::views; | using namespace ranges::views; | ||||
// Collect all source files from the directory | // Collect all source files from the directory | ||||
return // | return // | ||||
view_safe(fs::recursive_directory_iterator(path)) // | |||||
fs::recursive_directory_iterator(path) // | |||||
| neo::lref // | |||||
| filter([](auto&& entry) { return entry.is_regular_file(); }) // | | filter([](auto&& entry) { return entry.is_regular_file(); }) // | ||||
| transform([&](auto&& entry) { return source_file::from_path(entry, path); }) // | | transform([&](auto&& entry) { return source_file::from_path(entry, path); }) // | ||||
// source_file::from_path returns an optional. Drop nulls | // source_file::from_path returns an optional. Drop nulls |
#pragma once

namespace dds {

// Identity helper: forwards its argument through as a reference so it can be
// piped into a range adaptor chain.
// NOTE(review): the returned `auto&` binds to whatever the caller passed; when
// the argument is a temporary (e.g. `view_safe(fs::directory_iterator(p))`),
// the reference is only valid for the duration of the enclosing
// full-expression — confirm call sites do not store the result.
template <typename T>
constexpr auto& view_safe(T&& t) {
    return t;
}

}  // namespace dds
json_path = dds.build_dir / 'catalog.json' | json_path = dds.build_dir / 'catalog.json' | ||||
import_data = { | import_data = { | ||||
'version': 1, | |||||
'version': 2, | |||||
'packages': { | 'packages': { | ||||
'neo-sqlite3': { | 'neo-sqlite3': { | ||||
'0.3.0': { | '0.3.0': { | ||||
'git': { | |||||
'url': | |||||
'https://github.com/vector-of-bool/neo-sqlite3.git', | |||||
'ref': | |||||
'0.3.0', | |||||
}, | |||||
'url': | |||||
'git+https://github.com/vector-of-bool/neo-sqlite3.git#0.3.0', | |||||
}, | }, | ||||
}, | }, | ||||
}, | }, |
json_fpath = dds.build_dir / 'data.json' | json_fpath = dds.build_dir / 'data.json' | ||||
import_data = { | import_data = { | ||||
'version': 1, | |||||
'version': 2, | |||||
'packages': { | 'packages': { | ||||
'foo': { | 'foo': { | ||||
'1.2.4': { | '1.2.4': { | ||||
'git': { | |||||
'url': 'http://example.com', | |||||
'ref': 'master', | |||||
}, | |||||
'url': 'git+http://example.com#master', | |||||
'depends': [], | 'depends': [], | ||||
}, | }, | ||||
'1.2.5': { | '1.2.5': { | ||||
'git': { | |||||
'url': 'http://example.com', | |||||
'ref': 'master', | |||||
}, | |||||
'url': 'git+http://example.com#master', | |||||
}, | }, | ||||
}, | }, | ||||
}, | }, |
{ | { | ||||
"version": 1, | |||||
"version": 2, | |||||
"packages": { | "packages": { | ||||
"neo-sqlite3": { | "neo-sqlite3": { | ||||
"0.1.0": { | "0.1.0": { | ||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||||
"ref": "0.1.0" | |||||
}, | |||||
"url": "git+https://github.com/vector-of-bool/neo-sqlite3.git#0.1.0" | |||||
}, | }, | ||||
"0.2.2": { | "0.2.2": { | ||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||||
"ref": "0.2.2" | |||||
}, | |||||
"url": "git+https://github.com/vector-of-bool/neo-sqlite3.git#0.2.2" | |||||
}, | }, | ||||
"0.3.0": { | "0.3.0": { | ||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/neo-sqlite3.git", | |||||
"ref": "0.3.0" | |||||
}, | |||||
"url": "git+https://github.com/vector-of-bool/neo-sqlite3.git#0.3.0" | |||||
} | } | ||||
} | } | ||||
} | } |
{ | { | ||||
"version": 1, | |||||
"version": 2, | |||||
"packages": { | "packages": { | ||||
"neo-fun": { | "neo-fun": { | ||||
"0.3.2": { | "0.3.2": { | ||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/neo-fun.git", | |||||
"ref": "0.3.2" | |||||
} | |||||
"url": "git+https://github.com/vector-of-bool/neo-fun.git#0.3.2" | |||||
} | } | ||||
}, | }, | ||||
"range-v3": { | "range-v3": { | ||||
"0.9.1": { | "0.9.1": { | ||||
"git": { | |||||
"url": "https://github.com/ericniebler/range-v3.git", | |||||
"ref": "0.9.1", | |||||
"auto-lib": "Niebler/range-v3" | |||||
} | |||||
"url": "git+https://github.com/ericniebler/range-v3.git?lm=Niebler/range-v3#0.9.1" | |||||
} | } | ||||
} | } | ||||
} | } |
{ | { | ||||
"version": 1, | |||||
"version": 2, | |||||
"packages": {} | "packages": {} | ||||
} | } |
{ | { | ||||
"version": 1, | |||||
"version": 2, | |||||
"packages": { | "packages": { | ||||
"cryptopp": { | "cryptopp": { | ||||
"8.2.0": { | "8.2.0": { | ||||
"git": { | |||||
"url": "https://github.com/weidai11/cryptopp.git", | |||||
"ref": "CRYPTOPP_8_2_0", | |||||
"auto-lib": "cryptopp/cryptopp", | |||||
"transform": [ | |||||
{ | |||||
"move": { | |||||
"from": ".", | |||||
"to": "src/cryptopp", | |||||
"include": [ | |||||
"*.c", | |||||
"*.cpp", | |||||
"*.h" | |||||
] | |||||
} | |||||
"url": "git+https://github.com/weidai11/cryptopp.git?lm=cryptopp/cryptopp#CRYPTOPP_8_2_0", | |||||
"transform": [ | |||||
{ | |||||
"move": { | |||||
"from": ".", | |||||
"to": "src/cryptopp", | |||||
"include": [ | |||||
"*.c", | |||||
"*.cpp", | |||||
"*.h" | |||||
] | |||||
} | } | ||||
] | |||||
} | |||||
} | |||||
] | |||||
} | } | ||||
} | } | ||||
} | } |
{ | { | ||||
"version": 1, | |||||
"version": 2, | |||||
"packages": { | "packages": { | ||||
"nlohmann-json": { | "nlohmann-json": { | ||||
"3.7.1": { | "3.7.1": { | ||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/json.git", | |||||
"ref": "dds/3.7.1" | |||||
}, | |||||
"url": "git+https://github.com/vector-of-bool/json.git#dds/3.7.1", | |||||
"depends": [] | "depends": [] | ||||
} | } | ||||
} | } |
{ | { | ||||
"version": 1, | |||||
"version": 2, | |||||
"packages": { | "packages": { | ||||
"spdlog": { | "spdlog": { | ||||
"1.4.2": { | "1.4.2": { | ||||
"git": { | |||||
"url": "https://github.com/gabime/spdlog.git", | |||||
"ref": "v1.4.2", | |||||
"auto-lib": "spdlog/spdlog" | |||||
}, | |||||
"url": "git+https://github.com/gabime/spdlog.git?lm=spdlog/spdlog#v1.4.2", | |||||
"depends": [] | "depends": [] | ||||
} | } | ||||
} | } |
paths.PREBUILT_DDS, | paths.PREBUILT_DDS, | ||||
toolchain=opts.toolchain, | toolchain=opts.toolchain, | ||||
cat_path=old_cat_path, | cat_path=old_cat_path, | ||||
cat_json_path=Path('catalog.json'), | |||||
cat_json_path=Path('catalog.old.json'), | |||||
dds_flags=[('--repo-dir', ci_repo_dir)]) | dds_flags=[('--repo-dir', ci_repo_dir)]) | ||||
print('Main build PASSED!') | print('Main build PASSED!') | ||||
print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}') | print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}') |
d['auto-lib'] = self.auto_lib | d['auto-lib'] = self.auto_lib | ||||
return d | return d | ||||
def to_dict_2(self) -> str:
    """Encode this Git remote as a single version-2 catalog URL.

    The resulting string has the shape ``git+<url>[?lm=<usage>]#<ref>``:
    the clone URL is prefixed with ``git+``, an optional ``lm`` query
    parameter carries the auto-lib usage, and the fragment is the Git ref.
    """
    pieces = [f'git+{self.url}']
    if self.auto_lib:
        pieces.append(f'?lm={self.auto_lib}')
    pieces.append(f'#{self.ref}')
    return ''.join(pieces)
RemoteInfo = Union[Git] | RemoteInfo = Union[Git] | ||||
ret['git'] = self.remote.to_dict() | ret['git'] = self.remote.to_dict() | ||||
return ret | return ret | ||||
def to_dict_2(self) -> dict:
    """Render this package version as a version-2 catalog entry.

    Unlike the version-1 form, the remote information is collapsed into a
    single ``url`` string (see the remote's ``to_dict_2``), while the
    filesystem transforms move to a top-level ``transform`` key.
    """
    entry: dict = {'description': self.description}
    entry['depends'] = list(self.depends)
    entry['transform'] = [xform.to_dict() for xform in self.remote.transforms]
    entry['url'] = self.remote.to_dict_2()
    return entry
class VersionSet(NamedTuple): | class VersionSet(NamedTuple): | ||||
version: str | version: str | ||||
# yapf: disable | # yapf: disable | ||||
PACKAGES = [ | PACKAGES = [ | ||||
github_package('neo-buffer', 'vector-of-bool/neo-buffer', | github_package('neo-buffer', 'vector-of-bool/neo-buffer', | ||||
['0.2.1', '0.3.0', '0.4.0', '0.4.1']), | |||||
['0.2.1', '0.3.0', '0.4.0', '0.4.1', '0.4.2']), | |||||
github_package('neo-compress', 'vector-of-bool/neo-compress', ['0.1.0']), | github_package('neo-compress', 'vector-of-bool/neo-compress', ['0.1.0']), | ||||
github_package('neo-url', 'vector-of-bool/neo-url', ['0.1.0', '0.1.1', '0.1.2']), | |||||
github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3', | github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3', | ||||
['0.2.3', '0.3.0']), | |||||
['0.2.3', '0.3.0', '0.4.0', '0.4.1']), | |||||
github_package('neo-fun', 'vector-of-bool/neo-fun', [ | github_package('neo-fun', 'vector-of-bool/neo-fun', [ | ||||
'0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0', '0.4.1' | |||||
'0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0', '0.4.1', | |||||
'0.4.2', '0.5.0', '0.5.1', '0.5.2', '0.5.3', | |||||
]), | ]), | ||||
github_package('neo-concepts', 'vector-of-bool/neo-concepts', ( | github_package('neo-concepts', 'vector-of-bool/neo-concepts', ( | ||||
'0.2.2', | '0.2.2', | ||||
args = parser.parse_args() | args = parser.parse_args() | ||||
data = { | data = { | ||||
'version': 2, | |||||
'packages': { | |||||
pkg.name: {ver.version: ver.to_dict_2() | |||||
for ver in pkg.versions} | |||||
for pkg in PACKAGES | |||||
} | |||||
} | |||||
old_data = { | |||||
'version': 1, | 'version': 1, | ||||
'packages': { | 'packages': { | ||||
pkg.name: {ver.version: ver.to_dict() | pkg.name: {ver.version: ver.to_dict() | ||||
} | } | ||||
json_str = json.dumps(data, indent=2, sort_keys=True) | json_str = json.dumps(data, indent=2, sort_keys=True) | ||||
Path('catalog.json').write_text(json_str) | Path('catalog.json').write_text(json_str) | ||||
Path('catalog.old.json').write_text( | |||||
json.dumps(old_data, indent=2, sort_keys=True)) | |||||
cpp_template = textwrap.dedent(r''' | cpp_template = textwrap.dedent(r''' | ||||
#include <dds/catalog/package_info.hpp> | #include <dds/catalog/package_info.hpp> |