"neo/io", | "neo/io", | ||||
"neo/http", | "neo/http", | ||||
"neo/url", | "neo/url", | ||||
"boost/leaf", | |||||
// Explicit zlib link is required due to linker input order bug. | // Explicit zlib link is required due to linker input order bug. | ||||
// Can be removed after alpha.5 | // Can be removed after alpha.5 | ||||
"zlib/zlib", | "zlib/zlib", |
"fmt^7.0.3", | "fmt^7.0.3", | ||||
"neo-http^0.1.0", | "neo-http^0.1.0", | ||||
"neo-io^0.1.0", | "neo-io^0.1.0", | ||||
"boost.leaf~0.3.0", | |||||
], | ], | ||||
"test_driver": "Catch-Main" | "test_driver": "Catch-Main" | ||||
} | } |
#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/repo/repo.hpp>
#include <dds/repoman/repoman.hpp>
#include <dds/source/dist.hpp>
#include <dds/toolchain/from_json.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/log.hpp>
#include <dds/util/paths.hpp>
#include <dds/util/result.hpp>
#include <dds/util/signal.hpp>
#include <boost/leaf/handle_error.hpp>
#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>
#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/sqlite3/error.hpp>
#include <nlohmann/json.hpp>
#include <range/v3/action/join.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/concat.hpp>
    }
};
/*
########  ######## ########   #######  ##     ##    ###    ##    ##
##     ## ##       ##     ## ##     ## ###   ###   ## ##   ###   ##
##     ## ##       ##     ## ##     ## #### ####  ##   ##  ####  ##
########  ######   ########  ##     ## ## ### ## ##     ## ## ## ##
##   ##   ##       ##        ##     ## ##     ## ######### ##  ####
##    ##  ##       ##        ##     ## ##     ## ##     ## ##   ###
##     ## ######## ##         #######  ##     ## ##     ## ##    ##
*/
struct cli_repoman {
    cli_base&     base;
    args::Command cmd{base.cmd_group, "repoman", "Manage a package repository"};
    common_flags  _common{cmd};
    args::Group   repoman_group{cmd, "Repoman subcommand"};
    struct {
        cli_repoman&  parent;
        args::Command cmd{parent.repoman_group, "init", "Initialize a new repository directory"};
        common_flags  _common{cmd};
        args::Positional<dds::fs::path> where{cmd,
                                              "<repo-path>",
                                              "Directory where the repository will be created",
                                              args::Options::Required};
        string_flag name{cmd,
                         "<name>",
                         "Give the repository a name (should be GLOBALLY unique). If not provided, "
                         "a new random one will be generated.",
                         {"name"}};
        int run() {
            auto repo
                = dds::repo_manager::create(where.Get(),
                                            name ? std::make_optional(name.Get()) : std::nullopt);
            dds_log(info, "Created new repository '{}' in {}", repo.name(), repo.root());
            return 0;
        }
    } init{*this};
    struct {
        cli_repoman&  parent;
        args::Command cmd{parent.repoman_group, "import", "Import packages into a repository"};
        common_flags  _common{cmd};
        args::Positional<dds::fs::path> where{cmd,
                                              "<repo-path>",
                                              "Directory of the repository to import into",
                                              args::Options::Required};
        args::PositionalList<dds::fs::path> files{cmd,
                                                  "<targz-path>",
                                                  "Path to one or more sdist archives to import"};
        int run() {
            auto repo = dds::repo_manager::open(where.Get());
            for (auto pkg : files.Get()) {
                repo.import_targz(pkg);
            }
            return 0;
        }
    } import{*this};
    struct {
        cli_repoman&  parent;
        args::Command cmd{parent.repoman_group, "remove", "Remove packages from the repository"};
        common_flags  _common{cmd};
        args::Positional<dds::fs::path> where{cmd,
                                              "<repo-path>",
                                              "Directory of the repository to modify",
                                              args::Options::Required};
        args::PositionalList<std::string> packages{cmd,
                                                   "<package-id>",
                                                   "One or more identifiers of packages to remove"};
        int run() {
            auto repo = dds::repo_manager::open(where.Get());
            for (auto& str : packages.Get()) {
                auto pkg_id = dds::package_id::parse(str);
                repo.delete_package(pkg_id);
            }
            return 0;
        }
    } remove{*this};
    struct {
        cli_repoman&  parent;
        args::Command cmd{parent.repoman_group, "ls", "List packages in the repository"};
        common_flags  _common{cmd};
        args::Positional<dds::fs::path> where{cmd,
                                              "<repo-path>",
                                              "Directory of the repository to inspect",
                                              args::Options::Required};
        int run() {
            auto repo = dds::repo_manager::open(where.Get());
            for (auto pkg_id : repo.all_packages()) {
                std::cout << pkg_id.to_string() << '\n';
            }
            return 0;
        }
    } ls{*this};
    dds::result<int> _run() {
        if (init.cmd) {
            return init.run();
        } else if (import.cmd) {
            return import.run();
        } else if (remove.cmd) {
            return remove.run();
        } else if (ls.cmd) {
            return ls.run();
        }
        return 66;
    }
    int run() {
        return boost::leaf::try_handle_all(  //
            [&]() -> dds::result<int> {
                try {
                    return _run();
                } catch (...) {
                    return dds::capture_exception();
                }
            },
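            // LEAF calls the first handler below whose full argument list can be
            // satisfied by the error objects attached to the failure; the final
            // diagnostic_info overload acts as the catch-all.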
            [](dds::e_sqlite3_error_exc,
               boost::leaf::match<neo::sqlite3::errc, neo::sqlite3::errc::constraint_unique>,
               dds::e_repo_import_targz tgz,
               dds::package_id          pkg_id) {
                dds_log(error,
                        "Package {} (from {}) is already present in the repository",
                        pkg_id.to_string(),
                        tgz.path);
                return 1;
            },
            [](dds::e_sqlite3_error_exc e, dds::e_repo_import_targz tgz) {
                dds_log(error,
                        "Database error while importing tar file {}: {}",
                        tgz.path,
                        e.message);
                return 1;
            },
            [](dds::e_sqlite3_error_exc e, dds::e_init_repo init, dds::e_init_repo_db init_db) {
                dds_log(
                    error,
                    "SQLite error while initializing repository in [{}] (SQLite database {}): {}",
                    init.path,
                    init_db.path,
                    e.message);
                return 1;
            },
            [](dds::e_system_error_exc e, dds::e_repo_import_targz tgz) {
                dds_log(error, "Failed to import package archive {}: {}", tgz.path, e.message);
                return 1;
            },
            [](dds::e_system_error_exc e, dds::e_open_repo_db db) {
                dds_log(error,
                        "Error while opening repository database {}: {}",
                        db.path,
                        e.message);
                return 1;
            },
            [](dds::e_sqlite3_error_exc e, dds::e_init_repo init) {
                dds_log(error,
                        "SQLite error while initializing repository in [{}]: {}",
                        init.path,
                        e.message);
                return 1;
            },
            [](dds::e_system_error_exc e, dds::e_repo_delete_targz tgz, dds::package_id pkg_id) {
                dds_log(error,
                        "Cannot delete requested package '{}' from repository (Archive {}): {}",
                        pkg_id.to_string(),
                        tgz.path,
                        e.message);
                return 1;
            },
            [](dds::e_system_error_exc e) {
                dds_log(error, "Unhandled system_error: {}", e.message);
                return 1;
            },
            [](boost::leaf::diagnostic_info const& info) {
                dds_log(error, "Unknown error: {}", info);
                return 42;
            });
    }
};
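/*
Example invocations dispatched by the struct above (the `dds` executable name
and all paths/package ids are illustrative):

    dds repoman init ./my-repo --name=my-repo
    dds repoman import ./my-repo ./pkgs/neo-url@0.2.1.tar.gz
    dds repoman ls ./my-repo
    dds repoman remove ./my-repo neo-url@0.2.1
*/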
/*
########  ######## ########   #######
##     ## ##       ##     ## ##     ##
    cli_build      build{cli};
    cli_sdist      sdist{cli};
    cli_repo       repo{cli};
    cli_repoman    repoman{cli};
    cli_catalog    catalog{cli};
    cli_build_deps build_deps{cli};
        return sdist.run();
    } else if (repo.cmd) {
        return repo.run();
    } else if (repoman.cmd) {
        return repoman.run();
    } else if (catalog.cmd) {
        return catalog.run();
    } else if (build_deps.cmd) {
package_manifest package_manifest::load_from_file(const fs::path& fpath) {
    auto content = slurp_file(fpath);
    return load_from_json5_str(content, fpath.string());
}
package_manifest package_manifest::load_from_json5_str(std::string_view content,
                                                        std::string_view input_name) {
    auto data = json5::parse_data(content);
    try {
        return parse_json(data, input_name);
    } catch (const semester::walk_error& e) {
        throw_user_error<errc::invalid_pkg_manifest>(e.what());
    }
     * Load a package manifest from a file on disk.
     */
    static package_manifest load_from_file(path_ref);
    /**
     * @brief Load a package manifest from an in-memory string
     */
    static package_manifest load_from_json5_str(std::string_view, std::string_view input_name);
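    // Usage sketch (hypothetical caller; the manifest text may come from any
    // in-memory source, such as a file read out of a .tar.gz archive):
    //
    //   std::string_view text = /* JSON5 manifest content obtained elsewhere */;
    //   auto man = package_manifest::load_from_json5_str(text, "<in-memory manifest>");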
    /**
     * Find a package manifest contained within a directory. This will search
#include "./repoman.hpp" | |||||
#include <dds/package/manifest.hpp> | |||||
#include <dds/util/log.hpp> | |||||
#include <dds/util/result.hpp> | |||||
#include <neo/gzip.hpp> | |||||
#include <neo/inflate.hpp> | |||||
#include <neo/io/stream/buffers.hpp> | |||||
#include <neo/io/stream/file.hpp> | |||||
#include <neo/sqlite3/exec.hpp> | |||||
#include <neo/sqlite3/single.hpp> | |||||
#include <neo/sqlite3/transaction.hpp> | |||||
#include <neo/tar/ustar.hpp> | |||||
#include <neo/transform_io.hpp> | |||||
#include <neo/utility.hpp> | |||||
#include <nlohmann/json.hpp> | |||||
using namespace dds;
namespace nsql = neo::sqlite3;
using namespace nsql::literals;
namespace {
void migrate_db_1(nsql::database_ref db) {
    db.exec(R"(
        CREATE TABLE dds_repo_packages (
            package_id INTEGER PRIMARY KEY,
            name TEXT NOT NULL,
            version TEXT NOT NULL,
            description TEXT NOT NULL,
            UNIQUE (name, version)
        );
        CREATE TABLE dds_repo_package_deps (
            dep_id INTEGER PRIMARY KEY,
            package_id INTEGER NOT NULL
                REFERENCES dds_repo_packages
                ON DELETE CASCADE,
            dep_name TEXT NOT NULL,
            low TEXT NOT NULL,
            high TEXT NOT NULL,
            UNIQUE(package_id, dep_name)
        );
    )");
}
void ensure_migrated(nsql::database_ref db, std::optional<std::string_view> name) {
    db.exec(R"(
        PRAGMA foreign_keys = 1;
        CREATE TABLE IF NOT EXISTS dds_repo_meta (
            meta_version INTEGER DEFAULT 1,
            version INTEGER NOT NULL,
            name TEXT NOT NULL
        );
        -- Insert the initial metadata
        INSERT INTO dds_repo_meta (version, name)
        SELECT 0, 'dds-repo-' || lower(hex(randomblob(6)))
        WHERE NOT EXISTS (SELECT 1 FROM dds_repo_meta);
    )");
    nsql::transaction_guard tr{db};
    auto meta_st   = db.prepare("SELECT version FROM dds_repo_meta");
    auto [version] = nsql::unpack_single<int>(meta_st);
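    // Apply any schema migrations newer than the stored version, then bump the
    // stored version to the current value (future migrate_db_N steps would be
    // gated the same way).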
    constexpr int current_database_version = 1;
    if (version < 1) {
        migrate_db_1(db);
    }
    nsql::exec(db.prepare("UPDATE dds_repo_meta SET version=?"), current_database_version);
    if (name) {
        nsql::exec(db.prepare("UPDATE dds_repo_meta SET name=?"), *name);
    }
}
}  // namespace
repo_manager repo_manager::create(path_ref directory, std::optional<std::string_view> name) {
    {
        DDS_E_SCOPE(e_init_repo{directory});
        fs::create_directories(directory);
        auto db_path = directory / "repo.db";
        auto db      = nsql::database::open(db_path.string());
        DDS_E_SCOPE(e_init_repo_db{db_path});
        DDS_E_SCOPE(e_open_repo_db{db_path});
        ensure_migrated(db, name);
        fs::create_directories(directory / "data");
    }
    return open(directory);
}
repo_manager repo_manager::open(path_ref directory) {
    DDS_E_SCOPE(e_open_repo{directory});
    auto db_path = directory / "repo.db";
    DDS_E_SCOPE(e_open_repo_db{db_path});
    if (!fs::is_regular_file(db_path)) {
        throw std::system_error(make_error_code(std::errc::no_such_file_or_directory),
                                "The database file does not exist");
    }
    auto db = nsql::database::open(db_path.string());
    ensure_migrated(db, std::nullopt);
    return repo_manager{fs::canonical(directory), std::move(db)};
}
std::string repo_manager::name() const noexcept {
    auto [name] = nsql::unpack_single<std::string>(_stmts("SELECT name FROM dds_repo_meta"_sql));
    return name;
}
void repo_manager::import_targz(path_ref tgz_file) {
    neo_assertion_breadcrumbs("Importing targz file", tgz_file.string());
    DDS_E_SCOPE(e_repo_import_targz{tgz_file});
    dds_log(info, "Importing sdist archive [{}]", tgz_file.string());
    neo::ustar_reader tar{
        neo::buffer_transform_source{neo::stream_io_buffers{
                                         neo::file_stream::open(tgz_file, neo::open_mode::read)},
                                     neo::gzip_decompressor{neo::inflate_decompressor{}}}};
    std::optional<package_manifest> man;
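    // Walk the archive members looking for a package manifest
    // (package.jsonc/.json5/.json) and parse it directly from the tar data.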
    for (auto mem : tar) {
        if (fs::path(mem.filename_str()).lexically_normal()
            == neo::oper::none_of("package.jsonc", "package.json5", "package.json")) {
            continue;
        }
        auto content        = tar.all_data();
        auto synth_filename = tgz_file / mem.filename_str();
        man = package_manifest::load_from_json5_str(std::string_view(content),
                                                    synth_filename.string());
        break;
    }
    if (!man) {
        dds_log(critical,
                "Given archive [{}] does not contain a package manifest file",
                tgz_file.string());
        throw std::runtime_error("Invalid package archive");
    }
    DDS_E_SCOPE(man->pkg_id);
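    // Record the package and its dependencies in a single transaction so a
    // failed import leaves the repository database unchanged.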
    neo::sqlite3::transaction_guard tr{_db};
    dds_log(debug, "Recording package {}@{}", man->pkg_id.name, man->pkg_id.version.to_string());
    nsql::exec(  //
        _stmts(R"(
            INSERT INTO dds_repo_packages (name, version, description)
            VALUES (?, ?, 'No description')
        )"_sql),
        man->pkg_id.name,
        man->pkg_id.version.to_string());
    auto  package_id    = _db.last_insert_rowid();
    auto& insert_dep_st = _stmts(R"(
        INSERT INTO dds_repo_package_deps(package_id, dep_name, low, high)
        VALUES (?, ?, ?, ?)
    )"_sql);
    for (auto& dep : man->dependencies) {
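        // The dds_repo_package_deps schema stores one low/high version interval
        // per dependency, hence the single-interval assertion below.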
        assert(dep.versions.num_intervals() == 1);
        auto iv_1 = *dep.versions.iter_intervals().begin();
        dds_log(trace, "  Depends on: {}", dep.to_string());
        nsql::exec(insert_dep_st,
                   package_id,
                   dep.name,
                   iv_1.low.to_string(),
                   iv_1.high.to_string());
    }
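    // Copy the archive into the repository's data tree as
    // <repo>/data/<name>/<version>.tar.gz.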
    auto dest_dir  = data_dir() / man->pkg_id.name;
    auto dest_path = dest_dir / fmt::format("{}.tar.gz", man->pkg_id.version.to_string());
    fs::create_directories(dest_dir);
    fs::copy(tgz_file, dest_path);
    tr.commit();
}
void repo_manager::delete_package(package_id pkg_id) {
    neo::sqlite3::transaction_guard tr{_db};
    DDS_E_SCOPE(pkg_id);
    nsql::exec(  //
        _stmts(R"(
            DELETE FROM dds_repo_packages
             WHERE name = ?
               AND version = ?
        )"_sql),
        pkg_id.name,
        pkg_id.version.to_string());
    /// XXX: Verify with _db.changes() that we actually deleted one row
    auto name_dir = data_dir() / pkg_id.name;
    auto ver_file = name_dir / fmt::format("{}.tar.gz", pkg_id.version.to_string());
    DDS_E_SCOPE(e_repo_delete_targz{ver_file});
    if (!fs::is_regular_file(ver_file)) {
        throw std::system_error(std::make_error_code(std::errc::no_such_file_or_directory),
                                "No source archive for the requested package");
    }
    fs::remove(ver_file);
    tr.commit();
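    // Best-effort cleanup: drop the per-name directory if this was the last
    // version of the package; other versions may still live there, so a
    // "directory not empty" error is ignored.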
    std::error_code ec;
    fs::remove(name_dir, ec);
    if (ec && ec != std::errc::directory_not_empty) {
        throw std::system_error(ec, "Failed to delete package name directory");
    }
}
#pragma once
#include <dds/package/id.hpp>
#include <dds/util/fs.hpp>
#include <neo/sqlite3/database.hpp>
#include <neo/sqlite3/iter_tuples.hpp>
#include <neo/sqlite3/statement_cache.hpp>
#include <range/v3/view/transform.hpp>
namespace dds {
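// Error-context tag types. Each carries the filesystem path relevant to a
// failure and is attached with DDS_E_SCOPE so that LEAF error handlers (see
// the repoman CLI) can match on it.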
struct e_init_repo {
    fs::path path;
};
struct e_open_repo {
    fs::path path;
};
struct e_init_repo_db {
    fs::path path;
};
struct e_open_repo_db {
    fs::path path;
};
struct e_repo_import_targz {
    fs::path path;
};
struct e_repo_delete_targz {
    fs::path path;
};
class repo_manager {
    neo::sqlite3::database                _db;
    mutable neo::sqlite3::statement_cache _stmts{_db};
    fs::path                              _root;
    explicit repo_manager(path_ref root, neo::sqlite3::database db)
        : _db(std::move(db))
        , _root(root) {}

public:
    repo_manager(repo_manager&&) = default;
    static repo_manager create(path_ref directory, std::optional<std::string_view> name);
    static repo_manager open(path_ref directory);
    auto        data_dir() const noexcept { return _root / "data"; }
    path_ref    root() const noexcept { return _root; }
    std::string name() const noexcept;
    void import_targz(path_ref tgz_path);
    void delete_package(package_id id);
    auto all_packages() const noexcept {
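        // Lazily yields a package_id for every (name, version) row in
        // dds_repo_packages; rows are materialized only as the returned range
        // is iterated.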
        using namespace neo::sqlite3::literals;
        auto& st   = _stmts("SELECT name, version FROM dds_repo_packages"_sql);
        auto  tups = neo::sqlite3::iter_tuples<std::string, std::string>(st);
        return tups | ranges::views::transform([](auto&& pair) {
                   auto [name, version] = pair;
                   return package_id{name, semver::version::parse(version)};
               });
    }
};
}  // namespace dds
#include <dds/repoman/repoman.hpp>
#include <dds/temp.hpp>
#include <neo/sqlite3/error.hpp>
#include <catch2/catch.hpp>
namespace {
const auto THIS_FILE = dds::fs::canonical(__FILE__);
const auto THIS_DIR  = THIS_FILE.parent_path();
const auto REPO_ROOT = (THIS_DIR / "../../../").lexically_normal();
const auto DATA_DIR  = REPO_ROOT / "data";
}  // namespace
TEST_CASE("Open a repository") {
    auto tdir = dds::temporary_dir::create();
    auto repo = dds::repo_manager::create(tdir.path(), "test-repo");
    auto neo_url_tgz = DATA_DIR / "neo-url@0.2.1.tar.gz";
    repo.import_targz(neo_url_tgz);
    CHECK(dds::fs::is_directory(repo.data_dir() / "neo-url/"));
    CHECK(dds::fs::is_regular_file(repo.data_dir() / "neo-url/0.2.1.tar.gz"));
    CHECK_THROWS_AS(repo.import_targz(neo_url_tgz), neo::sqlite3::constraint_unique_error);
    repo.delete_package(dds::package_id::parse("neo-url@0.2.1"));
    CHECK_FALSE(dds::fs::is_regular_file(repo.data_dir() / "neo-url/0.2.1.tar.gz"));
    CHECK_FALSE(dds::fs::is_directory(repo.data_dir() / "neo-url"));
    CHECK_THROWS_AS(repo.delete_package(dds::package_id::parse("neo-url@0.2.1")),
                    std::system_error);
    CHECK_NOTHROW(repo.import_targz(neo_url_tgz));
}
#include "./result.hpp" | |||||
#include <neo/sqlite3/error.hpp> | |||||
dds::error_id dds::capture_exception() { | |||||
try { | |||||
throw; | |||||
} catch (const neo::sqlite3::sqlite3_error& e) { | |||||
return current_error().load(e_sqlite3_error_exc{std::string(e.what()), e.code()}, | |||||
e.code(), | |||||
neo::sqlite3::errc{e.code().value()}); | |||||
} catch (const std::system_error& e) { | |||||
return current_error().load(e_system_error_exc{std::string(e.what()), e.code()}, e.code()); | |||||
} | |||||
} |
#pragma once
#include <neo/pp.hpp>
#include <boost/leaf/on_error.hpp>
#include <boost/leaf/result.hpp>
#include <exception>
#include <string>
namespace dds {
using boost::leaf::current_error;
using boost::leaf::error_id;
using boost::leaf::new_error;
using boost::leaf::result;
/**
 * @brief Error object representing a captured system_error exception
 */
struct e_system_error_exc {
    std::string     message;
    std::error_code code;
};
/**
 * @brief Error object representing a captured neo::sqlite3::sqlite3_error
 */
struct e_sqlite3_error_exc {
    std::string     message;
    std::error_code code;
};
/**
 * @brief Capture the currently in-flight "special" exceptions as new error objects. Works around
 * a bug in Boost.LEAF when catching std::system_error.
 */
error_id capture_exception();
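// Typical use at an error boundary (mirrors the CLI handlers; `do_work` is a
// hypothetical callable returning dds::result<int>):
//
//   return boost::leaf::try_handle_all(
//       [&]() -> dds::result<int> {
//           try {
//               return do_work();
//           } catch (...) {
//               return dds::capture_exception();
//           }
//       },
//       [](dds::e_system_error_exc e) {
//           dds_log(error, "Unhandled system_error: {}", e.message);
//           return 1;
//       },
//       [](boost::leaf::diagnostic_info const&) { return 42; });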
/**
 * @brief Generate a leaf::on_error object that loads the given expression into the currently
 * in-flight error if the current scope is exited via an exception or a bad result<>
 */
#define DDS_E_SCOPE(...)                                                                           \
    auto NEO_CONCAT(_err_info_, __LINE__) = boost::leaf::on_error([&] { return __VA_ARGS__; })
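// Usage (as in repoman.cpp): attach context that error handlers can later
// match on if this scope is exited with a failure:
//
//   void import_targz(path_ref tgz_file) {
//       DDS_E_SCOPE(e_repo_import_targz{tgz_file});
//       // ... any error escaping this scope now carries the archive path
//   }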
}  // namespace dds