#include <dds/catalog/get.hpp>
#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/http/session.hpp>
#include <dds/remote/remote.hpp>
#include <dds/repo/repo.hpp>
#include <dds/repoman/repoman.hpp>
#include <dds/source/dist.hpp>
        }
    } import_{*this};
    struct {
        cli_repo& parent;
        args::Command cmd{parent.repo_group, "add", "Add a remote repository"};
        common_flags  _flags{cmd};

        catalog_path_flag cat_path{cmd};

        args::Positional<std::string> url{cmd,
                                          "<url>",
                                          "URL of a repository to add",
                                          args::Options::Required};

        args::Flag update{cmd, "update", "Update catalog contents immediately", {"update", 'U'}};

        int run() {
            return boost::leaf::try_handle_all(  //
                [&]() -> dds::result<int> {
                    try {
                        // Connect to the remote, record it in the catalog, and (optionally)
                        // pull its package listing immediately.
                        auto cat  = cat_path.open();
                        auto repo = dds::remote_repository::connect(url.Get());
                        repo.store(cat.database());
                        if (update) {
                            repo.update_catalog(cat.database());
                        }
                    } catch (...) {
                        return dds::capture_exception();
                    }
                    return 0;
                },
                [&](neo::url_validation_error url_err, dds::e_url_string bad_url) {
                    dds_log(error, "Invalid URL [{}]: {}", bad_url.value, url_err.what());
                    return 1;
                },
                [&](const json5::parse_error& e, dds::e_http_url bad_url) {
                    dds_log(error,
                            "Error parsing JSON downloaded from URL [{}]: {}",
                            bad_url.value,
                            e.what());
                    return 1;
                },
                [](dds::e_sqlite3_error_exc e, dds::e_url_string url) {
                    dds_log(error,
                            "Error accessing remote database (From {}): {}",
                            url.value,
                            e.message);
                    return 1;
                },
                [](dds::e_sqlite3_error_exc e) {
                    dds_log(error, "Unexpected database error: {}", e.message);
                    return 1;
                },
                [&](dds::e_system_error_exc e, dds::e_http_connect conn) {
                    dds_log(error,
                            "Error opening connection to [{}:{}]: {}",
                            conn.host,
                            conn.port,
                            e.message);
                    return 1;
                },
                [](const std::exception& e) {
                    dds_log(error, "An unknown unhandled exception occurred: {}", e.what());
                    return 1;
                },
                [](dds::e_system_error_exc e) {
                    dds_log(error, "An unknown system_error occurred: {}", e.message);
                    return 42;
                },
                [](boost::leaf::diagnostic_info const& info) {
                    dds_log(error, "An unknown error occurred? {}", info);
                    return 42;
                });
        }
    } add{*this};
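
    // Example invocation (sketch): the flag spellings below are assumptions based on the
    // definitions above and the integration test's helper properties (catalog_path_flag is
    // assumed to spell itself as --catalog=<path>), not a verified CLI transcript:
    //
    //     dds repo --repo-dir=<dir> add --catalog=<catalog-db> http://localhost:4646 --update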
    struct {
        cli_repo& parent;
        args::Command cmd{parent.repo_group, "init", "Initialize a directory as a repository"};
            return init.run();
        } else if (import_.cmd) {
            return import_.run();
        } else if (add.cmd) {
            return add.run();
        } else {
            assert(false);
            std::terminate();
    db.exec(R"(
        CREATE TABLE dds_cat_remotes (
            remote_id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL UNIQUE,
            gen_ident TEXT NOT NULL,
            remote_url TEXT NOT NULL
        );
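
        -- Note: rows in dds_cat_remotes are written by remote_repository::store()
        -- (name, gen_ident, remote_url); update_catalog() later looks a remote up by
        -- name to obtain the remote_id attached to its imported packages.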
| #include "./remote.hpp" | |||||
| #include <dds/error/errors.hpp> | |||||
| #include <dds/http/session.hpp> | |||||
| #include <dds/temp.hpp> | |||||
| #include <dds/util/result.hpp> | |||||
| #include <neo/sqlite3/exec.hpp> | |||||
| #include <neo/sqlite3/single.hpp> | |||||
| #include <neo/sqlite3/transaction.hpp> | |||||
| #include <neo/url.hpp> | |||||
| #include <neo/utility.hpp> | |||||
| using namespace dds; | |||||
| namespace nsql = neo::sqlite3; | |||||
| namespace { | |||||
| struct remote_db { | |||||
| temporary_dir _tempdir; | |||||
| nsql::database db; | |||||
| static remote_db download_and_open(neo::url const& url) { | |||||
| neo_assert(expects, | |||||
| url.host.has_value(), | |||||
| "URL does not have a hostname??", | |||||
| url.to_string()); | |||||
| auto sess = url.scheme == "https" | |||||
| ? http_session::connect_ssl(*url.host, url.port_or_default_port_or(443)) | |||||
| : http_session::connect(*url.host, url.port_or_default_port_or(80)); | |||||
| auto tempdir = temporary_dir::create(); | |||||
| auto repo_db_dl = tempdir.path() / "repo.db"; | |||||
| fs::create_directories(tempdir.path()); | |||||
| sess.download_file( | |||||
| { | |||||
| .method = "GET", | |||||
| .path = url.path, | |||||
| }, | |||||
| repo_db_dl); | |||||
| auto db = nsql::open(repo_db_dl.string()); | |||||
| return {tempdir, std::move(db)}; | |||||
| } | |||||
| static remote_db download_and_open_for_base(neo::url url) { | |||||
| auto repo_url = url; | |||||
| repo_url.path = fs::path(url.path).append("repo.db").string(); | |||||
| return download_and_open(repo_url); | |||||
| } | |||||
| static remote_db download_and_open_for_base(std::string_view url_str) { | |||||
| return download_and_open_for_base(neo::url::parse(url_str)); | |||||
| } | |||||
| }; | |||||
| } // namespace | |||||
| remote_repository remote_repository::connect(std::string_view url_str) { | |||||
| DDS_E_SCOPE(e_url_string{std::string(url_str)}); | |||||
| const auto url = neo::url::parse(url_str); | |||||
| auto db = remote_db::download_and_open_for_base(url); | |||||
| auto name_st = db.db.prepare("SELECT name FROM dds_repo_meta"); | |||||
| auto [name] = nsql::unpack_single<std::string>(name_st); | |||||
| remote_repository ret; | |||||
| ret._base_url = url; | |||||
| ret._name = name; | |||||
| return ret; | |||||
| } | |||||
| void remote_repository::store(nsql::database_ref db) { | |||||
| auto st = db.prepare(R"( | |||||
| INSERT INTO dds_cat_remotes (name, gen_ident, remote_url) | |||||
| VALUES (?, ?, ?) | |||||
| )"); | |||||
| nsql::exec(st, _name, "[placeholder]", _base_url.to_string()); | |||||
| } | |||||
| void remote_repository::update_catalog(nsql::database_ref db) { | |||||
| auto rdb = remote_db::download_and_open_for_base(_base_url); | |||||
| auto db_path = rdb._tempdir.path() / "repo.db"; | |||||
| auto rid_st = db.prepare("SELECT remote_id FROM dds_cat_remotes WHERE name = ?"); | |||||
| rid_st.bindings()[1] = _name; | |||||
| auto [remote_id] = nsql::unpack_single<std::int64_t>(rid_st); | |||||
| nsql::transaction_guard tr{db}; | |||||
| nsql::exec(db.prepare("ATTACH DATABASE ? AS remote"), db_path.string()); | |||||
| nsql::exec( // | |||||
| db.prepare(R"( | |||||
| DELETE FROM dds_cat_pkgs | |||||
| WHERE remote_id = ? | |||||
| )"), | |||||
| remote_id); | |||||
| nsql::exec( // | |||||
| db.prepare(R"( | |||||
| INSERT INTO dds_cat_pkgs | |||||
| (name, version, description, remote_url, remote_id) | |||||
| SELECT | |||||
| name, | |||||
| version, | |||||
| description, | |||||
| printf('dds:%s/%s', name, version), | |||||
| ?1 | |||||
| FROM remote.dds_repo_packages | |||||
| )"), | |||||
| remote_id); | |||||
| } |
#pragma once

#include <dds/util/fs.hpp>
#include <dds/util/result.hpp>

#include <neo/concepts.hpp>
#include <neo/sqlite3/database.hpp>
#include <neo/url.hpp>

#include <string_view>
#include <variant>

namespace dds {

class remote_repository {
    std::string _name;
    neo::url    _base_url;

    remote_repository() = default;

public:
    static remote_repository connect(std::string_view url);

    // const repository_manifest& manifest() const noexcept;

    void store(neo::sqlite3::database_ref);
    void update_catalog(neo::sqlite3::database_ref);
};

}  // namespace dds
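
// Rough usage sketch (mirrors the `repo add` command handler above; `catalog_db` below
// stands in for the neo::sqlite3 database handle obtained from the opened catalog):
//
//     auto repo = dds::remote_repository::connect("http://localhost:4646");
//     repo.store(catalog_db);           // Record the remote in dds_cat_remotes
//     repo.update_catalog(catalog_db);  // Re-download repo.db and refresh dds_cat_pkgs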
    std::error_code code;
};

struct e_url_string {
    std::string value;
};

/**
 * @brief Capture currently in-flight special exceptions as a new error object. Works around a bug
 * in Boost.LEAF when catching std::system_error.
            httpd.shutdown()


@pytest.yield_fixture
def http_repo_server():
    handler = partial(
        DirectoryServingHTTPRequestHandler,
        dir=Path.cwd() / 'data/test-repo-1')
    addr = ('0.0.0.0', 4646)
    pool = ThreadPoolExecutor()
    with HTTPServer(addr, handler) as httpd:
        pool.submit(lambda: httpd.serve_forever(poll_interval=0.1))
        try:
            yield
        finally:
            httpd.shutdown()


def test_import_http(dds: DDS, http_import_server):
    dds.repo_dir.mkdir(parents=True, exist_ok=True)
    dds.run(
        [
            'repo',
            dds.repo_dir_arg,
            'import',
            'http://localhost:8000/neo-buffer-0.4.2.tar.gz',
        ],
        cwd=dds.repo_dir,
    )
    assert dds.repo_dir.joinpath('neo-buffer@0.4.2').is_dir()
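

# Note: the http_repo_server fixture above binds a fixed port (4646); the URL given
# to `repo add` in the test below must stay in sync with it.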
def test_repo_add(dds: DDS, http_repo_server):
    dds.repo_dir.mkdir(parents=True, exist_ok=True)
    dds.run([
        'repo',
        dds.repo_dir_arg,
        'add',
        dds.catalog_path_arg,
        'http://localhost:4646',
        '--update',
    ])
    # dds.build_deps(['neo-url@0.2.1'])
    def project_dir_arg(self) -> str:
        return f'--project-dir={self.source_root}'

    @property
    def catalog_path_arg(self) -> str:
        return f'--catalog={self.catalog_path}'

    def build_deps(self, args: proc.CommandLine, *,
                   toolchain: str = None) -> subprocess.CompletedProcess:
        return self.run([
            'build-deps',
            f'--toolchain={toolchain or self.default_builtin_toolchain}',
            self.catalog_path_arg,
            self.repo_dir_arg,
            f'--out={self.deps_build_dir}',
            f'--lmi-path={self.lmi_path}',
            args,