 auto pkg_db = opts.open_pkg_db();
 neo::sqlite3::database_ref db = pkg_db.database();
-auto st = db.prepare("SELECT name, remote_url, db_mtime FROM dds_pkg_remotes");
+auto st = db.prepare("SELECT name, url, db_mtime FROM dds_pkg_remotes");
 auto tups = neo::sqlite3::iter_tuples<std::string, std::string, std::optional<std::string>>(st);
 for (auto [name, remote_url, mtime] : tups) {
     fmt::print("Remote '{}':\n", name);
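A note on the loop above (illustrative, not part of the change): db_mtime is a nullable column in the dds_pkg_remotes schema shown further down, so it is read here as std::optional<std::string>, presumably std::nullopt for a repository that has never been updated. A hypothetical continuation of the loop body could branch on it:

    // Illustrative only: report the last-update time when one is recorded.
    if (mtime) {
        fmt::print("  Last updated: {}\n", *mtime);
    }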
+#include "../options.hpp"
+
+#include <dds/error/nonesuch.hpp>
+#include <dds/pkg/db.hpp>
+#include <dds/pkg/search.hpp>
+#include <dds/util/result.hpp>
+#include <dds/util/string.hpp>
+
+#include <boost/leaf/handle_exception.hpp>
+#include <fansi/styled.hpp>
+#include <fmt/format.h>
+#include <range/v3/view/transform.hpp>
+
+using namespace fansi::literals;
+
+namespace dds::cli::cmd {
+
+static int _pkg_search(const options& opts) {
+    auto cat = opts.open_pkg_db();
+    auto results = *dds::pkg_search(cat.database(), opts.pkg.search.pattern);
+
+    for (pkg_group_search_result const& found : results.found) {
+        fmt::print(
+            "    Name: .bold[{}]\n"
+            "Versions: .bold[{}]\n"
+            "    From: .bold[{}]\n"
+            "          .bold[{}]\n\n"_styled,
+            found.name,
+            joinstr(", ", found.versions | ranges::views::transform(&semver::version::to_string)),
+            found.remote_name,
+            found.description);
+    }
+
+    if (results.found.empty()) {
+        dds_log(error,
+                "There are no packages that match the given pattern \".bold.red[{}]\""_styled,
+                opts.pkg.search.pattern.value_or("*"));
+        write_error_marker("pkg-search-no-result");
+        return 1;
+    }
+
+    return 0;
+}
+
+int pkg_search(const options& opts) {
+    return boost::leaf::try_catch(
+        [&] {
+            try {
+                return _pkg_search(opts);
+            } catch (...) {
+                capture_exception();
+            }
+        },
+        [](e_nonesuch missing) {
+            missing.log_error(
+                "There are no packages that match the given pattern \".bold.red[{}]\""_styled);
+            write_error_marker("pkg-search-no-result");
+            return 1;
+        });
+}
+
+}  // namespace dds::cli::cmd
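(Note: the "pkg-search-no-result" marker written by write_error_marker() above is the same string that the new test_pkg_search test further down asserts on via expect_error_marker('pkg-search-no-result'), so the C++ side and the test suite have to agree on it.)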
 command pkg_repo_update;
 command pkg_repo_ls;
 command pkg_repo_remove;
+command pkg_search;
 command repoman_add;
 command repoman_import;
 command repoman_init;

     }
     neo::unreachable();
 }
+case pkg_subcommand::search:
+    return cmd::pkg_search(opts);
 case pkg_subcommand::_none_:;
 }
 neo::unreachable();

 #include <dds/toolchain/toolchain.hpp>
 #include <debate/enum.hpp>
+#include <fansi/styled.hpp>

 using namespace dds;
 using namespace debate;
+using namespace fansi::literals;

 namespace {

         .name = "repo",
         .help = "Manage package repositories",
     }));
+    setup_pkg_search_cmd(pkg_group.add_parser({
+        .name = "search",
+        .help = "Search for packages available to download",
+    }));
 }

 void setup_pkg_get_cmd(argument_parser& pkg_get_cmd) {

         = "What to do if any of the named repositories do not exist";
 }

+void setup_pkg_search_cmd(argument_parser& pkg_repo_search_cmd) noexcept {
+    pkg_repo_search_cmd.add_argument({
+        .help = std::string(  //
+            "A name or glob-style pattern. Only matching packages will be returned. \n"
+            "Searching is case-insensitive. Only the .italic[name] will be matched (not the \n"
+            "version).\n\nIf this parameter is omitted, the search will return .italic[all] \n"
+            "available packages."_styled),
+        .valname = "<name-or-pattern>",
+        .action  = put_into(opts.pkg.search.pattern),
+    });
+}
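(Note: as wired up above, the new subcommand is invoked as "dds pkg search [<name-or-pattern>]", e.g. dds pkg search 'neo-*'. Per the help text here and the lower(...) GLOB lower(:pattern) query in search.cpp below, matching is case-insensitive glob matching on the package name only, and omitting the pattern lists every available package.)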
 void setup_sdist_cmd(argument_parser& sdist_cmd) noexcept {
     auto& sdist_grp = sdist_cmd.add_subparsers({
         .valname = "<sdist-subcommand>",

 get,
 import,
 repo,
+search,
 };

 /**
     /// Package IDs to download
     std::vector<string> pkgs;
 } get;
+/**
+ * @brief Parameters for 'dds pkg search'
+ */
+struct {
+    /// The search pattern, if provided
+    opt_string pattern;
+} search;
 } pkg;

 struct {

 CREATE TABLE dds_pkg_remotes (
     remote_id INTEGER PRIMARY KEY AUTOINCREMENT,
     name TEXT NOT NULL UNIQUE,
-    remote_url TEXT NOT NULL,
+    url TEXT NOT NULL,
     db_etag TEXT,
     db_mtime TEXT
 );

 void pkg_remote::store(nsql::database_ref db) {
     auto st = db.prepare(R"(
-        INSERT INTO dds_pkg_remotes (name, remote_url)
+        INSERT INTO dds_pkg_remotes (name, url)
         VALUES (?, ?)
         ON CONFLICT (name) DO
-            UPDATE SET remote_url = ?2
+            UPDATE SET url = ?2
     )");
     nsql::exec(st, _name, _base_url.to_string());
 }

 void dds::update_all_remotes(nsql::database_ref db) {
     dds_log(info, "Updating catalog from all remotes");
-    auto repos_st = db.prepare("SELECT name, remote_url, db_etag, db_mtime FROM dds_pkg_remotes");
+    auto repos_st = db.prepare("SELECT name, url, db_etag, db_mtime FROM dds_pkg_remotes");
     auto tups = nsql::iter_tuples<std::string,
                                   std::string,
                                   std::optional<std::string>,
                                   std::optional<std::string>>(repos_st)
         | ranges::to_vector;

-    for (const auto& [name, remote_url, etag, db_mtime] : tups) {
-        DDS_E_SCOPE(e_url_string{remote_url});
-        pkg_remote repo{name, neo::url::parse(remote_url)};
+    for (const auto& [name, url, etag, db_mtime] : tups) {
+        DDS_E_SCOPE(e_url_string{url});
+        pkg_remote repo{name, neo::url::parse(url)};
         repo.update_pkg_db(db, etag, db_mtime);
     }

+#include "./search.hpp"
+
+#include <dds/dym.hpp>
+#include <dds/error/nonesuch.hpp>
+#include <dds/error/result.hpp>
+#include <dds/util/log.hpp>
+#include <dds/util/string.hpp>
+
+#include <neo/sqlite3/database.hpp>
+#include <neo/sqlite3/iter_tuples.hpp>
+#include <range/v3/algorithm/sort.hpp>
+#include <range/v3/range/conversion.hpp>
+#include <range/v3/view/transform.hpp>
+
+using namespace dds;
+
+namespace nsql = neo::sqlite3;
+
+result<pkg_search_results> dds::pkg_search(nsql::database_ref db,
+                                           std::optional<std::string_view> pattern) noexcept {
+    auto search_st = db.prepare(R"(
+        SELECT pkg.name,
+               group_concat(version, ';;'),
+               description,
+               remote.name,
+               remote.url
+          FROM dds_pkgs AS pkg
+          JOIN dds_pkg_remotes AS remote USING(remote_id)
+         WHERE lower(pkg.name) GLOB lower(:pattern)
+      GROUP BY pkg.name, remote_id, description
+      ORDER BY remote.name, pkg.name
+    )");
+    // If no pattern, grab _everything_
+    auto final_pattern = pattern.value_or("*");
+    dds_log(debug, "Searching for packages matching pattern '{}'", final_pattern);
+    search_st.bindings()[1] = final_pattern;
+
+    auto rows = nsql::iter_tuples<std::string, std::string, std::string, std::string, std::string>(
+        search_st);
+
+    std::vector<pkg_group_search_result> found;
+    for (auto [name, versions, desc, remote_name, remote_url] : rows) {
+        dds_log(debug,
+                "Found: {} with versions {} (Description: {}) from {} [{}]",
+                name,
+                versions,
+                desc,
+                remote_name,
+                remote_url);
+        auto version_strs = split(versions, ";;");
+        auto versions_semver
+            = version_strs | ranges::views::transform(&semver::version::parse) | ranges::to_vector;
+        ranges::sort(versions_semver);
+        found.push_back(pkg_group_search_result{
+            .name = name,
+            .versions = versions_semver,
+            .description = desc,
+            .remote_name = remote_name,
+        });
+    }
+
+    if (found.empty()) {
+        return boost::leaf::new_error([&] {
+            auto names_st = db.prepare("SELECT DISTINCT name from dds_pkgs");
+            auto tups = nsql::iter_tuples<std::string>(names_st);
+            auto names_vec = tups | ranges::views::transform([](auto&& row) {
+                                 auto [name] = row;
+                                 return name;
+                             })
+                | ranges::to_vector;
+            auto nearest = dds::did_you_mean(final_pattern, names_vec);
+            return e_nonesuch{final_pattern, nearest};
+        });
+    }
+
+    return pkg_search_results{.found = std::move(found)};
+}
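For orientation, here is a minimal caller-side sketch of the API defined above, mirroring how the 'dds pkg search' command handler consumes it; the function name print_matches and its surrounding setup are illustrative assumptions, not part of this change:

    #include <dds/pkg/search.hpp>
    #include <neo/sqlite3/database.hpp>
    #include <fmt/format.h>

    // 'db' is assumed to be an already-open package database handle, e.g.
    // obtained via opts.open_pkg_db().database() as the command handler does.
    void print_matches(neo::sqlite3::database_ref db) {
        // Passing std::nullopt instead would match every package (the pattern defaults to "*").
        auto results = *dds::pkg_search(db, "neo-*");
        for (const dds::pkg_group_search_result& grp : results.found) {
            // 'versions' comes back parsed and sorted ascending by pkg_search().
            fmt::print("{} [{}]: {} version(s)\n", grp.name, grp.remote_name, grp.versions.size());
        }
    }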
+#pragma once
+
+#include <dds/error/result_fwd.hpp>
+
+#include <semver/version.hpp>
+
+#include <optional>
+#include <string_view>
+#include <vector>
+
+namespace neo::sqlite3 {
+class database_ref;
+}  // namespace neo::sqlite3
+
+namespace dds {
+
+struct pkg_group_search_result {
+    std::string name;
+    std::vector<semver::version> versions;
+    std::string description;
+    std::string remote_name;
+};
+
+struct pkg_search_results {
+    std::vector<pkg_group_search_result> found;
+};
+
+result<pkg_search_results> pkg_search(neo::sqlite3::database_ref db,
+                                      std::optional<std::string_view> query) noexcept;
+
+}  // namespace dds

     return strings;
 }

+template <typename Range>
+inline std::string joinstr(std::string_view joiner, Range&& rng) {
+    auto iter = std::begin(rng);
+    auto end = std::end(rng);
+    std::string ret;
+    while (iter != end) {
+        ret.append(*iter);
+        ++iter;
+        if (iter != end) {
+            ret.append(joiner);
+        }
+    }
+    return ret;
+}
+
 } // namespace string_utils

 } // namespace dds
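A quick usage sketch for the joinstr() helper added above (illustrative only; the dds:: qualification assumes string_utils is an inline namespace, which its unqualified use in pkg_search.cpp suggests):

    std::vector<std::string> parts{"1.0.0", "1.1.0", "2.0.0"};
    std::string joined = dds::joinstr(", ", parts);  // yields "1.0.0, 1.1.0, 2.0.0"

It accepts any forward range of string-like elements, which is why the command code can feed it the ranges::views::transform(...) view of semver versions directly.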
 import pytest

-from dds_ci.testing import RepoFixture, Project
+from dds_ci.testing import RepoServer, Project

 SIMPLE_CATALOG = {
     "packages": {

 @pytest.fixture()
-def test_repo(http_repo: RepoFixture) -> RepoFixture:
+def test_repo(http_repo: RepoServer) -> RepoServer:
     http_repo.import_json_data(SIMPLE_CATALOG)
     return http_repo

 @pytest.fixture()
-def test_project(tmp_project: Project, test_repo: RepoFixture) -> Project:
+def test_project(tmp_project: Project, test_repo: RepoServer) -> Project:
     tmp_project.dds.repo_add(test_repo.url)
     return tmp_project

 from dds_ci.dds import DDSWrapper
-from dds_ci.testing import Project, RepoFixture, PackageJSON
+from dds_ci.testing import Project, RepoServer, PackageJSON
 from dds_ci.testing.error import expect_error_marker
+from dds_ci.testing.http import HTTPRepoServerFactory, RepoServer
+
+import pytest

 NEO_SQLITE_PKG_JSON = {
     'packages': {
 }

-def test_pkg_get(http_repo: RepoFixture, tmp_project: Project) -> None:
-    http_repo.import_json_data(NEO_SQLITE_PKG_JSON)
-    tmp_project.dds.repo_add(http_repo.url)
+@pytest.fixture(scope='session')
+def _test_repo(http_repo_factory: HTTPRepoServerFactory) -> RepoServer:
+    srv = http_repo_factory('test-pkg-db-repo')
+    srv.import_json_data(NEO_SQLITE_PKG_JSON)
+    return srv
+
+
+def test_pkg_get(_test_repo: RepoServer, tmp_project: Project) -> None:
+    _test_repo.import_json_data(NEO_SQLITE_PKG_JSON)
+    tmp_project.dds.repo_add(_test_repo.url)
     tmp_project.dds.pkg_get('neo-sqlite3@0.3.0')
     assert tmp_project.root.joinpath('neo-sqlite3@0.3.0').is_dir()
     assert tmp_project.root.joinpath('neo-sqlite3@0.3.0/package.jsonc').is_file()

-def test_pkg_repo(http_repo: RepoFixture, tmp_project: Project) -> None:
+def test_pkg_repo(_test_repo: RepoServer, tmp_project: Project) -> None:
     dds = tmp_project.dds
-    dds.repo_add(http_repo.url)
+    dds.repo_add(_test_repo.url)
     dds.run(['pkg', 'repo', dds.catalog_path_arg, 'ls'])

-def test_pkg_repo_rm(http_repo: RepoFixture, tmp_project: Project) -> None:
-    http_repo.import_json_data(NEO_SQLITE_PKG_JSON)
+def test_pkg_repo_rm(_test_repo: RepoServer, tmp_project: Project) -> None:
+    _test_repo.import_json_data(NEO_SQLITE_PKG_JSON)
     dds = tmp_project.dds
-    dds.repo_add(http_repo.url)
+    dds.repo_add(_test_repo.url)
     # Okay:
     tmp_project.dds.pkg_get('neo-sqlite3@0.3.0')
     # Remove the repo:
     dds.run(['pkg', dds.catalog_path_arg, 'repo', 'ls'])
-    dds.repo_remove(http_repo.repo_name)
+    dds.repo_remove(_test_repo.repo_name)
     # Cannot double-remove a repo:
     with expect_error_marker('repo-rm-no-such-repo'):
-        dds.repo_remove(http_repo.repo_name)
+        dds.repo_remove(_test_repo.repo_name)
     # Now, fails:
     with expect_error_marker('pkg-get-no-pkg-id-listing'):
         tmp_project.dds.pkg_get('neo-sqlite3@0.3.0')

+def test_pkg_search(_test_repo: RepoServer, tmp_project: Project) -> None:
+    _test_repo.import_json_data(NEO_SQLITE_PKG_JSON)
+    dds = tmp_project.dds
+    with expect_error_marker('pkg-search-no-result'):
+        dds.run(['pkg', dds.catalog_path_arg, 'search'])
+    dds.repo_add(_test_repo.url)
+    dds.run(['pkg', dds.catalog_path_arg, 'search'])
+    dds.run(['pkg', dds.catalog_path_arg, 'search', 'neo-sqlite3'])
+    dds.run(['pkg', dds.catalog_path_arg, 'search', 'neo-*'])
+    with expect_error_marker('pkg-search-no-result'):
+        dds.run(['pkg', dds.catalog_path_arg, 'search', 'nonexistent'])

 from dds_ci.dds import DDSWrapper
 from dds_ci.testing.fixtures import Project
-from dds_ci.testing.http import RepoFixture
+from dds_ci.testing.http import RepoServer
 from dds_ci.testing.error import expect_error_marker
 from pathlib import Path

     dds.run(['repoman', 'remove', tmp_repo, 'neo-fun@0.4.0'])

-def test_pkg_http(http_repo: RepoFixture, tmp_project: Project) -> None:
+def test_pkg_http(http_repo: RepoServer, tmp_project: Project) -> None:
     tmp_project.dds.run([
         'repoman', '-ltrace', 'add', http_repo.server.root, 'neo-fun@0.4.0',
         'https://github.com/vector-of-bool/neo-fun/archive/0.4.0.tar.gz?__dds_strpcmp=1'

 import pytest

-from dds_ci.testing import RepoFixture, Project
+from dds_ci.testing import RepoServer, Project
 from dds_ci import proc, toolchain, paths

 CRYPTOPP_JSON = {

 @pytest.mark.skipif(platform.system() == 'FreeBSD', reason='This one has trouble running on FreeBSD')
-def test_get_build_use_cryptopp(test_parent_dir: Path, tmp_project: Project, http_repo: RepoFixture) -> None:
+def test_get_build_use_cryptopp(test_parent_dir: Path, tmp_project: Project, http_repo: RepoServer) -> None:
     http_repo.import_json_data(CRYPTOPP_JSON)
     tmp_project.dds.repo_add(http_repo.url)
     tmp_project.package_json = {

 from pathlib import Path

-from dds_ci.testing import RepoFixture, ProjectOpener
+from dds_ci.testing import RepoServer, ProjectOpener
 from dds_ci import proc, paths, toolchain

-def test_get_build_use_spdlog(test_parent_dir: Path, project_opener: ProjectOpener, http_repo: RepoFixture) -> None:
+def test_get_build_use_spdlog(test_parent_dir: Path, project_opener: ProjectOpener, http_repo: RepoServer) -> None:
     proj = project_opener.open('project')
     http_repo.import_json_file(proj.root / 'catalog.json')
     proj.dds.repo_add(http_repo.url)

 from .fixtures import Project, ProjectOpener, PackageJSON, LibraryJSON
-from .http import RepoFixture
+from .http import RepoServer

 __all__ = (
     'Project',
     'ProjectOpener',
     'PackageJSON',
     'LibraryJSON',
-    'RepoFixture',
+    'RepoServer',
 )

 from pathlib import Path
-from contextlib import contextmanager
+import socket
+from contextlib import contextmanager, ExitStack, closing
 import json
 from http.server import SimpleHTTPRequestHandler, HTTPServer
-from typing import NamedTuple, Any, Iterator
+from typing import NamedTuple, Any, Iterator, Callable
 from concurrent.futures import ThreadPoolExecutor
 from functools import partial
 import tempfile

 import pytest
 from _pytest.fixtures import FixtureRequest
+from _pytest.tmpdir import TempPathFactory
+
+from dds_ci.dds import DDSWrapper
+
+
+def _unused_tcp_port() -> int:
+    """Find an unused localhost TCP port from 1024-65535 and return it."""
+    with closing(socket.socket()) as sock:
+        sock.bind(('127.0.0.1', 0))
+        return sock.getsockname()[1]


 class DirectoryServingHTTPRequestHandler(SimpleHTTPRequestHandler):

         httpd.shutdown()

-@pytest.fixture()
-def http_tmp_dir_server(tmp_path: Path, unused_tcp_port: int) -> Iterator[ServerInfo]:
+HTTPServerFactory = Callable[[Path], ServerInfo]
+
+
+@pytest.fixture(scope='session')
+def http_server_factory(request: FixtureRequest) -> HTTPServerFactory:
     """
-    Creates an HTTP server that serves the contents of a new
-    temporary directory.
+    Spawn an HTTP server that serves the content of a directory.
     """
-    with run_http_server(tmp_path, unused_tcp_port) as s:
-        yield s
+    def _make(p: Path) -> ServerInfo:
+        st = ExitStack()
+        server = st.enter_context(run_http_server(p, _unused_tcp_port()))
+        request.addfinalizer(st.pop_all)
+        return server
+
+    return _make


-class RepoFixture:
+class RepoServer:
     """
     A fixture handle to a dds HTTP repository, including a path and URL.
     """

         ])

+RepoFactory = Callable[[str], Path]
+
+
+@pytest.fixture(scope='session')
+def repo_factory(tmp_path_factory: TempPathFactory, dds: DDSWrapper) -> RepoFactory:
+    def _make(name: str) -> Path:
+        tmpdir = tmp_path_factory.mktemp('test-repo-')
+        dds.run(['repoman', 'init', tmpdir, f'--name={name}'])
+        return tmpdir
+
+    return _make
+
+
+HTTPRepoServerFactory = Callable[[str], RepoServer]
+
+
+@pytest.fixture(scope='session')
+def http_repo_factory(dds_exe: Path, repo_factory: RepoFactory,
+                      http_server_factory: HTTPServerFactory) -> HTTPRepoServerFactory:
+    """
+    Fixture factory that creates new repositories with an HTTP server for them.
+    """
+    def _make(name: str) -> RepoServer:
+        repo_dir = repo_factory(name)
+        server = http_server_factory(repo_dir)
+        return RepoServer(dds_exe, server, name)
+
+    return _make


 @pytest.fixture()
-def http_repo(dds_exe: Path, http_tmp_dir_server: ServerInfo, request: FixtureRequest) -> Iterator[RepoFixture]:
+def http_repo(http_repo_factory: HTTPRepoServerFactory, request: FixtureRequest) -> RepoServer:
     """
     Fixture that creates a new empty dds repository and an HTTP server to serve
     it.
     """
-    name = f'test-repo-{request.function.__name__}'
-    subprocess.check_call([str(dds_exe), 'repoman', 'init', str(http_tmp_dir_server.root), f'--name={name}'])
-    yield RepoFixture(dds_exe, http_tmp_dir_server, repo_name=name)
+    return http_repo_factory(f'test-repo-{request.function.__name__}')