    }
}

void print_remote_info(std::monostate) {
    std::cout << "THIS ENTRY IS MISSING REMOTE INFORMATION!\n";
}

int run() {
    auto pk_id = dds::package_id::parse(ident.Get());
    auto cat   = cat_path.open();
| #include "./catalog.hpp" | #include "./catalog.hpp" | ||||
| #include "./import.hpp" | |||||
| #include <dds/dym.hpp> | #include <dds/dym.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/solve/solve.hpp> | #include <dds/solve/solve.hpp> | ||||
| #include <neo/assert.hpp> | |||||
| #include <neo/sqlite3/exec.hpp> | #include <neo/sqlite3/exec.hpp> | ||||
| #include <neo/sqlite3/iter_tuples.hpp> | #include <neo/sqlite3/iter_tuples.hpp> | ||||
| #include <neo/sqlite3/single.hpp> | #include <neo/sqlite3/single.hpp> | ||||
    return ret;
}

std::optional<dds::repo_transform::copy_move> parse_copy_move_transform(nlohmann::json copy) {
    if (copy.is_null()) {
        return std::nullopt;
    }
    check_json(copy.is_object(), "'transform[.]/{copy,move}' must be an object");

    auto from = copy["from"];
    auto to   = copy["to"];
    check_json(from.is_string(),
               "'transform[.]/{copy,move}/from' must be present and must be a string");
    check_json(to.is_string(),
               "'transform[.]/{copy,move}/to' must be present and must be a string");

    dds::repo_transform::copy_move operation;
    operation.from = fs::path(std::string(from));
    operation.to   = fs::path(std::string(to));

    if (operation.from.is_absolute()) {
        throw_user_error<errc::invalid_catalog_json>(
            "The 'from' filepath for a copy/move operation [{}] is an absolute path. These paths "
            "*must* be relative paths only.",
            operation.from.string());
    }
    if (operation.to.is_absolute()) {
        throw_user_error<errc::invalid_catalog_json>(
            "The 'to' filepath for a copy/move operation [{}] is an absolute path. These paths "
            "*must* be relative paths only.",
            operation.to.string());
    }

    operation.include = parse_glob_list(copy["include"], "transform[.]/{copy,move}/include");
    operation.exclude = parse_glob_list(copy["exclude"], "transform[.]/{copy,move}/exclude");

    auto strip_comps = copy["strip_components"];
    if (!strip_comps.is_null()) {
        check_json(strip_comps.is_number_integer() && int(strip_comps) >= 0,
                   "transform[.]/{copy,move}/strip_components must be a non-negative integer");
        operation.strip_components = int(strip_comps);
    }

    return operation;
}

dds::repo_transform parse_transform(nlohmann::json data) {
    assert(data.is_object());
    dds::repo_transform transform;
    transform.copy = parse_copy_move_transform(data["copy"]);
    transform.move = parse_copy_move_transform(data["move"]);
    return transform;
}

nlohmann::json transform_to_json(const dds::repo_transform::copy_move& tr) {
    auto obj       = nlohmann::json::object();
    obj["from"]    = tr.from.string();
    obj["to"]      = tr.to.string();
    obj["include"] = ranges::views::all(tr.include) | ranges::views::transform(&dds::glob::string);
    obj["exclude"] = ranges::views::all(tr.exclude) | ranges::views::transform(&dds::glob::string);
    return obj;
}

nlohmann::json transform_to_json(const struct dds::repo_transform::remove& rm) {
    auto obj    = nlohmann::json::object();
    obj["path"] = rm.path.string();
    obj["only_matching"]
        = ranges::views::all(rm.only_matching) | ranges::views::transform(&dds::glob::string);
    return obj;
}

nlohmann::json transform_to_json(const dds::repo_transform& tr) {
    auto obj = nlohmann::json::object();
    if (tr.copy) {
        obj["copy"] = transform_to_json(*tr.copy);
    }
    if (tr.move) {
        obj["move"] = transform_to_json(*tr.move);
    }
    if (tr.remove) {
        obj["remove"] = transform_to_json(*tr.remove);
    }
    return obj;
}

std::string transform_to_json(const std::vector<dds::repo_transform>& trs) {
    auto arr = nlohmann::json::array();
    for (auto& tr : trs) {
        arr.push_back(transform_to_json(tr));
    }
    return to_string(arr);
}
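// Illustration (hypothetical values, not from the original sources): a repo_transform with only
// a 'copy' entry serializes through the helpers above to something like
//   [{"copy": {"from": "data", "to": "src/data", "include": ["*.txt"], "exclude": []}}]
// Note that transform_to_json() as written does not round-trip 'strip_components'.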
}  // namespace

catalog catalog::open(const std::string& db_path) {

catalog::catalog(sqlite3::database db)
    : _db(std::move(db)) {}

void catalog::_store_pkg(const package_info& pkg, std::monostate) {
    neo_assert_always(
        invariant,
        false,
        "There was an attempt to insert a package listing into the database where that package "
        "listing does not have a remote listing. If you see this message, it is a dds bug.",
        pkg.ident.to_string());
}
| void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git) { | void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git) { | ||||
| auto lm_usage = git.auto_lib.value_or(lm::usage{}); | auto lm_usage = git.auto_lib.value_or(lm::usage{}); | ||||
| sqlite3::exec( // | sqlite3::exec( // | ||||
| git.ref, | git.ref, | ||||
| lm_usage.name, | lm_usage.name, | ||||
| lm_usage.namespace_, | lm_usage.namespace_, | ||||
| pkg.description, | |||||
| transform_to_json(pkg.transforms))); | |||||
| pkg.description | |||||
| //, transform_to_json(pkg.transforms)) | |||||
| )); | |||||
| } | } | ||||
| void catalog::store(const package_info& pkg) { | void catalog::store(const package_info& pkg) { | ||||
| check_json(tr_json.is_array(), | check_json(tr_json.is_array(), | ||||
| fmt::format("Database record for {} has an invalid 'repo_transform' field", | fmt::format("Database record for {} has an invalid 'repo_transform' field", | ||||
| pkg_id)); | pkg_id)); | ||||
| for (const auto& el : tr_json) { | |||||
| info.transforms.push_back(parse_transform(el)); | |||||
| } | |||||
| /// XXX: | |||||
| // for (const auto& el : tr_json) { | |||||
| // info.transforms.push_back(parse_transform(el)); | |||||
| // } | |||||
| } | } | ||||
| return info; | return info; | ||||
| } | } | ||||
| } | } | ||||
void catalog::import_json_str(std::string_view content) {
    using nlohmann::json;
    auto root = json::parse(content);
    check_json(root.is_object(), "Root of JSON must be an object (key-value mapping)");
    auto version = root["version"];
    check_json(version.is_number_integer(), "/version must be an integral value");
    check_json(version <= 1, "/version is too new. We don't know how to parse this.");
    auto packages = root["packages"];
    check_json(packages.is_object(), "/packages must be an object");
    auto pkgs = parse_packages_json(content);

    sqlite3::transaction_guard tr{_db};
| for (const auto& [pkg_name_, versions_map] : packages.items()) { | |||||
| std::string pkg_name = pkg_name_; | |||||
| check_json(versions_map.is_object(), | |||||
| fmt::format("/packages/{} must be an object", pkg_name)); | |||||
| for (const auto& [version_, pkg_info] : versions_map.items()) { | |||||
| auto version = semver::version::parse(version_); | |||||
| check_json(pkg_info.is_object(), | |||||
| fmt::format("/packages/{}/{} must be an object", pkg_name, version_)); | |||||
| package_info info{{pkg_name, version}, {}, {}, {}, {}}; | |||||
| auto deps = pkg_info["depends"]; | |||||
| if (!deps.is_null()) { | |||||
| check_json(deps.is_object(), | |||||
| fmt::format("/packages/{}/{}/depends must be an object", | |||||
| pkg_name, | |||||
| version_)); | |||||
| for (const auto& [dep_name, dep_version] : deps.items()) { | |||||
| check_json(dep_version.is_string(), | |||||
| fmt::format("/packages/{}/{}/depends/{} must be a string", | |||||
| pkg_name, | |||||
| version_, | |||||
| dep_name)); | |||||
| auto range = semver::range::parse(std::string(dep_version)); | |||||
| info.deps.push_back({ | |||||
| std::string(dep_name), | |||||
| {range.low(), range.high()}, | |||||
| }); | |||||
| } | |||||
| } | |||||
| auto git_remote = pkg_info["git"]; | |||||
| if (!git_remote.is_null()) { | |||||
| check_json(git_remote.is_object(), "`git` must be an object"); | |||||
| std::string url = git_remote["url"]; | |||||
| std::string ref = git_remote["ref"]; | |||||
| auto lm_usage = git_remote["auto-lib"]; | |||||
| std::optional<lm::usage> autolib; | |||||
| if (!lm_usage.is_null()) { | |||||
| autolib = lm::split_usage_string(std::string(lm_usage)); | |||||
| } | |||||
| info.remote = git_remote_listing{url, ref, autolib}; | |||||
| } else { | |||||
| throw_user_error<errc::no_catalog_remote_info>("No remote info for /packages/{}/{}", | |||||
| pkg_name, | |||||
| version_); | |||||
| } | |||||
| auto transforms = pkg_info["transform"]; | |||||
| if (!transforms.is_null()) { | |||||
| check_json(transforms.is_array(), "`transform` must be an array of objects"); | |||||
| for (nlohmann::json const& el : transforms) { | |||||
| check_json(el.is_object(), "Each element of `transform` must be an object"); | |||||
| info.transforms.emplace_back(parse_transform(el)); | |||||
| } | |||||
| } | |||||
| auto desc_ = pkg_info["description"]; | |||||
| if (!desc_.is_null()) { | |||||
| check_json(desc_.is_string(), "`description` must be a string"); | |||||
| info.description = desc_; | |||||
| } | |||||
| store(info); | |||||
| } | |||||
| for (const auto& pkg : pkgs) { | |||||
| store(pkg); | |||||
| } | } | ||||
| } | } |
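// A hedged sketch (not part of the original file) of a document that import_json_str() accepts,
// reconstructed from the checks above. The catalog reference and all names/URLs are hypothetical,
// and the 'depends' range syntax, the 'auto-lib' "namespace/name" form, and the glob-list shape
// under 'include' are assumptions about parse helpers not shown here.
void example_import(dds::catalog& cat) {
    cat.import_json_str(R"({
        "version": 1,
        "packages": {
            "acme-widgets": {
                "1.2.3": {
                    "description": "An example catalog entry",
                    "depends": {"acme-base": "2.0.0"},
                    "git": {
                        "url": "https://example.com/acme-widgets.git",
                        "ref": "1.2.3",
                        "auto-lib": "acme/widgets"
                    },
                    "transform": [
                        {"copy": {"from": "data", "to": "src/data", "include": ["*.hpp"]}}
                    ]
                }
            }
        }
    })");
}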
#pragma once

#include <dds/catalog/git.hpp>
#include <dds/deps.hpp>
#include <dds/package/id.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/glob.hpp>

#include "./package_info.hpp"

#include <neo/sqlite3/database.hpp>
#include <neo/sqlite3/statement.hpp>
#include <neo/sqlite3/statement_cache.hpp>

namespace dds {

struct repo_transform {
    struct copy_move {
        fs::path from;
        fs::path to;
        int      strip_components = 0;

        std::vector<dds::glob> include;
        std::vector<dds::glob> exclude;
    };

    struct remove {
        fs::path path;

        std::vector<dds::glob> only_matching;
    };

    std::optional<copy_move> copy;
    std::optional<copy_move> move;
    std::optional<remove>    remove;
};

struct package_info {
    package_id              ident;
    std::vector<dependency> deps;
    std::string             description;

    std::variant<git_remote_listing> remote;

    std::vector<repo_transform> transforms;
};

class catalog {
    neo::sqlite3::database                _db;
    mutable neo::sqlite3::statement_cache _stmt_cache{_db};

    catalog(const catalog&) = delete;

    void _store_pkg(const package_info&, const git_remote_listing&);
    void _store_pkg(const package_info&, std::monostate);

public:
    catalog(catalog&&) = default;
        {},
        "example",
        dds::git_remote_listing{"http://example.com", "master", std::nullopt},
        {},
    });
    auto pkgs = db.by_name("foo");

        {},
        "example",
        dds::git_remote_listing{"http://example.com", "develop", std::nullopt},
        {},
    }));
    // The previous pkg_id is still a valid lookup key
    info = db.get(pkgs[0]);

        },
        "example",
        dds::git_remote_listing{"http://example.com", "master", std::nullopt},
        {},
    });
    auto pkgs = db.by_name("foo");
    REQUIRE(pkgs.size() == 1);
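// A hedged reconstruction (the surrounding test code is elided in this excerpt; the db object,
// package name, and version are taken from the visible fragments): a full store() call now
// carries a trailing {} for the new transforms member of package_info.
db.store(dds::package_info{
    dds::package_id{"foo", semver::version::parse("1.2.3")},
    {},
    "example",
    dds::git_remote_listing{"http://example.com", "master", std::nullopt},
    {},
});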
| #include <dds/catalog/catalog.hpp> | #include <dds/catalog/catalog.hpp> | ||||
| #include <dds/error/errors.hpp> | #include <dds/error/errors.hpp> | ||||
| #include <dds/proc.hpp> | |||||
| #include <neo/assert.hpp> | |||||
| #include <nlohmann/json.hpp> | #include <nlohmann/json.hpp> | ||||
| #include <range/v3/algorithm/all_of.hpp> | #include <range/v3/algorithm/all_of.hpp> | ||||
| #include <range/v3/algorithm/any_of.hpp> | #include <range/v3/algorithm/any_of.hpp> | ||||
| namespace { | namespace { | ||||
| enum operation { move, copy }; | |||||
| void apply_copy(const dds::repo_transform::copy_move& copy, path_ref root, operation op) { | |||||
| auto copy_src = fs::weakly_canonical(root / copy.from); | |||||
| auto copy_dest = fs::weakly_canonical(root / copy.to); | |||||
| if (fs::relative(copy_src, root).generic_string().find("../") == 0) { | |||||
| throw std::runtime_error( | |||||
| fmt::format("A copy_src ends up copying from outside the root. (Relative path was " | |||||
| "[{}], resolved path was [{}])", | |||||
| copy.from.string(), | |||||
| copy_src.string())); | |||||
| } | |||||
    if (fs::relative(copy_dest, root).generic_string().find("../") == 0) {
        throw std::runtime_error(
            fmt::format("A copy_dest ends up copying to a destination outside the root. (Relative "
                        "path was [{}], resolved path was [{}])",
                        copy.to.string(),
                        copy_dest.string()));
    }
    if (fs::is_regular_file(copy_src)) {
        // Just copying a single file? Okay.
        if (op == move) {
            safe_rename(copy_src, copy_dest);
        } else {
            fs::copy_file(copy_src, copy_dest, fs::copy_options::overwrite_existing);
        }
        return;
    }

    auto f_iter = fs::recursive_directory_iterator(copy_src);
    for (auto item : f_iter) {
        auto relpath      = fs::relative(item, copy_src);
        auto matches_glob = [&](auto glob) { return glob.match(relpath.string()); };
        auto included     = ranges::all_of(copy.include, matches_glob);
        auto excluded     = ranges::any_of(copy.exclude, matches_glob);
        if (!included || excluded) {
            continue;
        }
        auto n_components = ranges::distance(relpath);
        if (n_components <= copy.strip_components) {
            continue;
        }
        auto it = relpath.begin();
        std::advance(it, copy.strip_components);
        relpath = ranges::accumulate(it, relpath.end(), fs::path(), std::divides<>());

        auto dest = copy_dest / relpath;
        fs::create_directories(dest.parent_path());
        if (item.is_directory()) {
            fs::create_directories(dest);
        } else {
            if (op == move) {
                safe_rename(item, dest);
            } else {
                fs::copy_file(item, dest, fs::copy_options::overwrite_existing);
            }
        }
    }
}
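// Illustration (hypothetical paths, not part of the original sources): with strip_components = 1,
// an entry matched at "include/acme/widget.hpp" relative to the copy source is re-rooted as
// "acme/widget.hpp" under the destination, while an entry with only one path component is
// skipped entirely by the n_components check above.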
void apply_remove(const struct dds::repo_transform::remove& rm, path_ref root) {
    const auto item = fs::weakly_canonical(root / rm.path);
    if (fs::relative(item, root).generic_string().find("../") == 0) {
        throw std::runtime_error(fmt::format(
            "A 'remove' ends up removing files from outside the root. (Relative path was "
            "[{}], resolved path was [{}])",
            rm.path.string(),
            item.string()));
    }

    if (!rm.only_matching.empty()) {
        if (!fs::is_directory(item)) {
            throw std::runtime_error(
                fmt::format("A 'remove' item has an 'only_matching' pattern list, but the named "
                            "path is not a directory [{}]",
                            item.string()));
        }
        for (auto glob : rm.only_matching) {
            for (auto rm_item : glob.scan_from(item)) {
                fs::remove_all(rm_item);
            }
        }
    } else {
        fs::remove_all(item);
    }
}
| void apply_transform(const dds::repo_transform& transform, path_ref root) { | |||||
| if (transform.copy) { | |||||
| apply_copy(*transform.copy, root, copy); | |||||
| } | |||||
| if (transform.move) { | |||||
| apply_copy(*transform.move, root, move); | |||||
| } | |||||
| if (transform.remove) { | |||||
| apply_remove(*transform.remove, root); | |||||
| } | |||||
| temporary_sdist do_pull_sdist(const package_info& listing, std::monostate) { | |||||
| neo_assert_always( | |||||
| invariant, | |||||
| false, | |||||
| "A package listing in the catalog has no defined remote from which to pull. This " | |||||
| "shouldn't happen in normal usage. This will occur if the database has been " | |||||
| "manually altered, or if DDS has a bug.", | |||||
| listing.ident.to_string()); | |||||
| } | } | ||||
| temporary_sdist do_pull_sdist(const package_info& listing, const git_remote_listing& git) { | temporary_sdist do_pull_sdist(const package_info& listing, const git_remote_listing& git) { | ||||
| auto tmpdir = dds::temporary_dir::create(); | auto tmpdir = dds::temporary_dir::create(); | ||||
| using namespace std::literals; | |||||
| spdlog::info("Cloning Git repository: {} [{}] ...", git.url, git.ref); | spdlog::info("Cloning Git repository: {} [{}] ...", git.url, git.ref); | ||||
| auto command = {"git"s, | |||||
| "clone"s, | |||||
| "--depth=1"s, | |||||
| "--branch"s, | |||||
| git.ref, | |||||
| git.url, | |||||
| tmpdir.path().generic_string()}; | |||||
| auto git_res = run_proc(command); | |||||
| if (!git_res.okay()) { | |||||
| throw_external_error<errc::git_clone_failure>( | |||||
| "Git clone operation failed [Git command: {}] [Exitted {}]:\n{}", | |||||
| quote_command(command), | |||||
| git_res.retc, | |||||
| git_res.output); | |||||
| } | |||||
| git.clone(tmpdir.path()); | |||||
| /// XXX: | |||||
| // for (const auto& tr : listing.transforms) { | |||||
| // tr.apply_to(tmpdir.path()); | |||||
| // } | |||||
| spdlog::info("Create sdist from clone ..."); | spdlog::info("Create sdist from clone ..."); | ||||
| if (git.auto_lib.has_value()) { | if (git.auto_lib.has_value()) { | ||||
| spdlog::info("Generating library data automatically"); | spdlog::info("Generating library data automatically"); | ||||
| lib_strm << nlohmann::to_string(lib_json); | lib_strm << nlohmann::to_string(lib_json); | ||||
| } | } | ||||
| for (const auto& tr : listing.transforms) { | |||||
| apply_transform(tr, tmpdir.path()); | |||||
| } | |||||
| sdist_params params; | sdist_params params; | ||||
| params.project_dir = tmpdir.path(); | params.project_dir = tmpdir.path(); | ||||
| auto sd_tmp_dir = dds::temporary_dir::create(); | auto sd_tmp_dir = dds::temporary_dir::create(); |
| #include "./import.hpp" | |||||
| #include <dds/error/errors.hpp> | |||||
| #include <json5/parse_data.hpp> | |||||
| #include <neo/assert.hpp> | |||||
| #include <semester/decomp.hpp> | |||||
| #include <spdlog/fmt/fmt.h> | |||||
| #include <optional> | |||||
| using namespace dds; | |||||
| template <typename... Args> | |||||
| struct any_key { | |||||
| semester::try_seq<Args...> _seq; | |||||
| std::string_view& _key; | |||||
| any_key(std::string_view& key_var, Args&&... args) | |||||
| : _seq(NEO_FWD(args)...) | |||||
| , _key{key_var} {} | |||||
| template <typename Data> | |||||
| semester::dc_result_t operator()(std::string_view key, Data&& dat) const { | |||||
| _key = key; | |||||
| return _seq.invoke(dat); | |||||
| } | |||||
| }; | |||||
| template <typename... Args> | |||||
| any_key(std::string_view, Args&&...) -> any_key<Args&&...>; | |||||
| namespace { | |||||
| semester::dc_result_t reject(std::string s) { return semester::dc_reject_t{s}; } | |||||
| semester::dc_result_t pass = semester::dc_pass; | |||||
| semester::dc_result_t accept = semester::dc_accept; | |||||
| using require_obj = semester::require_type<json5::data::mapping_type>; | |||||
| auto reject_unknown_key(std::string_view path) { | |||||
| return [path = std::string(path)](auto key, auto&&) { // | |||||
| return reject(fmt::format("{}: unknown key '{}'", path, key)); | |||||
| }; | |||||
| }; | |||||
| std::vector<dependency> parse_deps_json_v1(const json5::data& deps, std::string_view path) { | |||||
| std::vector<dependency> acc_deps; | |||||
| std::string_view dep_name; | |||||
| std::string_view dep_version_range_str; | |||||
| using namespace semester::decompose_ops; | |||||
| auto result = semester::decompose( // | |||||
| deps, | |||||
| mapping{any_key{ | |||||
| dep_name, | |||||
| [&](auto&& range_str) { | |||||
| if (!range_str.is_string()) { | |||||
| throw_user_error< | |||||
| errc::invalid_catalog_json>("{}/{} should be a string version range", | |||||
| path, | |||||
| dep_name); | |||||
| } | |||||
| try { | |||||
| auto rng = semver::range::parse_restricted(range_str.as_string()); | |||||
| acc_deps.push_back(dependency{std::string{dep_name}, {rng.low(), rng.high()}}); | |||||
| return accept; | |||||
| } catch (const semver::invalid_range&) { | |||||
| throw_user_error<errc::invalid_version_range_string>( | |||||
| "Invalid version range string '{}' at {}/{}", | |||||
| range_str.as_string(), | |||||
| path, | |||||
| dep_name); | |||||
| } | |||||
| }, | |||||
| }}); | |||||
| neo_assert(invariant, | |||||
| std::holds_alternative<semester::dc_accept_t>(result), | |||||
| "Parsing dependency object did not accept??"); | |||||
| return acc_deps; | |||||
| } | |||||
package_info parse_pkg_json_v1(std::string_view   name,
                               semver::version    version,
                               std::string_view   path,
                               const json5::data& pkg) {
    using namespace semester::decompose_ops;
    package_info ret;
    ret.ident = package_id{std::string{name}, version};

    auto result = semester::decompose(  //
        pkg,
        mapping{if_key{"description",
                       require_type<std::string>{
                           fmt::format("{}/description should be a string", path)},
                       put_into{ret.description}},
                if_key{"depends",
                       require_obj{fmt::format("{}/depends must be a JSON object", path)},
                       [&](auto&& dep_obj) {
                           ret.deps = parse_deps_json_v1(dep_obj, fmt::format("{}/depends", path));
                           return accept;
                       }},
                if_key{
                    "git",
                    require_obj{fmt::format("{}/git must be a JSON object", path)},
                    [&](auto&& git_obj) {
                        git_remote_listing git_remote;
                        auto               r = semester::decompose(
                            git_obj,
                            mapping{
                                if_key{"url", put_into{git_remote.url}},
                                if_key{"ref", put_into{git_remote.ref}},
                                if_key{"auto-lib",
                                       require_type<std::string>{
                                           fmt::format("{}/git/auto-lib must be a string", path)},
                                       [&](auto&& al) {
                                           git_remote.auto_lib
                                               = lm::split_usage_string(al.as_string());
                                           return accept;
                                       }},
                                reject_unknown_key(std::string(path) + "/git"),
                            });
                        if (git_remote.url.empty() || git_remote.ref.empty()) {
                            throw_user_error<errc::invalid_catalog_json>(
                                "{}/git requires both 'url' and 'ref' non-empty string properties",
                                path);
                        }
                        ret.remote = git_remote;
                        return r;
                    },
                },
                reject_unknown_key(path)});
| if (std::holds_alternative<std::monostate>(ret.remote)) { | |||||
| throw_user_error< | |||||
| errc::invalid_catalog_json>("{}: Requires a remote listing (e.g. a 'git' proprety).", | |||||
| path); | |||||
| } | |||||
| return ret; | |||||
| } | |||||
| std::vector<package_info> parse_json_v1(const json5::data& data) { | |||||
| using namespace semester::decompose_ops; | |||||
| auto packages_it = data.as_object().find("packages"); | |||||
| if (packages_it == data.as_object().end() || !packages_it->second.is_object()) { | |||||
| throw_user_error<errc::invalid_catalog_json>( | |||||
| "Root JSON object requires a 'packages' property"); | |||||
| } | |||||
| std::vector<package_info> acc_pkgs; | |||||
| std::string_view pkg_name; | |||||
| std::string_view pkg_version_str; | |||||
| auto result = semester::decompose( | |||||
| data, | |||||
| mapping{ | |||||
| // Ignore the "version" key at this level | |||||
| if_key{"version", just_accept}, | |||||
| if_key{ | |||||
| "packages", | |||||
| mapping{any_key{ | |||||
| pkg_name, | |||||
| [&](auto&& entry) { | |||||
| if (!entry.is_object()) { | |||||
| return reject( | |||||
| fmt::format("/packages/{} must be a JSON object", pkg_name)); | |||||
| } | |||||
| return pass; | |||||
| }, | |||||
| mapping{any_key{ | |||||
| pkg_version_str, | |||||
| [&](auto&& pkg_def) { | |||||
| semver::version version; | |||||
| try { | |||||
| version = semver::version::parse(pkg_version_str); | |||||
| } catch (const semver::invalid_version& e) { | |||||
| throw_user_error<errc::invalid_catalog_json>( | |||||
| "/packages/{} version string '{}' is invalid: {}", | |||||
| pkg_name, | |||||
| pkg_version_str, | |||||
| e.what()); | |||||
| } | |||||
| if (!pkg_def.is_object()) { | |||||
| return reject(fmt::format("/packages/{}/{} must be a JSON object")); | |||||
                            }
                            auto pkg = parse_pkg_json_v1(pkg_name,
                                                         version,
                                                         fmt::format("/packages/{}/{}",
                                                                     pkg_name,
                                                                     pkg_version_str),
                                                         pkg_def);
                            acc_pkgs.emplace_back(std::move(pkg));
                            return accept;
                        },
                    }},
                }},
            },
            reject_unknown_key("/"),
        });
    auto rej = std::get_if<semester::dc_reject_t>(&result);
    if (rej) {
        throw_user_error<errc::invalid_catalog_json>(rej->message);
    }
    return acc_pkgs;
}

}  // namespace

std::vector<package_info> dds::parse_packages_json(std::string_view content) {
    json5::data data;
    try {
        data = json5::parse_data(content);
    } catch (const json5::parse_error& e) {
        throw_user_error<errc::invalid_catalog_json>("JSON5 syntax error: {}", e.what());
    }
    if (!data.is_object()) {
        throw_user_error<errc::invalid_catalog_json>("Root of import JSON must be a JSON object");
    }

    auto& data_obj   = data.as_object();
    auto  version_it = data_obj.find("version");
    if (version_it == data_obj.end() || !version_it->second.is_number()) {
        throw_user_error<errc::invalid_catalog_json>(
            "Root JSON import requires a 'version' property");
    }

    double version = version_it->second.as_number();

    if (version == 1.0) {
        return parse_json_v1(data);
    } else {
        throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'", version);
    }
}
#pragma once

#include "./package_info.hpp"

#include <string_view>
#include <vector>

namespace dds {

std::vector<package_info> parse_packages_json(std::string_view);

}  // namespace dds
| #include "./import.hpp" | |||||
| #include <dds/error/errors.hpp> | |||||
| #include <catch2/catch.hpp> | |||||
| TEST_CASE("An empty import is okay") { | |||||
| // An empty JSON with no packages in it | |||||
| auto pkgs = dds::parse_packages_json("{version: 1, packages: {}}"); | |||||
| CHECK(pkgs.empty()); | |||||
| } | |||||
| TEST_CASE("Valid/invalid package JSON5") { | |||||
| std::string_view bads[] = { | |||||
| // Invalid JSON: | |||||
| "", | |||||
| // Should be an object | |||||
| "[]", | |||||
| // Missing keys | |||||
| "{}", | |||||
| // Missing "packages" | |||||
| "{version: 1}", | |||||
| // Bad version | |||||
| "{version: 1.7, packages: {}}", | |||||
| "{version: [], packages: {}}", | |||||
| "{version: null, packages: {}}", | |||||
| // 'packages' should be an object | |||||
| "{version: 1, packages: []}", | |||||
| "{version: 1, packages: null}", | |||||
| "{version: 1, packages: 4}", | |||||
| "{version: 1, packages: 'lol'}", | |||||
| // Objects in 'packages' should be objects | |||||
| "{version:1, packages:{foo:null}}", | |||||
| "{version:1, packages:{foo:[]}}", | |||||
| "{version:1, packages:{foo:9}}", | |||||
| "{version:1, packages:{foo:'lol'}}", | |||||
        // Objects in 'packages' should have version strings
| "{version:1, packages:{foo:{'lol':{}}}}", | |||||
| "{version:1, packages:{foo:{'1.2':{}}}}", | |||||
| }; | |||||
| for (auto bad : bads) { | |||||
| INFO("Bad: " << bad); | |||||
| CHECK_THROWS_AS(dds::parse_packages_json(bad), | |||||
| dds::user_error<dds::errc::invalid_catalog_json>); | |||||
| } | |||||
| std::string_view goods[] = { | |||||
| // Basic empty: | |||||
| "{version:1, packages:{}}", | |||||
| // No versions for 'foo' is weird, but okay | |||||
| "{version:1, packages:{foo:{}}}", | |||||
| }; | |||||
| for (auto good : goods) { | |||||
| INFO("Parse: " << good); | |||||
| CHECK_NOTHROW(dds::parse_packages_json(good)); | |||||
| } | |||||
| } | |||||
| TEST_CASE("Check a single object") { | |||||
| // An empty JSON with no packages in it | |||||
| auto pkgs = dds::parse_packages_json(R"({ | |||||
| version: 1, | |||||
| packages: { | |||||
| foo: { | |||||
| '1.2.3': { | |||||
| git: { | |||||
| url: 'foo', | |||||
| ref: 'fasdf' | |||||
| } | |||||
| } | |||||
| } | |||||
| } | |||||
| })"); | |||||
| CHECK(pkgs.size() == 1); | |||||
| CHECK(pkgs[0].ident.name == "foo"); | |||||
| CHECK(pkgs[0].ident.to_string() == "foo@1.2.3"); | |||||
| CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote)); | |||||
| } |
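// A hedged sketch of an additional check, not present in the original test file: a 'depends'
// object should populate package_info::deps. The exact range syntax accepted by
// semver::range::parse_restricted for the '4.5.6' value is an assumption.
TEST_CASE("Dependencies are parsed") {
    auto pkgs = dds::parse_packages_json(R"({
        version: 1,
        packages: {
            foo: {
                '1.2.3': {
                    git: { url: 'foo', ref: 'fasdf' },
                    depends: { bar: '4.5.6' }
                }
            }
        }
    })");
    REQUIRE(pkgs.size() == 1);
    CHECK(pkgs[0].deps.size() == 1);
}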
#pragma once

#include "./remote/git.hpp"

#include <dds/deps.hpp>
#include <dds/package/id.hpp>
#include <dds/util/fs_transform.hpp>
#include <dds/util/glob.hpp>

#include <optional>
#include <string>
#include <variant>
#include <vector>

namespace dds {

struct package_info {
    package_id              ident;
    std::vector<dependency> deps;
    std::string             description;

    std::variant<std::monostate, git_remote_listing> remote;

    std::vector<fs_transformation> transforms;
};

}  // namespace dds
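// Hedged usage sketch (not part of the original header): elsewhere in this changeset, callers
// dispatch on the 'remote' alternative with one overload per type, roughly:
//
//   void handle(std::monostate) { /* entry carries no remote listing */ }
//   void handle(const dds::git_remote_listing& git) { /* clone git.url at git.ref */ }
//
//   void dispatch(const dds::package_info& info) {
//       std::visit([&](const auto& remote) { handle(remote); }, info.remote);
//   }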
| #include "./git.hpp" | |||||
| #include <dds/error/errors.hpp> | |||||
| #include <dds/proc.hpp> | |||||
| void dds::git_remote_listing::clone(dds::path_ref dest) const { | |||||
| fs::remove_all(dest); | |||||
| using namespace std::literals; | |||||
| auto command = {"git"s, "clone"s, "--depth=1"s, "--branch"s, ref, url, dest.generic_string()}; | |||||
| auto git_res = run_proc(command); | |||||
| if (!git_res.okay()) { | |||||
| throw_external_error<errc::git_clone_failure>( | |||||
| "Git clone operation failed [Git command: {}] [Exitted {}]:\n{}", | |||||
| quote_command(command), | |||||
| git_res.retc, | |||||
| git_res.output); | |||||
| } | |||||
| } |
#pragma once

#include <dds/catalog/get.hpp>
#include <dds/util/fs.hpp>

#include <libman/package.hpp>

#include <optional>
#include <string>

namespace dds {

struct git_remote_listing {
    std::string url;
    std::string ref;

    std::optional<lm::usage> auto_lib;

    void clone(path_ref path) const;
};

}  // namespace dds
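// Hedged usage sketch (mirrors the call site in the sdist-pull code earlier in this changeset;
// the URL and ref here are hypothetical):
//
//   dds::git_remote_listing git{"https://example.com/repo.git", "main", std::nullopt};
//   auto tmpdir = dds::temporary_dir::create();
//   git.clone(tmpdir.path());  // shallow-clones 'ref' of 'url' into the (cleared) directory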
#pragma once

#include "./fs.hpp"
#include "./glob.hpp"

#include <optional>
#include <variant>
#include <vector>

namespace dds {

struct fs_transformation {
    struct copy_move_base {
        fs::path from;
        fs::path to;
        int      strip_components = 0;

        std::vector<dds::glob> include;
        std::vector<dds::glob> exclude;
    };

    struct copy : copy_move_base {};
    struct move : copy_move_base {};

    struct remove {
        fs::path path;

        std::vector<dds::glob> only_matching;
    };

    std::optional<struct copy>   copy;
    std::optional<struct move>   move;
    std::optional<struct remove> remove;

    void apply_to(path_ref root) const;
};

}  // namespace dds
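// Hedged usage sketch (hypothetical paths; not part of the original header): build a
// transformation that relocates a nested directory and prunes test sources, then apply it to an
// extracted package root. Member access uses optional::emplace() to avoid naming the nested
// struct types, which are hidden by the members of the same name.
//
//   dds::fs_transformation tr;
//   tr.move.emplace();
//   tr.move->from = "include/nested";
//   tr.move->to   = "include";
//   tr.remove.emplace();
//   tr.remove->path = "tests";
//   tr.apply_to(extracted_package_root);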