Sfoglia il codice sorgente

New command line interface from the ground up.

default_compile_flags
vector-of-bool 4 anni fa
parent
commit
3153299e20
39 ha cambiato i file con 1775 aggiunte e 5600 eliminazioni
  1. +92
    -1243
      src/dds.main.cpp
  2. +0
    -4283
      src/dds/3rd/args.hxx
  3. +40
    -0
      src/dds/cli/cmd/build.cpp
  4. +44
    -0
      src/dds/cli/cmd/build_common.cpp
  5. +11
    -0
      src/dds/cli/cmd/build_common.hpp
  6. +63
    -0
      src/dds/cli/cmd/build_deps.cpp
  7. +20
    -0
      src/dds/cli/cmd/compile_file.cpp
  8. +71
    -0
      src/dds/cli/cmd/pkg_get.cpp
  9. +58
    -0
      src/dds/cli/cmd/pkg_import.cpp
  10. +61
    -0
      src/dds/cli/cmd/pkg_ls.cpp
  11. +24
    -0
      src/dds/cli/cmd/pkg_repo_add.cpp
  12. +47
    -0
      src/dds/cli/cmd/pkg_repo_err_handle.cpp
  13. +9
    -0
      src/dds/cli/cmd/pkg_repo_err_handle.hpp
  14. +19
    -0
      src/dds/cli/cmd/pkg_repo_update.cpp
  15. +57
    -0
      src/dds/cli/cmd/repoman_import.cpp
  16. +48
    -0
      src/dds/cli/cmd/repoman_init.cpp
  17. +37
    -0
      src/dds/cli/cmd/repoman_ls.cpp
  18. +54
    -0
      src/dds/cli/cmd/repoman_remove.cpp
  19. +24
    -0
      src/dds/cli/cmd/sdist_create.cpp
  20. +91
    -0
      src/dds/cli/dispatch_main.cpp
  21. +9
    -0
      src/dds/cli/dispatch_main.hpp
  22. +44
    -0
      src/dds/cli/error_handler.cpp
  23. +9
    -0
      src/dds/cli/error_handler.hpp
  24. +425
    -0
      src/dds/cli/options.cpp
  25. +242
    -0
      src/dds/cli/options.hpp
  26. +1
    -1
      src/dds/repoman/repoman.cpp
  27. +1
    -1
      src/dds/repoman/repoman.hpp
  28. +1
    -1
      src/dds/util/log.cpp
  29. +1
    -1
      src/dds/util/log.hpp
  30. +7
    -5
      src/dds/util/result.cpp
  31. +1
    -1
      src/dds/util/result.hpp
  32. +4
    -12
      tests/test_basics.py
  33. +2
    -12
      tests/test_catalog.py
  34. +22
    -0
      tests/test_compile_file.py
  35. +59
    -12
      tests/test_sdist.py
  36. +53
    -8
      tools/dds_ci/dds.py
  37. +5
    -11
      tools/dds_ci/proc.py
  38. +18
    -8
      tools/dds_ci/testing/fixtures.py
  39. +1
    -1
      tools/mkrepo.py

+ 92
- 1243
src/dds.main.cpp
File diff soppresso perché troppo grande
Vedi File


+ 0
- 4283
src/dds/3rd/args.hxx
File diff soppresso perché troppo grande
Vedi File


+ 40
- 0
src/dds/cli/cmd/build.cpp Vedi File

@@ -0,0 +1,40 @@
#include "../options.hpp"

#include "./build_common.hpp"

#include <dds/build/builder.hpp>
#include <dds/catalog/catalog.hpp>
#include <dds/error/errors.hpp>
#include <dds/remote/remote.hpp>
#include <dds/toolchain/from_json.hpp>

using namespace dds;

namespace dds::cli::cmd {

/// `dds build`: optionally attach and refresh remote repositories, then
/// build the current project (with its dependencies) into the build dir.
int build(const options& opts) {
    const auto& new_repos = opts.build.add_repos;
    if (!new_repos.empty()) {
        auto cat = opts.open_catalog();
        for (const auto& url : new_repos) {
            auto remote = remote_repository::connect(url);
            remote.store(cat.database());
        }
    }

    // Adding a repository implies refreshing it, even without --update-repos.
    if (!new_repos.empty() || opts.build.update_repos) {
        update_all_remotes(opts.open_catalog().database());
    }

    auto bd = create_project_builder(opts);
    bd.build({
        .out_root          = opts.out_path.value_or(fs::current_path() / "_build"),
        .existing_lm_index = opts.build.lm_index,
        .emit_lmi          = {},
        .toolchain         = opts.load_toolchain(),
        .parallel_jobs     = opts.jobs,
    });

    return 0;
}

} // namespace dds::cli::cmd

+ 44
- 0
src/dds/cli/cmd/build_common.cpp Vedi File

@@ -0,0 +1,44 @@
#include "./build_common.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/catalog/get.hpp>
#include <dds/repo/repo.hpp>

using namespace dds;

/// Construct a `builder` for the project named by `opts`, pre-populated with
/// the project itself and — unless the user supplied a libman index, which
/// already describes the dependencies — every solved dependency package.
builder dds::cli::create_project_builder(const dds::cli::options& opts) {
    sdist_build_params proj_params = {
        .subdir          = "",
        .build_tests     = opts.build.want_tests,
        .run_tests       = opts.build.want_tests,
        .build_apps      = opts.build.want_apps,
        .enable_warnings = !opts.disable_warnings,
    };

    auto man = package_manifest::load_from_directory(opts.project_dir).value_or(package_manifest{});
    auto cat_path  = opts.pkg_db_dir.value_or(catalog::default_path());
    auto repo_path = opts.pkg_cache_dir.value_or(repository::default_local_path());

    builder ret;  // renamed: the original local shadowed the `builder` type
    if (!opts.build.lm_index.has_value()) {
        auto cat = catalog::open(cat_path);
        // Solve, download, and register each dependency under _deps/<pkg-id>
        repository::with_repository(  //
            repo_path,
            repo_flags::write_lock | repo_flags::create_if_absent,
            [&](repository repo) {
                auto solved = repo.solve(man.dependencies, cat);
                get_all(solved, repo, cat);
                for (const package_id& dep_id : solved) {
                    auto sd = repo.find(dep_id);
                    assert(sd);
                    sdist_build_params dep_params;
                    dep_params.subdir = fs::path("_deps") / sd->manifest.pkg_id.to_string();
                    ret.add(*sd, dep_params);
                }
            });
    }
    ret.add(sdist{std::move(man), opts.project_dir}, proj_params);
    return ret;
}

+ 11
- 0
src/dds/cli/cmd/build_common.hpp Vedi File

@@ -0,0 +1,11 @@
#include "../options.hpp"

#include <dds/build/builder.hpp>

#include <functional>

namespace dds::cli {

dds::builder create_project_builder(const options& opts);

} // namespace dds::cli

+ 63
- 0
src/dds/cli/cmd/build_deps.cpp Vedi File

@@ -0,0 +1,63 @@
#include "../options.hpp"

#include <dds/build/builder.hpp>
#include <dds/build/params.hpp>
#include <dds/catalog/get.hpp>
#include <dds/repo/repo.hpp>

#include <range/v3/action/join.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/concat.hpp>
#include <range/v3/view/transform.hpp>

namespace dds::cli::cmd {

/// `dds build-deps`: collect dependency statements from the command line and
/// from any --deps-file documents, solve and download them, build them all,
/// and emit a libman index (INDEX.lmi by default) describing the results.
int build_deps(const options& opts) {
    dds::build_params params{
        .out_root          = opts.out_path.value_or(fs::current_path() / "_deps"),
        .existing_lm_index = {},
        .emit_lmi          = opts.build.lm_index.value_or("INDEX.lmi"),
        .toolchain         = opts.load_toolchain(),
        .parallel_jobs     = opts.jobs,
    };

    dds::builder bd;
    // NOTE(review): `sdist_params` is never used below — confirm it can be removed.
    dds::sdist_build_params sdist_params;

    // Dependencies declared in each --deps-file, flattened into one list.
    auto all_file_deps = opts.build_deps.deps_files  //
        | ranges::views::transform([&](auto dep_fpath) {
                             dds_log(info, "Reading deps from {}", dep_fpath.string());
                             return dds::dependency_manifest::from_file(dep_fpath).dependencies;
                         })
        | ranges::actions::join;

    // Dependencies given directly as positional command-line strings.
    auto cmd_deps = ranges::views::transform(opts.build_deps.deps, [&](auto dep_str) {
        return dds::dependency::parse_depends_string(dep_str);
    });

    auto all_deps = ranges::views::concat(all_file_deps, cmd_deps) | ranges::to_vector;

    auto cat = opts.open_catalog();
    dds::repository::with_repository(  //
        opts.pkg_cache_dir.value_or(repository::default_local_path()),
        dds::repo_flags::write_lock | dds::repo_flags::create_if_absent,
        [&](dds::repository repo) {
            // Download dependencies
            dds_log(info, "Loading {} dependencies", all_deps.size());
            auto deps = repo.solve(all_deps, cat);
            dds::get_all(deps, repo, cat);
            for (const dds::package_id& pk : deps) {
                auto sdist_ptr = repo.find(pk);
                assert(sdist_ptr);
                dds::sdist_build_params deps_params;
                deps_params.subdir = sdist_ptr->manifest.pkg_id.to_string();
                dds_log(info, "Dependency: {}", sdist_ptr->manifest.pkg_id.to_string());
                bd.add(*sdist_ptr, deps_params);
            }
        });

    bd.build(params);
    return 0;
}

} // namespace dds::cli::cmd

+ 20
- 0
src/dds/cli/cmd/compile_file.cpp Vedi File

@@ -0,0 +1,20 @@
#include "../options.hpp"

#include "./build_common.hpp"

namespace dds::cli::cmd {

/// `dds compile-file`: compile only the user-listed source files within the
/// project, reusing the same builder setup as a full `dds build`.
int compile_file(const options& opts) {
    auto bd       = create_project_builder(opts);
    auto out_root = opts.out_path.value_or(fs::current_path() / "_build");
    bd.compile_files(opts.compile_file.files,
                     {
                         .out_root          = out_root,
                         .existing_lm_index = opts.build.lm_index,
                         .emit_lmi          = {},
                         .toolchain         = opts.load_toolchain(),
                         .parallel_jobs     = opts.jobs,
                     });
    return 0;
}

} // namespace dds::cli::cmd

+ 71
- 0
src/dds/cli/cmd/pkg_get.cpp Vedi File

@@ -0,0 +1,71 @@
#include "../options.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/catalog/get.hpp>
#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/http/session.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <json5/parse_data.hpp>

namespace dds::cli::cmd {

/// Download each requested package's source distribution and place it under
/// the output directory (default: CWD), one subdirectory per package ID.
static int _pkg_get(const options& opts) {
    auto cat = opts.open_catalog();
    for (const auto& item : opts.pkg.get.pkgs) {
        auto id = package_id::parse(item);
        // "Did-you-mean" target: collects a nearby-name suggestion during the
        // catalog lookup, used to enrich the not-found error below.
        dds::dym_target dym;
        auto info = cat.get(id);
        if (!info) {
            dds::throw_user_error<dds::errc::no_such_catalog_package>(
                "No package in the catalog matched the ID '{}'.{}", item, dym.sentence_suffix());
        }
        auto tsd  = get_package_sdist(*info);
        auto dest = opts.out_path.value_or(fs::current_path()) / id.to_string();
        // NOTE(review): `info` here is the dds_log severity token, presumably
        // resolved by the macro — confirm it is not shadowed by the local above.
        dds_log(info, "Create sdist at {}", dest.string());
        // Replace any existing directory at the destination.
        fs::remove_all(dest);
        safe_rename(tsd.sdist.path, dest);
    }
    return 0;
}

/// `dds pkg get` entry point: run _pkg_get inside a boost.leaf context and
/// translate known failure modes into log messages and exit code 1.
int pkg_get(const options& opts) {
    return boost::leaf::try_catch(  //
        [&] {
            try {
                return _pkg_get(opts);
            } catch (...) {
                // Re-throws through leaf so the handlers below can match on
                // the error objects attached to the active context.
                dds::capture_exception();
            }
        },
        [&](neo::url_validation_error url_err, dds::e_url_string bad_url) {
            dds_log(error,
                    "Invalid package URL in the database [{}]: {}",
                    bad_url.value,
                    url_err.what());
            return 1;
        },
        [&](const json5::parse_error& e, dds::e_http_url bad_url) {
            dds_log(error,
                    "Error parsing JSON5 document package downloaded from [{}]: {}",
                    bad_url.value,
                    e.what());
            return 1;
        },
        [](dds::e_sqlite3_error_exc e) {
            dds_log(error, "Error accessing the package database: {}", e.message);
            return 1;
        },
        [&](dds::e_system_error_exc e, dds::e_http_connect conn) {
            dds_log(error,
                    "Error opening connection to [{}:{}]: {}",
                    conn.host,
                    conn.port,
                    e.message);
            return 1;
        });
}

} // namespace dds::cli::cmd

+ 58
- 0
src/dds/cli/cmd/pkg_import.cpp Vedi File

@@ -0,0 +1,58 @@
#include "../options.hpp"

#include <dds/http/session.hpp>
#include <dds/repo/repo.hpp>
#include <dds/source/dist.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/url/parse.hpp>

#include <iostream>
#include <string_view>

namespace dds::cli::cmd {
/// Import one or more source-distribution archives — given as local paths,
/// HTTP(S) URLs, or standard input — into the local package cache.
static int _pkg_import(const options& opts) {
    const auto flags = repo_flags::write_lock | repo_flags::create_if_absent;
    return repository::with_repository(  //
        opts.pkg_cache_dir.value_or(repository::default_local_path()),
        flags,
        [&](auto repo) {
            for (std::string_view spec : opts.pkg.import.items) {
                neo_assertion_breadcrumbs("Importing sdist", spec);
                const bool is_remote
                    = spec.starts_with("http://") || spec.starts_with("https://");
                auto expanded = is_remote ? download_expand_sdist_targz(spec)
                                          : expand_sdist_targz(spec);
                neo_assertion_breadcrumbs("Importing from temporary directory",
                                          expanded.tmpdir.path());
                repo.add_sdist(expanded.sdist, dds::if_exists(opts.if_exists));
            }
            if (opts.pkg.import.from_stdin) {
                auto expanded = dds::expand_sdist_from_istream(std::cin, "<stdin>");
                repo.add_sdist(expanded.sdist, dds::if_exists(opts.if_exists));
            }
            return 0;
        });
}

/// `dds pkg import` entry point: wrap _pkg_import in a boost.leaf context
/// that renders known failures as user-facing errors.
int pkg_import(const options& opts) {
    return boost::leaf::try_catch(
        [&] {
            try {
                return _pkg_import(opts);
            } catch (...) {
                // Re-throws through leaf for the handlers below.
                dds::capture_exception();
            }
        },
        [&](const json5::parse_error& e) {
            dds_log(error, "Error parsing JSON in package archive: {}", e.what());
            return 1;
        },
        [](dds::e_sqlite3_error_exc e) {
            dds_log(error, "Unexpected database error: {}", e.message);
            return 1;
        });
}
} // namespace dds::cli::cmd

+ 61
- 0
src/dds/cli/cmd/pkg_ls.cpp Vedi File

@@ -0,0 +1,61 @@
#include "../options.hpp"

#include <dds/repo/repo.hpp>
#include <dds/source/dist.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <neo/assert.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/group_by.hpp>
#include <range/v3/view/transform.hpp>

#include <iostream>
#include <string_view>

namespace dds::cli::cmd {
/// Print every package in the local cache, grouped by name, with one
/// version per line under each name heading.
static int _pkg_ls(const options& opts) {
    auto list_contents = [&](repository repo) {
        auto same_name
            = [](auto&& a, auto&& b) { return a.manifest.pkg_id.name == b.manifest.pkg_id.name; };

        // group_by only merges *adjacent* equal elements; presumably
        // iter_sdists() yields name-sorted entries — confirm, otherwise a
        // name could appear as more than one group.
        auto all         = repo.iter_sdists();
        auto grp_by_name = all                                           //
            | ranges::views::group_by(same_name)                         //
            | ranges::views::transform(ranges::to_vector)                //
            | ranges::views::transform([](auto&& grp) {
                               assert(grp.size() > 0);
                               return std::pair(grp[0].manifest.pkg_id.name, grp);
                           });

        for (const auto& [name, grp] : grp_by_name) {
            dds_log(info, "{}:", name);
            for (const dds::sdist& sd : grp) {
                dds_log(info, " - {}", sd.manifest.pkg_id.version.to_string());
            }
        }

        return 0;
    };

    // Read-only access is sufficient for listing.
    return dds::repository::with_repository(opts.pkg_cache_dir.value_or(
                                                repository::default_local_path()),
                                            dds::repo_flags::read,
                                            list_contents);
}

/// `dds pkg ls` entry point: run the listing with database errors rendered
/// as a friendly message and exit code 1.
int pkg_ls(const options& opts) {
    auto attempt = [&] {
        try {
            return _pkg_ls(opts);
        } catch (...) {
            dds::capture_exception();
        }
    };
    return boost::leaf::try_catch(attempt, [](dds::e_sqlite3_error_exc e) {
        dds_log(error, "Unexpected database error: {}", e.message);
        return 1;
    });
}
} // namespace dds::cli::cmd

+ 24
- 0
src/dds/cli/cmd/pkg_repo_add.cpp Vedi File

@@ -0,0 +1,24 @@
#include "../options.hpp"

#include "./pkg_repo_err_handle.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/remote/remote.hpp>

namespace dds::cli::cmd {

/// Register a new remote package repository in the catalog and, unless the
/// user passed --no-update, fetch its package listing immediately.
static int _pkg_repo_add(const options& opts) {
    auto cat    = opts.open_catalog();
    auto remote = remote_repository::connect(opts.pkg.repo.add.url);
    remote.store(cat.database());
    if (opts.pkg.repo.add.update) {
        remote.update_catalog(cat.database());
    }
    return 0;
}

/// `dds pkg repo add` entry point, sharing the remote-error rendering used
/// by all pkg-repo subcommands.
int pkg_repo_add(const options& opts) {
    return handle_pkg_repo_remote_errors([&] { return _pkg_repo_add(opts); });
}

} // namespace dds::cli::cmd

+ 47
- 0
src/dds/cli/cmd/pkg_repo_err_handle.cpp Vedi File

@@ -0,0 +1,47 @@
#include "./pkg_repo_err_handle.hpp"

#include <dds/http/session.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <json5/parse_data.hpp>
#include <neo/url/parse.hpp>

/// Run `fn` inside a boost.leaf context and translate remote-repository
/// failures (bad URLs, JSON errors, database errors, connection failures)
/// into log messages and exit code 1. Handlers are tried in order, so the
/// sqlite handler that also carries the URL must precede the generic one.
int dds::cli::cmd::handle_pkg_repo_remote_errors(std::function<int()> fn) {
    return boost::leaf::try_catch(
        [&] {
            try {
                return fn();
            } catch (...) {
                // Re-throws through leaf for the handlers below.
                dds::capture_exception();
            }
        },
        [&](neo::url_validation_error url_err, dds::e_url_string bad_url) {
            dds_log(error, "Invalid URL [{}]: {}", bad_url.value, url_err.what());
            return 1;
        },
        [&](const json5::parse_error& e, dds::e_http_url bad_url) {
            dds_log(error,
                    "Error parsing JSON downloaded from URL [{}]: {}",
                    bad_url.value,
                    e.what());
            return 1;
        },
        [](dds::e_sqlite3_error_exc e, dds::e_url_string url) {
            dds_log(error, "Error accessing remote database (From {}): {}", url.value, e.message);
            return 1;
        },
        [](dds::e_sqlite3_error_exc e) {
            dds_log(error, "Unexpected database error: {}", e.message);
            return 1;
        },
        [&](dds::e_system_error_exc e, dds::e_http_connect conn) {
            dds_log(error,
                    "Error opening connection to [{}:{}]: {}",
                    conn.host,
                    conn.port,
                    e.message);
            return 1;
        });
}

+ 9
- 0
src/dds/cli/cmd/pkg_repo_err_handle.hpp Vedi File

@@ -0,0 +1,9 @@
#pragma once

#include <functional>

namespace dds::cli::cmd {

int handle_pkg_repo_remote_errors(std::function<int()>);

} // namespace dds::cli::cmd

+ 19
- 0
src/dds/cli/cmd/pkg_repo_update.cpp Vedi File

@@ -0,0 +1,19 @@
#include "../options.hpp"

#include "./pkg_repo_err_handle.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/remote/remote.hpp>

namespace dds::cli::cmd {

/// Refresh the package listings of every registered remote repository.
static int _pkg_repo_update(const options& opts) {
    update_all_remotes(opts.open_catalog().database());
    return 0;
}

/// `dds pkg repo update` entry point, sharing the remote-error rendering
/// used by all pkg-repo subcommands.
int pkg_repo_update(const options& opts) {
    return handle_pkg_repo_remote_errors([&] { return _pkg_repo_update(opts); });
}

} // namespace dds::cli::cmd

+ 57
- 0
src/dds/cli/cmd/repoman_import.cpp Vedi File

@@ -0,0 +1,57 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>
#include <neo/sqlite3/error.hpp>

namespace dds::cli::cmd {

/// Import each given .tar.gz source distribution into the repository
/// managed at opts.repoman.repo_dir.
static int _repoman_import(const options& opts) {
    auto repo = repo_manager::open(opts.repoman.repo_dir);
    for (const auto& tgz_path : opts.repoman.import.files) {
        repo.import_targz(tgz_path);
    }
    return 0;
}

/// `dds repoman import` entry point: render known import failures as
/// user-facing errors. Handlers are tried in order, most specific first.
int repoman_import(const options& opts) {
    return boost::leaf::try_catch(  //
        [&] {
            try {
                return _repoman_import(opts);
            } catch (...) {
                // Re-throws through leaf for the handlers below.
                dds::capture_exception();
            }
        },
        // Unique-constraint violation => the package is already present.
        [](dds::e_sqlite3_error_exc,
           boost::leaf::match<neo::sqlite3::errc, neo::sqlite3::errc::constraint_unique>,
           dds::e_repo_import_targz tgz,
           dds::package_id pkg_id) {
            dds_log(error,
                    "Package {} (from {}) is already present in the repository",
                    pkg_id.to_string(),
                    tgz.path);
            return 1;
        },
        [](dds::e_system_error_exc e, dds::e_repo_import_targz tgz) {
            dds_log(error, "Failed to import file {}: {}", tgz.path, e.message);
            return 1;
        },
        // NOTE(review): if e_sqlite3_error_exc is delivered alongside a
        // runtime_error-derived exception, this handler may win over the
        // sqlite handler below — confirm the intended ordering.
        [](const std::runtime_error& e, dds::e_repo_import_targz tgz) {
            dds_log(error, "Unknown error while importing file {}: {}", tgz.path, e.what());
            return 1;
        },
        [](dds::e_sqlite3_error_exc e, dds::e_repo_import_targz tgz) {
            dds_log(error, "Database error while importing tar file {}: {}", tgz.path, e.message);
            return 1;
        },
        [](dds::e_system_error_exc e, dds::e_open_repo_db db) {
            dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
            return 1;
        });
}

} // namespace dds::cli::cmd

+ 48
- 0
src/dds/cli/cmd/repoman_init.cpp Vedi File

@@ -0,0 +1,48 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>

namespace dds::cli::cmd {

/// Create a brand-new package repository with the given name in the given
/// directory.
static int _repoman_init(const options& opts) {
    auto repo = repo_manager::create(opts.repoman.repo_dir, opts.repoman.init.name);
    dds_log(info, "Created new repository '{}' in {}", repo.name(), repo.root());
    return 0;
}

/// `dds repoman init` entry point: render repository-creation failures as
/// user-facing errors. The most specific handler (repo + db path) is listed
/// first so it wins over the repo-only handler.
int repoman_init(const options& opts) {
    return boost::leaf::try_catch(  //
        [&] {
            try {
                return _repoman_init(opts);
            } catch (...) {
                // Re-throws through leaf for the handlers below.
                dds::capture_exception();
            }
        },
        [](dds::e_sqlite3_error_exc e, dds::e_init_repo init, dds::e_init_repo_db init_db) {
            dds_log(error,
                    "SQLite error while initializing repository in [{}] (SQlite database {}): {}",
                    init.path,
                    init_db.path,
                    e.message);
            return 1;
        },
        [](dds::e_system_error_exc e, dds::e_open_repo_db db) {
            dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
            return 1;
        },
        [](dds::e_sqlite3_error_exc e, dds::e_init_repo init) {
            dds_log(error,
                    "SQLite error while initializing repository in [{}]: {}",
                    init.path,
                    e.message);
            return 1;
        });
}

} // namespace dds::cli::cmd

+ 37
- 0
src/dds/cli/cmd/repoman_ls.cpp Vedi File

@@ -0,0 +1,37 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>

#include <iostream>

namespace dds::cli::cmd {

/// Write every package ID in the repository to stdout, one per line.
static int _repoman_ls(const options& opts) {
    auto repo = repo_manager::open(opts.repoman.repo_dir);
    for (const auto& pkg : repo.all_packages()) {
        std::cout << pkg.to_string() << '\n';
    }
    return 0;
}

/// `dds repoman ls` entry point: render database-open failures as a
/// user-facing error.
int repoman_ls(const options& opts) {
    return boost::leaf::try_catch(  //
        [&] {
            try {
                return _repoman_ls(opts);
            } catch (...) {
                // Re-throws through leaf for the handler below.
                dds::capture_exception();
            }
        },
        [](dds::e_system_error_exc e, dds::e_open_repo_db db) {
            dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
            return 1;
        });
}

} // namespace dds::cli::cmd

+ 54
- 0
src/dds/cli/cmd/repoman_remove.cpp Vedi File

@@ -0,0 +1,54 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>
#include <neo/sqlite3/error.hpp>

namespace dds::cli::cmd {

/// Delete each named package (parsed from its ID string) from the managed
/// repository.
static int _repoman_remove(const options& opts) {
    auto repo = repo_manager::open(opts.repoman.repo_dir);
    for (const auto& pkg_str : opts.repoman.remove.pkgs) {
        repo.delete_package(dds::package_id::parse(pkg_str));
    }
    return 0;
}

/// `dds repoman remove` entry point: render known removal failures as
/// user-facing errors.
///
/// Fix: dropped a handler copy-pasted from repoman_import that matched
/// `e_repo_import_targz` with a "package already present" message — the
/// removal path never imports an archive, so the handler could never fire
/// and its message was wrong for this command in any case.
int repoman_remove(const options& opts) {
    return boost::leaf::try_catch(  //
        [&] {
            try {
                return _repoman_remove(opts);
            } catch (...) {
                // Re-throws through leaf for the handlers below.
                dds::capture_exception();
            }
        },
        [](dds::e_system_error_exc e, dds::e_repo_delete_path tgz, dds::package_id pkg_id) {
            dds_log(error,
                    "Cannot delete requested package '{}' from repository (Path {}): {}",
                    pkg_id.to_string(),
                    tgz.path,
                    e.message);
            return 1;
        },
        [](dds::e_system_error_exc e, dds::e_open_repo_db db) {
            dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
            return 1;
        });
}

} // namespace dds::cli::cmd

+ 24
- 0
src/dds/cli/cmd/sdist_create.cpp Vedi File

@@ -0,0 +1,24 @@
#include "../options.hpp"

#include <dds/source/dist.hpp>

#include <fmt/core.h>

namespace dds::cli::cmd {

/// `dds sdist create`: package the project directory into a source
/// distribution archive. The output path defaults to
/// `<cwd>/<pkg-id>.tar.gz`; --if-exists=replace allows overwriting.
int sdist_create(const options& opts) {
    dds::sdist_params params{
        .project_dir   = opts.project_dir,
        .dest_path     = {},
        .force         = opts.if_exists == if_exists::replace,
        .include_apps  = true,
        .include_tests = true,
    };
    auto pkg_man = package_manifest::load_from_directory(params.project_dir);
    // Fix: the original wrote `pkg_man->pkg_id`, unconditionally dereferencing
    // the result. When the project has no package manifest that is UB on an
    // empty optional; `.value()` raises a catchable exception instead.
    // (assumes load_from_directory returns std::optional, as the value_or()
    // usage elsewhere in this change suggests — confirm)
    auto default_filename = fmt::format("{}.tar.gz", pkg_man.value().pkg_id.to_string());
    auto filepath         = opts.out_path.value_or(fs::current_path() / default_filename);
    create_sdist_targz(filepath, params);
    return 0;
}

} // namespace dds::cli::cmd

+ 91
- 0
src/dds/cli/dispatch_main.cpp Vedi File

@@ -0,0 +1,91 @@
#include "./dispatch_main.hpp"

#include "./error_handler.hpp"
#include "./options.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/remote/remote.hpp>
#include <dds/util/paths.hpp>
#include <dds/util/result.hpp>

using namespace dds;

namespace dds::cli {

namespace cmd {
// Signature shared by every subcommand implementation. The definitions live
// in the cli/cmd/*.cpp files; forward-declaring them here avoids needing a
// header per command.
using command = int(const options&);

command build_deps;
command build;
command compile_file;
command pkg_get;
command pkg_import;
command pkg_ls;
command pkg_repo_add;
command pkg_repo_update;
command repoman_import;
command repoman_init;
command repoman_ls;
command repoman_remove;
command sdist_create;

}  // namespace cmd

/// Select and invoke the command function matching the parsed (sub)command.
/// Everything runs inside handle_cli_errors, which converts errors into exit
/// codes, so this function itself never throws. The `_none_` cases fall
/// through to neo::unreachable(): the argument parser is expected to reject
/// a missing subcommand before we get here.
int dispatch_main(const options& opts) noexcept {
    dds::log::current_log_level = opts.log_level;
    return dds::handle_cli_errors([&] {
        switch (opts.subcommand) {
        case subcommand::build:
            return cmd::build(opts);
        case subcommand::sdist:
            switch (opts.sdist.subcommand) {
            case sdist_subcommand::create:
                return cmd::sdist_create(opts);
            case sdist_subcommand::_none_:;
            }
            neo::unreachable();
        case subcommand::pkg:
            switch (opts.pkg.subcommand) {
            case pkg_subcommand::ls:
                return cmd::pkg_ls(opts);
            case pkg_subcommand::get:
                return cmd::pkg_get(opts);
            case pkg_subcommand::import:
                return cmd::pkg_import(opts);
            case pkg_subcommand::repo:
                switch (opts.pkg.repo.subcommand) {
                case cli_pkg_repo_subcommand::add:
                    return cmd::pkg_repo_add(opts);
                case cli_pkg_repo_subcommand::update:
                    return cmd::pkg_repo_update(opts);
                case cli_pkg_repo_subcommand::_none_:;
                }
                neo::unreachable();
            case pkg_subcommand::_none_:;
            }
            neo::unreachable();
        case subcommand::repoman:
            switch (opts.repoman.subcommand) {
            case repoman_subcommand::import:
                return cmd::repoman_import(opts);
            case repoman_subcommand::init:
                return cmd::repoman_init(opts);
            case repoman_subcommand::remove:
                return cmd::repoman_remove(opts);
            case repoman_subcommand::ls:
                return cmd::repoman_ls(opts);
            case repoman_subcommand::_none_:;
            }
            neo::unreachable();
        case subcommand::compile_file:
            return cmd::compile_file(opts);
        case subcommand::build_deps:
            return cmd::build_deps(opts);
        case subcommand::_none_:;
        }
        neo::unreachable();
        // Unreachable fallback return value, kept to satisfy the compiler.
        return 6;
    });
}

} // namespace dds::cli

+ 9
- 0
src/dds/cli/dispatch_main.hpp Vedi File

@@ -0,0 +1,9 @@
#pragma once

namespace dds::cli {

// Forward declaration: full definition lives in options.hpp.
struct options;

/// Run the subcommand selected by the parsed options; never throws.
int dispatch_main(const options&) noexcept;

}  // namespace dds::cli

+ 44
- 0
src/dds/cli/error_handler.cpp Vedi File

@@ -0,0 +1,44 @@
#include "./error_handler.hpp"
#include "./options.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>
#include <dds/util/signal.hpp>

#include <boost/leaf/handle_error.hpp>
#include <boost/leaf/handle_exception.hpp>
#include <boost/leaf/result.hpp>
#include <fmt/ostream.h>
#include <neo/url/parse.hpp>

namespace {

// Shorthand for matching a specific CLI subcommand in a leaf handler.
// NOTE(review): not referenced in the visible code of this translation
// unit — possibly reserved for future handlers; confirm before removing.
template <dds::cli::subcommand Val>
using subcommand = boost::leaf::match<dds::cli::subcommand, Val>;

// Top-level error handlers, tried in order. The verbose_diagnostic_info
// handler is the catch-all: reaching it means no other handler recognized
// the error, i.e. a dds bug (exit code 42).
auto handlers = std::tuple(  //
    [](neo::url_validation_error exc, dds::e_url_string bad_url) {
        dds_log(error, "Invalid URL '{}': {}", bad_url.value, exc.what());
        return 1;
    },
    [](boost::leaf::catch_<dds::error_base> exc) {
        dds_log(error, "{}", exc.value().what());
        dds_log(error, "{}", exc.value().explanation());
        dds_log(error, "Refer: {}", exc.value().error_reference());
        return 1;
    },
    [](dds::user_cancelled) {
        dds_log(critical, "Operation cancelled by the user");
        return 2;
    },
    [](boost::leaf::verbose_diagnostic_info const& diag) {
        dds_log(critical, "An unhandled error arose. THIS IS A DDS BUG! Info: {}", diag);
        return 42;
    });
}  // namespace

/// Run a CLI command callable inside boost.leaf, mapping any error through
/// the file-local `handlers` tuple to an exit code; never throws.
int dds::handle_cli_errors(std::function<int()> fn) noexcept {
    return boost::leaf::try_handle_all([&]() -> boost::leaf::result<int> { return fn(); },
                                       handlers);
}

+ 9
- 0
src/dds/cli/error_handler.hpp Vedi File

@@ -0,0 +1,9 @@
#pragma once

#include <functional>

namespace dds {

int handle_cli_errors(std::function<int()>) noexcept;

} // namespace dds

+ 425
- 0
src/dds/cli/options.cpp Vedi File

@@ -0,0 +1,425 @@
#include "./options.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/error/errors.hpp>
#include <dds/toolchain/from_json.hpp>
#include <dds/toolchain/toolchain.hpp>

#include <debate/enum.hpp>

using namespace dds;
using namespace debate;

namespace {

// Wires up the full dds command-line grammar. Each `argument` member below
// is a reusable prototype that individual subcommands `.dup()` into their
// own parsers; every action writes into the referenced `options` instance.
// (The struct definition continues past this section.)
struct setup {
    dds::cli::options& opts;

    // Util argument common to a lot of operations
    argument if_exists_arg{
        .long_spellings = {"if-exists"},
        .help           = "What to do if the resource already exists",
        .valname        = "{replace,skip,fail}",
        .action         = put_into(opts.if_exists),
    };

    argument toolchain_arg{
        .long_spellings  = {"toolchain"},
        .short_spellings = {"t"},
        .help            = "The toolchain to use when building",
        .valname         = "<file-or-id>",
        .action          = put_into(opts.toolchain),
    };

    argument project_arg{
        .long_spellings  = {"project"},
        .short_spellings = {"p"},
        .help            = "The project to build. If not given, uses the current working directory",
        .valname         = "<project-path>",
        .action          = put_into(opts.project_dir),
    };

    argument no_warn_arg{
        .long_spellings = {"no-warn", "no-warnings"},
        .help           = "Disable build warnings",
        .nargs          = 0,
        .action         = store_true(opts.disable_warnings),
    };

    argument out_arg{
        .long_spellings  = {"out", "output"},
        .short_spellings = {"o"},
        .help            = "Path to the output",
        .valname         = "<path>",
        .action          = put_into(opts.out_path),
    };

    argument lm_index_arg{
        .long_spellings = {"libman-index"},
        .help           = "Path to a libman index to use",
        .valname        = "<lmi-path>",
        .action         = put_into(opts.build.lm_index),
    };

    argument jobs_arg{
        .long_spellings  = {"jobs"},
        .short_spellings = {"j"},
        .help            = "Set the maximum number of parallel jobs to execute",
        .valname         = "<job-count>",
        .action          = put_into(opts.jobs),
    };

    // Positional directory shared by every `repoman` subcommand.
    argument repoman_repo_dir_arg{
        .help     = "The directory of the repository to manage",
        .valname  = "<repo-dir>",
        .required = true,
        .action   = put_into(opts.repoman.repo_dir),
    };

// Entry point for option wiring: installs the global arguments shared by
// every subcommand, then registers all subcommand parsers.
void do_setup(argument_parser& parser) noexcept {
    parser.add_argument({
        .long_spellings  = {"log-level"},
        .short_spellings = {"l"},
        .help            = ""
                "Set the dds logging level. One of 'trace', 'debug', 'info', \n"
                "'warn', 'error', 'critical', or 'silent'",
        .valname = "<level>",
        .action  = put_into(opts.log_level),
    });
    parser.add_argument({
        .long_spellings = {"data-dir"},
        .help
        = ""
          "(Advanced) "
          "Override dds's data directory. This is used for various caches and databases.\n"
          "The default is a user-local directory that differs depending on platform.",
        .valname = "<directory>",
        .action  = put_into(opts.data_dir),
    });
    parser.add_argument({
        .long_spellings = {"pkg-cache-dir"},
        .help           = "(Advanced) Override dds's local package cache directory.",
        .valname        = "<directory>",
        .action         = put_into(opts.pkg_cache_dir),
    });
    parser.add_argument({
        .long_spellings = {"pkg-db-path"},
        .help           = "(Advanced) Override dds's default package database path.",
        .valname        = "<database-path>",
        .action         = put_into(opts.pkg_db_dir),
    });

    setup_main_commands(parser.add_subparsers({
        .description = "The operation to perform",
        .action      = put_into(opts.subcommand),
    }));
}

// Registers the top-level subcommands and delegates the per-command
// argument wiring to the setup_*_cmd helpers below.
void setup_main_commands(subparser_group& group) {
    setup_build_cmd(group.add_parser({
        .name = "build",
        .help = "Build a project",
    }));
    setup_compile_file_cmd(group.add_parser({
        .name = "compile-file",
        .help = "Compile individual files in the project",
    }));
    setup_build_deps_cmd(group.add_parser({
        .name = "build-deps",
        .help = "Build a set of dependencies and generate a libman index",
    }));
    setup_pkg_cmd(group.add_parser({
        .name = "pkg",
        .help = "Manage packages and package remotes",
    }));
    setup_sdist_cmd(group.add_parser({
        .name = "sdist",
        .help = "Work with source distribution packages",
    }));
    setup_repoman_cmd(group.add_parser({
        .name = "repoman",
        .help = "Manage a dds package repository",
    }));
}

// Arguments for `dds build`. Shared prototypes are .dup()'d; some have
// their help text specialized after duplication.
void setup_build_cmd(argument_parser& build_cmd) {
    build_cmd.add_argument(toolchain_arg.dup());
    build_cmd.add_argument(project_arg.dup());
    build_cmd.add_argument({
        .long_spellings = {"no-tests"},
        .help           = "Do not build and run project tests",
        .nargs          = 0,
        .action         = debate::store_false(opts.build.want_tests),
    });
    build_cmd.add_argument({
        .long_spellings = {"no-apps"},
        .help           = "Do not build project applications",
        .nargs          = 0,
        .action         = debate::store_false(opts.build.want_apps),
    });
    build_cmd.add_argument(no_warn_arg.dup());
    build_cmd.add_argument(out_arg.dup()).help = "Directory where dds will write build results";

    build_cmd.add_argument({
        .long_spellings = {"add-repo"},
        .help           = ""
                "Add remote repositories to the package catalog before building\n"
                "(Implies --update-repos)",
        .valname    = "<repo-url>",
        .can_repeat = true,
        .action     = debate::push_back_onto(opts.build.add_repos),
    });
    build_cmd.add_argument({
        .long_spellings  = {"update-repos"},
        .short_spellings = {"U"},
        .help            = "Update package repositories before building",
        .nargs           = 0,
        .action          = debate::store_true(opts.build.update_repos),
    });
    build_cmd.add_argument(lm_index_arg.dup()).help
        = "Path to a libman index file to use for loading project dependencies";
    build_cmd.add_argument(jobs_arg.dup());
}

// Arguments for `dds compile-file`: build options plus one-or-more
// positional source files to compile.
void setup_compile_file_cmd(argument_parser& compile_file_cmd) noexcept {
    compile_file_cmd.add_argument(project_arg.dup());
    compile_file_cmd.add_argument(toolchain_arg.dup());
    compile_file_cmd.add_argument(no_warn_arg.dup()).help = "Disable compiler warnings";
    compile_file_cmd.add_argument(jobs_arg.dup()).help
        = "Set the maximum number of files to compile in parallel";
    compile_file_cmd.add_argument(lm_index_arg.dup());
    compile_file_cmd.add_argument(out_arg.dup());
    compile_file_cmd.add_argument({
        .help       = "One or more source files to compile",
        .valname    = "<source-files>",
        .can_repeat = true,
        .action     = debate::push_back_onto(opts.compile_file.files),
    });
}

// Arguments for `dds build-deps`: dependencies come from --deps-file
// documents and/or positional dependency strings.
void setup_build_deps_cmd(argument_parser& build_deps_cmd) noexcept {
    // Fix: the original wrote `...dup()).required;` — a discarded member
    // access with no effect, leaving --toolchain silently optional. Assign
    // `true` to actually mark the argument as required.
    build_deps_cmd.add_argument(toolchain_arg.dup()).required = true;
    build_deps_cmd.add_argument(jobs_arg.dup());
    build_deps_cmd.add_argument(out_arg.dup());
    build_deps_cmd.add_argument(lm_index_arg.dup()).help
        = "Destination path for the generated libman index file";
    build_deps_cmd.add_argument({
        .long_spellings  = {"deps-file"},
        .short_spellings = {"d"},
        .help            = "Path to a JSON5 file listing dependencies",
        .valname         = "<deps-file>",
        .can_repeat      = true,
        .action          = debate::push_back_onto(opts.build_deps.deps_files),
    });
    build_deps_cmd.add_argument({
        .help       = "Dependency statement strings",
        .valname    = "<dependency>",
        .can_repeat = true,
        .action     = debate::push_back_onto(opts.build_deps.deps),
    });
}

// Registers the `pkg` subcommand group. Note: `ls` takes no additional
// arguments, so it gets no setup helper.
void setup_pkg_cmd(argument_parser& pkg_cmd) {
    auto& pkg_group = pkg_cmd.add_subparsers({
        .valname = "<pkg-subcommand>",
        .action  = put_into(opts.pkg.subcommand),
    });
    pkg_group.add_parser({
        .name = "ls",
        .help = "List locally available packages",
    });
    setup_pkg_get_cmd(pkg_group.add_parser({
        .name = "get",
        .help = "Obtain a copy of a package from a remote",
    }));
    setup_pkg_init_db_cmd(pkg_group.add_parser({
        .name = "init-db",
        .help = "Initialize a new package database file (Path specified with '--pkg-db-path')",
    }));
    setup_pkg_import_cmd(pkg_group.add_parser({
        .name = "import",
        .help = "Import a source distribution archive into the local package cache",
    }));
    setup_pkg_repo_cmd(pkg_group.add_parser({
        .name = "repo",
        .help = "Manage package repositories",
    }));
}

// Attach the arguments for 'dds pkg get'.
void setup_pkg_get_cmd(argument_parser& pkg_get_cmd) {
    // Positional: one or more package IDs to obtain.
    pkg_get_cmd.add_argument({
        .valname    = "<pkg-id>",
        .can_repeat = true,
        .action     = push_back_onto(opts.pkg.get.pkgs),
    });
    pkg_get_cmd.add_argument(out_arg.dup()).help
        = "Directory where obtained packages will be placed.\n"
          "Default is the current working directory.";
}

// Attach the arguments for 'dds pkg init-db'.
void setup_pkg_init_db_cmd(argument_parser& pkg_init_db_cmd) {
    pkg_init_db_cmd.add_argument(if_exists_arg.dup()).help
        = "What to do if the database file already exists";
}

// Attach the arguments for 'dds pkg import'.
void setup_pkg_import_cmd(argument_parser& pkg_import_cmd) noexcept {
    // '--stdin' is a zero-argument flag: read the archive from standard input.
    pkg_import_cmd.add_argument({
        .long_spellings = {"stdin"},
        .help           = "Import a source distribution archive from standard input",
        .nargs          = 0,
        .action         = debate::store_true(opts.pkg.import.from_stdin),
    });
    pkg_import_cmd.add_argument(if_exists_arg.dup()).help
        = "What to do if the package already exists in the local cache";
    // Positional: archives to import, given as filesystem paths or URLs.
    pkg_import_cmd.add_argument({
        .help       = "One or more paths/URLs to source distribution archives to import",
        .valname    = "<path-or-url>",
        .can_repeat = true,
        .action     = debate::push_back_onto(opts.pkg.import.items),
    });
}

// Attach the 'dds pkg repo' subcommand group.
void setup_pkg_repo_cmd(argument_parser& pkg_repo_cmd) noexcept {
    auto& pkg_repo_grp = pkg_repo_cmd.add_subparsers({
        .valname = "<pkg-repo-subcommand>",
        .action  = put_into(opts.pkg.repo.subcommand),
    });
    setup_pkg_repo_add_cmd(pkg_repo_grp.add_parser({
        .name = "add",
        .help = "Add a package repository",
    }));

    // 'update' takes no additional arguments, so no setup function is needed.
    pkg_repo_grp.add_parser({
        .name = "update",
        .help = "Update package repository information",
    });
}

// Attach the arguments for 'dds pkg repo add'.
void setup_pkg_repo_add_cmd(argument_parser& pkg_repo_add_cmd) noexcept {
    // Positional (required): the URL of the repository to add.
    pkg_repo_add_cmd.add_argument({
        .help     = "URL of a repository to add",
        .valname  = "<url>",
        .required = true,
        .action   = debate::put_into(opts.pkg.repo.add.url),
    });
    // '--no-update' flag: stores `false` to suppress the immediate repo-data
    // update that otherwise follows adding a repository.
    pkg_repo_add_cmd.add_argument({
        .long_spellings = {"no-update"},
        .help           = "Do not immediately update for the new package repository",
        .nargs          = 0,
        .action         = debate::store_false(opts.pkg.repo.add.update),
    });
}

// Attach the 'dds sdist' subcommand group.
void setup_sdist_cmd(argument_parser& sdist_cmd) noexcept {
    auto& sdist_grp = sdist_cmd.add_subparsers({
        .valname = "<sdist-subcommand>",
        .action  = put_into(opts.sdist.subcommand),
    });
    setup_sdist_create_cmd(sdist_grp.add_parser({
        .name = "create",
        .help = "Create a source distribution from a project tree",
    }));
}

// Attach the arguments for 'dds sdist create'.
void setup_sdist_create_cmd(argument_parser& sdist_create_cmd) {
    sdist_create_cmd.add_argument(project_arg.dup()).help
        = "Path to the project for which to create a source distribution.\n"
          "Default is the current working directory.";
    // Typo fix in user-facing help text: "distributnion" -> "distribution".
    sdist_create_cmd.add_argument(out_arg.dup()).help
        = "Destination path for the source distribution archive";
    sdist_create_cmd.add_argument(if_exists_arg.dup()).help
        = "What to do if the destination names an existing file";
}

// Attach the 'dds repoman' subcommand group and its child commands.
void setup_repoman_cmd(argument_parser& repoman_cmd) {
    auto& grp = repoman_cmd.add_subparsers({
        .valname = "<repoman-subcommand>",
        .action  = put_into(opts.repoman.subcommand),
    });

    setup_repoman_init_cmd(grp.add_parser({
        .name = "init",
        .help = "Initialize a directory as a new repository",
    }));
    setup_repoman_import_cmd(grp.add_parser({
        .name = "import",
        .help = "Import a source distribution into the repository",
    }));
    // 'ls' needs only the repository-directory argument, so it is set up inline
    // rather than in a dedicated setup function.
    auto& ls_cmd = grp.add_parser({
        .name = "ls",
        .help = "List the contents of a package repository directory",
    });
    ls_cmd.add_argument(repoman_repo_dir_arg.dup());
    setup_repoman_remove_cmd(grp.add_parser({
        .name = "remove",
        .help = "Remove packages from a package repository",
    }));
}

// Attach the arguments for 'dds repoman init'.
void setup_repoman_init_cmd(argument_parser& repoman_init_cmd) {
    repoman_init_cmd.add_argument(repoman_repo_dir_arg.dup());
    // Grammar fix in help text: "...is already a repository".
    repoman_init_cmd.add_argument(if_exists_arg.dup()).help
        = "What to do if the directory exists and is already a repository";
    // Optional '--name'/'-n' for the new repository's name.
    // Typo fix in help text: "Specifiy" -> "Specify".
    repoman_init_cmd.add_argument({
        .long_spellings  = {"name"},
        .short_spellings = {"n"},
        .help            = "Specify the name of the new repository",
        .valname         = "<name>",
        .action          = put_into(opts.repoman.init.name),
    });
}

// Attach the arguments for 'dds repoman import'.
void setup_repoman_import_cmd(argument_parser& repoman_import_cmd) {
    repoman_import_cmd.add_argument(repoman_repo_dir_arg.dup());
    // Positional: sdist archive files to add to the repository.
    repoman_import_cmd.add_argument({
        .help       = "Paths to source distribution archives to import",
        .valname    = "<sdist-file-path>",
        .can_repeat = true,
        .action     = push_back_onto(opts.repoman.import.files),
    });
}

// Attach the arguments for 'dds repoman remove'.
void setup_repoman_remove_cmd(argument_parser& repoman_remove_cmd) {
    repoman_remove_cmd.add_argument(repoman_repo_dir_arg.dup());
    // Positional: IDs of the packages to delete from the repository.
    repoman_remove_cmd.add_argument({
        .help       = "One or more identifiers of packages to remove",
        .valname    = "<pkg-id>",
        .can_repeat = true,
        .action     = push_back_onto(opts.repoman.remove.pkgs),
    });
}
};

} // namespace

// Bind a transient `setup` helper to this options object and have it attach
// all arguments and subcommands to `parser`.
void cli::options::setup_parser(debate::argument_parser& parser) noexcept {
    setup{*this}.do_setup(parser);
}

// Open the package catalog at the user-provided `--pkg-db-dir` path, falling
// back to the default catalog location when none was given.
catalog dds::cli::options::open_catalog() const {
    return catalog::open(this->pkg_db_dir.value_or(catalog::default_path()));
}

/// Resolve the toolchain selected by the user.
///
/// Resolution order:
///  1. No `--toolchain` given: use the system default toolchain (error if none).
///  2. A spelling beginning with ':' names a built-in toolchain.
///  3. Anything else is treated as a path to a JSON5 toolchain file.
toolchain dds::cli::options::load_toolchain() const {
    if (!toolchain) {
        // Nothing was specified on the command line: look up the default.
        auto sys_default = dds::toolchain::get_default();
        if (!sys_default) {
            throw_user_error<errc::no_default_toolchain>();
        }
        return *sys_default;
    }
    auto& spelling = *toolchain;
    if (!spelling.starts_with(":")) {
        // A plain spelling is a path to a toolchain definition file.
        return parse_toolchain_json5(slurp_file(spelling));
    }
    // A leading ':' selects a built-in toolchain by name.
    auto builtin_name = spelling.substr(1);
    auto found        = dds::toolchain::get_builtin(builtin_name);
    if (!found.has_value()) {
        throw_user_error<
            errc::invalid_builtin_toolchain>("Invalid built-in toolchain name '{}'",
                                             builtin_name);
    }
    return std::move(*found);
}

+ 242
- 0
src/dds/cli/options.hpp Vedi File

@@ -0,0 +1,242 @@
#pragma once

#include <dds/util/log.hpp>
#include <debate/argument_parser.hpp>

#include <filesystem>
#include <optional>
#include <string>
#include <vector>

namespace dds {

namespace fs = std::filesystem;
class catalog;
class toolchain;

namespace cli {

/**
 * @brief Top-level dds subcommands
 */
enum class subcommand {
    // No subcommand was selected on the command line
    _none_,
    build,
    compile_file,
    build_deps,
    pkg,
    sdist,
    repoman,
};

/**
 * @brief 'dds sdist' subcommands
 */
enum class sdist_subcommand {
    // No 'sdist' subcommand was selected
    _none_,
    create,
};

/**
 * @brief 'dds pkg' subcommands
 */
enum class pkg_subcommand {
    // No 'pkg' subcommand was selected
    _none_,
    ls,
    get,
    import,
    repo,
};

/**
 * @brief 'dds pkg repo' subcommands
 */
enum class cli_pkg_repo_subcommand {
    // No 'pkg repo' subcommand was selected
    _none_,
    add,
    update,
};

/**
 * @brief 'dds repoman' subcommands
 */
enum class repoman_subcommand {
    // No 'repoman' subcommand was selected
    _none_,
    init,
    import,
    remove,
    ls,
};

/**
 * @brief Options for `--if-exists` on the CLI
 */
enum class if_exists {
    // Replace the existing entity
    replace,
    // Fail with an error (the default in `options`)
    fail,
    // Keep the existing entity and continue
    ignore,
};

/**
 * @brief Complete aggregate of all dds command-line options, and some utilities
 */
struct options {
    using path       = fs::path;
    using opt_path   = std::optional<fs::path>;
    using string     = std::string;
    using opt_string = std::optional<std::string>;

    // The `--data-dir` argument
    opt_path data_dir;
    // The `--pkg-cache-dir` argument
    opt_path pkg_cache_dir;
    // The `--pkg-db-dir` argument
    opt_path pkg_db_dir;
    // The `--log-level` argument
    log::level log_level = log::level::info;

    // The top-most selected subcommand
    enum subcommand subcommand;

    // Many subcommands use a '--project' argument, stored here, using the CWD as the default
    path project_dir = fs::current_path();

    // Compile and build commands with `--no-warnings`/`--no-warn`
    bool disable_warnings = true;
    // Compile and build commands' `--jobs` parameter
    int jobs = 0;
    // Compile and build commands' `--toolchain` option:
    opt_string toolchain;
    // Output path; presumably bound to the shared `--out`/`--output` argument
    // — confirm against the parser setup in options.cpp
    opt_path out_path;

    // Shared `--if-exists` argument:
    cli::if_exists if_exists = cli::if_exists::fail;

    /**
     * @brief Open the package catalog based on the user-specified options.
     * @return catalog
     */
    catalog open_catalog() const;
    /**
     * @brief Load a dds toolchain as specified by the user, or a default.
     * @return dds::toolchain
     */
    dds::toolchain load_toolchain() const;

    /**
     * @brief Parameters specific to 'dds build'
     */
    struct {
        bool                want_tests = true;
        bool                want_apps  = true;
        opt_path            lm_index;
        std::vector<string> add_repos;
        bool                update_repos = false;
    } build;

    /**
     * @brief Parameters specific to 'dds compile-file'
     */
    struct {
        /// The files that the user has requested to be compiled
        std::vector<fs::path> files;
    } compile_file;

    /**
     * @brief Parameters specific to 'dds build-deps'
     */
    struct {
        /// Files listed with '--deps-file'
        std::vector<fs::path> deps_files;
        /// Dependency strings provided directly in the command-line
        std::vector<string> deps;
    } build_deps;

    /**
     * @brief Parameters and subcommands for 'dds pkg'
     */
    struct {
        /// The 'dds pkg' subcommand
        pkg_subcommand subcommand;

        /**
         * @brief Parameters for 'dds pkg import'
         */
        struct {
            /// File paths or URLs of packages to import
            std::vector<string> items;
            /// Allow piping a package tarball in through stdin
            bool from_stdin = false;
        } import;

        /**
         * @brief Parameters for 'dds pkg repo'
         */
        struct {
            /// The 'pkg repo' subcommand
            cli_pkg_repo_subcommand subcommand;

            /**
             * @brief Parameters of 'dds pkg repo add'
             */
            struct {
                /// The repository URL
                string url;
                /// Whether we should update repo data after adding the repository
                bool update = true;
            } add;
        } repo;

        /**
         * @brief Parameters for 'dds pkg get'
         */
        struct {
            /// Package IDs to download
            std::vector<string> pkgs;
        } get;
    } pkg;

    /// Parameters and subcommands for 'dds sdist'
    struct {
        sdist_subcommand subcommand;
    } sdist;

    /**
     * @brief Parameters for 'dds repoman'
     */
    struct {
        /// Shared parameter between repoman subcommands: The directory we are acting upon
        path repo_dir;

        /// The actual operation we are performing on the repository dir
        repoman_subcommand subcommand;

        /// Options for 'dds repoman init'
        struct {
            /// The name of the new repository. If not provided, a random one will be generated
            opt_string name;
        } init;

        /// Options for 'dds repoman import'
        struct {
            /// sdist tarball file paths to import into the repository
            std::vector<fs::path> files;
        } import;

        /// Options for 'dds repoman remove'
        struct {
            /// Package IDs of packages to remove
            std::vector<string> pkgs;
        } remove;
    } repoman;

    /**
     * @brief Attach arguments and subcommands to the given argument parser, binding those arguments
     * to the values in this object.
     */
    void setup_parser(debate::argument_parser& parser) noexcept;
};

} // namespace cli
} // namespace dds

+ 1
- 1
src/dds/repoman/repoman.cpp Vedi File

@@ -205,7 +205,7 @@ void repo_manager::delete_package(package_id pkg_id) {
auto name_dir = pkg_dir() / pkg_id.name;
auto ver_dir = name_dir / pkg_id.version.to_string();

DDS_E_SCOPE(e_repo_delete_targz{ver_dir});
DDS_E_SCOPE(e_repo_delete_path{ver_dir});

if (!fs::is_directory(ver_dir)) {
throw std::system_error(std::make_error_code(std::errc::no_such_file_or_directory),

+ 1
- 1
src/dds/repoman/repoman.hpp Vedi File

@@ -30,7 +30,7 @@ struct e_repo_import_targz {
fs::path path;
};

struct e_repo_delete_targz {
struct e_repo_delete_path {
fs::path path;
};


+ 1
- 1
src/dds/util/log.cpp Vedi File

@@ -45,7 +45,7 @@ void dds::log::log_print(dds::log::level l, std::string_view msg) noexcept {
return spdlog::level::err;
case level::critical:
return spdlog::level::critical;
case level::_silent:
case level::silent:
return spdlog::level::off;
}
neo_assert_always(invariant, false, "Invalid log level", msg, int(l));

+ 1
- 1
src/dds/util/log.hpp Vedi File

@@ -13,7 +13,7 @@ enum class level : int {
warn,
error,
critical,
_silent,
silent,
};

inline level current_log_level = level::info;

+ 7
- 5
src/dds/util/result.cpp Vedi File

@@ -2,14 +2,16 @@

#include <neo/sqlite3/error.hpp>

dds::error_id dds::capture_exception() {
void dds::capture_exception() {
try {
throw;
} catch (const neo::sqlite3::sqlite3_error& e) {
return current_error().load(e_sqlite3_error_exc{std::string(e.what()), e.code()},
e.code(),
neo::sqlite3::errc{e.code().value()});
current_error().load(e_sqlite3_error_exc{std::string(e.what()), e.code()},
e.code(),
neo::sqlite3::errc{e.code().value()});
} catch (const std::system_error& e) {
return current_error().load(e_system_error_exc{std::string(e.what()), e.code()}, e.code());
current_error().load(e_system_error_exc{std::string(e.what()), e.code()}, e.code());
}
// Re-throw as a bare exception.
throw std::exception();
}

+ 1
- 1
src/dds/util/result.hpp Vedi File

@@ -38,7 +38,7 @@ struct e_url_string {
* @brief Capture currently in-flight special exceptions as new error object. Works around a bug in
* Boost.LEAF when catching std::system error.
*/
error_id capture_exception();
[[noreturn]] void capture_exception();

/**
* @brief Generate a leaf::on_error object that loads the given expression into the currently

+ 4
- 12
tests/test_basics.py Vedi File

@@ -31,7 +31,7 @@ def test_build_simple(tmp_project: Project) -> None:
tmp_project.write('src/f.cpp', r'void f() {}')
tmp_project.build()
# Writing again will build again:
time.sleep(0.2) # Sleep long enough to register a file change
time.sleep(0.5) # Sleep long enough to register a file change
tmp_project.write('src/f.cpp', r'bad again')
with pytest.raises(CalledProcessError):
tmp_project.build()
@@ -67,12 +67,7 @@ TEST_PACKAGE: PackageJSON = {
}


def test_empty_with_pkg_dds(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.build()


def test_empty_with_lib_dds(tmp_project: Project) -> None:
def test_empty_with_pkg_json(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.build()

@@ -80,8 +75,5 @@ def test_empty_with_lib_dds(tmp_project: Project) -> None:
def test_empty_sdist_create(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.sdist_create()


def test_empty_sdist_export(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.sdist_export()
assert tmp_project.build_root.joinpath('test-pkg@0.2.2.tar.gz').is_file(), \
'The expected sdist tarball was not generated'

+ 2
- 12
tests/test_catalog.py Vedi File

@@ -1,17 +1,7 @@
from pathlib import Path

from dds_ci.testing import Project, RepoFixture
from dds_ci.dds import DDSWrapper


def test_catalog_create(dds_2: DDSWrapper, tmp_path: Path) -> None:
cat_db = tmp_path / 'catalog.db'
assert not cat_db.is_file()
dds_2.run(['catalog', 'create', '--catalog', cat_db])
assert cat_db.is_file()


def test_catalog_get_git(http_repo: RepoFixture, tmp_project: Project) -> None:
def test_pkg_get(http_repo: RepoFixture, tmp_project: Project) -> None:
http_repo.import_json_data({
'packages': {
'neo-sqlite3': {
@@ -27,6 +17,6 @@ def test_catalog_get_git(http_repo: RepoFixture, tmp_project: Project) -> None:
}
})
tmp_project.dds.repo_add(http_repo.url)
tmp_project.dds.catalog_get('neo-sqlite3@0.3.0')
tmp_project.dds.pkg_get('neo-sqlite3@0.3.0')
assert tmp_project.root.joinpath('neo-sqlite3@0.3.0').is_dir()
assert tmp_project.root.joinpath('neo-sqlite3@0.3.0/package.jsonc').is_file()

+ 22
- 0
tests/test_compile_file.py Vedi File

@@ -0,0 +1,22 @@
import subprocess

import pytest
import time

from dds_ci.testing import Project


def test_simple_compile_file(tmp_project: Project) -> None:
    """
    Check that 'dds compile-file' compiles a single named source file, and that
    a missing or ill-formed file causes the subprocess to fail.

    (The previous docstring was copied from the incremental-rebuild test and
    did not describe this test.)
    """
    # The source file does not exist yet, so compiling it must fail:
    with pytest.raises(subprocess.CalledProcessError):
        tmp_project.compile_file('src/answer.cpp')
    tmp_project.write('src/answer.cpp', 'int get_answer() { return 42; }')
    # No error:
    tmp_project.compile_file('src/answer.cpp')
    # Fail: returning a string from an int-returning function is ill-formed.
    time.sleep(0.5)  # Sleep long enough for the file-change to register
    tmp_project.write('src/answer.cpp', 'int get_answer() { return "How many roads must a man walk down?"; }')
    with pytest.raises(subprocess.CalledProcessError):
        tmp_project.compile_file('src/answer.cpp')

+ 59
- 12
tests/test_sdist.py Vedi File

@@ -1,5 +1,9 @@
import pytest
from pathlib import Path
from typing import Tuple
import subprocess

from dds_ci import proc
from dds_ci.testing import ProjectOpener, Project


@@ -8,24 +12,67 @@ def test_project(project_opener: ProjectOpener) -> Project:
return project_opener.open('projects/sdist')


def test_create_sdist(test_project: Project) -> None:
def test_create_sdist(test_project: Project, tmp_path: Path) -> None:
# Create in the default location
test_project.sdist_create()
sd_dir = test_project.build_root / 'foo@1.2.3.tar.gz'
assert sd_dir.is_file()
assert sd_dir.is_file(), 'Did not create an sdist in the default location'
# Create in a different location
dest = tmp_path / 'dummy.tar.gz'
test_project.sdist_create(dest=dest)
assert dest.is_file(), 'Did not create an sdist in the new location'


def test_export_sdist(test_project: Project) -> None:
test_project.sdist_export()
assert (test_project.dds.repo_dir / 'foo@1.2.3').is_dir()


def test_import_sdist_archive(test_project: Project) -> None:
@pytest.fixture()
def test_sdist(test_project: Project) -> Tuple[Path, Project]:
repo_content_path = test_project.dds.repo_dir / 'foo@1.2.3'
assert not repo_content_path.is_dir()
test_project.sdist_create()
assert not repo_content_path.is_dir()
test_project.dds.repo_import(test_project.build_root / 'foo@1.2.3.tar.gz')
assert repo_content_path.is_dir()
assert repo_content_path.joinpath('library.jsonc').is_file()
return test_project.build_root / 'foo@1.2.3.tar.gz', test_project


def test_import_sdist_archive(test_sdist: Tuple[Path, Project]) -> None:
sdist, project = test_sdist
repo_content_path = project.dds.repo_dir / 'foo@1.2.3'
project.dds.pkg_import(sdist)
assert repo_content_path.is_dir(), \
'The package did not appear in the local cache'
assert repo_content_path.joinpath('library.jsonc').is_file(), \
'The package\'s library.jsonc did not get imported'
# Excluded file will not be in the sdist:
assert not repo_content_path.joinpath('other-file.txt').is_file(), \
'Non-package content appeared in the package cache'


def test_import_sdist_stdin(test_sdist: Tuple[Path, Project]) -> None:
sdist, project = test_sdist
repo_content_path = project.dds.repo_dir / 'foo@1.2.3'
pipe = subprocess.Popen(
list(proc.flatten_cmd([
project.dds.path,
project.dds.repo_dir_arg,
'pkg',
'import',
'--stdin',
])),
stdin=subprocess.PIPE,
)
assert pipe.stdin
with sdist.open('rb') as sdist_bin:
buf = sdist_bin.read(1024)
while buf:
pipe.stdin.write(buf)
buf = sdist_bin.read(1024)
pipe.stdin.close()

rc = pipe.wait()
assert rc == 0, 'Subprocess failed'
# project.dds.pkg_import(sdist)
assert repo_content_path.is_dir(), \
'The package did not appear in the local cache'
assert repo_content_path.joinpath('library.jsonc').is_file(), \
'The package\'s library.jsonc did not get imported'
# Excluded file will not be in the sdist:
assert not repo_content_path.joinpath('other-file.txt').is_file()
assert not repo_content_path.joinpath('other-file.txt').is_file(), \
'Non-package content appeared in the package cache'

+ 53
- 8
tools/dds_ci/dds.py Vedi File

@@ -1,11 +1,14 @@
import multiprocessing
import shutil
from pathlib import Path
from typing import Optional
import copy
from typing import Optional, TypeVar, Iterable

from . import paths, proc, toolchain as tc_mod
from dds_ci.util import Pathish

T = TypeVar('T')


class DDSWrapper:
"""
@@ -23,11 +26,8 @@ class DDSWrapper:
self.catalog_path = Path(catalog_path or (self.repo_dir.parent / 'ci-catalog.db'))
self.default_cwd = default_cwd or Path.cwd()

def clone(self) -> 'DDSWrapper':
return DDSWrapper(self.path,
repo_dir=self.repo_dir,
catalog_path=self.catalog_path,
default_cwd=self.default_cwd)
def clone(self: T) -> T:
return copy.deepcopy(self)

@property
def catalog_path_arg(self) -> str:
@@ -39,6 +39,10 @@ class DDSWrapper:
"""The arguments for --repo-dir"""
return f'--repo-dir={self.repo_dir}'

@property
def project_dir_flag(self) -> str:
return '--project-dir'

def set_repo_scratch(self, path: Pathish) -> None:
self.repo_dir = Path(path) / 'data'
self.catalog_path = Path(path) / 'catalog.db'
@@ -66,12 +70,18 @@ class DDSWrapper:
def catalog_get(self, what: str) -> None:
self.run(['catalog', 'get', self.catalog_path_arg, what])

def pkg_get(self, what: str) -> None:
self.run(['pkg', 'get', self.catalog_path_arg, what])

def repo_add(self, url: str) -> None:
self.run(['repo', 'add', self.catalog_path_arg, url, '--update'])
self.run(['pkg', 'repo', 'add', self.catalog_path_arg, url])

def repo_import(self, sdist: Path) -> None:
self.run(['repo', self.repo_dir_arg, 'import', sdist])

def pkg_import(self, filepath: Pathish) -> None:
self.run(['pkg', 'import', filepath, self.repo_dir_arg])

def build(self,
*,
root: Path,
@@ -94,10 +104,28 @@ class DDSWrapper:
self.repo_dir_arg,
self.catalog_path_arg,
f'--jobs={jobs}',
f'--project-dir={root}',
f'{self.project_dir_flag}={root}',
f'--out={build_root}',
])

def compile_file(self,
                 paths: Iterable[Pathish],
                 *,
                 toolchain: Optional[Pathish] = None,
                 project_dir: Pathish,
                 out: Optional[Pathish] = None) -> None:
    """
    Run 'dds compile-file' for the given paths.

    :param paths: Source files to compile.
    :param toolchain: Toolchain file to use; defaults to the audit toolchain.
    :param project_dir: The project directory to compile within.
    :param out: Optional output directory; omitted from the command when None.
    """
    toolchain = toolchain or tc_mod.get_default_audit_toolchain()
    self.run([
        'compile-file',
        paths,
        f'--toolchain={toolchain}',
        f'{self.project_dir_flag}={project_dir}',
        # Previously this always emitted '--out=None' when no output path was
        # given; an empty tuple is dropped by proc.flatten_cmd instead.
        f'--out={out}' if out is not None else (),
    ])

def build_deps(self, args: proc.CommandLine, *, toolchain: Optional[Path] = None) -> None:
toolchain = toolchain or tc_mod.get_default_audit_toolchain()
self.run([
@@ -107,3 +135,20 @@ class DDSWrapper:
self.repo_dir_arg,
args,
])


class NewDDSWrapper(DDSWrapper):
    """
    Wraps the new 'dds' executable with some convenience APIs.

    Overrides the flag spellings of :class:`DDSWrapper` to match the new
    command-line interface.
    """
    @property
    def repo_dir_arg(self) -> str:
        # New CLI spelling for the package cache directory
        return f'--pkg-cache-dir={self.repo_dir}'

    @property
    def catalog_path_arg(self) -> str:
        # New CLI spelling for the package database path
        return f'--pkg-db-path={self.catalog_path}'

    @property
    def project_dir_flag(self) -> str:
        # New CLI uses '--project' instead of '--project-dir'
        return '--project'

+ 5
- 11
tools/dds_ci/proc.py Vedi File

@@ -5,7 +5,7 @@ import subprocess

from .util import Pathish

CommandLineArg = Union[str, PurePath, int, float]
CommandLineArg = Union[str, Pathish, int, float]
CommandLineArg1 = Union[CommandLineArg, Iterable[CommandLineArg]]
CommandLineArg2 = Union[CommandLineArg1, Iterable[CommandLineArg1]]
CommandLineArg3 = Union[CommandLineArg2, Iterable[CommandLineArg2]]
@@ -39,16 +39,10 @@ def flatten_cmd(cmd: CommandLine) -> Iterable[str]:


def run(*cmd: CommandLine, cwd: Optional[Pathish] = None, check: bool = False) -> ProcessResult:
return subprocess.run(
list(flatten_cmd(cmd)),
cwd=cwd,
check=check,
)
command = list(flatten_cmd(cmd))
return subprocess.run(command, cwd=cwd, check=check)


def check_run(*cmd: CommandLine, cwd: Optional[Pathish] = None) -> ProcessResult:
return subprocess.run(
list(flatten_cmd(cmd)),
cwd=cwd,
check=True,
)
command = list(flatten_cmd(cmd))
return subprocess.run(command, cwd=cwd, check=True)

+ 18
- 8
tools/dds_ci/testing/fixtures.py Vedi File

@@ -14,7 +14,7 @@ from _pytest.tmpdir import TempPathFactory
from _pytest.fixtures import FixtureRequest

from dds_ci import toolchain, paths
from ..dds import DDSWrapper
from ..dds import DDSWrapper, NewDDSWrapper
from ..util import Pathish
tc_mod = toolchain

@@ -72,8 +72,8 @@ class Project:

@property
def project_dir_arg(self) -> str:
"""Argument for --project-dir"""
return f'--project-dir={self.root}'
"""Argument for --project"""
return f'--project={self.root}'

def build(self, *, toolchain: Optional[Pathish] = None) -> None:
"""
@@ -82,9 +82,18 @@ class Project:
with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc)

def sdist_create(self) -> None:
def compile_file(self, *paths: Pathish, toolchain: Optional[Pathish] = None) -> None:
with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
self.dds.compile_file(paths, toolchain=tc, out=self.build_root, project_dir=self.root)

def sdist_create(self, *, dest: Optional[Pathish] = None) -> None:
self.build_root.mkdir(exist_ok=True, parents=True)
self.dds.run(['sdist', 'create', self.project_dir_arg], cwd=self.build_root)
self.dds.run([
'sdist',
'create',
self.project_dir_arg,
f'--out={dest}' if dest else (),
], cwd=self.build_root)

def sdist_export(self) -> None:
self.dds.run(['sdist', 'export', self.dds.repo_dir_arg, self.project_dir_arg])
@@ -170,11 +179,12 @@ def tmp_project(request: FixtureRequest, worker_id: str, project_opener: Project


@pytest.fixture(scope='session')
def dds_2(dds_exe: Path) -> DDSWrapper:
return DDSWrapper(dds_exe)
def dds_2(dds_exe: Path) -> NewDDSWrapper:
wr = NewDDSWrapper(dds_exe)
return wr


@pytest.fixture(scope='session')
def dds_exe(pytestconfig: PyTestConfig) -> Path:
opt = pytestconfig.getoption('--dds-exe') or paths.CUR_BUILT_DDS
opt = pytestconfig.getoption('--dds-exe') or paths.BUILD_DIR / 'dds'
return Path(opt)

+ 1
- 1
tools/mkrepo.py Vedi File

@@ -371,7 +371,7 @@ def http_dl_unpack(url: str) -> Iterator[Path]:
def spec_as_local_tgz(dds_exe: Path, spec: SpecPackage) -> Iterator[Path]:
with spec.remote.make_local_dir(spec.name, spec.version) as clone_dir:
out_tgz = clone_dir / 'sdist.tgz'
check_call([str(dds_exe), 'sdist', 'create', f'--project-dir={clone_dir}', f'--out={out_tgz}'])
check_call([str(dds_exe), 'sdist', 'create', f'--project={clone_dir}', f'--out={out_tgz}'])
yield out_tgz



Loading…
Annulla
Salva