Browse Source

Merge branch 'feature/sdist-tar' into develop

default_compile_flags
vector-of-bool 4 years ago
commit 7c2cb1c26c
26 changed files with 443 additions and 4167 deletions
  1. +3 -4 Makefile
  2. +115 -420 catalog.json
  3. +5 -0 library.jsonc
  4. +15 -14 package.jsonc
  5. +3 -7 res/package-schema.json
  6. +61 -9 src/dds.main.cpp
  7. +11 -15 src/dds/build/plan/exe.cpp
  8. +2 -7 src/dds/build/plan/exe.hpp
  9. +13 -12 src/dds/build/plan/full.cpp
  10. +10 -8 src/dds/build/plan/library.cpp
  11. +0 -2 src/dds/build/plan/library.hpp
  12. +0 -5 src/dds/catalog/get.hpp
  13. +19 -3557 src/dds/catalog/init_catalog.cpp
  14. +4 -2 src/dds/repo/repo.cpp
  15. +1 -1 src/dds/solve/solve.cpp
  16. +32 -0 src/dds/source/dist.cpp
  17. +11 -1 src/dds/source/dist.hpp
  18. +1 -0 src/dds/util/parallel.hpp
  19. +6 -6 tests/dds.py
  20. +16 -8 tests/sdist/sdist_test.py
  21. +8 -2 tools/ci.py
  22. +2 -4 tools/freebsd-gcc-9.jsonc
  23. +22 -0 tools/gcc-9-dev.jsonc
  24. +1 -3 tools/gcc-9-rel.jsonc
  25. +2 -4 tools/gcc-9-static-rel.jsonc
  26. +80 -76 tools/gen-catalog-json.py

+ 3
- 4
Makefile View File

@@ -41,19 +41,19 @@ docs-sync-server:
macos-ci:
python3 -u tools/ci.py \
-B download \
-T tools/gcc-9.jsonc
-T tools/gcc-9-rel.jsonc
mv _build/dds _build/dds-macos-x64

linux-ci:
python3 -u tools/ci.py \
-B download \
-T tools/gcc-9-static.jsonc
-T tools/gcc-9-static-rel.jsonc
mv _build/dds _build/dds-linux-x64

nix-ci:
python3 -u tools/ci.py \
-B download \
-T tools/gcc-9.jsonc
-T tools/gcc-9-rel.jsonc

vagrant-freebsd-ci:
vagrant up freebsd11
@@ -63,7 +63,6 @@ vagrant-freebsd-ci:
python3.7 tools/ci.py \
-B download \
-T tools/freebsd-gcc-9.jsonc \
-T2 tools/freebsd-gcc-9.next.jsonc \
'
vagrant scp freebsd11:/vagrant/_build/dds _build/dds-freebsd-x64
vagrant halt

+ 115
- 420
catalog.json View File

@@ -941,6 +941,26 @@
"transform": [],
"url": "https://github.com/fmtlib/fmt.git"
}
},
"7.0.2": {
"depends": [],
"description": "A modern formatting library : https://fmt.dev/",
"git": {
"auto-lib": "fmt/fmt",
"ref": "7.0.2",
"transform": [],
"url": "https://github.com/fmtlib/fmt.git"
}
},
"7.0.3": {
"depends": [],
"description": "A modern formatting library : https://fmt.dev/",
"git": {
"auto-lib": "fmt/fmt",
"ref": "7.0.3",
"transform": [],
"url": "https://github.com/fmtlib/fmt.git"
}
}
},
"hinnant-date": {
@@ -2133,6 +2153,71 @@
}
}
},
"neo-buffer": {
"0.2.1": {
"depends": [
"neo-concepts^0.2.2",
"neo-fun^0.1.1"
],
"description": "Buffer and byte algorithms/types based on those of Asio",
"git": {
"ref": "0.2.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-buffer.git"
}
},
"0.3.0": {
"depends": [
"neo-concepts^0.3.2",
"neo-fun^0.4.0"
],
"description": "Buffer and byte algorithms/types based on those of Asio",
"git": {
"ref": "0.3.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-buffer.git"
}
},
"0.4.0": {
"depends": [
"neo-concepts^0.4.0",
"neo-fun^0.4.1"
],
"description": "Buffer and byte algorithms/types based on those of Asio",
"git": {
"ref": "0.4.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-buffer.git"
}
},
"0.4.1": {
"depends": [
"neo-concepts^0.4.0",
"neo-fun^0.4.1"
],
"description": "Buffer and byte algorithms/types based on those of Asio",
"git": {
"ref": "0.4.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-buffer.git"
}
}
},
"neo-compress": {
"0.1.0": {
"depends": [
"neo-buffer^0.4.1",
"neo-fun^0.4.0",
"zlib^1.2.9"
],
"description": "Compression, archiving, etc. for C++20",
"git": {
"ref": "0.1.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-compress.git"
}
}
},
"neo-concepts": {
"0.2.2": {
"depends": [],
@@ -2169,6 +2254,15 @@
"transform": [],
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
},
"0.4.0": {
"depends": [],
"description": "A (mostly) backport of C++20 concepts library, with some additions.",
"git": {
"ref": "0.4.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
}
},
"neo-fun": {
@@ -2234,6 +2328,15 @@
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.4.1": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.4.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
}
},
"neo-sqlite3": {
@@ -3391,429 +3494,21 @@
"transform": [],
"url": "https://github.com/vector-of-bool/semester.git"
}
}
},
"zlib": {
"1.0.1": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.0.1",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.0.2": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.0.2",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.0.4": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.0.4",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.0.5": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.0.5",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.0.7": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.0.7",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.0.8": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.0.8",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.0.9": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.0.9",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.1.0": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.1.0",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.1.1": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.1.1",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.1.2": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.1.2",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.1.3": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.1.3",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
}
},
"1.1.4": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",
"0.2.2": {
"depends": [
"neo-fun^0.3.2",
"neo-concepts^0.3.2"
],
"description": "A generic library for dealing with semistructured data",
"git": {
"auto-lib": "zlib/zlib",
"ref": "v1.1.4",
"transform": [
{
"move": {
"exclude": [],
"from": ".",
"include": [
"*.c",
"*.h"
],
"strip-components": 0,
"to": "src/"
}
},
{
"move": {
"exclude": [],
"from": "src/",
"include": [
"zlib.h",
"zconf.h"
],
"strip-components": 0,
"to": "include/"
}
}
],
"url": "https://github.com/madler/zlib.git"
"ref": "0.2.2",
"transform": [],
"url": "https://github.com/vector-of-bool/semester.git"
}
},
}
},
"zlib": {
"1.2.0": {
"depends": [],
"description": "A massively spiffy yet delicately unobtrusive compression library",

+ 5
- 0
library.jsonc View File

@@ -13,5 +13,10 @@
"vob/json5",
"vob/semester",
"hanickadot/ctre",
// "neo/io",
// Explicit zlib link is required due to linker input order bug.
// Can be removed after alpha.5
"zlib/zlib",
"neo/compress"
]
}

+ 15
- 14
package.jsonc View File

@@ -3,19 +3,20 @@
"name": "dds",
"version": "0.1.0-alpha.4",
"namespace": "dds",
"depends": {
"spdlog": "1.7.0",
"ms-wil": "2020.3.16",
"range-v3": "0.11.0",
"nlohmann-json": "3.7.1",
"neo-sqlite3": "0.2.3",
"neo-fun": "0.3.2",
"semver": "0.2.2",
"pubgrub": "0.2.1",
"vob-json5": "0.1.5",
"vob-semester": "0.2.1",
"ctre": "2.8.1",
"fmt": "^7.0.0"
},
"depends": [
"spdlog@1.7.0",
"ms-wil@2020.3.16",
"range-v3@0.11.0",
"nlohmann-json@3.7.1",
"neo-sqlite3@0.2.3",
"neo-fun^0.3.2",
"neo-compress^0.1.0",
"semver@0.2.2",
"pubgrub@0.2.1",
"vob-json5@0.1.5",
"vob-semester@0.2.2",
"ctre@2.8.1",
"fmt^7.0.3"
],
"test_driver": "Catch-Main"
}
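The "depends" field changes here from an object keyed by package name to a flat array of shorthand strings such as "spdlog@1.7.0" or "neo-fun^0.3.2". Purely as an illustration, a minimal Python sketch that tokenizes that shorthand; the helper is hypothetical and not part of dds, and the exact range semantics of "@" versus "^" are not defined by this diff:

    import re

    # Hypothetical helper, for illustration only: split "name@1.2.3" or
    # "name^1.2.3" into (name, separator, version). dds decides what the
    # separators mean; this sketch only splits the string.
    DEP_PATTERN = re.compile(r'^([a-z0-9][a-z0-9._-]*)([@^])(.+)$')

    def split_dependency(dep: str):
        match = DEP_PATTERN.match(dep)
        if match is None:
            raise ValueError(f'not a dependency shorthand: {dep!r}')
        return match.group(1), match.group(2), match.group(3)

    assert split_dependency('spdlog@1.7.0') == ('spdlog', '@', '1.7.0')
    assert split_dependency('neo-fun^0.3.2') == ('neo-fun', '^', '0.3.2')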

+ 3
- 7
res/package-schema.json View File

@@ -32,13 +32,9 @@
"description": "JSON schema tag. Ignored by dds."
},
"depends": {
"type": "object",
"patternProperties": {
"^[a-z][a-z0-9_]*((\\.|-)[a-z0-9_]+)*$": {
"type": "string",
"description": "The version of the dependency. Must be a valid Semantic Version string",
"pattern": "^(0|[1-9]\\d*)\\.(0|[1-9]\\d*)\\.(0|[1-9]\\d*)(?:-((?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\\.(?:0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\\+([0-9a-zA-Z-]+(?:\\.[0-9a-zA-Z-]+)*))?$"
}
"type": "array",
"items": {
"type": "string"
}
},
"test_driver": {

+ 61
- 9
src/dds.main.cpp View File

@@ -450,6 +450,47 @@ struct cli_repo {
}
} ls{*this};

struct {
cli_repo& parent;
args::Command cmd{parent.repo_group,
"import",
"Import a source distribution archive file into the repository"};
common_flags _common{cmd};

args::PositionalList<dds::fs::path>
sdist_paths{cmd, "sdist-path", "Path to one or more source distribution archives"};

args::Flag force{cmd,
"replace-if-exists",
"Replace an existing package in the repository",
{"replace"}};

args::Flag import_stdin{cmd,
"import-stdin",
"Import a source distribution tarball from stdin",
{"stdin"}};

int run() {
auto import_sdists = [&](dds::repository repo) {
auto if_exists_action
= force.Get() ? dds::if_exists::replace : dds::if_exists::throw_exc;
for (auto& tgz_path : sdist_paths.Get()) {
auto tmp_sd = dds::expand_sdist_targz(tgz_path);
repo.add_sdist(tmp_sd.sdist, if_exists_action);
}
if (import_stdin) {
auto tmp_sd = dds::expand_sdist_from_istream(std::cin, "<stdin>");
repo.add_sdist(tmp_sd.sdist, if_exists_action);
}
return 0;
};
return dds::repository::with_repository(parent.where.Get(),
dds::repo_flags::write_lock
| dds::repo_flags::create_if_absent,
import_sdists);
}
} import_{*this};

struct {
cli_repo& parent;
args::Command cmd{parent.repo_group, "init", "Initialize a directory as a repository"};
@@ -471,6 +512,8 @@ struct cli_repo {
return ls.run();
} else if (init.cmd) {
return init.run();
} else if (import_.cmd) {
return import_.run();
} else {
assert(false);
std::terminate();
@@ -502,11 +545,7 @@ struct cli_sdist {

common_project_flags project{cmd};

path_flag out{cmd,
"out",
"The destination of the source distribution",
{"out"},
dds::fs::current_path() / "project.dsd"};
path_flag out{cmd, "out", "The destination of the source distribution", {"out"}};

args::Flag force{cmd,
"replace-if-exists",
@@ -515,10 +554,23 @@ struct cli_sdist {

int run() {
dds::sdist_params params;
params.project_dir = project.root.Get();
params.dest_path = out.Get();
params.force = force.Get();
dds::create_sdist(params);
params.project_dir = project.root.Get();
params.dest_path = out.Get();
params.force = force.Get();
params.include_apps = true;
params.include_tests = true;
auto pkg_man = dds::package_manifest::load_from_directory(project.root.Get());
if (!pkg_man) {
dds::throw_user_error<dds::errc::invalid_pkg_manifest>(
"Creating a source distribution requires a package manifest");
}
std::string default_filename = fmt::format("{}@{}.tar.gz",
pkg_man->pkg_id.name,
pkg_man->pkg_id.version.to_string());
auto default_filepath = dds::fs::current_path() / default_filename;
auto out_path = out.Matched() ? out.Get() : default_filepath;
dds::create_sdist_targz(out_path, params);
dds_log(info, "Generated source distribution at [{}]", out_path.string());
return 0;
}
} create{*this};
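Together with the new "repo import" subcommand above, the intended workflow is: "dds sdist create" now writes <name>@<version>.tar.gz into the current directory when --out is not given, and "dds repo import" unpacks such an archive into the local repository, optionally reading it from stdin (--stdin) or replacing an existing package (--replace). A rough Python sketch of driving that workflow; the directory flags are placeholders, since their exact spellings are not shown in this diff:

    import subprocess

    PROJECT_DIR_ARG = '<project-dir-flag>'  # placeholder; see common_project_flags
    REPO_DIR_ARG = '<repo-dir-flag>'        # placeholder; see cli_repo's "where" flag

    # 1. Create the source distribution tarball. With no --out it is written to
    #    the working directory as <name>@<version>.tar.gz.
    subprocess.run(['dds', 'sdist', 'create', PROJECT_DIR_ARG], check=True)

    # 2. Import it into a repository, replacing any already-imported copy.
    subprocess.run(
        ['dds', 'repo', REPO_DIR_ARG, 'import', 'foo@1.2.3.tar.gz', '--replace'],
        check=True)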

+ 11
- 15
src/dds/build/plan/exe.cpp View File

@@ -20,12 +20,14 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons
// Build up the link command
link_exe_spec spec;
spec.output = calc_executable_path(env);
spec.inputs = _input_libs;
dds_log(debug, "Performing link for {}", spec.output.string());
for (const lm::usage& links : _links) {
dds_log(trace, " - Link with: {}/{}", links.name, links.namespace_);
extend(spec.inputs, env.ureqs.link_paths(links));
}

// The main object should be a linker input, of course.
auto main_obj = _main_compile.calc_object_file_path(env);
dds_log(trace, "Add entry point object file: {}", main_obj.string());
spec.inputs.push_back(std::move(main_obj));

if (lib.archive_plan()) {
// The associated library has compiled components. Add the static library as a linker
// input
@@ -36,16 +38,10 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons
dds_log(trace, "Executable has no corresponding archive library input");
}

// The main object should be a linker input, of course.
auto main_obj = _main_compile.calc_object_file_path(env);
dds_log(trace, "Add entry point object file: {}", main_obj.string());
spec.inputs.push_back(std::move(main_obj));

// Linker inputs are order-dependent in some cases. The top-most input should appear first, and
// its dependencies should appear later. Because of the way inputs were generated, they appear
// sorted with the dependencies coming earlier than the dependees. We can simply reverse the
// order and linking will work.
std::reverse(spec.inputs.begin(), spec.inputs.end());
for (const lm::usage& links : _links) {
dds_log(trace, " - Link with: {}/{}", links.name, links.namespace_);
extend(spec.inputs, env.ureqs.link_paths(links));
}

// Do it!
const auto link_command

+ 2
- 7
src/dds/build/plan/exe.hpp View File

@@ -27,8 +27,6 @@ struct test_failure {
* single source file defines the entry point and some set of linker inputs.
*/
class link_executable_plan {
/// The linker inputs that should be linked into the executable
std::vector<fs::path> _input_libs;
/// Usage requirements for this executable
std::vector<lm::usage> _links;
/// The compilation plan for the entry-point source file
@@ -41,19 +39,16 @@ class link_executable_plan {
public:
/**
* Create a new instance
* @param in_libs Linker inputs for the executable
* @param links The library identifiers that the executable should link with
* @param cfp The file compilation that defines the entrypoint of the application
* @param out_subdir The subdirectory of the build root in which the executable should be placed
* @param name_ The name of the executable
*/
link_executable_plan(std::vector<fs::path> in_libs,
std::vector<lm::usage> links,
link_executable_plan(std::vector<lm::usage> links,
compile_file_plan cfp,
path_ref out_subdir,
std::string name_)
: _input_libs(std::move(in_libs))
, _links(std::move(links))
: _links(std::move(links))
, _main_compile(std::move(cfp))
, _out_subdir(out_subdir)
, _name(std::move(name_)) {}

+ 13
- 12
src/dds/build/plan/full.cpp View File

@@ -24,18 +24,19 @@ namespace {

template <typename T, typename Range>
decltype(auto) pair_up(T& left, Range& right) {
auto rep = ranges::view::repeat(left);
return ranges::view::zip(rep, right);
auto rep = ranges::views::repeat(left);
return ranges::views::zip(rep, right);
}

} // namespace

void build_plan::render_all(build_env_ref env) const {
auto templates = _packages //
| ranges::view::transform(&package_plan::libraries) //
| ranges::view::join //
| ranges::view::transform([](const auto& lib) { return pair_up(lib, lib.templates()); }) //
| ranges::view::join;
auto templates = _packages //
| ranges::views::transform(&package_plan::libraries) //
| ranges::views::join //
| ranges::views::transform(
[](const auto& lib) { return pair_up(lib, lib.templates()); }) //
| ranges::views::join;
for (const auto& [lib, tmpl] : templates) {
tmpl.render(env, lib.library_());
}
@@ -56,9 +57,9 @@ void build_plan::compile_files(const build_env& env,
fs::path filepath;
};

auto as_pending = //
ranges::view::all(filepaths) //
| ranges::view::transform([](auto&& path) {
auto as_pending = //
ranges::views::all(filepaths) //
| ranges::views::transform([](auto&& path) {
return pending_file{false, fs::weakly_canonical(path)};
})
| ranges::to_vector;
@@ -74,10 +75,10 @@ void build_plan::compile_files(const build_env& env,
};

auto comps
= iter_compilations(*this) | ranges::view::filter(check_compilation) | ranges::to_vector;
= iter_compilations(*this) | ranges::views::filter(check_compilation) | ranges::to_vector;

bool any_unmarked = false;
auto unmarked = ranges::view::filter(as_pending, ranges::not_fn(&pending_file::marked));
auto unmarked = ranges::views::filter(as_pending, ranges::not_fn(&pending_file::marked));
for (auto&& um : unmarked) {
dds_log(error, "Source file [{}] is not compiled by this project", um.filepath.string());
any_unmarked = true;

+ 10
- 8
src/dds/build/plan/library.cpp View File

@@ -100,10 +100,6 @@ library_plan library_plan::create(const library_root& lib,
extend(links, lib.manifest().uses);
extend(links, lib.manifest().links);

// Linker inputs for tests may contain additional code for test execution
std::vector<fs::path> link_libs;
std::vector<fs::path> test_link_libs = params.test_link_files;

// There may also be additional usage requirements for tests
auto test_rules = compile_rules.clone();
auto test_links = links;
@@ -114,6 +110,14 @@ library_plan library_plan::create(const library_root& lib,
std::vector<link_executable_plan> link_executables;
for (const source_file& source : ranges::views::concat(app_sources, test_sources)) {
const bool is_test = source.kind == source_kind::test;
if (is_test && !params.build_tests) {
// This is a test, but we don't want to build tests
continue;
}
if (!is_test && !params.build_apps) {
// This is an app, but we don't want to build apps
continue;
}
// Pick a subdir based on app/test
const auto subdir_base = is_test ? params.out_subdir / "test" : params.out_subdir;
// Put test/app executables in a further subdirectory based on the source file path
@@ -121,11 +125,9 @@ library_plan library_plan::create(const library_root& lib,
// Pick compile rules based on app/test
auto rules = is_test ? test_rules : compile_rules;
// Pick input libs based on app/test
auto& exe_link_libs = is_test ? test_link_libs : link_libs;
auto& exe_links = is_test ? test_links : links;
auto& exe_links = is_test ? test_links : links;
// TODO: Apps/tests should only see the _public_ include dir, not both
auto exe = link_executable_plan{exe_link_libs,
exe_links,
auto exe = link_executable_plan{exe_links,
compile_file_plan(rules,
source,
qual_name,

+ 0
- 2
src/dds/build/plan/library.hpp View File

@@ -30,8 +30,6 @@ struct library_build_params {

/// Directories that should be on the #include search path when compiling tests
std::vector<fs::path> test_include_dirs;
/// Files that should be added as inputs when linking test executables
std::vector<fs::path> test_link_files;

/// Libraries that are used by tests
std::vector<lm::usage> test_uses;

+ 0
- 5
src/dds/catalog/get.hpp View File

@@ -9,11 +9,6 @@ class repository;
class catalog;
struct package_info;

struct temporary_sdist {
temporary_dir tmpdir;
dds::sdist sdist;
};

temporary_sdist get_package_sdist(const package_info&);

void get_all(const std::vector<package_id>& pkgs, dds::repository& repo, const catalog& cat);

+ 19
- 3557
src/dds/catalog/init_catalog.cpp View File
File diff suppressed because it is too large


+ 4
- 2
src/dds/repo/repo.cpp View File

@@ -69,8 +69,10 @@ void repository::add_sdist(const sdist& sd, if_exists ife_action) {
}
auto sd_dest = _root / sd.manifest.pkg_id.to_string();
if (fs::exists(sd_dest)) {
auto msg = fmt::format("Source distribution '{}' is already available in the local repo",
sd.path.string());
auto msg = fmt::
format("Package '{}' (Importing from [{}]) is already available in the local repo",
sd.manifest.pkg_id.to_string(),
sd.path.string());
if (ife_action == if_exists::throw_exc) {
throw_user_error<errc::sdist_exists>(msg);
} else if (ife_action == if_exists::ignore) {

+ 1
- 1
src/dds/solve/solve.cpp View File

@@ -160,7 +160,7 @@ std::vector<package_id> dds::solve(const std::vector<dependency>& deps,
pkg_id_provider_fn pkgs_prov,
deps_provider_fn deps_prov) {
auto wrap_req
= deps | ranges::v3::views::transform([](const dependency& dep) { return req_type{dep}; });
= deps | ranges::views::transform([](const dependency& dep) { return req_type{dep}; });

try {
auto solution = pubgrub::solve(wrap_req, solver_provider{pkgs_prov, deps_prov});

+ 32
- 0
src/dds/source/dist.cpp View File

@@ -9,6 +9,7 @@
#include <libman/parse.hpp>

#include <neo/assert.hpp>
#include <neo/tar/util.hpp>
#include <range/v3/algorithm/sort.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/filter.hpp>
@@ -82,6 +83,21 @@ sdist dds::create_sdist(const sdist_params& params) {
return sdist::from_directory(dest);
}

void dds::create_sdist_targz(path_ref filepath, const sdist_params& params) {
if (fs::exists(filepath)) {
if (!params.force) {
throw_user_error<errc::sdist_exists>("Destination path '{}' already exists",
filepath.string());
}
}

auto tempdir = temporary_dir::create();
dds_log(debug, "Generating source distribution in {}", tempdir.path().string());
create_sdist_in_dir(tempdir.path(), params);
fs::create_directories(filepath.parent_path());
neo::compress_directory_targz(tempdir.path(), filepath);
}

sdist dds::create_sdist_in_dir(path_ref out, const sdist_params& params) {
auto libs = collect_libraries(params.project_dir);

@@ -113,3 +129,19 @@ sdist sdist::from_directory(path_ref where) {
where.string());
return sdist{pkg_man.value(), where};
}

temporary_sdist dds::expand_sdist_targz(path_ref targz_path) {
auto tempdir = temporary_dir::create();
dds_log(debug, "Expanding source ditsribution content into {}", tempdir.path().string());
fs::create_directories(tempdir.path());
neo::expand_directory_targz(tempdir.path(), targz_path);
return {tempdir, sdist::from_directory(tempdir.path())};
}

temporary_sdist dds::expand_sdist_from_istream(std::istream& is, std::string_view input_name) {
auto tempdir = temporary_dir::create();
dds_log(debug, "Expanding source ditsribution content into {}", tempdir.path().string());
fs::create_directories(tempdir.path());
neo::expand_directory_targz(tempdir.path(), is, input_name);
return {tempdir, sdist::from_directory(tempdir.path())};
}
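create_sdist_targz compresses a freshly generated source distribution directory, while expand_sdist_targz and expand_sdist_from_istream unpack an archive straight into a temporary directory and read the package manifest from it. That implies the package contents sit at the archive root rather than under a wrapping directory. A small illustrative check of that assumption using Python's tarfile module; the file names are examples only:

    import tarfile

    # Illustrative only: list the contents of a generated sdist archive. The
    # expectation, inferred from expand_sdist_targz above, is that entries such
    # as the package manifest and src/ appear at the archive root.
    with tarfile.open('foo@1.2.3.tar.gz', 'r:gz') as archive:
        for member in archive.getmembers():
            print(member.name)  # e.g. 'package.jsonc', 'src/foo.cpp'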

+ 11
- 1
src/dds/source/dist.hpp View File

@@ -3,6 +3,7 @@
#include <tuple>

#include <dds/package/manifest.hpp>
#include <dds/temp.hpp>
#include <dds/util/fs.hpp>

namespace dds {
@@ -26,6 +27,11 @@ struct sdist {
static sdist from_directory(path_ref p);
};

struct temporary_sdist {
temporary_dir tmpdir;
struct sdist sdist;
};

inline constexpr struct sdist_compare_t {
bool operator()(const sdist& lhs, const sdist& rhs) const {
return lhs.manifest.pkg_id < rhs.manifest.pkg_id;
@@ -41,5 +47,9 @@ inline constexpr struct sdist_compare_t {

sdist create_sdist(const sdist_params&);
sdist create_sdist_in_dir(path_ref, const sdist_params&);
void create_sdist_targz(path_ref, const sdist_params&);

temporary_sdist expand_sdist_targz(path_ref targz);
temporary_sdist expand_sdist_from_istream(std::istream&, std::string_view input_name);

} // namespace dds
} // namespace dds

+ 1
- 0
src/dds/util/parallel.hpp View File

@@ -1,6 +1,7 @@
#pragma once

#include <algorithm>
#include <iterator>
#include <mutex>
#include <stdexcept>
#include <thread>

+ 6
- 6
tests/dds.py View File

@@ -104,12 +104,9 @@ class DDS:
)

def sdist_create(self) -> subprocess.CompletedProcess:
return self.run([
'sdist',
'create',
self.project_dir_arg,
f'--out={self.build_dir / "created-sdist.sds"}',
])
self.build_dir.mkdir(exist_ok=True, parents=True)
return self.run(['sdist', 'create', self.project_dir_arg],
cwd=self.build_dir)

def sdist_export(self) -> subprocess.CompletedProcess:
return self.run([
@@ -119,6 +116,9 @@ class DDS:
self.repo_dir_arg,
])

def repo_import(self, sdist: Path) -> subprocess.CompletedProcess:
return self.run(['repo', self.repo_dir_arg, 'import', sdist])

@property
def default_builtin_toolchain(self) -> str:
if os.name == 'posix':

+ 16
- 8
tests/sdist/sdist_test.py View File

@@ -1,16 +1,24 @@
from tests.dds import DDS, dds_fixture_conf_1


@dds_fixture_conf_1('create')
def test_create_sdist(dds: DDS):
dds.sdist_create()
sd_dir = dds.build_dir / 'created-sdist.sds'
assert sd_dir.is_dir()
foo_cpp = sd_dir / 'src/foo.cpp'
assert foo_cpp.is_file()
header_hpp = sd_dir / 'include/header.hpp'
assert header_hpp.is_file()
header_h = sd_dir / 'include/header.h'
assert header_h.is_file()
sd_dir = dds.build_dir / 'foo@1.2.3.tar.gz'
assert sd_dir.is_file()


@dds_fixture_conf_1('create')
def test_export_sdist(dds: DDS):
dds.sdist_export()
assert (dds.repo_dir / 'foo@1.2.3').is_dir()


@dds_fixture_conf_1('create')
def test_import_sdist_archive(dds: DDS):
repo_content_path = dds.repo_dir / 'foo@1.2.3'
assert not repo_content_path.is_dir()
dds.sdist_create()
assert not repo_content_path.is_dir()
dds.repo_import(dds.build_dir / 'foo@1.2.3.tar.gz')
assert repo_content_path.is_dir()

+ 8
- 2
tools/ci.py View File

@@ -75,6 +75,12 @@ def main(argv: Sequence[str]) -> int:
'--build-only',
action='store_true',
help='Only build the `dds` executable. Skip second-phase and tests.')
parser.add_argument(
'--no-clean',
action='store_false',
dest='clean',
help='Don\'t remove prior build/deps results',
)
args = parser.parse_args(argv)

opts = CIOptions(toolchain=args.toolchain)
@@ -89,11 +95,11 @@ def main(argv: Sequence[str]) -> int:
assert False, 'impossible'

old_cat_path = paths.PREBUILT_DIR / 'catalog.db'
if old_cat_path.is_file():
if old_cat_path.is_file() and args.clean:
old_cat_path.unlink()

ci_repo_dir = paths.PREBUILT_DIR / 'ci-repo'
if ci_repo_dir.exists():
if ci_repo_dir.exists() and args.clean:
shutil.rmtree(ci_repo_dir)

self_build(
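The new --no-clean flag (action='store_false' into args.clean) keeps the previously downloaded catalog.db and ci-repo between runs instead of deleting them. For example, re-running a local CI build without wiping prior results might look like this; the toolchain path is taken from the Makefile targets above:

    import subprocess

    # Re-run CI while keeping the prebuilt catalog/repo from the previous run.
    subprocess.run(
        ['python3', '-u', 'tools/ci.py',
         '-B', 'download',
         '-T', 'tools/gcc-9-rel.jsonc',
         '--no-clean'],
        check=True)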

+ 2
- 4
tools/freebsd-gcc-9.jsonc View File

@@ -12,9 +12,7 @@
],
"link_flags": [
"-static-libgcc",
"-static-libstdc++",
"-static-libstdc++"
],
// "debug": true,
"optimize": true,
"compiler_launcher": "ccache"
"optimize": true
}

+ 22
- 0
tools/gcc-9-dev.jsonc View File

@@ -0,0 +1,22 @@
{
"$schema": "../res/toolchain-schema.json",
"compiler_id": "gnu",
"c_compiler": "gcc-9",
"cxx_compiler": "g++-9",
"warning_flags": [
"-Werror",
],
"flags": [
"-fsanitize=address,undefined"
],
"cxx_flags": [
"-fconcepts",
"-std=c++2a",
],
"link_flags": [
"-fuse-ld=lld",
"-fsanitize=address,undefined"
],
"debug": true,
"compiler_launcher": "ccache"
}

tools/gcc-9.jsonc → tools/gcc-9-rel.jsonc View File

@@ -14,7 +14,5 @@
"-static-libgcc",
"-static-libstdc++"
],
// "debug": true,
"optimize": true,
"compiler_launcher": "ccache"
"optimize": true
}

tools/gcc-9-static.jsonc → tools/gcc-9-static-rel.jsonc View File

@@ -11,9 +11,7 @@
"-std=c++2a",
],
"link_flags": [
"-static",
"-static"
],
// "debug": true,
"optimize": true,
"compiler_launcher": "ccache"
"optimize": true
}

+ 80
- 76
tools/gen-catalog-json.py View File

@@ -1,4 +1,5 @@
import argparse
import gzip
import os
import json
import json5
@@ -272,18 +273,26 @@ def many_versions(name: str,

# yapf: disable
PACKAGES = [
github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3', ['0.2.3', '0.3.0']),
github_package('neo-fun', 'vector-of-bool/neo-fun', ['0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0']),
github_package('neo-buffer', 'vector-of-bool/neo-buffer',
['0.2.1', '0.3.0', '0.4.0', '0.4.1']),
github_package('neo-compress', 'vector-of-bool/neo-compress', ['0.1.0']),
github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3',
['0.2.3', '0.3.0']),
github_package('neo-fun', 'vector-of-bool/neo-fun', [
'0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0', '0.4.1'
]),
github_package('neo-concepts', 'vector-of-bool/neo-concepts', (
'0.2.2',
'0.3.0',
'0.3.1',
'0.3.2',
)),
'0.2.2',
'0.3.0',
'0.3.1',
'0.3.2',
'0.4.0',
)),
github_package('semver', 'vector-of-bool/semver', ['0.2.2']),
github_package('pubgrub', 'vector-of-bool/pubgrub', ['0.2.1']),
github_package('vob-json5', 'vector-of-bool/json5', ['0.1.5']),
github_package('vob-semester', 'vector-of-bool/semester', ['0.1.0', '0.1.1', '0.2.0', '0.2.1']),
github_package('vob-semester', 'vector-of-bool/semester',
['0.1.0', '0.1.1', '0.2.0', '0.2.1', '0.2.2']),
many_versions(
'magic_enum',
(
@@ -438,6 +447,8 @@ PACKAGES = [
'6.2.1',
'7.0.0',
'7.0.1',
'7.0.2',
'7.0.3',
),
git_url='https://github.com/fmtlib/fmt.git',
auto_lib='fmt/fmt',
@@ -568,63 +579,47 @@ PACKAGES = [
('2020.2.25', '20200225.2'),
]
]),
Package(
'zlib',
[
Version(
ver,
description=
'A massively spiffy yet delicately unobtrusive compression library',
remote=Git(
'https://github.com/madler/zlib.git',
tag or f'v{ver}',
auto_lib='zlib/zlib',
transforms=[
FSTransform(
move=CopyMoveTransform(
frm='.',
to='src/',
include=[
'*.c',
'*.h',
],
)),
FSTransform(
move=CopyMoveTransform(
frm='src/',
to='include/',
include=['zlib.h', 'zconf.h'],
)),
]),
) for ver, tag in [
('1.2.11', None),
('1.2.10', None),
('1.2.9', None),
('1.2.8', None),
('1.2.7', 'v1.2.7.3'),
('1.2.6', 'v1.2.6.1'),
('1.2.5', 'v1.2.5.3'),
('1.2.4', 'v1.2.4.5'),
('1.2.3', 'v1.2.3.8'),
('1.2.2', 'v1.2.2.4'),
('1.2.1', 'v1.2.1.2'),
('1.2.0', 'v1.2.0.8'),
('1.1.4', None),
('1.1.3', None),
('1.1.2', None),
('1.1.1', None),
('1.1.0', None),
('1.0.9', None),
('1.0.8', None),
('1.0.7', None),
# ('1.0.6', None), # Does not exist
('1.0.5', None),
('1.0.4', None),
# ('1.0.3', None), # Does not exist
('1.0.2', None),
('1.0.1', None),
]
]),
Package('zlib', [
Version(
ver,
description=
'A massively spiffy yet delicately unobtrusive compression library',
remote=Git(
'https://github.com/madler/zlib.git',
tag or f'v{ver}',
auto_lib='zlib/zlib',
transforms=[
FSTransform(
move=CopyMoveTransform(
frm='.',
to='src/',
include=[
'*.c',
'*.h',
],
)),
FSTransform(
move=CopyMoveTransform(
frm='src/',
to='include/',
include=['zlib.h', 'zconf.h'],
)),
]),
) for ver, tag in [
('1.2.11', None),
('1.2.10', None),
('1.2.9', None),
('1.2.8', None),
('1.2.7', 'v1.2.7.3'),
('1.2.6', 'v1.2.6.1'),
('1.2.5', 'v1.2.5.3'),
('1.2.4', 'v1.2.4.5'),
('1.2.3', 'v1.2.3.8'),
('1.2.2', 'v1.2.2.4'),
('1.2.1', 'v1.2.1.2'),
('1.2.0', 'v1.2.0.8'),
]
]),
Package('sol2', [
Version(
ver,
@@ -954,31 +949,40 @@ if __name__ == "__main__":
#include <dds/catalog/init_catalog.hpp>
#include <dds/catalog/import.hpp>

#include <neo/gzip.hpp>
#include <neo/transform_io.hpp>
#include <neo/string_io.hpp>
#include <neo/inflate.hpp>

/**
* The following array of integers is generated and contains the JSON
* encoded initial catalog. MSVC can't handle string literals over
* The following array of integers is generated and contains gzip-compressed
* JSON encoded initial catalog. MSVC can't handle string literals over
* 64k large, so we have to resort to using a regular char array:
*/
static constexpr const char INIT_PACKAGES_CONTENT[] = {
static constexpr const unsigned char INIT_PACKAGES_CONTENT[] = {
@JSON@
};

static constexpr int INIT_PACKAGES_STR_LEN = @JSON_LEN@;

const std::vector<dds::package_info>&
dds::init_catalog_packages() noexcept {
using std::nullopt;
static auto pkgs = dds::parse_packages_json(
std::string_view(INIT_PACKAGES_CONTENT, INIT_PACKAGES_STR_LEN));
static auto pkgs = []{
using namespace neo;
string_dynbuf_io str_out;
buffer_copy(str_out,
buffer_transform_source{
buffers_consumer(as_buffer(INIT_PACKAGES_CONTENT)),
gzip_decompressor{inflate_decompressor{}}},
@JSON_LEN@);
return dds::parse_packages_json(str_out.read_area_view());
}();
return pkgs;
}
''')

json_small = json.dumps(data, sort_keys=True)
json_small_arr = ', '.join(str(ord(c)) for c in json_small)

json_small_arr = '\n'.join(textwrap.wrap(json_small_arr, width=120))
json_small_arr = textwrap.indent(json_small_arr, prefix=' ' * 4)
json_compr = gzip.compress(json_small.encode('utf-8'), compresslevel=9)
json_small_arr = ','.join(str(c) for c in json_compr)

cpp_content = cpp_template.replace('@JSON@', json_small_arr).replace(
'@JSON_LEN@', str(len(json_small)))
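The generator now embeds the catalog as gzip-compressed bytes instead of raw JSON characters, and the generated C++ inflates it at startup; @JSON_LEN@ remains the length of the uncompressed JSON, used as the byte limit when decompressing. The same round trip, sketched in plain Python for clarity (the catalog dict is a stand-in for the real data):

    import gzip
    import json

    catalog = {'packages': {}}  # stand-in for the real catalog data
    json_small = json.dumps(catalog, sort_keys=True)

    # What the script embeds: the compressed bytes as a comma-separated list of
    # integers, plus the uncompressed length for the C++ side's read limit.
    compressed = gzip.compress(json_small.encode('utf-8'), compresslevel=9)
    array_body = ','.join(str(b) for b in compressed)
    json_len = len(json_small)

    # Decompressing the embedded bytes recovers the original JSON text.
    assert gzip.decompress(compressed).decode('utf-8') == json_small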
