
Merge branch 'feature/cleanup-1' into develop

default_compile_flags
vector-of-bool, 5 years ago
commit 4c6471d925
49 changed files with 729 additions and 564 deletions
  1. src/browns/md5.cpp (+0, -1)
  2. src/browns/md5.hpp (+0, -186)
  3. src/browns/md5.test.cpp (+0, -30)
  4. src/browns/output.hpp (+0, -72)
  5. src/dds.main.cpp (+1, -1)
  6. src/dds/build.cpp (+4, -4)
  7. src/dds/build/deps.hpp (+0, -46)
  8. src/dds/build/file_deps.cpp (+9, -9)
  9. src/dds/build/file_deps.hpp (+142, -0)
  10. src/dds/build/file_deps.test.cpp (+6, -6)
  11. src/dds/build/iter_compilations.hpp (+6, -0)
  12. src/dds/build/params.hpp (+1, -1)
  13. src/dds/build/plan/archive.cpp (+16, -4)
  14. src/dds/build/plan/archive.hpp (+41, -3)
  15. src/dds/build/plan/compile_exec.cpp (+64, -26)
  16. src/dds/build/plan/compile_exec.hpp (+8, -0)
  17. src/dds/build/plan/compile_file.cpp (+3, -1)
  18. src/dds/build/plan/compile_file.hpp (+68, -13)
  19. src/dds/build/plan/exe.cpp (+14, -4)
  20. src/dds/build/plan/exe.hpp (+38, -4)
  21. src/dds/build/plan/full.cpp (+3, -1)
  22. src/dds/build/plan/full.hpp (+30, -3)
  23. src/dds/build/plan/library.cpp (+54, -30)
  24. src/dds/build/plan/library.hpp (+86, -17)
  25. src/dds/build/plan/package.hpp (+29, -2)
  26. src/dds/build/source_dir.hpp (+0, -18)
  27. src/dds/catalog/get.hpp (+2, -2)
  28. src/dds/compdb.cpp (+1, -1)
  29. src/dds/db/database.cpp (+6, -6)
  30. src/dds/db/database.hpp (+5, -5)
  31. src/dds/deps.cpp (+1, -2)
  32. src/dds/library/library.cpp (+3, -3)
  33. src/dds/library/library.hpp (+2, -2)
  34. src/dds/proc.win.cpp (+1, -1)
  35. src/dds/repo/repo.cpp (+22, -23)
  36. src/dds/repo/repo.hpp (+2, -2)
  37. src/dds/source/dir.cpp (+4, -8)
  38. src/dds/source/dir.hpp (+29, -0)
  39. src/dds/source/dist.cpp (+1, -1)
  40. src/dds/source/dist.hpp (+0, -0)
  41. src/dds/source/file.cpp (+1, -1)
  42. src/dds/source/file.hpp (+0, -0)
  43. src/dds/toolchain/from_dds.cpp (+7, -7)
  44. src/dds/toolchain/from_dds.test.cpp (+7, -6)
  45. src/dds/toolchain/prep.hpp (+2, -2)
  46. src/dds/toolchain/toolchain.cpp (+6, -6)
  47. src/dds/toolchain/toolchain.hpp (+2, -2)
  48. src/dds/util/algo.hpp (+1, -1)
  49. tests/test_drivers/catch/test_catch.py (+1, -1)

src/browns/md5.cpp (+0, -1)

@@ -1 +0,0 @@
#include "./md5.hpp"

src/browns/md5.hpp (+0, -186)

@@ -1,186 +0,0 @@
#pragma once

#include <neo/buffer_algorithm.hpp>
#include <neo/const_buffer.hpp>

#include <array>
#include <cassert>
#include <cstddef>

namespace browns {

class md5 {
public:
using digest_type = std::array<std::byte, 16>;

private:
// clang-format off
static constexpr std::array<std::uint32_t, 64> magic_numbers_sierra = {
7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22,
5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20,
4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23,
6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21,
};
static constexpr std::array<std::uint32_t, 64> magic_numbers_kilo = {
0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee,
0xf57c0faf, 0x4787c62a, 0xa8304613, 0xfd469501,
0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be,
0x6b901122, 0xfd987193, 0xa679438e, 0x49b40821,
0xf61e2562, 0xc040b340, 0x265e5a51, 0xe9b6c7aa,
0xd62f105d, 0x02441453, 0xd8a1e681, 0xe7d3fbc8,
0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed,
0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a,
0xfffa3942, 0x8771f681, 0x6d9d6122, 0xfde5380c,
0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70,
0x289b7ec6, 0xeaa127fa, 0xd4ef3085, 0x04881d05,
0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665,
0xf4292244, 0x432aff97, 0xab9423a7, 0xfc93a039,
0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1,
0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1,
0xf7537e82, 0xbd3af235, 0x2ad7d2bb, 0xeb86d391,
};
// clang-format on

std::array<std::uint32_t, 4> _running_digest = {
0x67452301,
0xefcdab89,
0x98badcfe,
0x10325476,
};

constexpr static std::size_t bits_per_block = 512;
constexpr static std::size_t bits_per_byte = 8;
constexpr static std::size_t bytes_per_block = bits_per_block / bits_per_byte;

using chunk_type = std::array<std::byte, bytes_per_block>;

chunk_type _pending_blocks = {};
std::size_t _write_offset = 0;
std::uint64_t _msg_length = 0;

std::size_t _num_pending_blocks = 0;

constexpr void _consume_block() noexcept {
_write_offset = 0;
std::uint32_t alpha = _running_digest[0];
std::uint32_t bravo = _running_digest[1];
std::uint32_t charlie = _running_digest[2];
std::uint32_t delta = _running_digest[3];

const std::uint32_t* data
= static_cast<const std::uint32_t*>(static_cast<const void*>(_pending_blocks.data()));
for (int idx = 0; idx < 64; ++idx) {
std::uint32_t F = 0;
std::uint32_t g = 0;
if (idx < 16) {
F = (bravo & charlie) | ((~bravo) & delta);
g = idx;
} else if (idx < 32) {
F = (delta & bravo) | ((~delta) & charlie);
g = ((5 * idx) + 1) % 16;
} else if (idx < 48) {
F = bravo ^ charlie ^ delta;
g = ((3 * idx) + 5) % 16;
} else {
F = charlie ^ (bravo | (~delta));
g = (7 * idx) % 16;
}
F = F + alpha + magic_numbers_kilo[idx] + data[g];
alpha = delta;
delta = charlie;
charlie = bravo;
bravo = bravo
+ ((F << magic_numbers_sierra[idx]) | (F >> (32 - magic_numbers_sierra[idx])));
}
_running_digest[0] += alpha;
_running_digest[1] += bravo;
_running_digest[2] += charlie;
_running_digest[3] += delta;
}

constexpr static std::byte* _le_copy(std::uint64_t n, std::byte* ptr) noexcept {
auto n_ptr = neo::byte_pointer(&n);
auto n_end = n_ptr + sizeof n;
while (n_ptr != n_end) {
*ptr++ = *n_ptr++;
}
return ptr;
}

public:
constexpr md5() = default;

constexpr void feed(neo::const_buffer buf) noexcept {
feed(buf.data(), buf.data() + buf.size());
}

template <typename Iter, typename Sent>
constexpr void feed(Iter it, const Sent s) noexcept {
using source_val_type = typename std::iterator_traits<Iter>::value_type;
static_assert(std::disjunction_v<std::is_same<source_val_type, char>,
std::is_same<source_val_type, unsigned char>,
std::is_same<source_val_type, signed char>,
std::is_same<source_val_type, std::byte>>,
"Type fed to hash must have be std::byte-sized");
while (it != s) {
auto write_head = std::next(_pending_blocks.begin(), _write_offset);
const auto write_stop = _pending_blocks.end();
while (write_head != write_stop && it != s) {
*write_head++ = static_cast<std::byte>(*it);
++it;
++_write_offset;
_msg_length += 1;
}

if (write_head == write_stop) {
_consume_block();
}
}
}

constexpr void pad() noexcept {
// Length is recorded in bits
const std::uint64_t len_nbits = _msg_length * 8;

// Calc how many bytes of padding need to be inserted
std::size_t n_to_next_boundary = bytes_per_block - _write_offset;
if (n_to_next_boundary < sizeof(len_nbits)) {
// We don't have enough room to the next boundary for the message
// length. Go another entire block of padding
n_to_next_boundary += bytes_per_block;
}

// Create an array for padding.
std::array<std::byte, bytes_per_block * 2> pad = {};
// Set the lead bit, as per spec
pad[0] = std::byte{0b1000'0000};

// Calc how many bytes from our pad object we should copy. We need
// to leave room at the end for the message length integer.
std::size_t n_to_copy = n_to_next_boundary - sizeof(_msg_length);

// Feed the padding
feed(neo::const_buffer(pad.data(), n_to_copy));
// Now feed the message length integer
feed(neo::const_buffer(neo::byte_pointer(&len_nbits), sizeof(len_nbits)));

assert(_write_offset == 0);
}

constexpr digest_type digest() const noexcept {
if (_write_offset != 0) {
assert(false && "Requested digest of incomplete md5. Be sure you called pad()!");
std::terminate();
}
digest_type ret = {};
auto data = neo::byte_pointer(_running_digest.data());
auto dest = ret.begin();
auto dest_end = ret.end();
while (dest != dest_end) {
*dest++ = *data++;
}
return ret;
}
};

} // namespace browns
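
The removed test file below exercises this class; for quick reference, here is a minimal usage sketch of the intended call order (feed, then pad, then digest), using the well-known MD5 test vector that also appears in the tests:

#include <browns/md5.hpp>
#include <browns/output.hpp>

#include <neo/as_buffer.hpp>

#include <iostream>
#include <string_view>

int main() {
    browns::md5 hash;
    std::string_view msg = "The quick brown fox jumps over the lazy dog";
    hash.feed(neo::as_buffer(msg));
    // pad() must be called before digest(); the assert in digest() enforces this.
    hash.pad();
    std::cout << browns::format_digest(hash.digest()) << '\n';
    // Prints: 9e107d9d372bb6826bd81d3542a419d6
}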

src/browns/md5.test.cpp (+0, -30)

@@ -1,30 +0,0 @@
#include <browns/md5.hpp>

#include <browns/output.hpp>
#include <neo/as_buffer.hpp>

#include <catch2/catch.hpp>

#include <iostream>

auto md5_hash_str(std::string_view s) {
browns::md5 hash;
hash.feed(neo::as_buffer(s));
hash.pad();
return browns::format_digest(hash.digest());
}

void check_hash(std::string_view str, std::string_view digest) {
INFO("Hashed string: " << str);
CHECK(md5_hash_str(str) == digest);
}

TEST_CASE("Known hashes") {
check_hash("1234abcd1234abcd1234abcd1234abcd", "67aa636d72b967157c363f0acdf7011b");
check_hash("The quick brown fox jumps over the lazy dog", "9e107d9d372bb6826bd81d3542a419d6");
check_hash("", "d41d8cd98f00b204e9800998ecf8427e");
check_hash(
"WWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWWW",
"967ce152b23edc20ebd23b7eba57277c");
check_hash("WWWWWWWWWWWWWWWWWWWWWWWWWWWWW", "9aff577d3248b8889b22f24ee9665c17");
}

src/browns/output.hpp (+0, -72)

@@ -1,72 +0,0 @@
#pragma once

#include <array>
#include <cstddef>
#include <string>
#include <string_view>

namespace browns {

template <std::size_t N>
std::string format_digest(const std::array<std::byte, N>& dig) {
std::string ret;
ret.resize(N * 2);
for (std::size_t pos = 0; pos < N; ++pos) {
std::byte b = dig[pos];
char& c1 = ret[pos * 2];
char& c2 = ret[pos * 2 + 1];
const char tab[17] = "0123456789abcdef";
auto high = (static_cast<char>(b) & 0xf0) >> 4;
auto low = (static_cast<char>(b) & 0x0f);
c1 = tab[high];
c2 = tab[low];
}
return ret;
}

template <typename Digest>
Digest parse_digest(std::string_view str) {
Digest ret;
std::byte* out_ptr = ret.data();
std::byte* const out_end = out_ptr + ret.size();
auto str_ptr = str.begin();
const auto str_end = str.end();

auto invalid = [&] { //
throw std::runtime_error("Invalid hash digest string: " + std::string(str));
};

auto nibble = [&](char c) -> std::byte {
if (c >= 'A' && c <= 'F') {
c = static_cast<char>(c + ('a' - 'A'));
}
std::byte nib{0};
if (c >= '0' && c <= '9') {
nib = std::byte(c - '0');
} else if (c >= 'a' && c <= 'f') {
nib = std::byte(c - 'a');
} else {
invalid();
}
return nib;
};

// We must have an even number of chars to form full octets
if (str.size() % 2) {
invalid();
}

while (str_ptr != str_end && out_ptr != out_end) {
std::byte high = nibble(*str_ptr++);
std::byte low = nibble(*str_ptr++);
std::byte octet = (high << 4) | low;
*out_ptr++ = octet;
}

if (str_ptr != str_end || out_ptr != out_end) {
invalid();
}
return ret;
}

} // namespace browns

src/dds/dds.main.cpp → src/dds.main.cpp

@@ -2,7 +2,7 @@
#include <dds/catalog/catalog.hpp>
#include <dds/catalog/get.hpp>
#include <dds/repo/repo.hpp>
#include <dds/sdist.hpp>
#include <dds/source/dist.hpp>
#include <dds/toolchain/from_dds.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/paths.hpp>

src/dds/build.cpp (+4, -4)

@@ -244,10 +244,10 @@ void add_sdist_to_build(build_plan& plan,
}
}

void add_deps_to_build(build_plan& plan,
usage_requirement_map& ureqs,
const build_params& params,
build_env_ref env) {
void add_deps_to_build(build_plan& plan,
usage_requirement_map& ureqs,
const build_params& params,
build_env_ref env) {
auto sd_idx = params.dep_sdists //
| ranges::views::transform([](const auto& sd) {
return std::pair(sd.manifest.pkg_id.name, std::cref(sd));

src/dds/build/deps.hpp (+0, -46)

@@ -1,46 +0,0 @@
#pragma once

#include <dds/util/fs.hpp>

#include <string>
#include <vector>
#include <string_view>

namespace dds {

enum class deps_mode {
none,
msvc,
gnu,
};

struct deps_info {
fs::path output;
std::vector<fs::path> inputs;
std::string command;
std::string command_output;
};

class database;

deps_info parse_mkfile_deps_file(path_ref where);
deps_info parse_mkfile_deps_str(std::string_view str);

struct msvc_deps_info {
struct deps_info deps_info;
std::string cleaned_output;
};

msvc_deps_info parse_msvc_output_for_deps(std::string_view output, std::string_view leader);

void update_deps_info(database& db, const deps_info&);

struct deps_rebuild_info {
std::vector<fs::path> newer_inputs;
std::string previous_command;
std::string previous_command_output;
};

deps_rebuild_info get_rebuild_info(database& db, path_ref output_path);

} // namespace dds

src/dds/build/deps.cpp → src/dds/build/file_deps.cpp

@@ -1,4 +1,4 @@
#include "./deps.hpp"
#include "./file_deps.hpp"

#include <dds/db/database.hpp>
#include <dds/proc.hpp>
@@ -11,13 +11,13 @@

using namespace dds;

deps_info dds::parse_mkfile_deps_file(path_ref where) {
file_deps_info dds::parse_mkfile_deps_file(path_ref where) {
auto content = slurp_file(where);
return parse_mkfile_deps_str(content);
}

deps_info dds::parse_mkfile_deps_str(std::string_view str) {
deps_info ret;
file_deps_info dds::parse_mkfile_deps_str(std::string_view str) {
file_deps_info ret;

// Remove escaped newlines
auto no_newlines = replace(str, "\\\n", " ");
@@ -45,9 +45,9 @@ deps_info dds::parse_mkfile_deps_str(std::string_view str) {
}

msvc_deps_info dds::parse_msvc_output_for_deps(std::string_view output, std::string_view leader) {
auto lines = split_view(output, "\n");
std::string cleaned_output;
deps_info deps;
auto lines = split_view(output, "\n");
std::string cleaned_output;
file_deps_info deps;
for (const auto full_line : lines) {
auto trimmed = trim_view(full_line);
if (!starts_with(trimmed, leader)) {
@@ -65,7 +65,7 @@ msvc_deps_info dds::parse_msvc_output_for_deps(std::string_view output, std::str
return {deps, cleaned_output};
}

void dds::update_deps_info(database& db, const deps_info& deps) {
void dds::update_deps_info(database& db, const file_deps_info& deps) {
db.store_file_command(deps.output, {deps.command, deps.command_output});
db.forget_inputs_of(deps.output);
for (auto&& inp : deps.inputs) {
@@ -74,7 +74,7 @@ void dds::update_deps_info(database& db, const deps_info& deps) {
}
}

deps_rebuild_info dds::get_rebuild_info(database& db, path_ref output_path) {
deps_rebuild_info dds::get_rebuild_info(const database& db, path_ref output_path) {
std::unique_lock lk{db.mutex()};
auto cmd_ = db.command_of(output_path);
if (!cmd_) {

src/dds/build/file_deps.hpp (+142, -0)

@@ -0,0 +1,142 @@
#pragma once

/**
* The `file_deps` module implements the interdependencies of input files to their outputs, as well
* as the command that was used to generate that output from the inputs.
*
* For a given output, there is exactly one command that was used to generate it, and some non-zero
* number of input relations. A single input relation encapsulates the path to that input as well as
* the file modification time at which that input was used. The modification times are specifically
* stored on the input relation, and not associated with the input file itself, as more than one
* output may make use of a single input, and each output will need to keep track of the
* outdated-ness of its inputs separately.
*
* A toolchain has an associated `file_deps_mode`, which can be deduced from the Compiler-ID. The
* three dependency modes are:
*
* 1. None - No dependency tracking takes place.
* 2. GNU-Style - Dependencies are tracked using Clang and GCC's -M flags, which write a
* Makefile-syntax file which contains the dependencies of the file that is being compiled. This
* file is generated at the same time that the primary output is generated, and does not occur in a
* pre-compile dependency pass.
* 3. MSVC-Style - Dependencies are tracked using the cl.exe /showIncludes flag, which writes the
* path of every file that is read by the preprocessor to the compiler's output. This also happens
* at the same time as main compilation, and does not require a pre-scan pass. Unfortunately, MSVC
* localizes this string, so we cannot properly track dependencies without knowing what language it
* will emit beforehand. At the moment, we implement dependency tracking for English, but providing
* other languages is not difficult.
*/

#include <dds/util/fs.hpp>

#include <string>
#include <string_view>
#include <vector>

namespace dds {

/**
* The mode in which we can scan for compilation dependencies.
*/
enum class file_deps_mode {
/// Disable dependency tracking
none,
/// Track dependencies using MSVC semantics
msvc,
/// Track dependencies using GNU-style generated-Makefile semantics
gnu,
};

/**
* The result of performing a dependency scan. A simple aggregate type.
*/
struct file_deps_info {
/**
* The primary output path.
*/
fs::path output;
/**
* The paths to each input
*/
std::vector<fs::path> inputs;
/**
* The command that was used to generate the output
*/
std::string command;
/**
* The output of the command.
*/
std::string command_output;
};

class database;

/**
* Parse a compiler-generated Makefile that contains dependency information.
* @see `parse_mkfile_deps_str`
*/
file_deps_info parse_mkfile_deps_file(path_ref where);

/**
* Parse a Makefile-syntax string containing compile-generated dependency
* information.
* @param str A Makefile-syntax string that will be parsed.
* @note The returned `file_deps_info` object will only have the `output` and
* `inputs` fields filled in, as the other parameters cannot be deduced from
* the Makefile. It is on the caller to fill these fields before passing them
* to `update_deps_info`
*/
file_deps_info parse_mkfile_deps_str(std::string_view str);

/**
* The result of parsing MSVC output for dependencies
*/
struct msvc_deps_info {
/// The actual dependency information
file_deps_info deps_info;
/// The output from the MSVC compiler that has had the dependency information removed.
std::string cleaned_output;
};

/**
* Parse the output of the CL.exe compiler for file dependencies.
* @param output The output from `cl.exe` that has had the /showIncludes flag set
* @param leader The text prefix for each line that contains a dependency.
* @note The returned `file_deps_info` object only has the `inputs` field set, and does not
* include the primary input to the compiler. It is up to the caller to add the necessary fields and
* values.
* @note The `leader` parameter is localized depending on the language that `cl.exe` will use. In
* English, this string is `Note: including file:`. If a line begins with this string, the remainder
* of the line will be assumed to be a path to the file that the preprocessor read while compiling.
* If the `leader` string does not match the language that `cl.exe` emits, then this parsing will
* not see any of these notes, no dependencies will be seen, and the `cleaned_output` field in the
* return value will still contain the /showIncludes notes.
*/
msvc_deps_info parse_msvc_output_for_deps(std::string_view output, std::string_view leader);

/**
* Update the dependency information in the build database for later reference via
* `get_rebuild_info`.
* @param db The database to update
* @param info The dependency information to store
*/
void update_deps_info(database& db, const file_deps_info& info);

/**
* The information that is pertinent to the rebuild of a file. This will contain a list of inputs
* that have a newer mtime than we have recorded, and the previous command and previous command
* output that we have stored.
*/
struct deps_rebuild_info {
std::vector<fs::path> newer_inputs;
std::string previous_command;
std::string previous_command_output;
};

/**
* Given the path to an output file, read all the dependency information from the database. If the
* given output has never been recorded, then the resulting object will be empty.
*/
deps_rebuild_info get_rebuild_info(const database& db, path_ref output_path);

} // namespace dds
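
To make the GNU-style mode above concrete, here is a small, illustrative sketch of how a compiler-emitted Makefile fragment maps onto `file_deps_info` (the deps string and paths are made up; as the notes above say, only `output` and `inputs` are filled in, and the caller supplies `command` and `command_output` before calling `update_deps_info`):

#include <dds/build/file_deps.hpp>

#include <cassert>

int main() {
    // A typical fragment written by `gcc -MD`/`clang -MD`: the object file,
    // a colon, then the source file and every header it included.
    auto deps = dds::parse_mkfile_deps_str(
        "obj/foo.o: src/foo.cpp \\\n include/foo.hpp include/bar.hpp");

    assert(deps.output == "obj/foo.o");
    assert(deps.inputs.size() == 3);
    // deps.command and deps.command_output are still empty at this point.
}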

src/dds/build/deps.test.cpp → src/dds/build/file_deps.test.cpp

@@ -1,4 +1,4 @@
#include <dds/build/deps.hpp>
#include <dds/build/file_deps.hpp>

#include <catch2/catch.hpp>

@@ -42,9 +42,9 @@ Something else
CHECK(new_output == "\nOther line\n indented line\nSomething else\n");
CHECK(deps.inputs
== std::vector<dds::fs::path>({
"C:\\foo\\bar\\filepath/thing.hpp",
"C:\\foo\\bar\\filepath/baz.h",
"C:\\foo\\bar\\filepath/quux.h",
"C:\\foo\\bar\\filepath/cats/quux.h",
}));
"C:\\foo\\bar\\filepath/thing.hpp",
"C:\\foo\\bar\\filepath/baz.h",
"C:\\foo\\bar\\filepath/quux.h",
"C:\\foo\\bar\\filepath/cats/quux.h",
}));
}

src/dds/build/iter_compilations.hpp (+6, -0)

@@ -9,6 +9,9 @@

namespace dds {

/**
* Iterate over every library defined as part of the build plan
*/
inline auto iter_libraries(const build_plan& plan) {
return //
plan.packages() //
@@ -17,6 +20,9 @@ inline auto iter_libraries(const build_plan& plan) {
;
}

/**
* Return a range iterating over every file compilation defined in the given build plan
*/
inline auto iter_compilations(const build_plan& plan) {
auto lib_compiles = //
iter_libraries(plan) //

src/dds/build/params.hpp (+1, -1)

@@ -1,8 +1,8 @@
#pragma once

#include <dds/source/dist.hpp>
#include <dds/toolchain/toolchain.hpp>
#include <dds/util/fs.hpp>
#include <dds/sdist.hpp>

#include <optional>


src/dds/build/plan/archive.cpp (+16, -4)

@@ -14,25 +14,37 @@ fs::path create_archive_plan::calc_archive_file_path(const build_env& env) const
}

void create_archive_plan::archive(const build_env& env) const {
// Convert the file compilation plans into the paths to their respective object files.
const auto objects = //
_compile_files //
| ranges::views::transform([&](auto&& cf) { return cf.calc_object_file_path(env); })
| ranges::to_vector //
;
// Build up the archive command
archive_spec ar;
ar.input_files = std::move(objects);
ar.out_path = calc_archive_file_path(env);
auto ar_cmd = env.toolchain.create_archive_command(ar);
ar.input_files = std::move(objects);
ar.out_path = calc_archive_file_path(env);
auto ar_cmd = env.toolchain.create_archive_command(ar);

// `out_relpath` is purely for the benefit of the user to have a short name
// in the logs
auto out_relpath = fs::relative(ar.out_path, env.output_root).string();

// Different archiving tools behave differently between platforms depending on whether the
// archive file exists. Make it uniform by simply removing the prior copy.
if (fs::exists(ar.out_path)) {
fs::remove(ar.out_path);
}

spdlog::info("[{}] Archive: {}", _name, out_relpath);
// Ensure the parent directory exists
fs::create_directories(ar.out_path.parent_path());

// Do it!
spdlog::info("[{}] Archive: {}", _name, out_relpath);
auto&& [dur_ms, ar_res] = timed<std::chrono::milliseconds>([&] { return run_proc(ar_cmd); });
spdlog::info("[{}] Archive: {} - {:n}ms", _name, out_relpath, dur_ms.count());

// Check, log, and throw
if (!ar_res.okay()) {
spdlog::error("Creating static library archive failed: {}", out_relpath);
spdlog::error("Subcommand FAILED: {}\n{}", quote_command(ar_cmd), ar_res.output);

src/dds/build/plan/archive.hpp (+41, -3)

@@ -4,23 +4,61 @@
#include <dds/util/fs.hpp>

#include <string>
#include <string_view>

namespace dds {

/**
* Represents the intention to create a library archive. This also contains
* the compile plans for individual files.
*
* This is distinct from `library_plan`, because this corresponds to an actual
* static library and its compiled source files.
*/
class create_archive_plan {
std::string _name;
fs::path _subdir;
/// The name of the archive. Not the filename, but the base name thereof
std::string _name;
/// The subdirectory in which the archive should be generated.
fs::path _subdir;
/// The plans for compiling the constituent source files of this library
std::vector<compile_file_plan> _compile_files;

public:
/**
* Construct an archive plan.
* @param name The name of the archive
* @param subdir The subdirectory in which the archive and its object files
* will be placed
* @param cfs The file compilation plans that will be collected together to
* form the static library.
*/
create_archive_plan(std::string_view name, path_ref subdir, std::vector<compile_file_plan> cfs)
: _name(name)
, _subdir(subdir)
, _compile_files(std::move(cfs)) {}

/**
* Get the name of the archive library.
*/
const std::string& name() const noexcept { return _name; }

/**
* Calculate the absolute path where the generated archive library file will
* be generated after execution.
* @param env The build environment for the archival.
*/
fs::path calc_archive_file_path(build_env_ref env) const noexcept;
auto& compile_files() const noexcept { return _compile_files; }

/**
* Get the compilation plans for this library.
*/
auto& compile_files() const noexcept { return _compile_files; }

/**
* Perform the actual archive generation. Expects all compilations to have
* completed.
* @param env The build environment for the archival.
*/
void archive(build_env_ref env) const;
};


src/dds/build/plan/compile_exec.cpp (+64, -26)

@@ -1,6 +1,6 @@
#include "./compile_exec.hpp"

#include <dds/build/deps.hpp>
#include <dds/build/file_deps.hpp>
#include <dds/proc.hpp>
#include <dds/util/string.hpp>
#include <dds/util/time.hpp>
@@ -72,29 +72,42 @@ bool parallel_run(Range&& rng, int n_jobs, Fn&& fn) {
return exceptions.empty();
}

/// The actual "real" information that we need to perform a compilation.
struct compile_file_full {
const compile_file_plan& plan;
fs::path object_file_path;
compile_command_info cmd_info;
};

/// Simple aggregate that stores a counter for keeping track of compile progress
struct compile_counter {
std::atomic_size_t n;
const std::size_t max;
const std::size_t max_digits;
};

std::optional<deps_info>
/**
* Actually performs a compilation and collects deps information from that compilation
*
* @param cf The compilation to execute
* @param env The build environment
* @param counter A thread-safe counter for displaying progress to the user
*/
std::optional<file_deps_info>
do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& counter) {
// Create the parent directory
fs::create_directories(cf.object_file_path.parent_path());

// Generate a log message to display to the user
auto source_path = cf.plan.source_path();
auto msg = fmt::format("[{}] Compile: {}",
cf.plan.qualifier(),
fs::relative(source_path, cf.plan.source().basis_path).string());

// Do it!
spdlog::info(msg);
auto&& [dur_ms, proc_res]
= timed<std::chrono::milliseconds>([&] { return run_proc(cf.cmd_info.command); });

auto nth = counter.n.fetch_add(1);
spdlog::info("{:60} - {:>7n}ms [{:{}}/{}]",
msg,
@@ -108,16 +121,17 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
const auto compile_signal = proc_res.signal;
std::string compiler_output = std::move(proc_res.output);

std::optional<deps_info> ret_deps_info;
// Build dependency information, if applicable to the toolchain
std::optional<file_deps_info> ret_deps_info;

if (env.toolchain.deps_mode() == deps_mode::gnu) {
if (env.toolchain.deps_mode() == file_deps_mode::gnu) {
// GNU-style deps using Makefile generation
assert(cf.cmd_info.gnu_depfile_path.has_value());
auto& df_path = *cf.cmd_info.gnu_depfile_path;
if (!fs::is_regular_file(df_path)) {
spdlog::critical(
"The expected Makefile deps were not generated on disk. This is a bug! "
"(Expected "
"file to exist: [{}])",
"(Expected file to exist: [{}])",
df_path.string());
} else {
auto dep_info = dds::parse_mkfile_deps_file(df_path);
@@ -126,17 +140,26 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
dep_info.command_output = compiler_output;
ret_deps_info = std::move(dep_info);
}
} else if (env.toolchain.deps_mode() == deps_mode::msvc) {
} else if (env.toolchain.deps_mode() == file_deps_mode::msvc) {
// Uglier deps generation by parsing the output from cl.exe
/// TODO: Handle different #include Note: prefixes, since those are localized
auto msvc_deps = parse_msvc_output_for_deps(compiler_output, "Note: including file:");
msvc_deps.deps_info.inputs.push_back(cf.plan.source_path());
msvc_deps.deps_info.output = cf.object_file_path;
msvc_deps.deps_info.command = quote_command(cf.cmd_info.command);
msvc_deps.deps_info.command_output = msvc_deps.cleaned_output;
ret_deps_info = std::move(msvc_deps.deps_info);
compiler_output = std::move(msvc_deps.cleaned_output);
// parse_msvc_output_for_deps will return the compile output without the /showIncludes notes
compiler_output = std::move(msvc_deps.cleaned_output);
// Only update deps if we actually parsed something; otherwise we can't be sure the parse
// succeeded, and we don't want to store garbage deps info and possibly cause a miscompile
if (!msvc_deps.deps_info.inputs.empty()) {
// Add the main source file as an input, since it is not listed by /showIncludes
msvc_deps.deps_info.inputs.push_back(cf.plan.source_path());
msvc_deps.deps_info.output = cf.object_file_path;
msvc_deps.deps_info.command = quote_command(cf.cmd_info.command);
msvc_deps.deps_info.command_output = compiler_output;
ret_deps_info = std::move(msvc_deps.deps_info);
}
}

// MSVC prints the filename of the source file. Dunno why, but they do.
// MSVC prints the filename of the source file. Remove it from the output.
if (compiler_output.find(source_path.filename().string()) == 0) {
compiler_output.erase(0, source_path.filename().string().length());
if (starts_with(compiler_output, "\r")) {
@@ -147,6 +170,7 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
}
}

// Log a compiler failure
if (!compiled_okay) {
spdlog::error("Compilation failed: {}", source_path.string());
spdlog::error("Subcommand FAILED [Exitted {}]: {}\n{}",
@@ -159,6 +183,7 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
throw compile_failure(fmt::format("Compilation failed for {}", source_path.string()));
}

// Print any compiler output, sans whitespace
if (!dds::trim_view(compiler_output).empty()) {
spdlog::warn("While compiling file {} [{}]:\n{}",
source_path.string(),
@@ -166,24 +191,27 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
compiler_output);
}

// We must always generate deps info if it was possible:
// We'll only get here if the compilation was successful, otherwise we throw
assert(compiled_okay);
assert(ret_deps_info.has_value() || env.toolchain.deps_mode() == deps_mode::none);
return ret_deps_info;
}

/// Generate the full compile command information from an abstract plan
compile_file_full realize_plan(const compile_file_plan& plan, build_env_ref env) {
auto cmd_info = plan.generate_compile_command(env);
return compile_file_full{plan, plan.calc_object_file_path(env), cmd_info};
}

bool should_compile(const compile_file_full& comp, build_env_ref env) {
/**
* Determine if the given compile command should actually be executed based on
* the dependency information we have recorded in the database.
*/
bool should_compile(const compile_file_full& comp, const database& db) {
if (!fs::exists(comp.object_file_path)) {
// The output file simply doesn't exist. We have to recompile, of course.
return true;
}
database& db = env.db;
auto rb_info = get_rebuild_info(db, comp.object_file_path);
auto rb_info = get_rebuild_info(db, comp.object_file_path);
if (rb_info.previous_command.empty()) {
// We have no previous compile command for this file. Assume it is new.
return true;
@@ -206,18 +234,25 @@ bool should_compile(const compile_file_full& comp, build_env_ref env) {
bool dds::detail::compile_all(const ref_vector<const compile_file_plan>& compiles,
build_env_ref env,
int njobs) {
auto each_realized = //
compiles //
| views::transform([&](auto&& plan) { return realize_plan(plan, env); }) //
| views::filter([&](auto&& real) { return should_compile(real, env); }) //
auto each_realized = //
compiles
// Convert each _plan_ into a concrete object for compiler invocation.
| views::transform([&](auto&& plan) { return realize_plan(plan, env); })
// Filter out compile jobs that we don't need to run. This drops compilations where the
// output is "up-to-date" based on its inputs.
| views::filter([&](auto&& real) { return should_compile(real, env.db); })
// Convert to a real vector so we can ask its size.
| ranges::to_vector;

// Keep a counter to display progress to the user.
const auto total = each_realized.size();
const auto max_digits = fmt::format("{}", total).size();
compile_counter counter{{1}, total, max_digits};

std::vector<deps_info> all_new_deps;
std::mutex mut;
// As we execute, accumulate new dependency information from successful compilations
std::vector<file_deps_info> all_new_deps;
std::mutex mut;
// Do it!
auto okay = parallel_run(each_realized, njobs, [&](const compile_file_full& full) {
auto new_dep = do_compile(full, env, counter);
if (new_dep) {
@@ -226,9 +261,12 @@ bool dds::detail::compile_all(const ref_vector<const compile_file_plan>& compile
}
});

// Update compile dependency information
auto tr = env.db.transaction();
for (auto& info : all_new_deps) {
update_deps_info(env.db, info);
}

// Return whether or not there were any failures.
return okay;
}

src/dds/build/plan/compile_exec.hpp (+8, -0)

@@ -15,6 +15,14 @@ bool compile_all(const ref_vector<const compile_file_plan>& files, build_env_ref

} // namespace detail

/**
* Compiles all files in the given range of `compile_file_plan`. Uses as much
* parallelism as specified by `njobs`.
* @param rng The file compilation plans to execute
* @param env The build environment in which the compilations will execute
* @param njobs The maximum number of parallel compilations to execute at once.
* @returns `true` if all compilations were successful, `false` otherwise.
*/
template <typename Range>
bool compile_all(Range&& rng, build_env_ref env, int njobs) {
ref_vector<const compile_file_plan> cfps;
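
As a usage note, the range-based overload documented above composes with `iter_compilations` from earlier in this change; a minimal sketch (the plan, environment, and job count are assumed to exist elsewhere):

#include <dds/build/iter_compilations.hpp>
#include <dds/build/plan/compile_exec.hpp>

// Compile every file in the plan with up to `njobs` parallel compiler
// processes; returns false if any compilation failed.
bool compile_everything(const dds::build_plan& plan, dds::build_env_ref env, int njobs) {
    return dds::compile_all(dds::iter_compilations(plan), env, njobs);
}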

src/dds/build/plan/compile_file.cpp (+3, -1)

@@ -21,8 +21,10 @@ compile_command_info compile_file_plan::generate_compile_command(build_env_ref e
}

fs::path compile_file_plan::calc_object_file_path(const build_env& env) const noexcept {
// `relpath` is just the path from the root of the source directory to the source file.
auto relpath = fs::relative(_source.path, _source.basis_path);
auto ret = env.output_root / _subdir / relpath;
// The full output directory is prefixed by `_subdir`
auto ret = env.output_root / _subdir / relpath;
ret.replace_filename(relpath.filename().string() + env.toolchain.object_suffix());
return fs::weakly_canonical(ret);
}

src/dds/build/plan/compile_file.hpp (+68, -13)

@@ -1,51 +1,88 @@
#pragma once

#include <dds/build/plan/base.hpp>
#include <dds/source.hpp>
#include <dds/source/file.hpp>

#include <memory>

namespace dds {

/**
* Exception thrown to indicate a compile failure
*/
struct compile_failure : std::runtime_error {
using runtime_error::runtime_error;
};

/**
* Because we may have many files in a library, we store base file compilation
* parameters in a single object that implements shared semantics. Copying the
* object is cheap, and updates propagate between all copies. Distinct copies
* can be made via the `clone()` method.
*/
class shared_compile_file_rules {
/// The attributes we track
struct rules_impl {
std::vector<fs::path> inc_dirs;
std::vector<std::string> defs;
bool enable_warnings = false;
};

/// The actual PIMPL.
std::shared_ptr<rules_impl> _impl = std::make_shared<rules_impl>();

public:
shared_compile_file_rules() = default;

/**
* Create a detached copy of these rules. Updates to the copy do not affect the original.
*/
auto clone() const noexcept {
auto cp = *this;
cp._impl = std::make_shared<rules_impl>(*_impl);
return cp;
}

/**
* Access the include directories for these rules
*/
auto& include_dirs() noexcept { return _impl->inc_dirs; }
auto& include_dirs() const noexcept { return _impl->inc_dirs; }

/**
* Access the preprocessor definitions for these rules
*/
auto& defs() noexcept { return _impl->defs; }
auto& defs() const noexcept { return _impl->defs; }

/**
* A boolean to toggle compile warnings for the associated compiles
*/
auto& enable_warnings() noexcept { return _impl->enable_warnings; }
auto& enable_warnings() const noexcept { return _impl->enable_warnings; }
};

/**
* Represents the parameters to compile an individual file. This includes the
* original source file path, and the shared compile rules as defined
* by `shared_compile_file_rules`.
*/
class compile_file_plan {
/// The shared rules
shared_compile_file_rules _rules;
source_file _source;
std::string _qualifier;
fs::path _subdir;
/// The source file object that we are compiling
source_file _source;
/// A "qualifier" to be shown in log messages (not otherwise significant)
std::string _qualifier;
/// The subdirectory in which the object file will be generated
fs::path _subdir;

public:
/**
* Create a new instance.
* @param rules The base compile rules
* @param sf The source file that will be compiled
* @param qual An arbitrary qualifier for the source file, shown in log output
* @param subdir The subdirectory where the object file will be generated
*/
compile_file_plan(shared_compile_file_rules rules,
source_file sf,
std::string_view qual,
@@ -55,14 +92,32 @@ public:
, _qualifier(qual)
, _subdir(subdir) {}

/**
* The `source_file` object for this plan.
*/
const source_file& source() const noexcept { return _source; }
path_ref source_path() const noexcept { return _source.path; }
auto& rules() const noexcept { return _rules; }
auto& qualifier() const noexcept { return _qualifier; }

fs::path calc_object_file_path(build_env_ref env) const noexcept;
compile_command_info generate_compile_command(build_env_ref) const noexcept;
std::optional<deps_info> compile(build_env_ref) const;
/**
* The path to the source file
*/
path_ref source_path() const noexcept { return _source.path; }
/**
* The shared rules for this compilation
*/
auto& rules() const noexcept { return _rules; }
/**
* The arbitrary qualifier for this compilation
*/
auto& qualifier() const noexcept { return _qualifier; }

/**
* Generate the path that will be the destination of this compile output
*/
fs::path calc_object_file_path(build_env_ref env) const noexcept;
/**
* Generate a concrete compile command object for this source file for the given build
* environment.
*/
compile_command_info generate_compile_command(build_env_ref) const noexcept;
};

} // namespace dds
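
A short sketch of the shared/`clone()` semantics described above (the include path and definitions are arbitrary):

#include <dds/build/plan/compile_file.hpp>

int main() {
    dds::shared_compile_file_rules base;
    base.include_dirs().push_back("include");
    base.defs().push_back("FOO=1");

    // Plain copies share state: a change made through `alias` is also
    // visible through `base`.
    auto alias = base;
    alias.enable_warnings() = true;

    // clone() makes a detached copy: further edits do not propagate back.
    auto detached = base.clone();
    detached.defs().push_back("TESTING=1");
    // base.defs() still contains only "FOO=1".
}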

src/dds/build/plan/exe.cpp (+14, -4)

@@ -16,21 +16,29 @@ fs::path link_executable_plan::calc_executable_path(build_env_ref env) const noe
}

void link_executable_plan::link(build_env_ref env, const library_plan& lib) const {
const auto out_path = calc_executable_path(env);

// Build up the link command
link_exe_spec spec;
spec.output = out_path;
spec.output = calc_executable_path(env);
spec.inputs = _input_libs;
if (lib.create_archive()) {
// The associated library has compiled components. Add the static library as a linker
// input
spec.inputs.push_back(lib.create_archive()->calc_archive_file_path(env));
}

// The main object should be a linker input, of course.
auto main_obj = _main_compile.calc_object_file_path(env);
spec.inputs.push_back(std::move(main_obj));

// Linker inputs are order-dependent in some cases. The top-most input should appear first, and
// its dependencies should appear later. Because of the way inputs were generated, they appear
// sorted with the dependencies coming earlier than the dependees. We can simply reverse the
// order and linking will work.
std::reverse(spec.inputs.begin(), spec.inputs.end());

// Do it!
const auto link_command = env.toolchain.create_link_executable_command(spec);
fs::create_directories(out_path.parent_path());
fs::create_directories(spec.output.parent_path());
auto msg = fmt::format("[{}] Link: {:30}",
lib.name(),
fs::relative(spec.output, env.output_root).string());
@@ -38,6 +46,8 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons
auto [dur_ms, proc_res]
= timed<std::chrono::milliseconds>([&] { return run_proc(link_command); });
spdlog::info("{} - {:>6n}ms", msg, dur_ms.count());

// Check and throw if errant
if (!proc_res.okay()) {
throw compile_failure(
fmt::format("Failed to link test executable '{}'. Link command [{}] returned {}:\n{}",

src/dds/build/plan/exe.hpp (+38, -4)

@@ -10,19 +10,37 @@ namespace dds {

class library_plan;

/**
* Represents information about a test failure.
*/
struct test_failure {
fs::path executable_path;
std::string output;
int retc;
};

/**
* Stores information about an executable that should be linked. An executable in DDS consists of a
* single source file that defines the entry point and some set of linker inputs.
*/
class link_executable_plan {
/// The linker inputs that should be linked into the executable
std::vector<fs::path> _input_libs;
compile_file_plan _main_compile;
fs::path _out_subdir;
std::string _name;
/// The compilation plan for the entry-point source file
compile_file_plan _main_compile;
/// The subdirectory in which the executable should be generated
fs::path _out_subdir;
/// The name of the executable
std::string _name;

public:
/**
* Create a new instance
* @param in_libs Linker inputs for the executable
* @param cfp The file compilation that defines the entrypoint of the application
* @param out_subdir The subdirectory of the build root in which the executable should be placed
* @param name_ The name of the executable
*/
link_executable_plan(std::vector<fs::path> in_libs,
compile_file_plan cfp,
path_ref out_subdir,
@@ -32,11 +50,27 @@ public:
, _out_subdir(out_subdir)
, _name(std::move(name_)) {}

/**
* Get the compilation of the main source file
*/
auto& main_compile_file() const noexcept { return _main_compile; }

/**
* Calculate the output path of the executable for the given build environment
*/
fs::path calc_executable_path(const build_env& env) const noexcept;

void link(const build_env&, const library_plan&) const;
/**
* Perform the link of the executable
* @param env The build environment to use.
* @param lib The library that owns this executable. If it defines an archive library, it will
* be added as a linker input.
*/
void link(const build_env& env, const library_plan& lib) const;
/**
* Run the executable as a test. If the test fails, then that failure information will be
* returned.
*/
std::optional<test_failure> run_test(build_env_ref) const;

bool is_test() const noexcept;

src/dds/build/plan/full.cpp (+3, -1)

@@ -17,6 +17,7 @@ using namespace dds;

namespace {

/// XXX: Duplicated in compile_exec.cpp !!
template <typename Range, typename Fn>
bool parallel_run(Range&& rng, int n_jobs, Fn&& fn) {
// We don't bother with a nice thread pool, as the overhead of most build
@@ -91,10 +92,10 @@ void build_plan::archive_all(const build_env& env, int njobs) const {
}

void build_plan::link_all(const build_env& env, int njobs) const {
// Generate a pairing between executables and the libraries that own them
std::vector<std::pair<std::reference_wrapper<const library_plan>,
std::reference_wrapper<const link_executable_plan>>>
executables;

for (auto&& lib : iter_libraries(*this)) {
for (auto&& exe : lib.executables()) {
executables.emplace_back(lib, exe);
@@ -112,6 +113,7 @@ void build_plan::link_all(const build_env& env, int njobs) const {

std::vector<test_failure> build_plan::run_all_tests(build_env_ref env, int njobs) const {
using namespace ranges::views;
// Collect executables that are tests
auto test_executables = //
iter_libraries(*this) //
| transform(&library_plan::executables) //

src/dds/build/plan/full.hpp (+30, -3)

@@ -5,18 +5,45 @@

namespace dds {

/**
* Encompasses an entire build plan.
*
* A build plan consists of some number of packages, defined as `package_plan`
* objects.
*/
class build_plan {
/// The packages that are part of this plan.
std::vector<package_plan> _packages;

public:
/**
* Append a new package plan. Returns a reference to the package plan so that it can be further
* tweaked. Note that the reference is not stable.
*/
package_plan& add_package(package_plan p) noexcept {
return _packages.emplace_back(std::move(p));
}

/**
* All of the packages in this plan
*/
auto& packages() const noexcept { return _packages; }
void compile_all(const build_env& env, int njobs) const;
void archive_all(const build_env& env, int njobs) const;
void link_all(const build_env& env, int njobs) const;
/**
* Compile all files in the plan.
*/
void compile_all(const build_env& env, int njobs) const;
/**
* Generate all static library archives in the plan
*/
void archive_all(const build_env& env, int njobs) const;
/**
* Link all runtime binaries (executables) in the plan
*/
void link_all(const build_env& env, int njobs) const;

/**
* Execute all tests defined in the plan. Returns information for every failed test.
*/
std::vector<test_failure> run_all_tests(build_env_ref env, int njobs) const;
};
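
To illustrate how the plan types compose, a hedged sketch follows; the package name and namespace are hypothetical, and the `library_plan` and build environment are assumed to come from elsewhere (e.g. `library_plan::create`):

#include <dds/build/plan/full.hpp>

// Packages own libraries, the build plan owns packages, and the *_all()
// methods execute the build phases in order.
dds::build_plan make_plan(dds::library_plan lp) {
    dds::package_plan pkg{"acme.widgets", "acme"};
    pkg.add_library(std::move(lp));

    dds::build_plan plan;
    plan.add_package(std::move(pkg));
    return plan;
}

void run_build(const dds::build_plan& plan, dds::build_env_ref env, int njobs) {
    plan.compile_all(env, njobs);  // compile every source file
    plan.archive_all(env, njobs);  // create static library archives
    plan.link_all(env, njobs);     // link apps and tests
}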


src/dds/build/plan/library.cpp (+54, -30)

@@ -4,78 +4,101 @@

#include <range/v3/view/concat.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/transform.hpp>

#include <cassert>

using namespace dds;

library_plan library_plan::create(const library& lib,
const library_build_params& params,
const usage_requirement_map& ureqs) {
std::vector<compile_file_plan> compile_files;
std::vector<link_executable_plan> link_executables;
std::optional<create_archive_plan> create_archive;

// Source files are kept in three groups:
std::vector<source_file> app_sources;
std::vector<source_file> test_sources;
std::vector<source_file> lib_sources;

// Collect the source for this library. This will look for any compilable sources in the `src/`
// subdirectory of the library.
auto src_dir = lib.src_dir();
if (src_dir.exists()) {
auto all_sources = src_dir.sources();
auto to_compile = all_sources | ranges::views::filter([&](const source_file& sf) {
return (sf.kind == source_kind::source
|| (sf.kind == source_kind::app && params.build_apps)
|| (sf.kind == source_kind::test && params.build_tests));
});

for (const auto& sfile : to_compile) {
// Sort each source file between the three source arrays, depending on
// the kind of source that we are looking at.
auto all_sources = src_dir.collect_sources();
for (const auto& sfile : all_sources) {
if (sfile.kind == source_kind::test) {
test_sources.push_back(sfile);
} else if (sfile.kind == source_kind::app) {
app_sources.push_back(sfile);
} else {
} else if (sfile.kind == source_kind::source) {
lib_sources.push_back(sfile);
} else {
assert(sfile.kind == source_kind::header);
}
}
}

// Load up the compile rules
auto compile_rules = lib.base_compile_rules();
compile_rules.enable_warnings() = params.enable_warnings;

// Apply our transitive usage requirements. This gives us the search directories for our
// dependencies.
for (const auto& use : lib.manifest().uses) {
ureqs.apply(compile_rules, use.namespace_, use.name);
}

for (const auto& sf : lib_sources) {
compile_files.emplace_back(compile_rules,
sf,
lib.manifest().name,
params.out_subdir / "obj");
}
// Convert the library sources into their respective file compilation plans.
auto lib_compile_files = //
lib_sources //
| ranges::views::transform([&](const source_file& sf) {
return compile_file_plan(compile_rules,
sf,
lib.manifest().name,
params.out_subdir / "obj");
})
| ranges::to_vector;

if (!lib_sources.empty()) {
create_archive.emplace(lib.manifest().name, params.out_subdir, std::move(compile_files));
// If we have any compiled library files, generate a static library archive
// for this library
std::optional<create_archive_plan> create_archive;
if (!lib_compile_files.empty()) {
create_archive.emplace(lib.manifest().name,
params.out_subdir,
std::move(lib_compile_files));
}

std::vector<fs::path> in_libs;
// Collect the paths to linker inputs that should be used when generating executables for this
// library.
std::vector<fs::path> link_libs;
for (auto& use : lib.manifest().uses) {
extend(in_libs, ureqs.link_paths(use.namespace_, use.name));
extend(link_libs, ureqs.link_paths(use.namespace_, use.name));
}
for (auto& link : lib.manifest().links) {
extend(in_libs, ureqs.link_paths(link.namespace_, link.name));
extend(link_libs, ureqs.link_paths(link.namespace_, link.name));
}

auto test_in_libs = in_libs;
extend(test_in_libs, params.test_link_files);
// Linker inputs for tests may contain additional code for test execution
auto test_link_libs = link_libs;
extend(test_link_libs, params.test_link_files);

// There may also be additional #include paths for test source files
auto test_rules = compile_rules.clone();
extend(test_rules.include_dirs(), params.test_include_dirs);

// Generate the plans to link any executables for this library
std::vector<link_executable_plan> link_executables;
for (const source_file& source : ranges::views::concat(app_sources, test_sources)) {
const bool is_test = source.kind == source_kind::test;
// Pick a subdir based on app/test
auto subdir
= source.kind == source_kind::test ? params.out_subdir / "test" : params.out_subdir;
const auto subdir_base = is_test ? params.out_subdir / "test" : params.out_subdir;
// Put test/app executables in a further subdirectory based on the source file path
const auto subdir
= subdir_base / fs::relative(source.path.parent_path(), lib.src_dir().path);
// Pick compile rules based on app/test
auto rules = source.kind == source_kind::test ? test_rules : compile_rules;
auto rules = is_test ? test_rules : compile_rules;
// Pick input libs based on app/test
auto& exe_link_libs = source.kind == source_kind::test ? test_in_libs : in_libs;
auto& exe_link_libs = is_test ? test_link_libs : link_libs;
// TODO: Apps/tests should only see the _public_ include dir, not both
link_executables.emplace_back(exe_link_libs,
compile_file_plan(rules,
@@ -86,6 +109,7 @@ library_plan library_plan::create(const library& lib,
source.path.stem().stem().string());
}

// Done!
return library_plan{lib.manifest().name,
lib.path(),
std::move(create_archive),

src/dds/build/plan/library.hpp (+86, -17)

@@ -12,26 +12,64 @@

namespace dds {

/**
* The parameters that tweak the behavior of building a library
*/
struct library_build_params {
/// The subdirectory of the build root in which this library should place its files.
fs::path out_subdir;
bool build_tests = false;
bool build_apps = false;
bool enable_warnings = false;
/// Whether tests should be compiled and linked for this library
bool build_tests = false;
/// Whether applications should be compiled and linked for this library
bool build_apps = false;
/// Whether compiler warnings should be enabled for building the source files in this library.
bool enable_warnings = false;

// Extras for compiling tests:
/// Directories that should be on the #include search path when compiling tests
std::vector<fs::path> test_include_dirs;
/// Files that should be added as inputs when linking test executables
std::vector<fs::path> test_link_files;
};

/**
* A `library_plan` is a composite object that keeps track of the parameters for building a library,
* including:
*
* - If the library has compilable library source files, a `create_archive_plan` that details the
* compilation of those source files and their collection into a static library archive.
* - The executables that need to be linked when this library is built. This includes any tests and
* apps that are part of this library. These can be enabled/disabled by setting the appropriate
* values in `library_build_params`.
* - The libraries that this library *uses*.
* - The libraries that this library *links*.
*
* While there is a public constructor, it is best to use the `create` named constructor, which will
* initialize all of the constructor parameters correctly.
*/
class library_plan {
std::string _name;
fs::path _source_root;
/// The name of the library
std::string _name;
/// The directory at the root of this library
fs::path _source_root;
/// The `create_archive_plan` for this library, if applicable
std::optional<create_archive_plan> _create_archive;
std::vector<link_executable_plan> _link_exes;
std::vector<lm::usage> _uses;
std::vector<lm::usage> _links;
/// The executables that should be linked as part of this library's build
std::vector<link_executable_plan> _link_exes;
/// The libraries that we use
std::vector<lm::usage> _uses;
/// The libraries that we link
std::vector<lm::usage> _links;

public:
/**
* Construct a new `library_plan`
* @param name The name of the library
* @param source_root The directory that contains this library
* @param ar The `create_archive_plan`, or `nullopt` for this library.
* @param exes The `link_executable_plan` objects for this library.
* @param uses The identities of the libraries that are used by this library
* @param links The identities of the libraries that are linked by this library
*/
library_plan(std::string_view name,
path_ref source_root,
std::optional<create_archive_plan> ar,
@@ -45,15 +83,46 @@ public:
, _uses(std::move(uses))
, _links(std::move(links)) {}

/**
* Get the name of the library
*/
auto& name() const noexcept { return _name; }
/**
* The directory that defines the source root of the library.
*/
path_ref source_root() const noexcept { return _source_root; }
auto& name() const noexcept { return _name; }
auto& create_archive() const noexcept { return _create_archive; }
auto& executables() const noexcept { return _link_exes; }
auto& uses() const noexcept { return _uses; }
auto& links() const noexcept { return _links; }

static library_plan
create(const library&, const library_build_params&, const usage_requirement_map&);
/**
* A `create_archive_plan` object, or `nullopt`, depending on if this library has compiled
* components
*/
auto& create_archive() const noexcept { return _create_archive; }
/**
* The executables that should be created by this library
*/
auto& executables() const noexcept { return _link_exes; }
/**
* The library identifiers that are used by this library
*/
auto& uses() const noexcept { return _uses; }
/**
* The library identifiers that are linked by this library
*/
auto& links() const noexcept { return _links; }

/**
* Named constructor: Create a new `library_plan` automatically from some build-time parameters.
*
* @param lib The `library` object from which we will inherit several properties.
* @param params Parameters controlling the build of the library. i.e. if we create tests,
* enable warnings, etc.
* @param ureqs The usage requirements map. This should be populated as appropriate.
*
* The `lib` parameter defines the usage requirements of this library, and they are looked up in
* the `ureqs` map. If there are any missing requirements, an exception will be thrown.
*/
static library_plan create(const library& lib,
const library_build_params& params,
const usage_requirement_map& ureqs);
};

} // namespace dds

src/dds/build/plan/package.hpp (+29, -2)

@@ -7,20 +7,47 @@

namespace dds {

/**
* A package is a top-level component with a name, namespace, and some number of associated
* libraries. A package plan will roughly correspond to either a source distribution or a project
* directory
*/
class package_plan {
std::string _name;
std::string _namespace;
/// Package name
std::string _name;
/// The package namespace. Used to specify interdependencies
std::string _namespace;
/// The libraries in this package
std::vector<library_plan> _libraries;

public:
/**
* Create a new package plan.
* @param name The name of the package
* @param namespace_ The namespace of the package. Used when specifying linker dependencies.
*/
package_plan(std::string_view name, std::string_view namespace_)
: _name(name)
, _namespace(namespace_) {}

/**
* Add a library plan to this package plan
* @param lp The `library_plan` to add to the package. Once added, the
* library plan cannot be changed directly.
*/
void add_library(library_plan lp) { _libraries.emplace_back(std::move(lp)); }

/**
* Get the package name
*/
auto& name() const noexcept { return _name; }
/**
* The package namespace
*/
auto& namespace_() const noexcept { return _namespace; }
/**
* The libraries in the package
*/
auto& libraries() const noexcept { return _libraries; }
};


src/dds/build/source_dir.hpp (+0, -18)

@@ -1,18 +0,0 @@
#pragma once

#include <dds/source.hpp>
#include <dds/util/fs.hpp>

#include <vector>

namespace dds {

struct source_directory {
fs::path path;

std::vector<source_file> sources() const;

bool exists() const noexcept { return fs::exists(path); }
};

} // namespace dds

src/dds/catalog/get.hpp (+2, -2)

@@ -1,6 +1,6 @@
#pragma once

#include <dds/sdist.hpp>
#include <dds/source/dist.hpp>
#include <dds/temp.hpp>

namespace dds {
@@ -14,4 +14,4 @@ struct temporary_sdist {

temporary_sdist get_package_sdist(const package_info&);

} // namespace dds
} // namespace dds

src/dds/compdb.cpp (+1, -1)

@@ -13,7 +13,7 @@ void dds::generate_compdb(const build_plan& plan, build_env_ref env) {

for (const compile_file_plan& cf : iter_compilations(plan)) {
auto cmd_info = cf.generate_compile_command(env);
auto entry = nlohmann::json::object({
auto entry = nlohmann::json::object({
{"directory", env.output_root.string()},
{"arguments", cmd_info.command},
{"file", cf.source_path().string()},

+ 6
- 6
src/dds/db/database.cpp View File

@@ -129,7 +129,7 @@ void database::record_dep(path_ref input, path_ref output, fs::file_time_type in
auto in_id = _record_file(input);
auto out_id = _record_file(output);
auto& st = _stmt_cache(R"(
INSERT OR IGNORE INTO dds_deps (input_file_id, output_file_id, input_mtime)
INSERT OR REPLACE INTO dds_deps (input_file_id, output_file_id, input_mtime)
VALUES (?, ?, ?)
)"_sql);
sqlite3::exec(st, std::forward_as_tuple(in_id, out_id, input_mtime.time_since_epoch().count()));
@@ -159,10 +159,10 @@ void database::forget_inputs_of(path_ref file) {
DELETE FROM dds_deps
WHERE output_file_id IN id_to_delete
)"_sql);
sqlite3::exec(st, std::forward_as_tuple(fs::weakly_canonical(file).string()));
sqlite3::exec(st, std::forward_as_tuple(fs::weakly_canonical(file).generic_string()));
}

std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_) {
std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_) const {
auto file = fs::weakly_canonical(file_);
auto& st = _stmt_cache(R"(
WITH file AS (
@@ -176,7 +176,7 @@ std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_)
WHERE output_file_id IN file
)"_sql);
st.reset();
st.bindings[1] = file.string();
st.bindings[1] = file.generic_string();
auto tup_iter = sqlite3::iter_tuples<std::string, std::int64_t>(st);

std::vector<input_file_info> ret;
@@ -191,7 +191,7 @@ std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_)
return ret;
}

std::optional<command_info> database::command_of(path_ref file_) {
std::optional<command_info> database::command_of(path_ref file_) const {
auto file = fs::weakly_canonical(file_);
auto& st = _stmt_cache(R"(
WITH file AS (
@@ -204,7 +204,7 @@ std::optional<command_info> database::command_of(path_ref file_) {
WHERE file_id IN file
)"_sql);
st.reset();
st.bindings[1] = file.string();
st.bindings[1] = file.generic_string();
auto opt_res = sqlite3::unpack_single_opt<std::string, std::string>(st);
if (!opt_res) {
return std::nullopt;

+ 5
- 5
src/dds/db/database.hpp View File

@@ -26,9 +26,9 @@ struct input_file_info {
};

class database {
neo::sqlite3::database _db;
neo::sqlite3::statement_cache _stmt_cache{_db};
mutable std::shared_mutex _mutex;
neo::sqlite3::database _db;
mutable neo::sqlite3::statement_cache _stmt_cache{_db};
mutable std::shared_mutex _mutex;

explicit database(neo::sqlite3::database db);
database(const database&) = delete;
@@ -49,8 +49,8 @@ public:
void store_file_command(path_ref file, const command_info& cmd);
void forget_inputs_of(path_ref file);

std::optional<std::vector<input_file_info>> inputs_of(path_ref file);
std::optional<command_info> command_of(path_ref file);
std::optional<std::vector<input_file_info>> inputs_of(path_ref file) const;
std::optional<command_info> command_of(path_ref file) const;
};

} // namespace dds
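Since the two `database` changes above (`INSERT OR REPLACE` for dependency rows, and `const` query methods) are easiest to appreciate from a call site, here is a hedged sketch of how the dependency-tracking API is typically exercised. The file paths are invented and the fields of `input_file_info` are only hinted at in a comment; `record_dep`, `inputs_of`, and `command_of` match the declarations above.

#include <dds/db/database.hpp>

#include <filesystem>

// Hypothetical use of the dependency database (sketch only).
void record_and_check(dds::database& db) {
    // Re-recording the same (input, output) pair now refreshes the stored
    // mtime thanks to INSERT OR REPLACE, instead of keeping the stale row.
    auto mtime = std::filesystem::last_write_time("src/foo.cpp");
    db.record_dep("src/foo.cpp", "obj/foo.o", mtime);

    // inputs_of() and command_of() are const, so up-to-date checks can be
    // made against a database that is otherwise only being read.
    if (auto inputs = db.inputs_of("obj/foo.o")) {
        for (const auto& in : *inputs) {
            (void)in;  // compare recorded mtime vs. the file's current mtime
        }
    }
    if (auto cmd = db.command_of("obj/foo.o")) {
        (void)cmd;  // the previously stored compile command for this output
    }
}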

+ 1
- 2
src/dds/deps.cpp View File

@@ -1,7 +1,7 @@
#include "./deps.hpp"

#include <dds/repo/repo.hpp>
#include <dds/sdist.hpp>
#include <dds/source/dist.hpp>
#include <dds/usage_reqs.hpp>
#include <dds/util/string.hpp>
#include <libman/index.hpp>
@@ -10,7 +10,6 @@
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/transform.hpp>
#include <spdlog/spdlog.h>
#include <spdlog/fmt/ostr.h>

#include <cctype>
#include <map>

+ 3
- 3
src/dds/library/library.cpp View File

@@ -1,7 +1,7 @@
#include <dds/library/library.hpp>

#include <dds/build/plan/compile_file.hpp>
#include <dds/build/source_dir.hpp>
#include <dds/source/dir.hpp>
#include <dds/util/algo.hpp>

#include <range/v3/view/filter.hpp>
@@ -22,7 +22,7 @@ auto collect_pf_sources(path_ref path) {
if (!fs::is_directory(include_dir.path)) {
throw std::runtime_error("The `include` at the root of the project is not a directory");
}
auto inc_sources = include_dir.sources();
auto inc_sources = include_dir.collect_sources();
// Drop any source files we found within `include/`
erase_if(sources, [&](auto& info) {
if (info.kind != source_kind::header) {
@@ -39,7 +39,7 @@ auto collect_pf_sources(path_ref path) {
if (!fs::is_directory(src_dir.path)) {
throw std::runtime_error("The `src` at the root of the project is not a directory");
}
auto src_sources = src_dir.sources();
auto src_sources = src_dir.collect_sources();
extend(sources, src_sources);
}


+ 2
- 2
src/dds/library/library.hpp View File

@@ -1,9 +1,9 @@
#pragma once

#include <dds/build/plan/compile_file.hpp>
#include <dds/build/source_dir.hpp>
#include <dds/library/manifest.hpp>
#include <dds/source.hpp>
#include <dds/source/dir.hpp>
#include <dds/source/file.hpp>

#include <string>


+ 1
- 1
src/dds/proc.win.cpp View File

@@ -1,8 +1,8 @@
#ifdef _WIN32
#include "./proc.hpp"

#include <wil/resource.h>
#include <spdlog/spdlog.h>
#include <wil/resource.h>

#include <windows.h>


+ 22
- 23
src/dds/repo/repo.cpp View File

@@ -1,8 +1,8 @@
#include "./repo.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/sdist.hpp>
#include <dds/solve/solve.hpp>
#include <dds/source/dist.hpp>
#include <dds/util/paths.hpp>
#include <dds/util/string.hpp>

@@ -111,26 +111,25 @@ const sdist* repository::find(const package_id& pkg) const noexcept {
}

std::vector<package_id> repository::solve(const std::vector<dependency>& deps,
const catalog& ctlg) const {
return dds::solve(deps,
[&](std::string_view name) -> std::vector<package_id> {
auto mine = ranges::views::all(_sdists) //
| ranges::views::filter([&](const sdist& sd) {
return sd.manifest.pkg_id.name == name;
})
| ranges::views::transform(
[](const sdist& sd) { return sd.manifest.pkg_id; });
auto avail = ctlg.by_name(name);
auto all = ranges::views::concat(mine, avail) | ranges::to_vector;
ranges::sort(all, std::less<>{});
ranges::unique(all, std::less<>{});
return all;
},
[&](const package_id& pkg_id) {
auto found = find(pkg_id);
if (found) {
return found->manifest.dependencies;
}
return ctlg.dependencies_of(pkg_id);
});
const catalog& ctlg) const {
return dds::solve(
deps,
[&](std::string_view name) -> std::vector<package_id> {
auto mine = ranges::views::all(_sdists) //
| ranges::views::filter(
[&](const sdist& sd) { return sd.manifest.pkg_id.name == name; })
| ranges::views::transform([](const sdist& sd) { return sd.manifest.pkg_id; });
auto avail = ctlg.by_name(name);
auto all = ranges::views::concat(mine, avail) | ranges::to_vector;
ranges::sort(all, std::less<>{});
ranges::unique(all, std::less<>{});
return all;
},
[&](const package_id& pkg_id) {
auto found = find(pkg_id);
if (found) {
return found->manifest.dependencies;
}
return ctlg.dependencies_of(pkg_id);
});
}

+ 2
- 2
src/dds/repo/repo.hpp View File

@@ -1,8 +1,8 @@
#pragma once

#include <dds/sdist.hpp>
#include <dds/util/flock.hpp>
#include <dds/catalog/catalog.hpp>
#include <dds/source/dist.hpp>
#include <dds/util/flock.hpp>
#include <dds/util/fs.hpp>

#include <functional>

src/dds/build/source_dir.cpp → src/dds/source/dir.cpp View File

@@ -1,4 +1,4 @@
#include "./source_dir.hpp"
#include "./dir.hpp"

#include <range/v3/range/conversion.hpp>
#include <range/v3/view/filter.hpp>
@@ -6,19 +6,15 @@

using namespace dds;

std::vector<source_file> source_directory::sources() const {
std::vector<source_file> source_directory::collect_sources() const {
using namespace ranges::views;
// Strips nullopt elements and lifts the value from the results
auto drop_nulls = //
filter([](auto&& opt) { return opt.has_value(); }) //
| transform([](auto&& opt) { return *opt; }); //

// Collect all source files from the directory
return //
fs::recursive_directory_iterator(path) //
| filter([](auto&& entry) { return entry.is_regular_file(); }) //
| transform([&](auto&& entry) { return source_file::from_path(entry, path); }) //
// source_file::from_path returns an optional. Drop nulls
| drop_nulls //
| filter([](auto&& opt) { return opt.has_value(); }) //
| transform([](auto&& opt) { return *opt; }) //
| ranges::to_vector;
}

+ 29
- 0
src/dds/source/dir.hpp View File

@@ -0,0 +1,29 @@
#pragma once

#include <dds/source/file.hpp>
#include <dds/util/fs.hpp>

#include <vector>

namespace dds {

/**
* A `source_directory` is a simple wrapper type that provides type safety and utilities to
* represent a source directory.
*/
struct source_directory {
/// The actual path to the directory
fs::path path;

/**
* Generate a vector of every source file contained in this directory (including subdirectories)
*/
std::vector<source_file> collect_sources() const;

/**
* Check if the directory exists
*/
bool exists() const noexcept { return fs::exists(path); }
};

} // namespace dds
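A short usage sketch for the relocated `source_directory` type. The directory layout is hypothetical, but `collect_sources()` and `exists()` are exactly the members declared above, and the drop-unclassifiable-files behaviour is the one implemented in dir.cpp.

#include <dds/source/dir.hpp>

#include <filesystem>
#include <vector>

// Hypothetical walk over a project's `src/` directory (sketch only).
std::vector<dds::source_file> sources_under(const std::filesystem::path& project_root) {
    dds::source_directory src_dir{project_root / "src"};
    if (!src_dir.exists()) {
        return {};
    }
    // Recursively enumerates files; anything source_file::from_path() cannot
    // classify is dropped inside collect_sources() (see dir.cpp above).
    return src_dir.collect_sources();
}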

src/dds/sdist.cpp → src/dds/source/dist.cpp View File

@@ -1,4 +1,4 @@
#include "./sdist.hpp"
#include "./dist.hpp"

#include <dds/temp.hpp>
#include <dds/util/fs.hpp>

src/dds/sdist.hpp → src/dds/source/dist.hpp View File


src/dds/source.cpp → src/dds/source/file.cpp View File

@@ -1,4 +1,4 @@
#include "./source.hpp"
#include "./file.hpp"

#include <dds/util/string.hpp>


src/dds/source.hpp → src/dds/source/file.hpp View File


+ 7
- 7
src/dds/toolchain/from_dds.cpp View File

@@ -173,21 +173,21 @@ toolchain dds::parse_toolchain_dds(const lm::pair_list& pairs, strv context) {
bool is_msvc = compiler_id_e == msvc;
bool is_gnu_like = is_gnu || is_clang;

const enum deps_mode deps_mode = [&] {
const enum file_deps_mode deps_mode = [&] {
if (!deps_mode_str.has_value()) {
if (is_gnu_like) {
return deps_mode::gnu;
return file_deps_mode::gnu;
} else if (is_msvc) {
return deps_mode::msvc;
return file_deps_mode::msvc;
} else {
return deps_mode::none;
return file_deps_mode::none;
}
} else if (deps_mode_str == "GNU") {
return deps_mode::gnu;
return file_deps_mode::gnu;
} else if (deps_mode_str == "MSVC") {
return deps_mode::msvc;
return file_deps_mode::msvc;
} else if (deps_mode_str == "None") {
return deps_mode::none;
return file_deps_mode::none;
} else {
fail(context, "Unknown Deps-Mode '{}'", *deps_mode_str);
}
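For readers tracking the `deps_mode` → `file_deps_mode` rename, the selection above is just a mapping from the optional `Deps-Mode` string to the new enum. A condensed restatement follows; the free-function form and the plain exception (instead of the file's `fail()` helper) are purely illustrative.

#include <dds/build/file_deps.hpp>

#include <optional>
#include <stdexcept>
#include <string_view>

// Condensed restatement of the Deps-Mode selection shown above (sketch only).
dds::file_deps_mode pick_deps_mode(std::optional<std::string_view> deps_mode_str,
                                   bool                            is_gnu_like,
                                   bool                            is_msvc) {
    if (!deps_mode_str) {
        if (is_gnu_like) return dds::file_deps_mode::gnu;
        if (is_msvc)     return dds::file_deps_mode::msvc;
        return dds::file_deps_mode::none;
    }
    if (*deps_mode_str == "GNU")  return dds::file_deps_mode::gnu;
    if (*deps_mode_str == "MSVC") return dds::file_deps_mode::msvc;
    if (*deps_mode_str == "None") return dds::file_deps_mode::none;
    throw std::runtime_error("Unknown Deps-Mode");
}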

+ 7
- 6
src/dds/toolchain/from_dds.test.cpp View File

@@ -44,12 +44,13 @@ void check_tc_compile(std::string_view tc_content,
}

TEST_CASE("Generating toolchain commands") {
check_tc_compile("Compiler-ID: GNU",
"g++ -fPIC -fdiagnostics-color -pthread -MD -MF foo.o.d -MT foo.o -c foo.cpp -ofoo.o",
"g++ -fPIC -fdiagnostics-color -pthread -Wall -Wextra -Wpedantic -Wconversion "
"-MD -MF foo.o.d -MT foo.o -c foo.cpp -ofoo.o",
"ar rcs stuff.a foo.o bar.o",
"g++ -fPIC -fdiagnostics-color foo.o bar.a -pthread -lstdc++fs -omeow.exe");
check_tc_compile(
"Compiler-ID: GNU",
"g++ -fPIC -fdiagnostics-color -pthread -MD -MF foo.o.d -MT foo.o -c foo.cpp -ofoo.o",
"g++ -fPIC -fdiagnostics-color -pthread -Wall -Wextra -Wpedantic -Wconversion "
"-MD -MF foo.o.d -MT foo.o -c foo.cpp -ofoo.o",
"ar rcs stuff.a foo.o bar.o",
"g++ -fPIC -fdiagnostics-color foo.o bar.a -pthread -lstdc++fs -omeow.exe");

check_tc_compile(
"Compiler-ID: GNU\nDebug: True",

+ 2
- 2
src/dds/toolchain/prep.hpp View File

@@ -1,6 +1,6 @@
#pragma once

#include <dds/build/deps.hpp>
#include <dds/build/file_deps.hpp>

#include <string>
#include <vector>
@@ -26,7 +26,7 @@ struct toolchain_prep {
std::string exe_prefix;
std::string exe_suffix;

enum deps_mode deps_mode;
enum file_deps_mode deps_mode;

toolchain realize() const;
};

+ 6
- 6
src/dds/toolchain/toolchain.cpp View File

@@ -45,8 +45,8 @@ vector<string> toolchain::definition_args(std::string_view s) const noexcept {
return replace(_def_template, "<DEF>", s);
}

compile_command_info toolchain::create_compile_command(const compile_file_spec& spec) const
noexcept {
compile_command_info
toolchain::create_compile_command(const compile_file_spec& spec) const noexcept {
vector<string> flags;

using namespace std::literals;
@@ -78,7 +78,7 @@ compile_command_info toolchain::create_compile_command(const compile_file_spec&

std::optional<fs::path> gnu_depfile_path;

if (_deps_mode == deps_mode::gnu) {
if (_deps_mode == file_deps_mode::gnu) {
gnu_depfile_path = spec.out_path;
gnu_depfile_path->replace_extension(gnu_depfile_path->extension().string() + ".d");
extend(flags,
@@ -87,7 +87,7 @@ compile_command_info toolchain::create_compile_command(const compile_file_spec&
std::string_view(gnu_depfile_path->string()),
"-MT"sv,
std::string_view(spec.out_path.string())});
} else if (_deps_mode == deps_mode::msvc) {
} else if (_deps_mode == file_deps_mode::msvc) {
flags.push_back("/showIncludes");
}

@@ -150,9 +150,9 @@ std::optional<toolchain> toolchain::get_builtin(std::string_view tc_id) noexcept
}

#define CXX_VER_TAG(str, version) \
if (starts_with(tc_id, str)) { \
if (starts_with(tc_id, str)) { \
tc_id = tc_id.substr(std::string_view(str).length()); \
tc_content += "C++-Version: "s + version + "\n"; \
tc_content += "C++-Version: "s + version + "\n"; \
} \
static_assert(true)


+ 2
- 2
src/dds/toolchain/toolchain.hpp View File

@@ -1,6 +1,6 @@
#pragma once

#include <dds/build/deps.hpp>
#include <dds/build/file_deps.hpp>
#include <dds/util/fs.hpp>

#include <optional>
@@ -60,7 +60,7 @@ class toolchain {
std::string _exe_prefix;
std::string _exe_suffix;

enum deps_mode _deps_mode;
enum file_deps_mode _deps_mode;

public:
toolchain() = default;

+ 1
- 1
src/dds/util/algo.hpp View File

@@ -1,9 +1,9 @@
#pragma once

#include <algorithm>
#include <functional>
#include <initializer_list>
#include <vector>
#include <functional>

namespace dds {


+ 1
- 1
tests/test_drivers/catch/test_catch.py View File

@@ -8,6 +8,6 @@ from dds_ci import proc
)
def test_catch_testdriver(dds: DDS):
dds.build(tests=True)
test_exe = dds.build_dir / f'test/calc{dds.exe_suffix}'
test_exe = dds.build_dir / f'test/testlib/calc{dds.exe_suffix}'
assert test_exe.exists()
assert proc.run([test_exe]).returncode == 0
