@@ -12,10 +12,26 @@
#include <filesystem>
#include <iostream>
#include <locale>
static void load_locale() {
auto lang = std::getenv("LANG");
if (!lang) {
return;
}
try {
std::locale::global(std::locale(lang));
} catch (const std::runtime_error& e) {
// No locale with the given name
return;
}
}
int main_fn(std::string_view program_name, const std::vector<std::string>& argv) {
dds::log::init_logger();
auto log_subscr = neo::subscribe(&dds::log::ev_log::print);
load_locale();
std::setlocale(LC_CTYPE, ".utf8");
dds::install_signal_handlers();
@@ -104,6 +120,7 @@ int main_fn(std::string_view program_name, const std::vector<std::string>& argv)
// Non-null result from argument parsing, return that value immediately.
return *result;
}
dds::log::current_log_level = opts.log_level;
return dds::cli::dispatch_main(opts);
}
@@ -142,7 +159,6 @@ std::string wstr_to_u8str(std::wstring_view in) {
int wmain(int argc, wchar_t** argv) {
std::vector<std::string> u8_argv;
::setlocale(LC_ALL, ".utf8");
for (int i = 0; i < argc; ++i) {
u8_argv.emplace_back(wstr_to_u8str(argv[i]));
}
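A side note on the load_locale() helper above (illustrative, not part of the patch): std::locale's string constructor throws std::runtime_error when the runtime does not recognize the requested name, which is why an unusable $LANG value is swallowed instead of taking the program down. A minimal standalone sketch of that behavior:

#include <iostream>
#include <locale>
#include <stdexcept>

int main() {
    try {
        // A name the C++ runtime will not recognize, standing in for a bogus $LANG.
        std::locale::global(std::locale("no-such-locale.utf8"));
    } catch (const std::runtime_error& e) {
        // Same recovery as load_locale(): keep the default "C" locale and move on.
        std::cout << "ignoring unusable locale: " << e.what() << '\n';
    }
}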
@@ -69,7 +69,7 @@ msvc_deps_info dds::parse_msvc_output_for_deps(std::string_view output, std::str
void dds::update_deps_info(neo::output<database> db_, const file_deps_info& deps) {
database& db = db_;
db.store_file_command(deps.output, {deps.command, deps.command_output});
db.record_compilation(deps.output, deps.command);
db.forget_inputs_of(deps.output);
for (auto&& inp : deps.inputs) {
auto mtime = fs::last_write_time(inp);
@@ -77,7 +77,7 @@ void dds::update_deps_info(neo::output<database> db_, const file_deps_info& deps
}
}
deps_rebuild_info dds::get_rebuild_info(const database& db, path_ref output_path) {
std::optional<prior_compilation> dds::get_prior_compilation(const database& db, path_ref output_path) {
auto cmd_ = db.command_of(output_path);
if (!cmd_) {
return {};
@@ -95,9 +95,8 @@ deps_rebuild_info dds::get_rebuild_info(const database& db, path_ref output_path
})
| ranges::views::transform([](auto& info) { return info.path; }) //
| ranges::to_vector;
deps_rebuild_info ret;
ret.newer_inputs = std::move(changed_files);
ret.previous_command = cmd.command;
ret.previous_command_output = cmd.output;
prior_compilation ret;
ret.newer_inputs = std::move(changed_files);
ret.previous_command = cmd;
return ret;
}
@@ -27,6 +27,7 @@
* other languages is not difficult.
*/
#include <dds/db/database.hpp>
#include <dds/util/fs.hpp>
#include <neo/out.hpp>
@@ -64,11 +65,7 @@ struct file_deps_info {
/**
* The command that was used to generate the output
*/
std::string command;
/**
* The output of the command.
*/
std::string command_output;
completed_compilation command;
};
class database;
@@ -118,7 +115,7 @@ msvc_deps_info parse_msvc_output_for_deps(std::string_view output, std::string_v
/**
* Update the dependency information in the build database for later reference via
* `get_rebuild_info`.
* `get_prior_compilation`.
* @param db The database to update
* @param info The dependency information to store
*/
@@ -129,16 +126,15 @@ void update_deps_info(neo::output<database> db, const file_deps_info& info);
* that have a newer mtime than we have recorded, and the previous command and previous command
* output that we have stored.
*/
struct deps_rebuild_info {
struct prior_compilation {
std::vector<fs::path> newer_inputs;
std::string previous_command;
std::string previous_command_output;
completed_compilation previous_command;
};
/**
* Given the path to an output file, read all the dependency information from the database. If the
* given output has never been recorded, then the resulting object will be empty.
* given output has never been recorded, then the resulting object will be null.
*/
deps_rebuild_info get_rebuild_info(const database& db, path_ref output_path);
std::optional<prior_compilation> get_prior_compilation(const database& db, path_ref output_path);
} // namespace dds
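A rough usage sketch of the revised interface (hypothetical helper, assuming this header is included; the function name below is not from the patch):

// Hypothetical caller, for illustration only.
bool object_is_up_to_date(const dds::database& db, dds::path_ref object_file) {
    auto prior = dds::get_prior_compilation(db, object_file);
    if (!prior) {
        // Null result: this output has never been recorded, so it must be built.
        return false;
    }
    if (!prior->newer_inputs.empty()) {
        // At least one recorded input has a newer mtime than last time.
        return false;
    }
    // Otherwise prior->previous_command still carries the cached command string,
    // captured compiler output, and duration from the last successful compile.
    return true;
}

The real consumer in compile_exec.cpp below additionally compares the freshly generated command line against previous_command.quoted_command before declaring the file up to date.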
@@ -5,13 +5,14 @@
#include <dds/proc.hpp>
#include <dds/util/log.hpp>
#include <dds/util/parallel.hpp>
#include <dds/util/signal.hpp>
#include <dds/util/string.hpp>
#include <dds/util/time.hpp>
#include <fansi/styled.hpp>
#include <neo/assert.hpp>
#include <range/v3/algorithm/count_if.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/transform.hpp>
#include <algorithm>
@@ -25,20 +26,23 @@ using namespace fansi::literals;
namespace {
/// The actual "real" information that we need to perform a compilation.
struct compile_file_full {
const compile_file_plan& plan;
fs::path object_file_path;
compile_command_info cmd_info;
};
/// Simple aggregate that stores a counter for keeping track of compile progress
struct compile_counter {
std::atomic_size_t n;
std::atomic_size_t n{1};
const std::size_t max;
const std::size_t max_digits;
};
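A quick illustration (not from the patch) of why n is now seeded with 1: fetch_add returns the value held before the increment, so the first finished compile reports ordinal 1 rather than 0 in the [n/total] suffix of the per-file progress line.

#include <atomic>
#include <cstdio>

struct demo_counter {
    std::atomic_size_t n{1};
    std::size_t        max;
};

// Hypothetical stand-in for the progress line emitted after each compile.
void report_progress(demo_counter& c) {
    auto nth = c.n.fetch_add(1);  // atomic: safe when called from parallel workers
    std::printf("[%zu/%zu]\n", nth, c.max);
}

int main() {
    demo_counter c{.max = 3};
    report_progress(c);  // prints "[1/3]"
    report_progress(c);  // prints "[2/3]"
    report_progress(c);  // prints "[3/3]"
}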
struct compile_ticket {
std::reference_wrapper<const compile_file_plan> plan;
// If non-null, the information required to compile the file
compile_command_info command;
fs::path object_file_path;
bool needs_recompile;
// Information about the previous time a file was compiled, if any
std::optional<completed_compilation> prior_command;
};
/**
* Actually performs a compilation and collects deps information from that compilation
*
@@ -47,21 +51,54 @@ struct compile_counter {
* @param counter A thread-safe counter for displaying progress to the user
*/
std::optional<file_deps_info>
do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& counter) {
handle_compilation(const compile_ticket& compile, build_env_ref env, compile_counter& counter) {
if (!compile.needs_recompile) {
// We don't actually compile this file. Just issue any prior warning messages that were from
// a prior compilation.
neo_assert(invariant,
compile.prior_command.has_value(),
"Expected a prior compilation command for file",
compile.plan.get().source_path(),
quote_command(compile.command.command));
auto& prior = *compile.prior_command;
if (dds::trim_view(prior.output).empty()) {
// Nothing to show
return {};
}
if (!compile.plan.get().rules().enable_warnings()) {
// This file shouldn't show warnings. The compiler *may* have produced prior output, but
// this block will be hit when the source file belongs to an external dependency. Rather
// than continually spam the user with warnings that belong to dependencies, don't
// repeatedly show them.
dds_log(trace,
"Cached compiler output suppressed for file with disabled warnings ({})",
compile.plan.get().source_path().string());
return {};
}
dds_log(
warn,
"While compiling file .bold.cyan[{}] [.bold.yellow[{}]] (.br.blue[cached compiler output]):\n{}"_styled,
compile.plan.get().source_path().string(),
prior.quoted_command,
prior.output);
return {};
}
// Create the parent directory
fs::create_directories(cf.object_file_path.parent_path());
fs::create_directories(compile.object_file_path.parent_path());
// Generate a log message to display to the user
auto source_path = cf.plan.source_path();
auto source_path = compile.plan.get().source_path();
auto msg = fmt::format("[{}] Compile: .br.cyan[{}]"_styled,
cf.plan.qualifier(),
fs::relative(source_path, cf.plan.source().basis_path).string());
auto msg
= fmt::format("[{}] Compile: .br.cyan[{}]"_styled,
compile.plan.get().qualifier(),
fs::relative(source_path, compile.plan.get().source().basis_path).string());
// Do it!
dds_log(info, msg);
auto&& [dur_ms, proc_res]
= timed<std::chrono::milliseconds>([&] { return run_proc(cf.cmd_info.command); });
= timed<std::chrono::milliseconds>([&] { return run_proc(compile.command.command); });
auto nth = counter.n.fetch_add(1);
dds_log(info,
"{:60} - {:>7L}ms [{:{}}/{}]",
@@ -85,8 +122,8 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
*/
} else if (env.toolchain.deps_mode() == file_deps_mode::gnu) {
// GNU-style deps using Makefile generation
assert(cf.cmd_info.gnu_depfile_path.has_value());
auto& df_path = *cf.cmd_info.gnu_depfile_path;
assert(compile.command.gnu_depfile_path.has_value());
auto& df_path = *compile.command.gnu_depfile_path;
if (!fs::is_regular_file(df_path)) {
dds_log(critical,
"The expected Makefile deps were not generated on disk. This is a bug! "
@@ -96,14 +133,15 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
dds_log(trace, "Loading compilation dependencies from {}", df_path.string());
auto dep_info = dds::parse_mkfile_deps_file(df_path);
neo_assert(invariant,
dep_info.output == cf.object_file_path,
dep_info.output == compile.object_file_path,
"Generated mkfile deps output path does not match the object file path that "
"we gave it to compile into.",
" we gave it to compile into.",
dep_info.output.string(),
cf.object_file_path.string());
dep_info.command = quote_command(cf.cmd_info.command);
dep_info.command_output = compiler_output;
ret_deps_info = std::move(dep_info);
compile.object_file_path.string());
dep_info.command.quoted_command = quote_command(compile.command.command);
dep_info.command.output = compiler_output;
dep_info.command.duration = dur_ms;
ret_deps_info = std::move(dep_info);
}
} else if (env.toolchain.deps_mode() == file_deps_mode::msvc) {
// Uglier deps generation by parsing the output from cl.exe
@@ -117,11 +155,12 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
// cause a miscompile
if (!msvc_deps.deps_info.inputs.empty()) {
// Add the main source file as an input, since it is not listed by /showIncludes
msvc_deps.deps_info.inputs.push_back(cf.plan.source_path());
msvc_deps.deps_info.output = cf.object_file_path;
msvc_deps.deps_info.command = quote_command(cf.cmd_info.command);
msvc_deps.deps_info.command_output = compiler_output;
ret_deps_info = std::move(msvc_deps.deps_info);
msvc_deps.deps_info.inputs.push_back(compile.plan.get().source_path());
msvc_deps.deps_info.output = compile.object_file_path;
msvc_deps.deps_info.command.quoted_command = quote_command(compile.command.command);
msvc_deps.deps_info.command.output = compiler_output;
msvc_deps.deps_info.command.duration = dur_ms;
ret_deps_info = std::move(msvc_deps.deps_info);
}
} else {
/**
@@ -142,11 +181,11 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
// Log a compiler failure
if (!compiled_okay) {
dds_log(error, "Compilation failed: {}", source_path.string());
dds_log(error, "Compilation failed: .bold.cyan[{}]"_styled, source_path.string());
dds_log(error,
"Subcommand .bold.red[FAILED] [Exited {}]: .bold.yellow[{}]\n{}"_styled,
compile_retc,
quote_command(cf.cmd_info.command),
quote_command(compile.command.command),
compiler_output);
if (compile_signal) {
dds_log(error, "Process exited via signal {}", compile_signal);
@@ -157,9 +196,9 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
// Print any compiler output, sans whitespace
if (!dds::trim_view(compiler_output).empty()) {
dds_log(warn,
"While compiling file {} [{}]:\n{}",
"While compiling file .bold.cyan[{}] [.bold.yellow[{}]]:\n{}"_styled,
source_path.string(),
quote_command(cf.cmd_info.command),
quote_command(compile.command.command),
compiler_output);
}
@@ -168,48 +207,45 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
return ret_deps_info;
}
/// Generate the full compile command information from an abstract plan
compile_file_full realize_plan(const compile_file_plan& plan, build_env_ref env) {
auto cmd_info = plan.generate_compile_command(env);
return compile_file_full{plan, plan.calc_object_file_path(env), cmd_info};
}
/**
* Determine if the given compile command should actually be executed based on
* the dependency information we have recorded in the database.
*/
bool should_compile(const compile_file_full& comp, const database& db) {
if (!fs::exists(comp.object_file_path)) {
dds_log(trace, "Compile {}: Output does not exist", comp.plan.source_path().string());
compile_ticket mk_compile_ticket(const compile_file_plan& plan, build_env_ref env) {
compile_ticket ret{.plan = plan,
.command = plan.generate_compile_command(env),
.object_file_path = plan.calc_object_file_path(env),
.needs_recompile = false,
.prior_command = {}};
auto rb_info = get_prior_compilation(env.db, ret.object_file_path);
if (!rb_info) {
dds_log(trace, "Compile {}: No recorded compilation info", plan.source_path().string());
ret.needs_recompile = true;
} else if (!fs::exists(ret.object_file_path)) {
dds_log(trace, "Compile {}: Output does not exist", plan.source_path().string());
// The output file simply doesn't exist. We have to recompile, of course.
return true;
}
auto rb_info = get_rebuild_info(db, comp.object_file_path);
if (rb_info.previous_command.empty()) {
// We have no previous compile command for this file. Assume it is new.
dds_log(trace, "Recompile {}: No prior compilation info", comp.plan.source_path().string());
return true;
}
if (!rb_info.newer_inputs.empty()) {
ret.needs_recompile = true;
} else if (!rb_info->newer_inputs.empty()) {
// Inputs to this file have changed from a prior execution.
dds_log(trace,
"Recompile {}: Inputs have changed (or no input information)",
comp.plan.source_path().string());
return true;
}
auto cur_cmd_str = quote_command(comp.cmd_info.command);
if (cur_cmd_str != rb_info.previous_command) {
dds_log(trace,
"Recompile {}: Compile command has changed",
comp.plan.source_path().string());
plan.source_path().string());
ret.needs_recompile = true;
} else if (quote_command(ret.command.command) != rb_info->previous_command.quoted_command) {
dds_log(trace, "Recompile {}: Compile command has changed", plan.source_path().string());
// The command used to generate the output is new
return true;
ret.needs_recompile = true;
} else {
// Nope. This file is up-to-date.
dds_log(debug,
"Skip compilation of {} (Result is up-to-date)",
plan.source_path().string());
}
// Nope. This file is up-to-date.
dds_log(debug,
"Skip compilation of {} (Result is up-to-date)",
comp.plan.source_path().string());
return false;
if (rb_info) {
ret.prior_command = rb_info->previous_command;
}
return ret;
}
} // namespace
@@ -220,24 +256,23 @@ bool dds::detail::compile_all(const ref_vector<const compile_file_plan>& compile
auto each_realized = //
compiles
// Convert each _plan_ into a concrete object for compiler invocation.
| views::transform([&](auto&& plan) { return realize_plan(plan, env); })
// Filter out compile jobs that we don't need to run. This drops compilations where the
// output is "up-to-date" based on its inputs.
| views::filter([&](auto&& real) { return should_compile(real, env.db); })
| views::transform([&](auto&& plan) { return mk_compile_ticket(plan, env); })
// Convert to a real vector so we can ask its size.
| ranges::to_vector;
auto n_to_compile = static_cast<std::size_t>(
ranges::count_if(each_realized, &compile_ticket::needs_recompile));
// Keep a counter to display progress to the user.
const auto total = each_realized.size();
const auto max_digits = fmt::format("{}", total).size();
compile_counter counter{{1}, total, max_digits};
const auto max_digits = fmt::format("{}", n_to_compile).size();
compile_counter counter{.max = n_to_compile, .max_digits = max_digits};
// As we execute, accumulate new dependency information from successful compilations
std::vector<file_deps_info> all_new_deps;
std::mutex mut;
// Do it!
auto okay = parallel_run(each_realized, njobs, [&](const compile_file_full& full) {
auto new_dep = do_compile(full, env, counter);
auto okay = parallel_run(each_realized, njobs, [&](const compile_ticket& tkt) {
auto new_dep = handle_compilation(tkt, env, counter);
if (new_dep) {
std::unique_lock lk{mut};
all_new_deps.push_back(std::move(*new_dep));
@@ -245,12 +280,15 @@ bool dds::detail::compile_all(const ref_vector<const compile_file_plan>& compile
});
// Update compile dependency information
auto tr = env.db.transaction();
dds::stopwatch update_timer;
auto tr = env.db.transaction();
for (auto& info : all_new_deps) {
dds_log(trace, "Update dependency info on {}", info.output.string());
update_deps_info(neo::into(env.db), info);
}
dds_log(debug, "Dependency update took {:L}ms", update_timer.elapsed_ms().count());
cancellation_point();
// Return whether or not there were any failures.
return okay;
}
@@ -5,6 +5,9 @@
#include <dds/util/signal.hpp>
#include <dds/util/time.hpp>
#include <range/v3/algorithm/sort.hpp>
#include <range/v3/algorithm/unique.hpp>
#include <string>
#include <vector>
@@ -24,6 +27,9 @@ compile_command_info compile_file_plan::generate_compile_command(build_env_ref e
extend(spec.external_include_dirs, env.ureqs.include_paths(use));
}
extend(spec.definitions, _rules.defs());
// Avoid huge command lines by shrinking down the list of #include dirs
sort_unique_erase(spec.external_include_dirs);
sort_unique_erase(spec.include_dirs);
return env.toolchain.create_compile_command(spec, dds::fs::current_path(), env.knobs);
}
@@ -88,7 +88,7 @@ std::optional<test_failure> link_executable_plan::run_test(build_env_ref env) co
[&] { return run_proc({.command = {exe_path.string()}, .timeout = 10s}); });
if (res.okay()) {
dds_log(info, "{} - .br.green[PASS] - {:>9L}μs", msg, dur.count());
dds_log(info, "{} - .br.green[PASS] - {:>9L}μs"_styled, msg, dur.count());
return std::nullopt;
} else {
auto exit_msg = fmt::format(res.signal ? "signalled {}" : "exited {}",
@@ -33,7 +33,6 @@ command sdist_create;
} // namespace cmd
int dispatch_main(const options& opts) noexcept {
dds::log::current_log_level = opts.log_level;
return dds::handle_cli_errors([&] {
DDS_E_SCOPE(opts.subcommand);
switch (opts.subcommand) {
@@ -111,7 +111,7 @@ struct options {
path project_dir = fs::current_path();
// Compile and build commands with `--no-warnings`/`--no-warn`
bool disable_warnings = true;
bool disable_warnings = false;
// Compile and build commands' `--jobs` parameter
int jobs = 0;
// Compile and build commands' `--toolchain` option:
@@ -17,34 +17,39 @@ using namespace dds;
namespace nsql = neo::sqlite3;
using nsql::exec;
using namespace nsql::literals;
using namespace std::literals;
namespace {
void migrate_1(nsql::database& db) {
db.exec(R"(
CREATE TABLE dds_files (
DROP TABLE IF EXISTS dds_deps;
DROP TABLE IF EXISTS dds_file_commands;
DROP TABLE IF EXISTS dds_files;
DROP TABLE IF EXISTS dds_compile_deps;
DROP TABLE IF EXISTS dds_compilations;
DROP TABLE IF EXISTS dds_source_files;
CREATE TABLE dds_source_files (
file_id INTEGER PRIMARY KEY,
path TEXT NOT NULL UNIQUE
);
CREATE TABLE dds_file_commands (
command_id INTEGER PRIMARY KEY,
CREATE TABLE dds_compilations (
compile_id INTEGER PRIMARY KEY,
file_id
INTEGER
UNIQUE
NOT NULL
REFERENCES dds_files(file_id),
INTEGER NOT NULL
UNIQUE REFERENCES dds_source_files(file_id),
command TEXT NOT NULL,
output TEXT NOT NULL
output TEXT NOT NULL,
n_compilations INTEGER NOT NULL DEFAULT 0,
avg_duration INTEGER NOT NULL DEFAULT 0
);
CREATE TABLE dds_deps (
CREATE TABLE dds_compile_deps (
input_file_id
INTEGER
NOT NULL
REFERENCES dds_files(file_id),
INTEGER NOT NULL
REFERENCES dds_source_files(file_id),
output_file_id
INTEGER
NOT NULL
REFERENCES dds_files(file_id),
INTEGER NOT NULL
REFERENCES dds_source_files(file_id),
input_mtime INTEGER NOT NULL,
UNIQUE(input_file_id, output_file_id)
);
@@ -54,31 +59,26 @@ void migrate_1(nsql::database& db) {
void ensure_migrated(nsql::database& db) {
db.exec(R"(
PRAGMA foreign_keys = 1;
CREATE TABLE IF NOT EXISTS dds_meta AS
WITH init (meta) AS (VALUES ('{"version": 0}'))
DROP TABLE IF EXISTS dds_meta;
CREATE TABLE IF NOT EXISTS dds_meta_1 AS
WITH init (version) AS (VALUES ('eggs'))
SELECT * FROM init;
)");
nsql::transaction_guard tr{db};
auto meta_st = db.prepare("SELECT meta FROM dds_meta");
auto [meta_json] = nsql::unpack_single<std::string>(meta_st);
auto version_st = db.prepare("SELECT version FROM dds_meta_1");
auto [version_str] = nsql::unpack_single<std::string>(version_st);
auto meta = nlohmann::json::parse(meta_json);
if (!meta.is_object()) {
throw_external_error<errc::corrupted_build_db>();
}
auto version_ = meta["version"];
if (!version_.is_number_integer()) {
throw_external_error<errc::corrupted_build_db>(
"The build database file is corrupted [bad dds_meta.version]");
}
int version = version_;
if (version < 1) {
const auto cur_version = "alpha-5"sv;
if (cur_version != version_str) {
if (!version_str.empty()) {
dds_log(info, "NOTE: A prior version of the project build database was found.");
dds_log(info, "This is not an error, but incremental builds will be invalidated.");
dds_log(info, "The database is being upgraded, and no further action is necessary.");
}
migrate_1(db);
}
meta["version"] = 1;
exec(db.prepare("UPDATE dds_meta SET meta=?"), meta.dump());
exec(db.prepare("UPDATE dds_meta_1 SET version=?"), cur_version);
}
} // namespace
@@ -114,13 +114,13 @@ database::database(nsql::database db)
std::int64_t database::_record_file(path_ref path_) {
auto path = fs::weakly_canonical(path_);
nsql::exec(_stmt_cache(R"(
INSERT OR IGNORE INTO dds_files (path)
INSERT OR IGNORE INTO dds_source_files (path)
VALUES (?)
)"_sql),
path.generic_string());
auto& st = _stmt_cache(R"(
SELECT file_id
FROM dds_files
FROM dds_source_files
WHERE path = ?1
)"_sql);
st.reset();
@@ -134,31 +134,45 @@ void database::record_dep(path_ref input, path_ref output, fs::file_time_type in
auto in_id = _record_file(input);
auto out_id = _record_file(output);
auto& st = _stmt_cache(R"(
INSERT OR REPLACE INTO dds_deps (input_file_id, output_file_id, input_mtime)
INSERT OR REPLACE INTO dds_compile_deps (input_file_id, output_file_id, input_mtime)
VALUES (?, ?, ?)
)"_sql);
nsql::exec(st, in_id, out_id, input_mtime.time_since_epoch().count());
}
void database::store_file_command(path_ref file, const command_info& cmd) {
void database::record_compilation(path_ref file, const completed_compilation& cmd) {
auto file_id = _record_file(file);
auto& st = _stmt_cache(R"(
INSERT OR REPLACE
INTO dds_file_commands(file_id, command, output)
VALUES (?1, ?2, ?3)
INSERT INTO dds_compilations(file_id, command, output, n_compilations, avg_duration)
VALUES (:file_id, :command, :output, 1, :duration)
ON CONFLICT(file_id) DO UPDATE SET
command = ?2,
output = ?3,
n_compilations = CASE
WHEN :duration < 500 THEN n_compilations
ELSE min(10, n_compilations + 1)
END,
avg_duration = CASE
WHEN :duration < 500 THEN avg_duration
ELSE avg_duration + ((:duration - avg_duration) / min(10, n_compilations + 1))
END
)"_sql);
nsql::exec(st, file_id, std::string_view(cmd.command), std::string_view(cmd.output));
nsql::exec(st,
file_id,
std::string_view(cmd.quoted_command),
std::string_view(cmd.output),
cmd.duration.count());
}
void database::forget_inputs_of(path_ref file) {
auto& st = _stmt_cache(R"(
WITH id_to_delete AS (
SELECT file_id
FROM dds_files
FROM dds_source_files
WHERE path = ?
)
DELETE FROM dds_deps
DELETE FROM dds_compile_deps
WHERE output_file_id IN id_to_delete
)"_sql);
nsql::exec(st, fs::weakly_canonical(file).generic_string());
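The ON CONFLICT clause in record_compilation above maintains a smoothed per-file compile time: runs under 500ms are ignored, and longer runs are folded in as an incremental mean whose effective sample count is capped at 10, so the average tracks recent behavior rather than all history. A C++ rendering of the same arithmetic (illustrative only; the SQL above is the authoritative version):

#include <algorithm>
#include <cstdint>

// Mirrors the UPDATE branch of the upsert. The INSERT branch simply stores
// (n_compilations = 1, avg_duration = :duration) for a first-ever compile.
void fold_in_duration(std::int64_t& n_compilations,
                      std::int64_t& avg_duration_ms,
                      std::int64_t  duration_ms) {
    if (duration_ms < 500) {
        return;  // WHEN :duration < 500: leave both columns untouched
    }
    n_compilations = std::min<std::int64_t>(10, n_compilations + 1);
    // Incremental mean over at most ~10 samples (integer division, as in SQLite).
    avg_duration_ms += (duration_ms - avg_duration_ms) / n_compilations;
}

For example, with n_compilations = 2 and avg_duration = 1000, a new 2500ms compile moves the average to 1000 + (2500 - 1000) / 3 = 1500.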
@@ -169,12 +183,12 @@ std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_)
auto& st = _stmt_cache(R"(
WITH file AS (
SELECT file_id
FROM dds_files
FROM dds_source_files
WHERE path = ?
)
SELECT path, input_mtime
FROM dds_deps
JOIN dds_files ON input_file_id = file_id
FROM dds_compile_deps
JOIN dds_source_files ON input_file_id = file_id
WHERE output_file_id IN file
)"_sql);
st.reset();
@@ -193,24 +207,24 @@ std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_)
return ret;
}
std::optional<command_info> database::command_of(path_ref file_) const {
std::optional<completed_compilation> database::command_of(path_ref file_) const {
auto file = fs::weakly_canonical(file_);
auto& st = _stmt_cache(R"(
WITH file AS (
SELECT file_id
FROM dds_files
FROM dds_source_files
WHERE path = ?
)
SELECT command, output
FROM dds_file_commands
SELECT command, output, avg_duration
FROM dds_compilations
WHERE file_id IN file
)"_sql);
st.reset();
st.bindings()[1] = file.generic_string();
auto opt_res = nsql::unpack_single_opt<std::string, std::string>(st);
auto opt_res = nsql::unpack_single_opt<std::string, std::string, std::int64_t>(st);
if (!opt_res) {
return std::nullopt;
}
auto& [cmd, out] = *opt_res;
return command_info{cmd, out};
}
auto& [cmd, out, dur] = *opt_res;
return completed_compilation{cmd, out, std::chrono::milliseconds(dur)};
}
@@ -15,9 +15,11 @@
namespace dds {
struct command_info {
std::string command;
struct completed_compilation {
std::string quoted_command;
std::string output;
// The amount of time that the command took to run
std::chrono::milliseconds duration;
};
struct input_file_info {
@@ -43,11 +45,11 @@ public:
}
void record_dep(path_ref input, path_ref output, fs::file_time_type input_mtime);
void store_file_command(path_ref file, const command_info& cmd);
void record_compilation(path_ref file, const completed_compilation& cmd);
void forget_inputs_of(path_ref file);
std::optional<std::vector<input_file_info>> inputs_of(path_ref file) const;
std::optional<command_info> command_of(path_ref file) const;
std::optional<completed_compilation> command_of(path_ref file) const;
};
} // namespace dds
@@ -275,12 +275,6 @@ std::optional<toolchain> toolchain::get_builtin(std::string_view tc_id) noexcept
return std::nullopt;
}
if (starts_with(tc_id, "gcc") || starts_with(tc_id, "clang")) {
json5::data& arr = root_map.emplace("link_flags", json5::data::array_type()).first->second;
arr.as_array().emplace_back("-static-libgcc");
arr.as_array().emplace_back("-static-libstdc++");
}
root_map.emplace("c_compiler", opt_triple->c);
root_map.emplace("cxx_compiler", opt_triple->cxx);
root_map.emplace("compiler_id", opt_triple->id);
@@ -35,7 +35,18 @@ void extend(Container& c, std::initializer_list<Item> il) {
c.insert(c.end(), il.begin(), il.end());
}
template <typename Container>
void unique_erase(Container& c) noexcept {
c.erase(std::unique(c.begin(), c.end()), c.end());
}
template <typename Container>
void sort_unique_erase(Container& c) noexcept {
std::sort(c.begin(), c.end());
unique_erase(c);
}
template <typename T>
using ref_vector = std::vector<std::reference_wrapper<T>>;
} // namespace dds
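A small usage illustration of the helpers above (not from the patch); this is the same dedup that generate_compile_command now applies to its include-directory lists to keep compiler command lines short:

#include <cassert>
#include <string>
#include <vector>
// Assumes the header shown above (which declares dds::sort_unique_erase) is included.

int main() {
    std::vector<std::string> include_dirs{
        "/usr/include", "vendor/fmt/include", "/usr/include", "vendor/fmt/include"};
    dds::sort_unique_erase(include_dirs);
    // Now sorted and deduplicated: "/usr/include", "vendor/fmt/include"
    assert(include_dirs.size() == 2);
}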
@@ -1,5 +1,7 @@
#include "./parallel.hpp"
#include <dds/util/signal.hpp>
#include <dds/util/log.hpp>
using namespace dds;
@@ -7,6 +9,8 @@ using namespace dds;
void dds::log_exception(std::exception_ptr eptr) noexcept {
try {
std::rethrow_exception(eptr);
} catch (const dds::user_cancelled&) {
// Don't log this one. The user knows what they did
} catch (const std::exception& e) {
dds_log(error, "{}", e.what());
}
@@ -18,6 +18,11 @@ void dds::install_signal_handlers() noexcept {
std::signal(SIGINT, handle_signal);
std::signal(SIGTERM, handle_signal);
#ifdef SIGQUIT
// Some systems issue SIGQUIT :shrug:
std::signal(SIGQUIT, handle_signal);
#endif
#ifdef SIGPIPE
// XXX: neo-io doesn't behave nicely when EOF is hit on sockets. This isn't
// easily fixed portably without simply blocking SIGPIPE globally.
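The hunk is truncated above, so the actual handling is not visible here. The comment points at the conventional POSIX remedy, which is to ignore SIGPIPE process-wide so that writes to a closed socket fail with EPIPE instead of killing the process. A sketch of that conventional approach (an assumption about the intent, not a quote of the missing lines):

#include <csignal>

void suppress_sigpipe() {
#ifdef SIGPIPE
    // Broken-pipe writes now return an error to the caller rather than
    // delivering a fatal SIGPIPE to the whole process.
    std::signal(SIGPIPE, SIG_IGN);
#endif
}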
@@ -90,7 +90,8 @@ class DDSWrapper:
root: Path,
toolchain: Optional[Path] = None,
build_root: Optional[Path] = None,
jobs: Optional[int] = None) -> None:
jobs: Optional[int] = None,
more_args: Optional[proc.CommandLine] = None) -> None:
"""
Run 'dds build' with the given arguments.
@@ -109,6 +110,7 @@ class DDSWrapper:
f'--jobs={jobs}',
f'{self.project_dir_flag}={root}',
f'--out={build_root}',
more_args or (),
])
def compile_file(self,
@@ -80,7 +80,7 @@ class Project:
Execute 'dds build' on the project
"""
with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc)
self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc, more_args=['-ldebug'])
def compile_file(self, *paths: Pathish, toolchain: Optional[Pathish] = None) -> None:
with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc: