 #include <filesystem>
 #include <iostream>
+#include <locale>
+static void load_locale() {
+    auto lang = std::getenv("LANG");
+    if (!lang) {
+        return;
+    }
+    try {
+        std::locale::global(std::locale(lang));
+    } catch (const std::runtime_error& e) {
+        // No locale with the given name
+        return;
+    }
+}
 int main_fn(std::string_view program_name, const std::vector<std::string>& argv) {
     dds::log::init_logger();
     auto log_subscr = neo::subscribe(&dds::log::ev_log::print);
+    load_locale();
+    std::setlocale(LC_CTYPE, ".utf8");
     dds::install_signal_handlers();
         // Non-null result from argument parsing, return that value immediately.
         return *result;
     }
-    dds::log::current_log_level = opts.log_level;
     return dds::cli::dispatch_main(opts);
 }
 int wmain(int argc, wchar_t** argv) {
     std::vector<std::string> u8_argv;
+    ::setlocale(LC_ALL, ".utf8");
     for (int i = 0; i < argc; ++i) {
         u8_argv.emplace_back(wstr_to_u8str(argv[i]));
     }

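The Windows entry point converts each wide-character argument to UTF-8 before forwarding to main_fn. The wstr_to_u8str helper itself does not appear in this hunk; a minimal sketch of such a conversion, assuming the usual Win32 WideCharToMultiByte API (the real helper may differ), could look like this:

    #include <string>
    #include <string_view>

    #include <windows.h>

    // Illustrative only: convert a UTF-16 argument to UTF-8. The first call
    // computes the required buffer size; the second call fills the buffer.
    std::string wstr_to_u8str(std::wstring_view in) {
        if (in.empty()) {
            return {};
        }
        const int len = ::WideCharToMultiByte(
            CP_UTF8, 0, in.data(), static_cast<int>(in.size()), nullptr, 0, nullptr, nullptr);
        std::string out(static_cast<std::size_t>(len), '\0');
        ::WideCharToMultiByte(
            CP_UTF8, 0, in.data(), static_cast<int>(in.size()), out.data(), len, nullptr, nullptr);
        return out;
    }
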
 void dds::update_deps_info(neo::output<database> db_, const file_deps_info& deps) {
     database& db = db_;
-    db.store_file_command(deps.output, {deps.command, deps.command_output});
+    db.record_compilation(deps.output, deps.command);
     db.forget_inputs_of(deps.output);
     for (auto&& inp : deps.inputs) {
         auto mtime = fs::last_write_time(inp);
     }
 }
-deps_rebuild_info dds::get_rebuild_info(const database& db, path_ref output_path) {
+std::optional<prior_compilation> dds::get_prior_compilation(const database& db, path_ref output_path) {
     auto cmd_ = db.command_of(output_path);
     if (!cmd_) {
         return {};
        })
        | ranges::views::transform([](auto& info) { return info.path; }) //
        | ranges::to_vector;
-    deps_rebuild_info ret;
-    ret.newer_inputs = std::move(changed_files);
-    ret.previous_command = cmd.command;
-    ret.previous_command_output = cmd.output;
+    prior_compilation ret;
+    ret.newer_inputs = std::move(changed_files);
+    ret.previous_command = cmd;
     return ret;
 }

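Because get_prior_compilation now returns std::optional<prior_compilation> instead of a struct that may merely be "empty", callers can distinguish "never compiled" from "compiled and up to date". A minimal sketch of the calling pattern (the real consumer is mk_compile_ticket, later in this diff; the header path below is an assumption):

    #include <filesystem>

    #include <dds/build/deps.hpp>  // assumed location of get_prior_compilation

    // Sketch only: inspect the recorded state for one output file.
    void inspect(const dds::database& db, const std::filesystem::path& object_file) {
        auto prior = dds::get_prior_compilation(db, object_file);
        if (!prior) {
            // Nothing recorded for this output: it must be compiled.
        } else if (!prior->newer_inputs.empty()) {
            // Some inputs are newer than what was recorded: recompile.
        } else {
            // Up to date. prior->previous_command carries the cached command line,
            // compiler output, and duration from the last real compilation.
        }
    }
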
  * other languages is not difficult.
  */
+#include <dds/db/database.hpp>
 #include <dds/util/fs.hpp>
 #include <neo/out.hpp>
     /**
      * The command that was used to generate the output
      */
-    std::string command;
-    /**
-     * The output of the command.
-     */
-    std::string command_output;
+    completed_compilation command;
 };
 class database;
 /**
  * Update the dependency information in the build database for later reference via
- * `get_rebuild_info`.
+ * `get_prior_compilation`.
  * @param db The database to update
  * @param info The dependency information to store
  */
  * that have a newer mtime than we have recorded, and the previous command and previous command
  * output that we have stored.
  */
-struct deps_rebuild_info {
+struct prior_compilation {
     std::vector<fs::path> newer_inputs;
-    std::string previous_command;
-    std::string previous_command_output;
+    completed_compilation previous_command;
 };
 /**
  * Given the path to an output file, read all the dependency information from the database. If the
- * given output has never been recorded, then the resulting object will be empty.
+ * given output has never been recorded, then the resulting object will be null.
  */
-deps_rebuild_info get_rebuild_info(const database& db, path_ref output_path);
+std::optional<prior_compilation> get_prior_compilation(const database& db, path_ref output_path);
 } // namespace dds

 #include <dds/proc.hpp>
 #include <dds/util/log.hpp>
 #include <dds/util/parallel.hpp>
+#include <dds/util/signal.hpp>
 #include <dds/util/string.hpp>
 #include <dds/util/time.hpp>
 #include <fansi/styled.hpp>
 #include <neo/assert.hpp>
+#include <range/v3/algorithm/count_if.hpp>
 #include <range/v3/range/conversion.hpp>
-#include <range/v3/view/filter.hpp>
 #include <range/v3/view/transform.hpp>
 #include <algorithm>
 namespace {
-/// The actual "real" information that we need to perform a compilation.
-struct compile_file_full {
-    const compile_file_plan& plan;
-    fs::path object_file_path;
-    compile_command_info cmd_info;
-};
 /// Simple aggregate that stores a counter for keeping track of compile progress
 struct compile_counter {
-    std::atomic_size_t n;
+    std::atomic_size_t n{1};
     const std::size_t max;
     const std::size_t max_digits;
 };

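Two details of the counter are easy to miss. The in-class initializer n{1} matters because the construction site later in this diff uses designated initializers that do not name n, and fetch_add returns the previous value, so the first finished compile is still reported as 1 of N. The progress column itself uses fmt's dynamic width specifier ({:{}}), which takes the field width from the next argument (max_digits). A small illustration, assuming fmt behaves as documented:

    #include <string>

    #include <fmt/core.h>

    int main() {
        // "{:{}}" reads the field width (here 3) from the argument list, so the
        // index stays right-aligned to the width of the total: "[  7/120]".
        std::string cell = fmt::format("[{:{}}/{}]", 7, 3, 120);
        fmt::print("{}\n", cell);
    }
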
+struct compile_ticket {
+    std::reference_wrapper<const compile_file_plan> plan;
+    // If non-null, the information required to compile the file
+    compile_command_info command;
+    fs::path object_file_path;
+    bool needs_recompile;
+    // Information about the previous time a file was compiled, if any
+    std::optional<completed_compilation> prior_command;
+};
 /**
  * Actually performs a compilation and collects deps information from that compilation
  *
  * @param counter A thread-safe counter for display progress to the user
  */
 std::optional<file_deps_info>
-do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& counter) {
+handle_compilation(const compile_ticket& compile, build_env_ref env, compile_counter& counter) {
+    if (!compile.needs_recompile) {
+        // We don't actually compile this file. Just issue any prior warning messages that were from
+        // a prior compilation.
+        neo_assert(invariant,
+                   compile.prior_command.has_value(),
+                   "Expected a prior compilation command for file",
+                   compile.plan.get().source_path(),
+                   quote_command(compile.command.command));
+        auto& prior = *compile.prior_command;
+        if (dds::trim_view(prior.output).empty()) {
+            // Nothing to show
+            return {};
+        }
+        if (!compile.plan.get().rules().enable_warnings()) {
+            // This file shouldn't show warnings. The compiler *may* have produced prior output, but
+            // this block will be hit when the source file belongs to an external dependency. Rather
+            // than continually spam the user with warnings that belong to dependencies, don't
+            // repeatedly show them.
+            dds_log(trace,
+                    "Cached compiler output suppressed for file with disabled warnings ({})",
+                    compile.plan.get().source_path().string());
+            return {};
+        }
+        dds_log(
+            warn,
+            "While compiling file .bold.cyan[{}] [.bold.yellow[{}]] (.br.blue[cached compiler output]):\n{}"_styled,
+            compile.plan.get().source_path().string(),
+            prior.quoted_command,
+            prior.output);
+        return {};
+    }
     // Create the parent directory
-    fs::create_directories(cf.object_file_path.parent_path());
+    fs::create_directories(compile.object_file_path.parent_path());
     // Generate a log message to display to the user
-    auto source_path = cf.plan.source_path();
+    auto source_path = compile.plan.get().source_path();
-    auto msg = fmt::format("[{}] Compile: .br.cyan[{}]"_styled,
-                           cf.plan.qualifier(),
-                           fs::relative(source_path, cf.plan.source().basis_path).string());
+    auto msg
+        = fmt::format("[{}] Compile: .br.cyan[{}]"_styled,
+                      compile.plan.get().qualifier(),
+                      fs::relative(source_path, compile.plan.get().source().basis_path).string());
     // Do it!
     dds_log(info, msg);
     auto&& [dur_ms, proc_res]
-        = timed<std::chrono::milliseconds>([&] { return run_proc(cf.cmd_info.command); });
+        = timed<std::chrono::milliseconds>([&] { return run_proc(compile.command.command); });
     auto nth = counter.n.fetch_add(1);
     dds_log(info,
             "{:60} - {:>7L}ms [{:{}}/{}]",
     */
     } else if (env.toolchain.deps_mode() == file_deps_mode::gnu) {
         // GNU-style deps using Makefile generation
-        assert(cf.cmd_info.gnu_depfile_path.has_value());
-        auto& df_path = *cf.cmd_info.gnu_depfile_path;
+        assert(compile.command.gnu_depfile_path.has_value());
+        auto& df_path = *compile.command.gnu_depfile_path;
         if (!fs::is_regular_file(df_path)) {
             dds_log(critical,
                     "The expected Makefile deps were not generated on disk. This is a bug! "
            dds_log(trace, "Loading compilation dependencies from {}", df_path.string());
            auto dep_info = dds::parse_mkfile_deps_file(df_path);
            neo_assert(invariant,
-                      dep_info.output == cf.object_file_path,
+                      dep_info.output == compile.object_file_path,
                       "Generated mkfile deps output path does not match the object file path that "
-                      "we gave it to compile into.",
+                      " we gave it to compile into.",
                       dep_info.output.string(),
-                      cf.object_file_path.string());
-            dep_info.command = quote_command(cf.cmd_info.command);
-            dep_info.command_output = compiler_output;
-            ret_deps_info = std::move(dep_info);
+                      compile.object_file_path.string());
+            dep_info.command.quoted_command = quote_command(compile.command.command);
+            dep_info.command.output = compiler_output;
+            dep_info.command.duration = dur_ms;
+            ret_deps_info = std::move(dep_info);
         }
     } else if (env.toolchain.deps_mode() == file_deps_mode::msvc) {
         // Uglier deps generation by parsing the output from cl.exe
        // cause a miscompile
        if (!msvc_deps.deps_info.inputs.empty()) {
            // Add the main source file as an input, since it is not listed by /showIncludes
-            msvc_deps.deps_info.inputs.push_back(cf.plan.source_path());
-            msvc_deps.deps_info.output = cf.object_file_path;
-            msvc_deps.deps_info.command = quote_command(cf.cmd_info.command);
-            msvc_deps.deps_info.command_output = compiler_output;
-            ret_deps_info = std::move(msvc_deps.deps_info);
+            msvc_deps.deps_info.inputs.push_back(compile.plan.get().source_path());
+            msvc_deps.deps_info.output = compile.object_file_path;
+            msvc_deps.deps_info.command.quoted_command = quote_command(compile.command.command);
+            msvc_deps.deps_info.command.output = compiler_output;
+            msvc_deps.deps_info.command.duration = dur_ms;
+            ret_deps_info = std::move(msvc_deps.deps_info);
        }
    } else {
        /**
    // Log a compiler failure
    if (!compiled_okay) {
-        dds_log(error, "Compilation failed: {}", source_path.string());
+        dds_log(error, "Compilation failed: .bold.cyan[{}]"_styled, source_path.string());
        dds_log(error,
                "Subcommand .bold.red[FAILED] [Exited {}]: .bold.yellow[{}]\n{}"_styled,
                compile_retc,
-               quote_command(cf.cmd_info.command),
+               quote_command(compile.command.command),
                compiler_output);
        if (compile_signal) {
            dds_log(error, "Process exited via signal {}", compile_signal);
    // Print any compiler output, sans whitespace
    if (!dds::trim_view(compiler_output).empty()) {
        dds_log(warn,
-               "While compiling file {} [{}]:\n{}",
+               "While compiling file .bold.cyan[{}] [.bold.yellow[{}]]:\n{}"_styled,
                source_path.string(),
-               quote_command(cf.cmd_info.command),
+               quote_command(compile.command.command),
                compiler_output);
    }
    return ret_deps_info;
 }
-/// Generate the full compile command information from an abstract plan
-compile_file_full realize_plan(const compile_file_plan& plan, build_env_ref env) {
-    auto cmd_info = plan.generate_compile_command(env);
-    return compile_file_full{plan, plan.calc_object_file_path(env), cmd_info};
-}
 /**
  * Determine if the given compile command should actually be executed based on
  * the dependency information we have recorded in the database.
  */
-bool should_compile(const compile_file_full& comp, const database& db) {
-    if (!fs::exists(comp.object_file_path)) {
-        dds_log(trace, "Compile {}: Output does not exist", comp.plan.source_path().string());
+compile_ticket mk_compile_ticket(const compile_file_plan& plan, build_env_ref env) {
+    compile_ticket ret{.plan = plan,
+                       .command = plan.generate_compile_command(env),
+                       .object_file_path = plan.calc_object_file_path(env),
+                       .needs_recompile = false,
+                       .prior_command = {}};
+    auto rb_info = get_prior_compilation(env.db, ret.object_file_path);
+    if (!rb_info) {
+        dds_log(trace, "Compile {}: No recorded compilation info", plan.source_path().string());
+        ret.needs_recompile = true;
+    } else if (!fs::exists(ret.object_file_path)) {
+        dds_log(trace, "Compile {}: Output does not exist", plan.source_path().string());
         // The output file simply doesn't exist. We have to recompile, of course.
-        return true;
-    }
-    auto rb_info = get_rebuild_info(db, comp.object_file_path);
-    if (rb_info.previous_command.empty()) {
-        // We have no previous compile command for this file. Assume it is new.
-        dds_log(trace, "Recompile {}: No prior compilation info", comp.plan.source_path().string());
-        return true;
-    }
-    if (!rb_info.newer_inputs.empty()) {
+        ret.needs_recompile = true;
+    } else if (!rb_info->newer_inputs.empty()) {
         // Inputs to this file have changed from a prior execution.
         dds_log(trace,
                 "Recompile {}: Inputs have changed (or no input information)",
-                comp.plan.source_path().string());
-        return true;
-    }
-    auto cur_cmd_str = quote_command(comp.cmd_info.command);
-    if (cur_cmd_str != rb_info.previous_command) {
-        dds_log(trace,
-                "Recompile {}: Compile command has changed",
-                comp.plan.source_path().string());
+                plan.source_path().string());
+        ret.needs_recompile = true;
+    } else if (quote_command(ret.command.command) != rb_info->previous_command.quoted_command) {
+        dds_log(trace, "Recompile {}: Compile command has changed", plan.source_path().string());
         // The command used to generate the output is new
-        return true;
+        ret.needs_recompile = true;
+    } else {
+        // Nope. This file is up-to-date.
+        dds_log(debug,
+                "Skip compilation of {} (Result is up-to-date)",
+                plan.source_path().string());
     }
-    // Nope. This file is up-to-date.
-    dds_log(debug,
-            "Skip compilation of {} (Result is up-to-date)",
-            comp.plan.source_path().string());
-    return false;
+    if (rb_info) {
+        ret.prior_command = rb_info->previous_command;
+    }
+    return ret;
 }
 } // namespace
     auto each_realized = //
         compiles
         // Convert each _plan_ into a concrete object for compiler invocation.
-        | views::transform([&](auto&& plan) { return realize_plan(plan, env); })
-        // Filter out compile jobs that we don't need to run. This drops compilations where the
-        // output is "up-to-date" based on its inputs.
-        | views::filter([&](auto&& real) { return should_compile(real, env.db); })
+        | views::transform([&](auto&& plan) { return mk_compile_ticket(plan, env); })
         // Convert to to a real vector so we can ask its size.
         | ranges::to_vector;
+    auto n_to_compile = static_cast<std::size_t>(
+        ranges::count_if(each_realized, &compile_ticket::needs_recompile));
     // Keep a counter to display progress to the user.
-    const auto total = each_realized.size();
-    const auto max_digits = fmt::format("{}", total).size();
-    compile_counter counter{{1}, total, max_digits};
+    const auto max_digits = fmt::format("{}", n_to_compile).size();
+    compile_counter counter{.max = n_to_compile, .max_digits = max_digits};
     // Ass we execute, accumulate new dependency information from successful compilations
     std::vector<file_deps_info> all_new_deps;
     std::mutex mut;
     // Do it!
-    auto okay = parallel_run(each_realized, njobs, [&](const compile_file_full& full) {
-        auto new_dep = do_compile(full, env, counter);
+    auto okay = parallel_run(each_realized, njobs, [&](const compile_ticket& tkt) {
+        auto new_dep = handle_compilation(tkt, env, counter);
         if (new_dep) {
             std::unique_lock lk{mut};
             all_new_deps.push_back(std::move(*new_dep));
     });
     // Update compile dependency information
-    auto tr = env.db.transaction();
+    dds::stopwatch update_timer;
+    auto tr = env.db.transaction();
     for (auto& info : all_new_deps) {
         dds_log(trace, "Update dependency info on {}", info.output.string());
         update_deps_info(neo::into(env.db), info);
     }
+    dds_log(debug, "Dependency update took {:L}ms", update_timer.elapsed_ms().count());
+    cancellation_point();
     // Return whether or not there were any failures.
     return okay;
 }

 #include <dds/util/signal.hpp>
 #include <dds/util/time.hpp>
+#include <range/v3/algorithm/sort.hpp>
+#include <range/v3/algorithm/unique.hpp>
 #include <string>
 #include <vector>
         extend(spec.external_include_dirs, env.ureqs.include_paths(use));
     }
     extend(spec.definitions, _rules.defs());
+    // Avoid huge command lines by shrinking down the list of #include dirs
+    sort_unique_erase(spec.external_include_dirs);
+    sort_unique_erase(spec.include_dirs);
     return env.toolchain.create_compile_command(spec, dds::fs::current_path(), env.knobs);
 }

         [&] { return run_proc({.command = {exe_path.string()}, .timeout = 10s}); });
     if (res.okay()) {
-        dds_log(info, "{} - .br.green[PASS] - {:>9L}μs", msg, dur.count());
+        dds_log(info, "{} - .br.green[PASS] - {:>9L}μs"_styled, msg, dur.count());
         return std::nullopt;
     } else {
         auto exit_msg = fmt::format(res.signal ? "signalled {}" : "exited {}",

 } // namespace cmd
 int dispatch_main(const options& opts) noexcept {
+    dds::log::current_log_level = opts.log_level;
     return dds::handle_cli_errors([&] {
         DDS_E_SCOPE(opts.subcommand);
         switch (opts.subcommand) {

     path project_dir = fs::current_path();
     // Compile and build commands with `--no-warnings`/`--no-warn`
-    bool disable_warnings = true;
+    bool disable_warnings = false;
     // Compile and build commands' `--jobs` parameter
     int jobs = 0;
     // Compile and build commands' `--toolchain` option:

 namespace nsql = neo::sqlite3;
 using nsql::exec;
 using namespace nsql::literals;
+using namespace std::literals;
 namespace {
 void migrate_1(nsql::database& db) {
     db.exec(R"(
-        CREATE TABLE dds_files (
+        DROP TABLE IF EXISTS dds_deps;
+        DROP TABLE IF EXISTS dds_file_commands;
+        DROP TABLE IF EXISTS dds_files;
+        DROP TABLE IF EXISTS dds_compile_deps;
+        DROP TABLE IF EXISTS dds_compilations;
+        DROP TABLE IF EXISTS dds_source_files;
+        CREATE TABLE dds_source_files (
             file_id INTEGER PRIMARY KEY,
             path TEXT NOT NULL UNIQUE
         );
-        CREATE TABLE dds_file_commands (
-            command_id INTEGER PRIMARY KEY,
+        CREATE TABLE dds_compilations (
+            compile_id INTEGER PRIMARY KEY,
             file_id
-                INTEGER
-                UNIQUE
-                NOT NULL
-                REFERENCES dds_files(file_id),
+                INTEGER NOT NULL
+                UNIQUE REFERENCES dds_source_files(file_id),
             command TEXT NOT NULL,
-            output TEXT NOT NULL
+            output TEXT NOT NULL,
+            n_compilations INTEGER NOT NULL DEFAULT 0,
+            avg_duration INTEGER NOT NULL DEFAULT 0
         );
-        CREATE TABLE dds_deps (
+        CREATE TABLE dds_compile_deps (
             input_file_id
-                INTEGER
-                NOT NULL
-                REFERENCES dds_files(file_id),
+                INTEGER NOT NULL
+                REFERENCES dds_source_files(file_id),
             output_file_id
-                INTEGER
-                NOT NULL
-                REFERENCES dds_files(file_id),
+                INTEGER NOT NULL
+                REFERENCES dds_source_files(file_id),
             input_mtime INTEGER NOT NULL,
             UNIQUE(input_file_id, output_file_id)
         );
 void ensure_migrated(nsql::database& db) {
     db.exec(R"(
         PRAGMA foreign_keys = 1;
-        CREATE TABLE IF NOT EXISTS dds_meta AS
-            WITH init (meta) AS (VALUES ('{"version": 0}'))
+        DROP TABLE IF EXISTS dds_meta;
+        CREATE TABLE IF NOT EXISTS dds_meta_1 AS
+            WITH init (version) AS (VALUES ('eggs'))
             SELECT * FROM init;
     )");
     nsql::transaction_guard tr{db};
-    auto meta_st = db.prepare("SELECT meta FROM dds_meta");
-    auto [meta_json] = nsql::unpack_single<std::string>(meta_st);
+    auto version_st = db.prepare("SELECT version FROM dds_meta_1");
+    auto [version_str] = nsql::unpack_single<std::string>(version_st);
-    auto meta = nlohmann::json::parse(meta_json);
-    if (!meta.is_object()) {
-        throw_external_error<errc::corrupted_build_db>();
-    }
-    auto version_ = meta["version"];
-    if (!version_.is_number_integer()) {
-        throw_external_error<errc::corrupted_build_db>(
-            "The build database file is corrupted [bad dds_meta.version]");
-    }
-    int version = version_;
-    if (version < 1) {
+    const auto cur_version = "alpha-5"sv;
+    if (cur_version != version_str) {
+        if (!version_str.empty()) {
+            dds_log(info, "NOTE: A prior version of the project build database was found.");
+            dds_log(info, "This is not an error, but incremental builds will be invalidated.");
+            dds_log(info, "The database is being upgraded, and no further action is necessary.");
+        }
         migrate_1(db);
     }
-    meta["version"] = 1;
-    exec(db.prepare("UPDATE dds_meta SET meta=?"), meta.dump());
+    exec(db.prepare("UPDATE dds_meta_1 SET version=?"), cur_version);
 }
 } // namespace
 std::int64_t database::_record_file(path_ref path_) {
     auto path = fs::weakly_canonical(path_);
     nsql::exec(_stmt_cache(R"(
-        INSERT OR IGNORE INTO dds_files (path)
+        INSERT OR IGNORE INTO dds_source_files (path)
         VALUES (?)
     )"_sql),
                path.generic_string());
     auto& st = _stmt_cache(R"(
         SELECT file_id
-        FROM dds_files
+        FROM dds_source_files
         WHERE path = ?1
     )"_sql);
     st.reset();
     auto in_id = _record_file(input);
     auto out_id = _record_file(output);
     auto& st = _stmt_cache(R"(
-        INSERT OR REPLACE INTO dds_deps (input_file_id, output_file_id, input_mtime)
+        INSERT OR REPLACE INTO dds_compile_deps (input_file_id, output_file_id, input_mtime)
         VALUES (?, ?, ?)
     )"_sql);
     nsql::exec(st, in_id, out_id, input_mtime.time_since_epoch().count());
 }
-void database::store_file_command(path_ref file, const command_info& cmd) {
+void database::record_compilation(path_ref file, const completed_compilation& cmd) {
     auto file_id = _record_file(file);
     auto& st = _stmt_cache(R"(
-        INSERT OR REPLACE
-            INTO dds_file_commands(file_id, command, output)
-            VALUES (?1, ?2, ?3)
+        INSERT INTO dds_compilations(file_id, command, output, n_compilations, avg_duration)
+            VALUES (:file_id, :command, :output, 1, :duration)
+        ON CONFLICT(file_id) DO UPDATE SET
+            command = ?2,
+            output = ?3,
+            n_compilations = CASE
+                WHEN :duration < 500 THEN n_compilations
+                ELSE min(10, n_compilations + 1)
+            END,
+            avg_duration = CASE
+                WHEN :duration < 500 THEN avg_duration
+                ELSE avg_duration + ((:duration - avg_duration) / min(10, n_compilations + 1))
+            END
     )"_sql);
-    nsql::exec(st, file_id, std::string_view(cmd.command), std::string_view(cmd.output));
+    nsql::exec(st,
+               file_id,
+               std::string_view(cmd.quoted_command),
+               std::string_view(cmd.output),
+               cmd.duration.count());
 }

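The upsert above keeps a running average of how long each translation unit takes to compile: new durations fold in with the incremental-mean step avg += (duration - avg) / n, n is capped at 10 so old samples age out like a moving average, and runs under 500 ms are ignored as noise. For example, with a stored average of 2000 ms over n = 4, a new 3000 ms compile yields 2000 + (3000 - 2000) / 5 = 2200 ms. A small sketch of the same arithmetic in plain C++ (illustrative only, not the database code):

    #include <algorithm>
    #include <cstdint>

    // Mirrors the SQL ON CONFLICT update: a capped running average of compile
    // durations (milliseconds), skipping very fast runs.
    struct duration_stats {
        std::int64_t n_compilations = 0;
        std::int64_t avg_duration = 0;

        void record(std::int64_t duration_ms) {
            if (duration_ms < 500) {
                return;  // too fast to be worth tracking
            }
            n_compilations = std::min<std::int64_t>(10, n_compilations + 1);
            avg_duration += (duration_ms - avg_duration) / n_compilations;
        }
    };
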
 void database::forget_inputs_of(path_ref file) {
     auto& st = _stmt_cache(R"(
         WITH id_to_delete AS (
             SELECT file_id
-            FROM dds_files
+            FROM dds_source_files
             WHERE path = ?
         )
-        DELETE FROM dds_deps
+        DELETE FROM dds_compile_deps
         WHERE output_file_id IN id_to_delete
     )"_sql);
     nsql::exec(st, fs::weakly_canonical(file).generic_string());
     auto& st = _stmt_cache(R"(
         WITH file AS (
             SELECT file_id
-            FROM dds_files
+            FROM dds_source_files
             WHERE path = ?
         )
         SELECT path, input_mtime
-        FROM dds_deps
-        JOIN dds_files ON input_file_id = file_id
+        FROM dds_compile_deps
+        JOIN dds_source_files ON input_file_id = file_id
         WHERE output_file_id IN file
     )"_sql);
     st.reset();
     return ret;
 }
-std::optional<command_info> database::command_of(path_ref file_) const {
+std::optional<completed_compilation> database::command_of(path_ref file_) const {
     auto file = fs::weakly_canonical(file_);
     auto& st = _stmt_cache(R"(
         WITH file AS (
             SELECT file_id
-            FROM dds_files
+            FROM dds_source_files
             WHERE path = ?
         )
-        SELECT command, output
-        FROM dds_file_commands
+        SELECT command, output, avg_duration
+        FROM dds_compilations
         WHERE file_id IN file
     )"_sql);
     st.reset();
     st.bindings()[1] = file.generic_string();
-    auto opt_res = nsql::unpack_single_opt<std::string, std::string>(st);
+    auto opt_res = nsql::unpack_single_opt<std::string, std::string, std::int64_t>(st);
     if (!opt_res) {
         return std::nullopt;
     }
-    auto& [cmd, out] = *opt_res;
-    return command_info{cmd, out};
-}
+    auto& [cmd, out, dur] = *opt_res;
+    return completed_compilation{cmd, out, std::chrono::milliseconds(dur)};
+}

 namespace dds {
-struct command_info {
-    std::string command;
+struct completed_compilation {
+    std::string quoted_command;
     std::string output;
+    // The amount of time that the command took to run
+    std::chrono::milliseconds duration;
 };
 struct input_file_info {
     }
     void record_dep(path_ref input, path_ref output, fs::file_time_type input_mtime);
-    void store_file_command(path_ref file, const command_info& cmd);
+    void record_compilation(path_ref file, const completed_compilation& cmd);
     void forget_inputs_of(path_ref file);
     std::optional<std::vector<input_file_info>> inputs_of(path_ref file) const;
-    std::optional<command_info> command_of(path_ref file) const;
+    std::optional<completed_compilation> command_of(path_ref file) const;
 };
 } // namespace dds

         return std::nullopt;
     }
+    if (starts_with(tc_id, "gcc") || starts_with(tc_id, "clang")) {
+        json5::data& arr = root_map.emplace("link_flags", json5::data::array_type()).first->second;
+        arr.as_array().emplace_back("-static-libgcc");
+        arr.as_array().emplace_back("-static-libstdc++");
+    }
     root_map.emplace("c_compiler", opt_triple->c);
     root_map.emplace("cxx_compiler", opt_triple->cxx);
     root_map.emplace("compiler_id", opt_triple->id);

     c.insert(c.end(), il.begin(), il.end());
 }
+template <typename Container>
+void unique_erase(Container& c) noexcept {
+    c.erase(std::unique(c.begin(), c.end()), c.end());
+}
+template <typename Container>
+void sort_unique_erase(Container& c) noexcept {
+    std::sort(c.begin(), c.end());
+    unique_erase(c);
+}

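These helpers back the sort_unique_erase calls added to generate_compile_command earlier in this diff: sorting first lets std::unique collapse every duplicate, and the erase trims the leftover tail. A tiny usage sketch (the header path below is an assumption):

    #include <string>
    #include <vector>

    #include <dds/util/algo.hpp>  // assumed location of sort_unique_erase

    int main() {
        // Collapse duplicate include directories so repeated usage requirements
        // don't bloat the generated command line.
        std::vector<std::string> dirs = {"include", "src", "include", "src", "vendor"};
        dds::sort_unique_erase(dirs);
        // dirs == {"include", "src", "vendor"}
    }
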
 template <typename T>
 using ref_vector = std::vector<std::reference_wrapper<T>>;
 } // namespace dds

| #include "./parallel.hpp" | #include "./parallel.hpp" | ||||
| #include <dds/util/signal.hpp> | |||||
| #include <dds/util/log.hpp> | #include <dds/util/log.hpp> | ||||
| using namespace dds; | using namespace dds; | ||||
| void dds::log_exception(std::exception_ptr eptr) noexcept { | void dds::log_exception(std::exception_ptr eptr) noexcept { | ||||
| try { | try { | ||||
| std::rethrow_exception(eptr); | std::rethrow_exception(eptr); | ||||
| } catch (const dds::user_cancelled&) { | |||||
| // Don't log this one. The user knows what they did | |||||
| } catch (const std::exception& e) { | } catch (const std::exception& e) { | ||||
| dds_log(error, "{}", e.what()); | dds_log(error, "{}", e.what()); | ||||
| } | } |
     std::signal(SIGINT, handle_signal);
     std::signal(SIGTERM, handle_signal);
+#ifdef SIGQUIT
+    // Some systems issue SIGQUIT :shrug:
+    std::signal(SIGQUIT, handle_signal);
+#endif
 #ifdef SIGPIPE
     // XXX: neo-io doesn't behave nicely when EOF is hit on sockets. This Isn't
     // easily fixed portably without simply blocking SIGPIPE globally.

               root: Path,
               toolchain: Optional[Path] = None,
               build_root: Optional[Path] = None,
-              jobs: Optional[int] = None) -> None:
+              jobs: Optional[int] = None,
+              more_args: Optional[proc.CommandLine] = None) -> None:
         """
         Run 'dds build' with the given arguments.
             f'--jobs={jobs}',
             f'{self.project_dir_flag}={root}',
             f'--out={build_root}',
+            more_args or (),
         ])
     def compile_file(self,

         Execute 'dds build' on the project
         """
         with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
-            self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc)
+            self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc, more_args=['-ldebug'])
     def compile_file(self, *paths: Pathish, toolchain: Optional[Pathish] = None) -> None:
         with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc: