@@ -107,6 +107,22 @@ struct cli_base {
"Print `yes` and exit 0. Useful for scripting.",
{"are-you-the-real-dds?"}};
args::MapFlag<std::string, dds::log::level> log_level{
parser,
"log-level",
"Set the logging level",
{"log-level", 'l'},
{
{"trace", dds::log::level::trace},
{"debug", dds::log::level::debug},
{"info", dds::log::level::info},
{"warn", dds::log::level::warn},
{"error", dds::log::level::error},
{"critical", dds::log::level::critical},
},
dds::log::level::info,
};
args::Group cmd_group{parser, "Available Commands"};
};
@@ -227,7 +243,7 @@ struct cli_catalog {
auto tsd = dds::get_package_sdist(*info);
auto out_path = out.Get();
auto dest = out_path / id.to_string();
dds::log::info("Create sdist at {}", dest.string());
dds_log(info, "Create sdist at {}", dest.string());
dds::fs::remove_all(dest);
dds::safe_rename(tsd.sdist.path, dest);
}
@@ -342,14 +358,14 @@ struct cli_catalog {
auto cat = cat_path.open();
auto pkg = cat.get(pk_id);
if (!pkg) {
dds::log::error("No package '{}' in the catalog", pk_id.to_string());
dds_log(error, "No package '{}' in the catalog", pk_id.to_string());
return 1;
}
std::cout << "Name: " << pkg->ident.name << '\n'
<< "Version: " << pkg->ident.version << '\n';
for (const auto& dep : pkg->deps) {
std::cout << "Depends: " << dep.to_string() << '\n';
std::cout << "Depends: " << dep.to_string() << '\n';
}
std::visit([&](const auto& remote) { print_remote_info(remote); }, pkg->remote);
@@ -419,9 +435,9 @@ struct cli_repo {
});
for (const auto& [name, grp] : grp_by_name) {
dds::log::info("{}:", name);
dds_log(info, "{}:", name);
for (const dds::sdist& sd : grp) {
dds::log::info(" - {}", sd.manifest.pkg_id.version.to_string());
dds_log(info, " - {}", sd.manifest.pkg_id.version.to_string());
}
}
@@ -692,7 +708,7 @@ struct cli_build_deps {
auto all_file_deps = deps_files.Get() //
| ranges::views::transform([&](auto dep_fpath) {
dds::log::info("Reading deps from {}", dep_fpath.string());
dds_log(info, "Reading deps from {}", dep_fpath.string());
return dds::dependency_manifest::from_file(dep_fpath).dependencies;
})
| ranges::actions::join;
@@ -709,7 +725,7 @@ struct cli_build_deps {
dds::repo_flags::write_lock | dds::repo_flags::create_if_absent,
[&](dds::repository repo) {
// Download dependencies
dds::log::info("Loading {} dependencies", all_deps.size());
dds_log(info, "Loading {} dependencies", all_deps.size());
auto deps = repo.solve(all_deps, cat);
dds::get_all(deps, repo, cat);
for (const dds::package_id& pk : deps) {
@@ -717,7 +733,7 @@ struct cli_build_deps {
assert(sdist_ptr);
dds::sdist_build_params deps_params;
deps_params.subdir = sdist_ptr->manifest.pkg_id.to_string();
dds::log::info("Dependency: {}", sdist_ptr->manifest.pkg_id.to_string());
dds_log(info, "Dependency: {}", sdist_ptr->manifest.pkg_id.to_string());
bd.add(*sdist_ptr, deps_params);
}
});
@@ -740,9 +756,6 @@ struct cli_build_deps {
*/
int main(int argc, char** argv) {
#if DDS_DEBUG
dds::log::current_log_level = dds::log::level::debug;
#endif
spdlog::set_pattern("[%H:%M:%S] [%^%-5l%$] %v");
args::ArgumentParser parser("DDS - The drop-dead-simple library manager");
@@ -752,6 +765,7 @@ int main(int argc, char** argv) {
cli_repo repo{cli};
cli_catalog catalog{cli};
cli_build_deps build_deps{cli};
try {
parser.ParseCLI(argc, argv);
} catch (const args::Help&) {
@@ -764,6 +778,7 @@ int main(int argc, char** argv) {
}
dds::install_signal_handlers();
dds::log::current_log_level = cli.log_level.Get();
try {
if (cli._verify_ident) {
@@ -784,15 +799,15 @@ int main(int argc, char** argv) {
std::terminate();
}
} catch (const dds::user_cancelled&) {
dds::log::critical("Operation cancelled by user");
dds_log(critical, "Operation cancelled by user");
return 2;
} catch (const dds::error_base& e) {
dds::log::error("{}", e.what());
dds::log::error("{}", e.explanation());
dds::log::error("Refer: {}", e.error_reference());
dds_log(error, "{}", e.what());
dds_log(error, "{}", e.explanation());
dds_log(error, "Refer: {}", e.error_reference());
return 1;
} catch (const std::exception& e) {
dds::log::critical(e.what());
dds_log(critical, e.what());
return 2;
}
}
@@ -23,14 +23,14 @@ struct state {
};
void log_failure(const test_failure& fail) {
log::error("Test '{}' failed! [exited {}]", fail.executable_path.string(), fail.retc);
dds_log(error, "Test '{}' failed! [exited {}]", fail.executable_path.string(), fail.retc);
if (fail.signal) {
log::error("Test execution received signal {}", fail.signal);
dds_log(error, "Test execution received signal {}", fail.signal);
}
if (trim_view(fail.output).empty()) {
log::error("(Test executable produced no output");
dds_log(error, "(Test executable produced no output");
} else {
log::error("Test output:\n{}[dds - test output end]", fail.output);
dds_log(error, "Test output:\n{}[dds - test output end]", fail.output);
}
}
@@ -83,7 +83,7 @@ prepare_catch2_driver(test_lib test_driver, const build_params& params, build_en
auto obj_file = plan.calc_object_file_path(env2);
if (!fs::exists(obj_file)) {
log::info("Compiling Catch2 test driver (This will only happen once)...");
dds_log(info, "Compiling Catch2 test driver (This will only happen once)...");
compile_all(std::array{plan}, env2, 1);
}
@@ -241,19 +241,19 @@ void builder::build(const build_params& params) const {
dds::stopwatch sw;
plan.compile_all(env, params.parallel_jobs);
log::info("Compilation completed in {:L}ms", sw.elapsed_ms().count());
dds_log(info, "Compilation completed in {:L}ms", sw.elapsed_ms().count());
sw.reset();
plan.archive_all(env, params.parallel_jobs);
log::info("Archiving completed in {:L}ms", sw.elapsed_ms().count());
dds_log(info, "Archiving completed in {:L}ms", sw.elapsed_ms().count());
sw.reset();
plan.link_all(env, params.parallel_jobs);
log::info("Runtime binary linking completed in {:L}ms", sw.elapsed_ms().count());
dds_log(info, "Runtime binary linking completed in {:L}ms", sw.elapsed_ms().count());
sw.reset();
auto test_failures = plan.run_all_tests(env, params.parallel_jobs);
log::info("Test execution finished in {:L}ms", sw.elapsed_ms().count());
dds_log(info, "Test execution finished in {:L}ms", sw.elapsed_ms().count());
for (auto& fail : test_failures) {
log_failure(fail);
@@ -26,14 +26,15 @@ file_deps_info dds::parse_mkfile_deps_str(std::string_view str) {
auto iter = split.begin();
auto stop = split.end();
if (iter == stop) {
log::critical(
"Invalid deps listing. Shell split was empty. This is almost certainly a bug.");
dds_log(critical,
"Invalid deps listing. Shell split was empty. This is almost certainly a bug.");
return ret;
}
auto& head = *iter;
++iter;
if (!ends_with(head, ":")) {
log::critical(
dds_log(
critical,
"Invalid deps listing. Leader item is not colon-terminated. This is probably a bug. "
"(Are you trying to use C++ Modules? That's not ready yet, sorry. Set `Deps-Mode` to "
"`None` in your toolchain file.)");
@@ -33,6 +33,7 @@ void create_archive_plan::archive(const build_env& env) const {
// Different archiving tools behave differently between platforms depending on whether the
// archive file exists. Make it uniform by simply removing the prior copy.
if (fs::exists(ar.out_path)) {
dds_log(debug, "Remove prior archive file [{}]", ar.out_path.string());
fs::remove(ar.out_path);
}
@@ -40,14 +41,17 @@ void create_archive_plan::archive(const build_env& env) const {
fs::create_directories(ar.out_path.parent_path());
// Do it!
log::info("[{}] Archive: {}", _qual_name, out_relpath);
dds_log(info, "[{}] Archive: {}", _qual_name, out_relpath);
auto&& [dur_ms, ar_res] = timed<std::chrono::milliseconds>([&] { return run_proc(ar_cmd); });
log::info("[{}] Archive: {} - {:L}ms", _qual_name, out_relpath, dur_ms.count());
dds_log(info, "[{}] Archive: {} - {:L}ms", _qual_name, out_relpath, dur_ms.count());
// Check, log, and throw
if (!ar_res.okay()) {
log::error("Creating static library archive [{}] failed for '{}'", out_relpath, _qual_name);
log::error("Subcommand FAILED: {}\n{}", quote_command(ar_cmd), ar_res.output);
dds_log(error,
"Creating static library archive [{}] failed for '{}'",
out_relpath,
_qual_name);
dds_log(error, "Subcommand FAILED: {}\n{}", quote_command(ar_cmd), ar_res.output);
throw_external_error<
errc::archive_failure>("Creating static library archive [{}] failed for '{}'",
out_relpath,
@@ -54,16 +54,17 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
fs::relative(source_path, cf.plan.source().basis_path).string());
// Do it!
log::info(msg);
dds_log(info, msg);
auto&& [dur_ms, proc_res]
= timed<std::chrono::milliseconds>([&] { return run_proc(cf.cmd_info.command); });
auto nth = counter.n.fetch_add(1);
log::info("{:60} - {:>7L}ms [{:{}}/{}]",
msg,
dur_ms.count(),
nth,
counter.max_digits,
counter.max);
dds_log(info,
"{:60} - {:>7L}ms [{:{}}/{}]",
msg,
dur_ms.count(),
nth,
counter.max_digits,
counter.max);
const bool compiled_okay = proc_res.okay();
const auto compile_retc = proc_res.retc;
@@ -73,16 +74,21 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
// Build dependency information, if applicable to the toolchain
std::optional<file_deps_info> ret_deps_info;
if (env.toolchain.deps_mode() == file_deps_mode::gnu) {
if (!compiled_okay) {
/**
* Do nothing: We failed to compile, so updating deps would be wasteful, and possibly wrong
*/
} else if (env.toolchain.deps_mode() == file_deps_mode::gnu) {
// GNU-style deps using Makefile generation
assert(cf.cmd_info.gnu_depfile_path.has_value());
auto& df_path = *cf.cmd_info.gnu_depfile_path;
if (!fs::is_regular_file(df_path)) {
log::critical(
"The expected Makefile deps were not generated on disk. This is a bug! "
"(Expected file to exist: [{}])",
df_path.string());
dds_log(critical,
"The expected Makefile deps were not generated on disk. This is a bug! "
"(Expected file to exist: [{}])",
df_path.string());
} else {
dds_log(trace, "Loading compilation dependencies from {}", df_path.string());
auto dep_info = dds::parse_mkfile_deps_file(df_path);
assert(dep_info.output == cf.object_file_path);
dep_info.command = quote_command(cf.cmd_info.command);
@@ -91,6 +97,7 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
}
} else if (env.toolchain.deps_mode() == file_deps_mode::msvc) {
// Uglier deps generation by parsing the output from cl.exe
dds_log(trace, "Parsing compilation dependencies from MSVC output");
/// TODO: Handle different #include Note: prefixes, since those are localized
auto msvc_deps = parse_msvc_output_for_deps(compiler_output, "Note: including file:");
// parse_msvc_output_for_deps will return the compile output without the /showIncludes notes
@@ -106,6 +113,10 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
msvc_deps.deps_info.command_output = compiler_output;
ret_deps_info = std::move(msvc_deps.deps_info);
}
} else {
/**
* We have no deps-mode set, so we can't really figure out what to do.
*/
}
// MSVC prints the filename of the source file. Remove it from the output.
@@ -121,23 +132,25 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
// Log a compiler failure
if (!compiled_okay) {
log::error("Compilation failed: {}", source_path.string());
log::error("Subcommand FAILED [Exitted {}]: {}\n{}",
compile_retc,
quote_command(cf.cmd_info.command),
compiler_output);
dds_log(error, "Compilation failed: {}", source_path.string());
dds_log(error,
"Subcommand FAILED [Exitted {}]: {}\n{}",
compile_retc,
quote_command(cf.cmd_info.command),
compiler_output);
if (compile_signal) {
log::error("Process exited via signal {}", compile_signal);
dds_log(error, "Process exited via signal {}", compile_signal);
}
throw_user_error<errc::compile_failure>("Compilation failed [{}]", source_path.string());
}
// Print any compiler output, sans whitespace
if (!dds::trim_view(compiler_output).empty()) {
log::warn("While compiling file {} [{}]:\n{}",
source_path.string(),
quote_command(cf.cmd_info.command),
compiler_output);
dds_log(warn,
"While compiling file {} [{}]:\n{}",
source_path.string(),
quote_command(cf.cmd_info.command),
compiler_output);
}
// We'll only get here if the compilation was successful, otherwise we throw
@@ -157,24 +170,35 @@ compile_file_full realize_plan(const compile_file_plan& plan, build_env_ref env)
*/
bool should_compile(const compile_file_full& comp, const database& db) {
if (!fs::exists(comp.object_file_path)) {
dds_log(trace, "Compile {}: Output does not exist", comp.plan.source_path().string());
// The output file simply doesn't exist. We have to recompile, of course.
return true;
}
auto rb_info = get_rebuild_info(db, comp.object_file_path);
if (rb_info.previous_command.empty()) {
// We have no previous compile command for this file. Assume it is new.
dds_log(trace, "Recompile {}: No prior compilation info", comp.plan.source_path().string());
return true;
}
if (!rb_info.newer_inputs.empty()) {
// Inputs to this file have changed from a prior execution.
dds_log(trace,
"Recompile {}: Inputs have changed (or no input information)",
comp.plan.source_path().string());
return true;
}
auto cur_cmd_str = quote_command(comp.cmd_info.command);
if (cur_cmd_str != rb_info.previous_command) {
dds_log(trace,
"Recompile {}: Compile command has changed",
comp.plan.source_path().string());
// The command used to generate the output is new
return true;
}
// Nope. This file is up-to-date.
dds_log(debug,
"Skip compilation of {} (Result is up-to-date)",
comp.plan.source_path().string());
return false;
}
@@ -213,6 +237,7 @@ bool dds::detail::compile_all(const ref_vector<const compile_file_plan>& compile
// Update compile dependency information
auto tr = env.db.transaction();
for (auto& info : all_new_deps) {
dds_log(trace, "Update dependency info on {}", info.output.string());
update_deps_info(neo::into(env.db), info);
}
@@ -21,18 +21,24 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons
link_exe_spec spec;
spec.output = calc_executable_path(env);
spec.inputs = _input_libs;
dds_log(debug, "Performing link for {}", spec.output.string());
for (const lm::usage& links : _links) {
dds_log(trace, " - Link with: {}/{}", links.name, links.namespace_);
extend(spec.inputs, env.ureqs.link_paths(links));
}
if (lib.archive_plan()) {
// The associated library has compiled components. Add the static library a as a linker
// input
dds_log(trace, "Adding the library's archive as a linker input");
spec.inputs.push_back(env.output_root
/ lib.archive_plan()->calc_archive_file_path(env.toolchain));
} else {
dds_log(trace, "Executable has no corresponding archive library input");
}
// The main object should be a linker input, of course.
auto main_obj = _main_compile.calc_object_file_path(env);
dds_log(trace, "Add entry point object file: {}", main_obj.string());
spec.inputs.push_back(std::move(main_obj));
// Linker inputs are order-dependent in some cases. The top-most input should appear first, and
@@ -48,10 +54,10 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons
auto msg = fmt::format("[{}] Link: {:30}",
lib.qualified_name(),
fs::relative(spec.output, env.output_root).string());
log::info(msg);
dds_log(info, msg);
auto [dur_ms, proc_res]
= timed<std::chrono::milliseconds>([&] { return run_proc(link_command); });
log::info("{} - {:>6L}ms", msg, dur_ms.count());
dds_log(info, "{} - {:>6L}ms", msg, dur_ms.count());
// Check and throw if errant
if (!proc_res.okay()) {
@@ -76,19 +82,19 @@ bool link_executable_plan::is_test() const noexcept {
std::optional<test_failure> link_executable_plan::run_test(build_env_ref env) const {
auto exe_path = calc_executable_path(env);
auto msg = fmt::format("Run test: {:30}", fs::relative(exe_path, env.output_root).string());
log::info(msg);
dds_log(info, msg);
using namespace std::chrono_literals;
auto&& [dur, res] = timed<std::chrono::microseconds>(
[&] { return run_proc({.command = {exe_path.string()}, .timeout = 10s}); });
if (res.okay()) {
log::info("{} - PASSED - {:>9L}μs", msg, dur.count());
dds_log(info, "{} - PASSED - {:>9L}μs", msg, dur.count());
return std::nullopt;
} else {
auto exit_msg = fmt::format(res.signal ? "signalled {}" : "exited {}",
res.signal ? res.signal : res.retc);
auto fail_str = res.timed_out ? "TIMEOUT" : "FAILED ";
log::error("{} - {} - {:>9L}μs [{}]", msg, fail_str, dur.count(), exit_msg);
dds_log(error, "{} - {} - {:>9L}μs [{}]", msg, fail_str, dur.count(), exit_msg);
test_failure f;
f.executable_path = exe_path;
f.output = res.output;
@@ -1,6 +1,7 @@
#include "./library.hpp"
#include <dds/util/algo.hpp>
#include <dds/util/log.hpp>
#include <range/v3/view/concat.hpp>
#include <range/v3/view/filter.hpp>
@@ -81,10 +82,15 @@ library_plan library_plan::create(const library_root& lib,
// for this library
std::optional<create_archive_plan> archive_plan;
if (!lib_compile_files.empty()) {
dds_log(debug, "Generating an archive library for {}", qual_name);
archive_plan.emplace(lib.manifest().name,
qual_name,
params.out_subdir,
std::move(lib_compile_files));
} else {
dds_log(debug,
"Library {} has no compiled inputs, so no archive will be generated",
qual_name);
}
// Collect the paths to linker inputs that should be used when generating executables for this
@@ -140,6 +140,7 @@ void store_with_remote(neo::sqlite3::statement_cache& stmts,
void do_store_pkg(neo::sqlite3::database& db,
neo::sqlite3::statement_cache& st_cache,
const package_info& pkg) {
dds_log(debug, "Recording package {}@{}", pkg.ident.name, pkg.ident.version.to_string());
std::visit([&](auto&& remote) { store_with_remote(st_cache, pkg, remote); }, pkg.remote);
auto db_pkg_id = db.last_insert_rowid();
auto& new_dep_st = st_cache(R"(
@@ -159,6 +160,7 @@ void do_store_pkg(neo::sqlite3::database& db,
new_dep_st.reset();
assert(dep.versions.num_intervals() == 1);
auto iv_1 = *dep.versions.iter_intervals().begin();
dds_log(trace, " Depends on: {}", dep.to_string());
sqlite3::exec(new_dep_st,
std::forward_as_tuple(db_pkg_id,
dep.name,
@@ -168,6 +170,7 @@ void do_store_pkg(neo::sqlite3::database& db,
}
void store_init_packages(sqlite3::database& db, sqlite3::statement_cache& st_cache) {
dds_log(debug, "Restoring initial package data");
for (auto& pkg : init_catalog_packages()) {
do_store_pkg(db, st_cache, pkg);
}
@@ -186,11 +189,13 @@ void ensure_migrated(sqlite3::database& db) {
auto meta = nlohmann::json::parse(meta_json);
if (!meta.is_object()) {
dds_log(critical, "Root of catalog dds_cat_meta cell should be a JSON object");
throw_external_error<errc::corrupted_catalog_db>();
}
auto version_ = meta["version"];
if (!version_.is_number_integer()) {
dds_log(critical, "'version' key in dds_cat_meta is not an integer");
throw_external_error<errc::corrupted_catalog_db>(
"The catalog database metadata is invalid [bad dds_meta.version]");
}
@@ -204,20 +209,27 @@ void ensure_migrated(sqlite3::database& db) {
bool import_init_packages = version == 0;
if (version > current_database_version) {
dds_log(critical,
"Catalog version is {}, but we only support up to {}",
version,
current_database_version);
throw_external_error<errc::catalog_too_new>();
}
if (version < 1) {
dds_log(debug, "Applying catalog migration 1");
migrate_repodb_1(db);
}
if (version < 2) {
dds_log(debug, "Applying catalog migration 2");
migrate_repodb_2(db);
}
meta["version"] = 2;
exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump()));
if (import_init_packages) {
log::info(
dds_log(
info,
"A new catalog database case been created, and has been populated with some initial "
"contents.");
neo::sqlite3::statement_cache stmts{db};
@@ -235,18 +247,22 @@ void check_json(bool b, std::string_view what) {
catalog catalog::open(const std::string& db_path) {
if (db_path != ":memory:") {
fs::create_directories(fs::weakly_canonical(db_path).parent_path());
auto pardir = fs::weakly_canonical(db_path).parent_path();
dds_log(trace, "Ensuring parent directory [{}]", pardir.string());
fs::create_directories(pardir);
}
dds_log(debug, "Opening package catalog [{}]", db_path);
auto db = sqlite3::database::open(db_path);
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
log::critical(
"Failed to load the repository database. It appears to be invalid/corrupted. The "
"exception message is: {}",
e.what());
dds_log(critical,
"Failed to load the repository database. It appears to be invalid/corrupted. The "
"exception message is: {}",
e.what());
throw_external_error<errc::corrupted_catalog_db>();
}
dds_log(trace, "Successfully opened catalog");
return catalog(std::move(db));
}
@@ -255,11 +271,12 @@ catalog::catalog(sqlite3::database db)
void catalog::store(const package_info& pkg) {
sqlite3::transaction_guard tr{_db};
do_store_pkg(_db, _stmt_cache, pkg);
}
std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept {
auto ver_str = pk_id.version.to_string();
dds_log(trace, "Lookup package {}@{}", pk_id.name, ver_str);
auto& st = _stmt_cache(R"(
SELECT
pkg_id,
@@ -275,7 +292,7 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
WHERE name = ? AND version = ?
)"_sql);
st.reset();
st.bindings = std::forward_as_tuple(pk_id.name, pk_id.version.to_string());
st.bindings = std::forward_as_tuple(pk_id.name, ver_str);
auto opt_tup = sqlite3::unpack_single_opt<std::int64_t,
std::string,
std::string,
@@ -323,28 +340,27 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
},
};
auto append_transform = [](auto transform) {
return [transform = std::move(transform)](auto& remote) {
if constexpr (neo::alike<decltype(remote), std::monostate>) {
// Do nothing
} else {
remote.transforms.push_back(std::move(transform));
}
};
};
if (!repo_transform.empty()) {
auto tr_json = json5::parse_data(repo_transform);
check_json(tr_json.is_array(),
// Transforms are stored in the DB as JSON strings. Convert them back to real objects.
auto tr_data = json5::parse_data(repo_transform);
check_json(tr_data.is_array(),
fmt::format("Database record for {} has an invalid 'repo_transform' field [1]",
pkg_id));
for (const auto& el : tr_json.as_array()) {
for (const auto& el : tr_data.as_array()) {
check_json(
el.is_object(),
fmt::format("Database record for {} has an invalid 'repo_transform' field [2]",
pkg_id));
auto tr = fs_transformation::from_json(el);
std::visit(append_transform(tr), info.remote);
std::visit(
[&](auto& remote) {
if constexpr (neo::alike<decltype(remote), std::monostate>) {
// Do nothing
} else {
remote.transforms.push_back(std::move(tr));
}
},
info.remote);
}
}
return info;
@@ -377,6 +393,7 @@ std::vector<package_id> catalog::by_name(std::string_view sv) const noexcept {
}
std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const noexcept {
dds_log(trace, "Lookup dependencies of {}@{}", pkg.name, pkg.version.to_string());
return sqlite3::exec_iter<std::string,
std::string,
std::string>( //
@@ -395,12 +412,16 @@ std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const no
std::forward_as_tuple(pkg.name, pkg.version.to_string())) //
| ranges::views::transform([](auto&& pair) {
auto& [name, low, high] = pair;
return dependency{name, {semver::version::parse(low), semver::version::parse(high)}};
auto dep
= dependency{name, {semver::version::parse(low), semver::version::parse(high)}};
dds_log(trace, " Depends: {}", dep.to_string());
return dep;
}) //
| ranges::to_vector;
}
void catalog::import_json_str(std::string_view content) {
dds_log(trace, "Importing JSON string into catalog");
auto pkgs = parse_packages_json(content);
sqlite3::transaction_guard tr{_db};
@@ -411,6 +432,6 @@ void catalog::import_json_str(std::string_view content) {
void catalog::import_initial() {
sqlite3::transaction_guard tr{_db};
log::info("Restoring built-in initial catalog contents");
dds_log(info, "Restoring built-in initial catalog contents");
store_init_packages(_db, _stmt_cache);
}
@@ -7,11 +7,6 @@
#include <dds/util/parallel.hpp>
#include <neo/assert.hpp>
#include <nlohmann/json.hpp>
#include <range/v3/algorithm/all_of.hpp>
#include <range/v3/algorithm/any_of.hpp>
#include <range/v3/distance.hpp>
#include <range/v3/numeric/accumulate.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/transform.hpp>
@@ -32,32 +27,9 @@ temporary_sdist do_pull_sdist(const package_info& listing, std::monostate) {
temporary_sdist do_pull_sdist(const package_info& listing, const git_remote_listing& git) {
auto tmpdir = dds::temporary_dir::create();
log::info("Cloning Git repository: {} [{}] ...", git.url, git.ref);
git.clone(tmpdir.path());
for (const auto& tr : git.transforms) {
tr.apply_to(tmpdir.path());
}
log::info("Create sdist from clone ...");
if (git.auto_lib.has_value()) {
log::info("Generating library data automatically");
auto pkg_strm
= dds::open(tmpdir.path() / "package.json5", std::ios::binary | std::ios::out);
auto man_json = nlohmann::json::object();
man_json["name"] = listing.ident.name;
man_json["version"] = listing.ident.version.to_string();
man_json["namespace"] = git.auto_lib->namespace_;
pkg_strm << nlohmann::to_string(man_json);
auto lib_strm
= dds::open(tmpdir.path() / "library.json5", std::ios::binary | std::ios::out);
auto lib_json = nlohmann::json::object();
lib_json["name"] = git.auto_lib->name;
lib_strm << nlohmann::to_string(lib_json);
}
git.pull_to(listing.ident, tmpdir.path());
dds_log(info, "Create sdist from clone ...");
sdist_params params;
params.project_dir = tmpdir.path();
auto sd_tmp_dir = dds::temporary_dir::create();
@@ -99,7 +71,7 @@ void dds::get_all(const std::vector<package_id>& pkgs, repository& repo, const c
});
auto okay = parallel_run(absent_pkg_infos, 8, [&](package_info inf) {
log::info("Download package: {}", inf.ident.to_string());
dds_log(info, "Download package: {}", inf.ident.to_string());
auto tsd = get_package_sdist(inf);
std::scoped_lock lk{repo_mut};
repo.add_sdist(tsd.sdist, if_exists::throw_exc);
@@ -1,20 +0,0 @@
#pragma once
#include <dds/util/fs.hpp>
#include <libman/package.hpp>
#include <optional>
#include <string>
namespace dds {
struct git_remote_listing {
std::string url;
std::string ref;
std::optional<lm::usage> auto_lib;
void clone(path_ref path) const;
};
} // namespace dds
@@ -1,6 +1,7 @@
#include "./import.hpp"
#include <dds/error/errors.hpp>
#include <dds/util/log.hpp>
#include <fmt/core.h>
#include <json5/parse_data.hpp>
@@ -31,7 +32,7 @@ struct any_key {
};
template <typename KF, typename... Args>
any_key(KF&&, Args&&...)->any_key<KF, Args...>;
any_key(KF&&, Args&&...) -> any_key<KF, Args...>;
namespace {
@@ -174,6 +175,7 @@ std::vector<package_info> parse_json_v1(const json5::data& data) {
std::vector<package_info> dds::parse_packages_json(std::string_view content) {
json5::data data;
try {
dds_log(trace, "Parsing packages JSON data: {}", content);
data = json5::parse_data(content);
} catch (const json5::parse_error& e) {
throw_user_error<errc::invalid_catalog_json>("JSON5 syntax error: {}", e.what());
@@ -194,6 +196,7 @@ std::vector<package_info> dds::parse_packages_json(std::string_view content) {
try {
if (version == 1.0) {
dds_log(trace, "Processing JSON data as v1 data");
return parse_json_v1(data);
} else {
throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'",
@@ -2,10 +2,14 @@
#include <dds/error/errors.hpp>
#include <dds/proc.hpp>
#include <dds/util/log.hpp>
void dds::git_remote_listing::clone(dds::path_ref dest) const {
#include <nlohmann/json.hpp>
void dds::git_remote_listing::pull_to(const dds::package_id& pid, dds::path_ref dest) const {
fs::remove_all(dest);
using namespace std::literals;
dds_log(info, "Clone Git repository [{}] (at {}) to [{}]", url, ref, dest.string());
auto command = {"git"s, "clone"s, "--depth=1"s, "--branch"s, ref, url, dest.generic_string()};
auto git_res = run_proc(command);
if (!git_res.okay()) {
@@ -15,4 +19,24 @@ void dds::git_remote_listing::clone(dds::path_ref dest) const {
git_res.retc,
git_res.output);
}
for (const auto& tr : transforms) {
tr.apply_to(dest);
}
if (auto_lib.has_value()) {
dds_log(info, "Generating library data automatically");
auto pkg_strm = dds::open(dest / "package.json5", std::ios::binary | std::ios::out);
auto man_json = nlohmann::json::object();
man_json["name"] = pid.name;
man_json["version"] = pid.version.to_string();
man_json["namespace"] = auto_lib->namespace_;
pkg_strm << nlohmann::to_string(man_json);
auto lib_strm = dds::open(dest / "library.json5", std::ios::binary | std::ios::out);
auto lib_json = nlohmann::json::object();
lib_json["name"] = auto_lib->name;
lib_strm << nlohmann::to_string(lib_json);
}
}
@@ -18,7 +18,7 @@ struct git_remote_listing {
std::vector<fs_transformation> transforms;
void clone(path_ref path) const;
void pull_to(const package_id& pid, path_ref path) const;
};
} // namespace dds
@@ -87,7 +87,8 @@ database database::open(const std::string& db_path) {
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
log::error(
dds_log(
error,
"Failed to load the databsae. It appears to be invalid/corrupted. We'll delete it and "
"create a new one. The exception message is: {}",
e.what());
@@ -96,10 +97,10 @@ database database::open(const std::string& db_path) {
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
log::critical(
"Failed to apply database migrations to recovery database. This is a critical "
"error. The exception message is: {}",
e.what());
dds_log(critical,
"Failed to apply database migrations to recovery database. This is a critical "
"error. The exception message is: {}",
e.what());
std::terminate();
}
}
@@ -28,7 +28,9 @@ auto collect_pf_sources(path_ref path) {
// Drop any source files we found within `include/`
erase_if(sources, [&](auto& info) {
if (info.kind != source_kind::header) {
log::warn("Source file in `include` will not be compiled: {}", info.path.string());
dds_log(warn,
"Source file in `include` will not be compiled: {}",
info.path.string());
return true;
}
return false;
@@ -63,10 +63,10 @@ package_manifest parse_json(const json5::data& data, std::string_view fpath) {
if_key{"depends",
[&](auto&& dat) {
if (dat.is_object()) {
log::warn(
"{}: Using a JSON object for 'depends' is deprecated. Use an "
"array of strings instead.",
fpath);
dds_log(warn,
"{}: Using a JSON object for 'depends' is deprecated. Use an "
"array of strings instead.",
fpath);
return mapping{push_depends_obj_kv}(dat);
} else if (dat.is_array()) {
return for_each{put_into{std::back_inserter(ret.dependencies),
@@ -62,7 +62,7 @@ spawn_child(const std::vector<std::string>& command, int stdout_pipe, int close_
} // namespace
proc_result dds::run_proc(const proc_options& opts) {
log::debug("Spawning subprocess: {}", quote_command(opts.command));
dds_log(debug, "Spawning subprocess: {}", quote_command(opts.command));
int stdio_pipe[2] = {};
auto rc = ::pipe(stdio_pipe);
check_rc(rc == 0, "Create stdio pipe for subprocess");
@@ -100,7 +100,7 @@ proc_result dds::run_proc(const proc_options& opts) {
::kill(child, SIGINT);
timeout = -1ms;
res.timed_out = true;
log::debug("Subprocess [{}] timed out", quote_command(opts.command));
dds_log(debug, "Subprocess [{}] timed out", quote_command(opts.command));
continue;
}
std::string buffer;
@@ -25,6 +25,7 @@ namespace {
proc_result dds::run_proc(const proc_options& opts) {
auto cmd_str = quote_command(opts.command);
dds_log(debug, "Spawning subprocess: {}", cmd_str);
::SECURITY_ATTRIBUTES security = {};
security.bInheritHandle = TRUE;
@@ -32,9 +32,10 @@ auto load_sdists(path_ref root) {
try {
return sdist::from_directory(p);
} catch (const std::runtime_error& e) {
log::error("Failed to load source distribution from directory '{}': {}",
p.string(),
e.what());
dds_log(error,
"Failed to load source distribution from directory '{}': {}",
p.string(),
e.what());
return std::nullopt;
}
};
@@ -53,8 +54,8 @@ auto load_sdists(path_ref root) {
} // namespace
void repository::_log_blocking(path_ref dirpath) noexcept {
log::warn("Another process has the repository directory locked [{}]", dirpath.string());
log::warn("Waiting for repository to be released...");
dds_log(warn, "Another process has the repository directory locked [{}]", dirpath.string());
dds_log(warn, "Waiting for repository to be released...");
}
void repository::_init_repo_dir(path_ref dirpath) noexcept { fs::create_directories(dirpath); }
@@ -68,7 +69,8 @@ repository repository::_open_for_directory(bool writeable, path_ref dirpath) {
void repository::add_sdist(const sdist& sd, if_exists ife_action) {
if (!_write_enabled) {
log::critical(
dds_log(
critical,
"DDS attempted to write into a repository that wasn't opened with a write-lock. This "
"is a hard bug and should be reported. For the safety and integrity of the local "
"repository, we'll hard-exit immediately.");
@@ -81,10 +83,10 @@ void repository::add_sdist(const sdist& sd, if_exists ife_action) {
if (ife_action == if_exists::throw_exc) {
throw_user_error<errc::sdist_exists>(msg);
} else if (ife_action == if_exists::ignore) {
log::warn(msg);
dds_log(warn, msg);
return;
} else {
log::info(msg + " - Replacing");
dds_log(info, msg + " - Replacing");
}
}
auto tmp_copy = sd_dest;
@@ -99,7 +101,7 @@ void repository::add_sdist(const sdist& sd, if_exists ife_action) {
}
fs::rename(tmp_copy, sd_dest);
_sdists.insert(sdist::from_directory(sd_dest));
log::info("Source distribution '{}' successfully exported", sd.manifest.pkg_id.to_string());
dds_log(info, "Source distribution '{}' successfully exported", sd.manifest.pkg_id.to_string());
}
const sdist* repository::find(const package_id& pkg) const noexcept {
@@ -76,21 +76,31 @@ struct solver_provider {
mutable std::map<std::string, std::vector<package_id>> pkgs_by_name = {};
std::optional<req_type> best_candidate(const req_type& req) const {
dds_log(debug, "Find best candidate of {}", req.dep.to_string());
// Look up in the cachce for the packages we have with the given name
auto found = pkgs_by_name.find(req.dep.name);
if (found == pkgs_by_name.end()) {
// If it isn't there, insert an entry in the cache
found = pkgs_by_name.emplace(req.dep.name, pkgs_for_name(req.dep.name)).first;
}
auto& vec = found->second;
auto cand = std::find_if(vec.cbegin(), vec.cend(), [&](const package_id& pk) {
// Find the first package with the version contained by the ranges in the requirement
auto& for_name = found->second;
auto cand = std::find_if(for_name.cbegin(), for_name.cend(), [&](const package_id& pk) {
return req.dep.versions.contains(pk.version);
});
if (cand == vec.cend()) {
if (cand == for_name.cend()) {
dds_log(debug, "No candidate for requirement {}", req.dep.to_string());
return std::nullopt;
}
dds_log(debug, "Select candidate {}@{}", cand->to_string());
return req_type{dependency{cand->name, {cand->version, cand->version.next_after()}}};
}
std::vector<req_type> requirements_of(const req_type& req) const {
dds_log(trace,
"Lookup requirements of {}@{}",
req.key(),
(*req.dep.versions.iter_intervals().begin()).low.to_string());
auto pk_id = as_pkg_id(req);
auto deps = deps_for_pkg(pk_id);
return deps //
@@ -129,7 +139,7 @@ struct explainer {
void operator()(pubgrub::explain::premise<T> pr) {
strm.str("");
put(pr.value);
log::error("{} {},", at_head ? "┌─ Given that" : "│ and", strm.str());
dds_log(error, "{} {},", at_head ? "┌─ Given that" : "│ and", strm.str());
at_head = false;
}
@@ -138,10 +148,10 @@ struct explainer {
at_head = true;
strm.str("");
put(cncl.value);
log::error("╘═ then {}.", strm.str());
dds_log(error, "╘═ then {}.", strm.str());
}
void operator()(pubgrub::explain::separator) { log::error(""); }
void operator()(pubgrub::explain::separator) { dds_log(error, ""); }
};
} // namespace
@@ -156,7 +166,7 @@ std::vector<package_id> dds::solve(const std::vector<dependency>& deps,
auto solution = pubgrub::solve(wrap_req, solver_provider{pkgs_prov, deps_prov});
return solution | ranges::views::transform(as_pkg_id) | ranges::to_vector;
} catch (const solve_fail_exc& failure) {
log::error("Dependency resolution has failed! Explanation:");
dds_log(error, "Dependency resolution has failed! Explanation:");
pubgrub::generate_explaination(failure, explainer());
throw_user_error<errc::dependency_resolve_failure>();
}
@@ -18,7 +18,7 @@ namespace {
void sdist_export_file(path_ref out_root, path_ref in_root, path_ref filepath) {
auto relpath = fs::relative(filepath, in_root);
log::debug("Export file {}", relpath.string());
dds_log(debug, "Export file {}", relpath.string());
auto dest = out_root / relpath;
fs::create_directories(dest.parent_path());
fs::copy(filepath, dest);
@@ -52,7 +52,7 @@ void sdist_copy_library(path_ref out_root, const library_root& lib, const sdist_
}
sdist_export_file(out_root, params.project_dir, *lib_man_path);
log::info("sdist: Export library from {}", lib.path().string());
dds_log(info, "sdist: Export library from {}", lib.path().string());
fs::create_directories(out_root);
for (const auto& source : sources_to_keep) {
sdist_export_file(out_root, params.project_dir, source.path);
@@ -77,7 +77,7 @@ sdist dds::create_sdist(const sdist_params& params) {
}
fs::create_directories(dest.parent_path());
safe_rename(tempdir.path(), dest);
log::info("Source distribution created in {}", dest.string());
dds_log(info, "Source distribution created in {}", dest.string());
return sdist::from_directory(dest);
}
@@ -98,7 +98,7 @@ sdist dds::create_sdist_in_dir(path_ref out, const sdist_params& params) {
auto pkg_man = package_manifest::load_from_file(*man_path);
sdist_export_file(out, params.project_dir, *man_path);
log::info("Generated export as {}", pkg_man.pkg_id.to_string());
dds_log(info, "Generated export as {}", pkg_man.pkg_id.to_string());
return sdist::from_directory(out);
}
@@ -3,6 +3,7 @@
#include <dds/toolchain/from_json.hpp>
#include <dds/toolchain/prep.hpp>
#include <dds/util/algo.hpp>
#include <dds/util/log.hpp>
#include <dds/util/paths.hpp>
#include <dds/util/string.hpp>
@@ -72,10 +73,15 @@ static auto shortest_path_args(path_ref base, R&& r) {
}
compile_command_info toolchain::create_compile_command(const compile_file_spec& spec,
path_ref,
path_ref cwd,
toolchain_knobs knobs) const noexcept {
using namespace std::literals;
dds_log(trace,
"Calculate compile command for source file [{}] to object file [{}]",
spec.source_path.string(),
spec.out_path.string());
language lang = spec.lang;
if (lang == language::automatic) {
if (spec.source_path.extension() == ".c" || spec.source_path.extension() == ".C") {
@@ -87,15 +93,20 @@ compile_command_info toolchain::create_compile_command(const compile_file_spec&
vector<string> flags;
if (knobs.is_tty) {
dds_log(trace, "Enabling TTY flags.");
extend(flags, _tty_flags);
}
dds_log(trace, "#include-search dirs:");
for (auto&& inc_dir : spec.include_dirs) {
auto inc_args = include_args(inc_dir);
dds_log(trace, " - search: {}", inc_dir.string());
auto shortest = shortest_path_from(inc_dir, cwd);
auto inc_args = include_args(shortest);
extend(flags, inc_args);
}
for (auto&& ext_inc_dir : spec.external_include_dirs) {
dds_log(trace, " - search (external): {}", ext_inc_dir.string());
auto inc_args = external_include_args(ext_inc_dir);
extend(flags, inc_args);
}
@@ -142,10 +153,14 @@ vector<string> toolchain::create_archive_command(const archive_spec& spec,
path_ref cwd,
toolchain_knobs) const noexcept {
vector<string> cmd;
dds_log(trace, "Creating archive command [output: {}]", spec.out_path.string());
auto out_arg = shortest_path_from(spec.out_path, cwd).string();
for (auto& arg : _link_archive) {
if (arg == "[in]") {
dds_log(trace, "Expand [in] placeholder:");
for (auto&& in : spec.input_files) {
dds_log(trace, " - input: [{}]", in.string());
}
extend(cmd, shortest_path_args(cwd, spec.input_files));
} else {
cmd.push_back(replace(arg, "[out]", out_arg));
@@ -158,8 +173,13 @@ vector<string> toolchain::create_link_executable_command(const link_exe_spec& sp
path_ref cwd,
toolchain_knobs) const noexcept {
vector<string> cmd;
dds_log(trace, "Creating link command [output: {}]", spec.output.string());
for (auto& arg : _link_exe) {
if (arg == "[in]") {
dds_log(trace, "Expand [in] placeholder:");
for (auto&& in : spec.inputs) {
dds_log(trace, " - input: [{}]", in.string());
}
extend(cmd, shortest_path_args(cwd, spec.inputs));
} else {
cmd.push_back(replace(arg, "[out]", shortest_path_from(spec.output, cwd).string()));
@@ -280,7 +300,9 @@ std::optional<dds::toolchain> dds::toolchain::get_default() {
user_home_dir() / "toolchain.json",
};
for (auto&& cand : candidates) {
dds_log(trace, "Checking for default toolchain at [{}]", cand.string());
if (fs::exists(cand)) {
dds_log(debug, "Using default toolchain file: {}", cand.string());
return parse_toolchain_json5(slurp_file(cand));
}
}
@@ -19,12 +19,13 @@ inline level current_log_level = level::info;
void log_print(level l, std::string_view s) noexcept;
// clang-format off
template <typename T>
concept formattable = requires (const T item) {
concept formattable = requires(const T item) {
fmt::format("{}", item);
};
inline bool level_enabled(level l) { return int(l) >= int(current_log_level); }
template <formattable... Args>
void log(level l, std::string_view s, const Args&... args) noexcept {
if (int(l) >= int(current_log_level)) {
@@ -38,31 +39,11 @@ void trace(std::string_view s, const Args&... args) {
log(level::trace, s, args...);
}
template <formattable... Args>
void debug(std::string_view s, const Args&... args) {
log(level::debug, s, args...);
}
template <formattable... Args>
void info(std::string_view s, const Args&... args) {
log(level::info, s, args...);
}
template <formattable... Args>
void warn(std::string_view s, const Args&... args) {
log(level::warn, s, args...);
}
template <formattable... Args>
void error(std::string_view s, const Args&... args) {
log(level::error, s, args...);
}
template <formattable... Args>
void critical(std::string_view s, const Args&&... args) {
log(level::critical, s, args...);
}
// clang-format on
#define dds_log(Level, str, ...) \
do { \
if (int(dds::log::level::Level) >= int(dds::log::current_log_level)) { \
::dds::log::log(::dds::log::level::Level, str __VA_OPT__(, ) __VA_ARGS__); \
} \
} while (0)
} // namespace dds::log
@@ -8,6 +8,6 @@ void dds::log_exception(std::exception_ptr eptr) noexcept {
try {
std::rethrow_exception(eptr);
} catch (const std::exception& e) {
log::error(e.what());
dds_log(error, e.what());
}
}
@@ -12,7 +12,7 @@ fs::path dds::user_home_dir() {
static auto ret = []() -> fs::path {
auto home_env = std::getenv("HOME");
if (!home_env) {
log::error("No HOME environment variable set!");
dds_log(error, "No HOME environment variable set!");
return "/";
}
return fs::absolute(fs::path(home_env));
@@ -12,7 +12,7 @@ fs::path dds::user_home_dir() {
static auto ret = []() -> fs::path {
auto home_env = std::getenv("HOME");
if (!home_env) {
log::warn("No HOME environment variable set!");
dds_log(warn, "No HOME environment variable set!");
return "/";
}
return fs::absolute(fs::path(home_env));
@@ -13,7 +13,7 @@ fs::path dds::user_home_dir() {
static auto ret = []() -> fs::path {
auto userprofile_env = std::getenv("USERPROFILE");
if (!userprofile_env) {
log::warn("No USERPROFILE environment variable set!");
dds_log(warn, "No USERPROFILE environment variable set!");
return "/";
}
return fs::absolute(fs::path(userprofile_env));