Browse Source

Merge branch 'feature/auto-pkg-transform' into develop

default_compile_flags
vector-of-bool 4 years ago
parent
commit
9944f4427f
59 changed files with 2557 additions and 237 deletions
  1. +86
    -35
      catalog.json
  2. +10
    -0
      docs/err/invalid-repo-transform.rst
  3. +7
    -0
      docs/err/template-error.rst
  4. +190
    -2
      docs/guide/catalog.rst
  5. +2
    -2
      package.jsonc
  6. +5
    -1
      src/dds.main.cpp
  7. +1
    -1
      src/dds/build/plan/archive.cpp
  8. +1
    -1
      src/dds/build/plan/compile_file.cpp
  9. +2
    -1
      src/dds/build/plan/exe.cpp
  10. +96
    -88
      src/dds/catalog/catalog.cpp
  11. +4
    -9
      src/dds/catalog/catalog.hpp
  12. +3
    -3
      src/dds/catalog/catalog.test.cpp
  13. +21
    -16
      src/dds/catalog/get.cpp
  14. +214
    -0
      src/dds/catalog/import.cpp
  15. +9
    -0
      src/dds/catalog/import.hpp
  16. +154
    -0
      src/dds/catalog/import.test.cpp
  17. +25
    -0
      src/dds/catalog/package_info.hpp
  18. +18
    -0
      src/dds/catalog/remote/git.cpp
  19. +24
    -0
      src/dds/catalog/remote/git.hpp
  20. +26
    -16
      src/dds/error/errors.cpp
  21. +1
    -0
      src/dds/error/errors.hpp
  22. +2
    -0
      src/dds/source/file.cpp
  23. +16
    -11
      src/dds/toolchain/from_json.test.cpp
  24. +28
    -10
      src/dds/toolchain/toolchain.cpp
  25. +8
    -4
      src/dds/toolchain/toolchain.hpp
  26. +189
    -0
      src/dds/util/fnmatch.cpp
  27. +321
    -0
      src/dds/util/fnmatch.hpp
  28. +32
    -0
      src/dds/util/fnmatch.test.cpp
  29. +330
    -0
      src/dds/util/fs_transform.cpp
  30. +49
    -0
      src/dds/util/fs_transform.hpp
  31. +242
    -0
      src/dds/util/glob.cpp
  32. +63
    -0
      src/dds/util/glob.hpp
  33. +69
    -0
      src/dds/util/glob.test.cpp
  34. +1
    -1
      src/dds/util/paths.win.cpp
  35. +5
    -0
      tests/deps/use-catch2/gcc.tc.jsonc
  36. +3
    -0
      tests/deps/use-catch2/msvc.tc.jsonc
  37. +43
    -0
      tests/deps/use-catch2/project/catalog.json5
  38. +4
    -0
      tests/deps/use-catch2/project/library.json5
  39. +8
    -0
      tests/deps/use-catch2/project/package.json5
  40. +6
    -0
      tests/deps/use-catch2/project/src/use-catch2.main.cpp
  41. +11
    -0
      tests/deps/use-catch2/test_use_catch2.py
  42. +7
    -0
      tests/deps/use-cryptopp/gcc.tc.jsonc
  43. +4
    -0
      tests/deps/use-cryptopp/msvc.tc.jsonc
  44. +27
    -0
      tests/deps/use-cryptopp/project/catalog.json
  45. +4
    -0
      tests/deps/use-cryptopp/project/library.json5
  46. +8
    -0
      tests/deps/use-cryptopp/project/package.json5
  47. +17
    -0
      tests/deps/use-cryptopp/project/src/use-cryptopp.main.cpp
  48. +12
    -0
      tests/deps/use-cryptopp/test_use_cryptopp.py
  49. +7
    -0
      tests/deps/use-libsodium/gcc.tc.jsonc
  50. +4
    -0
      tests/deps/use-libsodium/msvc.tc.jsonc
  51. +44
    -0
      tests/deps/use-libsodium/project/catalog.json
  52. +4
    -0
      tests/deps/use-libsodium/project/library.json5
  53. +8
    -0
      tests/deps/use-libsodium/project/package.json5
  54. +14
    -0
      tests/deps/use-libsodium/project/src/use-libsodium.main.cpp
  55. +12
    -0
      tests/deps/use-libsodium/test_use_libsodium.py
  56. +15
    -21
      tools/ci.py
  57. +37
    -12
      tools/gen-catalog-json.py
  58. +1
    -1
      tools/msvc.jsonc
  59. +3
    -2
      tools/self_build.py

+ 86
- 35
catalog.json View File

@@ -234,7 +234,6 @@
"depends": {},
"description": "The Windows Implementation Library",
"git": {
"auto-lib": null,
"ref": "dds/2020.03.16",
"url": "https://github.com/vector-of-bool/wil.git"
}
@@ -245,7 +244,6 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
@@ -254,7 +252,6 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
@@ -263,7 +260,6 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
@@ -272,10 +268,33 @@
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"auto-lib": null,
"ref": "0.2.2",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
},
"0.3.0": {
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"ref": "0.3.0",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
},
"0.3.1": {
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"ref": "0.3.1",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
},
"0.3.2": {
"depends": {},
"description": "Minimal C++ concepts library. Contains many definitions from C++20.",
"git": {
"ref": "0.3.2",
"url": "https://github.com/vector-of-bool/neo-concepts.git"
}
}
},
"neo-fun": {
@@ -283,7 +302,6 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
@@ -292,10 +310,49 @@
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"auto-lib": null,
"ref": "0.1.1",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.2.0": {
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.2.1": {
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.3.0": {
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"ref": "0.3.0",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.3.1": {
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"ref": "0.3.1",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.3.2": {
"depends": {},
"description": "Some library fundamentals that you might find useful",
"git": {
"ref": "0.3.2",
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
}
},
"neo-sqlite3": {
@@ -303,7 +360,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -312,7 +368,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -321,7 +376,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -330,7 +384,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.2",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -339,7 +392,6 @@
"depends": {},
"description": "A modern and low-level C++ SQLite API",
"git": {
"auto-lib": null,
"ref": "0.2.3",
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
@@ -350,7 +402,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.0.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -359,7 +410,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.0.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -368,7 +418,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.1.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -377,7 +426,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.1.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -386,7 +434,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.1.2",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -395,7 +442,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.2.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -404,7 +450,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.3.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -413,7 +458,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.4.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -422,7 +466,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.5.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -431,7 +474,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.6.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -440,7 +482,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.6.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -449,7 +490,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.0",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -458,7 +498,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.1",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -467,7 +506,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.2",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -476,7 +514,6 @@
"depends": {},
"description": "JSON for Modern C++",
"git": {
"auto-lib": null,
"ref": "dds/3.7.3",
"url": "https://github.com/vector-of-bool/json.git"
}
@@ -487,7 +524,6 @@
"depends": {},
"description": "A C++ implementation of the Pubgrub version solving algorithm",
"git": {
"auto-lib": null,
"ref": "0.1.2",
"url": "https://github.com/vector-of-bool/pubgrub.git"
}
@@ -496,7 +532,6 @@
"depends": {},
"description": "A C++ implementation of the Pubgrub version solving algorithm",
"git": {
"auto-lib": null,
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/pubgrub.git"
}
@@ -505,7 +540,6 @@
"depends": {},
"description": "A C++ implementation of the Pubgrub version solving algorithm",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/pubgrub.git"
}
@@ -554,7 +588,6 @@
"depends": {},
"description": "A C++ library that implements Semantic Versioning parsing, emitting, types, ordering, and operations. See https://semver.org/",
"git": {
"auto-lib": null,
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/semver.git"
}
@@ -563,7 +596,6 @@
"depends": {},
"description": "A C++ library that implements Semantic Versioning parsing, emitting, types, ordering, and operations. See https://semver.org/",
"git": {
"auto-lib": null,
"ref": "0.2.2",
"url": "https://github.com/vector-of-bool/semver.git"
}
@@ -747,7 +779,6 @@
"depends": {},
"description": "A C++ implementation of a JSON5 parser",
"git": {
"auto-lib": null,
"ref": "0.1.5",
"url": "https://github.com/vector-of-bool/json5.git"
}
@@ -761,7 +792,6 @@
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"auto-lib": null,
"ref": "0.1.0",
"url": "https://github.com/vector-of-bool/semester.git"
}
@@ -773,10 +803,31 @@
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"auto-lib": null,
"ref": "0.1.1",
"url": "https://github.com/vector-of-bool/semester.git"
}
},
"0.2.0": {
"depends": {
"neo-concepts": "^0.3.2",
"neo-fun": "^0.3.2"
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"ref": "0.2.0",
"url": "https://github.com/vector-of-bool/semester.git"
}
},
"0.2.1": {
"depends": {
"neo-concepts": "^0.3.2",
"neo-fun": "^0.3.2"
},
"description": "A C++ library to process recursive dynamic data",
"git": {
"ref": "0.2.1",
"url": "https://github.com/vector-of-bool/semester.git"
}
}
}
},

+ 10
- 0
docs/err/invalid-repo-transform.rst View File

@@ -0,0 +1,10 @@
Error: A repository filesystem transformation is invalid
########################################################

In ``dds``, a catalog entry can have a list of attached "transforms" that will
be applied to the root directory of the package before ``dds`` tries to build
and use it.

.. seealso::
For information on the shape and purpose of transforms, refer to
:ref:`catalog.fs-transform` on the :doc:`/guide/catalog` page.

+ 7
- 0
docs/err/template-error.rst View File

@@ -0,0 +1,7 @@
Error: There is an issue rendering a template file
##################################################

.. warning:: TODO

Templates are a highly experimental feature in ``dds`` and aren't ready
for any public usage yet.

+ 190
- 2
docs/guide/catalog.rst View File

@@ -71,6 +71,8 @@ that will be added to the catalog. The following options are supported:
generated for the library.


.. _catalog.adding.json:

Bulk Imports via JSON
=====================

@@ -112,9 +114,195 @@ The JSON file has the following structure:
// The `auto-lib` is optional, to specify an automatic
// library name/namespace pair to generate for the
// root library
"auto-lib": "Acme/Gadgets"
"auto-lib": "Acme/Gadgets",
// List of filesystem transformations to apply to the repository
// (optional)
"transform": [
// ... (see below) ...
]
}
}
}
}
}
}


.. _catalog.fs-transform:

Filesystem Transformations
**************************

A catalog entry can have a set of filesystem transformations attached to its remote information (e.g. the ``git`` property). When ``dds`` is obtaining a copy of the code for the package, it will apply the associated transformations to the filesystem rooted at the downloaded/cloned directory. In this way, ``dds`` can effectively "patch" the filesystem structure of a project arbitrarily. This allows many software projects to be imported into ``dds`` without needing to patch/fork the original project to support the required filesystem structure.

.. important::
While ``dds`` allows you to patch directories downloaded via the catalog, a
native ``dds`` project must still follow the layout rules.

The intention of filesystem transformations is to act as a "bridge" that will allow ``dds`` projects to more easily utilize existing libraries.

.. note::
Filesystem transformations can only be added to catalog entries using the
:ref:`JSON import method <catalog.adding.json>`. It is not available in the
command-line import method.


Available Transformations
=========================

At time of writing, there are four main transformations available to catalog entries:

``copy`` and ``move``
Copies or moves a set of files/directories from one location to another. Allows the following options:

- ``from`` - The path from which to copy/move. **Required**
- ``to`` - The destination path for the copy/move. **Required**
- ``include`` - A list of globbing expressions for files to copy/move. If
omitted, then all files will be included.
- ``exclude`` - A list of globbing expressions of files to exclude from the
copy/move. If omitted, then no files will be excluded. **If both** ``include`` and ``exclude`` are provided, ``include`` will be checked *before* ``exclude``.
- ``strip-components`` - A positive integer (or zero, the default). When the
``from`` path identifies a directory, its contents will be copied/moved
into the destination and maintain their relative path from the source path as their relative path within the destination. If ``strip-components`` is set to an integer ``N``, then the first ``N`` path components of that relative path will be removed when copying/moving the files in a directory. If a file's relative path has less than ``N`` components, then that file will be excluded from the ``copy/move`` operation.

``remove``
Delete files and directories from the package source. Has the following options:

- ``path`` - The path of the file/directory to remove. **Required**
- ``only-matching`` - A list of globbing expressions for files to remove. If omitted and the path is a directory, then the entire directory will be deleted. If at least one pattern is provided, then directories will be left intact and only non-directory files will be removed. If ``path`` names a non-directory file, then this option has no effect.

``write``
Write the contents of a string to a file in the package source. Has the following options:

- ``path`` - The path of the file to write. **Required**
- ``content`` - A string that will be written to the file. **Required**

If the file exists and is not a directory, the file will be replaced. If the path names an existing directory, an error will be generated.

Transformations are added as a JSON array to the JSON object that specifies the remote information for the package. Each element of the array is an object, with one or more of the four keys listed above. If an object features more than one of the above keys, they are applied in the same order as they have been listed.


Example: Crypto++
=================

The following catalog entry will build and import `Crypto++`_ for use by a ``dds`` project. This uses the unmodified Crypto++ repository, which ``dds`` doesn't know how to build immediately. With some simple moving of files, we end up with something ``dds`` can build directly:

.. code-block:: javascript

"cryptopp": {
"8.2.0": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0",
"auto-lib": "cryptopp/cryptopp",
"transform": [
{
// Crypto++ has no source directories at all, and everything lives
// at the top level. No good for dds.
//
// Clients are expected to #include files with a `cryptopp/` prefix,
// so we need to move the files around so that they match the
// expected layout:
"move": {
// Move from the root of the repo:
"from": ".",
// Move files *into* `src/cryptopp`
"to": "src/cryptopp",
// Only move the C++ sources and headers:
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
}
]
}
}
}


Example: libsodium
==================

For example, this catalog entry will build and import `libsodium`_ for use in a ``dds`` project. This uses the upstream libsodium repository, which does not meet the layout requirements needed by ``dds``. With a few simple transformations, we can allow ``dds`` to build and consume libsodium successfully:

.. code-block:: javascript

"libsodium": {
"1.0.18": {
"git": {
"url": "https://github.com/jedisct1/libsodium.git",
"ref": "1.0.18",
"auto-lib": "sodium/sodium",
/// Make libsodium look the way dds expects a project to be laid out.
"transform": [
// libsodium has a `src` directory, but it does not look how dds
// expects it to. The public `#include` root of libsodium lives in
// a nested subdirectory of `src/`
{
"move": {
// Move the public header root out from that nested subdirectory
"from": "src/libsodium/include",
// Put it at `include/` in the top-level
"to": "include/"
}
},
// libsodium has some files whose contents are generated by a
// configure script. For demonstration purposes, we don't need most
// of them, and we can just swipe an existing pre-configured file
// that is already in the source repository and put it into the
// public header root.
{
"copy": {
// Generated version header committed to the repository:
"from": "builds/msvc/version.h",
// Put it where the configure script would put it:
"to": "include/sodium/version.h"
}
},
// The subdirectory `src/libsodium/` is no good. It now acts as an
// unnecessary layer of indirection. We want `src/` to be the root.
// We can just "lift" the subdirectory:
{
// Up we go:
"move": {
"from": "src/libsodium",
"to": "src/"
},
// Delete the now-unused subdirectory:
"remove": {
"path": "src/libsodium"
}
},
// Lastly, libsodium's source files expect to resolve their header
// paths differently than they expect of their clients (Bad!!!).
// Fortunately, we can do a hack to allow the files in `src/` to
resolve their headers. The source files use #include as if the
// header root was `include/sodium/`, rather than `include/`.
// To work around this, generate a copy of each header file in the
// source root, but remove the leading path element.
// Because we have a separate `include/` and `src/` directory, dds
// will only expose the `include/` directory to clients, and the
// header copies in `src/` are not externally visible.
//
// For example, the `include/sodium/version.h` file is visible to
// clients as `sodium/version.h`, but libsodium itself tries to
// include it as `version.h` within its source files. When we copy
// from `include/`, we grab the relative path to `sodium/version.h`,
// strip the leading components to get `version.h`, and then join that
// path with the `to` path to generate the full destination at
// `src/version.h`
{
"copy": {
"from": "include/",
"to": "src/",
"strip-components": 1
}
}
]
}
}
}

.. _libsodium: https://doc.libsodium.org/
.. _Crypto++: https://cryptopp.com/

+ 2
- 2
package.jsonc View File

@@ -9,11 +9,11 @@
"range-v3": "0.10.0",
"nlohmann-json": "3.7.1",
"neo-sqlite3": "0.2.3",
"neo-fun": "0.1.1",
"neo-fun": "0.3.2",
"semver": "0.2.2",
"pubgrub": "0.2.1",
"vob-json5": "0.1.5",
"vob-semester": "0.1.1",
"vob-semester": "0.2.1",
"ctre": "2.7.0",
},
"test_driver": "Catch-Main"

+ 5
- 1
src/dds.main.cpp View File

@@ -276,7 +276,7 @@ struct cli_catalog {
if (!git_ref) {
dds::throw_user_error<dds::errc::git_url_ref_mutual_req>();
}
auto git = dds::git_remote_listing{git_url.Get(), git_ref.Get(), std::nullopt};
auto git = dds::git_remote_listing{git_url.Get(), git_ref.Get(), std::nullopt, {}};
if (auto_lib) {
git.auto_lib = lm::split_usage_string(auto_lib.Get());
}
@@ -328,6 +328,10 @@ struct cli_catalog {
}
}

void print_remote_info(std::monostate) {
std::cout << "THIS ENTRY IS MISSING REMOTE INFORMATION!\n";
}

int run() {
auto pk_id = dds::package_id::parse(ident.Get());
auto cat = cat_path.open();

+ 1
- 1
src/dds/build/plan/archive.cpp View File

@@ -24,7 +24,7 @@ void create_archive_plan::archive(const build_env& env) const {
archive_spec ar;
ar.input_files = std::move(objects);
ar.out_path = env.output_root / calc_archive_file_path(env.toolchain);
auto ar_cmd = env.toolchain.create_archive_command(ar, env.knobs);
auto ar_cmd = env.toolchain.create_archive_command(ar, fs::current_path(), env.knobs);

// `out_relpath` is purely for the benefit of the user to have a short name
// in the logs

+ 1
- 1
src/dds/build/plan/compile_file.cpp View File

@@ -26,7 +26,7 @@ compile_command_info compile_file_plan::generate_compile_command(build_env_ref e
extend(spec.external_include_dirs, env.ureqs.include_paths(use));
}
extend(spec.definitions, _rules.defs());
return env.toolchain.create_compile_command(spec, env.knobs);
return env.toolchain.create_compile_command(spec, dds::fs::current_path(), env.knobs);
}

fs::path compile_file_plan::calc_object_file_path(const build_env& env) const noexcept {

+ 2
- 1
src/dds/build/plan/exe.cpp View File

@@ -43,7 +43,8 @@ void link_executable_plan::link(build_env_ref env, const library_plan& lib) cons
std::reverse(spec.inputs.begin(), spec.inputs.end());

// Do it!
const auto link_command = env.toolchain.create_link_executable_command(spec, env.knobs);
const auto link_command
= env.toolchain.create_link_executable_command(spec, dds::fs::current_path(), env.knobs);
fs::create_directories(spec.output.parent_path());
auto msg = fmt::format("[{}] Link: {:30}",
lib.qualified_name(),

+ 96
- 88
src/dds/catalog/catalog.cpp View File

@@ -1,9 +1,14 @@
#include "./catalog.hpp"

#include "./import.hpp"

#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/solve/solve.hpp>

#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/concepts.hpp>
#include <neo/sqlite3/exec.hpp>
#include <neo/sqlite3/iter_tuples.hpp>
#include <neo/sqlite3/single.hpp>
@@ -65,6 +70,13 @@ void migrate_repodb_1(sqlite3::database& db) {
)");
}

void migrate_repodb_2(sqlite3::database& db) {
db.exec(R"(
ALTER TABLE dds_cat_pkgs
ADD COLUMN repo_transform TEXT NOT NULL DEFAULT '[]'
)");
}

void ensure_migrated(sqlite3::database& db) {
sqlite3::transaction_guard tr{db};
db.exec(R"(
@@ -87,7 +99,7 @@ void ensure_migrated(sqlite3::database& db) {
"The catalog database metadata is invalid [bad dds_meta.version]");
}

constexpr int current_database_version = 1;
constexpr int current_database_version = 2;

int version = version_;
if (version > current_database_version) {
@@ -97,10 +109,19 @@ void ensure_migrated(sqlite3::database& db) {
if (version < 1) {
migrate_repodb_1(db);
}
meta["version"] = 1;
if (version < 2) {
migrate_repodb_2(db);
}
meta["version"] = 2;
exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump()));
}

void check_json(bool b, std::string_view what) {
if (!b) {
throw_user_error<errc::invalid_catalog_json>("Catalog JSON is invalid: {}", what);
}
}

} // namespace

catalog catalog::open(const std::string& db_path) {
@@ -123,6 +144,30 @@ catalog catalog::open(const std::string& db_path) {
catalog::catalog(sqlite3::database db)
: _db(std::move(db)) {}

void catalog::_store_pkg(const package_info& pkg, std::monostate) {
neo_assert_always(
invariant,
false,
"There was an attempt to insert a package listing into the database where that package "
"listing does not have a remote listing. If you see this message, it is a dds bug.",
pkg.ident.to_string());
}

namespace {

std::string transforms_to_json(const std::vector<fs_transformation>& trs) {
std::string acc = "[";
for (auto it = trs.begin(); it != trs.end(); ++it) {
acc += it->as_json();
if (std::next(it) != trs.end()) {
acc += ", ";
}
}
return acc + "]";
}

} // namespace

void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git) {
auto lm_usage = git.auto_lib.value_or(lm::usage{});
sqlite3::exec( //
@@ -135,7 +180,8 @@ void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git)
git_ref,
lm_name,
lm_namespace,
description
description,
repo_transform
) VALUES (
?1,
?2,
@@ -143,7 +189,8 @@ void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git)
?4,
CASE WHEN ?5 = '' THEN NULL ELSE ?5 END,
CASE WHEN ?6 = '' THEN NULL ELSE ?6 END,
?7
?7,
?8
)
)"_sql,
std::forward_as_tuple( //
@@ -153,7 +200,8 @@ void catalog::_store_pkg(const package_info& pkg, const git_remote_listing& git)
git.ref,
lm_usage.name,
lm_usage.namespace_,
pkg.description));
pkg.description,
transforms_to_json(git.transforms)));
}

void catalog::store(const package_info& pkg) {
@@ -197,7 +245,8 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
git_ref,
lm_name,
lm_namespace,
description
description,
repo_transform
FROM dds_cat_pkgs
WHERE name = ? AND version = ?
)"_sql);
@@ -210,6 +259,7 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
std::optional<std::string>,
std::optional<std::string>,
std::optional<std::string>,
std::string,
std::string>(st);
if (!opt_tup) {
dym_target::fill([&] {
@@ -220,7 +270,15 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
});
return std::nullopt;
}
const auto& [pkg_id, name, version, git_url, git_ref, lm_name, lm_namespace, description]
const auto& [pkg_id,
name,
version,
git_url,
git_ref,
lm_name,
lm_namespace,
description,
repo_transform]
= *opt_tup;
assert(pk_id.name == name);
assert(pk_id.version == semver::version::parse(version));
@@ -229,7 +287,7 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept

auto deps = dependencies_of(pk_id);

return package_info{
auto info = package_info{
pk_id,
std::move(deps),
std::move(description),
@@ -237,8 +295,35 @@ std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept
*git_url,
*git_ref,
lm_name ? std::make_optional(lm::usage{*lm_namespace, *lm_name}) : std::nullopt,
{},
},
};

auto append_transform = [](auto transform) {
return [transform = std::move(transform)](auto& remote) {
if constexpr (neo::alike<decltype(remote), std::monostate>) {
// Do nothing
} else {
remote.transforms.push_back(std::move(transform));
}
};
};

if (!repo_transform.empty()) {
auto tr_json = json5::parse_data(repo_transform);
check_json(tr_json.is_array(),
fmt::format("Database record for {} has an invalid 'repo_transform' field [1]",
pkg_id));
for (const auto& el : tr_json.as_array()) {
check_json(
el.is_object(),
fmt::format("Database record for {} has an invalid 'repo_transform' field [2]",
pkg_id));
auto tr = fs_transformation::from_json(el);
std::visit(append_transform(tr), info.remote);
}
}
return info;
}

auto pair_to_pkg_id = [](auto&& pair) {
@@ -291,88 +376,11 @@ std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const no
| ranges::to_vector;
}

namespace {

void check_json(bool b, std::string_view what) {
if (!b) {
throw_user_error<errc::invalid_catalog_json>("Catalog JSON is invalid: {}", what);
}
}

} // namespace

void catalog::import_json_str(std::string_view content) {
using nlohmann::json;

auto root = json::parse(content);
check_json(root.is_object(), "Root of JSON must be an object (key-value mapping)");

auto version = root["version"];
check_json(version.is_number_integer(), "/version must be an integral value");
check_json(version <= 1, "/version is too new. We don't know how to parse this.");

auto packages = root["packages"];
check_json(packages.is_object(), "/packages must be an object");
auto pkgs = parse_packages_json(content);

sqlite3::transaction_guard tr{_db};

for (const auto& [pkg_name_, versions_map] : packages.items()) {
std::string pkg_name = pkg_name_;
check_json(versions_map.is_object(),
fmt::format("/packages/{} must be an object", pkg_name));

for (const auto& [version_, pkg_info] : versions_map.items()) {
auto version = semver::version::parse(version_);
check_json(pkg_info.is_object(),
fmt::format("/packages/{}/{} must be an object", pkg_name, version_));

package_info info{{pkg_name, version}, {}, {}, {}};
auto deps = pkg_info["depends"];

if (!deps.is_null()) {
check_json(deps.is_object(),
fmt::format("/packages/{}/{}/depends must be an object",
pkg_name,
version_));

for (const auto& [dep_name, dep_version] : deps.items()) {
check_json(dep_version.is_string(),
fmt::format("/packages/{}/{}/depends/{} must be a string",
pkg_name,
version_,
dep_name));
auto range = semver::range::parse(std::string(dep_version));
info.deps.push_back({
std::string(dep_name),
{range.low(), range.high()},
});
}
}

auto git_remote = pkg_info["git"];
if (!git_remote.is_null()) {
check_json(git_remote.is_object(), "`git` must be an object");
std::string url = git_remote["url"];
std::string ref = git_remote["ref"];
auto lm_usage = git_remote["auto-lib"];
std::optional<lm::usage> autolib;
if (!lm_usage.is_null()) {
autolib = lm::split_usage_string(std::string(lm_usage));
}
info.remote = git_remote_listing{url, ref, autolib};
} else {
throw_user_error<errc::no_catalog_remote_info>("No remote info for /packages/{}/{}",
pkg_name,
version_);
}

auto desc_ = pkg_info["description"];
if (!desc_.is_null()) {
check_json(desc_.is_string(), "`description` must be a string");
info.description = desc_;
}

store(info);
}
for (const auto& pkg : pkgs) {
store(pkg);
}
}

+ 4
- 9
src/dds/catalog/catalog.hpp View File

@@ -1,9 +1,11 @@
#pragma once

#include <dds/catalog/git.hpp>
#include <dds/deps.hpp>
#include <dds/package/id.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/glob.hpp>

#include "./package_info.hpp"

#include <neo/sqlite3/database.hpp>
#include <neo/sqlite3/statement.hpp>
@@ -16,14 +18,6 @@

namespace dds {

struct package_info {
package_id ident;
std::vector<dependency> deps;
std::string description;

std::variant<git_remote_listing> remote;
};

class catalog {
neo::sqlite3::database _db;
mutable neo::sqlite3::statement_cache _stmt_cache{_db};
@@ -32,6 +26,7 @@ class catalog {
catalog(const catalog&) = delete;

void _store_pkg(const package_info&, const git_remote_listing&);
void _store_pkg(const package_info&, std::monostate);

public:
catalog(catalog&&) = default;

+ 3
- 3
src/dds/catalog/catalog.test.cpp View File

@@ -19,7 +19,7 @@ TEST_CASE_METHOD(catalog_test_case, "Store a simple package") {
dds::package_id("foo", semver::version::parse("1.2.3")),
{},
"example",
dds::git_remote_listing{"http://example.com", "master", std::nullopt},
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}},
});

auto pkgs = db.by_name("foo");
@@ -38,7 +38,7 @@ TEST_CASE_METHOD(catalog_test_case, "Store a simple package") {
dds::package_id("foo", semver::version::parse("1.2.3")),
{},
"example",
dds::git_remote_listing{"http://example.com", "develop", std::nullopt},
dds::git_remote_listing{"http://example.com", "develop", std::nullopt, {}},
}));
// The previous pkg_id is still a valid lookup key
info = db.get(pkgs[0]);
@@ -54,7 +54,7 @@ TEST_CASE_METHOD(catalog_test_case, "Package requirements") {
{"baz", {semver::version::parse("5.3.0"), semver::version::parse("6.0.0")}},
},
"example",
dds::git_remote_listing{"http://example.com", "master", std::nullopt},
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}},
});
auto pkgs = db.by_name("foo");
REQUIRE(pkgs.size() == 1);

+ 21
- 16
src/dds/catalog/get.cpp View File

@@ -2,34 +2,39 @@

#include <dds/catalog/catalog.hpp>
#include <dds/error/errors.hpp>
#include <dds/proc.hpp>

#include <neo/assert.hpp>
#include <nlohmann/json.hpp>
#include <range/v3/algorithm/all_of.hpp>
#include <range/v3/algorithm/any_of.hpp>
#include <range/v3/distance.hpp>
#include <range/v3/numeric/accumulate.hpp>
#include <spdlog/spdlog.h>

using namespace dds;

namespace {

temporary_sdist do_pull_sdist(const package_info& listing, std::monostate) {
neo_assert_always(
invariant,
false,
"A package listing in the catalog has no defined remote from which to pull. This "
"shouldn't happen in normal usage. This will occur if the database has been "
"manually altered, or if DDS has a bug.",
listing.ident.to_string());
}

temporary_sdist do_pull_sdist(const package_info& listing, const git_remote_listing& git) {
auto tmpdir = dds::temporary_dir::create();
using namespace std::literals;
spdlog::info("Cloning Git repository: {} [{}] ...", git.url, git.ref);
auto command = {"git"s,
"clone"s,
"--depth=1"s,
"--branch"s,
git.ref,
git.url,
tmpdir.path().generic_string()};
auto git_res = run_proc(command);
if (!git_res.okay()) {
throw_external_error<errc::git_clone_failure>(
"Git clone operation failed [Git command: {}] [Exitted {}]:\n{}",
quote_command(command),
git_res.retc,
git_res.output);
git.clone(tmpdir.path());

for (const auto& tr : git.transforms) {
tr.apply_to(tmpdir.path());
}

spdlog::info("Create sdist from clone ...");
if (git.auto_lib.has_value()) {
spdlog::info("Generating library data automatically");

+ 214
- 0
src/dds/catalog/import.cpp View File

@@ -0,0 +1,214 @@
#include "./import.hpp"

#include <dds/error/errors.hpp>

#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <semester/walk.hpp>
#include <spdlog/fmt/fmt.h>

#include <optional>

using namespace dds;

// Adapts a semester::walk `mapping` so that *any* key is accepted: the key
// string is first passed through KeyFunc (e.g. to record it into a variable),
// and the key's value is then run through the usual walk sequence.
template <typename KeyFunc, typename... Args>
struct any_key {
    KeyFunc _key_fn;                  // Invoked with each key before its value
    semester::walk_seq<Args...> _seq; // Applied to the key's associated value

    any_key(KeyFunc&& kf, Args&&... args)
        : _key_fn(kf)
        , _seq(NEO_FWD(args)...) {}

    // Called by the walk machinery for each (key, value) pair in the mapping.
    template <typename Data>
    semester::walk_result operator()(std::string_view key, Data&& dat) {
        auto res = _key_fn(key);
        if (res.rejected()) {
            // The key handler vetoed this entry: propagate the rejection
            return res;
        }
        return _seq.invoke(NEO_FWD(dat));
    }
};

// Deduction guide: deduce the key functor and walk-sequence element types.
template <typename KF, typename... Args>
any_key(KF&&, Args&&...) -> any_key<KF, Args...>;

namespace {

// Shorthand type-check steps for the JSON walks below:
using require_obj   = semester::require_type<json5::data::mapping_type>;
using require_array = semester::require_type<json5::data::array_type>;
using require_str   = semester::require_type<std::string>;

// Raise an invalid_catalog_json user error with a formatted message.
// Never returns: always throws.
template <typename... Args>
[[noreturn]] void import_error(Args&&... args) {
    throw_user_error<dds::errc::invalid_catalog_json>(NEO_FWD(args)...);
}

// Parse the value of a package's "git" key into a git_remote_listing.
// Requires 'url' and 'ref' strings; 'auto-lib' (a "namespace/name" usage
// string) and 'transform' (an array of filesystem transformations) are
// optional. Errors are reported as invalid_catalog_json.
git_remote_listing parse_git_remote(const json5::data& data) {
    git_remote_listing git;

    using namespace semester::walk_ops;

    walk(data,
         require_obj{"Git remote should be an object"},
         mapping{required_key{"url",
                              "A git 'url' string is required",
                              require_str("Git URL should be a string"),
                              put_into(git.url)},
                 required_key{"ref",
                              "A git 'ref' is required, and must be a tag or branch name",
                              require_str("Git ref should be a string"),
                              put_into(git.ref)},
                 if_key{"auto-lib",
                        require_str("'auto-lib' should be a string"),
                        put_into(git.auto_lib,
                                 [](std::string const& str) {
                                     try {
                                         return lm::split_usage_string(str);
                                     } catch (const std::runtime_error& e) {
                                         // Re-raise as a catalog import error with the JSON path
                                         import_error("{}: {}", walk.path(), e.what());
                                     }
                                 })},
                 if_key{"transform",
                        require_array{"Expect an array of transforms"},
                        for_each{put_into(std::back_inserter(git.transforms), [](auto&& dat) {
                            try {
                                return fs_transformation::from_json(dat);
                            } catch (const semester::walk_error& e) {
                                // Invalid transform entry: surface as a catalog error
                                import_error(e.what());
                            }
                        })}}});

    return git;
}

// Parse a single package+version entry ("/packages/<name>/<version>") into a
// package_info. Throws invalid_catalog_json for malformed entries, including
// entries that specify no remote (or more than one).
package_info
parse_pkg_json_v1(std::string_view name, semver::version version, const json5::data& data) {
    package_info ret;
    ret.ident = package_id{std::string{name}, version};

    using namespace semester::walk_ops;

    // Scratch state shared by the per-dependency handlers below: each
    // 'depends' entry fills dep_name/dep_range, then make_dep consumes them.
    std::string dep_name;
    auto dep_range = semver::range::everything();
    auto parse_dep_range = [&](const std::string& s) {
        try {
            return semver::range::parse_restricted(s);
        } catch (const semver::invalid_range& e) {
            // [[noreturn]]: surfaces as invalid_catalog_json with the JSON path
            import_error(std::string(walk.path()) + e.what());
        }
    };
    auto make_dep = [&](auto&&) {
        return dependency{dep_name, {dep_range.low(), dep_range.high()}};
    };

    // Guard: only one remote alternative may be set on a package
    auto check_one_remote = [&](auto&&) {
        if (!semester::holds_alternative<std::monostate>(ret.remote)) {
            return walk.reject("Cannot specify multiple remotes for a package");
        }
        return walk.pass;
    };

    // Every key under 'depends' is a dependency name; its value is a
    // version-range string.
    auto add_dep = any_key{put_into(dep_name),
                           require_str{"Dependency should specify a version range string"},
                           put_into_pass{dep_range, parse_dep_range},
                           put_into{std::back_inserter(ret.deps), make_dep}};

    walk(data,
         mapping{if_key{"description",
                        require_str{"'description' should be a string"},
                        put_into{ret.description}},
                 if_key{"depends",
                        require_obj{"'depends' must be a JSON object"},
                        mapping{add_dep}},
                 if_key{
                     "git",
                     check_one_remote,
                     put_into(ret.remote, parse_git_remote),
                 }});

    // A package with no remote cannot be obtained: reject it outright
    if (semester::holds_alternative<std::monostate>(ret.remote)) {
        import_error("{}: Package listing for {} does not have any remote information",
                     walk.path(),
                     ret.ident.to_string());
    }

    return ret;
}

// Parse a version-1 catalog document: 'packages' maps package names to
// {version-string: package-object} maps. Returns the accumulated entries.
std::vector<package_info> parse_json_v1(const json5::data& data) {
    std::vector<package_info> acc_pkgs;

    // Walk state: name/version of the entry currently being visited
    std::string pkg_name;
    semver::version pkg_version;
    // (Removed an unused local `package_info dummy;` that was never referenced.)

    using namespace semester::walk_ops;

    auto convert_pkg_obj
        = [&](auto&& dat) { return parse_pkg_json_v1(pkg_name, pkg_version, dat); };

    // Version keys must be full semantic versions (e.g. '1.2.3')
    auto convert_version_str = [&](std::string_view str) {
        try {
            return semver::version::parse(str);
        } catch (const semver::invalid_version& e) {
            throw_user_error<errc::invalid_catalog_json>("{}: version string '{}' is invalid: {}",
                                                         walk.path(),
                                                         pkg_name,
                                                         str,
                                                         e.what());
        }
    };

    // Inner level: each key is a version string, each value a package object
    auto import_pkg_versions
        = walk_seq{require_obj{"Package entries must be JSON objects"},
                   mapping{any_key{put_into(pkg_version, convert_version_str),
                                   require_obj{"Package+version entries must be JSON"},
                                   put_into{std::back_inserter(acc_pkgs), convert_pkg_obj}}}};

    // Outer level: each key is a package name
    auto import_pkgs = walk_seq{require_obj{"'packages' should be a JSON object"},
                                mapping{any_key{put_into(pkg_name), import_pkg_versions}}};

    walk(data,
         mapping{
             if_key{"version", just_accept},  // Already validated by the caller
             required_key{"packages", "'packages' should be an object of packages", import_pkgs},
         });

    return acc_pkgs;
}

} // namespace

// Parse a catalog JSON5 document into a list of package entries.
// Throws invalid_catalog_json on JSON5 syntax errors, a missing or
// non-numeric 'version' property, or an unsupported catalog version.
std::vector<package_info> dds::parse_packages_json(std::string_view content) {
    json5::data data;
    try {
        data = json5::parse_data(content);
    } catch (const json5::parse_error& e) {
        throw_user_error<errc::invalid_catalog_json>("JSON5 syntax error: {}", e.what());
    }

    if (!data.is_object()) {
        throw_user_error<errc::invalid_catalog_json>("Root of import JSON must be a JSON object");
    }

    auto& data_obj = data.as_object();
    auto version_it = data_obj.find("version");
    if (version_it == data_obj.end() || !version_it->second.is_number()) {
        throw_user_error<errc::invalid_catalog_json>(
            "Root JSON import requires a 'version' property");
    }

    double version = version_it->second.as_number();

    try {
        // Exact float compare is intentional: the literal `1` parses to 1.0
        if (version == 1.0) {
            return parse_json_v1(data);
        } else {
            throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'",
                                                         version);
        }
    } catch (const semester::walk_error& e) {
        // Normalize walk errors into the user-facing catalog error type
        throw_user_error<errc::invalid_catalog_json>(e.what());
    }
}

+ 9
- 0
src/dds/catalog/import.hpp View File

@@ -0,0 +1,9 @@
#pragma once

#include "./package_info.hpp"

namespace dds {

// Parse a catalog JSON5 document into a list of package entries.
// Throws errc::invalid_catalog_json if the document is malformed.
std::vector<package_info> parse_packages_json(std::string_view);

} // namespace dds

+ 154
- 0
src/dds/catalog/import.test.cpp View File

@@ -0,0 +1,154 @@
#include "./import.hpp"

#include <dds/error/errors.hpp>

#include <catch2/catch.hpp>

TEST_CASE("An empty import is okay") {
    // A document with a version and an empty package map yields no packages
    const auto parsed = dds::parse_packages_json("{version: 1, packages: {}}");
    CHECK(parsed.empty());
}

TEST_CASE("Valid/invalid package JSON5") {
    // Inputs that must all be rejected with invalid_catalog_json:
    std::string_view bads[] = {
        // Invalid JSON:
        "",
        // Should be an object
        "[]",
        // Missing keys
        "{}",
        // Missing "packages"
        "{version: 1}",
        // Bad version
        "{version: 1.7, packages: {}}",
        "{version: [], packages: {}}",
        "{version: null, packages: {}}",
        // 'packages' should be an object
        "{version: 1, packages: []}",
        "{version: 1, packages: null}",
        "{version: 1, packages: 4}",
        "{version: 1, packages: 'lol'}",
        // Objects in 'packages' should be objects
        "{version:1, packages:{foo:null}}",
        "{version:1, packages:{foo:[]}}",
        "{version:1, packages:{foo:9}}",
        "{version:1, packages:{foo:'lol'}}",
        // Objects in 'packages' should have version strings
        "{version:1, packages:{foo:{'lol':{}}}}",
        "{version:1, packages:{foo:{'1.2':{}}}}",
        // No remote
        "{version:1, packages:{foo:{'1.2.3':{}}}}",
        // Bad empty git
        "{version:1, packages:{foo:{'1.2.3':{git:{}}}}}",
        // Git `url` and `ref` should be a string
        "{version:1, packages:{foo:{'1.2.3':{git:{url:2, ref:''}}}}}",
        "{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:2}}}}}",
        // 'auto-lib' should be a usage string
        "{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':3}}}}}",
        "{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'ffasdf'}}}}}",
        // 'transform' should be an array
        R"(
            {
                version: 1,
                packages: {foo: {'1.2.3': {
                    git: {
                        url: '',
                        ref: '',
                        'auto-lib': 'a/b',
                        transform: 'lol hi',
                    }
                }}}
            }
        )",
    };

    for (auto bad : bads) {
        INFO("Bad: " << bad);
        CHECK_THROWS_AS(dds::parse_packages_json(bad),
                        dds::user_error<dds::errc::invalid_catalog_json>);
    }

    // Inputs that must all parse without throwing:
    std::string_view goods[] = {
        // Basic empty:
        "{version:1, packages:{}}",
        // No versions for 'foo' is weird, but okay
        "{version:1, packages:{foo:{}}}",
        // Basic package with minimum info:
        "{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:''}}}}}",
        // Minimal auto-lib:
        "{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'a/b'}}}}}",
        // Empty transforms:
        R"(
            {
                version: 1,
                packages: {foo: {'1.2.3': {
                    git: {
                        url: '',
                        ref: '',
                        'auto-lib': 'a/b',
                        transform: [],
                    }
                }}}
            }
        )",
        // Basic transform:
        R"(
            {
                version: 1,
                packages: {foo: {'1.2.3': {
                    git: {
                        url: '',
                        ref: '',
                        'auto-lib': 'a/b',
                        transform: [{
                            copy: {
                                from: 'here',
                                to: 'there',
                                include: [
                                    "*.c",
                                    "*.cpp",
                                    "*.h",
                                    '*.txt'
                                ]
                            }
                        }],
                    }
                }}}
            }
        )",
    };
    for (auto good : goods) {
        INFO("Parse: " << good);
        CHECK_NOTHROW(dds::parse_packages_json(good));
    }
}

TEST_CASE("Check a single object") {
    // One package with one version; verify every field survives the parse.
    auto pkgs = dds::parse_packages_json(R"({
        version: 1,
        packages: {
            foo: {
                '1.2.3': {
                    git: {
                        url: 'foo',
                        ref: 'fasdf',
                        'auto-lib': 'a/b',
                    }
                }
            }
        }
    })");
    REQUIRE(pkgs.size() == 1);
    CHECK(pkgs[0].ident.name == "foo");
    CHECK(pkgs[0].ident.to_string() == "foo@1.2.3");
    CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote));

    auto git = std::get<dds::git_remote_listing>(pkgs[0].remote);
    CHECK(git.url == "foo");
    CHECK(git.ref == "fasdf");
    REQUIRE(git.auto_lib);
    CHECK(git.auto_lib->namespace_ == "a");
    CHECK(git.auto_lib->name == "b");
}

+ 25
- 0
src/dds/catalog/package_info.hpp View File

@@ -0,0 +1,25 @@
#pragma once

#include "./remote/git.hpp"

#include <dds/deps.hpp>
#include <dds/package/id.hpp>
#include <dds/util/fs_transform.hpp>
#include <dds/util/glob.hpp>

#include <optional>
#include <string>
#include <variant>
#include <vector>

namespace dds {

// A single package entry in the catalog.
struct package_info {
    package_id ident;             // The name@version identity of the package
    std::vector<dependency> deps; // Packages this package depends on
    std::string description;      // Human-readable description (may be empty)

    // Where the package can be obtained. `monostate` means no remote is set.
    std::variant<std::monostate, git_remote_listing> remote;
};

} // namespace dds

+ 18
- 0
src/dds/catalog/remote/git.cpp View File

@@ -0,0 +1,18 @@
#include "./git.hpp"

#include <dds/error/errors.hpp>
#include <dds/proc.hpp>

// Clone this remote (at `ref`, depth 1) into the directory `dest`, removing
// any existing content there first. Throws errc::git_clone_failure if the
// `git` subprocess exits non-zero.
void dds::git_remote_listing::clone(dds::path_ref dest) const {
    fs::remove_all(dest);
    using namespace std::literals;
    auto command = {"git"s, "clone"s, "--depth=1"s, "--branch"s, ref, url, dest.generic_string()};
    auto git_res = run_proc(command);
    if (!git_res.okay()) {
        throw_external_error<errc::git_clone_failure>(
            // (Fixed message typo: "Exitted" -> "Exited")
            "Git clone operation failed [Git command: {}] [Exited {}]:\n{}",
            quote_command(command),
            git_res.retc,
            git_res.output);
    }
}

+ 24
- 0
src/dds/catalog/remote/git.hpp View File

@@ -0,0 +1,24 @@
#pragma once

#include <dds/catalog/get.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/fs_transform.hpp>

#include <libman/package.hpp>

#include <optional>
#include <string>

namespace dds {

// A package remote that lives in a Git repository.
struct git_remote_listing {
    std::string url; // Clone URL
    std::string ref; // Tag or branch name (cloned with --depth=1)
    // If set, library metadata is generated automatically with this usage
    std::optional<lm::usage> auto_lib;

    // Filesystem transformations applied to the clone after checkout
    std::vector<fs_transformation> transforms;

    // Clone this remote into `path` (removes existing content there first)
    void clone(path_ref path) const;
};

} // namespace dds

+ 26
- 16
src/dds/error/errors.cpp View File

@@ -37,6 +37,8 @@ std::string error_url_suffix(dds::errc ec) noexcept {
return "no-catalog-remote-info.html";
case errc::git_clone_failure:
return "git-clone-failure.html";
case errc::invalid_repo_transform:
return "invalid-repo-transform.html";
case errc::sdist_ident_mismatch:
return "sdist-ident-mismatch.html";
case errc::corrupted_build_db:
@@ -67,6 +69,8 @@ std::string error_url_suffix(dds::errc ec) noexcept {
return "dup-lib-name.html";
case errc::unknown_usage_name:
return "unknown-usage.html";
case errc::template_error:
return "template-error.html";
case errc::none:
break;
}
@@ -169,6 +173,11 @@ Refer to the documentation for details.
dds tried to clone a repository using Git, but the clone operation failed.
There are a variety of possible causes. It is best to check the output from
Git in diagnosing this failure.
)";
case errc::invalid_repo_transform:
return R"(
A 'transform' property in a catalog entry contains an invalid transformation.
These cannot and should not be saved to the catalog.
)";
case errc::sdist_ident_mismatch:
return R"(
@@ -239,6 +248,8 @@ Check your spelling, and check that the package containing the library is
available, either from the `package.json5` or from the `INDEX.lmi` that was used
for the build.
)";
case errc::template_error:
return R"(dds encountered a problem while rendering a file template and cannot continue.)";
case errc::none:
break;
}
@@ -246,6 +257,8 @@ for the build.
std::terminate();
}

#define BUG_STRING_SUFFIX " <- (Seeing this text is a `dds` bug. Please report it.)"

std::string_view dds::default_error_string(dds::errc ec) noexcept {
switch (ec) {
case errc::invalid_builtin_toolchain:
@@ -275,9 +288,11 @@ std::string_view dds::default_error_string(dds::errc ec) noexcept {
"packages";
case errc::git_clone_failure:
return "A git-clone operation failed.";
case errc::invalid_repo_transform:
return "A repository filesystem transformation is invalid";
case errc::sdist_ident_mismatch:
return "The package version of a generated source distribution did not match the version\n"
"that was expected of it";
return "The package version of a generated source distribution did not match the "
"version\n that was expected of it";
case errc::corrupted_build_db:
return "The build database file is corrupted";
case errc::invalid_lib_manifest:
@@ -285,27 +300,20 @@ std::string_view dds::default_error_string(dds::errc ec) noexcept {
case errc::invalid_pkg_manifest:
return "The package manifest is invalid";
case errc::invalid_version_range_string:
return "Attempted to parse an invalid version range string. <- (Seeing this text is a "
"`dds` bug. Please report it.)";
return "Attempted to parse an invalid version range string." BUG_STRING_SUFFIX;
case errc::invalid_version_string:
return "Attempted to parse an invalid version string. <- (Seeing this text is a `dds` "
"bug. Please report it.)";
return "Attempted to parse an invalid version string." BUG_STRING_SUFFIX;
case errc::invalid_config_key:
return "Found an invalid configuration key. <- (Seeing this text is a `dds` bug. "
"Please report it.)";
return "Found an invalid configuration key." BUG_STRING_SUFFIX;
case errc::invalid_lib_filesystem:
case errc::invalid_pkg_filesystem:
return "The filesystem structure of the package/library is invalid. <- (Seeing this "
"text is a `dds` bug. Please report it.)";
return "The filesystem structure of the package/library is invalid." BUG_STRING_SUFFIX;
case errc::invalid_pkg_id:
return "A package identifier is invalid <- (Seeing this text is a `dds` bug. Please "
"report it.)";
return "A package identifier is invalid." BUG_STRING_SUFFIX;
case errc::invalid_pkg_name:
return "A package name is invalid <- (Seeing this text is a `dds` bug. Please report "
"it.)";
return "A package name is invalid." BUG_STRING_SUFFIX;
case errc::sdist_exists:
return "The source ditsribution already exists at the destination <- (Seeing this "
"text is a `dds` bug. Please report it.)";
return "The source ditsribution already exists at the destination " BUG_STRING_SUFFIX;
case errc::unknown_test_driver:
return "The specified test_driver is not known to `dds`";
case errc::dependency_resolve_failure:
@@ -314,6 +322,8 @@ std::string_view dds::default_error_string(dds::errc ec) noexcept {
return "More than one library has claimed the same name.";
case errc::unknown_usage_name:
return "A `uses` or `links` field names a library that isn't recognized.";
case errc::template_error:
return "There was an error while rendering a template file." BUG_STRING_SUFFIX;
case errc::none:
break;
}

+ 1
- 0
src/dds/error/errors.hpp View File

@@ -24,6 +24,7 @@ enum class errc {
no_catalog_remote_info,

git_clone_failure,
invalid_repo_transform,
sdist_ident_mismatch,
sdist_exists,


+ 2
- 0
src/dds/source/file.cpp View File

@@ -19,6 +19,7 @@ std::optional<source_kind> dds::infer_source_kind(path_ref p) noexcept {
".hpp",
".hxx",
".inl",
".ipp",
};
assert(std::is_sorted(header_exts.begin(), header_exts.end()));
static std::vector<std::string_view> source_exts = {
@@ -29,6 +30,7 @@ std::optional<source_kind> dds::infer_source_kind(path_ref p) noexcept {
".cpp",
".cxx",
};
assert(std::is_sorted(header_exts.begin(), header_exts.end()));
assert(std::is_sorted(source_exts.begin(), source_exts.end()));
auto leaf = p.filename();


+ 16
- 11
src/dds/toolchain/from_json.test.cpp View File

@@ -13,30 +13,33 @@ void check_tc_compile(std::string_view tc_content,
auto tc = dds::parse_toolchain_json5(tc_content);

dds::compile_file_spec cf;
cf.source_path = "foo.cpp";
cf.out_path = "foo.o";
auto cf_cmd = tc.create_compile_command(cf, dds::toolchain_knobs{});
cf.source_path = "foo.cpp";
cf.out_path = "foo.o";
auto cf_cmd = tc.create_compile_command(cf, dds::fs::current_path(), dds::toolchain_knobs{});
auto cf_cmd_str = dds::quote_command(cf_cmd.command);
CHECK(cf_cmd_str == expected_compile);

cf.enable_warnings = true;
cf_cmd = tc.create_compile_command(cf, dds::toolchain_knobs{});
cf_cmd_str = dds::quote_command(cf_cmd.command);
cf_cmd = tc.create_compile_command(cf, dds::fs::current_path(), dds::toolchain_knobs{});
cf_cmd_str = dds::quote_command(cf_cmd.command);
CHECK(cf_cmd_str == expected_compile_warnings);

dds::archive_spec ar_spec;
ar_spec.input_files.push_back("foo.o");
ar_spec.input_files.push_back("bar.o");
ar_spec.out_path = "stuff.a";
auto ar_cmd = tc.create_archive_command(ar_spec, dds::toolchain_knobs{});
auto ar_cmd_str = dds::quote_command(ar_cmd);
auto ar_cmd
= tc.create_archive_command(ar_spec, dds::fs::current_path(), dds::toolchain_knobs{});
auto ar_cmd_str = dds::quote_command(ar_cmd);
CHECK(ar_cmd_str == expected_ar);

dds::link_exe_spec exe_spec;
exe_spec.inputs.push_back("foo.o");
exe_spec.inputs.push_back("bar.a");
exe_spec.output = "meow.exe";
auto exe_cmd = tc.create_link_executable_command(exe_spec, dds::toolchain_knobs{});
auto exe_cmd = tc.create_link_executable_command(exe_spec,
dds::fs::current_path(),
dds::toolchain_knobs{});
auto exe_cmd_str = dds::quote_command(exe_cmd);
CHECK(exe_cmd_str == expected_exe);
}
@@ -93,7 +96,7 @@ TEST_CASE("Manipulate a toolchain and file compilation") {
dds::compile_file_spec cfs;
cfs.source_path = "foo.cpp";
cfs.out_path = "foo.o";
auto cmd = tc.create_compile_command(cfs, dds::toolchain_knobs{});
auto cmd = tc.create_compile_command(cfs, dds::fs::current_path(), dds::toolchain_knobs{});
CHECK(cmd.command
== std::vector<std::string>{"g++",
"-fPIC",
@@ -108,7 +111,9 @@ TEST_CASE("Manipulate a toolchain and file compilation") {
"-ofoo.o"});

cfs.definitions.push_back("FOO=BAR");
cmd = tc.create_compile_command(cfs, dds::toolchain_knobs{.is_tty = true});
cmd = tc.create_compile_command(cfs,
dds::fs::current_path(),
dds::toolchain_knobs{.is_tty = true});
CHECK(cmd.command
== std::vector<std::string>{"g++",
"-fPIC",
@@ -126,7 +131,7 @@ TEST_CASE("Manipulate a toolchain and file compilation") {
"-ofoo.o"});

cfs.include_dirs.push_back("fake-dir");
cmd = tc.create_compile_command(cfs, dds::toolchain_knobs{});
cmd = tc.create_compile_command(cfs, dds::fs::current_path(), dds::toolchain_knobs{});
CHECK(cmd.command
== std::vector<std::string>{"g++",
"-fPIC",

+ 28
- 10
src/dds/toolchain/toolchain.cpp View File

@@ -6,6 +6,8 @@
#include <dds/util/paths.hpp>
#include <dds/util/string.hpp>

#include <range/v3/view/transform.hpp>

#include <cassert>
#include <optional>
#include <string>
@@ -52,7 +54,25 @@ vector<string> toolchain::definition_args(std::string_view s) const noexcept {
return replace(_def_template, "[def]", s);
}

static fs::path shortest_path_from(path_ref file, path_ref base) {
auto relative = file.lexically_normal().lexically_proximate(base);
auto abs = file.lexically_normal();
if (relative.string().size() > abs.string().size()) {
return abs;
} else {
return relative;
}
}

template <typename R>
static auto shortest_path_args(path_ref base, R&& r) {
return ranges::views::all(r) //
| ranges::views::transform(
[base](auto&& path) { return shortest_path_from(path, base).string(); }); //
}

compile_command_info toolchain::create_compile_command(const compile_file_spec& spec,
path_ref,
toolchain_knobs knobs) const noexcept {
using namespace std::literals;

@@ -119,32 +139,30 @@ compile_command_info toolchain::create_compile_command(const compile_file_spec&
}

vector<string> toolchain::create_archive_command(const archive_spec& spec,
path_ref cwd,
toolchain_knobs) const noexcept {
vector<string> cmd;

auto out_arg = shortest_path_from(spec.out_path, cwd).string();
for (auto& arg : _link_archive) {
if (arg == "[in]") {
std::transform(spec.input_files.begin(),
spec.input_files.end(),
std::back_inserter(cmd),
[](auto&& p) { return p.string(); });
extend(cmd, shortest_path_args(cwd, spec.input_files));
} else {
cmd.push_back(replace(arg, "[out]", spec.out_path.string()));
cmd.push_back(replace(arg, "[out]", out_arg));
}
}
return cmd;
}

vector<string> toolchain::create_link_executable_command(const link_exe_spec& spec,
path_ref cwd,
toolchain_knobs) const noexcept {
vector<string> cmd;
for (auto& arg : _link_exe) {
if (arg == "[in]") {
std::transform(spec.inputs.begin(),
spec.inputs.end(),
std::back_inserter(cmd),
[](auto&& p) { return p.string(); });
extend(cmd, shortest_path_args(cwd, spec.inputs));
} else {
cmd.push_back(replace(arg, "[out]", spec.output.string()));
cmd.push_back(replace(arg, "[out]", shortest_path_from(spec.output, cwd).string()));
}
}
return cmd;

+ 8
- 4
src/dds/toolchain/toolchain.hpp View File

@@ -82,11 +82,15 @@ public:
std::vector<std::string> definition_args(std::string_view s) const noexcept;
std::vector<std::string> include_args(const fs::path& p) const noexcept;
std::vector<std::string> external_include_args(const fs::path& p) const noexcept;
compile_command_info create_compile_command(const compile_file_spec&,
toolchain_knobs) const noexcept;
std::vector<std::string> create_archive_command(const archive_spec&,
toolchain_knobs) const noexcept;

compile_command_info
create_compile_command(const compile_file_spec&, path_ref cwd, toolchain_knobs) const noexcept;

std::vector<std::string>
create_archive_command(const archive_spec&, path_ref cwd, toolchain_knobs) const noexcept;

std::vector<std::string> create_link_executable_command(const link_exe_spec&,
path_ref cwd,
toolchain_knobs) const noexcept;

static std::optional<toolchain> get_builtin(std::string_view key) noexcept;

+ 189
- 0
src/dds/util/fnmatch.cpp View File

@@ -0,0 +1,189 @@
#include "./fnmatch.hpp"

#include <cassert>
#include <stdexcept>
#include <string_view>
#include <vector>

using charptr = const char*;

namespace dds::detail::fnmatch {

namespace {

// One node of a compiled pattern: matches a prefix of [first, last) and
// delegates the remaining input to the `next` element in the chain.
class base_pattern_elem {
public:
    // True if the input matches this element followed by the rest of the chain.
    virtual bool match(charptr first, charptr last) const noexcept = 0;
    virtual ~base_pattern_elem() = default;

    std::unique_ptr<base_pattern_elem> next; // Next element in the chain
};

class rt_star : public base_pattern_elem {
bool match(charptr first, charptr last) const noexcept {
while (first != last) {
auto did_match = next->match(first, last);
if (did_match) {
return true;
}
++first;
}
// We're at the end. Try once more
return next->match(first, last);
}
};

class rt_any_char : public base_pattern_elem {
bool match(charptr first, charptr last) const noexcept {
if (first == last) {
return false;
}
return next->match(first + 1, last);
}
};

class rt_oneof : public base_pattern_elem {
std::string _chars;
bool match(charptr first, charptr last) const noexcept {
if (first == last) {
return false;
}
auto idx = _chars.find(*first);
if (idx == _chars.npos) {
return false;
}
return next->match(first + 1, last);
}

public:
explicit rt_oneof(std::string chars)
: _chars(chars) {}
};

class rt_lit : public base_pattern_elem {
std::string _lit;
bool match(charptr first, charptr last) const noexcept {
auto remaining = static_cast<std::size_t>(std::distance(first, last));
if (remaining < _lit.size()) {
return false;
}
auto eq = std::equal(first, first + _lit.size(), _lit.begin());
if (!eq) {
return false;
}
return next->match(first + _lit.size(), last);
}

public:
explicit rt_lit(std::string lit)
: _lit(lit) {}
};

class rt_end : public base_pattern_elem {
bool match(charptr first, charptr last) const noexcept { return first == last; }
};

} // namespace

// Compiles a pattern string into a linked chain of matcher elements and runs
// matches against it. The chain always ends with an rt_end sentinel.
class pattern_impl {
    std::unique_ptr<base_pattern_elem> _head;
    // Slot where the next compiled element will be linked in
    std::unique_ptr<base_pattern_elem>* _next_to_compile = &_head;

    // Append a new element of type T at the end of the chain
    template <typename T, typename... Args>
    void _add_elem(Args&&... args) {
        *_next_to_compile = std::make_unique<T>(std::forward<Args>(args)...);
        _next_to_compile = &(*_next_to_compile)->next;
    }

    // Compile a '[...]' group starting just after '['. Returns the position
    // one past the closing ']'. Throws on an unterminated group.
    charptr _compile_oneof(charptr cur, charptr last) {
        std::string chars;
        while (cur != last) {
            auto c = *cur;
            if (c == ']') {
                // We've reached the end of the group
                _add_elem<rt_oneof>(chars);
                return cur + 1;
            }
            if (c == '\\') {
                ++cur;
                if (cur == last) {
                    // (Fixed message typo: was "Untermated", now consistent
                    // with the message below)
                    throw std::runtime_error("Unterminated [group] in pattern");
                }
                chars.push_back(*cur);
            } else {
                chars.push_back(c);
            }
            ++cur;
        }
        throw std::runtime_error("Unterminated [group] in pattern");
    }

    // Compile a run of literal characters, stopping at any special character.
    // Returns the position where compilation should resume.
    charptr _compile_lit(charptr cur, charptr last) {
        std::string lit;
        while (cur != last) {
            auto c = *cur;
            if (c == '*' || c == '[' || c == '?') {
                break;
            }
            if (c == '\\') {
                ++cur;
                if (cur == last) {
                    throw std::runtime_error("Invalid \\ at end of pattern");
                }
                // Push back whatever character follows
                lit.push_back(*cur);
                ++cur;
                continue;
            } else {
                lit.push_back(c);
            }
            ++cur;
        }
        _add_elem<rt_lit>(lit);
        return cur;
    }

    // Recursively compile the pattern, appending one element per construct.
    void _compile_next(charptr first, charptr last) {
        if (first == last) {
            return;
        }
        auto c = *first;
        if (c == '*') {
            _add_elem<rt_star>();
            _compile_next(first + 1, last);
        } else if (c == '[') {
            first = _compile_oneof(first + 1, last);
            _compile_next(first, last);
        } else if (c == '?') {
            _add_elem<rt_any_char>();
            _compile_next(first + 1, last);
        } else {
            // Literal string
            first = _compile_lit(first, last);
            _compile_next(first, last);
        }
    }

public:
    pattern_impl(std::string_view str) {
        _compile_next(str.data(), str.data() + str.size());
        // Set the tail of the list to be an rt_end to detect end-of-string
        _add_elem<rt_end>();
    }

    bool match(charptr first, charptr last) const noexcept {
        assert(_head);
        return _head->match(first, last);
    }
};

} // namespace dds::detail::fnmatch

// Compile `str` into a reusable fnmatch pattern object.
dds::fnmatch::pattern dds::fnmatch::compile(std::string_view str) {
    return pattern{std::make_shared<detail::fnmatch::pattern_impl>(str)};
}

// Forward the match to the compiled implementation.
bool dds::fnmatch::pattern::_match(charptr first, charptr last) const noexcept {
    assert(_impl);
    return _impl->match(first, last);
}

+ 321
- 0
src/dds/util/fnmatch.hpp View File

@@ -0,0 +1,321 @@
#pragma once

#include <cstdlib>
#include <iterator>
#include <memory>
#include <optional>
#include <string>
#include <type_traits>

namespace dds {

namespace fnmatch {

template <typename... Elems>
struct ct_pattern;

class pattern;

} // namespace fnmatch

namespace detail::fnmatch {

// Compile-time marker types representing pattern elements:
template <typename... Sub>
struct seq {}; // A sequence of sub-matchers

struct star {}; // '*': zero or more characters

struct any_one {}; // '?': exactly one character

// Compile-time strlen: count characters preceding the NUL terminator.
template <typename Char>
constexpr std::size_t length(const Char* str) {
    std::size_t count = 0;
    for (const Char* it = str; *it != Char(0); ++it) {
        ++count;
    }
    return count;
}

template <auto... Chars>
struct oneof {}; // '[abc]': one character from the set

template <auto... Chars>
struct not_oneof {}; // '[!abc]': one character NOT in the set

template <auto Char>
struct just {}; // A single literal character

// Trait: is the matcher a single literal character?
template <typename>
struct is_just : std::false_type {};
template <auto C>
struct is_just<just<C>> : std::true_type {};

// Result of compiling a '[...]' group: the compiled matcher type plus the
// offset of the character following the closing ']'.
template <typename Matcher, auto NewCur>
struct oneof_ret {
    using type = Matcher;
    constexpr static auto end_offset = NewCur;
};

// Flip a compiled oneof into its negated form, preserving the end offset.
template <auto... Chars, auto End>
constexpr auto negate(oneof_ret<oneof<Chars...>, End>) {
    return oneof_ret<not_oneof<Chars...>, End>();
}

// Accumulate the characters of a '[...]' group into Chars... until the
// closing ']'. Cur indexes the pattern string; Len is its length.
template <auto Cur, auto Len, auto... Chars, typename String>
constexpr auto compile_oneof_chars(String s) {
    constexpr auto str = s();
    constexpr auto cur_char = str[Cur];
    static_assert(Cur != Len, "Unterminated '[' group in pattern");
    static_assert(Cur + 1 != Len || cur_char != '\\', "Escape \\ at end of pattern");
    if constexpr (cur_char == ']') {
        // Group complete; report where the rest of the pattern resumes
        return oneof_ret<oneof<Chars...>, Cur + 1>();
    } else if constexpr (cur_char == '\\') {
        // Escaped character: take the following character literally
        constexpr auto next_char = str[Cur + 1];
        return compile_oneof_chars<Cur + 2, Len, Chars..., next_char>(s);
    } else {
        return compile_oneof_chars<Cur + 1, Len, Chars..., cur_char>(s);
    }
}

// Compile a '[...]' or '[!...]' group starting just after the '['.
template <auto Cur, auto Len, typename String>
constexpr auto compile_oneof(String s) {
    constexpr auto str = s();
    constexpr bool negated = str[Cur] == '!';
    // A leading '!' negates the group; skip it before collecting characters
    constexpr auto oneof_start = Cur + (negated ? 1 : 0);
    auto oneof = compile_oneof_chars<oneof_start, Len>(s);
    if constexpr (negated) {
        return negate(oneof);
    } else {
        return oneof;
    }
}

// Recursively compile the pattern string (from index Cur) into a ct_pattern
// type, appending one matcher type per pattern element.
template <auto Cur, auto Len, typename... Matchers, typename String>
constexpr auto compile_next(String s) {
    constexpr auto str = s();
    // NOTE(review): str[Cur] is evaluated before the Cur == Len branch below;
    // this presumably relies on the one-past-the-end element being readable
    // (e.g. a string literal's NUL) — confirm against the String provider.
    constexpr auto cur_char = str[Cur];
    if constexpr (Cur == Len) {
        // End of pattern: the collected matchers form the final pattern type
        return dds::fnmatch::ct_pattern<Matchers...>();
    } else if constexpr (cur_char == '*') {
        return compile_next<Cur + 1, Len, Matchers..., star>(s);
    } else if constexpr (cur_char == '?') {
        return compile_next<Cur + 1, Len, Matchers..., any_one>(s);
    } else if constexpr (cur_char == '[') {
        constexpr auto oneof_ret = compile_oneof<Cur + 1, Len>(s);
        return compile_next<oneof_ret.end_offset,
                            Len,
                            Matchers...,
                            typename decltype(oneof_ret)::type>(s);
    } else if constexpr (cur_char == '\\') {
        // Escape sequence
        static_assert(Cur + 1 != Len, "Escape \\ at end of pattern.");
        constexpr auto next_char = str[Cur + 1];
        return compile_next<Cur + 2, Len, Matchers..., just<next_char>>(s);
    } else {
        return compile_next<Cur + 1, Len, Matchers..., just<cur_char>>(s);
    }
}

/**
 * constexpr counterpart of std::equal: compare the range [a_first, a_last)
 * element-by-element with the range beginning at b_first. The second range
 * must be at least as long as the first.
 */
template <typename Iter1, typename Iter2>
constexpr bool equal(Iter1 a_first, Iter1 a_last, Iter2 b_first) {
    for (; a_first != a_last; ++a_first, ++b_first) {
        if (*a_first != *b_first) {
            return false;
        }
    }
    return true;
}

} // namespace detail::fnmatch

namespace fnmatch {

/**
 * A compile-time fnmatch pattern: a sequence of matcher element types
 * (star, any_one, just<C>, oneof<...>, not_oneof<...>). `match()` dispatches
 * through the overloaded `match_1` steps; overload resolution picks the
 * optimized forms below when the remaining tail has the right shape.
 */
template <typename... Elems>
struct ct_pattern {
private:
    /// VVVVVVVVVVVVVVVVVVV Optimized Cases VVVVVVVVVVVVVVVVVVVVVVV

    /**
     * Common case of a star '*' followed by literals to the end of the pattern
     */
    template <typename Iter, auto C, auto... Chars>
    static constexpr bool match_1(Iter cur,
                                  const Iter last,
                                  detail::fnmatch::star,
                                  detail::fnmatch::just<C> c1,
                                  detail::fnmatch::just<Chars>... t) {
        // We know the length of tail required, so we can just skip ahead without
        // a loop
        auto cur_len = std::distance(cur, last);
        if (cur_len < sizeof...(Chars) + 1) {
            // Not enough remaining to match
            return false;
        }
        // Skip ahead and match the rest
        auto to_skip = cur_len - (sizeof...(Chars) + 1);
        return match_1(std::next(cur, to_skip), last, c1, t...);
    }

    /**
     * Common case of a sequence of literals at the tail.
     */
    template <typename Iter, auto... Chars>
    static constexpr bool match_1(Iter cur, const Iter last, detail::fnmatch::just<Chars>...) {
        constexpr auto LitLength = sizeof...(Chars);
        auto remaining = std::distance(cur, last);
        if (remaining != LitLength) {
            return false;
        }
        // Put our characters into an array for a quick comparison
        std::decay_t<decltype(*cur)> chars[LitLength] = {Chars...};
        return detail::fnmatch::equal(chars, chars + LitLength, cur);
    }

    /// VVVVVVVVVVVVVVVVVVVV General cases VVVVVVVVVVVVVVVVVVVVVVVV

    // '*': greedily try to match the tail at every remaining position.
    template <typename Iter, typename... Tail>
    static constexpr bool match_1(Iter cur, const Iter last, detail::fnmatch::star, Tail... t) {
        while (cur != last) {
            auto did_match = match_1(cur, last, t...);
            if (did_match) {
                return true;
            }
            ++cur;
        }
        // We've advanced to the end of the string, but we might still have a match...
        return match_1(cur, last, t...);
    }

    // '[!...]': current character must NOT be any of Chars.
    template <typename Iter, auto... Chars, typename... Tail>
    static constexpr bool
    match_1(Iter cur, const Iter last, detail::fnmatch::not_oneof<Chars...>, Tail... t) {
        if (cur == last) {
            return false;
        }
        if (((*cur == Chars) || ...)) {
            return false;
        }
        return match_1(std::next(cur), last, t...);
    }

    // '[...]': current character must be one of Chars.
    template <typename Iter, auto... Chars, typename... Tail>
    static constexpr bool
    match_1(Iter cur, const Iter last, detail::fnmatch::oneof<Chars...>, Tail... t) {
        if (cur == last) {
            return false;
        }
        if (((*cur == Chars) || ...)) {
            return match_1(std::next(cur), last, t...);
        } else {
            // current char is not in pattern
            return false;
        }
    }

    // Single literal character in the middle of the pattern.
    template <typename Iter,
              auto C,
              typename... Tail,
              // Only enable this overload if the tail is not entirely just<> items
              // (we have an optimization for that case)
              typename = std::enable_if_t<!(detail::fnmatch::is_just<Tail>() && ...)>>
    static constexpr bool match_1(Iter cur, const Iter last, detail::fnmatch::just<C>, Tail... t) {
        if (cur == last) {
            // We've reached the end, but we have more things to match
            return false;
        }
        if (*cur != C) {
            // Wrong char
            return false;
        } else {
            // Good char, keep going
            return match_1(std::next(cur), last, t...);
        }
    }

    // '?': any single character.
    template <typename Iter, typename... Tail>
    static constexpr bool match_1(Iter cur, const Iter last, detail::fnmatch::any_one, Tail... t) {
        if (cur == last) {
            return false;
        }
        return match_1(std::next(cur), last, t...);
    }

    // Base case: the pattern is exhausted; match iff the input is too.
    template <typename Iter>
    static constexpr bool match_1(Iter cur, Iter last) {
        return cur == last;
    }

public:
    // Match a NUL-terminated C string against the whole pattern.
    static constexpr bool match(const char* fname) {
        return match_1(fname, fname + detail::fnmatch::length(fname), Elems()...);
    }
};

/**
 * Compile an fnmatch pattern at compile time. `StringGenerator` is a callable
 * (typically a lambda) that returns the pattern string; passing a generator
 * rather than the string keeps the characters usable in constant expressions.
 */
template <typename StringGenerator, typename = decltype(std::declval<StringGenerator&>()())>
constexpr auto compile(StringGenerator&& s) {
    constexpr auto pattern = s();
    constexpr auto len = detail::fnmatch::length(pattern);
    return decltype(detail::fnmatch::compile_next<0, len>(s))();
}

pattern compile(std::string_view str);

} // namespace fnmatch

namespace detail::fnmatch {

class pattern_impl;

} // namespace detail::fnmatch

namespace fnmatch {

/**
 * A runtime-compiled fnmatch pattern. The compiled representation is
 * immutable and shared, so pattern objects are cheap to copy.
 */
class pattern {
    std::shared_ptr<const detail::fnmatch::pattern_impl> _impl;

    bool _match(const char* begin, const char* end) const noexcept;

public:
    /// Inputs shorter than this are matched from a stack buffer (no allocation).
    constexpr static std::size_t noalloc_size = 256;

    pattern(std::shared_ptr<const detail::fnmatch::pattern_impl> ptr)
        : _impl(ptr) {}
    ~pattern() = default;
    pattern(const pattern&) = default;
    pattern(pattern&&) = default;
    pattern& operator=(const pattern&) = default;
    pattern& operator=(pattern&&) = default;

    /// Match the character range [first, last) against this pattern.
    template <typename Iter>
    bool match(Iter first, Iter last) const {
        const auto dist = static_cast<std::size_t>(std::distance(first, last));
        if (dist >= noalloc_size) {
            // Long input: fall back to a heap-allocated copy.
            std::string copied(first, last);
            return _match(copied.data(), copied.data() + copied.size());
        }
        // Short input: stage it in a stack buffer to avoid allocating.
        char stage[noalloc_size];
        return _match(stage, std::copy(first, last, stage));
    }

    /// Match a NUL-terminated C string.
    bool match(const char* str) const {
        return match(str, str + dds::detail::fnmatch::length(str));
    }

    /// Match any character sequence (std::string, string_view, etc.).
    template <typename Seq>
    bool match(const Seq& seq) const {
        using std::begin;
        using std::end;
        return match(begin(seq), end(seq));
    }

    /// If the pattern is wildcard-free, its literal spelling.
    std::optional<std::string> literal_spelling() const noexcept;
};

} // namespace fnmatch

} // namespace dds

+ 32
- 0
src/dds/util/fnmatch.test.cpp View File

@@ -0,0 +1,32 @@
#include <dds/util/fnmatch.hpp>

#include <catch2/catch.hpp>

// Exercise the runtime-compiled fnmatch pattern: exact literals, a trailing
// '*' wildcard, and a '*' between literal segments.
TEST_CASE("Basic fnmatch matching") {
    auto pat = dds::fnmatch::compile("foo.bar");
    CHECK_FALSE(pat.match("foo.baz"));
    CHECK_FALSE(pat.match("foo."));
    CHECK_FALSE(pat.match("foo.barz"));
    CHECK_FALSE(pat.match("foo.bar "));
    CHECK_FALSE(pat.match(" foo.bar"));
    CHECK(pat.match("foo.bar"));

    pat = dds::fnmatch::compile("foo.*");
    CHECK(pat.match("foo."));
    auto m = pat.match("foo.b");
    CHECK(m);
    CHECK(pat.match("foo. "));
    CHECK_FALSE(pat.match("foo"));
    CHECK_FALSE(pat.match(" foo.bar"));

    pat = dds::fnmatch::compile("foo.*.cpp");
    for (auto fname : {"foo.bar.cpp", "foo..cpp", "foo.cat.cpp"}) {
        auto m = pat.match(fname);
        CHECK(m);
    }

    // Fix: the second entry previously duplicated "foo.cpp"; use a distinct
    // non-matching name (wrong suffix) so both rejection paths are covered.
    for (auto fname : {"foo.cpp", "foo.bar.cppx"}) {
        auto m = pat.match(fname);
        CHECK_FALSE(m);
    }
}

+ 330
- 0
src/dds/util/fs_transform.cpp View File

@@ -0,0 +1,330 @@
#include "./fs_transform.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/fs.hpp>

#include <range/v3/algorithm/any_of.hpp>
#include <range/v3/distance.hpp>
#include <range/v3/numeric/accumulate.hpp>
#include <semester/walk.hpp>

#include <nlohmann/json.hpp>

#include <iostream>

using namespace dds;

using require_obj = semester::require_type<json5::data::mapping_type>;
using require_array = semester::require_type<json5::data::array_type>;
using require_str = semester::require_type<std::string>;

/**
 * Parse a filesystem transformation from its JSON representation.
 *
 * Recognized top-level keys: "copy", "move", "remove", and "write". All paths
 * must be relative; "strip-components" must be a non-negative whole number;
 * include/exclude/only-matching entries must be valid glob strings. Any
 * violation raises semester::walk_error carrying the offending JSON path.
 */
dds::fs_transformation dds::fs_transformation::from_json(const json5::data& data) {
    fs_transformation ret;
    using namespace semester::walk_ops;

    // Engage the given std::optional member before its sub-keys are walked.
    auto prep_optional = [](auto& opt) {
        return [&](auto&&) {
            opt.emplace();
            return walk.pass;
        };
    };

    // Convert a JSON string to a path, rejecting absolute paths.
    auto str_to_path = [](std::string const& s) {
        auto p = fs::path(s);
        if (p.is_absolute()) {
            throw semester::walk_error(std::string(walk.path())
                                       + ": Only relative paths are accepted");
        }
        return p;
    };

    // Validate that the JSON number is a non-negative integer and narrow it.
    auto get_strip_components = [](double d) {
        if (d != double(int(d)) || d < 0) {
            throw semester::walk_error(std::string(walk.path()) + ": "
                                       + "'strip-components' should be a positive whole number");
        }
        return int(d);
    };

    // Build a walker that compiles each string in an array into a dds::glob.
    auto populate_globs = [&](std::vector<dds::glob>& globs) {
        return for_each{
            require_str{"Include/exclude list should be a list of globs"},
            put_into(std::back_inserter(globs),
                     [](const std::string& glob) {
                         try {
                             return dds::glob::compile(glob);
                         } catch (const std::runtime_error& e) {
                             throw semester::walk_error{std::string(walk.path()) + ": " + e.what()};
                         }
                     }),
        };
    };

    // Shared walker for the "copy" and "move" objects (same schema).
    auto populate_reloc = [&](auto& op) {
        return [&](auto&& dat) {
            op.emplace();
            return mapping{
                required_key{"from",
                             "a 'from' path is required",
                             require_str{"'from' should be a path string"},
                             put_into(op->from, str_to_path)},
                required_key{"to",
                             "a 'to' path is required",
                             require_str{"'to' should be a path string"},
                             put_into(op->to, str_to_path)},
                if_key{"strip-components",
                       require_type<double>{"'strip-components' should be an integer"},
                       put_into(op->strip_components, get_strip_components)},
                if_key{"include",
                       require_array{"'include' should be an array"},
                       populate_globs(op->include)},
                if_key{"exclude",
                       require_array{"'exclude' should be an array"},
                       populate_globs(op->exclude)},
            }(dat);
        };
    };

    walk(data,
         require_obj{"Each transform must be a JSON object"},
         mapping{
             if_key{"copy", populate_reloc(ret.copy)},
             if_key{"move", populate_reloc(ret.move)},
             if_key{"remove",
                    require_obj{"'remove' should be a JSON object"},
                    prep_optional(ret.remove),
                    mapping{
                        required_key{"path",
                                     "'path' is required",
                                     require_str{"'path' should be a string path to remove"},
                                     put_into(ret.remove->path, str_to_path)},
                        if_key{"only-matching",
                               require_array{"'only-matching' should be an array of globs"},
                               populate_globs(ret.remove->only_matching)},
                    }},
             if_key{"write",
                    require_obj{"'write' should be a JSON object"},
                    prep_optional(ret.write),
                    mapping{
                        required_key{"path",
                                     "'path' is required",
                                     require_str{"'path' should be a string path to write to"},
                                     put_into(ret.write->path, str_to_path)},
                        required_key{"content",
                                     "'content' is required",
                                     require_str{"'content' must be a string"},
                                     put_into(ret.write->content)},
                    }},
         });

    return ret;
}

namespace {

// True if `path` is matched by at least one of the given globs.
bool matches_any(path_ref path, const std::vector<glob>& globs) {
    for (const auto& gl : globs) {
        if (gl.match(path)) {
            return true;
        }
    }
    return false;
}

// Check whether `child` lives within `root` (after lexical normalization).
// Appending "/" to both before comparing ensures we match on whole path
// components, so "a/bc" is not considered inside "a/b".
bool parent_dir_of(fs::path root, fs::path child) {
    const auto root_str  = (root += "/").lexically_normal().generic_string();
    const auto child_str = (child += "/").lexically_normal().generic_string();
    // Prefix test anchored at position zero (rfind from 0 cannot scan forward).
    return child_str.rfind(root_str, 0) == 0;
}

/**
 * Execute a copy or move operation relative to `root`.
 *
 * Both endpoints must resolve to locations inside `root`, and the source must
 * exist; violations raise errc::invalid_repo_transform. A regular file is
 * copied/renamed directly. A directory is walked recursively: each entry is
 * filtered through the include/exclude globs, then has `strip_components`
 * leading path elements removed before being placed under the destination.
 */
void do_relocate(const dds::fs_transformation::copy_move_base& oper,
                 dds::path_ref                                 root,
                 bool                                          is_copy) {
    auto from = fs::weakly_canonical(root / oper.from);
    auto to   = fs::weakly_canonical(root / oper.to);
    if (!parent_dir_of(root, from)) {
        throw_external_error<errc::invalid_repo_transform>(
            "Filesystem transformation attempts to copy/move a file/directory from outside of the "
            "root [{}] into the root [{}].",
            from.string(),
            root.string());
    }
    if (!parent_dir_of(root, to)) {
        throw_external_error<errc::invalid_repo_transform>(
            "Filesystem transformation attempts to copy/move a file/directory [{}] to a "
            "destination outside of the restricted root [{}].",
            to.string(),
            root.string());
    }

    if (!fs::exists(from)) {
        // (Typo fix: message previously said "non-existint".)
        throw_external_error<errc::invalid_repo_transform>(
            "Filesystem transformation attempting to copy/move a non-existent file/directory [{}] "
            "to [{}].",
            from.string(),
            to.string());
    }

    fs::create_directories(to.parent_path());

    if (fs::is_regular_file(from)) {
        // Single file: glob filtering and component stripping do not apply.
        if (is_copy) {
            fs::copy_file(from, to, fs::copy_options::overwrite_existing);
        } else {
            safe_rename(from, to);
        }
        return;
    }

    for (auto item : fs::recursive_directory_iterator(from)) {
        auto relpath      = fs::relative(item, from);
        auto matches_glob = [&](auto glob) { return glob.match(relpath.string()); };
        // An empty include-list means "include everything".
        auto included = oper.include.empty() || ranges::any_of(oper.include, matches_glob);
        auto excluded = ranges::any_of(oper.exclude, matches_glob);
        if (!included || excluded) {
            continue;
        }

        // Skip entries that would vanish entirely after stripping.
        auto n_components = ranges::distance(relpath);
        if (n_components <= oper.strip_components) {
            continue;
        }

        // Drop the first `strip_components` path elements. std::divides
        // applies operator/ on paths, i.e. path concatenation.
        auto it = relpath.begin();
        std::advance(it, oper.strip_components);
        relpath = ranges::accumulate(it, relpath.end(), fs::path(), std::divides<>());

        auto dest = to / relpath;
        fs::create_directories(dest.parent_path());
        if (item.is_directory()) {
            fs::create_directories(dest);
        } else {
            if (is_copy) {
                fs::copy_file(item, dest, fs::copy_options::overwrite_existing);
            } else {
                safe_rename(item, dest);
            }
        }
    }
}

/**
 * Execute a remove operation relative to `root`.
 *
 * The target must be inside `root` and must exist. When the target is a
 * directory, only its non-directory children are deleted (optionally filtered
 * by the `only-matching` globs); a plain file target is removed directly.
 */
void do_remove(const struct fs_transformation::remove& oper, path_ref root) {
    auto from = fs::weakly_canonical(root / oper.path);
    if (!parent_dir_of(root, from)) {
        // (Grammar fix: message previously read "attempts to deletes".)
        throw_external_error<errc::invalid_repo_transform>(
            "Filesystem transformation attempts to delete files/directories outside of the "
            "root. Attempted to remove [{}]. Removal is restricted to [{}].",
            from.string(),
            root.string());
    }

    if (!fs::exists(from)) {
        // (Typo fix: message previously said "non-existint".)
        throw_external_error<errc::invalid_repo_transform>(
            "Filesystem transformation attempts to delete a non-existent file/directory [{}].",
            from.string());
    }

    if (fs::is_directory(from)) {
        for (auto child : fs::recursive_directory_iterator{from}) {
            if (child.is_directory()) {
                // Directories themselves are retained; only files are deleted.
                continue;
            }
            if (!oper.only_matching.empty() && !matches_any(child, oper.only_matching)) {
                continue;
            }
            fs::remove_all(child);
        }
    } else {
        fs::remove_all(from);
    }
}

/**
 * Execute a write operation relative to `root`: create or overwrite the
 * target file with the given literal content. The destination path must
 * resolve to a location inside `root`.
 */
void do_write(const struct fs_transformation::write& oper, path_ref root) {
    auto dest = fs::weakly_canonical(root / oper.path);
    if (!parent_dir_of(root, dest)) {
        throw_external_error<errc::invalid_repo_transform>(
            "Filesystem transformation is trying to write outside of the root. Attempted to write "
            "to [{}]. Writing is restricted to [{}].",
            dest.string(),
            root.string());
    }

    // (Removed a leftover debug print of the file content to stdout.)
    auto of = dds::open(dest, std::ios::binary | std::ios::out);
    of << oper.content;
}

} // namespace

/**
 * Apply this transformation beneath `root_`. Operations run in a fixed
 * order: copy, then move, then remove, then write.
 */
void dds::fs_transformation::apply_to(dds::path_ref root_) const {
    const auto root = fs::weakly_canonical(root_);
    if (copy) {
        do_relocate(*copy, root, /*is_copy=*/true);
    }
    if (move) {
        do_relocate(*move, root, /*is_copy=*/false);
    }
    if (remove) {
        do_remove(*remove, root);
    }
    if (write) {
        do_write(*write, root);
    }
}

namespace {

// Serialize a copy/move operation into its JSON object representation.
nlohmann::json reloc_as_json(const fs_transformation::copy_move_base& oper) {
    auto obj                = nlohmann::json::object();
    obj["from"]             = oper.from.string();
    obj["to"]               = oper.to.string();
    obj["strip-components"] = oper.strip_components;

    auto inc_list = nlohmann::json::array();
    auto exc_list = nlohmann::json::array();
    for (const auto& inc : oper.include) {
        inc_list.push_back(inc.string());
    }
    for (const auto& exc : oper.exclude) {
        exc_list.push_back(exc.string());
    }

    // Empty include/exclude arrays are omitted from the output entirely.
    if (!inc_list.empty()) {
        obj["include"] = inc_list;
    }
    if (!exc_list.empty()) {
        obj["exclude"] = exc_list;
    }

    return obj;
}

} // namespace

/**
 * Serialize this transformation as a JSON string (the inverse of from_json).
 * Disengaged operations are omitted from the output.
 */
std::string fs_transformation::as_json() const noexcept {
    auto obj = nlohmann::json::object();
    if (copy) {
        obj["copy"] = reloc_as_json(*copy);
    }
    if (move) {
        obj["move"] = reloc_as_json(*move);
    }
    if (remove) {
        auto rm    = nlohmann::json::object();
        rm["path"] = remove->path.string();
        if (!remove->only_matching.empty()) {
            auto if_arr = nlohmann::json::array();
            for (auto&& gl : remove->only_matching) {
                if_arr.push_back(gl.string());
            }
            // Bug fix: this previously assigned `rm` to itself
            // (rm["only-matching"] = rm), silently dropping the glob list
            // from the serialized output.
            rm["only-matching"] = if_arr;
        }
        obj["remove"] = rm;
    }
    if (write) {
        auto wr       = nlohmann::json::object();
        wr["path"]    = write->path.string();
        wr["content"] = write->content;
        obj["write"]  = wr;
    }

    return to_string(obj);
}

+ 49
- 0
src/dds/util/fs_transform.hpp View File

@@ -0,0 +1,49 @@
#pragma once

#include "./fs.hpp"
#include "./glob.hpp"

#include <json5/data.hpp>

#include <optional>
#include <variant>

namespace dds {

struct fs_transformation {
struct copy_move_base {
fs::path from;
fs::path to;

int strip_components = 0;
std::vector<dds::glob> include;
std::vector<dds::glob> exclude;
};

struct copy : copy_move_base {};
struct move : copy_move_base {};

struct remove {
fs::path path;

std::vector<dds::glob> only_matching;
};

struct write {
fs::path path;
std::string content;
};

std::optional<struct copy> copy;
std::optional<struct move> move;
std::optional<remove> remove;
std::optional<struct write> write;

void apply_to(path_ref root) const;

static fs_transformation from_json(const json5::data&);

std::string as_json() const noexcept;
};

} // namespace dds

+ 242
- 0
src/dds/util/glob.cpp View File

@@ -0,0 +1,242 @@
#include "./glob.hpp"

#include "./fnmatch.hpp"

#include <neo/assert.hpp>

#include <optional>

namespace {

// Result of one re-entry into the glob walk "coroutine" (see glob_iter_state).
enum glob_coro_ret {
    reenter_again,  // state advanced; call reenter() again
    yield_value,    // a matching directory entry is available via get_entry()
    done,           // traversal of this directory level is complete
};

} // namespace

namespace dds::detail {

// One element of a compiled path glob. An engaged `pattern` matches a single
// path component; a disengaged optional represents the recursive `**` element.
struct rglob_item {
    std::optional<dds::fnmatch::pattern> pattern;
};

// Compiled form of a whole glob expression: the original spelling plus the
// '/'-separated sequence of per-component items.
struct glob_impl {
    std::string spelling;
    std::vector<rglob_item> items;
};

/**
 * State for one directory level of a glob-driven filesystem walk.
 *
 * reenter() is a hand-rolled coroutine: the CORO_* macros record a resume
 * point in _state_label (keyed on __LINE__) and each call to reenter()
 * switches back to it. Descent into a subdirectory chains a child state in
 * _next_state, to which reenter() and get_entry() forward while it is live.
 */
struct glob_iter_state {
    // Directory this level iterates, and the glob being evaluated.
    fs::path root;
    const glob_impl& impl;
    // The pattern element applied to the entries of `root`.
    std::vector<rglob_item>::const_iterator pat_iter = impl.items.begin();

    // True when `pat_iter` is the final element of the glob.
    const bool is_leaf_pattern = std::next(pat_iter) == impl.items.end();

    fs::directory_entry entry{};
    fs::directory_iterator dir_iter{root};
    // A disengaged pattern marks the recursive '**' element.
    const bool is_rglob = !pat_iter->pattern.has_value();

    // Child state while descended into a subdirectory; null otherwise.
    std::unique_ptr<glob_iter_state> _next_state{};
    // Resume label for the coroutine switch in reenter(); 0 = start of loop.
    int _state_label = 0;

    // The most recently yielded entry, taken from the deepest active level.
    fs::directory_entry get_entry() const noexcept {
        if (_next_state) {
            return _next_state->get_entry();
        }
        return entry;
    }

// Coroutine plumbing. Each expansion of CORO_REENTER_POINT plants a unique
// `case __LINE__:` label inside the switch in reenter(); CORO_SAVE_POINT
// records that label so the next call resumes there.
#define CORO_REENTER_POINT \
case __LINE__: \
static_assert(true)
#define CORO_SAVE_POINT _state_label = __LINE__

// Yield `E` to the caller and resume just after this point on re-entry.
#define YIELD(E) \
do { \
CORO_SAVE_POINT; \
entry = E; \
return yield_value; \
} while (0); \
CORO_REENTER_POINT

// Finish this directory level.
#define EXIT_DIRECTORY() \
do { \
return done; \
} while (0); \
CORO_REENTER_POINT

// Descend into directory D, matching from pattern element Pat.
#define ENTER_DIRECTORY(D, Pat) \
do { \
_next_state.reset(new glob_iter_state{fs::path(D), impl, Pat}); \
CORO_SAVE_POINT; \
return reenter_again; \
} while (0); \
CORO_REENTER_POINT

// Restart the loop at the next directory entry.
#define CONTINUE() \
do { \
_state_label = 0; \
return reenter_again; \
} while (0)

    glob_coro_ret reenter() {
        // While a child level is active, drive it instead of this one.
        if (_next_state) {
            auto st = _next_state->reenter();
            if (st == done) {
                _next_state.reset();
                return reenter_again;
            }
            return st;
        }

        const bool dir_done = dir_iter == fs::directory_iterator();
        const auto cur_pattern = pat_iter->pattern;
        const bool cur_is_rglob = !cur_pattern.has_value();

        switch (_state_label) {
        case 0:
            //
            if (dir_done) {
                EXIT_DIRECTORY();
            }
            entry = *dir_iter++;

            if (cur_is_rglob) {
                // '**': a trailing '**' yields every entry; otherwise try the
                // next pattern element against this entry first.
                if (is_leaf_pattern) {
                    YIELD(entry);
                } else if (std::next(pat_iter)->pattern.value().match(
                               fs::path(entry).filename().string())) {
                    // The next pattern in the glob will match this file directly.
                    if (entry.is_directory()) {
                        ENTER_DIRECTORY(entry, std::next(pat_iter));
                    } else {
                        YIELD(entry);
                    }
                }
                if (entry.is_directory()) {
                    // Recurse while keeping the '**' element active.
                    ENTER_DIRECTORY(entry, pat_iter);
                } else {
                    // A non-directory file matches an `**` pattern? Ignore it.
                }
            } else {
                if (cur_pattern->match(fs::path(entry).filename().string())) {
                    // We match this entry
                    if (is_leaf_pattern) {
                        YIELD(entry);
                    } else if (entry.is_directory()) {
                        ENTER_DIRECTORY(entry, std::next(pat_iter));
                    }
                }
            }
        }

        CONTINUE();
    }
}; // struct glob_iter_state

} // namespace dds::detail

namespace {

// Compile a glob expression by splitting it on '/' and compiling each path
// element with fnmatch. The literal element "**" becomes a disengaged
// (recursive) item. Throws std::runtime_error for an empty expression.
dds::detail::glob_impl compile_glob_expr(std::string_view pattern) {
    using namespace dds::detail;

    glob_impl result{};
    result.spelling = std::string(pattern);

    while (!pattern.empty()) {
        const auto slash_pos = pattern.find('/');
        const auto element   = pattern.substr(0, slash_pos);
        // Advance past the element (and separator, if any).
        pattern = (slash_pos == pattern.npos) ? std::string_view() : pattern.substr(slash_pos + 1);

        if (element == "**") {
            // Disengaged pattern == recursive "match any depth" element.
            result.items.emplace_back();
        } else {
            result.items.push_back({dds::fnmatch::compile(element)});
        }
    }

    if (result.items.empty()) {
        throw std::runtime_error("Invalid path glob expression (Must not be empty!)");
    }

    return result;
}

} // namespace

// Construct an iterator that walks `root` with the given compiled glob and
// immediately advances to the first matching entry (or to the end).
dds::glob_iterator::glob_iterator(dds::glob gl, dds::path_ref root)
    : _impl(gl._impl)
    , _done(false) {
    auto initial = detail::glob_iter_state{root, *_impl};
    _state       = std::make_shared<detail::glob_iter_state>(std::move(initial));
    increment();
}

void dds::glob_iterator::increment() {
auto st = reenter_again;
while (st == reenter_again) {
st = _state->reenter();
}
_done = st == done;
}

// Current entry: delegated to the deepest active level of the walk state.
dds::fs::directory_entry dds::glob_iterator::dereference() const noexcept {
    return _state->get_entry();
}

// Compile a glob expression into a shared, immutable implementation.
// Throws std::runtime_error (from compile_glob_expr) if the pattern is empty.
dds::glob dds::glob::compile(std::string_view pattern) {
    glob ret;
    ret._impl = std::make_shared<dds::detail::glob_impl>(compile_glob_expr(pattern));
    return ret;
}

namespace {

using path_iter = dds::fs::path::const_iterator;
using pat_iter = std::vector<dds::detail::rglob_item>::const_iterator;

// Recursively test whether the path components [elem_it, elem_stop) satisfy
// the pattern items [pat_it, pat_stop). A disengaged item is the recursive
// '**' element and may consume zero or more components.
bool check_matches(path_iter elem_it,
                   const path_iter elem_stop,
                   pat_iter pat_it,
                   const pat_iter pat_stop) noexcept {
    const bool elems_done = elem_it == elem_stop;
    const bool pats_done  = pat_it == pat_stop;
    if (elems_done || pats_done) {
        // Only a simultaneous end of both sequences is a match.
        return elems_done && pats_done;
    }
    if (!pat_it->pattern.has_value()) {
        // '**': try matching the remaining pattern at every suffix position.
        const auto next_pat = std::next(pat_it);
        while (elem_it != elem_stop) {
            if (check_matches(elem_it, elem_stop, next_pat, pat_stop)) {
                return true;
            }
            ++elem_it;
        }
        return false;
    }
    // An ordinary fnmatch element must match the current component exactly.
    if (!pat_it->pattern->match(elem_it->string())) {
        return false;
    }
    return check_matches(std::next(elem_it), elem_stop, std::next(pat_it), pat_stop);
}

} // namespace

// Test a whole path against the compiled pattern, component by component.
bool dds::glob::match(dds::path_ref filepath) const noexcept {
    const auto& items = _impl->items;
    return check_matches(filepath.begin(), filepath.end(), items.cbegin(), items.cend());
}

// The original spelling of the glob expression, as given to compile().
std::string_view dds::glob::string() const noexcept { return _impl->spelling; }

+ 63
- 0
src/dds/util/glob.hpp View File

@@ -0,0 +1,63 @@
#pragma once

#include <dds/util/fs.hpp>

#include <neo/iterator_facade.hpp>

#include <string_view>
#include <vector>

namespace dds {

namespace detail {

struct glob_impl;

struct glob_iter_state;

} // namespace detail

class glob;

/**
 * Lazily yields the filesystem entries beneath a root directory that match a
 * compiled `glob`. Built on neo::iterator_facade; exhaustion is detected by
 * comparing against `sentinel_type`.
 */
class glob_iterator : public neo::iterator_facade<glob_iterator> {
    // The compiled pattern being walked (shared with the owning glob).
    std::shared_ptr<const detail::glob_impl> _impl;

    // Mutable traversal state for the in-progress directory walk.
    std::shared_ptr<detail::glob_iter_state> _state;

    // True once exhausted; a default-constructed iterator is already done.
    bool _done = true;

public:
    glob_iterator() = default;
    glob_iterator(glob impl, path_ref root);

    fs::directory_entry dereference() const noexcept;
    void increment();

    struct sentinel_type {};

    bool at_end() const noexcept { return _done; }

    glob_iterator begin() const noexcept { return *this; }
    auto end() const noexcept { return sentinel_type{}; }
};

/**
 * A compiled path glob (e.g. "src/**/*.test.cpp"). Supports matching paths
 * directly and scanning directories for matching entries. Cheap to copy: the
 * compiled form is shared and immutable.
 */
class glob {
    friend class glob_iterator;
    std::shared_ptr<const detail::glob_impl> _impl;

    glob() = default;

public:
    /// Compile a glob expression. Throws std::runtime_error if it is empty.
    static glob compile(std::string_view str);

    /// Begin scanning for matching entries beneath `root`.
    auto scan_from(path_ref root) const noexcept { return glob_iterator(*this, root); }

    // Range interface: scanning rooted at the current working directory.
    auto begin() const noexcept { return scan_from(fs::current_path()); }
    auto end() const noexcept { return glob_iterator::sentinel_type{}; }

    /// Test whether the given path matches this glob.
    bool match(path_ref) const noexcept;

    /// The original spelling of the glob expression.
    std::string_view string() const noexcept;
};

} // namespace dds

+ 69
- 0
src/dds/util/glob.test.cpp View File

@@ -0,0 +1,69 @@
#include <dds/util/glob.hpp>

#include <catch2/catch.hpp>

// Exercise directory scanning: manual iteration, range-for, single-file
// patterns, and the recursive '**' element rooted at the source tree.
TEST_CASE("Simple glob") {
    auto this_dir = dds::fs::path(__FILE__).parent_path();
    auto glob = dds::glob::compile("*.test.cpp");

    // Manual iteration; `el` is intentionally unused -- this loop only
    // checks that dereference/increment work to exhaustion.
    auto it = glob.scan_from(this_dir);
    for (; it != glob.end(); ++it) {
        auto&& el = *it;
    }

    int n_found = 0;
    for (auto found : glob.scan_from(this_dir)) {
        ++n_found;
    }
    CHECK(n_found > 0);

    // An exact filename glob should find exactly this file.
    n_found = 0;
    for (auto found : dds::glob::compile("glob.test.cpp").scan_from(this_dir)) {
        n_found++;
    }
    CHECK(n_found == 1);

    // NOTE(review): these scans start at the current working directory, so
    // they presume the tests run from the repository root -- confirm.
    auto me_it = dds::glob::compile("src/**/glob.test.cpp").begin();
    REQUIRE(!me_it.at_end());
    ++me_it;
    CHECK(me_it.at_end());

    auto all_tests = dds::glob::compile("src/**/*.test.cpp");
    n_found = 0;
    for (auto f : all_tests) {
        n_found += 1;
    }
    CHECK(n_found > 10);
    CHECK(n_found < 1000);  // If we have more than 1000 .test files, that's crazy
}

// Pure path-matching tests (no filesystem access): single-component '*',
// one '**' element, and two '**' elements in the same pattern.
TEST_CASE("Check globs") {
    auto glob = dds::glob::compile("foo/bar*/baz");
    CHECK(glob.match("foo/bar/baz"));
    CHECK(glob.match("foo/barffff/baz"));
    CHECK_FALSE(glob.match("foo/bar"));
    CHECK_FALSE(glob.match("foo/ffbar/baz"));
    CHECK_FALSE(glob.match("foo/bar/bazf"));
    CHECK_FALSE(glob.match("foo/bar/"));

    glob = dds::glob::compile("foo/**/bar.txt");
    CHECK(glob.match("foo/bar.txt"));
    CHECK(glob.match("foo/thing/bar.txt"));
    CHECK(glob.match("foo/thing/another/bar.txt"));
    CHECK_FALSE(glob.match("foo/fail"));
    CHECK_FALSE(glob.match("foo/bar.txtf"));
    CHECK_FALSE(glob.match("foo/bar.txt/f"));
    CHECK_FALSE(glob.match("foo/fbar.txt"));
    CHECK_FALSE(glob.match("foo/thing/fail"));
    CHECK_FALSE(glob.match("foo/thing/another/fail"));
    CHECK_FALSE(glob.match("foo/thing/bar.txt/fail"));
    CHECK_FALSE(glob.match("foo/bar.txt/fail"));

    glob = dds::glob::compile("foo/**/bar/**/baz.txt");
    CHECK(glob.match("foo/bar/baz.txt"));
    CHECK(glob.match("foo/thing/bar/baz.txt"));
    // Fix: this line previously duplicated the case above; instead check
    // multi-component expansion of both '**' segments at once.
    CHECK(glob.match("foo/a/b/bar/c/d/baz.txt"));
    CHECK(glob.match("foo/thing/bar/thing/baz.txt"));
    CHECK(glob.match("foo/bar/thing/baz.txt"));
    CHECK(glob.match("foo/bar/baz/baz.txt"));
}

+ 1
- 1
src/dds/util/paths.win.cpp View File

@@ -30,7 +30,7 @@ fs::path appdatalocal_dir() {
}

fs::path appdata_dir() {
auto env = std::getenv("LocalAppData");
auto env = std::getenv("AppData");
assert(env);

return fs::absolute(fs::path(env));

+ 5
- 0
tests/deps/use-catch2/gcc.tc.jsonc View File

@@ -0,0 +1,5 @@
{
"compiler_id": 'gnu',
"cxx_version": 'c++17',
"cxx_compiler": 'g++-9',
}

+ 3
- 0
tests/deps/use-catch2/msvc.tc.jsonc View File

@@ -0,0 +1,3 @@
{
"compiler_id": 'msvc',
}

+ 43
- 0
tests/deps/use-catch2/project/catalog.json5 View File

@@ -0,0 +1,43 @@
{
"version": 1,
"packages": {
"catch2": {
"2.12.4": {
"git": {
"url": "https://github.com/catchorg/Catch2.git",
"ref": "v2.12.4",
"auto-lib": "catch2/catch2",
"transform": [
{
"move": {
"from": "include",
"to": "include/catch2",
}
},
{
"copy": {
"from": "include",
"to": "src"
},
write: {
path: 'include/catch2/catch_with_main.hpp',
content: '\
#pragma once \n\
\n\
#define CATCH_CONFIG_MAIN \n\
#include "./catch.hpp" \n\
\n\
namespace Catch { \n\
\n\
CATCH_REGISTER_REPORTER("console", ConsoleReporter) \n\
\n\
} // namespace Catch \n\
'
}
}
]
}
}
}
}
}

+ 4
- 0
tests/deps/use-catch2/project/library.json5 View File

@@ -0,0 +1,4 @@
{
name: 'use-catch2',
uses: ['catch2/catch2']
}

+ 8
- 0
tests/deps/use-catch2/project/package.json5 View File

@@ -0,0 +1,8 @@
{
name: 'use-catch2',
version: '1.0.0',
namespace: 'test',
depends: {
'catch2': '2.12.4'
}
}

+ 6
- 0
tests/deps/use-catch2/project/src/use-catch2.main.cpp View File

@@ -0,0 +1,6 @@
#include <catch2/catch_with_main.hpp>

// Smoke test: the transformed Catch2 package must supply a main() through
// catch_with_main.hpp and basic assertions must work.
TEST_CASE("I am a simple test case") {
    CHECK((2 + 2) == 4);
    CHECK_FALSE((2 + 2) == 5);
}

+ 11
- 0
tests/deps/use-catch2/test_use_catch2.py View File

@@ -0,0 +1,11 @@
from tests import DDS

from dds_ci import proc


def test_get_build_use_catch2(dds: DDS) -> None:
    """Import the test catalog, build against the transformed Catch2 package,
    and run the produced test executable."""
    dds.catalog_import(dds.source_root / 'catalog.json5')
    # Select the toolchain file matching the default builtin toolchain.
    if 'gcc' in dds.default_builtin_toolchain:
        tc_file = 'gcc.tc.jsonc'
    else:
        tc_file = 'msvc.tc.jsonc'
    dds.build(toolchain=str(dds.test_dir / tc_file))
    exe = (dds.build_dir / 'use-catch2').with_suffix(dds.exe_suffix)
    proc.check_run(exe)

+ 7
- 0
tests/deps/use-cryptopp/gcc.tc.jsonc View File

@@ -0,0 +1,7 @@
{
"compiler_id": "gnu",
"cxx_compiler": "g++-9",
"cxx_version": "c++17",
// All required for Crypto++ intrinsics:
"flags": "-msse2 -msse3 -mssse3 -msse4.1 -msse4.2 -mpclmul -maes -mavx -mavx2 -msha -Wa,-q -DCRYPTOPP_DISABLE_ASM=1",
}

+ 4
- 0
tests/deps/use-cryptopp/msvc.tc.jsonc View File

@@ -0,0 +1,4 @@
{
"compiler_id": 'msvc',
"flags": "/std:c++17 /DCRYPTOPP_DISABLE_ASM=1"
}

+ 27
- 0
tests/deps/use-cryptopp/project/catalog.json View File

@@ -0,0 +1,27 @@
{
"version": 1,
"packages": {
"cryptopp": {
"8.2.0": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0",
"auto-lib": "cryptopp/cryptopp",
"transform": [
{
"move": {
"from": ".",
"to": "src/cryptopp",
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
}
]
}
}
}
}
}

+ 4
- 0
tests/deps/use-cryptopp/project/library.json5 View File

@@ -0,0 +1,4 @@
{
name: 'use-cryptopp',
uses: ['cryptopp/cryptopp']
}

+ 8
- 0
tests/deps/use-cryptopp/project/package.json5 View File

@@ -0,0 +1,8 @@
{
name: 'use-cryptopp',
version: '1.0.0',
namespace: 'test',
depends: {
'cryptopp': '8.2.0'
}
}

+ 17
- 0
tests/deps/use-cryptopp/project/src/use-cryptopp.main.cpp View File

@@ -0,0 +1,17 @@
#include <cryptopp/osrng.h>

#include <string>

// Smoke test for the transformed Crypto++ package: fill a buffer from the OS
// RNG and exit 0 iff at least one byte is nonzero (1 otherwise).
int main() {
    std::string buf(256, '\0');
    CryptoPP::OS_GenerateRandomBlock(false,
                                     reinterpret_cast<CryptoPP::byte*>(buf.data()),
                                     buf.size());
    for (std::string::size_type i = 0; i < buf.size(); ++i) {
        if (buf[i] != '\x00') {
            return 0;
        }
    }
    return 1;
}

+ 12
- 0
tests/deps/use-cryptopp/test_use_cryptopp.py View File

@@ -0,0 +1,12 @@
from tests import DDS

from dds_ci import proc


def test_get_build_use_cryptopp(dds: DDS) -> None:
    """Import the test catalog, build against the transformed Crypto++ package,
    and run the produced executable."""
    dds.catalog_import(dds.source_root / 'catalog.json')
    # Select the toolchain file matching the default builtin toolchain.
    if 'gcc' in dds.default_builtin_toolchain:
        tc_file = 'gcc.tc.jsonc'
    else:
        tc_file = 'msvc.tc.jsonc'
    dds.build(toolchain=str(dds.test_dir / tc_file))
    exe = (dds.build_dir / 'use-cryptopp').with_suffix(dds.exe_suffix)
    proc.check_run(exe)

+ 7
- 0
tests/deps/use-libsodium/gcc.tc.jsonc View File

@@ -0,0 +1,7 @@
{
"compiler_id": 'gnu',
"cxx_version": 'c++17',
"cxx_compiler": 'g++-9',
"flags": '-DSODIUM_STATIC',
"link_flags": '-static-libgcc -static-libstdc++'
}

+ 4
- 0
tests/deps/use-libsodium/msvc.tc.jsonc View File

@@ -0,0 +1,4 @@
{
"compiler_id": 'msvc',
"flags": '-DSODIUM_STATIC',
}

+ 44
- 0
tests/deps/use-libsodium/project/catalog.json View File

@@ -0,0 +1,44 @@
{
"version": 1,
"packages": {
"libsodium": {
"1.0.18": {
"git": {
"url": "https://github.com/jedisct1/libsodium.git",
"ref": "1.0.18",
"auto-lib": "sodium/sodium",
"transform": [
{
"move": {
"from": "src/libsodium/include",
"to": "include/"
}
},
{
"copy": {
"from": "builds/msvc/version.h",
"to": "include/sodium/version.h"
}
},
{
"move": {
"from": "src/libsodium",
"to": "src/"
},
"remove": {
"path": "src/libsodium"
}
},
{
"copy": {
"from": "include/",
"to": "src/",
"strip-components": 1,
}
}
]
}
}
}
}
}

+ 4
- 0
tests/deps/use-libsodium/project/library.json5 View File

@@ -0,0 +1,4 @@
{
name: 'use-libsodium',
uses: ['sodium/sodium']
}

+ 8
- 0
tests/deps/use-libsodium/project/package.json5 View File

@@ -0,0 +1,8 @@
{
name: 'use-libsodium',
version: '1.0.0',
namespace: 'test',
depends: {
'libsodium': '1.0.18'
}
}

+ 14
- 0
tests/deps/use-libsodium/project/src/use-libsodium.main.cpp View File

@@ -0,0 +1,14 @@
#include <sodium.h>

#include <algorithm>

// Smoke test for the transformed libsodium package: request 256 random bytes
// and exit 0 iff at least one byte is nonzero (1 otherwise).
int main() {
    char arr[256] = {};
    ::randombytes_buf(arr, sizeof arr);
    const bool any_nonzero
        = std::any_of(std::begin(arr), std::end(arr), [](char b) { return b != '\x00'; });
    return any_nonzero ? 0 : 1;
}

+ 12
- 0
tests/deps/use-libsodium/test_use_libsodium.py View File

@@ -0,0 +1,12 @@
from tests import DDS

from dds_ci import proc


def test_get_build_use_libsodium(dds: DDS) -> None:
    """Import the test catalog, build against the transformed libsodium
    package, and run the produced executable."""
    dds.catalog_import(dds.source_root / 'catalog.json')
    # Select the toolchain file matching the default builtin toolchain.
    if 'gcc' in dds.default_builtin_toolchain:
        tc_file = 'gcc.tc.jsonc'
    else:
        tc_file = 'msvc.tc.jsonc'
    dds.build(toolchain=str(dds.test_dir / tc_file))
    exe = (dds.build_dir / 'use-libsodium').with_suffix(dds.exe_suffix)
    proc.check_run(exe)

+ 15
- 21
tools/ci.py View File

@@ -87,11 +87,11 @@ def main(argv: Sequence[str]) -> int:
else:
assert False, 'impossible'

cat_path = paths.BUILD_DIR / 'catalog.db'
if cat_path.is_file():
cat_path.unlink()
old_cat_path = paths.PREBUILT_DIR / 'catalog.db'
if old_cat_path.is_file():
old_cat_path.unlink()

ci_repo_dir = paths.BUILD_DIR / '_ci-repo'
ci_repo_dir = paths.PREBUILT_DIR / '_ci-repo'
if ci_repo_dir.exists():
shutil.rmtree(ci_repo_dir)

@@ -99,16 +99,13 @@ def main(argv: Sequence[str]) -> int:
paths.PREBUILT_DDS,
'catalog',
'import',
('--catalog', cat_path),
('--catalog', old_cat_path),
('--json', paths.PROJECT_ROOT / 'catalog.json'),
])
self_build(
paths.PREBUILT_DDS,
toolchain=opts.toolchain,
dds_flags=[
('--catalog', cat_path),
('--repo-dir', ci_repo_dir),
])
self_build(paths.PREBUILT_DDS,
toolchain=opts.toolchain,
cat_path=old_cat_path,
dds_flags=[('--repo-dir', ci_repo_dir)])
print('Main build PASSED!')
print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}')

@@ -118,21 +115,18 @@ def main(argv: Sequence[str]) -> int:
)
return 0

# Delete the catalog database, since there may be schema changes since the
# bootstrap executable was built
cat_path.unlink()

new_cat_path = paths.BUILD_DIR / 'catalog.db'
proc.check_run([
paths.CUR_BUILT_DDS,
'catalog',
'import',
('--catalog', cat_path),
('--catalog', new_cat_path),
('--json', paths.PROJECT_ROOT / 'catalog.json'),
])
self_build(
paths.CUR_BUILT_DDS,
toolchain=opts.toolchain,
dds_flags=[f'--repo-dir={ci_repo_dir}', f'--catalog={cat_path}'])
self_build(paths.CUR_BUILT_DDS,
toolchain=opts.toolchain,
cat_path=new_cat_path,
dds_flags=[f'--repo-dir={ci_repo_dir}'])
print('Bootstrap test PASSED!')

return pytest.main([

+ 37
- 12
tools/gen-catalog-json.py View File

@@ -10,11 +10,13 @@ class Git(NamedTuple):
auto_lib: Optional[str] = None

def to_dict(self) -> dict:
return {
d = {
'url': self.url,
'ref': self.ref,
'auto-lib': self.auto_lib,
}
if self.auto_lib:
d['auto-lib'] = self.auto_lib
return d


RemoteInfo = Union[Git]
@@ -114,18 +116,20 @@ packages = [
description='A modern and low-level C++ SQLite API',
git_url='https://github.com/vector-of-bool/neo-sqlite3.git',
),
Package('neo-fun', [
Version(
many_versions(
'neo-fun',
(
'0.1.0',
description='Some library fundamentals that you might find useful',
remote=Git('https://github.com/vector-of-bool/neo-fun.git',
'0.1.0')),
Version(
'0.1.1',
description='Some library fundamentals that you might find useful',
remote=Git('https://github.com/vector-of-bool/neo-fun.git',
'0.1.1'))
]),
'0.2.0',
'0.2.1',
'0.3.0',
'0.3.1',
'0.3.2',
),
description='Some library fundamentals that you might find useful',
git_url='https://github.com/vector-of-bool/neo-fun.git',
),
many_versions(
'neo-concepts',
(
@@ -133,6 +137,9 @@ packages = [
'0.2.0',
'0.2.1',
'0.2.2',
'0.3.0',
'0.3.1',
'0.3.2',
),
description=
'Minimal C++ concepts library. Contains many definitions from C++20.',
@@ -190,6 +197,24 @@ packages = [
'neo-fun': '^0.1.1',
'neo-concepts': '^0.2.2',
}),
Version(
'0.2.0',
description='A C++ library to process recursive dynamic data',
remote=Git('https://github.com/vector-of-bool/semester.git',
'0.2.0'),
depends={
'neo-fun': '^0.3.2',
'neo-concepts': '^0.3.2',
}),
Version(
'0.2.1',
description='A C++ library to process recursive dynamic data',
remote=Git('https://github.com/vector-of-bool/semester.git',
'0.2.1'),
depends={
'neo-fun': '^0.3.2',
'neo-concepts': '^0.3.2',
}),
]),
Package('ctre', [
Version(

+ 1
- 1
tools/msvc.jsonc View File

@@ -2,7 +2,7 @@
"$schema": "../res/toolchain-schema.json",
"compiler_id": "msvc",
"flags": [
"/experimental:preprocessor", // Required for range-v3
"/Zc:preprocessor", // Required for range-v3
"/DSPDLOG_COMPILED_LIB", // Required to use spdlog as a compiled lib
"/std:c++latest",
],

+ 3
- 2
tools/self_build.py View File

@@ -15,6 +15,7 @@ def self_build(exe: Path,
*,
toolchain: str,
lmi_path: Path = None,
cat_path: Path = Path('_build/catalog.db'),
dds_flags: proc.CommandLine = ()):
# Copy the exe to another location, as windows refuses to let a binary be
# replaced while it is executing
@@ -25,13 +26,13 @@ def self_build(exe: Path,
new_exe,
'catalog',
'import',
f'--catalog=_build/catalog.db',
f'--catalog={cat_path}',
f'--json=catalog.json',
)
proc.check_run(
new_exe,
'build',
f'--catalog=_build/catalog.db',
f'--catalog={cat_path}',
f'--repo-dir=_build/ci-repo',
dds_flags,
('--toolchain', toolchain),

Loading…
Cancel
Save