
Merge branch 'release/0.1.0-alpha.6' into develop

default_compile_flags
vector-of-bool 3 years ago
parent
commit
1159a0ef6f
100 changed files with 3370 additions and 1043 deletions
  1. +1 -1     .github/ISSUE_TEMPLATE/bug_report.md
  2. +1 -1     Makefile
  3. +6 -2     docs/conf.py
  4. +30 -24   docs/design.rst
  5. +88 -164  docs/dev/building.rst
  6. +79 -0    docs/dev/ci-api.rst
  7. +91 -0    docs/dev/env.rst
  8. +12 -1    docs/dev/index.rst
  9. +29 -0    docs/dev/reqs.rst
  10. +74 -0   docs/dev/testing.rst
  11. +1 -1    docs/err/git-url-ref-mutual-req.rst
  12. +0 -9    docs/err/invalid-catalog-json.rst
  13. +0 -10   docs/err/invalid-repo-transform.rst
  14. +0 -2    docs/err/no-catalog-remote-info.rst
  15. +0 -7    docs/err/no-such-catalog-package.rst
  16. +3 -3    docs/err/sdist-exists.rst
  17. +187 -0  docs/guide/build-deps.rst
  18. +0 -286  docs/guide/catalog.rst
  19. +45 -203 docs/guide/cmake.rst
  20. +3 -2    docs/guide/index.rst
  21. +2 -2    docs/guide/interdeps.rst
  22. +39 -12  docs/guide/packages.rst
  23. +90 -0   docs/guide/pkg-cache.rst
  24. +231 -0  docs/guide/remote-pkgs.rst
  25. +0 -92   docs/guide/repo.rst
  26. +31 -9   docs/guide/source-dists.rst
  27. +10 -0   docs/guide/toolchains.rst
  28. +214 -0  docs/howto/cmake.rst
  29. +124 -0  docs/howto/deps.rst
  30. +11 -0   docs/howto/index.rst
  31. +1 -0    docs/index.rst
  32. +1 -1    docs/tut/hello-test.rst
  33. +7 -3    docs/tut/hello-world.rst
  34. +12 -7   docs/tut/index.rst
  35. +154 -0  docs/tut/install.rst
  36. +1 -1    package.jsonc
  37. +354 -1  poetry.lock
  38. +1 -0    pyproject.toml
  39. +23 -6   src/dds.main.cpp
  40. +104 -1  src/dds/build/builder.cpp
  41. +3 -1    src/dds/build/params.hpp
  42. +1 -0    src/dds/cli/cmd/build.cpp
  43. +2 -0    src/dds/cli/cmd/build_deps.cpp
  44. +1 -0    src/dds/cli/cmd/compile_file.cpp
  45. +433 -0  src/dds/cli/cmd/install_yourself.cpp
  46. +18 -8   src/dds/cli/cmd/pkg_create.cpp
  47. +34 -10  src/dds/cli/cmd/pkg_import.cpp
  48. +1 -1    src/dds/cli/cmd/pkg_repo_ls.cpp
  49. +60 -0   src/dds/cli/cmd/pkg_search.cpp
  50. +5 -22   src/dds/cli/cmd/repoman_add.cpp
  51. +9 -10   src/dds/cli/dispatch_main.cpp
  52. +39 -1   src/dds/cli/error_handler.cpp
  53. +94 -32  src/dds/cli/options.cpp
  54. +26 -14  src/dds/cli/options.hpp
  55. +19 -0   src/dds/error/toolchain.hpp
  56. +16 -3   src/dds/pkg/cache.cpp
  57. +1 -1    src/dds/pkg/cache.hpp
  58. +15 -1   src/dds/pkg/db.cpp
  59. +3 -7    src/dds/pkg/get/get.cpp
  60. +2 -0    src/dds/pkg/get/get.hpp
  61. +71 -11  src/dds/pkg/remote.cpp
  62. +2 -0    src/dds/pkg/remote.hpp
  63. +76 -0   src/dds/pkg/search.cpp
  64. +33 -0   src/dds/pkg/search.hpp
  65. +0 -1    src/dds/sdist/dist.cpp
  66. +14 -0   src/dds/toolchain/toolchain.cpp
  67. +3 -0    src/dds/toolchain/toolchain.hpp
  68. +18 -0   src/dds/util/env.cpp
  69. +23 -0   src/dds/util/env.hpp
  70. +52 -7   src/dds/util/fs.cpp
  71. +27 -0   src/dds/util/fs.hpp
  72. +6 -5    src/dds/util/http/pool.cpp
  73. +2 -0    src/dds/util/output.hpp
  74. +4 -0    src/dds/util/output.nix.cpp
  75. +29 -3   src/dds/util/output.win.cpp
  76. +9 -19   src/dds/util/paths.linux_fbsd.cpp
  77. +5 -6    src/dds/util/paths.macos.cpp
  78. +3 -2    src/dds/util/result.cpp
  79. +15 -0   src/dds/util/string.hpp
  80. +55 -22  src/debate/argument_parser.cpp
  81. +15 -2   src/fansi/styled.cpp
  82. +2 -2    src/fansi/styled.hpp
  83. +0 -0    tests/projects/sdist/src/foo.cpp
  84. +7 -0    tests/projects/simple-cmake/CMakeLists.txt
  85. +3 -0    tests/projects/simple-cmake/main.cpp
  86. +3 -0    tests/projects/simple/include/foo.hpp
  87. +0 -0    tests/projects/simple/include/header.h
  88. +0 -0    tests/projects/simple/include/header.hpp
  89. +0 -0    tests/projects/simple/library.jsonc
  90. +0 -0    tests/projects/simple/other-file.txt
  91. +0 -0    tests/projects/simple/package.json5
  92. +5 -0    tests/projects/simple/src/foo.cpp
  93. +21 -0   tests/projects/tweaks/include/tweakable.config.hpp
  94. +7 -0    tests/projects/tweaks/include/tweakable.hpp
  95. +3 -0    tests/projects/tweaks/library.jsonc
  96. +5 -0    tests/projects/tweaks/package.json5
  97. +6 -0    tests/projects/tweaks/src/tweakable.cpp
  98. +3 -0    tests/projects/tweaks/src/tweakable.main.cpp
  99. +1 -1    tests/test_basics.py
  100. +0 -0   tests/test_build_deps.py

.github/ISSUE_TEMPLATE/bug_report.md (+1 -1)

@@ -55,7 +55,7 @@ List the platform(s) and toolsets which are applicable to the issue, and all of

- Operating System: [e.g. macOS, Linux, Windows]
- Compiler: [e.g. MSVC, GCC, Clang]
-- `dds` Version: [e.g. `0.1.0-alpha.5`]
+- `dds` Version: [e.g. `0.1.0-alpha.6`]

**Additional context**


Makefile (+1 -1)

@@ -28,7 +28,7 @@ docs-server: docs
python -m http.server 9794

docs-watch: docs
+sh tools/docs-watch.sh
+poetry run sh tools/docs-watch.sh

docs-sync-server:
mkdir -p _build/docs

docs/conf.py (+6 -2)

@@ -11,16 +11,20 @@ author = 'vector-of-bool'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
-release = '0.1.0-alpha.5'
+release = '0.1.0-alpha.6'

# -- General configuration ---------------------------------------------------
-extensions = []
+extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
language = None
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = None
+intersphinx_mapping = {
+    'python': ('https://docs.python.org/3', None),
+    'pytest': ('https://docs.pytest.org/en/latest/', None),
+}

# -- Options for HTML output -------------------------------------------------
html_theme = 'nature'

docs/design.rst (+30 -24)

@@ -3,13 +3,14 @@

``dds`` has been designed from the very beginning as an extremely opinionated
hybrid *build system* and *package manager*. Unlike most build systems however,
-``dds`` has a hyper-specific focus on a particular aspect of software
-development: C and C++ libraries.
+``dds`` has a strong focus on a particular aspect of software development: C and
+C++ libraries.

This may sound pointless, right? Libraries are useless unless we can use them
to build applications!

-Indeed, applications *are* essential, but that is "not our job" with ``dds``.
+Indeed, applications *are* essential, and ``dds`` is able to build those as
+well.

Another design decision is that ``dds`` is built to be driven by automated
tools as well as humans. ``dds`` is not designed to entirely replace existing
@@ -32,8 +33,8 @@ incredible implementation challenges.

Despite the vast amount of work put into build systems and tooling, virtually
all developers are using them *incorrectly* and/or *dangerously* without
-realizing it. Despite this work, we seem to be a great distance from a unified
-library package distribution and consumption mechanism.
+realizing it, and we still seem to be a great distance from a unified library
+package distribution and consumption mechanism.


Tabula Rasa
@@ -46,7 +47,7 @@ If you opt-in to have your library built by ``dds``, you forgoe
*customizability* in favor of *simplicity* and *ease*.

``dds`` takes a look at what is needed to build and develop *libraries* and
-hyper-optimizes for that use case. It is also built with a very strong, very
+optimizes for that use case. It is also built with a very strong, very
opinionated idea of *how* libraries should be constructed and used. These
prescriptions are not at all arbitrary, though. They are built upon the
observations of the strengths and weaknesses of build systems in use throughout
@@ -69,14 +70,14 @@ different, despite both using the same underlying "Build System."

``dds`` takes a massive divergence at this point. One project using ``dds`` as
their build system has a nearly identical build process to every other project
-using ``dds``. Simply running :code:`dds build -t <toolchain>` should be enough
+using ``dds``. Simply running ``dds build`` should be enough
to build *any* ``dds`` project.

In order to reach this uniformity and simplicity, ``dds`` drops almost all
aspects of project-by-project customizability. Instead, ``dds`` affords the
developer a contract:

-    If you play by my rules, you get to play in my space.
+    If you play by the rules, you get to play in this space.


.. _design.rules:
@@ -91,7 +92,7 @@ imposes, but what are they?
.. _design.rules.not-apps:

``dds`` Is not Made for Complex Applications
-===============================================
+============================================

Alright, this one isn't a "rule" as much as a recommendation: If you are
building an application that *needs* some build process functionality that
@@ -105,22 +106,28 @@ violate any of the other existing rules.
customization features to permit the rules to be bent arbitrarily: Read
on.

-``dds`` contains a minimal amount of functionality for building simple
-applications, but it is certainly not its primary purpose.
+``dds`` *does* contain functionality for building applications, but they must
+also play by the rules.

+If you want to build a complex application with ``dds`` that uses lots of
+platform-specific sources, code generation, and conditional components, a good
+option is to use an external build script that prepares the project tree before
+invoking ``dds``.


.. _design.rules.change:

-*Your* Code Should Be Changed Before ``dds`` Should Be Changed
-=================================================================
+*Your Code* Should Be Changed Before ``dds`` Should Be Changed
+==============================================================

-The wording of this rule means that the onus is on the library developer to
-meet the expectations that ``dds`` prescribes in order to make the build
-work.
+The wording of this rule means that the onus is on the developer to meet the
+expectations that ``dds`` prescribes in order to make the build work.

-If your library meets all the requirements outlined in this document but you
-still find trouble in making your build work, this is grounds for change in
-``dds``, either in clarifying the rules or tweaking ``dds`` functionality.
+If your project meets all the requirements outlined in this document but you
+still find trouble in making your build work, or if you *cannot* see *any*
+possible way for your project to be built by ``dds`` regardless of what changes
+you make, then this is grounds for change in ``dds``, either in clarifying the
+rules or tweaking ``dds`` functionality.


.. _design.rules.layout:
@@ -154,9 +161,8 @@ conditional compilation.
All Code Must Be in Place Before Building
=========================================

-``dds`` does not provide code-generation functionality. Instead, any
-generated code should be generated and committed to the repository to be only
-ever modified through such generation scripts.
+``dds`` does not provide code-generation functionality. Instead, any generated
+code should be generated by separate build steps before ``dds`` is executed.


.. _design.rules.one-binary-per-src:
@@ -176,7 +182,7 @@ No Arbitrary ``#include`` Directories
=====================================

Only ``src/`` and ``include/`` will ever be used as the basis for header
-resolution while building a library, so all ``#include`` directives should be
+resolution while building a project, so all ``#include`` directives should be
relative to those directories. Refer to :ref:`pkg.source-root`.


@@ -185,7 +191,7 @@ relative to those directories. Refer to :ref:`pkg.source-root`.
All Files Compile with the Same Options
=======================================

-When DDS compiles a library, every source file will be compiled with an
+When DDS compiles a project, every source file will be compiled with an
identical set of options. Additionally, when DDS compiles a dependency tree,
every library in that dependency tree will be compiled with an identical set of
options. Refer to the :doc:`guide/toolchains` page for more information.

docs/dev/building.rst (+88 -164)

@@ -1,209 +1,133 @@
Building ``dds`` from Source
############################

While prebuilt ``dds`` executables are `available on the GitHub page
<releases_>`_, one may wish to build ``dds`` from source.

.. _releases: https://github.com/vector-of-bool/dds/releases

The ``dds`` build process is designed to be as turn-key simple as possible.


Platform Support
****************

``dds`` aims to be as cross-platform as possible. It currently builds and
executes on Windows, macOS, Linux, and FreeBSD. Support for additional
platforms is possible but will require modifications to ``bootstrap.py`` that
will allow it to be built on such platforms.


Build Requirements
******************

Building ``dds`` has a simple set of requirements:

- **Python 3.6** or newer to run the bootstrap/CI scripts.
- A C++ compiler that has rudimentary support for C++20 concepts. Newer
releases of Visual C++ that ship with **VS 2019** will be sufficient on
Windows, as will **GCC 9** with ``-fconcepts`` on other platforms.

.. note::
On Windows, you will need to execute the build from within a Visual C++
enabled environment. This will involve launching the build from a Visual
Studio Command Prompt.

.. note::
At the time of writing, C++20 Concepts has not yet been released in Clang,
but should be available in LLVM/Clang 11 and newer.


Build Scripts and the CI Process
********************************

The main CI process is driven by Python. The root CI script is ``tools/ci.py``,
and it accepts several command-line parameters. Only a few are of immediate
interest:

``--bootstrap-with=<method>`` or ``-B <method>``
Tell ``ci.py`` how to obtain the previous ``dds`` executable that can build
the *current* ``dds`` source tree. This accepts one of three values:
``skip``, ``download``, or ``build``. Refer to :ref:`bootstrapping`.

``--build-only``
A flag that tells ``ci.py`` to exit after it has successfully built the
current source tree, and to not execute the phase-2 build nor the automated
tests.

``--toolchain=<path>`` or ``-T <path>``
Tell ``ci.py`` what toolchain to give to the prior ``dds`` to build the
current ``dds``.

The ``ci.py`` script performs the following actions, in order:

#. Prepare the build output directory
#. Prepare the prior version of ``dds`` that will build the current version.
#. Import the embedded ``catalog.json`` into a catalog database stored within
``_prebuilt/``. This will be used to resolve the third-party packages that
``dds`` itself uses.
#. Invoke the build of ``dds`` using the prebuilt ``dds`` from the prior
bootstrap phase. If ``--build-only`` was specified, the CI script stops
here.
#. Use the new ``dds`` executable to rebuild itself *again* (phase-2 self-build
test). A bit of a "sanity test."
#. Execute the test suite using ``pytest``.


.. _bootstrapping:
This page assumes that you have read the :doc:`env` page, and that you are
running all commands from within the Poetry-generated virtual environment.

Bootstrapping ``dds``
*********************
The main entrypoint for the ``dds`` CI process is the ``dds-ci`` command, which
will build and test ``dds`` from the repository sources. ``dds-ci`` accepts
several optional command-line arguments to tweak its behavior.

In the beginning, ``dds`` was built by a Python script that globbed the sources
and invoked the compiler+linker on those sources. Once ``dds`` was able to
build and link itself, this Python script was replaced instead with ``dds``
building itself. ``dds`` has never used another build system.

The ``ci.py`` script accepts one of three methods for the ``--bootstrap-with``
flag: ``skip``, ``download``, or ``build``.
Running a Build *Only*
**********************

Once bootstrapping is complete, a ``dds`` executable will be written to
``_prebuilt/dds``. This executable refers to a **previous** version of ``dds``
that is able to build the newer ``dds`` source tree.
If you only wish to obtain a built ``dds`` executable, the ``--no-test``
parameter can be given::

.. note::
For all development work on ``dds``, the ``_prebuilt/dds`` executable should
always be used. This means that newer ``dds`` features are not available
for use within the ``dds`` repository.
$ dds-ci --no-test

This will skip the audit-build and testing phases of CI and build only the final
``dds`` executable.

Bootstrap: ``skip``
===================

If given ``skip``, ``ci.py`` will not perform any bootstrapping steps. It will
assume that there is an existing ``_prebuilt/dds`` executable. This option
should be used once bootstrapping has been performed at least once with another
method, as this is much faster than rebuilding/redownloading every time.
Rapid Iterations for Development
********************************

If you are making frequent changes to ``dds``'s source code and want a fast
development process, use ``--rapid``::

Bootstrap: ``download``
=======================
$ dds-ci --rapid

The ``ci.py`` script has a reference to a download URL of the prior version of
``dds`` that has been designated for the bootstrap. These executables originate
from `the GitHub releases <releases_>`_ page.
This will run the build step only, producing an executable with maximum debug
and audit information, including AddressSanitizer and
UndefinedBehaviorSanitizer. It will also execute the unit tests, which should
run completely in under two seconds (if they are slower, that may be a bug).

If given ``download``, then ``ci.py`` will download a predetermined ``dds``
executable and use it to perform the remainder of the build.

Toolchain Control
*****************

Bootstrap: ``build``
====================
``dds-ci`` will automatically select and build with an appropriate
:doc:`toolchain </guide/toolchains>` based on what it detects of the host
platform, but you may want to tweak those options.

Another script, ``tools/bootstrap.py`` is able to build ``dds`` from the ground
up. It works by progressively cloning previous versions of the ``dds``
repository and using them to build the next commit in the chain.
The ``dds-ci`` script accepts two toolchain options:

While this is a neat trick, it isn't necessary for most development, as the
resulting executable will be derived from the same commit as the executable
that would be obtained using the ``download`` method. This is also more fragile
as the past commits may make certain assumptions about the system that might
not be true outside of the CI environment. The build process may be tweaked in
the future to correct these assumptions.
``--main-toolchain``
This is the toolchain that is used to create a final release-built executable.
If you build with ``--no-test``, this toolchain will be used.

``--test-toolchain``
This is the toolchain that is used to create an auditing and debuggable
executable of ``dds``. This is the toolchain that is used if you build with
``--rapid``.

Selecting a Build Toolchain
***************************
If you build with neither ``--rapid`` nor ``--no-test``, then ``dds-ci`` will
build *two* ``dds`` executables: One with the ``--test-toolchain`` that is
passed through the test suite, and another for ``--main-toolchain`` that is
built for distribution.
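
For illustration, here is a sketch of passing both options explicitly. The
toolchain file names below are placeholders; substitute files that actually
exist under ``tools/`` in your checkout, or your own tweaked copies::

    # Hypothetical invocation: release executable built with one toolchain,
    # audit/test executable built with another.
    $ dds-ci --main-toolchain tools/gcc-9-rel.jsonc \
             --test-toolchain tools/gcc-9-audit.jsonc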

``dds`` includes three toolchains that it uses to build itself in its CI
environment: ``tools/gcc-9.jsonc`` for Linux and macOS,
``tools/freebsd-gcc-9.jsonc`` for FreeBSD, and ``tools/msvc.jsonc`` for
Windows.
The default toolchains are files contained within the ``tools/`` directory of
the repository. When ``dds-ci`` builds ``dds``, it will print the path to the
toolchain file that is selected for that build.

While these toolchains will work perfectly well in CI, you may need to tweak
these for your build setup. For example: ``gcc-9.jsonc`` assumes that the GCC 9
executables are named ``gcc-9`` and ``g++-9``, which is incorrect on some
Linux distributions.
While these provided toolchains will work perfectly well in CI, you may need to
tweak these for your build setup. For example: ``gcc-9-*.jsonc`` toolchains
assume that the GCC 9 executables are named ``gcc-9`` and ``g++-9``, which is
incorrect on some Unix and Linux distributions.

It is recommended to tweak these files as necessary to get the build working on
your system. However, do not include those tweaks in a commit unless they are
necessary to get the build running in CI.

your system. However, **do not** include those tweaks in a commit unless they
are necessary to get the build running in CI.

Giving a Toolchain to ``ci.py``
===============================

Just like passing a toolchain to ``dds``, ``ci.py`` also requires a toolchain.
Simply pass the path to your desired toolchain using the ``--toolchain``/
``-T`` argument:
What's Happening?
*****************

.. code-block:: bash
The ``dds-ci`` script performs the following actions, in order:

$ python3 tools/ci.py [...] -T tools/gcc-9.jsonc
#. If given ``--clean``, remove any prior build output and downloaded
dependencies.
#. Prepare the prior version of ``dds`` that will build the current version
(usually, just download it). This is placed in ``_prebuilt/``.
#. Import the ``old-catalog.json`` into a catalog database stored within
``_prebuilt/``. This will be used to resolve the third-party packages that
``dds`` itself uses.
#. Invoke the build of ``dds`` using the prebuilt ``dds`` obtained from the
prior bootstrap phase. If ``--no-test`` or ``--rapid`` was specified, the CI
script stops here.
#. Launch ``pytest`` with the generated ``dds`` executable and start the final
release build simultaneously, and wait for both to finish.
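
As a concrete sketch of the sequence above, a from-scratch CI run followed by a
build-only iteration might look like this (using only the flags described on
this page)::

    # Full cycle: clean, bootstrap, build, audit-build, and pytest:
    $ dds-ci --clean

    # A later iteration that stops after producing the executable:
    $ dds-ci --no-test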


Building for Development
************************
Unit Tests
**********

While ``ci.py`` is rigorous in maintaining a clean and reproducible environment,
we often don't need such rigor for a rapid development iteration cycle. Instead
we can invoke the build command directly in the same way that ``ci.py`` does
it:
Various pieces of ``dds`` contain unit tests. These are stored within the
``src/`` directory itself in ``*.test.cpp`` files. They are built and executed
as part of the iteration cycle *unconditionally*. These tests execute in
milliseconds so as not to burden the development iteration cycle. The more
rigorous tests are executed separately by PyTest.

.. code-block:: bash

$ _prebuilt/dds build -t [toolchain] \
--catalog _prebuilt/catalog.db \
--repo-dir _prebuilt/ci-repo
Speeding Up the Build
*********************

The ``--catalog`` and ``--repo-dir`` arguments are not strictly necessary, but
help to isolate the ``dds`` dev environment from the user-local ``dds``
environment. This is important if modifications are made to the catalog
database schema that would conflict with the one of an external ``dds``
version.
``dds``'s build is unfortunately demanding, but can be sped up by additional
tools:

.. note::
You'll likely want to run ``ci.py`` *at least once* for it to prepare the
necessary ``catalog.db``.

.. note::
As mentioned previously, if using MSVC, the above command must execute with
the appropriate VS development environment enabled.
Use the LLVM ``lld`` Linker
===========================

Installing the LLVM ``lld`` linker will *significantly* improve the time it
takes for ``dds`` and its unit test executables to link. ``dds-ci`` will
automatically recognize the presence of ``lld`` if it has been installed
properly.

Running the Test Suite
**********************
.. note::

The ``--build-only`` flag for ``ci.py`` will disable test execution. When this
flag is omitted, ``ci.py`` will execute a self-build sanity test and then
execute the main test suite, which is itself written as a set of ``pytest``
tests in the ``tests/`` subdirectory.
``dds-ci`` (and GCC) look for an executable called ``ld.lld`` on the
executable PATH (no version suffix!). You may need to symlink the
version-suffixed executable as ``ld.lld`` in another location on PATH so
that ``dds-ci`` (and GCC) can find it.
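
For instance, on a system where the linker was installed as ``ld.lld-11`` (a
hypothetical version suffix), a symlink along these lines may be all that is
needed; the paths are illustrative::

    # Place an unsuffixed ld.lld somewhere on PATH:
    $ sudo ln -s "$(command -v ld.lld-11)" /usr/local/bin/ld.lld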


Unit Tests
==========
Use ``ccache``
==============

Various pieces of ``dds`` contain unit tests. These are stored within the
``src/`` directory itself in ``*.test.cpp`` files. They are built and executed
by the bootstrapped ``dds`` executable unconditionally. These tests execute
in milliseconds and do not burden the development iteration cycle.
``dds-ci`` will also recognize ``ccache`` and add it as a compiler-launcher if
it is installed on your PATH. This won't improve initial compilation times, but
can make subsequent compilations significantly faster when files are unchanged.
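
A quick way to confirm that both helpers are visible before invoking ``dds-ci``
is to ask the shell where they live (the printed paths will vary by system)::

    $ command -v ccache ld.lld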

docs/dev/ci-api.rst (+79 -0)

@@ -0,0 +1,79 @@
DDS CI Scripts Python API
#########################

Types from pytest
*****************

These types are defined by pytest, but are used extensively within the testing
scripts.

.. class:: _pytest.fixtures.FixtureRequest

.. seealso:: :class:`pytest.FixtureRequest`

.. class:: _pytest.tmpdir.TempPathFactory

.. seealso:: :class:`pytest.TempPathFactory`


Test Fixtures
*************

The following test fixtures are defined:

- :func:`~dds_ci.testing.fixtures.dds` - :class:`dds_ci.dds.DDSWrapper` - A
wrapper around the ``dds`` executable under test.
- :func:`~dds_ci.testing.fixtures.tmp_project` -
:class:`dds_ci.testing.fixtures.Project` - Create a new empty directory to be
used as a test project for ``dds`` to execute.
- :func:`~dds_ci.testing.http.http_repo` -
:class:`dds_ci.testing.http.RepoServer` - Create a new dds repository and
spawn an HTTP server to serve it.

Module: ``dds_ci``
******************

.. automodule:: dds_ci
:members:


Module: ``dds_ci.dds``
**********************

.. automodule:: dds_ci.dds
:members:


Module: ``dds_ci.proc``
***********************

.. automodule:: dds_ci.proc
:members:


Module: ``dds_ci.testing``
**************************

.. automodule:: dds_ci.testing
:members:


Module: ``dds_ci.testing.http``
*******************************

.. automodule:: dds_ci.testing.http
:members:


Module: ``dds_ci.testing.fixtures``
***********************************

.. automodule:: dds_ci.testing.fixtures
:members:


Module: ``dds_ci.testing.error``
********************************

.. automodule:: dds_ci.testing.error
:members:

docs/dev/env.rst (+91 -0)

@@ -0,0 +1,91 @@
Setting Up a Build/Development Environment
##########################################

While ``dds`` is able to build itself, several aspects of build infrastructure
are controlled via Python scripts. You will need Python 3.6 or later available
on your system to get started.


.. _Poetry: https://python-poetry.org/

Getting Started with *Poetry*
*****************************

``dds`` CI runs atop `Poetry`_, a Python project management tool. While designed
for Python projects, it serves our purposes well.


Installing Poetry
=================

If you do not have Poetry already installed, it can be obtained easily for most
any platform.
`Refer to the Poetry "Installation" documentation to learn how to get Poetry on your platform <https://python-poetry.org/docs/#installation>`_.

The remainder of this documentation will assume you are able to execute
``poetry`` on your command-line.


Setting Up the Environment
==========================

To set up the scripts and Python dependencies required for CI and development,
simply execute the following command from within the root directory of the
project::

$ poetry install

Poetry will then create a Python virtual environment that contains the Python
scripts and tools required for building and developing ``dds``.

The Python virtual environment that Poetry created can be inspected using
``poetry env info``, and can be deleted from the system using
``poetry env remove``. Refer to
`the Poetry documentation <https://python-poetry.org/docs>`_ for more
information about using Poetry.
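
For example (the argument to ``env remove`` names the interpreter the
environment was created from, and may differ on your system)::

    # Inspect the project's virtual environment:
    $ poetry env info

    # Delete it entirely:
    $ poetry env remove python3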


Using the Poetry Environment
****************************

Once the ``poetry install`` command has been executed, you will now be ready to
run the ``dds`` CI scripts and tools.

The scripts are installed into the virtual environment, and need not be globally
installed anywhere else on the system. You can only access these scripts by
going through Poetry. To run any individual command within the virtual
environment, use ``poetry run``::

$ poetry run <some-command>

This will load the virtual environment, execute ``<some-command>``, then exit
the environment. This is useful for running CI scripts from outside of the
virtualenv.

**Alternatively**, the environment can be loaded persistently into a shell
session by using ``poetry shell``::

$ poetry shell

This will spawn a new interactive shell process with the virtual environment
loaded, and you can now run any CI or development script without needing to
prefix them with ``poetry run``.

Going forward, the documentation will assume you have the environment loaded
as-if by ``poetry shell``, but any ``dds``-CI-specific command can also be
executed by prefixing the command with ``poetry run``.


Working With an MSVC Environment in VSCode
==========================================

If you use Visual Studio Code as your editor and MSVC as your C++ toolchain,
you'll need to load the MSVC environment as part of your build task. ``dds`` CI
has a script designed for this purpose. To use it, first load up a shell within
the Visual C++ environment, then, from within the previously create Poetry
environment, run ``gen-msvs-vsc-task``. This program will emit a Visual Studio
Code JSON build task that builds ``dds`` and also contains the environment
variables required for the MSVC toolchain to compile and link programs. You can
save this JSON task into ``.vscode/tasks.json`` to use as your primary build
task while hacking on ``dds``.
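
As a sketch of that workflow, assuming ``gen-msvs-vsc-task`` writes the JSON
task to standard output (run this from a VS-enabled shell, inside the Poetry
environment)::

    $ poetry run gen-msvs-vsc-task > .vscode/tasks.json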


docs/dev/index.rst (+12 -1)

@@ -1,9 +1,20 @@
``dds`` Development
###################

While prebuilt ``dds`` executables are `available on the GitHub page
<releases_>`_, one may wish to build ``dds`` from source.

.. _releases: https://github.com/vector-of-bool/dds/releases

The ``dds`` build process is designed to be as turn-key simple as possible.

This section will discuss how to modify and build ``dds`` itself.

.. toctree::
:maxdepth: 2

-   building
+   reqs
+   env
+   building
+   testing
+   ci-api

docs/dev/reqs.rst (+29 -0)

@@ -0,0 +1,29 @@
Supported Platforms and Build Requirements
##########################################

``dds`` aims to be as cross-platform as possible. It currently builds and
executes on **Windows**, **macOS**, **Linux**, and **FreeBSD**. Support for
additional platforms is possible but will require modifications to
``bootstrap.py`` that will allow it to be built on such platforms.


Build Requirements
******************

Building ``dds`` has a simple set of requirements:

- **Python 3.6** or newer to run the bootstrap/CI scripts.
- A C++ compiler that has rudimentary support for several C++20 features,
including Concepts. Newer releases of Visual C++ that ship with **VS
2019** will be sufficient on Windows, as will **GCC 9** with ``-fconcepts`` on
other platforms.

.. note::
On Windows, you will need to execute the build from within a Visual C++
enabled environment. This may involve launching the build from a Visual
Studio Command Prompt.

.. note::
At the time of writing, C++20 Concepts has not yet been released in Clang,
but should be available in LLVM/Clang 11 and newer.


docs/dev/testing.rst (+74 -0)

@@ -0,0 +1,74 @@
Testing with ``pytest``
#######################

For ``dds``'s more rigorous test suite, we use the ``pytest`` testing framework.
These tests are stored in the ``tests/`` directory and written in ``test_*.py``
files.

The test suite can be run separately without ``dds-ci`` by executing ``pytest``
from within the :doc:`Poetry virtual environment <env>`::

$ pytest tests/

Note that individual tests can take between a few seconds and a few minutes to
execute, so it may be useful to execute only a subset of the tests based on the
functionality you want to test. Refer to
`the pytest documentation <https://docs.pytest.org/en/latest/>`_ for more
information about using and executing ``pytest``. If you are running the full
test suite, you may also want to pass the ``-n`` argument with a number of
parallel jobs to execute.
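
For example, a run that filters tests by keyword and spreads them over four
workers might look like the following. Note that ``-k`` is built into pytest,
while ``-n`` is provided by the ``pytest-xdist`` plugin, which we assume is
present in the development environment::

    # Run only tests whose names mention 'sdist', on 4 parallel workers:
    $ pytest tests/ -k sdist -n 4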


.. highlight:: python

Writing Tests
*************

If a particular aspect of ``dds`` can be tested in isolation and within a few
dozen milliseconds, you should prefer to test it as a unit test in a
``*.test.cpp`` file. The ``pytest`` tests are intended to perform full
end-to-end feature and error handling tests.

Tests are grouped into individual Python files in the ``tests/`` directory. Any
Python file containing tests must have a filename beginning with ``test_``.
Individual test functions should begin with ``test_``. All test functions should
be properly type-annotated and should type-check successfully via ``mypy``.

The ``dds`` test suite has access to a set of test fixtures that can be used
throughout tests to perform complex setup and teardown for complete test-by-test
isolation.

Here is a simple test that simply executes ``dds`` with ``--help``::

def test_get_help(dds: DDSWrapper) -> None:
    dds.run(['--help'])

In this test function, :func:`the dds object is a test fixture
<dds_ci.testing.fixtures.dds>` that wraps the ``dds`` executable under test.


Testing Error Handling
**********************

It is important that ``dds`` handle errors correctly, of course, including user
error. It is not enough to simply check that a certain operation fails: We must
be sure that it fails *correctly*. To check that the correct code path is
executed, ``dds`` can write a file containing a simple constant string
designating the error handling path that was taken. The file will be written to
the path indicated by the ``DDS_WRITE_ERROR_MARKER`` environment variable.

For examples of these error strings, search for usage of ``write_error_marker``
in the ``dds`` source code. These should only execute within error-handling
contexts, should appear near the log messages that issue diagnostics, and should
be specific to the error at hand.

To write a test that checks for a given error-handling path, use the
:func:`~dds_ci.testing.error.expect_error_marker` context manager function::

def test_sdist_invalid_project(tmp_project: Project) -> None:
    # Trying to create a package archive from a project without a
    # package.json5 is invalid. Check that it creates the correct
    # error-message string
    with error.expect_error_marker('no-package-json5'):
        tmp_project.pkg_create()


docs/err/git-url-ref-mutual-req.rst (+1 -1)

@@ -12,4 +12,4 @@ as the ``ref`` requires support from the remote Git server, and it is often
unavailable in most setups). Using a Git tag is strongly recommended.

.. seealso::
-    Refer to the documentation on :doc:`/guide/catalog`.
+    Refer to the documentation on :doc:`/guide/remote-pkgs`.

docs/err/invalid-catalog-json.rst (+0 -9)

@@ -1,9 +0,0 @@
Error: Invalid catalog JSON
###########################

This error occurs when the JSON data given to import into the package catalog
is in some way invalid. Refer to the catalog documentation for a description of
the proper JSON format.

.. seealso::
:ref:`catalog.adding`

docs/err/invalid-repo-transform.rst (+0 -10)

@@ -1,10 +0,0 @@
Error: A repository filesystem transformation is invalid
########################################################

In ``dds``, a catalog entry can have a list of attached "transforms" that will
be applied to the root directory of the package before ``dds`` tries to build
and use it.

.. seealso::
For information on the shape and purpose of transforms, refer to
:ref:`catalog.fs-transform` on the :doc:`/guide/catalog` page.

docs/err/no-catalog-remote-info.rst (+0 -2)

@@ -6,5 +6,3 @@ requires some information regarding how to actually *acquire* that package
when it is requested.

If such information is not provided, ``dds`` will issue an error.

.. seealso:: :ref:`catalog.adding`.

docs/err/no-such-catalog-package.rst (+0 -7)

@@ -8,10 +8,3 @@ in the catalog.
It is possible that the intended package *does exist* but that the spelling of
the package name or version number is incorrect. Firstly, check your spelling
and that the version number you have requested is correct.

In another case, it is possible that the package *exists somewhere*, but has
not been loaded into the local catalog. As of this writing, ``dds`` does not
automatically maintain the catalog against a central package repository, so
package entries must be loaded and imported manually. If you believe this to be
the case, refer to the section on the :doc:`/guide/catalog`, especially the
section :ref:`catalog.adding`.

docs/err/sdist-exists.rst (+3 -3)

@@ -19,7 +19,7 @@ write a source distribution to the named path, it would be required to delete
whatever exists there before creating the source distribution.

.. warning::
-    When using ``dds sdist create`` with the ``--out <path>`` parameter, the
+    When using ``dds pkg create`` with the ``--out <path>`` parameter, the
``<path>`` given **is not the directory in which to place the source
distribution, but the filepath to the source distribution itself**!

@@ -27,7 +27,7 @@ whatever exists there before creating the source distribution.
distribution in that directory, **the following command is incorrect**::

# Do not do this:
-dds sdist create --out foo/
+dds pkg create --out foo/

If you pass ``--replace`` to the above command, ``dds`` will **destroy the
existing directory** and replace it with the source distribution!
@@ -35,4 +35,4 @@ whatever exists there before creating the source distribution.
You **must** provide the full path to the source distribution::

# Do this:
-dds sdist create --out foo/my-project.dsd
+dds pkg create --out foo/my-project.tar.gz

docs/guide/build-deps.rst (+187 -0)

@@ -0,0 +1,187 @@
Building and Using ``dds`` in Another Build System
##################################################

One of ``dds``'s primary goals is to inter-operate with other build systems
cleanly. One of ``dds``'s primary outputs is *libman* package indices. These
package indices can be imported into other build systems that support the
`libman`_ format. (At the time of writing there is a CMake module which can do
the import, but other build systems are planned.)

.. _libman: https://api.csswg.org/bikeshed/?force=1&url=https://raw.githubusercontent.com/vector-of-bool/libman/develop/data/spec.bs

.. _PMM: https://github.com/vector-of-bool/PMM

.. _CMakeCM: https://github.com/vector-of-bool/CMakeCM

.. _lm-cmake: https://raw.githubusercontent.com/vector-of-bool/libman/develop/cmake/libman.cmake


.. _build-deps.gen-libman:

Generating a libman Index
*************************

Importing libman packages into a build system requires that we have a libman
index generated on the filesystem. **This index is not generated globally**: It
is generated on a per-build basis as part of the build setup. The index will
describe in build-system-agnostic terms how to include a set of packages and
libraries as part of a build.

``dds`` has first-class support for generating this index. The ``build-deps``
subcommand of ``dds`` will download and build a set of dependencies, and places
an ``INDEX.lmi`` file that can be used to import the built results.


Declaring Dependencies
======================

``dds build-deps`` accepts a list of dependency statements as command line
arguments, but it may be useful to specify those requirements in a file.

``dds build-deps`` accepts a JSON5 file describing the dependencies of a
project as well. This file is similar to a very stripped-down version of a
``dds`` :ref:`package manifest <pkgs.pkgs>`, and only includes the ``depends``
key. (The presence of any other key is an error.)

Here is a simple dependencies file that declares a single requirement:

.. code-block:: js
:caption: ``dependencies.json5``

{
    depends: [
        'neo-sqlite3^0.2.0',
    ]
}


Building Dependencies and the Index
===================================

We can invoke ``dds build-deps`` and give it the path to this file:

.. code-block:: bash

$ dds build-deps --deps-file dependencies.json5

When finished, ``dds`` will write the build results into a subdirectory called
``_deps`` and generate a file named ``INDEX.lmi``. This file is ready to be
imported into any build system that can understand libman files.

.. note::
The output directory and index filepath can be controlled with the
``--out`` and ``--lmi-path`` flags, respectively.
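
As an illustration, here is the same invocation with both output locations
spelled out explicitly; the values shown simply restate the defaults described
above and can be replaced with paths of your choosing::

    $ dds build-deps --deps-file dependencies.json5 \
        --out _deps --lmi-path INDEX.lmi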


Importing an Index: CMake
*************************

.. highlight:: cmake

.. note::

This section discusses how to import ``INDEX.lmi`` into CMake, but ``dds``
also has built-in support for generating a CMake targets file. See
:doc:`/howto/cmake` and :doc:`cmake` for even simpler integration steps.

Suppose that we've generated a libman index and set of packages, and we want to
import them into CMake. CMake doesn't know how to do this natively, but there
exists a single-file module for CMake that allows CMake to import libraries from
libman indices without any additional work.

The module is not shipped with CMake, but is available online as a single
stand-alone file. The `libman.cmake <lm-cmake_>`_ file can be downloaded and
added to a project directly, or it can be obtained automatically through a
CMake tool like `PMM`_ (recommended).


Getting ``libman.cmake`` via PMM
================================

Refer to the ``README.md`` file in `the PMM repo <PMM_>`_ for information on how
to get PMM into your CMake project. In short, download and place the
``pmm.cmake`` file in your repository, and ``include()`` the file near the top
of your ``CMakeLists.txt``::

include(pmm.cmake)

Once it has been included, you can call the ``pmm()`` function. To obtain
*libman*, we need to start by enabling `CMakeCM`_::

pmm(CMakeCM ROLLING)

.. warning::
It is not recommended to use the ``ROLLING`` mode, but it is the easiest to
use when getting started. For reproducible and reliable builds, you should
pin your CMakeCM version using the ``FROM <url>`` argument.

Enabling CMakeCM will make available all of the CMake modules available in `the
CMakeCM repository <CMakeCM_>`_, which includes `libman.cmake <lm-cmake_>`_.

After the call to ``pmm()``, simply ``include()`` the ``libman`` module::

include(libman)

That's it! The only function from the module that we will care about for now
is the ``import_packages()`` function.


Importing Our Dependencies' Packages
====================================

To import a package from a libman tree, we need only know the *name* of the
package we wish to import. In our example case above, we depend on
``neo-sqlite3``, so we simply call the libman-CMake function
``import_packages()`` with that package name::

import_packages("neo-sqlite3")

You'll note that we don't request any particular version of the package: All
versioning resolution is handled by ``dds``. You'll also note that we don't
need to specify our transitive dependencies: This is handled by the libman
index that was generated by ``dds``: It will automatically ``import_packages()``
any of the transitive dependencies required.

More than one package name can be provided to a single call to
``import_packages()``, and ``import_packages()`` may be called multiple times
within a CMake project.


Using Our Dependencies' Libraries
=================================

Like with ``dds``, CMake wants us to explicitly declare how our build targets
*use* other libraries. When we import a package from a libman index, the
import will generate CMake ``IMPORTED`` targets that can be linked against.

In ``dds`` and in libman, a library is identified by a combination of
*namespace* and *name*, joined together with a slash ``/`` character. This
*qualified name* of a library is decided by the original package author, and
should be documented. In the case of ``neo-sqlite3``, the only library is
``neo/sqlite3``.

When the libman CMake module imports a library, it creates a qualified name
using a double-colon "``::``" instead of a slash. As such, our ``neo/sqlite3``
is imported in CMake as ``neo::sqlite3``. We can link against it as we would
with any other target::

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE neo::sqlite3)

Altogether, here is the final CMake file:

.. code-block::
:caption: ``CMakeLists.txt``
:linenos:

cmake_minimum_required(VERSION 3.15)
project(MyApplication VERSION 1.0.0)

include(pmm.cmake)
pmm(CMakeCM ROLLING)

include(libman)
import_packages("neo-sqlite3")

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE neo::sqlite3)

docs/guide/catalog.rst (+0 -286)

@@ -1,286 +0,0 @@
The Package Catalog
###################

``dds`` stores a catalog of available packages, along with their dependency
statements and information about how a source distribution thereof may be
maintained.


Viewing Catalog Contents
************************

The default catalog database is stored in a user-local location, and the
package IDs available can be listed with ``dds catalog list``. This will only
list the IDs of the packages, but none of the additional metadata about them.


.. _catalog.adding:

Adding Packages to the Catalog
******************************

The ``dds catalog import`` supports a ``--json`` flag that specifies a JSON5
file from which catalog entries will be generated.

.. note::
The ``--json`` flag can be passed more than once to import multiple JSON
files at once.

The JSON file has the following structure:

.. code-block:: javascript

{
    // Import version spec.
    version: 1,
    // Packages section
    packages: {
        // Subkeys are package names
        "acme-gadgets": {
            // Keys within the package names are the versions that are
            // available for each package.
            "0.4.2": {
                // `depends` is an array of dependency statements for this
                // particular version of the package. (Optional)
                depends: [
                    "acme-widgets^1.4.1"
                ],
                // `description` is an attribute to give a string to describe
                // the package. (Optional)
                description: "A collection of useful gadgets.",
                // Specify the Git remote information
                git: {
                    // `url` and `ref` are required.
                    url: "http://example.com/git/repo/acme-gadgets.git",
                    ref: "v0.4.2-stable",
                    // The `auto-lib` is optional, to specify an automatic
                    // library name/namespace pair to generate for the
                    // root library
                    "auto-lib": "Acme/Gadgets",
                    // List of filesystem transformations to apply to the
                    // repository (optional)
                    transform: [
                        // ... (see below) ...
                    ]
                }
            }
        }
    }
}


.. _catalog.fs-transform:

Filesystem Transformations
**************************

.. note::
Filesystem transformations is a transitional feature that is likely to be
removed in a future release, and replaced with a more robust system when
``dds`` has a better way to download packages. Its aim is to allow ``dds``
projects to use existing libraries that might not meet the layout
requirements that ``dds`` imposes, but can otherwise be consumed by ``dds``
with a few tweaks.

A catalog entry can have a set of filesystem transformations attached to its
remote information (e.g. the ``git`` property). When ``dds`` is obtaining a
copy of the code for the package, it will apply the associated transformations
to the filesystem based in the directory of the downloaded/cloned directory. In
this way, ``dds`` can effectively "patch" the filesystem structure of a project
arbitrarily. This allows many software projects to be imported into ``dds``
without needing to patch/fork the original project to support the required
filesystem structure.

.. important::
While ``dds`` allows you to patch directories downloaded via the catalog, a
native ``dds`` project must still follow the layout rules.

The intention of filesystem transformations is to act as a "bridge" that will
allow ``dds`` projects to more easily utilize existing libraries.


Available Transformations
=========================

At time of writing, there are five transformations available to catalog entries:

``copy`` and ``move``
Copies or moves a set of files/directories from one location to another. Allows the following options:

- ``from`` - The path from which to copy/move. **Required**
- ``to`` - The destination path for the copy/move. **Required**
- ``include`` - A list of globbing expressions for files to copy/move. If
  omitted, then all files will be included.
- ``exclude`` - A list of globbing expressions of files to exclude from the
  copy/move. If omitted, then no files will be excluded. **If both**
  ``include`` and ``exclude`` are provided, ``include`` will be checked
  *before* ``exclude``.
- ``strip-components`` - A positive integer (or zero, the default). When the
  ``from`` path identifies a directory, its contents will be copied/moved into
  the destination and maintain their relative path from the source path as
  their relative path within the destination. If ``strip-components`` is set
  to an integer ``N``, then the first ``N`` path components of that relative
  path will be removed when copying/moving the files in a directory. If a
  file's relative path has fewer than ``N`` components, then that file will be
  excluded from the ``copy/move`` operation.

``remove``
Delete files and directories from the package source. Has the following options:

- ``path`` - The path of the file/directory to remove. **Required**
- ``only-matching`` - A list of globbing expressions for files to remove. If
  omitted and the path is a directory, then the entire directory will be
  deleted. If at least one pattern is provided, then directories will be left
  intact and only non-directory files will be removed. If ``path`` names a
  non-directory file, then this option has no effect.

``write``
Write the contents of a string to a file in the package source. Has the following options:

- ``path`` - The path of the file to write. **Required**
- ``content`` - A string that will be written to the file. **Required**

If the file exists and is not a directory, the file will be replaced. If the
path names an existing directory, an error will be generated.

``edit``
Modifies the contents of the files in the package.

- ``path`` - Path to the file to edit. **Required**
- ``edits`` - An array of edit objects, applied in order, with the following
keys:

- ``kind`` - One of ``insert`` or ``delete`` to insert/delete lines,
respectively. **Required**
- ``line`` - The line at which to perform the insert/delete. The first line
of the file is line one, *not* line zero. **Required**
- ``content`` - For ``insert``, the string content to insert into the file.
A newline will be appended after the content has been inserted.

Transformations are added as a JSON array to the JSON object that specifies
the remote information for the package. Each element of the array is an
object, with one or more of the keys listed above. If an object features more
than one of the above keys, they are applied in the same order as they have
been listed.


Example: Crypto++
=================

The following catalog entry will build and import `Crypto++`_ for use by a
``dds`` project. This uses the unmodified Crypto++ repository, which ``dds``
doesn't know how to build immediately. With some simple moving of files, we
end up with something ``dds`` can build directly:

.. code-block:: javascript

"cryptopp": {
"8.2.0": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0",
"auto-lib": "cryptopp/cryptopp",
"transform": [
{
// Crypto++ has no source directories at all, and everything lives
// at the top level. No good for dds.
//
// Clients are expected to #include files with a `cryptopp/` prefix,
// so we need to move the files around so that they match the
// expected layout:
"move": {
// Move from the root of the repo:
"from": ".",
// Move files *into* `src/cryptopp`
"to": "src/cryptopp",
// Only move the C++ sources and headers:
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
}
]
}
}
}


Example: libsodium
==================

For example, this catalog entry will build and import `libsodium`_ for use in
a ``dds`` project. This uses the upstream libsodium repository, which does not
meet the layout requirements needed by ``dds``. With a few simple
transformations, we can allow ``dds`` to build and consume libsodium
successfully:

.. code-block:: javascript

"libsodium": {
"1.0.18": {
"git": {
"url": "https://github.com/jedisct1/libsodium.git",
"ref": "1.0.18",
"auto-lib": "sodium/sodium",
/// Make libsodium look as dds expects of a project.
"transform": [
// libsodium has a `src` directory, but it does not look how dds
// expects it to. The public `#include` root of libsodium lives in
// a nested subdirectory of `src/`
{
"move": {
// Move the public header root out from that nested subdirectory
"from": "src/libsodium/include",
// Put it at `include/` in the top-level
"to": "include/"
}
},
// libsodium has some files whose contents are generated by a
// configure script. For demonstration purposes, we don't need most
// of them, and we can just swipe an existing pre-configured file
// that is already in the source repository and put it into the
// public header root.
{
"copy": {
// Generated version header committed to the repository:
"from": "builds/msvc/version.h",
// Put it where the configure script would put it:
"to": "include/sodium/version.h"
}
},
// The subdirectory `src/libsodium/` is no good. It now acts as an
// unnecessary layer of indirection. We want `src/` to be the root.
// We can just "lift" the subdirectory:
{
// Up we go:
"move": {
"from": "src/libsodium",
"to": "src/"
},
// Delete the now-unused subdirectory:
"remove": {
"path": "src/libsodium"
}
},
// Lastly, libsodium's source files expect to resolve their header
// paths differently than they expect of their clients (Bad!!!).
// Fortunately, we can do a hack to allow the files in `src/` to
// resolve its headers. The source files use #include as if the
// header root was `include/sodium/`, rather than `include/`.
// To work around this, generate a copy of each header file in the
// source root, but remove the leading path element.
// Because we have a separate `include/` and `src/` directory, dds
// will only expose the `include/` directory to clients, and the
// header copies in `src/` are not externally visible.
//
// For example, the `include/sodium/version.h` file is visible to
// clients as `sodium/version.h`, but libsodium itself tries to
// include it as `version.h` within its source files. When we copy
// from `include/`, we grab the relative path to `sodium/version.h`,
// strip the leading components to get `version.h`, and then join that
// path with the `to` path to generate the full destination at
// `src/version.h`
{
"copy": {
"from": "include/",
"to": "src/",
"strip-components": 1
}
}
]
}
}
}

.. _libsodium: https://doc.libsodium.org/
.. _Crypto++: https://cryptopp.com/

docs/guide/cmake.rst (+45 -203)

@@ -1,240 +1,82 @@
.. highlight:: cmake

Using ``dds`` Packages in a CMake Project
#########################################
Using ``dds`` in a CMake Project
################################

One of ``dds``'s primary goals is to inter-operate with other build systems
cleanly. One of ``dds``'s primary outputs is *libman* package indices. These
package indices can be imported into other build systems that support the
*libman* format.
cleanly. Because of CMake's ubiquity, ``dds`` includes built-in support for
emitting files that can be imported into CMake.

.. note::
``dds`` doesn't (yet) have a ready-made central repository of packages that
can be downloaded. You'll need to populate the local package catalog
appropriately. The default catalog file contains a limited set of useful
packages, but you may wish to add more for yourself.
.. seealso::

.. seealso:: Refer to :doc:`catalog` for information about remote packages.
Before reading this page, be sure to read the :ref:`build-deps.gen-libman`
section of the :doc:`build-deps` page, which will discuss how to use the
``dds build-deps`` subcommand.

.. _PMM: https://github.com/vector-of-bool/PMM

.. _CMakeCM: https://github.com/vector-of-bool/CMakeCM

.. _lm-cmake: https://raw.githubusercontent.com/vector-of-bool/libman/develop/cmake/libman.cmake


Generating a libman Index
*************************

Importing libman packages into a build system requires that we have a libman
index generated on the filesystem. **This index is not generated globally**: It
is generated on a per-build basis as part of the build setup. The index will
describe in build-system-agnostic terms how to include a set of packages and
libraries as part of a build.

``dds`` has first-class support for generating this index. The ``build-deps``
subcommand of ``dds`` will download and build a set of dependencies, and places
an ``INDEX.lmi`` file that can be used to import the built results.
.. seealso::

This page presents an involved and detailed process for importing
dependencies, but there's also an *easy mode* for a one-line solution. See:
:doc:`/howto/cmake`.

Declaring Dependencies
======================

``dds build-deps`` accepts a list of dependency statements as command-line
arguments, but it may be useful to specify those requirements in a file.

``dds build-deps`` accepts a JSON5 file describing the dependencies of a
project as well. This file is similar to a very stripped-down version of a
``dds`` :ref:`package manifest <pkgs.pkgs>`, and only includes the ``depends``
key. (The presence of any other key is an error.)

Here is a simple dependencies file that declares a single requirement:

.. code-block:: js
:caption: ``dependencies.json5``

{
depends: [
'neo-sqlite3^0.2.0',
]
}
.. _PMM: https://github.com/vector-of-bool/PMM


Building Dependencies and the Index
===================================
Generating a CMake Import File
******************************

We can invoke ``dds build-deps`` and give it the path to this file:
``build-deps`` accepts an ``--lmi-path`` argument, but also accepts a
``--cmake=<path>`` argument that serves a similar purpose: It will write a CMake
file to ``<path>`` that can be ``include()``'d into a CMake project:

.. code-block:: bash

$ dds build-deps --deps dependencies.json5

When finished, ``dds`` will write the build results into a subdirectory called
``_deps`` and generate a file named ``INDEX.lmi``. This file is ready to be
imported into any build system that can understand libman files (in our case,
CMake).

.. note::
The output directory and index filepath can be controlled with the
``--out`` and ``--lmi-path`` flags, respectively.


Importing into CMake
********************

We've generated a libman index and set of packages, and we want to import
them into CMake. CMake doesn't know how to do this natively, but there exists a
single-file module for CMake that allows CMake to import libraries from libman
indices without any additional work.

The module is not shipped with CMake, but is available online as a single
stand-alone file. The `libman.cmake <lm-cmake_>`_ file can be downloaded and
added to a project directly, or it can be obtained automatically through a
CMake tool like `PMM`_ (recommended).


Enabling *libman* Support in CMake via PMM
==========================================

Refer to the ``README.md`` file in `the PMM repo <PMM_>`_ for information on how
to get PMM into your CMake project. In short, download and place the
``pmm.cmake`` file in your repository, and ``include()`` the file near the top
of your ``CMakeLists.txt``::

include(pmm.cmake)

Once it has been included, you can call the ``pmm()`` function. To obtain
*libman*, we need to start by enabling `CMakeCM`_::

pmm(CMakeCM ROLLING)
$ dds build-deps "neo-sqlite3^0.2.0" --cmake=deps.cmake

.. warning::
It is not recommended to use the ``ROLLING`` mode, but it is the easiest to
use when getting started. For reproducible and reliable builds, you should
pin your CMakeCM version using the ``FROM <url>`` argument.
This will write a file ``./deps.cmake`` that we can ``include()`` from a CMake
project, which will then expose the ``neo-sqlite3`` package as a set of imported
targets.

Enabling CMakeCM will make available all of the CMake modules available in `the
CMakeCM repository <CMakeCM_>`_, which includes `libman.cmake <lm-cmake_>`_.

After the call to ``pmm()``, simply ``include()`` the ``libman`` module::
Using the CMake Import File
===========================

include(libman)
Once we have generated the CMake import file using ``dds build-deps``, we can
simply import it in our ``CMakeLists.txt``::

That's it! The only function from the module that we will care about for now
is the ``import_packages()`` function.


Importing Our Dependencies' Packages
====================================

To import a package from a libman tree, we need only know the *name* of the
package we wish to import. In our example case above, we depend on
``neo-sqlite3``, so we simply call the libman-CMake function
``import_packages()`` with that package name::

import_packages("neo-sqlite3")

You'll note that we don't request any particular version of the package: All
versioning resolution is handled by ``dds``. You'll also note that we don't
need to specify our transitive dependencies: This is handled by the libman
index that was generated by ``dds``: It will automatically ``import_packages()``
any of the transitive dependencies required.


Using Our Dependencies' Libraries
=================================
include(deps.cmake)

Like with ``dds``, CMake wants us to explicitly declare how our build targets
*use* other libraries. When we import a package from a libman index, the
import will generate CMake ``IMPORTED`` targets that can be linked against.
*use* other libraries. When we ``include()`` the generated CMake file, it will
generate ``IMPORTED`` targets that can be linked against.

In ``dds`` and in libman, a library is identified by a combination of
In ``dds`` (and in libman), a library is identified by a combination of
*namespace* and *name*, joined together with a slash ``/`` character. This
*qualified name* of a library is decided by the original package author, and
should be documented. In the case of ``neo-sqlite3``, the only target is
should be documented. In the case of ``neo-sqlite3``, the only library is
``neo/sqlite3``.

When the libman CMake module imports a library, it creates a qualified name
When the generated import file imports a library, it creates a qualified name
using a double-colon "``::``" instead of a slash. As such, our ``neo/sqlite3``
is imported in CMake as ``neo::sqlite3``. We can link against it as we would
with any other target::

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE neo::sqlite3)

Altogether, here is the final CMake file:

.. code-block::
:caption: ``CMakeLists.txt``
:linenos:

cmake_minimum_required(VERSION 3.15)
project(MyApplication VERSION 1.0.0)

include(pmm.cmake)
pmm(CMakeCM ROLLING)

include(libman)
import_packages("neo-sqlite3")

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE neo::sqlite3)


Additional PMM Support
**********************

The ``pmm()`` function also supports ``dds`` directly, similar to ``CMakeCM``
mode. This will automatically download a prebuilt ``dds`` for the host platform
and invoke ``dds build-deps`` in a single pass as part of CMake's configure
process. This is especially useful for a CI environment where you want to have
a stable ``dds`` version and always have your dependencies obtained
just-in-time.

To start, pass the ``DDS`` argument to ``pmm()`` to use it::

pmm(DDS)

.. note::
The ``_deps`` directory and ``INDEX.lmi`` file will be placed in the CMake
build directory, out of the way of the rest of the project.

.. note::
The version of ``dds`` that PMM downloads depends on the version of PMM
that is in use.

This alone won't do anything useful, because you'll need to tell it what
dependencies you want to install::

pmm(DDS DEP_FILES dependencies.json5)

You can also list your dependencies as an inline string in your CMakeLists.txt
instead of a separate file::

pmm(DDS DEPENDS neo-sqlite3^0.2.2)

Since you'll probably want to be using ``libman.cmake`` at the same time, the
calls for ``CMakeCM`` and ``DDS`` can simply be combined. This is how our new
CMake project might look:
add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE neo::sqlite3)

.. code-block::
:caption: ``CMakeLists.txt``
:linenos:

cmake_minimum_required(VERSION 3.15)
project(MyApplication VERSION 1.0.0)
.. _cmake.pmm:

include(pmm.cmake)
pmm(CMakeCM ROLLING
DDS DEPENDS neo-sqlite3^0.2.2
)
*Easy* Mode: PMM Support
************************

include(libman)
import_packages("neo-sqlite3")
`PMM`_ is the *Package Manager Manager*, and can be used to control and access
package managers from within CMake scripts. This includes controlling ``dds``.
With PMM, we can automate all of the previous steps into a single line.

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE neo::sqlite3)
For a complete rundown on using PMM to get dependencies via ``dds``, refer to
the :doc:`/howto/cmake` page.

This removes the requirement that we write a separate dependencies file, and we
no longer need to invoke ``dds build-deps`` externally, as it is all handled
by ``pmm``.
Using PMM removes the requirement that we write a separate dependencies file,
and we no longer need to invoke ``dds build-deps`` externally, as it is all
handled by PMM.
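
As a quick sketch (assuming ``pmm.cmake`` has been committed at the project
root; see the how-to for the full walkthrough)::

include(pmm.cmake)
pmm(DDS DEPENDS "neo-sqlite3^0.2.0")

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE neo::sqlite3)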

+ 3
- 2
docs/guide/index.rst View File

@@ -9,7 +9,8 @@ User Guide
packages
toolchains
source-dists
repo
catalog
pkg-cache
remote-pkgs
interdeps
build-deps
cmake

+ 2
- 2
docs/guide/interdeps.rst View File

@@ -85,8 +85,8 @@ versions of the dependency are supported.
Refer to: :ref:`deps.ranges.why-lowest`.

``dds`` compatible-version ranges are similar to the shorthand range specifiers
supported by ``npm`` and ``npm``-like tools. There are five (and a half)
version range formats available, listed in order of most-to-least restrictive:
supported by ``npm`` and ``npm``-like tools. There are four version range kinds
available, listed in order of most-to-least restrictive:

Exact: ``@1.2.3``
Specifies an *exact* requirement. The dependency must match the named

+ 39
- 12
docs/guide/packages.rst View File

@@ -56,7 +56,7 @@ If a file's extension is not listed in the table above, ``dds`` will ignore it.
.. note::
Although headers are not compiled, this does not mean they are ignored.
``dds`` still understands and respects headers, and they are collected
together as part of *source distribution*.
together as part of a *source distribution*.


.. _pkgs.apps-tests:
@@ -65,15 +65,42 @@ Applications and Tests
**********************

``dds`` will recognize certain compilable source files as belonging to
applications and tests. If a compilable source file stem ends with ``.main`` or
``.test``, that source file is assumed to correspond to an executable to
generate. The filename stem before the ``.main`` or ``.test`` will be used as
the name of the generated executable. For example:
applications and tests, depending on the filename's "stem", which is the part of
the filename not including the outermost file extension. If a compilable source
filename's stem ends with ``.main`` or ``.test``, that source file is assumed to
correspond to an executable to generate. The next-inner stem (the stem with the
``.main`` or ``.test`` removed as well) will be used as the name of the
generated executable. For example:

- ``foo.main.cpp`` will generate an executable named ``foo``.
- ``bar.test.cpp`` will generate an executable named ``bar``.
- ``cat-meow.main.cpp`` will generate an executable named ``cat-meow``.
- ``cats.musical.test.cpp`` will generate an executable named ``cats.musical``.
- Given ``foo.main.cpp``

- The stem is ``foo.main``, whose extension is ``.main``, so we will generate
an application.
- The stem of ``foo.main`` is ``foo``, so the executable will be named
``foo``.

- Given ``bar.test.cpp``

- The stem is ``bar.test``, whose extension is ``.test``, so we will generate
a test.
- The stem of ``bar.test`` is ``bar``, so we will generate an executable named
``bar``.

- Given ``cat-meow.main.cpp``

- The stem is ``cat-meow.main``, which has extension ``.main``, so it is an
application.
- The stem of ``cat-meow.main`` is ``cat-meow``, so we will generate an
executable named ``cat-meow``.

- Given ``cats.musical.test.cpp``

- The stem is ``cats.musical.test``, which has extension ``.test``, so this is
a test executable.
- The stem of ``cats.musical.test`` is ``cats.musical``, so we will generate
an executable named ``cats.musical``.
- Note that the dot in ``cats.musical`` is not significant, as ``dds`` does not
strip any further extensions.
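
In summary, an illustrative source tree sketch::

src/
foo.main.cpp            # -> executable `foo`
bar.test.cpp            # -> test executable `bar`
cat-meow.main.cpp       # -> executable `cat-meow`
cats.musical.test.cpp   # -> test executable `cats.musical`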

.. note::
``dds`` will automatically append the appropriate filename extension to the
@@ -161,7 +188,7 @@ In order for any code to compile and resolve these ``#include`` directives, the
``src/`` directory must be added to their *include search path*.

Because the ``#include`` directives are based on the *portable* source root,
the exactly location of ``src/`` is not important to the content of the
the exact location of ``src/`` is not important to the content of the
consuming source code, and can thus be relocated and renamed as necessary.
Consumers only need to update the path of the *include search path* in a single
location rather than modifying their source code.
@@ -285,8 +312,8 @@ The primary distribution format of packages that is used by ``dds`` is the

Packages are identified by a name/version pair, joined together by an ``@``
symbol. The version of a package must be a semantic version string. Together,
the ``name@version`` string forms the *package ID*, and it must be unique
within a repository or package catalog.
the ``name@version`` string forms the *package ID*, and it must be unique within
a repository or local package cache.

In order for a package to be exported by ``dds`` it must have a
``package.json5`` file at its package root. Three keys are required to be

+ 90
- 0
docs/guide/pkg-cache.rst View File

@@ -0,0 +1,90 @@
The Local Package Cache
#######################

``dds`` maintains a local cache of packages that it has obtained at the request
of a user. The packages themselves are stored as
:doc:`source distributions <source-dists>` (``dds`` does not store the binaries
that it builds within this package cache).


Reading Package Cache Contents
******************************

Most of the time, ``dds`` will manage the cache content silently, but it may be useful
to see what ``dds`` is currently storing away.

The content of the cache can be seen with the ``pkg ls`` subcommand::

> dds pkg ls

This will print the names of packages that ``dds`` has downloaded, as well as
the versions of each.
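
For example (the package list shown here is illustrative, and the exact output
format may differ)::

> dds pkg ls
neo-fun@0.6.0
spdlog@1.7.0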


Obtaining Packages
******************

.. seealso:: :doc:`remote-pkgs`

When ``dds`` builds a package, it will also build the dependency libraries of
that package. In order for the dependency build to succeed, it must have a
local copy of the source distribution of that dependency.

When ``dds`` performs dependency resolution, it will consider both locally
cached packages and packages that are available from any registered
:doc:`remote repositories <remote-pkgs>`. If the dependency solution requires any
packages that are not in the local cache, it will use the information in its
catalog database to obtain a source distribution for each missing package. These
source distributions will automatically be added to the local cache, and later
dependency resolutions will not need to download that package again.

This all happens automatically when a project is built: There is **no**
"``dds install``" subcommand.


Manually Downloading a Dependency
=================================

It may be useful to obtain a copy of the source distribution of a package
from a remote. The ``pkg get`` command can be used to do this::

> dds pkg get <name>@<version>

This will obtain the source distribution of the package matching the given
package ID and place that distribution in the current working directory, using the
package ID as the name of the source distribution directory::

$ dds pkg get spdlog@1.4.2
[ ... ]

$ ls .
.
..
spdlog@1.4.2

$ ls ./spdlog@1.4.2/
include/
src/
library.json5
package.json5


.. _repo.import-local:

Importing a Project into the Package Cache
*******************************************

``dds`` can only use packages that are available in the local cache. For
packages that have a listing in the catalog, this is not a problem. But if one
is developing a local package and wants to allow it to be used in another local
package, that can be done by importing that project into the package cache as a
regular package, as detailed in :ref:`sdist.import`::

> dds pkg import /path/to/project

This command will create a source distribution and place it in the local cache.
The package is now available to other projects on the local system.

.. note::
This doesn't import in "editable" mode: A snapshot of the package root
will be taken and imported to the local cache.

+ 231
- 0
docs/guide/remote-pkgs.rst View File

@@ -0,0 +1,231 @@
Remote Packages and Repositories
################################

.. highlight:: bash

``dds`` stores a local database of available packages, along with their
dependency statements and information about how a source distribution thereof
may be obtained.

This database is populated from *package repositories*: remote servers that
each serve their own database of packages, and may also host the packages
themselves. An arbitrary number of package repositories may be added to the
local database. When ``dds`` updates its package information, it will download
the package database from each registered remote and import the listings into
its own local database, making them available for download.


Viewing Available Packages
**************************

The default catalog database is stored in a user-local location, and the
available packages can be listed with ``dds pkg search``::

$ dds pkg search
Name: abseil
Versions: 2018.6.0, 2019.8.8, 2020.2.25
From: repo-1.dds.pizza

Name: asio
Versions: 1.12.0, 1.12.1, 1.12.2, 1.13.0, 1.14.0, 1.14.1, 1.16.0, 1.16.1
From: repo-1.dds.pizza

Name: boost.leaf
Versions: 0.1.0, 0.2.0, 0.2.1, 0.2.2, 0.2.3, 0.2.4, 0.2.5, 0.3.0
From: repo-1.dds.pizza

Name: boost.mp11
Versions: 1.70.0, 1.71.0, 1.72.0, 1.73.0
From: repo-1.dds.pizza

Optionally, one can search with a glob/fnmatch-style pattern::

$ dds pkg search 'neo-*'
Name: neo-buffer
Versions: 0.2.1, 0.3.0, 0.4.0, 0.4.1, 0.4.2
From: repo-1.dds.pizza

Name: neo-compress
Versions: 0.1.0, 0.1.1, 0.2.0
From: repo-1.dds.pizza

Name: neo-concepts
Versions: 0.2.2, 0.3.0, 0.3.1, 0.3.2, 0.4.0
From: repo-1.dds.pizza


Remote Repositories
*******************

A remote package repository consists of an HTTP(S) server serving the following:

1. An accessible directory containing source distributions of various packages,
and
2. An accessible database file that contains a listing of packages and some
repository metadata.

The exact details of the directory layout and database are not covered here, and
are not necessary to make use of a repository.

When ``dds`` uses a repository, it pulls down the database file and imports its
contents into its own local database, associating the imported package listings
with the remote repository which provides them. Pulling the entire database at
once allows ``dds`` to perform much faster dependency resolution and reduces
the round-trips associated with using a dynamic package repository.


Adding a Repository
===================

Adding a remote repository to the local database is done with a single command::

$ dds pkg repo add "https://repo-1.dds.pizza"
[info ] Pulling repository contents for repo-1.dds.pizza [https://repo-1.dds.pizza/]

This will tell ``dds`` to add ``https://repo-1.dds.pizza`` as a remote
repository and immediately pull its package listings for later lookup. This
initial update can be suppressed with the ``--no-update`` flag.

.. note::

After the initial ``pkg repo add``, the repository is *not* identified by its
URL, but by its *name*, which is provided by the repository itself. The name
is printed the first time it is added, and can be seen using ``pkg repo ls``.


Listing Repositories
====================

A list of package repositories can be seen with the ``pkg repo ls`` subcommand::

$ dds pkg repo ls
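
For example, with only the default repository registered, this might print
something like the following (output format illustrative)::

$ dds pkg repo ls
repo-1.dds.pizza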


Removing Repositories
=====================

A repository can be removed by the ``pkg repo remove`` subcommand::

$ dds pkg repo remove <name>

Where ``<name>`` is given as the *name* (not URL!) of the repository.

**Note** that removing a repository will make all of its corresponding remote
packages unavailable, while packages that have been pulled into the local cache
will remain available even after removing a repository.


Updating Repository Data
========================

Repository data and package listings can be updated with the ``pkg repo update``
subcommand::

$ dds pkg repo update

This will pull down the databases of all registered remote repositories. If
``dds`` can detect that a repository's database is unchanged since a prior
update, that update will be skipped.


The Default Repository
**********************

When ``dds`` first initializes its local package database, it will add a single
remote repository: ``https://repo-1.dds.pizza/``, which has the name
``repo-1.dds.pizza``. At the time of writing, this is the only official ``dds``
repository, and is populated sparsely with hand-curated and prepared packages.
In the future, the catalog of packages will grow and be partially automated.

There is nothing intrinsically special about this repository other than it being
the default when ``dds`` first creates its package database. It can be removed
like any other, should one want tighter control over package availability.


Managing a Repository
*********************

A ``dds`` repository is simply a directory of static files, so any HTTP server
that can serve from a filesystem can be used as a repository. ``dds`` also
ships with a subcommand, ``repoman``, that can be used to manage a repository
directory.


Initializing a Repository
=========================

Before anything can be done, a directory should be converted to a repository by
using ``repoman init``::

$ dds repoman init ./my-repo-dir --name=my-experimental-repo

This will add the basic metadata into ``./my-repo-dir`` such that ``dds`` will
be able to pull package data from it.

The ``--name`` argument should be used to give the repository a unique name. The
name should be globally unique to avoid collisions: When ``dds`` pulls a
repository that declares a given name, it will *replace* the package listings
associated with any repository of that name. As such, generic names like
``main`` or ``packages`` shouldn't be used in production.


Listing Contents
================

The packages in a repository can be listed using ``dds repoman ls <repo-dir>``.
This will simply print each package identifier that is present in the
repository.
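
For example (contents illustrative)::

$ dds repoman ls ./my-repo-dir
neo-fun@0.5.2
spdlog@1.7.0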


Importing Source Distributions
==============================

If you have a source distribution archive, it can be imported with the
appropriately named ``dds repoman import`` command::

$ dds repoman import ./my-repo some-pkg@1.2.3.tar.gz

Multiple archive paths may be provided to import them all at once.
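
For example (archive names illustrative)::

$ dds repoman import ./my-repo some-pkg@1.2.3.tar.gz other-pkg@4.5.6.tar.gz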


Adding a Package by URL
=======================

A repository can also list packages that it does not host itself. Such a package
listing can be added "by URL," where the URL tells ``dds`` how to pull the
source distribution of the package. Beyond basic HTTP(S) URLs, ``dds`` can also
clone packages via ``git``::

$ dds repoman add ./my-repo git+https://github.com/vector-of-bool/neo-fun.git#0.5.2

The above URL tells ``dds`` that it can use ``git clone`` against
``https://github.com/vector-of-bool/neo-fun.git`` and ask for tag ``0.5.2`` to
get a source distribution directory that can be imported. Note the fragment on
``git`` URLs! The fragment is required to specify the branch or tag to clone.

If the package is available on GitHub, ``dds`` has a shorthand URL for that::

$ dds repoman add ./my-repo github:vector-of-bool/neo-fun/0.6.0

The ``github:`` URL scheme tells ``dds`` to clone from GitHub. A ``github:`` URL
must have exactly three path elements to determine *what* to download:
``github:{owner}/{repository}/{branch-or-tag}``.

.. note::

The ``github:`` URL lacks an *authority* element, and as such *does not* use
the double-slash.

.. note::

``repoman add`` will immediately attempt to pull a source distribution from
the given URL so that it may import the package's metadata into its listing
database. You cannot add a URL that is not already accessible.


Removing Packages
=================

A package can be removed from a repository with
``dds repoman remove <repo-dir> <pkg-id>``, where ``<pkg-id>`` is the
``<name>@<version>`` of the package to remove.
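
For example::

$ dds repoman remove ./my-repo some-pkg@1.2.3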

+ 0
- 92
docs/guide/repo.rst View File

@@ -1,92 +0,0 @@
The Local Package Repository
############################

``dds`` maintains a local repository of packages that it has obtained at the
request of a user. The packages themselves are stored as
:doc:`source distributions <source-dists>` (``dds`` does not store the binaries
that it builds within the repository).


Reading Repository Contents
***************************

Most times, ``dds`` will manage the repository content silently, but it may be
useful to see what ``dds`` is currently storing away.

The content of the repository can be seen with the ``repo`` subcommand::

> dds repo ls

This will print the names of packages that ``dds`` has downloaded, as well as
the versions of each.


Obtaining Packages
******************

.. seealso:: :doc:`catalog`

When ``dds`` builds a package, it will also build the dependency libraries of
that package. In order for the dependency build to succeed, it must have a
local copy of the source distribution of that dependency.

When ``dds`` performs dependency resolution, it will consider both existing
packages in the local repository, as well as packages that are available from
the :doc:`package catalog <catalog>`. If the dependency solution requires any
packages that are not in the local repository, it will use the information in
the catalog to obtain a source distribution for each missing package. These
source distributions will automatically be added to the local repository, and
later dependency resolutions will not need to download that package again.


Manually Downloading a Dependency
=================================

It may be useful to obtain a copy of the source distribution of a package
contained in the catalog. The ``catalog get`` command can be used to do this::

> dds catalog get <name>@<version>

This will obtain the source distribution of the package matching the named
identifier and place that distribution in the current working directory, using the
package ID as the name of the source distribution directory::

$ dds catalog get spdlog@1.4.2
[ ... ]

$ ls .
.
..
spdlog@1.4.2

$ ls ./spdlog@1.4.2/
include/
src/
library.json5
package.json5


.. _repo.export-local:

Exporting a Project into the Repository
***************************************

``dds`` can only use packages that are available in the local repository. For
packages that have a listing in the catalog, this is not a problem. But if one
is developing a local package and wants to allow it to be used in another local
package, that can be done by exporting a source distribution from the package
root::

> dds sdist export

This command will create a source distribution and place it in the local
repository. The package is now available to other projects on the local system.

.. note::
This doesn't export in "editable" mode: A snapshot of the package root
will be taken and exported to the local repository.

If one tries to export a package root into a repository that already contains
a package with a matching identifier, ``dds`` will issue an error. If the
``--replace`` flag is specified with ``sdist export``, then ``dds`` will
forcibly replace the package in the local repository with a new copy.

+ 31
- 9
docs/guide/source-dists.rst View File

@@ -1,9 +1,9 @@
Source Distributions
####################

A *source distribution* is ``dds``'s primary format for consuming and
distributing packages. A source distribution, in essence, is a
:ref:`package root <pkgs.pkg-root>` archive that contains only the files
A *source distribution* (often abbreviated as "sdist") is ``dds``'s primary
format for consuming and distributing packages. A source distribution, in
essence, is a :ref:`package root <pkgs.pkg-root>` that contains only the files
necessary for ``dds`` to reproduce the full build of all libraries in the
package. The source distribution retains the directory structure of every
:ref:`source root <pkgs.source-root>` of the original package, and thus retains
@@ -18,7 +18,7 @@ Generating a Source Distribution
Generating a source distribution from a project directory is done with the
``sdist`` subcommand::

> dds sdist create
> dds pkg create

The above command can be executed within a package root, and the result will be
a gzip'd tar archive that reproduces the package's filesystem structure, but
@@ -26,8 +26,8 @@ only maintaining the files that are necessary for ``dds`` to reproduce a build
of that package.

The ``--project=<dir>`` flag can be provided to override the directory that
``dds`` will use as the package root. The default is the working directory of
the project.
``dds`` will use as the package root. The default is the current working
directory.

The ``--out=<path>`` flag can be provided to override the destination path of
the archive. The path should not name an existing file or directory. By default,
@@ -37,10 +37,32 @@ then ``dds`` will overwrite the destination if it names an existing file or
directory.
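
For example, a sketch that sets both flags explicitly (paths illustrative)::

> dds pkg create --project=./my-pkg --out=my-pkg.tar.gz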


Importing a Source Ditsribution
.. _sdist.import:

Importing a Source Distribution
*******************************

Given a source distribution archive, one can import the package into the local
repository with a single command::
package cache with a single command::

> dds pkg import some-package@1.2.3.tar.gz

You can also specify an HTTP or HTTPS URL of a source distribution archive, and
``dds`` will download and import it in a single step::

> dds pkg import https://example.org/path/to/sdist.tar.gz

Alternatively, if a directory correctly models a source distribution, then
that directory can be imported in the same manner::

> dds pkg import /path/to/some/project

Importing a package will allow projects on the system to use the imported
package as a dependency.

.. note::

> dds repo import some-package@1.2.3.tar.gz
If one tries to import a package root into the cache that already contains a
package with a matching identifier, ``dds`` will issue an error. This
behavior can be overridden by providing ``--if-exists=replace`` on the
command-line.
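
For example::

> dds pkg import /path/to/some/project --if-exists=replace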

+ 10
- 0
docs/guide/toolchains.rst View File

@@ -299,6 +299,16 @@ Specify *additional* compiler options, possibly per-language.

Specify *additional* link options to use when linking executables.

.. note::

``dds`` does not invoke the linker directly, but instead invokes the
compiler with the appropriate flags to perform linking. If you need to pass
flags directly to the linker, you will need to use the compiler's options to
direct flags through to the linker. With GNU-style compilers, this is
``-Wl,<linker-option>``. With MSVC, a separate flag ``/LINK`` must be
specified, and all following options are passed to the underlying
``link.exe``.
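
For example, a sketch of a toolchain fragment for a GNU-style compiler that
forwards an option to the linker (assuming the ``link_flags`` key that this
section describes):

.. code-block:: js

{
compiler_id: 'gnu',
// `-Wl,` forwards the option through the compiler driver to the linker:
link_flags: ['-Wl,--as-needed']
}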


``optimize``
------------

+ 214
- 0
docs/howto/cmake.rst View File

@@ -0,0 +1,214 @@
How Do I Use ``dds`` in a CMake Project?
########################################

.. highlight:: cmake

If you have a CMake project and you wish to pull your dependencies via ``dds``,
you're in luck: Such a process is explicitly supported. Here's the recommended
approach:

#. Download `the PMM script <pmm.cmake_>`_ and commit it into your CMake
project. [#f1]_
#. In your ``CMakeLists.txt``, ``include()`` ``pmm.cmake``.
#. Call ``pmm(DDS)`` and list your dependencies.

Below, we'll walk through this in more detail.

.. note::

You don't even have to have ``dds`` downloaded and present on your system to
use ``dds`` in PMM! Read on...


Using PMM
*********

`PMM`_ is the *Package Manager Manager* for CMake, and is designed to offer
greater integration between a CMake build and an external package management
tool. `PMM`_ supports Conan, vcpkg, and, of course, ``dds``.

.. seealso::

Refer to the ``README.md`` file in `the PMM repo <PMM_>`_ for information on
how to use PMM.


Getting PMM
===========

To use PMM, you need to download only one file and commit it to your project:
`pmm.cmake`_, the entrypoint for PMM [#f1]_. It does not matter where the
``pmm.cmake`` script is placed, but take note of its path for the ``include()``
step below.

``pmm.cmake`` should be committed to the project because it contains version
pinning settings for PMM and can be customized on a per-project basis to alter
its behavior for a particular project's needs.


Including PMM
=============

Suppose I have downloaded and committed `pmm.cmake`_ into the ``tools/``
subdirectory of my CMake project. To use it in CMake, I first need to
``include()`` the script. The simplest way is to ``include()`` the file by path:

.. code-block::
:caption: CMakeLists.txt
:emphasize-lines: 4

cmake_minimum_required(VERSION 3.12)
project(MyApplication VERSION 2.1.3)

include(tools/pmm.cmake)

The ``include()`` command should specify the path to ``pmm.cmake``, including
the file extension, relative to the directory of the CMake script that performs
the ``include()``.


Running PMM
===========

Simply ``include()``-ing PMM won't do much, because we need to actually *invoke
it*.

PMM's main CMake command is ``pmm()``. It takes a variety of options and
arguments for the package managers it supports, but we'll only focus on ``dds``
for now.

The basic signature of the ``pmm(DDS)`` command looks like this::

pmm(DDS [DEP_FILES [filepaths...]]
[DEPENDS [dependencies...]]
[TOOLCHAIN file-or-id])

The most straightforward usage is with only the ``DEPENDS`` argument. For
example, if we want to import `{fmt} <https://fmt.dev>`_::

pmm(DDS DEPENDS "fmt^7.0.3")

When CMake executes the ``pmm(DDS ...)`` line above, PMM will download the
appropriate ``dds`` executable for your platform, generate
:doc:`a dds toolchain </guide/toolchains>` based on the CMake environment, and
then invoke ``dds build-deps`` to build the dependencies that were listed in the
``pmm()`` invocation. The results from ``build-deps`` are then imported into
CMake as ``IMPORTED`` targets that can be used by the containing CMake project.

.. seealso::

For more in-depth discussion on ``dds build-deps``, refer to
:doc:`/guide/build-deps`.

.. note::
The ``_deps`` directory and generated CMake imports file will be placed in
the CMake build directory, out of the way of the rest of the project.

.. note::
The version of ``dds`` that PMM downloads depends on the version of PMM
that is in use.


Using the ``IMPORTED`` Targets
==============================

Like with ``dds``, CMake wants us to explicitly declare how our build targets
*use* other libraries. After ``pmm(DDS)`` executes, there will be ``IMPORTED``
targets that can be linked against.

In ``dds`` (and in libman), a library is identified by a combination of
*namespace* and *name*, joined together with a slash ``/`` character. This
*qualified name* of a library is decided by the original package author or
maintainer, and should be documented. In the case of ``fmt``, the only library
is ``fmt/fmt``.

When ``pmm(DDS)`` imports a library, it creates a qualified name using a
double-colon "``::``" instead of a slash. As such, our ``fmt/fmt`` is imported
in CMake as ``fmt::fmt``. We can link against it as we would with any other
target::

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE fmt::fmt)

This will allow us to use **{fmt}** in our CMake project as an external
dependency.

In all, this is our final ``CMakeLists.txt``:

.. code-block::
:caption: ``CMakeLists.txt``

cmake_minimum_required(VERSION 3.12)
project(MyApplication VERSION 2.1.3)

include(tools/pmm.cmake)
pmm(DDS DEPENDS fmt^7.0.3)

add_executable(my-application app.cpp)
target_link_libraries(my-application PRIVATE fmt::fmt)


Changing Compile Options
************************

``dds`` supports setting compilation options using
:doc:`toolchains </guide/toolchains>`. PMM supports specifying a toolchain using
the ``TOOLCHAIN`` argument::

pmm(DDS DEPENDS fmt^7.0.3 TOOLCHAIN my-toolchain.json5)

Of course, writing a separate toolchain file just for your dependencies can be
tedious. For this reason, PMM will write a toolchain file on-the-fly when it
executes ``dds``. The generated toolchain is created based on the current CMake
settings when ``pmm()`` was executed.

To add compile options, simply ``add_compile_options``::

add_compile_options(-fsanitize=address)
pmm(DDS ...)

The above will cause all ``dds``-built dependencies to compile with
``-fsanitize=address`` as a command-line option.

The following CMake variables and directory properties are used to generate the
``dds`` toolchain:

``COMPILE_OPTIONS``
Adds additional compiler options. Should be provided by
``add_compile_options``.

``COMPILE_DEFINITIONS``
Add preprocessor definitions. Should be provided by
``add_compile_definitions``.

``CXX_STANDARD``
Control the ``cxx_version`` in the toolchain.

``CMAKE_MSVC_RUNTIME_LIBRARY``
Sets the ``runtime`` option. This option has limited support for generator
expressions.

``CMAKE_C_FLAGS`` and ``CMAKE_CXX_FLAGS``, and their per-config variants
Set the basic compile flags for the respective file types.

``CXX_COMPILER_LAUNCHER``
Allow providing a compiler launcher, e.g. ``ccache``.

.. note::

Calls to ``add_compile_options``, ``add_compile_definitions``, or other CMake
settings should appear *before* calling ``pmm(DDS)``, since the toolchain file
is generated and dependencies are built at that point.

``add_link_options`` has no effect on the ``dds`` toolchain, as ``dds`` does
not generate any runtime binaries.

.. rubric:: Footnotes

.. [#f1]
Do not use ``file(DOWNLOAD)`` to "automatically" obtain `pmm.cmake`_. The
``pmm.cmake`` script is already built to do this for the rest of PMM. The
`pmm.cmake`_ script itself is very small and is *designed* to be copy-pasted
and committed into other projects.

.. _PMM: https://github.com/vector-of-bool/pmm
.. _pmm.cmake: https://github.com/vector-of-bool/pmm/raw/master/pmm.cmake

+ 124
- 0
docs/howto/deps.rst View File

@@ -0,0 +1,124 @@
How Do I Use Other Libraries as Dependencies?
#############################################

Of course, fundamental to any build system is the question of consuming
dependencies. ``dds`` takes an approach that is both familiar and novel.

The *Familiar*:
Dependencies are listed in a project's package manifest file
(``package.json5``, for ``dds``).

A range of acceptable versions is provided in the package manifest, which
tells ``dds`` and your consumers what versions of a particular dependency are
allowed to be used with your package.

Transitive dependencies are resolved and pulled the same as if they were
listed in the manifest as well.

The *Novel*:
``dds`` does not have a separate "install" step. Instead, whenever a
``dds build`` is executed, the dependencies are resolved, downloaded, extracted,
and compiled. Of course, ``dds`` caches every step of this process, so you'll
only see the download, extraction, and compilation steps when you add a new
dependency.
Additionally, changes in the toolchain will necessitate that all the
dependencies be re-compiled. Since the compilation of dependencies happens
alongside the main project, the same caching layer that provides incremental
compilation to your own project will be used to perform incremental
compilation of your dependencies as well.

.. seealso:: :doc:`/guide/interdeps`


Listing Package Dependencies
****************************

Suppose you have a project and you wish to use
`spdlog <https://github.com/gabime/spdlog>`_ for your logging. To begin, we need
to find a ``spdlog`` package. We can search via ``dds pkg search``::

$ dds pkg search spdlog
Name: spdlog
Versions: 1.4.0, 1.4.1, 1.4.2, 1.5.0, 1.6.0, 1.6.1, 1.7.0
From: repo-1.dds.pizza
No description

.. note::
If you do not see any results, you may need to add the main repository to
your package database. Refer to :doc:`/guide/remote-pkgs`.

In the output above, we can see one ``spdlog`` group with several available
versions. Let's pick the newest available, ``1.7.0``.

If you've followed at least the :doc:`Hello, World tutorial </tut/hello-world>`,
you should have at least a ``package.json5`` file present. Dependencies are
listed in the ``package.json5`` file under the ``depends`` key as an array of
dependency statement strings:

.. code-block:: js
:emphasize-lines: 5-7

{
name: 'my-application',
version: '1.2.3',
namespace: 'myself',
depends: [
"spdlog^1.7.0"
]
}

The string ``"spdlog^1.7.0"`` is a *dependency statement*, and says that we want
``spdlog``, with minimum version ``1.7.0``, but less than version ``2.0.0``.
Refer to :ref:`deps.ranges` for information on the version range syntax.

This is enough that ``dds`` knows about our dependency, but there is another
step that we need to take:


Listing Usage Requirements
**************************

The ``depends`` key declares package-level dependencies, but we need to tell ``dds`` that
we want to *use* a library from that package. For this, we need to provide a
``library.json5`` file alongside the ``package.json5`` file.

.. seealso::
The ``library.json5`` file is discussed in :ref:`pkgs.libs` and
:ref:`deps.lib-deps`.

We use the aptly-named ``uses`` key in ``library.json5`` to specify what
libraries we wish to use from our package dependencies. In this case, the
library from ``spdlog`` is named ``spdlog/spdlog``:

.. code-block:: js

{
name: 'my-application',
uses: [
'spdlog/spdlog'
]
}


Using Dependencies
******************

We've prepared our ``package.json5`` and our ``library.json5``, so how do we get
the dependencies and use them in our application?

Simply *use them*. There is no separate "install" step. Write your application
as normal:

.. code-block:: cpp
:caption: src/app.main.cpp

#include <spdlog/spdlog.h>

int main() {
spdlog::info("Hello, dependency!");
}

Now, when you run ``dds build``, you'll see ``dds`` automatically download
``spdlog`` *as well as* ``fmt`` (a dependency of ``spdlog``), and then build all
three components *simultaneously*. The result will be an ``app`` executable that
uses ``spdlog``.

+ 11
- 0
docs/howto/index.rst View File

@@ -0,0 +1,11 @@
How-To's
########

These pages will discuss some common "How-do-I...?" questions.


.. toctree::
:maxdepth: 2

deps
cmake

+ 1
- 0
docs/index.rst View File

@@ -15,6 +15,7 @@ the :doc:`tut/index` page.
:maxdepth: 2

tut/index
howto/index
guide/index
design
dev/index

+ 1
- 1
docs/tut/hello-test.rst View File

@@ -98,7 +98,7 @@ leave the condition the same, though:
Now running ``dds build`` will print more output that Catch has generated as
part of test execution, and we can see the reason for the failing test::

[16:41:45] [error] Test <root>/_build/test/hello/strings failed! Output:
[error] Test <root>/_build/test/hello/strings failed! Output:

~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
strings is a Catch v2.10.2 host application.

+ 7
- 3
docs/tut/hello-world.rst View File

@@ -168,9 +168,9 @@ If you run the ``dds build`` command again, you will now see an error:

.. code-block:: text

[12:55:25] [info ] [dds-hello] Link: hello-world
[12:55:25] [info ] [dds-hello] Link: hello-world - 57ms
[12:55:25] [error] Failed to link executable '<root>/_build/hello-world'.
[info ] [dds-hello] Link: hello-world
[info ] [dds-hello] Link: hello-world - 57ms
[error] Failed to link executable '<root>/_build/hello-world'.
...
<additional lines follow>

@@ -238,6 +238,10 @@ package root:

Rebuilding the project will show no difference at the moment.

.. note::
You may also use a ``.jsonc`` or ``.json`` file extension. ``dds`` will
search for all of these files, but they will all be parsed as JSON5.

.. seealso::
Creating a single application executable is fine and all, but what if we
want to create libraries? See the next page: :doc:`hello-lib`

+ 12
- 7
docs/tut/index.rst View File

@@ -5,15 +5,20 @@ The child pages here contain introductory material for getting started with
using ``dds``. If you don't know where to start, this will be a good
place to begin!

.. seealso::

For more focused "how-do-i" topics, refer to :doc:`/howto/index`.

.. note::
The shell samples in these pages are written with Unix-flavored commands,
but the analogous process will work just as well on Windows systems.
Translate as appropriate.


.. toctree::
:maxdepth: 2

install
hello-world
hello-lib
hello-test

+ 154
- 0
docs/tut/install.rst View File

@@ -0,0 +1,154 @@
Getting/Installing ``dds``
##########################

``dds`` ships as a single statically linked executable. It does not have any
installer or distribution package.


Downloading
***********

Downloads are available on `the main dds website <https://dds.pizza/downloads>`_
as well as
`the GitHub Releases page <https://github.com/vector-of-bool/dds/releases>`_.
Select the executable appropriate for your platform.

Alternatively, the appropriate executable can be downloaded directly from the
command-line with an easy-to-remember URL. Using ``curl``:

.. code-block:: sh

# For Linux, writes a file in the working directory called "dds"
curl dds.pizza/get/linux -Lo dds

# For macOS, writes a file in the working directory called "dds"
curl dds.pizza/get/macos -Lo dds

Or using PowerShell on Windows:

.. code-block:: powershell

# Writes a file in the working directory called "dds.exe"
Invoke-WebRequest dds.pizza/get/windows -OutFile dds.exe

**On Linux, macOS, or other Unix-like system**, you will need to mark the
downloaded file as executable:

.. code-block:: sh

# Add the executable bit to the file mode for the file named "dds"
chmod +x dds


Installing
**********

Note that it is not necessary to "install" ``dds`` before it can be used.
``dds`` is a single standalone executable that can be executed in whatever
directory it is placed. If you are running a CI process and need ``dds``, it is
viable to simply download the executable, place it in your source tree, and
execute it from that directory.

**However:** If you want to be able to execute ``dds`` with an unqualified
command name from any shell interpreter, you will need to place ``dds`` on a
directory on your shell's ``PATH`` environment variable.


Easy Mode: ``install-yourself``
===============================

``dds`` includes a subcommand "``install-yourself``" that will move its own
executable to a predetermined directory and ensure that it exists on your
``PATH`` environment variable. It is simple enough to run the command::

$ ./dds install-yourself

This will copy the executable ``./dds`` into a directory designated for
user-local executable binaries. On Unix-like systems, this is
``~/.local/bin``, and on Windows this is ``%LocalAppData%/bin``. ``dds`` will
also ensure that the destination directory is available on the ``PATH``
environment variable for your user profile.

.. note::

If ``dds`` reports that it has modified your ``PATH``, you will need to restart
your command line and any other applications that wish to see ``dds`` on your
``PATH``.


Manually: On Unix-like Systems
==============================

For an **unprivileged, user-specific installation (preferred)**, we recommend
placing ``dds`` in ``~/.local/bin`` (Where ``~`` represents the ``$HOME``
directory of the current user).

Although not officially standardized,
`the XDG Base Directory specification <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_
recommends that several related directories live within ``~/.local`` (and ``dds``
itself follows those recommendations for the most part).
`The systemd file hierarchy <https://www.freedesktop.org/software/systemd/man/file-hierarchy.html>`_
also recommends placing user-local binaries in ``~/.local/bin``, and several
Linux distributions' shell packages add ``~/.local/bin`` to the startup
``$PATH``.

Placing a file in ``~/.local/bin`` requires no privileges beyond those of the
current user, and keeps the installation well isolated from other users on the
system. Other tools (e.g. ``pip``) will also use ``~/.local/bin`` for the
installation of user-local scripts and commands.

.. note::

On some shells, ``~/.local/bin`` is not an entry on ``$PATH`` by default.
Check if your shell's default ``$PATH`` environment variable contains
``.local/bin``. If it does not, refer to your shell's documentation on how to
add this directory to the startup ``$PATH``.

For a **system-wide installation**, place the downloaded ``dds`` executable
within the ``/usr/local/bin/`` directory. This will be a directory on the
``PATH`` for any Unix-like system.

.. note::

**DO NOT** place ``dds`` in ``/usr/bin`` or ``/bin``: These are reserved for
your system's package management utilities.


Manually: On Windows
====================

Unlike Unix-like systems, Windows does not have a directory designated for
user-installed binaries that lives on the ``PATH``. If you have a directory that
you use for custom binaries, simply place ``dds.exe`` in that directory.

If you are unfamiliar with placing binaries and modifying your ``PATH``, read
on:

For an **unprivileged, user-specific installation**, ``dds`` should be placed in
a user-local directory, and that directory should be added to the user ``PATH``.

To emulate what ``dds install-yourself`` does, follow these steps (steps 1 and 2
are also shown as a PowerShell sketch after the list):

#. Create a directory ``%LocalAppData%\bin\`` if it does not exist.

For ``cmd.exe``:

.. code-block:: batch

md %LocalAppData%\bin

Or for PowerShell:

.. code-block:: powershell

md $env:LocalAppData\bin

#. Copy ``dds.exe`` into the ``%LocalAppData%\bin`` directory.
#. Go to the Start Menu, and run "Edit environment variables for your account"
#. In the upper area, find and open the entry for the "Path" variable.
#. Add an entry in "Path" for ``%LocalAppData%\bin``.
#. Confirm your edits.
#. Restart any applications that require the modified environment, including
command-lines.
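
As a sketch, steps 1 and 2 can also be performed directly from PowerShell:

.. code-block:: powershell

# Create the user-local binary directory if it does not already exist:
md $env:LocalAppData\bin -Force
# Copy the downloaded executable into it:
Copy-Item .\dds.exe $env:LocalAppData\bin\

The remaining steps edit the user ``PATH`` through the environment-variable
editor and must still be performed by hand.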

If the above steps are performed successfully, you should be able to open a new
command window and execute ``dds --help`` to get the help output.

+ 1
- 1
package.jsonc View File

@@ -1,7 +1,7 @@
{
"$schema": "./res/package-schema.json",
"name": "dds",
"version": "0.1.0-alpha.5",
"version": "0.1.0-alpha.6",
"namespace": "dds",
"depends": [
"spdlog@1.7.0",

+ 354
- 1
poetry.lock View File

@@ -1,3 +1,11 @@
[[package]]
name = "alabaster"
version = "0.7.12"
description = "A configurable sidebar-enabled Sphinx theme"
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "apipkg"
version = "1.5"
@@ -42,6 +50,33 @@ docs = ["furo", "sphinx", "zope.interface"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"]

[[package]]
name = "babel"
version = "2.9.0"
description = "Internationalization utilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.dependencies]
pytz = ">=2015.7"

[[package]]
name = "certifi"
version = "2020.12.5"
description = "Python package for providing Mozilla's CA Bundle."
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "chardet"
version = "4.0.0"
description = "Universal encoding detector for Python 2 and 3"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[[package]]
name = "colorama"
version = "0.4.4"
@@ -58,6 +93,14 @@ category = "main"
optional = false
python-versions = "*"

[[package]]
name = "docutils"
version = "0.16"
description = "Docutils -- Python Documentation Utilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[[package]]
name = "execnet"
version = "1.7.1"
@@ -72,6 +115,22 @@ apipkg = ">=1.4"
[package.extras]
testing = ["pre-commit"]

[[package]]
name = "idna"
version = "2.10"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "imagesize"
version = "1.2.0"
description = "Getting image size from png/jpeg/jpeg2000/gif file"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "importlib-metadata"
version = "3.1.1"
@@ -108,6 +167,20 @@ pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]

[[package]]
name = "jinja2"
version = "2.11.2"
description = "A very fast and expressive template engine."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[package.dependencies]
MarkupSafe = ">=0.23"

[package.extras]
i18n = ["Babel (>=0.8)"]

[[package]]
name = "json5"
version = "0.9.5"
@@ -127,6 +200,14 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "markupsafe"
version = "1.1.1"
description = "Safely add untrusted strings to HTML/XML markup."
category = "dev"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*"

[[package]]
name = "mccabe"
version = "0.6.1"
@@ -192,6 +273,14 @@ category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "pygments"
version = "2.7.3"
description = "Pygments is a syntax highlighting package written in Python."
category = "dev"
optional = false
python-versions = ">=3.5"

[[package]]
name = "pylint"
version = "2.6.0"
@@ -281,6 +370,32 @@ pytest-forked = "*"
psutil = ["psutil (>=3.0)"]
testing = ["filelock"]

[[package]]
name = "pytz"
version = "2020.5"
description = "World timezone definitions, modern and historical"
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "requests"
version = "2.25.1"
description = "Python HTTP for Humans."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[package.dependencies]
certifi = ">=2017.4.17"
chardet = ">=3.0.2,<5"
idna = ">=2.5,<3"
urllib3 = ">=1.21.1,<1.27"

[package.extras]
security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"]
socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"]

[[package]]
name = "rope"
version = "0.18.0"
@@ -308,6 +423,116 @@ category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "snowballstemmer"
version = "2.0.0"
description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms."
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "sphinx"
version = "3.4.1"
description = "Python documentation generator"
category = "dev"
optional = false
python-versions = ">=3.5"

[package.dependencies]
alabaster = ">=0.7,<0.8"
babel = ">=1.3"
colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""}
docutils = ">=0.12"
imagesize = "*"
Jinja2 = ">=2.3"
packaging = "*"
Pygments = ">=2.0"
requests = ">=2.5.0"
snowballstemmer = ">=1.1"
sphinxcontrib-applehelp = "*"
sphinxcontrib-devhelp = "*"
sphinxcontrib-htmlhelp = "*"
sphinxcontrib-jsmath = "*"
sphinxcontrib-qthelp = "*"
sphinxcontrib-serializinghtml = "*"

[package.extras]
docs = ["sphinxcontrib-websupport"]
lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.790)", "docutils-stubs"]
test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"]

[[package]]
name = "sphinxcontrib-applehelp"
version = "1.0.2"
description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books"
category = "dev"
optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]

[[package]]
name = "sphinxcontrib-devhelp"
version = "1.0.2"
description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document."
category = "dev"
optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]

[[package]]
name = "sphinxcontrib-htmlhelp"
version = "1.0.3"
description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
category = "dev"
optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest", "html5lib"]

[[package]]
name = "sphinxcontrib-jsmath"
version = "1.0.1"
description = "A sphinx extension which renders display math in HTML via JavaScript"
category = "dev"
optional = false
python-versions = ">=3.5"

[package.extras]
test = ["pytest", "flake8", "mypy"]

[[package]]
name = "sphinxcontrib-qthelp"
version = "1.0.3"
description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document."
category = "dev"
optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]

[[package]]
name = "sphinxcontrib-serializinghtml"
version = "1.1.4"
description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)."
category = "dev"
optional = false
python-versions = ">=3.5"

[package.extras]
lint = ["flake8", "mypy", "docutils-stubs"]
test = ["pytest"]

[[package]]
name = "toml"
version = "0.10.2"
@@ -332,6 +557,19 @@ category = "main"
optional = false
python-versions = "*"

[[package]]
name = "urllib3"
version = "1.26.2"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"

[package.extras]
brotli = ["brotlipy (>=0.6.0)"]
secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]

[[package]]
name = "wrapt"
version = "1.12.1"
@@ -363,9 +601,13 @@ testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake
[metadata]
lock-version = "1.1"
python-versions = "^3.6"
content-hash = "5c3cefd7d2a4b573928b14dc6291fbb7ef8a8a29306f7982ad64db4cb615e6e5"
content-hash = "d762128dfce333176ad89e2c60a91113c56efff1539f9ca1c7ab490c7ac05067"

[metadata.files]
alabaster = [
{file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"},
{file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
]
apipkg = [
{file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"},
{file = "apipkg-1.5.tar.gz", hash = "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6"},
@@ -382,6 +624,18 @@ attrs = [
{file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"},
{file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"},
]
babel = [
{file = "Babel-2.9.0-py2.py3-none-any.whl", hash = "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5"},
{file = "Babel-2.9.0.tar.gz", hash = "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"},
]
certifi = [
{file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"},
{file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"},
]
chardet = [
{file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"},
{file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
@@ -390,10 +644,22 @@ distro = [
{file = "distro-1.5.0-py2.py3-none-any.whl", hash = "sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799"},
{file = "distro-1.5.0.tar.gz", hash = "sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92"},
]
docutils = [
{file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"},
{file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"},
]
execnet = [
{file = "execnet-1.7.1-py2.py3-none-any.whl", hash = "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"},
{file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"},
]
idna = [
{file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
{file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"},
]
imagesize = [
{file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"},
{file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"},
]
importlib-metadata = [
{file = "importlib_metadata-3.1.1-py3-none-any.whl", hash = "sha256:6112e21359ef8f344e7178aa5b72dc6e62b38b0d008e6d3cb212c5b84df72013"},
{file = "importlib_metadata-3.1.1.tar.gz", hash = "sha256:b0c2d3b226157ae4517d9625decf63591461c66b3a808c2666d538946519d170"},
@@ -406,6 +672,10 @@ isort = [
{file = "isort-5.6.4-py3-none-any.whl", hash = "sha256:dcab1d98b469a12a1a624ead220584391648790275560e1a43e54c5dceae65e7"},
{file = "isort-5.6.4.tar.gz", hash = "sha256:dcaeec1b5f0eca77faea2a35ab790b4f3680ff75590bfcb7145986905aab2f58"},
]
jinja2 = [
{file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"},
{file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"},
]
json5 = [
{file = "json5-0.9.5-py2.py3-none-any.whl", hash = "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c"},
{file = "json5-0.9.5.tar.gz", hash = "sha256:703cfee540790576b56a92e1c6aaa6c4b0d98971dc358ead83812aa4d06bdb96"},
@@ -433,6 +703,41 @@ lazy-object-proxy = [
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"},
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"},
]
markupsafe = [
{file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"},
{file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"},
{file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"},
{file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"},
{file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"},
{file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"},
{file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"},
{file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"},
{file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"},
{file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"},
{file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"},
{file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"},
{file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"},
{file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"},
{file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"},
{file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"},
{file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"},
{file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"},
{file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"},
{file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"},
{file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"},
{file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"},
{file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"},
{file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"},
{file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"},
{file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"},
{file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"},
{file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"},
{file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"},
{file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"},
{file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"},
{file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"},
{file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
@@ -469,6 +774,10 @@ py = [
{file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"},
{file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"},
]
pygments = [
{file = "Pygments-2.7.3-py3-none-any.whl", hash = "sha256:f275b6c0909e5dafd2d6269a656aa90fa58ebf4a74f8fcf9053195d226b24a08"},
{file = "Pygments-2.7.3.tar.gz", hash = "sha256:ccf3acacf3782cbed4a989426012f1c535c9a90d3a7fc3f16d231b9372d2b716"},
]
pylint = [
{file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"},
{file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"},
@@ -493,6 +802,14 @@ pytest-xdist = [
{file = "pytest-xdist-2.1.0.tar.gz", hash = "sha256:82d938f1a24186520e2d9d3a64ef7d9ac7ecdf1a0659e095d18e596b8cbd0672"},
{file = "pytest_xdist-2.1.0-py3-none-any.whl", hash = "sha256:7c629016b3bb006b88ac68e2b31551e7becf173c76b977768848e2bbed594d90"},
]
pytz = [
{file = "pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4"},
{file = "pytz-2020.5.tar.gz", hash = "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"},
]
requests = [
{file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"},
{file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"},
]
rope = [
{file = "rope-0.18.0.tar.gz", hash = "sha256:786b5c38c530d4846aa68a42604f61b4e69a493390e3ca11b88df0fbfdc3ed04"},
]
@@ -504,6 +821,38 @@ six = [
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
]
snowballstemmer = [
{file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"},
{file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"},
]
sphinx = [
{file = "Sphinx-3.4.1-py3-none-any.whl", hash = "sha256:aeef652b14629431c82d3fe994ce39ead65b3fe87cf41b9a3714168ff8b83376"},
{file = "Sphinx-3.4.1.tar.gz", hash = "sha256:e450cb205ff8924611085183bf1353da26802ae73d9251a8fcdf220a8f8712ef"},
]
sphinxcontrib-applehelp = [
{file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"},
{file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"},
]
sphinxcontrib-devhelp = [
{file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"},
{file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"},
]
sphinxcontrib-htmlhelp = [
{file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"},
{file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"},
]
sphinxcontrib-jsmath = [
{file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"},
{file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"},
]
sphinxcontrib-qthelp = [
{file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"},
{file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"},
]
sphinxcontrib-serializinghtml = [
{file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"},
{file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
@@ -536,6 +885,10 @@ typing-extensions = [
{file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"},
{file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
]
urllib3 = [
{file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"},
{file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"},
]
wrapt = [
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
]

+ 1
- 0
pyproject.toml View file

@@ -25,6 +25,7 @@ pylint = "^2.6.0"
mypy = "^0.790"
rope = "^0.18.0"
yapf = "^0.30.0"
Sphinx = "^3.4.1"

[tool.poetry.scripts]
dds-ci = "dds_ci.main:start"

+ 23
- 6
src/dds.main.cpp View file

@@ -1,12 +1,15 @@
#include <dds/cli/dispatch_main.hpp>
#include <dds/cli/options.hpp>
#include <dds/util/env.hpp>
#include <dds/util/log.hpp>
#include <dds/util/output.hpp>
#include <dds/util/signal.hpp>

#include <debate/debate.hpp>
#include <debate/enum.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fansi/styled.hpp>
#include <fmt/ostream.h>
#include <neo/event.hpp>

@@ -15,13 +18,15 @@
#include <iostream>
#include <locale>

using namespace fansi::literals;

static void load_locale() {
auto lang = std::getenv("LANG");
auto lang = dds::getenv("LANG");
if (!lang) {
return;
}
try {
std::locale::global(std::locale(lang));
std::locale::global(std::locale(*lang));
} catch (const std::runtime_error& e) {
// No locale with the given name
return;
@@ -35,6 +40,7 @@ int main_fn(std::string_view program_name, const std::vector<std::string>& argv)
std::setlocale(LC_CTYPE, ".utf8");

dds::install_signal_handlers();
dds::enable_ansi_console();

dds::cli::options opts;
debate::argument_parser parser;
@@ -51,12 +57,22 @@ int main_fn(std::string_view program_name, const std::vector<std::string>& argv)
},
[&](debate::unrecognized_argument,
debate::e_argument_parser p,
debate::e_arg_spelling arg) {
debate::e_arg_spelling arg,
debate::e_did_you_mean* dym) {
std::cerr << p.parser.usage_string(program_name) << '\n';
if (p.parser.subparsers()) {
fmt::print(std::cerr, "Unrecognized argument/subcommand: \"{}\"\n", arg.spelling);
fmt::print(std::cerr,
"Unrecognized argument/subcommand: \".bold.red[{}]\"\n"_styled,
arg.spelling);
} else {
fmt::print(std::cerr, "Unrecognized argument: \"{}\"\n", arg.spelling);
fmt::print(std::cerr,
"Unrecognized argument: \".bold.red[{}]\"\n"_styled,
arg.spelling);
}
if (dym) {
fmt::print(std::cerr,
" (Did you mean '.br.yellow[{}]'?)\n"_styled,
dym->candidate);
}
return 2;
},
@@ -105,7 +121,8 @@ int main_fn(std::string_view program_name, const std::vector<std::string>& argv)
return 2;
},
[&](debate::invalid_repitition, debate::e_argument_parser p, debate::e_arg_spelling sp) {
fmt::print(std::cerr << "{}\nArgument '{}' cannot be provided more than once\n",
fmt::print(std::cerr,
"{}\nArgument '{}' cannot be provided more than once\n",
p.parser.usage_string(program_name),
sp.spelling);
return 2;

+ 104
- 1
src/dds/build/builder.cpp View file

@@ -11,6 +11,7 @@
#include <dds/util/time.hpp>

#include <fansi/styled.hpp>
#include <fmt/ostream.h>

#include <array>
#include <set>
@@ -212,6 +213,95 @@ void write_lmi(build_env_ref env, const build_plan& plan, path_ref base_dir, pat
}
}

void write_lib_cmake(build_env_ref env,
std::ostream& out,
const package_plan& pkg,
const library_plan& lib) {
fmt::print(out, "# Library {}/{}\n", pkg.namespace_(), lib.name());
auto cmake_name = fmt::format("{}::{}", pkg.namespace_(), lib.name());
auto cm_kind = lib.archive_plan().has_value() ? "STATIC" : "INTERFACE";
fmt::print(
out,
"if(TARGET {0})\n"
" get_target_property(dds_imported {0} dds_IMPORTED)\n"
" if(NOT dds_imported)\n"
" message(WARNING [[A target \"{0}\" is already defined, and not by a dds import]])\n"
" endif()\n"
"else()\n",
cmake_name);
fmt::print(out,
" add_library({0} {1} IMPORTED GLOBAL)\n"
" set_property(TARGET {0} PROPERTY dds_IMPORTED TRUE)\n"
" set_property(TARGET {0} PROPERTY INTERFACE_INCLUDE_DIRECTORIES [[{2}]])\n",
cmake_name,
cm_kind,
lib.library_().public_include_dir().generic_string());
for (auto&& use : lib.uses()) {
fmt::print(out,
" set_property(TARGET {} APPEND PROPERTY INTERFACE_LINK_LIBRARIES {}::{})\n",
cmake_name,
use.namespace_,
use.name);
}
for (auto&& link : lib.links()) {
fmt::print(out,
" set_property(TARGET {} APPEND PROPERTY\n"
" INTERFACE_LINK_LIBRARIES $<LINK_ONLY:{}::{}>)\n",
cmake_name,
link.namespace_,
link.name);
}
if (auto& arc = lib.archive_plan()) {
fmt::print(out,
" set_property(TARGET {} PROPERTY IMPORTED_LOCATION [[{}]])\n",
cmake_name,
(env.output_root / arc->calc_archive_file_path(env.toolchain)).generic_string());
}
fmt::print(out, "endif()\n");
}

void write_cmake_pkg(build_env_ref env, std::ostream& out, const package_plan& pkg) {
fmt::print(out, "## Imports for {}\n", pkg.name());
for (auto& lib : pkg.libraries()) {
write_lib_cmake(env, out, pkg, lib);
}
fmt::print(out, "\n");
}

void write_cmake(build_env_ref env, const build_plan& plan, path_ref cmake_out) {
fs::create_directories(fs::absolute(cmake_out).parent_path());
auto out = open(cmake_out, std::ios::binary | std::ios::out);
out << "## This CMake file was generated by `dds build-deps`. DO NOT EDIT!\n\n";
for (const auto& pkg : plan.packages()) {
write_cmake_pkg(env, out, pkg);
}
}
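
// Illustrative sketch only (the package/library names "acme"/"widgets" and the bracketed
// paths are hypothetical, not taken from this commit): given the format strings above, a
// static library is imported roughly like so in the generated file:
//
//   ## Imports for acme
//   # Library acme/widgets
//   if(TARGET acme::widgets)
//     get_target_property(dds_imported acme::widgets dds_IMPORTED)
//     if(NOT dds_imported)
//       message(WARNING [[A target "acme::widgets" is already defined, and not by a dds import]])
//     endif()
//   else()
//     add_library(acme::widgets STATIC IMPORTED GLOBAL)
//     set_property(TARGET acme::widgets PROPERTY dds_IMPORTED TRUE)
//     set_property(TARGET acme::widgets PROPERTY INTERFACE_INCLUDE_DIRECTORIES [[...]])
//     set_property(TARGET acme::widgets PROPERTY IMPORTED_LOCATION [[...]])
//   endif()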

/**
* @brief Calculate a hash of the directory layout of the given directory.
*
* Because a tweaks-dir is specifically designed to have files added/removed within it, and
* its contents are inspected by `__has_include`, we need to have a way to invalidate any caches
* when the content of that directory changes. We don't care to hash the contents of the files,
* since those will already break any caches.
*/
std::string hash_tweaks_dir(const fs::path& tweaks_dir) {
if (!fs::is_directory(tweaks_dir)) {
return "0"; // No tweaks directory, no cache to bust
}
std::vector<fs::path> children{fs::recursive_directory_iterator{tweaks_dir},
fs::recursive_directory_iterator{}};
std::sort(children.begin(), children.end());
// A really simple inline djb2 hash
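// (djb2 is hash = hash * 33 + c with seed 5381; "(hash << 5) + hash" computes hash * 33.)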
std::uint32_t hash = 5381;
for (auto& p : children) {
for (std::uint32_t c : fs::weakly_canonical(p).string()) {
hash = ((hash << 5) + hash) + c;
}
}
return std::to_string(hash);
}
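
// For context, a sketch of the tweak-header pattern this supports (the header name
// "mylib.tweaks.hpp" is hypothetical; see the '--tweaks-dir' help text later in this
// commit for the article describing the pattern): a library conditionally includes its
// tweaks,
//
//   #if __has_include(<mylib.tweaks.hpp>)
//   #include <mylib.tweaks.hpp>
//   #endif
//
// so adding or removing a file in the tweaks-dir changes the build even though no tracked
// source file changed -- hence hashing the directory *layout* as a cache-buster.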

template <typename Func>
void with_build_plan(const build_params& params,
const std::vector<sdist_target>& sdists,
@@ -227,11 +317,20 @@ void with_build_plan(const build_params& params,
params.out_root,
db,
toolchain_knobs{
.is_tty = stdout_is_a_tty(),
.is_tty = stdout_is_a_tty(),
.tweaks_dir = params.tweaks_dir,
},
ureqs,
};

if (env.knobs.tweaks_dir) {
env.knobs.cache_buster = hash_tweaks_dir(*env.knobs.tweaks_dir);
dds_log(trace,
"Build cache-buster value for tweaks-dir [{}] content is '{}'",
*env.knobs.tweaks_dir,
*env.knobs.cache_buster);
}

if (st.generate_catch2_main) {
auto catch_lib = prepare_test_driver(params, test_lib::catch_main, env);
ureqs.add(".dds", "Catch-Main") = catch_lib;
@@ -286,5 +385,9 @@ void builder::build(const build_params& params) const {
if (params.emit_lmi) {
write_lmi(env, plan, params.out_root, *params.emit_lmi);
}

if (params.emit_cmake) {
write_cmake(env, plan, *params.emit_cmake);
}
});
}

+ 3
- 1
src/dds/build/params.hpp View file

@@ -12,9 +12,11 @@ struct build_params {
fs::path out_root;
std::optional<fs::path> existing_lm_index;
std::optional<fs::path> emit_lmi;
std::optional<fs::path> emit_cmake{};
std::optional<fs::path> tweaks_dir{};
dds::toolchain toolchain;
bool generate_compdb = true;
int parallel_jobs = 0;
};

} // namespace dds
} // namespace dds

+ 1
- 0
src/dds/cli/cmd/build.cpp View file

@@ -30,6 +30,7 @@ int build(const options& opts) {
.out_root = opts.out_path.value_or(fs::current_path() / "_build"),
.existing_lm_index = opts.build.lm_index,
.emit_lmi = {},
.tweaks_dir = opts.build.tweaks_dir,
.toolchain = opts.load_toolchain(),
.parallel_jobs = opts.jobs,
});

+ 2
- 0
src/dds/cli/cmd/build_deps.cpp View file

@@ -17,6 +17,8 @@ int build_deps(const options& opts) {
.out_root = opts.out_path.value_or(fs::current_path() / "_deps"),
.existing_lm_index = {},
.emit_lmi = opts.build.lm_index.value_or("INDEX.lmi"),
.emit_cmake = opts.build_deps.cmake_file,
.tweaks_dir = opts.build.tweaks_dir,
.toolchain = opts.load_toolchain(),
.parallel_jobs = opts.jobs,
};

+ 1
- 0
src/dds/cli/cmd/compile_file.cpp View file

@@ -11,6 +11,7 @@ int compile_file(const options& opts) {
.out_root = opts.out_path.value_or(fs::current_path() / "_build"),
.existing_lm_index = opts.build.lm_index,
.emit_lmi = {},
.tweaks_dir = opts.build.tweaks_dir,
.toolchain = opts.load_toolchain(),
.parallel_jobs = opts.jobs,
});

+ 433
- 0
src/dds/cli/cmd/install_yourself.cpp View file

@@ -0,0 +1,433 @@
#include "../options.hpp"

#include <dds/util/env.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/paths.hpp>
#include <dds/util/result.hpp>
#include <dds/util/string.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fansi/styled.hpp>
#include <neo/assert.hpp>
#include <neo/platform.hpp>
#include <neo/scope.hpp>

#ifdef __APPLE__
#include <mach-o/dyld.h>
#elif __FreeBSD__
#include <sys/sysctl.h>
#elif _WIN32
#include <windows.h>
// Must be included second:
#include <wil/resource.h>
#endif

using namespace fansi::literals;

namespace dds::cli::cmd {

namespace {

fs::path current_executable() {
#if __linux__
return fs::read_symlink("/proc/self/exe");
#elif __APPLE__
std::uint32_t len = 0;
_NSGetExecutablePath(nullptr, &len);
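// The null-buffer call fails by design, but writes the required buffer size into 'len'.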
std::string buffer;
buffer.resize(len + 1);
auto rc = _NSGetExecutablePath(buffer.data(), &len);
neo_assert(invariant, rc == 0, "Unexpected error from _NSGetExecutablePath()");
return fs::canonical(buffer);
#elif __FreeBSD__
std::string buffer;
int mib[] = {CTL_KERN, KERN_PROC, KERN_PROC_PATHNAME, -1};
std::size_t len = 0;
auto rc = ::sysctl(mib, 4, nullptr, &len, nullptr, 0);
neo_assert(invariant,
rc == 0,
"Unexpected error from ::sysctl() while getting executable path",
errno);
buffer.resize(len + 1);
rc = ::sysctl(mib, 4, buffer.data(), &len, nullptr, 0);
neo_assert(invariant,
rc == 0,
"Unexpected error from ::sysctl() while getting executable path",
errno);
return fs::canonical(buffer.data());
#elif _WIN32
std::wstring buffer;
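// Grow the buffer until GetModuleFileNameW can write the full path without truncating it.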
while (true) {
buffer.resize(buffer.size() + 32);
auto reallen
= ::GetModuleFileNameW(nullptr, buffer.data(), static_cast<DWORD>(buffer.size()));
if (reallen == buffer.size() && ::GetLastError() == ERROR_INSUFFICIENT_BUFFER) {
continue;
}
buffer.resize(reallen);
return fs::canonical(buffer);
}
#else
#error "No method of getting the current executable path is implemented on this system. FIXME!"
#endif
}

fs::path user_binaries_dir() noexcept {
#if _WIN32
return dds::user_data_dir() / "bin";
#else
return dds::user_home_dir() / ".local/bin";
#endif
}

fs::path system_binaries_dir() noexcept {
#if _WIN32
return "C:/bin";
#else
return "/usr/local/bin";
#endif
}

#if _WIN32
void fixup_path_env(const options& opts, const wil::unique_hkey& env_hkey, fs::path want_path) {
DWORD len = 0;
// Get the length
auto err = ::RegGetValueW(env_hkey.get(),
nullptr,
L"PATH",
RRF_RT_REG_EXPAND_SZ | RRF_RT_REG_SZ | RRF_NOEXPAND,
nullptr,
nullptr,
&len);
if (err != ERROR_SUCCESS) {
throw std::system_error(std::error_code(err, std::system_category()),
"Failed to access PATH environment variable [1]");
}
// Now get the value
std::wstring buffer;
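// RegGetValueW reported the size in bytes; wchar_t is two bytes on Windows, hence len / 2.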
buffer.resize(len / 2);
err = ::RegGetValueW(env_hkey.get(),
nullptr,
L"PATH",
RRF_RT_REG_EXPAND_SZ | RRF_RT_REG_SZ | RRF_NOEXPAND,
nullptr,
buffer.data(),
&len);
if (err != ERROR_SUCCESS) {
throw std::system_error(std::error_code(err, std::system_category()),
"Failed to access PATH environment variable [2]");
}
// Strip null-term
buffer.resize(len);
while (!buffer.empty() && buffer.back() == 0) {
buffer.pop_back();
}
// Check if we need to append the user-local binaries dir to the path
const auto want_entry = fs::path(want_path).make_preferred().lexically_normal();
const auto path_env_str = fs::path(buffer).string();
auto path_elems = split_view(path_env_str, ";");
const bool any_match = std::any_of(path_elems.cbegin(), path_elems.cend(), [&](auto view) {
auto existing = fs::weakly_canonical(view).make_preferred().lexically_normal();
dds_log(trace, "Existing PATH entry: '{}'", existing.string());
return existing.native() == want_entry.native();
});
if (any_match) {
dds_log(info, "PATH is up-to-date");
return;
}
if (opts.dry_run) {
dds_log(info, "The PATH environment variable would be modified.");
return;
}
// It's not there. Add it now.
auto want_str = want_entry.string();
path_elems.insert(path_elems.begin(), want_str);
auto joined = joinstr(";", path_elems);
buffer = fs::path(joined).native();
// Put the new PATH entry back into the environment
err = ::RegSetValueExW(env_hkey.get(),
L"Path",
0,
REG_EXPAND_SZ,
reinterpret_cast<const BYTE*>(buffer.data()),
(buffer.size() + 1) * 2);
if (err != ERROR_SUCCESS) {
throw std::system_error(std::error_code(err, std::system_category()),
"Failed to modify PATH environment variable");
}
dds_log(
info,
"The directory [.br.cyan[{}]] has been added to your PATH environment variables."_styled,
want_path.string());
dds_log(
info,
".bold.cyan[NOTE:] You may need to restart running applications to see this change!"_styled);
}
#endif

void fixup_system_path(const options& opts [[maybe_unused]]) {
#if !_WIN32
// We install into /usr/local/bin, and every nix-like system we support already has that on the
// global PATH
#else // Windows!
wil::unique_hkey env_hkey;
auto err = ::RegOpenKeyExW(HKEY_LOCAL_MACHINE,
L"SYSTEM\\CurrentControlSet\\Control\\Session Manager\\Environment",
0,
KEY_WRITE | KEY_READ,
&env_hkey);
if (err != ERROR_SUCCESS) {
throw std::system_error(std::error_code(err, std::system_category()),
"Failed to open user-local environment variables registry "
"entry");
}
fixup_path_env(opts, env_hkey, "C:/bin");
#endif
}

void fixup_user_path(const options& opts) {
#if !_WIN32
auto profile_file = dds::user_home_dir() / ".profile";
auto profile_content = dds::slurp_file(profile_file);
if (dds::contains(profile_content, "$HOME/.local/bin")) {
// We'll assume that this is properly loading .local/bin for .profile
dds_log(info, "[.br.cyan[{}]] is okay"_styled, profile_file.string());
} else if (opts.dry_run) {
dds_log(info,
"Would update [.br.cyan[{}]] to have ~/.local/bin on $PATH"_styled,
profile_file.string());
} else {
// Let's add it
profile_content
+= ("\n# This entry was added by 'dds install-yourself' for the user-local "
"binaries path\nPATH=$HOME/bin:$HOME/.local/bin:$PATH\n");
dds_log(info,
"Updating [.br.cyan[{}]] with a user-local binaries PATH entry"_styled,
profile_file.string());
auto tmp_file = profile_file;
tmp_file += ".tmp";
auto bak_file = profile_file;
bak_file += ".bak";
// Move .profile back into place if we abort for any reason
neo_defer {
if (!fs::exists(profile_file)) {
safe_rename(bak_file, profile_file);
}
};
// Write the temporary version
dds::write_file(tmp_file, profile_content).value();
// Make a backup
safe_rename(profile_file, bak_file);
// Move the tmp over the final location
safe_rename(tmp_file, profile_file);
// Okay!
dds_log(info,
"[.br.green[{}]] was updated. Prior contents are safe in [.br.cyan[{}]]"_styled,
profile_file.string(),
bak_file.string());
dds_log(
info,
".bold.cyan[NOTE:] Running applications may need to be restarted to see this change"_styled);
}

auto fish_config = dds::user_config_dir() / "fish/config.fish";
if (fs::exists(fish_config)) {
auto fish_config_content = slurp_file(fish_config);
if (dds::contains(fish_config_content, "$HOME/.local/bin")) {
// Assume that this is up-to-date
dds_log(info,
"Fish configuration in [.br.cyan[{}]] is okay"_styled,
fish_config.string());
} else if (opts.dry_run) {
dds_log(info,
"Would update [.br.cyan[{}]] to have ~/.local/bin on $PATH"_styled,
fish_config.string());
} else {
dds_log(
info,
"Updating Fish shell configuration [.br.cyan[{}]] with user-local binaries PATH entry"_styled,
fish_config.string());
fish_config_content
+= ("\n# This line was added by 'dds install-yourself' to add the user-local "
"binaries directory to $PATH\nset -x PATH $PATH \"$HOME/.local/bin\"\n");
auto tmp_file = fish_config;
auto bak_file = fish_config;
tmp_file += ".tmp";
bak_file += ".bak";
neo_defer {
if (!fs::exists(fish_config)) {
safe_rename(bak_file, fish_config);
}
};
// Write the temporary version
dds::write_file(tmp_file, fish_config_content).value();
// Make a backup
safe_rename(fish_config, bak_file);
// Move the temp over the destination
safe_rename(tmp_file, fish_config);
// Okay!
dds_log(info,
"[.br.green[{}]] was updated. Prior contents are safe in [.br.cyan[{}]]"_styled,
fish_config.string(),
bak_file.string());
dds_log(
info,
".bold.cyan[NOTE:] Running Fish shells will need to be restartred to see this change"_styled);
}
}
#else // _WIN32
wil::unique_hkey env_hkey;
auto err
= ::RegOpenKeyExW(HKEY_CURRENT_USER, L"Environment", 0, KEY_WRITE | KEY_READ, &env_hkey);
if (err != ERROR_SUCCESS) {
throw std::system_error(std::error_code(err, std::system_category()),
"Failed to open user-local environment variables registry "
"entry");
}
fixup_path_env(opts, env_hkey, "%LocalAppData%/bin");
#endif
}

void fixup_path(const options& opts) {
if (opts.install_yourself.where == opts.install_yourself.system) {
fixup_system_path(opts);
} else {
fixup_user_path(opts);
}
}

int _install_yourself(const options& opts) {
auto self_exe = current_executable();

auto dest_dir = opts.install_yourself.where == opts.install_yourself.user
? user_binaries_dir()
: system_binaries_dir();

auto dest_path = dest_dir / "dds";
if constexpr (neo::os_is_windows) {
dest_path += ".exe";
}

if (fs::absolute(dest_path).lexically_normal() == fs::canonical(self_exe)) {
dds_log(error,
"We cannot install over our own executable (.br.red[{}])"_styled,
self_exe.string());
return 1;
}

if (!fs::is_directory(dest_dir)) {
if (opts.dry_run) {
dds_log(info, "Would create directory [.br.cyan[{}]]"_styled, dest_dir.string());
} else {
dds_log(info, "Creating directory [.br.cyan[{}]]"_styled, dest_dir.string());
fs::create_directories(dest_dir);
}
}

if (opts.dry_run) {
if (fs::is_symlink(dest_path)) {
dds_log(info, "Would remove symlink [.br.cyan[{}]]"_styled, dest_path.string());
}
if (fs::exists(dest_path) && !fs::is_symlink(dest_path)) {
if (opts.install_yourself.symlink) {
dds_log(
info,
"Would overwrite .br.yellow[{0}] with a symlink .br.green[{0}] -> .br.cyan[{1}]"_styled,
dest_path.string(),
self_exe.string());
} else {
dds_log(info,
"Would overwrite .br.yellow[{}] with [.br.cyan[{}]]"_styled,
dest_path.string(),
self_exe.string());
}
} else {
if (opts.install_yourself.symlink) {
dds_log(info,
"Would create a symlink [.br.green[{}]] -> [.br.cyan[{}]]"_styled,
dest_path.string(),
self_exe.string());
} else {
dds_log(info,
"Would install [.br.cyan[{}]] to .br.yellow[{}]"_styled,
self_exe.string(),
dest_path.string());
}
}
} else {
if (fs::is_symlink(dest_path)) {
dds_log(info, "Removing old symlink file [.br.cyan[{}]]"_styled, dest_path.string());
dds::remove_file(dest_path).value();
}
if (opts.install_yourself.symlink) {
if (fs::exists(dest_path)) {
dds_log(info, "Removing previous file [.br.cyan[{}]]"_styled, dest_path.string());
dds::remove_file(dest_path).value();
}
dds_log(info,
"Creating symbolic link [.br.green[{}]] -> [.br.cyan[{}]]"_styled,
dest_path.string(),
self_exe.string());
dds::create_symlink(self_exe, dest_path).value();
} else {
dds_log(info,
"Installing [.br.cyan[{}]] to [.br.green[{}]]"_styled,
self_exe.string(),
dest_path.string());
dds::copy_file(self_exe, dest_path, fs::copy_options::overwrite_existing).value();
}
}

if (opts.install_yourself.fixup_path_env) {
fixup_path(opts);
}

if (!opts.dry_run) {
dds_log(info, "Success!");
}
return 0;
}

} // namespace

int install_yourself(const options& opts) {
return boost::leaf::try_catch(
[&] {
try {
return _install_yourself(opts);
} catch (...) {
capture_exception();
}
},
[](std::error_code ec, e_copy_file copy) {
dds_log(error,
"Failed to copy file [.br.cyan[{}]] to .br.yellow[{}]: .bold.red[{}]"_styled,
copy.source.string(),
copy.dest.string(),
ec.message());
return 1;
},
[](std::error_code ec, e_remove_file file) {
dds_log(error,
"Failed to delete file .br.yellow[{}]: .bold.red[{}]"_styled,
file.value.string(),
ec.message());
return 1;
},
[](std::error_code ec, e_symlink oper) {
dds_log(
error,
"Failed to create symlink from .br.yellow[{}] to [.br.cyan[{}]]: .bold.red[{}]"_styled,
oper.symlink.string(),
oper.target.string(),
ec.message());
return 1;
},
[](e_system_error_exc e) {
dds_log(error, "Failure while installing: {}", e.message);
return 1;
});
}

} // namespace dds::cli::cmd

src/dds/cli/cmd/sdist_create.cpp → src/dds/cli/cmd/pkg_create.cpp View file

@@ -5,11 +5,14 @@

#include <boost/leaf/common.hpp>
#include <boost/leaf/handle_exception.hpp>
#include <fansi/styled.hpp>
#include <fmt/core.h>

using namespace fansi::literals;

namespace dds::cli::cmd {

int sdist_create(const options& opts) {
int pkg_create(const options& opts) {
dds::sdist_params params{
.project_dir = opts.project_dir,
.dest_path = {},
@@ -23,20 +26,27 @@ int sdist_create(const options& opts) {
auto default_filename = fmt::format("{}.tar.gz", pkg_man.id.to_string());
auto filepath = opts.out_path.value_or(fs::current_path() / default_filename);
create_sdist_targz(filepath, params);
dds_log(info,
"Created source dirtribution archive: .bold.cyan[{}]"_styled,
filepath.string());
return 0;
},
[&](boost::leaf::bad_result, e_missing_file missing, e_human_message msg) {
dds_log(error,
"A required file is missing for creating a source distribution for [{}]",
params.project_dir.string());
dds_log(error, "Error: {}", msg.value);
dds_log(error, "Missing file: {}", missing.path.string());
dds_log(
error,
"A required file is missing for creating a source distribution for [.bold.yellow[{}]]"_styled,
params.project_dir.string());
dds_log(error, "Error: .bold.yellow[{}]"_styled, msg.value);
dds_log(error, "Missing file: .bold.red[{}]"_styled, missing.path.string());
write_error_marker("no-package-json5");
return 1;
},
[&](std::error_code ec, e_human_message msg, boost::leaf::e_file_name file) {
dds_log(error, "Error: {}", msg.value);
dds_log(error, "Failed to access file [{}]: {}", file.value, ec.message());
dds_log(error, "Error: .bold.red[{}]"_styled, msg.value);
dds_log(error,
"Failed to access file [.bold.red[{}]]: .br.yellow[{}]"_styled,
file.value,
ec.message());
write_error_marker("failed-package-json5-scan");
return 1;
});

+ 34
- 10
src/dds/cli/cmd/pkg_import.cpp View file

@@ -5,6 +5,7 @@
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fansi/styled.hpp>
#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/url/parse.hpp>
@@ -12,25 +13,40 @@
#include <iostream>
#include <string_view>

using namespace fansi::literals;

namespace dds::cli::cmd {

struct e_importing {
std::string value;
};

static int _pkg_import(const options& opts) {
return pkg_cache::with_cache( //
opts.pkg_cache_dir.value_or(pkg_cache::default_local_path()),
pkg_cache_flags::write_lock | pkg_cache_flags::create_if_absent,
[&](auto repo) {
for (std::string_view tgz_where : opts.pkg.import.items) {
neo_assertion_breadcrumbs("Importing sdist", tgz_where);
auto tmp_sd
= (tgz_where.starts_with("http://") || tgz_where.starts_with("https://"))
? download_expand_sdist_targz(tgz_where)
: expand_sdist_targz(tgz_where);
neo_assertion_breadcrumbs("Importing from temporary directory",
tmp_sd.tmpdir.path());
repo.add_sdist(tmp_sd.sdist, dds::if_exists(opts.if_exists));
// Lambda to import an sdist object
auto import_sdist
= [&](const sdist& sd) { repo.import_sdist(sd, dds::if_exists(opts.if_exists)); };

for (std::string_view sdist_where : opts.pkg.import.items) {
DDS_E_SCOPE(e_importing{std::string(sdist_where)});
neo_assertion_breadcrumbs("Importing sdist", sdist_where);
if (sdist_where.starts_with("http://") || sdist_where.starts_with("https://")) {
auto tmp_sd = download_expand_sdist_targz(sdist_where);
import_sdist(tmp_sd.sdist);
} else if (fs::is_directory(sdist_where)) {
auto sd = sdist::from_directory(sdist_where);
import_sdist(sd);
} else {
auto tmp_sd = expand_sdist_targz(sdist_where);
import_sdist(tmp_sd.sdist);
}
}
if (opts.pkg.import.from_stdin) {
auto tmp_sd = dds::expand_sdist_from_istream(std::cin, "<stdin>");
repo.add_sdist(tmp_sd.sdist, dds::if_exists(opts.if_exists));
repo.import_sdist(tmp_sd.sdist, dds::if_exists(opts.if_exists));
}
return 0;
});
@@ -52,6 +68,14 @@ int pkg_import(const options& opts) {
[](dds::e_sqlite3_error_exc e) {
dds_log(error, "Unexpected database error: {}", e.message);
return 1;
},
[](e_system_error_exc err, e_importing what) {
dds_log(
error,
"Error while importing source distribution from [.bold.red[{}]]: .br.yellow[{}]"_styled,
what.value,
err.message);
return 1;
});
}
} // namespace dds::cli::cmd

+ 1
- 1
src/dds/cli/cmd/pkg_repo_ls.cpp View file

@@ -13,7 +13,7 @@ static int _pkg_repo_ls(const options& opts) {
auto pkg_db = opts.open_pkg_db();
neo::sqlite3::database_ref db = pkg_db.database();

auto st = db.prepare("SELECT name, remote_url, db_mtime FROM dds_pkg_remotes");
auto st = db.prepare("SELECT name, url, db_mtime FROM dds_pkg_remotes");
auto tups = neo::sqlite3::iter_tuples<std::string, std::string, std::optional<std::string>>(st);
for (auto [name, remote_url, mtime] : tups) {
fmt::print("Remote '{}':\n", name);

+ 60
- 0
src/dds/cli/cmd/pkg_search.cpp View file

@@ -0,0 +1,60 @@
#include "../options.hpp"

#include <dds/error/nonesuch.hpp>
#include <dds/pkg/db.hpp>
#include <dds/pkg/search.hpp>
#include <dds/util/result.hpp>
#include <dds/util/string.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fansi/styled.hpp>
#include <fmt/format.h>
#include <range/v3/view/transform.hpp>

using namespace fansi::literals;

namespace dds::cli::cmd {

static int _pkg_search(const options& opts) {
auto cat = opts.open_pkg_db();
auto results = *dds::pkg_search(cat.database(), opts.pkg.search.pattern);
for (pkg_group_search_result const& found : results.found) {
fmt::print(
" Name: .bold[{}]\n"
"Versions: .bold[{}]\n"
" From: .bold[{}]\n"
" .bold[{}]\n\n"_styled,
found.name,
joinstr(", ", found.versions | ranges::views::transform(&semver::version::to_string)),
found.remote_name,
found.description);
}

if (results.found.empty()) {
dds_log(error,
"There are no packages that match the given pattern \".bold.red[{}]\""_styled,
opts.pkg.search.pattern.value_or("*"));
write_error_marker("pkg-search-no-result");
return 1;
}
return 0;
}

int pkg_search(const options& opts) {
return boost::leaf::try_catch(
[&] {
try {
return _pkg_search(opts);
} catch (...) {
capture_exception();
}
},
[](e_nonesuch missing) {
missing.log_error(
"There are no packages that match the given pattern \".bold.red[{}]\""_styled);
write_error_marker("pkg-search-no-result");
return 1;
});
}

} // namespace dds::cli::cmd

+ 5
- 22
src/dds/cli/cmd/repoman_add.cpp View file

@@ -14,16 +14,15 @@
namespace dds::cli::cmd {

static int _repoman_add(const options& opts) {
auto pkg_id = dds::pkg_id::parse(opts.repoman.add.pkg_id_str);
auto rpkg = any_remote_pkg::from_url(neo::url::parse(opts.repoman.add.url_str));
auto rpkg = any_remote_pkg::from_url(neo::url::parse(opts.repoman.add.url_str));
auto temp_sdist = get_package_sdist(rpkg);

dds::pkg_listing add_info{
.ident = pkg_id,
.ident = temp_sdist.sdist.manifest.id,
.deps = temp_sdist.sdist.manifest.dependencies,
.description = opts.repoman.add.description,
.remote_pkg = rpkg,
};
auto temp_sdist = get_package_sdist(add_info);

add_info.deps = temp_sdist.sdist.manifest.dependencies;

auto repo = repo_manager::open(opts.repoman.repo_dir);
repo.add_pkg(add_info, opts.repoman.add.url_str);
@@ -39,22 +38,6 @@ int repoman_add(const options& opts) {
dds::capture_exception();
}
},
[](user_error<errc::invalid_pkg_id>,
semver::invalid_version err,
dds::e_invalid_pkg_id_str idstr) -> int {
dds_log(error,
"Package ID string '{}' is invalid, because '{}' is not a valid semantic "
"version string",
idstr.value,
err.string());
write_error_marker("invalid-pkg-id-str-version");
throw;
},
[](user_error<errc::invalid_pkg_id>, dds::e_invalid_pkg_id_str idstr) -> int {
dds_log(error, "Invalid package ID string '{}'", idstr.value);
write_error_marker("invalid-pkg-id-str");
throw;
},
[](dds::e_sqlite3_error_exc,
boost::leaf::match<neo::sqlite3::errc, neo::sqlite3::errc::constraint_unique>,
dds::pkg_id pkid) {

+ 9
- 10
src/dds/cli/dispatch_main.cpp View file

@@ -16,6 +16,8 @@ using command = int(const options&);
command build_deps;
command build;
command compile_file;
command install_yourself;
command pkg_create;
command pkg_get;
command pkg_import;
command pkg_ls;
@@ -23,12 +25,12 @@ command pkg_repo_add;
command pkg_repo_update;
command pkg_repo_ls;
command pkg_repo_remove;
command pkg_search;
command repoman_add;
command repoman_import;
command repoman_init;
command repoman_ls;
command repoman_remove;
command sdist_create;

} // namespace cmd

@@ -38,20 +40,13 @@ int dispatch_main(const options& opts) noexcept {
switch (opts.subcommand) {
case subcommand::build:
return cmd::build(opts);
case subcommand::sdist: {
DDS_E_SCOPE(opts.sdist.subcommand);
switch (opts.sdist.subcommand) {
case sdist_subcommand::create:
return cmd::sdist_create(opts);
case sdist_subcommand::_none_:;
}
neo::unreachable();
}
case subcommand::pkg: {
DDS_E_SCOPE(opts.pkg.subcommand);
switch (opts.pkg.subcommand) {
case pkg_subcommand::ls:
return cmd::pkg_ls(opts);
case pkg_subcommand::create:
return cmd::pkg_create(opts);
case pkg_subcommand::get:
return cmd::pkg_get(opts);
case pkg_subcommand::import:
@@ -71,6 +66,8 @@ int dispatch_main(const options& opts) noexcept {
}
neo::unreachable();
}
case pkg_subcommand::search:
return cmd::pkg_search(opts);
case pkg_subcommand::_none_:;
}
neo::unreachable();
@@ -96,6 +93,8 @@ int dispatch_main(const options& opts) noexcept {
return cmd::compile_file(opts);
case subcommand::build_deps:
return cmd::build_deps(opts);
case subcommand::install_yourself:
return cmd::install_yourself(opts);
case subcommand::_none_:;
}
neo::unreachable();

+ 39
- 1
src/dds/cli/error_handler.cpp View file

@@ -2,6 +2,8 @@
#include "./options.hpp"

#include <dds/error/errors.hpp>
#include <dds/error/toolchain.hpp>
#include <dds/util/http/pool.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>
#include <dds/util/signal.hpp>
@@ -10,6 +12,7 @@
#include <boost/leaf/handle_error.hpp>
#include <boost/leaf/handle_exception.hpp>
#include <boost/leaf/result.hpp>
#include <fansi/styled.hpp>
#include <fmt/ostream.h>
#include <json5/parse_data.hpp>
#include <neo/scope.hpp>
@@ -18,6 +21,7 @@
#include <fstream>

using namespace dds;
using namespace fansi::literals;

namespace {

@@ -55,6 +59,32 @@ auto handlers = std::tuple( //
dds_log(critical, "Operation cancelled by the user");
return 2;
},
[](e_system_error_exc e, neo::url url, http_response_info) {
dds_log(error,
"An error occured while downloading [.bold.red[{}]]: {}"_styled,
url.to_string(),
e.message);
return 1;
},
[](e_system_error_exc e, network_origin origin, neo::url* url) {
dds_log(error,
"Network error communicating with .bold.red[{}://{}:{}]: {}"_styled,
origin.protocol,
origin.hostname,
origin.port,
e.message);
if (url) {
dds_log(error, " (While accessing URL [.bold.red[{}]])"_styled, url->to_string());
}
return 1;
},
[](e_system_error_exc err, e_loading_toolchain, e_toolchain_file* tc_file) {
dds_log(error, "Failed to load toolchain: .br.yellow[{}]"_styled, err.message);
if (tc_file) {
dds_log(error, " (While loading from file [.bold.red[{}]])"_styled, tc_file->value);
}
return 1;
},
[](e_system_error_exc exc, boost::leaf::verbose_diagnostic_info const& diag) {
dds_log(critical,
"An unhandled std::system_error arose. THIS IS A DDS BUG! Info: {}",
@@ -69,5 +99,13 @@ auto handlers = std::tuple( //
} // namespace

int dds::handle_cli_errors(std::function<int()> fn) noexcept {
return boost::leaf::try_catch(fn, handlers);
return boost::leaf::try_catch(
[&] {
try {
return fn();
} catch (...) {
capture_exception();
}
},
handlers);
}

+ 94
- 32
src/dds/cli/options.cpp View file

@@ -1,14 +1,18 @@
#include "./options.hpp"

#include <dds/error/errors.hpp>
#include <dds/error/on_error.hpp>
#include <dds/error/toolchain.hpp>
#include <dds/pkg/db.hpp>
#include <dds/toolchain/from_json.hpp>
#include <dds/toolchain/toolchain.hpp>

#include <debate/enum.hpp>
#include <fansi/styled.hpp>

using namespace dds;
using namespace debate;
using namespace fansi::literals;

namespace {

@@ -86,6 +90,17 @@ struct setup {
.action = put_into(opts.repoman.repo_dir),
};

argument tweaks_dir_arg{
.long_spellings = {"tweaks-dir"},
.short_spellings = {"TD"},
.help
= "Base directory of "
"\x1b]8;;https://vector-of-bool.github.io/2020/10/04/lib-configuration.html\x1b\\tweak "
"headers\x1b]8;;\x1b\\ that should be available to the build.",
.valname = "<dir>",
.action = put_into(opts.build.tweaks_dir),
};
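// Note: the "\x1b]8;;" sequences in the help string above are OSC 8 terminal hyperlink
// escapes; they render the words "tweak headers" as a clickable link in supporting
// terminals.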

void do_setup(argument_parser& parser) noexcept {
parser.add_argument({
.long_spellings = {"log-level"},
@@ -142,14 +157,14 @@ struct setup {
.name = "pkg",
.help = "Manage packages and package remotes",
}));
setup_sdist_cmd(group.add_parser({
.name = "sdist",
.help = "Work with source distribution packages",
}));
setup_repoman_cmd(group.add_parser({
.name = "repoman",
.help = "Manage a dds package repository",
}));
setup_install_yourself_cmd(group.add_parser({
.name = "install-yourself",
.help = "Have this dds executable install itself onto your PATH",
}));
}

void setup_build_cmd(argument_parser& build_cmd) {
@@ -189,6 +204,7 @@ struct setup {
build_cmd.add_argument(lm_index_arg.dup()).help
= "Path to a libman index file to use for loading project dependencies";
build_cmd.add_argument(jobs_arg.dup());
build_cmd.add_argument(tweaks_dir_arg.dup());
}

void setup_compile_file_cmd(argument_parser& compile_file_cmd) noexcept {
@@ -199,6 +215,7 @@ struct setup {
= "Set the maximum number of files to compile in parallel";
compile_file_cmd.add_argument(lm_index_arg.dup());
compile_file_cmd.add_argument(out_arg.dup());
compile_file_cmd.add_argument(tweaks_dir_arg.dup());
compile_file_cmd.add_argument({
.help = "One or more source files to compile",
.valname = "<source-files>",
@@ -221,6 +238,14 @@ struct setup {
.can_repeat = true,
.action = debate::push_back_onto(opts.build_deps.deps_files),
});
build_deps_cmd.add_argument({
.long_spellings = {"cmake"},
.help = "Generate a CMake file at the given path that will create import targets for "
"the dependencies",
.valname = "<file-path>",
.action = debate::put_into(opts.build_deps.cmake_file),
});
build_deps_cmd.add_argument(tweaks_dir_arg.dup());
build_deps_cmd.add_argument({
.help = "Dependency statement strings",
.valname = "<dependency>",
@@ -234,18 +259,22 @@ struct setup {
.valname = "<pkg-subcommand>",
.action = put_into(opts.pkg.subcommand),
});
setup_pkg_init_db_cmd(pkg_group.add_parser({
.name = "init-db",
.help = "Initialize a new package database file (Path specified with '--pkg-db-path')",
}));
pkg_group.add_parser({
.name = "ls",
.help = "List locally available packages",
});
setup_pkg_create_cmd(pkg_group.add_parser({
.name = "create",
.help = "Create a source distribution archive of a project",
}));
setup_pkg_get_cmd(pkg_group.add_parser({
.name = "get",
.help = "Obtain a copy of a package from a remote",
}));
setup_pkg_init_db_cmd(pkg_group.add_parser({
.name = "init-db",
.help = "Initialize a new package database file (Path specified with '--pkg-db-path')",
}));
setup_pkg_import_cmd(pkg_group.add_parser({
.name = "import",
.help = "Import a source distribution archive into the local package cache",
@@ -254,6 +283,20 @@ struct setup {
.name = "repo",
.help = "Manage package repositories",
}));
setup_pkg_search_cmd(pkg_group.add_parser({
.name = "search",
.help = "Search for packages available to download",
}));
}

void setup_pkg_create_cmd(argument_parser& pkg_create_cmd) {
pkg_create_cmd.add_argument(project_arg.dup()).help
= "Path to the project for which to create a source distribution.\n"
"Default is the current working directory.";
pkg_create_cmd.add_argument(out_arg.dup()).help
= "Destination path for the source distributioon archive";
pkg_create_cmd.add_argument(if_exists_arg.dup()).help
= "What to do if the destination names an existing file";
}

void setup_pkg_get_cmd(argument_parser& pkg_get_cmd) {
@@ -339,25 +382,16 @@ struct setup {
= "What to do if any of the named repositories do not exist";
}

void setup_sdist_cmd(argument_parser& sdist_cmd) noexcept {
auto& sdist_grp = sdist_cmd.add_subparsers({
.valname = "<sdist-subcommand>",
.action = put_into(opts.sdist.subcommand),
void setup_pkg_search_cmd(argument_parser& pkg_repo_search_cmd) noexcept {
pkg_repo_search_cmd.add_argument({
.help
= "A name or glob-style pattern. Only matching packages will be returned. \n"
"Searching is case-insensitive. Only the .italic[name] will be matched (not the \n"
"version).\n\nIf this parameter is omitted, the search will return .italic[all] \n"
"available packages."_styled,
.valname = "<name-or-pattern>",
.action = put_into(opts.pkg.search.pattern),
});
setup_sdist_create_cmd(sdist_grp.add_parser({
.name = "create",
.help = "Create a source distribution from a project tree",
}));
}

void setup_sdist_create_cmd(argument_parser& sdist_create_cmd) {
sdist_create_cmd.add_argument(project_arg.dup()).help
= "Path to the project for which to create a source distribution.\n"
"Default is the current working directory.";
sdist_create_cmd.add_argument(out_arg.dup()).help
= "Destination path for the source distributnion archive";
sdist_create_cmd.add_argument(if_exists_arg.dup()).help
= "What to do if the destination names an existing file";
}

void setup_repoman_cmd(argument_parser& repoman_cmd) {
@@ -414,12 +448,6 @@ struct setup {

void setup_repoman_add_cmd(argument_parser& repoman_add_cmd) {
repoman_add_cmd.add_argument(repoman_repo_dir_arg.dup());
repoman_add_cmd.add_argument({
.help = "The package ID of the package to add",
.valname = "<pkg-id>",
.required = true,
.action = put_into(opts.repoman.add.pkg_id_str),
});
repoman_add_cmd.add_argument({
.help = "URL to add to the repository",
.valname = "<url>",
@@ -442,6 +470,37 @@ struct setup {
.action = push_back_onto(opts.repoman.remove.pkgs),
});
}

void setup_install_yourself_cmd(argument_parser& install_yourself_cmd) {
install_yourself_cmd.add_argument({
.long_spellings = {"where"},
.help = "The scope of the installation. For .bold[system], installs in a global \n"
"directory for all users of the system. For .bold[user], installs in a \n"
"user-specific directory for executable binaries."_styled,
.valname = "{user,system}",
.action = put_into(opts.install_yourself.where),
});
install_yourself_cmd.add_argument({
.long_spellings = {"dry-run"},
.help
= "Do not actually perform any operations, but log what .italic[would] happen"_styled,
.nargs = 0,
.action = store_true(opts.dry_run),
});
install_yourself_cmd.add_argument({
.long_spellings = {"no-modify-path"},
.help = "Do not attempt to modify the PATH environment variable",
.nargs = 0,
.action = store_false(opts.install_yourself.fixup_path_env),
});
install_yourself_cmd.add_argument({
.long_spellings = {"symlink"},
.help = "Create a symlink at the installed location to the existing 'dds' executable\n"
"instead of copying the executable file",
.nargs = 0,
.action = store_true(opts.install_yourself.symlink),
});
}
};

} // namespace
@@ -464,7 +523,9 @@ toolchain dds::cli::options::load_toolchain() const {
}
// Convert the given string to a toolchain
auto& tc_str = *toolchain;
DDS_E_SCOPE(e_loading_toolchain{tc_str});
if (tc_str.starts_with(":")) {
DDS_E_SCOPE(e_toolchain_builtin{tc_str});
auto default_tc = tc_str.substr(1);
auto tc = dds::toolchain::get_builtin(default_tc);
if (!tc.has_value()) {
@@ -474,6 +535,7 @@ toolchain dds::cli::options::load_toolchain() const {
}
return std::move(*tc);
} else {
DDS_E_SCOPE(e_toolchain_file{tc_str});
return parse_toolchain_json5(slurp_file(tc_str));
}
}

+ 26
- 14
src/dds/cli/options.hpp View file

@@ -25,16 +25,8 @@ enum class subcommand {
compile_file,
build_deps,
pkg,
sdist,
repoman,
};

/**
* @brief 'dds sdist' subcommands
*/
enum class sdist_subcommand {
_none_,
create,
install_yourself,
};

/**
@@ -44,8 +36,10 @@ enum class pkg_subcommand {
_none_,
ls,
get,
create,
import,
repo,
search,
};

/**
@@ -103,6 +97,8 @@ struct options {
opt_path pkg_db_dir;
// The `--log-level` argument
log::level log_level = log::level::info;
// Any `--dry-run` argument
bool dry_run = false;

// The top-most selected subcommand
enum subcommand subcommand;
@@ -143,6 +139,7 @@ struct options {
opt_path lm_index;
std::vector<string> add_repos;
bool update_repos = false;
opt_path tweaks_dir;
} build;

/**
@@ -161,6 +158,8 @@ struct options {
std::vector<fs::path> deps_files;
/// Dependency strings provided directly in the command-line
std::vector<string> deps;
/// Path to a CMake import file to write
opt_path cmake_file;
} build_deps;

/**
@@ -214,11 +213,15 @@ struct options {
/// Package IDs to download
std::vector<string> pkgs;
} get;
} pkg;

struct {
sdist_subcommand subcommand;
} sdist;
/**
* @brief Parameters for 'dds pkg search'
*/
struct {
/// The search pattern, if provided
opt_string pattern;
} search;
} pkg;

/**
* @brief Parameters for 'dds repoman'
@@ -244,7 +247,6 @@ struct options {

/// Options for 'dds repoman add'
struct {
std::string pkg_id_str;
std::string url_str;
std::string description;
} add;
@@ -256,6 +258,16 @@ struct options {
} remove;
} repoman;

struct {
enum where_e {
system,
user,
} where
= user;
bool fixup_path_env = true;
bool symlink = false;
} install_yourself;

/**
* @brief Attach arguments and subcommands to the given argument parser, binding those arguments
* to the values in this object.

+19 -0 src/dds/error/toolchain.hpp

@@ -0,0 +1,19 @@
#pragma once

#include <string>

namespace dds {

struct e_loading_toolchain {
std::string value;
};

struct e_toolchain_file {
std::string value;
};

struct e_toolchain_builtin {
std::string value;
};

} // namespace dds

+16 -3 src/dds/pkg/cache.cpp

@@ -59,7 +59,7 @@ pkg_cache pkg_cache::_open_for_directory(bool writeable, path_ref dirpath) {
return {writeable, dirpath, std::move(entries)};
}

void pkg_cache::add_sdist(const sdist& sd, if_exists ife_action) {
void pkg_cache::import_sdist(const sdist& sd, if_exists ife_action) {
neo_assertion_breadcrumbs("Importing sdist archive", sd.manifest.id.to_string());
if (!_write_enabled) {
dds_log(critical,
@@ -83,19 +83,32 @@ void pkg_cache::add_sdist(const sdist& sd, if_exists ife_action) {
dds_log(info, msg + " - Replacing");
}
}

// Stage the import in a temporary sibling directory first
auto tmp_copy = sd_dest;
tmp_copy.replace_filename(".tmp-import");
if (fs::exists(tmp_copy)) {
fs::remove_all(tmp_copy);
}
fs::create_directories(tmp_copy.parent_path());
fs::copy(sd.path, tmp_copy, fs::copy_options::recursive);

// Re-create an sdist from the given sdist. This prunes non-sdist files, rather than just
// fs::copy-ing the source tree, which may contain extras.
sdist_params params{
.project_dir = sd.path,
.dest_path = tmp_copy,
.include_apps = true,
.include_tests = true,
};
create_sdist_in_dir(tmp_copy, params);

// Swap out the temporary to the final location
if (fs::exists(sd_dest)) {
fs::remove_all(sd_dest);
}
fs::rename(tmp_copy, sd_dest);
_sdists.insert(sdist::from_directory(sd_dest));
dds_log(info, "Source distribution '{}' successfully exported", sd.manifest.id.to_string());
dds_log(info, "Source distribution for '{}' successfully imported", sd.manifest.id.to_string());
}

const sdist* pkg_cache::find(const pkg_id& pkg) const noexcept {
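
The import stages the pruned copy in a sibling ".tmp-import" directory and publishes it with one rename, so a failure mid-copy never leaves a half-imported package visible. A minimal sketch of the same idiom using only std::filesystem (illustrative, not dds code):

#include <filesystem>

namespace fs = std::filesystem;

// Build everything in `staged` first; only the final rename makes it visible.
void publish_dir(const fs::path& staged, const fs::path& dest) {
    if (fs::exists(dest)) {
        fs::remove_all(dest);  // drop any previous version
    }
    fs::create_directories(dest.parent_path());
    fs::rename(staged, dest);  // atomic when both paths are on one filesystem
}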

+1 -1 src/dds/pkg/cache.hpp

@@ -81,7 +81,7 @@ public:

static fs::path default_local_path() noexcept;

void add_sdist(const sdist&, if_exists = if_exists::throw_exc);
void import_sdist(const sdist&, if_exists = if_exists::throw_exc);

const sdist* find(const pkg_id& pk) const noexcept;


+15 -1 src/dds/pkg/db.cpp

@@ -4,6 +4,7 @@
#include <dds/error/errors.hpp>
#include <dds/error/nonesuch.hpp>
#include <dds/solve/solve.hpp>
#include <dds/util/env.hpp>
#include <dds/util/log.hpp>
#include <dds/util/paths.hpp>

@@ -24,6 +25,12 @@ using namespace dds;
namespace nsql = neo::sqlite3;
using namespace neo::sqlite3::literals;

namespace dds {

void add_init_repo(nsql::database_ref db) noexcept;

} // namespace dds

namespace {

void migrate_repodb_1(nsql::database& db) {
@@ -82,7 +89,7 @@ void migrate_repodb_3(nsql::database& db) {
CREATE TABLE dds_pkg_remotes (
remote_id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
remote_url TEXT NOT NULL,
url TEXT NOT NULL,
db_etag TEXT,
db_mtime TEXT
);
@@ -225,6 +232,13 @@ void ensure_migrated(nsql::database& db) {
}
meta["version"] = current_database_version;
exec(db.prepare("UPDATE dds_cat_meta SET meta=?"), meta.dump());
tr.commit();

if (version < 3 && !getenv_bool("DDS_NO_ADD_INITIAL_REPO")) {
// Version 3 introduced remote repositories. If we're updating to 3, add that repo now
dds_log(info, "Downloading initial repository");
dds::add_init_repo(db);
}
}

} // namespace

+3 -7 src/dds/pkg/get/get.cpp

@@ -12,9 +12,7 @@

using namespace dds;

namespace {

temporary_sdist do_pull_sdist(const any_remote_pkg& rpkg) {
temporary_sdist dds::get_package_sdist(const any_remote_pkg& rpkg) {
auto tmpdir = dds::temporary_dir::create();

rpkg.get_sdist(tmpdir.path());
@@ -29,10 +27,8 @@ temporary_sdist do_pull_sdist(const any_remote_pkg& rpkg) {
return {sd_tmp_dir, sd};
}

} // namespace

temporary_sdist dds::get_package_sdist(const pkg_listing& pkg) {
auto tsd = do_pull_sdist(pkg.remote_pkg);
auto tsd = get_package_sdist(pkg.remote_pkg);
if (!(tsd.sdist.manifest.id == pkg.ident)) {
throw_external_error<errc::sdist_ident_mismatch>(
"The package name@version in the generated source distribution does not match the name "
@@ -62,7 +58,7 @@ void dds::get_all(const std::vector<pkg_id>& pkgs, pkg_cache& repo, const pkg_db
dds_log(info, "Download package: {}", inf.ident.to_string());
auto tsd = get_package_sdist(inf);
std::scoped_lock lk{repo_mut};
repo.add_sdist(tsd.sdist, if_exists::throw_exc);
repo.import_sdist(tsd.sdist, if_exists::throw_exc);
});

if (!okay) {

+2 -0 src/dds/pkg/get/get.hpp

@@ -8,7 +8,9 @@ namespace dds {
class pkg_cache;
class pkg_db;
struct pkg_listing;
class any_remote_pkg;

temporary_sdist get_package_sdist(const any_remote_pkg&);
temporary_sdist get_package_sdist(const pkg_listing&);

void get_all(const std::vector<pkg_id>& pkgs, dds::pkg_cache& repo, const pkg_db& cat);

+71 -11 src/dds/pkg/remote.cpp

@@ -9,7 +9,9 @@
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fansi/styled.hpp>
#include <fmt/ostream.h>
#include <neo/event.hpp>
#include <neo/io/stream/buffers.hpp>
#include <neo/io/stream/file.hpp>
@@ -68,10 +70,10 @@ pkg_remote pkg_remote::connect(std::string_view url_str) {

void pkg_remote::store(nsql::database_ref db) {
auto st = db.prepare(R"(
INSERT INTO dds_pkg_remotes (name, remote_url)
INSERT INTO dds_pkg_remotes (name, url)
VALUES (?, ?)
ON CONFLICT (name) DO
UPDATE SET remote_url = ?2
UPDATE SET url = ?2
)");
nsql::exec(st, _name, _base_url.to_string());
}
@@ -206,16 +208,16 @@ void pkg_remote::update_pkg_db(nsql::database_ref db,

void dds::update_all_remotes(nsql::database_ref db) {
dds_log(info, "Updating catalog from all remotes");
auto repos_st = db.prepare("SELECT name, remote_url, db_etag, db_mtime FROM dds_pkg_remotes");
auto repos_st = db.prepare("SELECT name, url, db_etag, db_mtime FROM dds_pkg_remotes");
auto tups = nsql::iter_tuples<std::string,
std::string,
std::optional<std::string>,
std::optional<std::string>>(repos_st)
| ranges::to_vector;

for (const auto& [name, remote_url, etag, db_mtime] : tups) {
DDS_E_SCOPE(e_url_string{remote_url});
pkg_remote repo{name, neo::url::parse(remote_url)};
for (const auto& [name, url, etag, db_mtime] : tups) {
DDS_E_SCOPE(e_url_string{url});
pkg_remote repo{name, neo::url::parse(url)};
repo.update_pkg_db(db, etag, db_mtime);
}

@@ -224,18 +226,18 @@ void dds::update_all_remotes(nsql::database_ref db) {
}

void dds::remove_remote(pkg_db& pkdb, std::string_view name) {
auto& db = pkdb.database();
neo::sqlite3::transaction_guard tr{db};
auto& db = pkdb.database();
nsql::transaction_guard tr{db};
auto get_rowid_st = db.prepare("SELECT remote_id FROM dds_pkg_remotes WHERE name = ?");
get_rowid_st.bindings()[1] = name;
auto row = neo::sqlite3::unpack_single_opt<std::int64_t>(get_rowid_st);
auto row = nsql::unpack_single_opt<std::int64_t>(get_rowid_st);
if (!row) {
BOOST_LEAF_THROW_EXCEPTION( //
make_user_error<errc::no_catalog_remote_info>("There is no remote with name '{}'",
name),
[&] {
auto all_st = db.prepare("SELECT name FROM dds_pkg_remotes");
auto tups = neo::sqlite3::iter_tuples<std::string>(all_st);
auto tups = nsql::iter_tuples<std::string>(all_st);
auto names = tups | ranges::views::transform([](auto&& tup) {
auto&& [n] = tup;
return n;
@@ -245,5 +247,63 @@ void dds::remove_remote(pkg_db& pkdb, std::string_view name) {
});
}
auto [rowid] = *row;
neo::sqlite3::exec(db.prepare("DELETE FROM dds_pkg_remotes WHERE remote_id = ?"), rowid);
nsql::exec(db.prepare("DELETE FROM dds_pkg_remotes WHERE remote_id = ?"), rowid);
}

void dds::add_init_repo(nsql::database_ref db) noexcept {
std::string_view init_repo = "https://repo-1.dds.pizza";
// _Do not_ let errors stop us from continuing
bool okay = boost::leaf::try_catch(
[&]() -> bool {
try {
auto remote = pkg_remote::connect(init_repo);
remote.store(db);
update_all_remotes(db);
return true;
} catch (...) {
capture_exception();
}
},
[](http_status_error err, http_response_info resp, neo::url url) {
dds_log(error,
"An HTTP error occurred while adding the initial repository [{}]: HTTP Status "
"{} {}: {}",
url.to_string(),
resp.status,
resp.status_message,
err.what());
return false;
},
[](e_sqlite3_error_exc e, neo::url url) {
dds_log(error,
"Error accessing remote database while adding initial repository: {}: {}",
url.to_string(),
e.message);
return false;
},
[](e_sqlite3_error_exc e) {
dds_log(error, "Unexpected database error: {}", e.message);
return false;
},
[](e_system_error_exc e, network_origin conn) {
dds_log(error,
"Error communicating with [.br.red[{}://{}:{}]`]: {}"_styled,
conn.protocol,
conn.hostname,
conn.port,
e.message);
return false;
},
[](boost::leaf::diagnostic_info const& diag) -> bool {
dds_log(critical, "Unhandled error while adding initial package repository: {}", diag);
throw;
});
if (!okay) {
dds_log(warn, "We failed to add the initial package repository [{}]", init_repo);
dds_log(warn, "No remote packages will be available until the above issue is resolved.");
dds_log(
warn,
"The remote package repository can be added again with [.br.yellow[dds pkg repo add \"{}\"]]"_styled,
init_repo);
}
}
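
add_init_repo leans on Boost.LEAF's dispatch rule: try_catch invokes the first handler whose parameters can all be satisfied from the error objects attached to the in-flight failure. A minimal sketch of that mechanism under the same headers this commit uses (the error type and values are illustrative):

#include <boost/leaf/exception.hpp>
#include <boost/leaf/handle_exception.hpp>

#include <stdexcept>
#include <string>

struct e_url_str { std::string value; };  // illustrative attachment

bool demo() {
    return boost::leaf::try_catch(
        []() -> bool {
            // Attach an e_url_str to the exception as it is thrown:
            throw boost::leaf::exception(std::runtime_error("boom"),
                                         e_url_str{"https://example.com"});
        },
        [](const std::runtime_error&, const e_url_str&) {
            // Chosen: both the exception and the attached e_url_str are available.
            return false;
        },
        [](const boost::leaf::diagnostic_info&) {
            return false;  // fallback when nothing more specific matches
        });
}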

+2 -0 src/dds/pkg/remote.hpp

@@ -34,4 +34,6 @@ public:
void update_all_remotes(neo::sqlite3::database_ref);
void remove_remote(pkg_db& db, std::string_view name);

void add_init_repo(neo::sqlite3::database_ref db) noexcept;

} // namespace dds

+76 -0 src/dds/pkg/search.cpp

@@ -0,0 +1,76 @@
#include "./search.hpp"

#include <dds/dym.hpp>
#include <dds/error/nonesuch.hpp>
#include <dds/error/result.hpp>
#include <dds/util/log.hpp>
#include <dds/util/string.hpp>

#include <neo/sqlite3/database.hpp>
#include <neo/sqlite3/iter_tuples.hpp>

#include <range/v3/algorithm/sort.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/transform.hpp>

using namespace dds;
namespace nsql = neo::sqlite3;

result<pkg_search_results> dds::pkg_search(nsql::database_ref db,
std::optional<std::string_view> pattern) noexcept {
auto search_st = db.prepare(R"(
SELECT pkg.name,
group_concat(version, ';;'),
description,
remote.name,
remote.url
FROM dds_pkgs AS pkg
JOIN dds_pkg_remotes AS remote USING(remote_id)
WHERE lower(pkg.name) GLOB lower(:pattern)
GROUP BY pkg.name, remote_id, description
ORDER BY remote.name, pkg.name
)");
// If no pattern, grab _everything_
auto final_pattern = pattern.value_or("*");
dds_log(debug, "Searching for packages matching pattern '{}'", final_pattern);
search_st.bindings()[1] = final_pattern;
auto rows = nsql::iter_tuples<std::string, std::string, std::string, std::string, std::string>(
search_st);

std::vector<pkg_group_search_result> found;
for (auto [name, versions, desc, remote_name, remote_url] : rows) {
dds_log(debug,
"Found: {} with versions {} (Description: {}) from {} [{}]",
name,
versions,
desc,
remote_name,
remote_url);
auto version_strs = split(versions, ";;");
auto versions_semver
= version_strs | ranges::views::transform(&semver::version::parse) | ranges::to_vector;
ranges::sort(versions_semver);
found.push_back(pkg_group_search_result{
.name = name,
.versions = versions_semver,
.description = desc,
.remote_name = remote_name,
});
}

if (found.empty()) {
return boost::leaf::new_error([&] {
auto names_st = db.prepare("SELECT DISTINCT name from dds_pkgs");
auto tups = nsql::iter_tuples<std::string>(names_st);
auto names_vec = tups | ranges::views::transform([](auto&& row) {
auto [name] = row;
return name;
})
| ranges::to_vector;
auto nearest = dds::did_you_mean(final_pattern, names_vec);
return e_nonesuch{final_pattern, nearest};
});
}

return pkg_search_results{.found = std::move(found)};
}
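
Because the WHERE clause uses SQLite GLOB with both sides lower()ed, the pattern language is shell-style ('*' and '?'), matched case-insensitively, and an absent pattern degrades to '*'. A hypothetical call sketch (database setup elided):

auto some = dds::pkg_search(db, "neo-*");      // e.g. "neo-fun", "neo-sqlite3", ...
auto all  = dds::pkg_search(db, std::nullopt); // no pattern -> "*" -> every package
if (!some) {
    // An empty match returns an e_nonesuch carrying a did-you-mean suggestion.
}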

+33 -0 src/dds/pkg/search.hpp

@@ -0,0 +1,33 @@
#pragma once

#include <dds/error/result_fwd.hpp>

#include <semver/version.hpp>

#include <optional>
#include <string_view>
#include <vector>

namespace neo::sqlite3 {

class database_ref;

} // namespace neo::sqlite3

namespace dds {

struct pkg_group_search_result {
std::string name;
std::vector<semver::version> versions;
std::string description;
std::string remote_name;
};

struct pkg_search_results {
std::vector<pkg_group_search_result> found;
};

result<pkg_search_results> pkg_search(neo::sqlite3::database_ref db,
std::optional<std::string_view> query) noexcept;

} // namespace dds

+0 -1 src/dds/sdist/dist.cpp

@@ -116,7 +116,6 @@ sdist dds::create_sdist_in_dir(path_ref out, const sdist_params& params) {

auto pkg_man = package_manifest::load_from_file(*man_path);
sdist_export_file(out, params.project_dir, *man_path);
dds_log(info, "Generated export as {}", pkg_man.id.to_string());
return sdist::from_directory(out);
}


+14 -0 src/dds/toolchain/toolchain.cpp

@@ -97,6 +97,13 @@ compile_command_info toolchain::create_compile_command(const compile_file_spec&
extend(flags, _tty_flags);
}

if (knobs.cache_buster) {
// This is simply a CPP definition that is used to "bust" any caches that rely on inspecting
// the command-line of the compiler (including our own).
auto def = replace(_def_template, "[def]", "__dds_cachebust=" + *knobs.cache_buster);
extend(flags, def);
}

dds_log(trace, "#include-search dirs:");
for (auto&& inc_dir : spec.include_dirs) {
dds_log(trace, " - search: {}", inc_dir.string());
@@ -111,6 +118,13 @@ compile_command_info toolchain::create_compile_command(const compile_file_spec&
extend(flags, inc_args);
}

if (knobs.tweaks_dir) {
dds_log(trace, " - search (tweaks): {}", knobs.tweaks_dir->string());
auto shortest = shortest_path_from(*knobs.tweaks_dir, cwd);
auto tweak_inc_args = include_args(shortest);
extend(flags, tweak_inc_args);
}

for (auto&& def : spec.definitions) {
auto def_args = definition_args(def);
extend(flags, def_args);
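
The cache-buster from the first hunk above is rendered through the toolchain's definition template, so it travels as an ordinary preprocessor definition and perturbs the command line that caching layers fingerprint. A small sketch of that substitution, assuming a GCC-style template (illustrative values):

std::vector<std::string> def_template{"-D[def]"};
auto def = replace(def_template, "[def]", "__dds_cachebust=abc123");
// def == {"-D__dds_cachebust=abc123"}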

+3 -0 src/dds/toolchain/toolchain.hpp

@@ -18,6 +18,9 @@ enum class language {

struct toolchain_knobs {
bool is_tty = false;
// Directory storing tweaks for the compilation
std::optional<fs::path> tweaks_dir{};
std::optional<std::string> cache_buster{};
};

struct compile_file_spec {

+18 -0 src/dds/util/env.cpp

@@ -0,0 +1,18 @@
#include "./env.hpp"

#include <neo/utility.hpp>

#include <cstdlib>

std::optional<std::string> dds::getenv(const std::string& varname) noexcept {
auto cptr = std::getenv(varname.data());
if (cptr) {
return std::string(cptr);
}
return {};
}

bool dds::getenv_bool(const std::string& varname) noexcept {
auto s = getenv(varname);
return s == neo::oper::any_of("1", "true", "on", "TRUE", "ON", "YES", "yes");
}

+23 -0 src/dds/util/env.hpp

@@ -0,0 +1,23 @@
#pragma once

#include <neo/concepts.hpp>

#include <optional>
#include <string>

namespace dds {

std::optional<std::string> getenv(const std::string& env) noexcept;

bool getenv_bool(const std::string& env) noexcept;

template <neo::invocable Func>
std::string getenv(const std::string& name, Func&& fn) noexcept(noexcept(fn())) {
auto val = getenv(name);
if (!val) {
return std::string(fn());
}
return *val;
}

} // namespace dds
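
A short usage sketch for these helpers (XDG_DATA_HOME is queried this way in paths.linux_fbsd.cpp below; the boolean variable is the one checked in db.cpp above):

// Returns the variable's value, or the lambda's result when it is unset:
auto data_home = dds::getenv("XDG_DATA_HOME", [] { return "~/.local/share"; });
// True for values like "1", "true", "on", "yes" (the exact list is above):
bool skip_init_repo = dds::getenv_bool("DDS_NO_ADD_INITIAL_REPO");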

+52 -7 src/dds/util/fs.cpp

@@ -1,5 +1,8 @@
#include "./fs.hpp"

#include <dds/error/on_error.hpp>
#include <dds/error/result.hpp>

#include <fmt/core.h>

#include <sstream>
@@ -7,13 +10,8 @@
using namespace dds;

std::fstream dds::open(const fs::path& filepath, std::ios::openmode mode, std::error_code& ec) {
std::fstream ret;
auto mask = ret.exceptions() | std::ios::badbit;
ret.exceptions(mask);

try {
ret.open(filepath.string(), mode);
} catch (const std::ios::failure&) {
std::fstream ret{filepath, mode};
if (!ret) {
ec = std::error_code(errno, std::system_category());
}
return ret;
@@ -55,4 +53,51 @@ void dds::safe_rename(path_ref source, path_ref dest) {
}
fs::rename(tmp, dest);
fs::remove_all(source);
}

result<void> dds::copy_file(path_ref source, path_ref dest, fs::copy_options opts) noexcept {
std::error_code ec;
fs::copy_file(source, dest, opts, ec);
if (ec) {
return new_error(DDS_E_ARG(e_copy_file{source, dest}), ec);
}
return {};
}

result<void> dds::remove_file(path_ref file) noexcept {
std::error_code ec;
fs::remove(file, ec);
if (ec) {
return new_error(DDS_E_ARG(e_remove_file{file}), ec);
}
return {};
}

result<void> dds::create_symlink(path_ref target, path_ref symlink) noexcept {
std::error_code ec;
if (fs::is_directory(target)) {
fs::create_directory_symlink(target, symlink, ec);
} else {
fs::create_symlink(target, symlink, ec);
}
if (ec) {
return new_error(DDS_E_ARG(e_symlink{symlink, target}), ec);
}
return {};
}

result<void> dds::write_file(path_ref dest, std::string_view content) noexcept {
std::error_code ec;
auto outfile = dds::open(dest, std::ios::binary | std::ios::out, ec);
if (ec) {
return new_error(DDS_E_ARG(e_write_file_path{dest}), ec);
}
errno = 0;
outfile.write(content.data(), content.size());
auto e = errno;
if (!outfile) {
return new_error(std::error_code(e, std::system_category()),
DDS_E_ARG(e_write_file_path{dest}));
}
return {};
}

+27 -0 src/dds/util/fs.hpp

@@ -1,5 +1,7 @@
#pragma once

#include <dds/error/result_fwd.hpp>

#include <filesystem>
#include <fstream>
#include <string>
@@ -16,6 +18,11 @@ using path_ref = const fs::path&;
std::fstream open(const fs::path& filepath, std::ios::openmode mode, std::error_code& ec);
std::string slurp_file(const fs::path& path, std::error_code& ec);

struct e_write_file_path {
fs::path value;
};
[[nodiscard]] result<void> write_file(const fs::path& path, std::string_view content) noexcept;

inline std::fstream open(const fs::path& filepath, std::ios::openmode mode) {
std::error_code ec;
auto ret = dds::open(filepath, mode, ec);
@@ -36,6 +43,26 @@ inline std::string slurp_file(const fs::path& path) {

void safe_rename(path_ref source, path_ref dest);

struct e_copy_file {
fs::path source;
fs::path dest;
};

struct e_remove_file {
fs::path value;
};

struct e_symlink {
fs::path symlink;
fs::path target;
};

[[nodiscard]] result<void>
copy_file(path_ref source, path_ref dest, fs::copy_options opts = {}) noexcept;
[[nodiscard]] result<void> remove_file(path_ref file) noexcept;

[[nodiscard]] result<void> create_symlink(path_ref target, path_ref symlink) noexcept;

} // namespace file_utils

} // namespace dds
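
These helpers return a result<void> instead of throwing; the e_* structs and the originating std::error_code ride along on the error. A usage sketch (paths are illustrative):

auto ok = dds::write_file("out/config.txt", "contents");
if (!ok) {
    // Failure carries e_write_file_path{...} plus the std::error_code.
}
auto copied = dds::copy_file("a.txt", "b.txt",
                             std::filesystem::copy_options::overwrite_existing);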

+6 -5 src/dds/util/http/pool.cpp

@@ -111,8 +111,9 @@ struct http_client_impl {
{"Host", hostname_port},
{"Accept", "*/*"},
{"Content-Length", "0"},
{"TE", "gzip, chunked, plain"},
{"TE", "gzip, chunked"},
{"Connection", "keep-alive"},
{"User-Agent", "dds 0.1.0-alpha.6"},
};
if (!params.prior_etag.empty()) {
headers.push_back({"If-None-Match", params.prior_etag});
@@ -385,8 +386,8 @@ void http_client::_set_ready() noexcept {
}

request_result http_pool::request(neo::url url, http_request_params params) {
DDS_E_SCOPE(url);
for (auto i = 0; i <= 100; ++i) {
DDS_E_SCOPE(url);
params.path = url.path;
params.query = url.query.value_or("");

@@ -410,18 +411,18 @@ request_result http_pool::request(neo::url url, http_request_params params) {

if (resp.is_error()) {
client.discard_body(resp);
throw boost::leaf::exception(http_status_error("Received an error from HTTP"));
throw BOOST_LEAF_EXCEPTION(http_status_error("Received an error from HTTP"));
}

if (resp.is_redirect()) {
client.discard_body(resp);
if (i == 100) {
throw boost::leaf::exception(
throw BOOST_LEAF_EXCEPTION(
http_server_error("Encountered over 100 HTTP redirects. Request aborted."));
}
auto loc = resp.headers.find("Location");
if (!loc) {
throw boost::leaf::exception(
throw BOOST_LEAF_EXCEPTION(
http_server_error("Server sent an invalid response of a 30x redirect without a "
"'Location' header"));
}
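
The swap from boost::leaf::exception(...) to BOOST_LEAF_EXCEPTION here (and to BOOST_LEAF_THROW_EXCEPTION in src/debate/argument_parser.cpp below) is not purely cosmetic: per Boost.LEAF's documentation, the macro forms also record the throw site. A sketch of the difference:

// Function form: only the listed error objects are attached.
throw boost::leaf::exception(http_server_error("..."));
// Macro form: additionally loads boost::leaf::e_source_location (file/line/function).
throw BOOST_LEAF_EXCEPTION(http_server_error("..."));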

+2 -0 src/dds/util/output.hpp

@@ -2,6 +2,8 @@

namespace dds {

void enable_ansi_console() noexcept;

bool stdout_is_a_tty() noexcept;

} // namespace dds

+4 -0 src/dds/util/output.nix.cpp

@@ -6,6 +6,10 @@

using namespace dds;

void dds::enable_ansi_console() noexcept {
// unix consoles generally already support ANSI control chars by default
}

bool dds::stdout_is_a_tty() noexcept { return ::isatty(STDOUT_FILENO) != 0; }

#endif

+29 -3 src/dds/util/output.win.cpp

@@ -1,10 +1,36 @@
#include <dds/util/output.hpp>

#if _WIN32

#include <dds/util/output.hpp>
#include <windows.h>

void dds::enable_ansi_console() noexcept {
auto stdio_console = ::GetStdHandle(STD_OUTPUT_HANDLE);
if (stdio_console == INVALID_HANDLE_VALUE) {
// Oh well...
return;
}
DWORD mode = 0;
if (!::GetConsoleMode(stdio_console, &mode)) {
// Failed to get the mode?
return;
}
// Set the bit!
mode |= ENABLE_VIRTUAL_TERMINAL_PROCESSING;
::SetConsoleMode(stdio_console, mode);
}

bool dds::stdout_is_a_tty() noexcept {
// XXX: Newer Windows consoles support ANSI color, so this should be made smarter
return false;
auto stdio_console = ::GetStdHandle(STD_OUTPUT_HANDLE);
if (stdio_console == INVALID_HANDLE_VALUE) {
return false;
}
DWORD mode = 0;
if (!::GetConsoleMode(stdio_console, &mode)) {
// Failed to get the mode
return false;
}
return (mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING);
}

#endif

+9 -19 src/dds/util/paths.linux_fbsd.cpp

@@ -2,6 +2,7 @@

#include "./paths.hpp"

#include <dds/util/env.hpp>
#include <dds/util/log.hpp>

#include <cstdlib>
@@ -10,45 +11,34 @@ using namespace dds;

fs::path dds::user_home_dir() {
static auto ret = []() -> fs::path {
auto home_env = std::getenv("HOME");
if (!home_env) {
return fs::absolute(dds::getenv("HOME", [] {
dds_log(error, "No HOME environment variable set!");
return "/";
}
return fs::absolute(fs::path(home_env));
}));
}();
return ret;
}

fs::path dds::user_data_dir() {
static auto ret = []() -> fs::path {
auto xdg_data_home = std::getenv("XDG_DATA_HOME");
if (xdg_data_home) {
return fs::absolute(fs::path(xdg_data_home));
}
return user_home_dir() / ".local/share";
return fs::absolute(
dds::getenv("XDG_DATA_HOME", [] { return user_home_dir() / ".local/share"; }));
}();
return ret;
}

fs::path dds::user_cache_dir() {
static auto ret = []() -> fs::path {
auto xdg_cache_home = std::getenv("XDG_CACHE_HOME");
if (xdg_cache_home) {
return fs::absolute(fs::path(xdg_cache_home));
}
return user_home_dir() / ".cache";
return fs::absolute(
dds::getenv("XDG_CACHE_HOME", [] { return user_home_dir() / ".cache"; }));
}();
return ret;
}

fs::path dds::user_config_dir() {
static auto ret = []() -> fs::path {
auto xdg_config_home = std::getenv("XDG_CONFIG_HOME");
if (xdg_config_home) {
return fs::absolute(fs::path(xdg_config_home));
}
return user_home_dir() / ".config";
return fs::absolute(
dds::getenv("XDG_CONFIG_HOME", [] { return user_home_dir() / ".config"; }));
}();
return ret;
}

+5 -6 src/dds/util/paths.macos.cpp

@@ -2,6 +2,7 @@

#include "./paths.hpp"

#include <dds/util/env.hpp>
#include <dds/util/log.hpp>

#include <cstdlib>
@@ -10,12 +11,10 @@ using namespace dds;

fs::path dds::user_home_dir() {
static auto ret = []() -> fs::path {
auto home_env = std::getenv("HOME");
if (!home_env) {
dds_log(warn, "No HOME environment variable set!");
return fs::absolute(dds::getenv("HOME", [] {
dds_log(error, "No HOME environment variable set!");
return "/";
}
return fs::absolute(fs::path(home_env));
}));
}();
return ret;
}
@@ -24,4 +23,4 @@ fs::path dds::user_data_dir() { return user_home_dir() / "Library/Application Su
fs::path dds::user_cache_dir() { return user_home_dir() / "Library/Caches"; }
fs::path dds::user_config_dir() { return user_home_dir() / "Preferences"; }

#endif
#endif

+3 -2 src/dds/util/result.cpp

@@ -1,5 +1,6 @@
#include "./result.hpp"

#include <dds/util/env.hpp>
#include <dds/util/log.hpp>

#include <fmt/ostream.h>
@@ -23,9 +24,9 @@ void dds::capture_exception() {

void dds::write_error_marker(std::string_view error) noexcept {
dds_log(trace, "[error marker {}]", error);
auto efile_path = std::getenv("DDS_WRITE_ERROR_MARKER");
auto efile_path = dds::getenv("DDS_WRITE_ERROR_MARKER");
if (efile_path) {
std::ofstream outfile{efile_path, std::ios::binary};
std::ofstream outfile{*efile_path, std::ios::binary};
fmt::print(outfile, "{}", error);
}
}

+15 -0 src/dds/util/string.hpp

@@ -86,6 +86,21 @@ replace(std::vector<std::string> strings, std::string_view key, std::string_view
return strings;
}

template <typename Range>
inline std::string joinstr(std::string_view joiner, Range&& rng) {
auto iter = std::begin(rng);
auto end = std::end(rng);
std::string ret;
while (iter != end) {
ret.append(*iter);
++iter;
if (iter != end) {
ret.append(joiner);
}
}
return ret;
}

} // namespace string_utils

} // namespace dds
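
A quick usage sketch for joinstr (any range of string-like elements works, since only begin/end and append are required; the dds:: qualification assumes string_utils is an inline namespace, as unqualified calls elsewhere suggest):

std::vector<std::string> words{"gcc", "clang", "msvc"};
auto pretty = dds::joinstr(", ", words);  // "gcc, clang, msvc"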

+55 -22 src/debate/argument_parser.cpp

@@ -1,11 +1,15 @@
#include "./argument_parser.hpp"

/// XXX: Refactor this after pulling debate:: out of dds
#include <dds/dym.hpp>

#include <boost/leaf/error.hpp>
#include <boost/leaf/exception.hpp>
#include <boost/leaf/on_error.hpp>

#include <fmt/color.h>
#include <fmt/format.h>
#include <neo/scope.hpp>

#include <set>

@@ -32,7 +36,7 @@ struct parse_engine {
void see(const argument& arg) {
auto did_insert = seen.insert(&arg).second;
if (!did_insert && !arg.can_repeat) {
throw boost::leaf::exception(invalid_repitition("Invalid repetition"));
BOOST_LEAF_THROW_EXCEPTION(invalid_repitition("Invalid repetition"));
}
}

@@ -45,12 +49,42 @@ struct parse_engine {
finalize();
}

std::optional<std::string> find_nearest_arg_spelling(std::string_view given) const noexcept {
std::vector<std::string> candidates;
// Only match arguments of the correct type
auto parser = bottom_parser;
while (parser) {
for (auto& arg : parser->arguments()) {
for (auto& l : arg.long_spellings) {
candidates.push_back("--" + l);
}
for (auto& s : arg.short_spellings) {
candidates.push_back("-" + s);
}
}
parser = parser->parent().pointer();
}
if (bottom_parser->subparsers()) {
auto&& grp = *bottom_parser->subparsers();
for (auto& p : grp._p_subparsers) {
candidates.push_back(p.name);
}
}
return dds::did_you_mean(given, candidates);
}

void parse_another() {
auto given = current_arg();
auto did_parse = try_parse_given(given);
if (!did_parse) {
throw boost::leaf::exception(unrecognized_argument("Unrecognized argument"),
e_arg_spelling{std::string(given)});
neo_defer {
auto dym = find_nearest_arg_spelling(given);
if (dym) {
boost::leaf::current_error().load(e_did_you_mean{*dym});
}
};
BOOST_LEAF_THROW_EXCEPTION(unrecognized_argument("Unrecognized argument"),
e_arg_spelling{std::string(given)});
}
}

@@ -81,7 +115,7 @@ struct parse_engine {

bool try_parse_long(strv tail, const strv given) {
if (tail == "help") {
throw boost::leaf::exception(help_request());
BOOST_LEAF_THROW_EXCEPTION(help_request());
}
auto argset = bottom_parser;
while (argset) {
@@ -115,8 +149,8 @@ struct parse_engine {
if (arg.nargs == 0) {
if (!tail.empty()) {
// We should not have a value
throw boost::leaf::exception(invalid_arguments("Argument does not expect a value"),
e_wrong_val_num{1});
BOOST_LEAF_THROW_EXCEPTION(invalid_arguments("Argument does not expect a value"),
e_wrong_val_num{1});
}
// Just a switch. Dispatch
arg.action(given, given);
@@ -133,17 +167,17 @@ struct parse_engine {
tail.remove_prefix(1);
// The remainder is a single value
if (arg.nargs > 1) {
throw boost::leaf::exception(invalid_arguments("Invalid number of values"),
e_wrong_val_num{1});
BOOST_LEAF_THROW_EXCEPTION(invalid_arguments("Invalid number of values"),
e_wrong_val_num{1});
}
arg.action(tail, given);
} else {
// Trailing words are arguments
for (auto i = 0; i < arg.nargs; ++i) {
if (at_end()) {
throw boost::leaf::exception(invalid_arguments(
"Invalid number of argument values"),
e_wrong_val_num{i});
BOOST_LEAF_THROW_EXCEPTION(invalid_arguments(
"Invalid number of argument values"),
e_wrong_val_num{i});
}
arg.action(current_arg(), given);
shift();
@@ -164,7 +198,7 @@ struct parse_engine {

bool try_parse_short(strv tail, const strv given) {
if (tail == "h") {
throw boost::leaf::exception(help_request());
BOOST_LEAF_THROW_EXCEPTION(help_request());
}
auto argset = bottom_parser;
while (argset) {
@@ -213,7 +247,7 @@ struct parse_engine {
// The next argument is the value
shift();
if (at_end()) {
throw boost::leaf::exception(invalid_arguments("Expected a value"));
BOOST_LEAF_THROW_EXCEPTION(invalid_arguments("Expected a value"));
}
arg.action(current_arg(), spelling);
shift();
@@ -228,16 +262,15 @@ struct parse_engine {
} else {
// Consume the next arguments
if (!tail.empty()) {
throw boost::leaf::exception(invalid_arguments(
"Wrong number of argument values given"),
e_wrong_val_num{1});
BOOST_LEAF_THROW_EXCEPTION(invalid_arguments(
"Wrong number of argument values given"),
e_wrong_val_num{1});
}
shift();
for (auto i = 0; i < arg.nargs; ++i) {
if (at_end()) {
throw boost::leaf::exception(invalid_arguments(
"Wrong number of argument values"),
e_wrong_val_num{i});
BOOST_LEAF_THROW_EXCEPTION(invalid_arguments("Wrong number of argument values"),
e_wrong_val_num{i});
}
arg.action(current_arg(), spelling);
shift();
@@ -343,15 +376,15 @@ struct parse_engine {
argset = argset->parent().pointer();
}
if (bottom_parser->subparsers() && bottom_parser->subparsers()->required) {
throw boost::leaf::exception(missing_required("Expected a subcommand"));
BOOST_LEAF_THROW_EXCEPTION(missing_required("Expected a subcommand"));
}
}

void finalize(const argument_parser& argset) {
for (auto& arg : argset.arguments()) {
if (arg.required && !seen.contains(&arg)) {
throw boost::leaf::exception(missing_required("Required argument is missing"),
e_argument{arg});
BOOST_LEAF_THROW_EXCEPTION(missing_required("Required argument is missing"),
e_argument{arg});
}
}
}
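
The unrecognized-argument path relies on neo_defer (from <neo/scope.hpp>, included above): the deferred block runs at scope exit, even when that exit is the BOOST_LEAF_THROW_EXCEPTION below it, which is how the did-you-mean hint gets loaded onto the error already in flight. A minimal sketch of that ordering (illustrative):

#include <neo/scope.hpp>

#include <cstdio>
#include <stdexcept>

void demo() {
    neo_defer { std::puts("second: runs while the exception unwinds"); };
    throw std::runtime_error("first: begins the unwinding");
}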

+15 -2 src/fansi/styled.cpp

@@ -16,7 +16,20 @@
#include <vector>

#if NEO_OS_IS_WINDOWS
bool fansi::detect_should_style() noexcept { return false; }
#include <windows.h>

bool fansi::detect_should_style() noexcept {
auto stdio_console = ::GetStdHandle(STD_OUTPUT_HANDLE);
if (stdio_console == INVALID_HANDLE_VALUE) {
return false;
}
DWORD mode = 0;
if (!::GetConsoleMode(stdio_console, &mode)) {
// Failed to get the mode
return false;
}
return (mode & ENABLE_VIRTUAL_TERMINAL_PROCESSING);
}
#else
#include <unistd.h>
bool fansi::detect_should_style() noexcept { return ::isatty(STDOUT_FILENO); }
@@ -163,7 +176,7 @@ std::string fansi::stylize(std::string_view str, fansi::should_style should) {
return text_styler{str, should}.render();
}

std::string_view detail::cached_rendering(const char* ptr) noexcept {
const std::string& detail::cached_rendering(const char* ptr) noexcept {
thread_local std::map<const char*, std::string> cache;
auto found = cache.find(ptr);
if (found == cache.end()) {

+2 -2 src/fansi/styled.hpp

@@ -19,12 +19,12 @@ enum class should_style {
std::string stylize(std::string_view text, should_style = should_style::detect);

namespace detail {
std::string_view cached_rendering(const char* ptr) noexcept;
const std::string& cached_rendering(const char* ptr) noexcept;
}

inline namespace literals {
inline namespace styled_literals {
inline std::string_view operator""_styled(const char* str, std::size_t) {
inline const std::string& operator""_styled(const char* str, std::size_t) {
return detail::cached_rendering(str);
}


+0 -0 tests/projects/sdist/src/foo.cpp


+7 -0 tests/projects/simple-cmake/CMakeLists.txt

@@ -0,0 +1,7 @@
cmake_minimum_required(VERSION 3.12)
project(TestProject)

include(${PROJECT_BINARY_DIR}/libraries.cmake)

add_executable(app main.cpp)
target_link_libraries(app PRIVATE test::foo)

+3 -0 tests/projects/simple-cmake/main.cpp

@@ -0,0 +1,3 @@
#include <foo.hpp>

int main() { say_hello(); }

+3 -0 tests/projects/simple/include/foo.hpp

@@ -0,0 +1,3 @@
#pragma once

extern void say_hello();

tests/projects/sdist/include/header.h → tests/projects/simple/include/header.h

tests/projects/sdist/include/header.hpp → tests/projects/simple/include/header.hpp

tests/projects/sdist/library.jsonc → tests/projects/simple/library.jsonc

tests/projects/sdist/other-file.txt → tests/projects/simple/other-file.txt

tests/projects/sdist/package.json5 → tests/projects/simple/package.json5


+5 -0 tests/projects/simple/src/foo.cpp

@@ -0,0 +1,5 @@
#include <foo.hpp>

#include <iostream>

void say_hello() { std::cout << "Hello!\n"; }

+21 -0 tests/projects/tweaks/include/tweakable.config.hpp

@@ -0,0 +1,21 @@
#pragma once

#if __has_include(<tweakable.tweaks.hpp>)
#include <tweakable.tweaks.hpp>
#endif

namespace tweakable {

namespace config {

namespace defaults {

const int value = 99;

} // namespace defaults

using namespace defaults;

} // namespace config

} // namespace tweakable
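
This defaults/using-namespace arrangement is what makes the tweaks directory work: a tweaks header may declare a name directly in tweakable::config, and qualified lookup prefers a namespace's own members over names injected by a using-directive. A hypothetical tweakable.tweaks.hpp that a tweaks dir could supply:

#pragma once

namespace tweakable::config {

// Shadows defaults::value: config::value now resolves here, because direct
// members beat the names brought in by `using namespace defaults;`.
const int value = 42;

}  // namespace tweakable::config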

+7 -0 tests/projects/tweaks/include/tweakable.hpp

@@ -0,0 +1,7 @@
#pragma once

namespace tweakable {

extern int get_value();

} // namespace tweakable

+3 -0 tests/projects/tweaks/library.jsonc

@@ -0,0 +1,3 @@
{
"name": "foo"
}

+5 -0 tests/projects/tweaks/package.json5

@@ -0,0 +1,5 @@
{
name: 'tweakable',
version: '1.2.3',
"namespace": "test",
}

+6 -0 tests/projects/tweaks/src/tweakable.cpp

@@ -0,0 +1,6 @@
#include <tweakable.config.hpp>
#include <tweakable.hpp>

#include <iostream>

int tweakable::get_value() { return tweakable::config::value; }

+3 -0 tests/projects/tweaks/src/tweakable.main.cpp

@@ -0,0 +1,3 @@
#include <tweakable.hpp>

int main() { return tweakable::get_value(); }

+1 -1 tests/test_basics.py

@@ -81,6 +81,6 @@ def test_empty_with_pkg_json(tmp_project: Project) -> None:

def test_empty_sdist_create(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.sdist_create()
tmp_project.pkg_create()
assert tmp_project.build_root.joinpath('test-pkg@0.2.2.tar.gz').is_file(), \
'The expected sdist tarball was not generated'

+0 -0 tests/test_build_deps.py


Some files were not shown because too many files changed in this diff.
