
Merge branch 'feature/static-http-repo' into develop

Branch: default_compile_flags
vector-of-bool, 3 years ago
Commit 4ae3392b5e
100 changed files with 58670 additions and 10628 deletions

1. .gitignore (+9, -1)
2. .pylintrc (+161, -0)
3. .style.yapf (+3, -0)
4. Makefile (+54, -22)
5. azure-pipelines.yml (+35, -19)
6. catalog.old.json (+0, -3976)
7. data/neo-url@0.2.1.tar.gz (BIN)
8. docs/err/invalid-remote-url.rst (+14, -0)
9. library.jsonc (+7, -3)
10. mypy.ini (+6, -0)
11. old-catalog.json (+251, -0)
12. package.jsonc (+9, -4)
13. poetry.lock (+549, -0)
14. pyproject.toml (+36, -0)
15. pytest.ini (+3, -0)
16. src/dds.main.cpp (+96, -946)
17. src/dds/3rd/args.hxx (+0, -4283)
18. src/dds/build/builder.cpp (+11, -4)
19. src/dds/build/builder.hpp (+1, -1)
20. src/dds/build/params.hpp (+1, -1)
21. src/dds/build/plan/archive.cpp (+11, -3)
22. src/dds/build/plan/compile_exec.cpp (+5, -2)
23. src/dds/build/plan/compile_file.hpp (+1, -1)
24. src/dds/build/plan/exe.cpp (+8, -3)
25. src/dds/build/plan/exe.hpp (+3, -2)
26. src/dds/build/plan/library.hpp (+1, -1)
27. src/dds/build/plan/template.cpp (+1, -1)
28. src/dds/build/plan/template.hpp (+1, -1)
29. src/dds/catalog/catalog.cpp (+0, -438)
30. src/dds/catalog/catalog.hpp (+0, -50)
31. src/dds/catalog/catalog.test.cpp (+0, -106)
32. src/dds/catalog/get.cpp (+0, -83)
33. src/dds/catalog/get.hpp (+0, -16)
34. src/dds/catalog/import.cpp (+0, -208)
35. src/dds/catalog/import.hpp (+0, -9)
36. src/dds/catalog/import.test.cpp (+0, -154)
37. src/dds/catalog/init_catalog.cpp (+0, -34)
38. src/dds/catalog/init_catalog.hpp (+0, -11)
39. src/dds/catalog/package_info.hpp (+0, -25)
40. src/dds/catalog/remote/git.cpp (+0, -42)
41. src/dds/catalog/remote/git.hpp (+0, -24)
42. src/dds/catch2_embedded.generated.cpp (+54323, -3)
43. src/dds/cli/cmd/build.cpp (+40, -0)
44. src/dds/cli/cmd/build_common.cpp (+45, -0)
45. src/dds/cli/cmd/build_common.hpp (+11, -0)
46. src/dds/cli/cmd/build_deps.cpp (+63, -0)
47. src/dds/cli/cmd/compile_file.cpp (+20, -0)
48. src/dds/cli/cmd/pkg_get.cpp (+73, -0)
49. src/dds/cli/cmd/pkg_import.cpp (+57, -0)
50. src/dds/cli/cmd/pkg_ls.cpp (+60, -0)
51. src/dds/cli/cmd/pkg_repo_add.cpp (+24, -0)
52. src/dds/cli/cmd/pkg_repo_err_handle.cpp (+75, -0)
53. src/dds/cli/cmd/pkg_repo_err_handle.hpp (+9, -0)
54. src/dds/cli/cmd/pkg_repo_ls.cpp (+33, -0)
55. src/dds/cli/cmd/pkg_repo_remove.cpp (+26, -0)
56. src/dds/cli/cmd/pkg_repo_update.cpp (+19, -0)
57. src/dds/cli/cmd/repoman_add.cpp (+88, -0)
58. src/dds/cli/cmd/repoman_import.cpp (+57, -0)
59. src/dds/cli/cmd/repoman_init.cpp (+48, -0)
60. src/dds/cli/cmd/repoman_ls.cpp (+37, -0)
61. src/dds/cli/cmd/repoman_remove.cpp (+45, -0)
62. src/dds/cli/cmd/sdist_create.cpp (+45, -0)
63. src/dds/cli/dispatch_main.cpp (+107, -0)
64. src/dds/cli/dispatch_main.hpp (+9, -0)
65. src/dds/cli/error_handler.cpp (+73, -0)
66. src/dds/cli/error_handler.hpp (+9, -0)
67. src/dds/cli/options.cpp (+479, -0)
68. src/dds/cli/options.hpp (+267, -0)
69. src/dds/db/database.cpp (+27, -29)
70. src/dds/dym.cpp (+1, -2)
71. src/dds/dym.hpp (+0, -30)
72. src/dds/error/errors.cpp (+19, -5)
73. src/dds/error/errors.hpp (+24, -2)
74. src/dds/error/nonesuch.cpp (+15, -0)
75. src/dds/error/nonesuch.hpp (+19, -0)
76. src/dds/error/on_error.hpp (+17, -0)
77. src/dds/error/result.hpp (+12, -0)
78. src/dds/error/result_fwd.hpp (+14, -0)
79. src/dds/package/id.cpp (+0, -32)
80. src/dds/pkg/cache.cpp (+33, -32)
81. src/dds/pkg/cache.hpp (+19, -19)
82. src/dds/pkg/db.cpp (+380, -0)
83. src/dds/pkg/db.hpp (+47, -0)
84. src/dds/pkg/db.test.cpp (+75, -0)
85. src/dds/pkg/get/base.cpp (+33, -0)
86. src/dds/pkg/get/base.hpp (+26, -0)
87. src/dds/pkg/get/dds_http.cpp (+41, -0)
88. src/dds/pkg/get/dds_http.hpp (+31, -0)
89. src/dds/pkg/get/dds_http.test.cpp (+12, -0)
90. src/dds/pkg/get/get.cpp (+71, -0)
91. src/dds/pkg/get/get.hpp (+16, -0)
92. src/dds/pkg/get/git.cpp (+59, -0)
93. src/dds/pkg/get/git.hpp (+22, -0)
94. src/dds/pkg/get/git.test.cpp (+9, -0)
95. src/dds/pkg/get/github.cpp (+42, -0)
96. src/dds/pkg/get/github.hpp (+24, -0)
97. src/dds/pkg/get/github.test.cpp (+11, -0)
98. src/dds/pkg/get/http.cpp (+123, -0)
99. src/dds/pkg/get/http.hpp (+29, -0)
100. src/dds/pkg/get/http.test.cpp (+0, -0)

.gitignore (+9, -1)

@@ -5,4 +5,12 @@ __pycache__/
.mypy_cache/
_prebuilt/
.pytest_cache/
.vagrant/

## Generate by CI scripts:
# A copy of OpenSSL for Windows:
external/OpenSSL
.docker-ccache/

*.egg-info
*.stamp

.pylintrc (+161, -0)

@@ -0,0 +1,161 @@
[MASTER]

jobs=1
persistent=yes
suggestion-mode=yes
unsafe-load-any-extension=no

[MESSAGES CONTROL]

confidence=
disable=C,too-few-public-methods,redefined-outer-name
enable=c-extension-no-member


[REPORTS]

evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
output-format=colorized
reports=no
score=yes


[REFACTORING]

max-nested-blocks=5
never-returning-functions=optparse.Values,sys.exit


[BASIC]

argument-naming-style=snake_case
attr-naming-style=snake_case
class-attribute-naming-style=snake_case
class-naming-style=PascalCase
const-naming-style=UPPER_CASE
docstring-min-length=-1
function-naming-style=snake_case
# Good variable names which should always be accepted, separated by a comma
good-names=i,
j,
k,
ex,
Run,
fd,
_

include-naming-hint=no
inlinevar-naming-style=any
method-naming-style=snake_case
module-naming-style=snake_case
name-group=
no-docstring-rgx=^_
variable-naming-style=snake_case


[FORMAT]

expected-line-ending-format=LF
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
max-line-length=100
max-module-lines=1000
no-space-check=trailing-comma,
dict-separator
single-line-class-stmt=no
single-line-if-stmt=no


[LOGGING]

# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO


[SIMILARITIES]

ignore-comments=yes
ignore-docstrings=yes
ignore-imports=no
min-similarity-lines=4


[SPELLING]

max-spelling-suggestions=4
spelling-dict=
spelling-ignore-words=
spelling-private-dict-file=
spelling-store-unknown-words=no


[TYPECHECK]

contextmanager-decorators=contextlib.contextmanager
generated-members=
ignore-mixin-members=yes
ignore-on-opaque-inference=yes
ignored-classes=optparse.Values,thread._local,_thread._local
ignored-modules=
missing-member-hint=yes
missing-member-hint-distance=1
missing-member-max-choices=1


[VARIABLES]

additional-builtins=
allow-global-unused-variables=yes
callbacks=cb_,
_cb
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
ignored-argument-names=_.*|^ignored_|^unused_
init-import=no
redefining-builtins-modules=six.moves,past.builtins,future.builtins


[CLASSES]

defining-attr-methods=__init__,__new__

exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
valid-classmethod-first-arg=cls
valid-metaclass-classmethod-first-arg=mcs


[DESIGN]

max-args=5
max-attributes=7
max-bool-expr=5
max-branches=12
max-locals=15
max-parents=7
max-public-methods=20
max-returns=6
max-statements=50
min-public-methods=2


[IMPORTS]

allow-wildcard-with-all=no
analyse-fallback-blocks=no
deprecated-modules=optparse,tkinter.tix
ext-import-graph=
import-graph=
int-import-graph=
known-standard-library=
known-third-party=enchant
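
As a worked example of the evaluation formula in [REPORTS] above (numbers invented for illustration): a module with 100 statements, 2 errors, 3 warnings, and no refactor or convention messages scores

    10.0 - ((5*2 + 3 + 0 + 0) / 100) * 10 = 10.0 - 1.3 = 8.7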

.style.yapf (+3, -0)

@@ -0,0 +1,3 @@
[style]
based_on_style = pep8
column_limit = 120

Makefile (+54, -22)

@@ -1,16 +1,15 @@
.SILENT:

.PHONY: \
docs docs-server docs-watch docs-sync-server nix-ci linux-ci macos-ci \
vagrant-freebsd-ci site
docs docs-server docs-watch docs-sync-server linux-ci macos-ci \
vagrant-freebsd-ci site alpine-static-ci _alpine-static-ci poetry-setup \
full-ci dev-build release-build

_invalid:
echo "Specify a target name to execute"
exit 1

clean:
rm -f -r -- $(shell find -name __pycache__ -type d)
rm -f -r -- _build/ _prebuilt/
rm -f -vr -- $(shell find -name __pycache__ -type d)
rm -f -vr -- _build/ _prebuilt/
rm -f -v -- $(shell find -name "*.stamp" -type f)

docs:
sphinx-build -b html \
@@ -38,31 +37,57 @@ docs-sync-server:
--reload-delay 300 \
--watch **/*.html

macos-ci:
python3 -u tools/ci.py \
-B download \
-T tools/gcc-9-rel.jsonc
.poetry.stamp: poetry.lock
poetry install --no-dev
touch .poetry.stamp

poetry-setup: .poetry.stamp

full-ci: poetry-setup
poetry run dds-ci --clean

dev-build: poetry-setup
poetry run dds-ci --rapid

release-build: poetry-setup
poetry run dds-ci --no-test

macos-ci: full-ci
mv _build/dds _build/dds-macos-x64

linux-ci:
python3 -u tools/ci.py \
-B download \
-T tools/gcc-9-static-rel.jsonc
linux-ci: full-ci
mv _build/dds _build/dds-linux-x64

nix-ci:
python3 -u tools/ci.py \
-B download \
-T tools/gcc-9-rel.jsonc
_alpine-static-ci:
poetry install --no-dev
# Alpine Linux does not ship with ASan nor UBSan, so we can't use them in
# our test-build. Just use the same build for both. CCache will also speed this up.
poetry run dds-ci \
--bootstrap-with=lazy \
--test-toolchain=tools/gcc-9-static-rel.jsonc \
--main-toolchain=tools/gcc-9-static-rel.jsonc
mv _build/dds _build/dds-linux-x64

alpine-static-ci:
docker build \
--build-arg DDS_USER_UID=$(shell id -u) \
-t dds-builder \
-f tools/Dockerfile.alpine \
tools/
docker run \
-t --rm \
-u $(shell id -u) \
-v $(PWD):/host -w /host \
-e CCACHE_DIR=/host/.docker-ccache \
dds-builder \
make _alpine-static-ci

vagrant-freebsd-ci:
vagrant up freebsd11
vagrant rsync
vagrant ssh freebsd11 -c '\
cd /vagrant && \
python3.7 tools/ci.py \
-B download \
-T tools/freebsd-gcc-10.jsonc \
make full-ci \
'
mkdir -p _build/
vagrant scp freebsd11:/vagrant/_build/dds _build/dds-freebsd-x64
@@ -74,3 +99,10 @@ site: docs
cp site/index.html _site/
cp -r _build/docs _site/
echo "Site generated at _site/"

py-check:
poetry run mypy tools/dds_ci $(shell find tests/ -name *.py)
poetry run pylint tools/dds_ci $(shell find tests/ -name *.py)

format:
poetry run dds-format
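
The .poetry.stamp rule above uses the classic stamp-file idiom: "poetry install" reruns only when poetry.lock is newer than the stamp file recording the last install. A rough C++ sketch of the same staleness test (illustrative only; Make performs this mtime comparison itself):

    #include <filesystem>

    namespace fs = std::filesystem;

    // True when the lockfile changed since the last recorded install:
    // the same "input newer than stamp" test Make applies to .poetry.stamp.
    bool needs_install(const fs::path& lock  = "poetry.lock",
                       const fs::path& stamp = ".poetry.stamp") {
        return !fs::exists(stamp)
            || fs::last_write_time(lock) > fs::last_write_time(stamp);
    }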

azure-pipelines.yml (+35, -19)

@@ -2,10 +2,7 @@

variables:
shouldDeploy: >-
${{ or(
eq(variables['Build.SourceBranch'], 'refs/heads/develop'),
eq(variables['Build.SourceBranch'], 'refs/heads/master')
) }}
${{ eq(variables['Build.SourceBranch'], 'refs/heads/master') }}
deployDest: ${{ format('~/web/{0}/', variables['Build.SourceBranchName']) }}

stages:
@@ -17,15 +14,25 @@ stages:
pool:
vmImage: windows-2019
steps:
- pwsh: tools\get-win-openssl.ps1
displayName: Get OpenSSL for Windows
- script: python -m pip install poetry && poetry install --no-dev
displayName: Install Python deps
- script: |
echo Loading VS environment
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\vsdevcmd" -arch=x64 || exit 1
echo Executing Build and Tests
reg add HKLM\SYSTEM\CurrentControlSet\Control\FileSystem /v LongPathsEnabled /t REG_DWORD /d 1 /f || exit 1
python -m pip install pytest pytest-xdist || exit 1
python -u tools/ci.py -B download -T tools\msvc.jsonc || exit 1
poetry run dds-ci || exit 1
move _build\dds.exe _build\dds-win-x64.exe || exit 1
displayName: Build and Test
- task: PublishTestResults@2
displayName: Publish Tests
condition: succeededOrFailed()
inputs:
testResultsFiles: '**/pytest-junit.xml'
failTaskOnFailedTests: true
testRunTitle: Windows Tests
- publish: _build\dds-win-x64.exe
displayName: Publish
artifact: dds-win-x64
@@ -35,14 +42,15 @@ stages:
pool:
vmImage: ubuntu-18.04
steps:
- script: |
set -eu
sudo apt update -y
sudo apt install -y python3-minimal python3-setuptools g++-9 ccache
python3 -m pip install pytest pytest-xdist
displayName: Prepare System
- script: make linux-ci
- script: make alpine-static-ci
displayName: Build and Test
- task: PublishTestResults@2
displayName: Publish Tests
condition: succeededOrFailed()
inputs:
testResultsFiles: '**/pytest-junit.xml'
failTaskOnFailedTests: true
testRunTitle: Linux Tests
- publish: _build/dds-linux-x64
displayName: Publish
artifact: dds-linux-x64
@@ -50,15 +58,23 @@ stages:
- job: macos_gcc9
displayName: macOS - GCC 9
pool:
vmImage: macOS-10.14
vmImage: macOS-10.15
steps:
- script: brew install gcc@9 ccache
displayName: Prepare System
- script: |
set -eu
python3 -m pip install pytest pytest-xdist
make macos-ci
displayName: Get GCC 9
- script: brew install openssl@1.1
displayName: Install OpenSSL
- script: python3 -m pip install poetry
displayName: Get Python Dependencies
- script: make macos-ci
displayName: Build and Test
- task: PublishTestResults@2
displayName: Publish Tests
condition: succeededOrFailed()
inputs:
testResultsFiles: '**/pytest-junit.xml'
failTaskOnFailedTests: true
testRunTitle: macOS Tests
- publish: _build/dds-macos-x64
displayName: Publish
artifact: dds-macos-x64

catalog.old.json (+0, -3976; file diff suppressed because it is too large)


data/neo-url@0.2.1.tar.gz (BIN)


docs/err/invalid-remote-url.rst (+14, -0)

@@ -0,0 +1,14 @@
Error: Invalid Remote/Package URL
#################################

``dds`` encodes a lot of information about remotes repositories and remote
packages in URLs. If you received this error, it may be because:

1. The URL syntax is invalid. Make sure that you have spelled it correctly.
2. The URL scheme (the part at the beginning, before the ``://``) is unsupported
by ``dds``. ``dds`` only supports a subset of possible URL schemes in
different contexts. Check the output carefully and read the documentation
about the task you are trying to solve.
3. There are missing URL components that the task is expecting. For example,
``git`` remote URLs require that the URL have a URL fragment specifying the
tag/branch to clone. (The fragment is the final ``#`` component.)
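
For illustration, a hypothetical URL of this shape (invented here, not taken from the dds docs): ``https://example.com/my-lib.git#v1.2.3``, where the final ``#v1.2.3`` fragment names the tag to clone.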

library.jsonc (+7, -3)

@@ -6,17 +6,21 @@
"microsoft/wil",
"range-v3/range-v3",
"nlohmann/json",
"neo/sqlite3",
"neo/fun",
"neo/sqlite3",
"vob/semver",
"vob/pubgrub",
"vob/json5",
"vob/semester",
"hanickadot/ctre",
// "neo/io",
"neo/io",
"neo/http",
"neo/url",
"boost/leaf",
// Explicit zlib link is required due to linker input order bug.
// Can be removed after alpha.5
"zlib/zlib",
"neo/compress"
"neo/compress",
"neargye/magic_enum",
]
}

mypy.ini (+6, -0)

@@ -0,0 +1,6 @@
[mypy]
strict=True
ignore_missing_imports=True
incremental=True
sqlite_cache=True
mypy_path = tools/

catalog.json → old-catalog.json (renamed; +251, -0)

@@ -2201,6 +2201,18 @@
"transform": [],
"url": "https://github.com/vector-of-bool/neo-buffer.git"
}
},
"0.4.2": {
"depends": [
"neo-concepts^0.4.0",
"neo-fun^0.4.1"
],
"description": "Buffer and byte algorithms/types based on those of Asio",
"git": {
"ref": "0.4.2",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-buffer.git"
}
}
},
"neo-compress": {
@@ -2216,6 +2228,32 @@
"transform": [],
"url": "https://github.com/vector-of-bool/neo-compress.git"
}
},
"0.1.1": {
"depends": [
"neo-buffer^0.4.1",
"neo-fun^0.5.0",
"zlib^1.2.9"
],
"description": "Compression, archiving, etc. for C++20",
"git": {
"ref": "0.1.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-compress.git"
}
},
"0.2.0": {
"depends": [
"neo-buffer^0.4.1",
"neo-fun^0.5.0",
"zlib^1.2.9"
],
"description": "Compression, archiving, etc. for C++20",
"git": {
"ref": "0.2.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-compress.git"
}
}
},
"neo-concepts": {
@@ -2337,6 +2375,118 @@
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.4.2": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.4.2",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.5.0": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.5.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.5.1": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.5.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.5.2": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.5.2",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.5.3": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.5.3",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.5.4": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.5.4",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.5.5": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.5.5",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
},
"0.6.0": {
"depends": [],
"description": "Some library components that didn't quite fit anywhere else...",
"git": {
"ref": "0.6.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-fun.git"
}
}
},
"neo-http": {
"0.1.0": {
"depends": [
"neo-buffer^0.4.2",
"neo-fun^0.5.4"
],
"description": "A modern HTTP library",
"git": {
"ref": "0.1.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-http.git"
}
}
},
"neo-io": {
"0.1.0": {
"depends": [
"neo-fun~0.5.4",
"neo-buffer~0.4.2"
],
"description": "A modern IO library",
"git": {
"ref": "0.1.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-io.git"
}
},
"0.1.1": {
"depends": [
"neo-fun^0.6.0",
"neo-buffer^0.4.2"
],
"description": "A modern IO library",
"git": {
"ref": "0.1.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-io.git"
}
}
},
"neo-sqlite3": {
@@ -2357,6 +2507,107 @@
"transform": [],
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
},
"0.4.0": {
"depends": [
"neo-fun^0.5.0"
],
"description": "A modern and low-level C++ SQLite API",
"git": {
"ref": "0.4.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
},
"0.4.1": {
"depends": [
"neo-fun^0.5.0"
],
"description": "A modern and low-level C++ SQLite API",
"git": {
"ref": "0.4.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-sqlite3.git"
}
}
},
"neo-url": {
"0.1.0": {
"depends": [
"neo-fun^0.4.1"
],
"description": "URL parsing and handling library.",
"git": {
"ref": "0.1.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-url.git"
}
},
"0.1.1": {
"depends": [
"neo-fun^0.4.3"
],
"description": "URL parsing and handling library.",
"git": {
"ref": "0.1.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-url.git"
}
},
"0.1.2": {
"depends": [
"neo-fun^0.4.3"
],
"description": "URL parsing and handling library.",
"git": {
"ref": "0.1.2",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-url.git"
}
},
"0.2.0": {
"depends": [
"neo-fun^0.5.5"
],
"description": "URL parsing and handling library.",
"git": {
"ref": "0.2.0",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-url.git"
}
},
"0.2.1": {
"depends": [
"neo-fun^0.5.5"
],
"description": "URL parsing and handling library.",
"git": {
"ref": "0.2.1",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-url.git"
}
},
"0.2.2": {
"depends": [
"neo-fun^0.5.5"
],
"description": "URL parsing and handling library.",
"git": {
"ref": "0.2.2",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-url.git"
}
},
"0.2.3": {
"depends": [
"neo-fun^0.5.5"
],
"description": "URL parsing and handling library.",
"git": {
"ref": "0.2.3",
"transform": [],
"url": "https://github.com/vector-of-bool/neo-url.git"
}
}
},
"nlohmann-json": {

package.jsonc (+9, -4)

@@ -8,15 +8,20 @@
"ms-wil@2020.3.16",
"range-v3@0.11.0",
"nlohmann-json@3.7.1",
"neo-sqlite3@0.2.3",
"neo-fun^0.3.2",
"neo-compress^0.1.0",
"neo-sqlite3@0.4.1",
"neo-fun~0.6.0",
"neo-compress~0.2.0",
"neo-url~0.2.3",
"semver@0.2.2",
"pubgrub@0.2.1",
"vob-json5@0.1.5",
"vob-semester@0.2.2",
"ctre@2.8.1",
"fmt^7.0.3"
"fmt^7.0.3",
"neo-http^0.1.0",
"neo-io^0.1.1",
"boost.leaf~0.3.0",
"magic_enum+0.0.0",
],
"test_driver": "Catch-Main"
}

poetry.lock (+549, -0)

@@ -0,0 +1,549 @@
[[package]]
name = "apipkg"
version = "1.5"
description = "apipkg: namespace control and lazy-import mechanism"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "astroid"
version = "2.4.2"
description = "An abstract syntax tree for Python with inference support."
category = "dev"
optional = false
python-versions = ">=3.5"

[package.dependencies]
lazy-object-proxy = ">=1.4.0,<1.5.0"
six = ">=1.12,<2.0"
typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""}
wrapt = ">=1.11,<2.0"

[[package]]
name = "atomicwrites"
version = "1.4.0"
description = "Atomic file writes."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "attrs"
version = "20.3.0"
description = "Classes Without Boilerplate"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"]
docs = ["furo", "sphinx", "zope.interface"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"]

[[package]]
name = "colorama"
version = "0.4.4"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[[package]]
name = "distro"
version = "1.5.0"
description = "Distro - an OS platform information API"
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "execnet"
version = "1.7.1"
description = "execnet: rapid multi-Python deployment"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.dependencies]
apipkg = ">=1.4"

[package.extras]
testing = ["pre-commit"]

[[package]]
name = "importlib-metadata"
version = "3.1.1"
description = "Read metadata from Python packages"
category = "main"
optional = false
python-versions = ">=3.6"

[package.dependencies]
zipp = ">=0.5"

[package.extras]
docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"]

[[package]]
name = "iniconfig"
version = "1.1.1"
description = "iniconfig: brain-dead simple config-ini parsing"
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "isort"
version = "5.6.4"
description = "A Python utility / library to sort Python imports."
category = "dev"
optional = false
python-versions = ">=3.6,<4.0"

[package.extras]
pipfile_deprecated_finder = ["pipreqs", "requirementslib"]
requirements_deprecated_finder = ["pipreqs", "pip-api"]
colors = ["colorama (>=0.4.3,<0.5.0)"]

[[package]]
name = "json5"
version = "0.9.5"
description = "A Python implementation of the JSON5 data format."
category = "main"
optional = false
python-versions = "*"

[package.extras]
dev = ["hypothesis"]

[[package]]
name = "lazy-object-proxy"
version = "1.4.3"
description = "A fast and thorough lazy object proxy."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "mccabe"
version = "0.6.1"
description = "McCabe checker, plugin for flake8"
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "mypy"
version = "0.790"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.5"

[package.dependencies]
mypy-extensions = ">=0.4.3,<0.5.0"
typed-ast = ">=1.4.0,<1.5.0"
typing-extensions = ">=3.7.4"

[package.extras]
dmypy = ["psutil (>=4.0)"]

[[package]]
name = "mypy-extensions"
version = "0.4.3"
description = "Experimental type system extensions for programs checked with the mypy typechecker."
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "packaging"
version = "20.7"
description = "Core utilities for Python packages"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.dependencies]
pyparsing = ">=2.0.2"

[[package]]
name = "pluggy"
version = "0.13.1"
description = "plugin and hook calling mechanisms for python"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[package.dependencies]
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}

[package.extras]
dev = ["pre-commit", "tox"]

[[package]]
name = "py"
version = "1.9.0"
description = "library with cross-python path, ini-parsing, io, code, log facilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "pylint"
version = "2.6.0"
description = "python code static checker"
category = "dev"
optional = false
python-versions = ">=3.5.*"

[package.dependencies]
astroid = ">=2.4.0,<=2.5"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
isort = ">=4.2.5,<6"
mccabe = ">=0.6,<0.7"
toml = ">=0.7.1"

[[package]]
name = "pyparsing"
version = "2.4.7"
description = "Python parsing module"
category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "pytest"
version = "6.1.2"
description = "pytest: simple powerful testing with Python"
category = "main"
optional = false
python-versions = ">=3.5"

[package.dependencies]
atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
attrs = ">=17.4.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<1.0"
py = ">=1.8.2"
toml = "*"

[package.extras]
checkqa_mypy = ["mypy (==0.780)"]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]

[[package]]
name = "pytest-asyncio"
version = "0.14.0"
description = "Pytest support for asyncio."
category = "main"
optional = false
python-versions = ">= 3.5"

[package.dependencies]
pytest = ">=5.4.0"

[package.extras]
testing = ["async-generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"]

[[package]]
name = "pytest-forked"
version = "1.3.0"
description = "run tests in isolated forked subprocesses"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"

[package.dependencies]
py = "*"
pytest = ">=3.10"

[[package]]
name = "pytest-xdist"
version = "2.1.0"
description = "pytest xdist plugin for distributed testing and loop-on-failing modes"
category = "main"
optional = false
python-versions = ">=3.5"

[package.dependencies]
execnet = ">=1.1"
pytest = ">=6.0.0"
pytest-forked = "*"

[package.extras]
psutil = ["psutil (>=3.0)"]
testing = ["filelock"]

[[package]]
name = "rope"
version = "0.18.0"
description = "a python refactoring library..."
category = "dev"
optional = false
python-versions = "*"

[package.extras]
dev = ["pytest"]

[[package]]
name = "semver"
version = "2.13.0"
description = "Python helper for Semantic Versioning (http://semver.org/)"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"

[[package]]
name = "six"
version = "1.15.0"
description = "Python 2 and 3 compatibility utilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
category = "main"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"

[[package]]
name = "typed-ast"
version = "1.4.1"
description = "a fork of Python 2 and 3 ast modules with type comment support"
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "typing-extensions"
version = "3.7.4.3"
description = "Backported and Experimental Type Hints for Python 3.5+"
category = "main"
optional = false
python-versions = "*"

[[package]]
name = "wrapt"
version = "1.12.1"
description = "Module for decorators, wrappers and monkey patching."
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "yapf"
version = "0.30.0"
description = "A formatter for Python code."
category = "dev"
optional = false
python-versions = "*"

[[package]]
name = "zipp"
version = "3.4.0"
description = "Backport of pathlib-compatible object wrapper for zip files"
category = "main"
optional = false
python-versions = ">=3.6"

[package.extras]
docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"]
testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"]

[metadata]
lock-version = "1.1"
python-versions = "^3.6"
content-hash = "5c3cefd7d2a4b573928b14dc6291fbb7ef8a8a29306f7982ad64db4cb615e6e5"

[metadata.files]
apipkg = [
{file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"},
{file = "apipkg-1.5.tar.gz", hash = "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6"},
]
astroid = [
{file = "astroid-2.4.2-py3-none-any.whl", hash = "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"},
{file = "astroid-2.4.2.tar.gz", hash = "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703"},
]
atomicwrites = [
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
]
attrs = [
{file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"},
{file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"},
]
colorama = [
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
]
distro = [
{file = "distro-1.5.0-py2.py3-none-any.whl", hash = "sha256:df74eed763e18d10d0da624258524ae80486432cd17392d9c3d96f5e83cd2799"},
{file = "distro-1.5.0.tar.gz", hash = "sha256:0e58756ae38fbd8fc3020d54badb8eae17c5b9dcbed388b17bb55b8a5928df92"},
]
execnet = [
{file = "execnet-1.7.1-py2.py3-none-any.whl", hash = "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"},
{file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"},
]
importlib-metadata = [
{file = "importlib_metadata-3.1.1-py3-none-any.whl", hash = "sha256:6112e21359ef8f344e7178aa5b72dc6e62b38b0d008e6d3cb212c5b84df72013"},
{file = "importlib_metadata-3.1.1.tar.gz", hash = "sha256:b0c2d3b226157ae4517d9625decf63591461c66b3a808c2666d538946519d170"},
]
iniconfig = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
isort = [
{file = "isort-5.6.4-py3-none-any.whl", hash = "sha256:dcab1d98b469a12a1a624ead220584391648790275560e1a43e54c5dceae65e7"},
{file = "isort-5.6.4.tar.gz", hash = "sha256:dcaeec1b5f0eca77faea2a35ab790b4f3680ff75590bfcb7145986905aab2f58"},
]
json5 = [
{file = "json5-0.9.5-py2.py3-none-any.whl", hash = "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c"},
{file = "json5-0.9.5.tar.gz", hash = "sha256:703cfee540790576b56a92e1c6aaa6c4b0d98971dc358ead83812aa4d06bdb96"},
]
lazy-object-proxy = [
{file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"},
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"},
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"},
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"},
{file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"},
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"},
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"},
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"},
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"},
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"},
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"},
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"},
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"},
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"},
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"},
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"},
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"},
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"},
{file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"},
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"},
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"},
]
mccabe = [
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"},
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"},
]
mypy = [
{file = "mypy-0.790-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:bd03b3cf666bff8d710d633d1c56ab7facbdc204d567715cb3b9f85c6e94f669"},
{file = "mypy-0.790-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:2170492030f6faa537647d29945786d297e4862765f0b4ac5930ff62e300d802"},
{file = "mypy-0.790-cp35-cp35m-win_amd64.whl", hash = "sha256:e86bdace26c5fe9cf8cb735e7cedfe7850ad92b327ac5d797c656717d2ca66de"},
{file = "mypy-0.790-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e97e9c13d67fbe524be17e4d8025d51a7dca38f90de2e462243ab8ed8a9178d1"},
{file = "mypy-0.790-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0d34d6b122597d48a36d6c59e35341f410d4abfa771d96d04ae2c468dd201abc"},
{file = "mypy-0.790-cp36-cp36m-win_amd64.whl", hash = "sha256:72060bf64f290fb629bd4a67c707a66fd88ca26e413a91384b18db3876e57ed7"},
{file = "mypy-0.790-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eea260feb1830a627fb526d22fbb426b750d9f5a47b624e8d5e7e004359b219c"},
{file = "mypy-0.790-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c614194e01c85bb2e551c421397e49afb2872c88b5830e3554f0519f9fb1c178"},
{file = "mypy-0.790-cp37-cp37m-win_amd64.whl", hash = "sha256:0a0d102247c16ce93c97066443d11e2d36e6cc2a32d8ccc1f705268970479324"},
{file = "mypy-0.790-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4e7bf7f1214826cf7333627cb2547c0db7e3078723227820d0a2490f117a01"},
{file = "mypy-0.790-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:af4e9ff1834e565f1baa74ccf7ae2564ae38c8df2a85b057af1dbbc958eb6666"},
{file = "mypy-0.790-cp38-cp38-win_amd64.whl", hash = "sha256:da56dedcd7cd502ccd3c5dddc656cb36113dd793ad466e894574125945653cea"},
{file = "mypy-0.790-py3-none-any.whl", hash = "sha256:2842d4fbd1b12ab422346376aad03ff5d0805b706102e475e962370f874a5122"},
{file = "mypy-0.790.tar.gz", hash = "sha256:2b21ba45ad9ef2e2eb88ce4aeadd0112d0f5026418324176fd494a6824b74975"},
]
mypy-extensions = [
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
]
packaging = [
{file = "packaging-20.7-py2.py3-none-any.whl", hash = "sha256:eb41423378682dadb7166144a4926e443093863024de508ca5c9737d6bc08376"},
{file = "packaging-20.7.tar.gz", hash = "sha256:05af3bb85d320377db281cf254ab050e1a7ebcbf5410685a9a407e18a1f81236"},
]
pluggy = [
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"},
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
py = [
{file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"},
{file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"},
]
pylint = [
{file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"},
{file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"},
]
pyparsing = [
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"},
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"},
]
pytest = [
{file = "pytest-6.1.2-py3-none-any.whl", hash = "sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe"},
{file = "pytest-6.1.2.tar.gz", hash = "sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"},
]
pytest-asyncio = [
{file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"},
{file = "pytest_asyncio-0.14.0-py3-none-any.whl", hash = "sha256:2eae1e34f6c68fc0a9dc12d4bea190483843ff4708d24277c41568d6b6044f1d"},
]
pytest-forked = [
{file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"},
{file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"},
]
pytest-xdist = [
{file = "pytest-xdist-2.1.0.tar.gz", hash = "sha256:82d938f1a24186520e2d9d3a64ef7d9ac7ecdf1a0659e095d18e596b8cbd0672"},
{file = "pytest_xdist-2.1.0-py3-none-any.whl", hash = "sha256:7c629016b3bb006b88ac68e2b31551e7becf173c76b977768848e2bbed594d90"},
]
rope = [
{file = "rope-0.18.0.tar.gz", hash = "sha256:786b5c38c530d4846aa68a42604f61b4e69a493390e3ca11b88df0fbfdc3ed04"},
]
semver = [
{file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"},
{file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"},
]
six = [
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"},
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"},
]
toml = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
typed-ast = [
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"},
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"},
{file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"},
{file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"},
{file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"},
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"},
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"},
{file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"},
{file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"},
{file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"},
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"},
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"},
{file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"},
{file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"},
{file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"},
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"},
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"},
{file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"},
{file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"},
{file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"},
{file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"},
]
typing-extensions = [
{file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"},
{file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"},
{file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"},
]
wrapt = [
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"},
]
yapf = [
{file = "yapf-0.30.0-py2.py3-none-any.whl", hash = "sha256:3abf61ba67cf603069710d30acbc88cfe565d907e16ad81429ae90ce9651e0c9"},
{file = "yapf-0.30.0.tar.gz", hash = "sha256:3000abee4c28daebad55da6c85f3cd07b8062ce48e2e9943c8da1b9667d48427"},
]
zipp = [
{file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"},
{file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"},
]

pyproject.toml (+36, -0)

@@ -0,0 +1,36 @@
[tool.poetry]
name = "dds"
version = "0.0.0"
description = ""
authors = ["vector-of-bool <vectorofbool@gmail.com>"]
license = "MPL-2.0"
packages = [
{ include = "dds_ci", from = "tools/" },
]

[tool.poetry.dependencies]
python = "^3.6"

semver = "^2.13.0"
pytest = "^6.1.2"
pytest-xdist = "^2.1.0"
pytest-asyncio = "^0.14.0"
typing-extensions = "^3.7.4"
json5 = "^0.9.5"
distro = "^1.5.0"

[tool.poetry.dev-dependencies]
# Only needed for development
pylint = "^2.6.0"
mypy = "^0.790"
rope = "^0.18.0"
yapf = "^0.30.0"

[tool.poetry.scripts]
dds-ci = "dds_ci.main:start"
dds-format = "dds_ci.format:start"
gen-msvs-vsc-task = "dds_ci.msvs:generate_vsc_task"

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"

pytest.ini (+3, -0)

@@ -0,0 +1,3 @@
[pytest]
junit_log_passing_tests=true
junit_logging=all

src/dds.main.cpp (+96, -946; file diff suppressed because it is too large)


src/dds/3rd/args.hxx (+0, -4283; file diff suppressed because it is too large)


src/dds/build/builder.cpp (+11, -4)

@@ -10,10 +10,13 @@
#include <dds/util/output.hpp>
#include <dds/util/time.hpp>

#include <fansi/styled.hpp>

#include <array>
#include <set>

using namespace dds;
using namespace fansi::literals;

namespace {

@@ -23,12 +26,16 @@ struct state {
};

void log_failure(const test_failure& fail) {
dds_log(error, "Test '{}' failed! [exited {}]", fail.executable_path.string(), fail.retc);
dds_log(error,
"Test .br.yellow[{}] .br.red[{}] [Exited {}]"_styled,
fail.executable_path.string(),
fail.timed_out ? "TIMED OUT" : "FAILED",
fail.retc);
if (fail.signal) {
dds_log(error, "Test execution received signal {}", fail.signal);
}
if (trim_view(fail.output).empty()) {
dds_log(error, "(Test executable produced no output");
dds_log(error, "(Test executable produced no output)");
} else {
dds_log(error, "Test output:\n{}[dds - test output end]", fail.output);
}
@@ -125,7 +132,7 @@ library_plan prepare_library(state& st,
}

package_plan prepare_one(state& st, const sdist_target& sd) {
package_plan pkg{sd.sd.manifest.pkg_id.name, sd.sd.manifest.namespace_};
package_plan pkg{sd.sd.manifest.id.name, sd.sd.manifest.namespace_};
auto libs = collect_libraries(sd.sd.path);
for (const auto& lib : libs) {
pkg.add_library(prepare_library(st, sd, lib, sd.sd.manifest));
@@ -195,7 +202,7 @@ void write_lmp(build_env_ref env, const package_plan& pkg, path_ref lmp_path) {
}

void write_lmi(build_env_ref env, const build_plan& plan, path_ref base_dir, path_ref lmi_path) {
fs::create_directories(lmi_path.parent_path());
fs::create_directories(fs::absolute(lmi_path).parent_path());
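// With a bare filename such as "index.lmi", lmi_path.parent_path() is
// empty; taking fs::absolute() first yields a real directory to create.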
auto out = open(lmi_path, std::ios::binary | std::ios::out);
out << "Type: Index\n";
for (const auto& pkg : plan.packages()) {
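
The _styled literals adopted throughout these hunks come from the fansi library, which expands markup such as .br.red[...] into terminal color codes. A self-contained sketch of the general technique, a user-defined literal that rewrites style tags into ANSI escapes (an invented illustration, not fansi's actual implementation):

    #include <cstddef>
    #include <cstdio>
    #include <string>
    #include <string_view>

    namespace demo {

    // Map a tag name to an ANSI SGR sequence. The tag set here is invented
    // for the sketch; the real fansi grammar is richer.
    std::string_view ansi_for(std::string_view tag) {
        if (tag == "br.red") return "\x1b[91m";
        if (tag == "br.green") return "\x1b[92m";
        if (tag == "br.yellow") return "\x1b[93m";
        if (tag == "br.cyan") return "\x1b[96m";
        if (tag == "bold.yellow") return "\x1b[1;33m";
        return "";
    }

    // User-defined literal: ".br.red[text]" becomes "\x1b[91mtext\x1b[0m".
    std::string operator""_styled(const char* s, std::size_t n) {
        std::string_view in{s, n};
        std::string      out;
        while (!in.empty()) {
            auto open = in.find('[');
            if (in.front() == '.' && open != std::string_view::npos) {
                auto close = in.find(']', open);
                if (close != std::string_view::npos) {
                    out += ansi_for(in.substr(1, open - 1));
                    out += in.substr(open + 1, close - open - 1);
                    out += "\x1b[0m";  // reset attributes
                    in.remove_prefix(close + 1);
                    continue;
                }
            }
            out += in.front();
            in.remove_prefix(1);
        }
        return out;
    }

    }  // namespace demo

    int main() {
        using namespace demo;
        std::puts("Test .br.yellow[foo.test] .br.red[FAILED]"_styled.c_str());
    }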

src/dds/build/builder.hpp (+1, -1)

@@ -1,7 +1,7 @@
#pragma once

#include <dds/build/params.hpp>
#include <dds/source/dist.hpp>
#include <dds/sdist/dist.hpp>

#include <cassert>
#include <map>

src/dds/build/params.hpp (+1, -1)

@@ -1,6 +1,6 @@
#pragma once

#include <dds/source/dist.hpp>
#include <dds/sdist/dist.hpp>
#include <dds/toolchain/toolchain.hpp>
#include <dds/util/fs.hpp>


src/dds/build/plan/archive.cpp (+11, -3)

@@ -5,10 +5,12 @@
#include <dds/util/log.hpp>
#include <dds/util/time.hpp>

#include <fansi/styled.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/transform.hpp>

using namespace dds;
using namespace fansi::literals;

fs::path create_archive_plan::calc_archive_file_path(const toolchain& tc) const noexcept {
return _subdir / fmt::format("{}{}{}", "lib", _name, tc.archive_suffix());
@@ -23,9 +25,11 @@ void create_archive_plan::archive(const build_env& env) const {
;
// Build up the archive command
archive_spec ar;

auto ar_cwd = env.output_root;
ar.input_files = std::move(objects);
ar.out_path = env.output_root / calc_archive_file_path(env.toolchain);
auto ar_cmd = env.toolchain.create_archive_command(ar, fs::current_path(), env.knobs);
auto ar_cmd = env.toolchain.create_archive_command(ar, ar_cwd, env.knobs);

// `out_relpath` is purely for the benefit of the user to have a short name
// in the logs
@@ -43,7 +47,8 @@ void create_archive_plan::archive(const build_env& env) const {

// Do it!
dds_log(info, "[{}] Archive: {}", _qual_name, out_relpath);
auto&& [dur_ms, ar_res] = timed<std::chrono::milliseconds>([&] { return run_proc(ar_cmd); });
auto&& [dur_ms, ar_res] = timed<std::chrono::milliseconds>(
[&] { return run_proc(proc_options{.command = ar_cmd, .cwd = ar_cwd}); });
dds_log(info, "[{}] Archive: {} - {:L}ms", _qual_name, out_relpath, dur_ms.count());

// Check, log, and throw
@@ -52,7 +57,10 @@ void create_archive_plan::archive(const build_env& env) const {
"Creating static library archive [{}] failed for '{}'",
out_relpath,
_qual_name);
dds_log(error, "Subcommand FAILED: {}\n{}", quote_command(ar_cmd), ar_res.output);
dds_log(error,
"Subcommand FAILED: .bold.yellow[{}]\n{}"_styled,
quote_command(ar_cmd),
ar_res.output);
throw_external_error<
errc::archive_failure>("Creating static library archive [{}] failed for '{}'",
out_relpath,
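
The archive step now routes the archiver through an options aggregate rather than positional arguments. A sketch of that pattern using C++20 designated initializers (proc_options and run_proc here are invented stand-ins, not the real dds declarations):

    #include <cstdio>
    #include <filesystem>
    #include <string>
    #include <vector>

    namespace sketch {

    // Stand-in for an options aggregate like the one the diff passes around.
    struct proc_options {
        std::vector<std::string> command;
        std::filesystem::path    cwd = std::filesystem::current_path();
    };

    // Stub runner: a real implementation would spawn the process in opts.cwd.
    int run_proc(const proc_options& opts) {
        std::printf("would run a %zu-word command in %s\n",
                    opts.command.size(), opts.cwd.string().c_str());
        return 0;
    }

    }  // namespace sketch

    int main() {
        using namespace sketch;
        // Designated initializers keep call sites readable as options grow:
        run_proc({.command = {"ar", "rcs", "libfoo.a", "foo.o"}, .cwd = "_build"});
    }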

src/dds/build/plan/compile_exec.cpp (+5, -2)

@@ -8,6 +8,7 @@
#include <dds/util/string.hpp>
#include <dds/util/time.hpp>

#include <fansi/styled.hpp>
#include <neo/assert.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/filter.hpp>
@@ -20,6 +21,7 @@

using namespace dds;
using namespace ranges;
using namespace fansi::literals;

namespace {

@@ -51,7 +53,8 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun

// Generate a log message to display to the user
auto source_path = cf.plan.source_path();
auto msg = fmt::format("[{}] Compile: {}",

auto msg = fmt::format("[{}] Compile: .br.cyan[{}]"_styled,
cf.plan.qualifier(),
fs::relative(source_path, cf.plan.source().basis_path).string());

@@ -141,7 +144,7 @@ do_compile(const compile_file_full& cf, build_env_ref env, compile_counter& coun
if (!compiled_okay) {
dds_log(error, "Compilation failed: {}", source_path.string());
dds_log(error,
"Subcommand FAILED [Exitted {}]: {}\n{}",
"Subcommand .bold.red[FAILED] [Exited {}]: .bold.yellow[{}]\n{}"_styled,
compile_retc,
quote_command(cf.cmd_info.command),
compiler_output);

src/dds/build/plan/compile_file.hpp (+1, -1)

@@ -1,7 +1,7 @@
#pragma once

#include <dds/build/plan/base.hpp>
#include <dds/source/file.hpp>
#include <dds/sdist/file.hpp>

#include <libman/library.hpp>


src/dds/build/plan/exe.cpp (+8, -3)

@@ -7,10 +7,13 @@
#include <dds/util/log.hpp>
#include <dds/util/time.hpp>

#include <fansi/styled.hpp>

#include <algorithm>
#include <chrono>

using namespace dds;
using namespace fansi::literals;

fs::path link_executable_plan::calc_executable_path(build_env_ref env) const noexcept {
return env.output_root / _out_subdir / (_name + env.toolchain.executable_suffix());
@@ -77,25 +80,27 @@ bool link_executable_plan::is_test() const noexcept {

std::optional<test_failure> link_executable_plan::run_test(build_env_ref env) const {
auto exe_path = calc_executable_path(env);
auto msg = fmt::format("Run test: {:30}", fs::relative(exe_path, env.output_root).string());
auto msg = fmt::format("Run test: .br.cyan[{:30}]"_styled,
fs::relative(exe_path, env.output_root).string());
dds_log(info, msg);
using namespace std::chrono_literals;
auto&& [dur, res] = timed<std::chrono::microseconds>(
[&] { return run_proc({.command = {exe_path.string()}, .timeout = 10s}); });

if (res.okay()) {
dds_log(info, "{} - PASSED - {:>9L}μs", msg, dur.count());
dds_log(info, "{} - .br.green[PASS] - {:>9L}μs", msg, dur.count());
return std::nullopt;
} else {
auto exit_msg = fmt::format(res.signal ? "signalled {}" : "exited {}",
res.signal ? res.signal : res.retc);
auto fail_str = res.timed_out ? "TIMEOUT" : "FAILED ";
auto fail_str = res.timed_out ? ".br.yellow[TIME]"_styled : ".br.red[FAIL]"_styled;
dds_log(error, "{} - {} - {:>9L}μs [{}]", msg, fail_str, dur.count(), exit_msg);
test_failure f;
f.executable_path = exe_path;
f.output = res.output;
f.retc = res.retc;
f.signal = res.signal;
f.timed_out = res.timed_out;
return f;
}
}

src/dds/build/plan/exe.hpp (+3, -2)

@@ -18,8 +18,9 @@ class library_plan;
struct test_failure {
fs::path executable_path;
std::string output;
int retc;
int signal;
int retc{};
int signal{};
bool timed_out = false;
};

/**
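
The {} initializers added to retc and signal above matter because test_failure is an aggregate: a default-constructed instance would otherwise leave those ints indeterminate. A minimal standalone illustration (mirroring the struct; not actual dds code):

    #include <cassert>

    struct test_failure_demo {
        int  retc{};               // value-initialized to 0
        int  signal{};             // value-initialized to 0
        bool timed_out = false;
    };

    int main() {
        test_failure_demo f;       // every member now has a well-defined value
        assert(f.retc == 0 && f.signal == 0 && !f.timed_out);
    }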

src/dds/build/plan/library.hpp (+1, -1)

@@ -3,7 +3,7 @@
#include <dds/build/plan/archive.hpp>
#include <dds/build/plan/exe.hpp>
#include <dds/build/plan/template.hpp>
#include <dds/library/root.hpp>
#include <dds/sdist/library/root.hpp>
#include <dds/usage_reqs.hpp>
#include <dds/util/fs.hpp>


src/dds/build/plan/template.cpp (+1, -1)

@@ -1,7 +1,7 @@
#include <dds/build/plan/template.hpp>

#include <dds/error/errors.hpp>
#include <dds/library/root.hpp>
#include <dds/sdist/library/root.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/string.hpp>


src/dds/build/plan/template.hpp (+1, -1)

@@ -1,7 +1,7 @@
#pragma once

#include <dds/build/plan/base.hpp>
#include <dds/source/file.hpp>
#include <dds/sdist/file.hpp>

#include <utility>


src/dds/catalog/catalog.cpp (+0, -438)

@@ -1,438 +0,0 @@
#include "./catalog.hpp"

#include "./import.hpp"

#include <dds/catalog/init_catalog.hpp>
#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/solve/solve.hpp>
#include <dds/util/log.hpp>
#include <dds/util/ranges.hpp>

#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/concepts.hpp>
#include <neo/sqlite3/exec.hpp>
#include <neo/sqlite3/iter_tuples.hpp>
#include <neo/sqlite3/single.hpp>
#include <nlohmann/json.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/join.hpp>
#include <range/v3/view/transform.hpp>

using namespace dds;

namespace sqlite3 = neo::sqlite3;
using namespace sqlite3::literals;

namespace {

void migrate_repodb_1(sqlite3::database& db) {
db.exec(R"(
CREATE TABLE dds_cat_pkgs (
pkg_id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
version TEXT NOT NULL,
git_url TEXT,
git_ref TEXT,
lm_name TEXT,
lm_namespace TEXT,
description TEXT NOT NULL,
UNIQUE(name, version),
CONSTRAINT has_source_info CHECK(
(
git_url NOT NULL
AND git_ref NOT NULL
)
= 1
),
CONSTRAINT valid_lm_info CHECK(
(
lm_name NOT NULL
AND lm_namespace NOT NULL
)
+
(
lm_name ISNULL
AND lm_namespace ISNULL
)
= 1
)
);

CREATE TABLE dds_cat_pkg_deps (
dep_id INTEGER PRIMARY KEY AUTOINCREMENT,
pkg_id INTEGER NOT NULL REFERENCES dds_cat_pkgs(pkg_id),
dep_name TEXT NOT NULL,
low TEXT NOT NULL,
high TEXT NOT NULL,
UNIQUE(pkg_id, dep_name)
);
)");
}
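// Note on the CHECK constraints above: SQLite evaluates boolean expressions
// to 0/1 integers, so requiring
//   (lm_name NOT NULL AND lm_namespace NOT NULL)
//     + (lm_name ISNULL AND lm_namespace ISNULL) = 1
// means the two lm_* columns must be either both present or both absent.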

void migrate_repodb_2(sqlite3::database& db) {
db.exec(R"(
ALTER TABLE dds_cat_pkgs
ADD COLUMN repo_transform TEXT NOT NULL DEFAULT '[]'
)");
}

std::string transforms_to_json(const std::vector<fs_transformation>& trs) {
std::string acc = "[";
for (auto it = trs.begin(); it != trs.end(); ++it) {
acc += it->as_json();
if (std::next(it) != trs.end()) {
acc += ", ";
}
}
return acc + "]";
}

void store_with_remote(const neo::sqlite3::statement_cache&,
const package_info& pkg,
std::monostate) {
neo_assert_always(
invariant,
false,
"There was an attempt to insert a package listing into the database where that package "
"listing does not have a remote listing. If you see this message, it is a dds bug.",
pkg.ident.to_string());
}

void store_with_remote(neo::sqlite3::statement_cache& stmts,
const package_info& pkg,
const git_remote_listing& git) {
auto lm_usage = git.auto_lib.value_or(lm::usage{});
sqlite3::exec( //
stmts,
R"(
INSERT OR REPLACE INTO dds_cat_pkgs (
name,
version,
git_url,
git_ref,
lm_name,
lm_namespace,
description,
repo_transform
) VALUES (
?1,
?2,
?3,
?4,
CASE WHEN ?5 = '' THEN NULL ELSE ?5 END,
CASE WHEN ?6 = '' THEN NULL ELSE ?6 END,
?7,
?8
)
)"_sql,
std::forward_as_tuple( //
pkg.ident.name,
pkg.ident.version.to_string(),
git.url,
git.ref,
lm_usage.name,
lm_usage.namespace_,
pkg.description,
transforms_to_json(git.transforms)));
}

void do_store_pkg(neo::sqlite3::database& db,
neo::sqlite3::statement_cache& st_cache,
const package_info& pkg) {
dds_log(debug, "Recording package {}@{}", pkg.ident.name, pkg.ident.version.to_string());
std::visit([&](auto&& remote) { store_with_remote(st_cache, pkg, remote); }, pkg.remote);
auto db_pkg_id = db.last_insert_rowid();
auto& new_dep_st = st_cache(R"(
INSERT INTO dds_cat_pkg_deps (
pkg_id,
dep_name,
low,
high
) VALUES (
?,
?,
?,
?
)
)"_sql);
for (const auto& dep : pkg.deps) {
new_dep_st.reset();
assert(dep.versions.num_intervals() == 1);
auto iv_1 = *dep.versions.iter_intervals().begin();
dds_log(trace, " Depends on: {}", dep.to_string());
sqlite3::exec(new_dep_st,
std::forward_as_tuple(db_pkg_id,
dep.name,
iv_1.low.to_string(),
iv_1.high.to_string()));
}
}

void store_init_packages(sqlite3::database& db, sqlite3::statement_cache& st_cache) {
dds_log(debug, "Restoring initial package data");
for (auto& pkg : init_catalog_packages()) {
do_store_pkg(db, st_cache, pkg);
}
}

void ensure_migrated(sqlite3::database& db) {
sqlite3::transaction_guard tr{db};
db.exec(R"(
PRAGMA foreign_keys = 1;
CREATE TABLE IF NOT EXISTS dds_cat_meta AS
WITH init(meta) AS (VALUES ('{"version": 0}'))
SELECT * FROM init;
)");
auto meta_st = db.prepare("SELECT meta FROM dds_cat_meta");
auto [meta_json] = sqlite3::unpack_single<std::string>(meta_st);

auto meta = nlohmann::json::parse(meta_json);
if (!meta.is_object()) {
dds_log(critical, "Root of catalog dds_cat_meta cell should be a JSON object");
throw_external_error<errc::corrupted_catalog_db>();
}

auto version_ = meta["version"];
if (!version_.is_number_integer()) {
dds_log(critical, "'version' key in dds_cat_meta is not an integer");
throw_external_error<errc::corrupted_catalog_db>(
"The catalog database metadata is invalid [bad dds_meta.version]");
}

constexpr int current_database_version = 2;

int version = version_;

// If this is the first time we're working here, import the initial
// catalog with some useful tidbits.
bool import_init_packages = version == 0;

if (version > current_database_version) {
dds_log(critical,
"Catalog version is {}, but we only support up to {}",
version,
current_database_version);
throw_external_error<errc::catalog_too_new>();
}

if (version < 1) {
dds_log(debug, "Applying catalog migration 1");
migrate_repodb_1(db);
}
if (version < 2) {
dds_log(debug, "Applying catalog migration 2");
migrate_repodb_2(db);
}
meta["version"] = 2;
exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump()));

if (import_init_packages) {
dds_log(
info,
"A new catalog database has been created and populated with some initial "
"contents.");
neo::sqlite3::statement_cache stmts{db};
store_init_packages(db, stmts);
}
}

void check_json(bool b, std::string_view what) {
if (!b) {
throw_user_error<errc::invalid_catalog_json>("Catalog JSON is invalid: {}", what);
}
}

} // namespace

catalog catalog::open(const std::string& db_path) {
if (db_path != ":memory:") {
auto pardir = fs::weakly_canonical(db_path).parent_path();
dds_log(trace, "Ensuring parent directory [{}]", pardir.string());
fs::create_directories(pardir);
}
dds_log(debug, "Opening package catalog [{}]", db_path);
auto db = sqlite3::database::open(db_path);
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
dds_log(critical,
"Failed to load the repository database. It appears to be invalid/corrupted. The "
"exception message is: {}",
e.what());
throw_external_error<errc::corrupted_catalog_db>();
}
dds_log(trace, "Successfully opened catalog");
return catalog(std::move(db));
}

catalog::catalog(sqlite3::database db)
: _db(std::move(db)) {}

void catalog::store(const package_info& pkg) {
sqlite3::transaction_guard tr{_db};
do_store_pkg(_db, _stmt_cache, pkg);
}

std::optional<package_info> catalog::get(const package_id& pk_id) const noexcept {
auto ver_str = pk_id.version.to_string();
dds_log(trace, "Lookup package {}@{}", pk_id.name, ver_str);
auto& st = _stmt_cache(R"(
SELECT
pkg_id,
name,
version,
git_url,
git_ref,
lm_name,
lm_namespace,
description,
repo_transform
FROM dds_cat_pkgs
WHERE name = ? AND version = ?
)"_sql);
st.reset();
st.bindings = std::forward_as_tuple(pk_id.name, ver_str);
auto opt_tup = sqlite3::unpack_single_opt<std::int64_t,
std::string,
std::string,
std::optional<std::string>,
std::optional<std::string>,
std::optional<std::string>,
std::optional<std::string>,
std::string,
std::string>(st);
if (!opt_tup) {
dym_target::fill([&] {
auto all_ids = this->all();
auto id_strings
= ranges::views::transform(all_ids, [&](auto id) { return id.to_string(); });
return did_you_mean(pk_id.to_string(), id_strings);
});
return std::nullopt;
}
const auto& [pkg_id,
name,
version,
git_url,
git_ref,
lm_name,
lm_namespace,
description,
repo_transform]
= *opt_tup;
assert(pk_id.name == name);
assert(pk_id.version == semver::version::parse(version));
assert(git_url);
assert(git_ref);

auto deps = dependencies_of(pk_id);

auto info = package_info{
pk_id,
std::move(deps),
std::move(description),
git_remote_listing{
*git_url,
*git_ref,
lm_name ? std::make_optional(lm::usage{*lm_namespace, *lm_name}) : std::nullopt,
{},
},
};

if (!repo_transform.empty()) {
// Transforms are stored in the DB as JSON strings. Convert them back to real objects.
auto tr_data = json5::parse_data(repo_transform);
check_json(tr_data.is_array(),
fmt::format("Database record for {} has an invalid 'repo_transform' field [1]",
pkg_id));
for (const auto& el : tr_data.as_array()) {
check_json(
el.is_object(),
fmt::format("Database record for {} has an invalid 'repo_transform' field [2]",
pkg_id));
auto tr = fs_transformation::from_json(el);
std::visit(
[&](auto& remote) {
if constexpr (neo::alike<decltype(remote), std::monostate>) {
// Do nothing
} else {
remote.transforms.push_back(std::move(tr));
}
},
info.remote);
}
}
return info;
}

auto pair_to_pkg_id = [](auto&& pair) {
const auto& [name, ver] = pair;
return package_id{name, semver::version::parse(ver)};
};

std::vector<package_id> catalog::all() const noexcept {
return view_safe(sqlite3::exec_iter<std::string, std::string>( //
_stmt_cache,
"SELECT name, version FROM dds_cat_pkgs"_sql))
| ranges::views::transform(pair_to_pkg_id) //
| ranges::to_vector;
}

std::vector<package_id> catalog::by_name(std::string_view sv) const noexcept {
return view_safe(sqlite3::exec_iter<std::string, std::string>( //
_stmt_cache,
R"(
SELECT name, version
FROM dds_cat_pkgs
WHERE name = ?
)"_sql,
std::tie(sv))) //
| ranges::views::transform(pair_to_pkg_id) //
| ranges::to_vector;
}

std::vector<dependency> catalog::dependencies_of(const package_id& pkg) const noexcept {
dds_log(trace, "Lookup dependencies of {}@{}", pkg.name, pkg.version.to_string());
return view_safe(sqlite3::exec_iter<std::string,
std::string,
std::string>( //
_stmt_cache,
R"(
WITH this_pkg_id AS (
SELECT pkg_id
FROM dds_cat_pkgs
WHERE name = ? AND version = ?
)
SELECT dep_name, low, high
FROM dds_cat_pkg_deps
WHERE pkg_id IN this_pkg_id
ORDER BY dep_name
)"_sql,
std::forward_as_tuple(pkg.name, pkg.version.to_string()))) //
| ranges::views::transform([](auto&& pair) {
auto& [name, low, high] = pair;
auto dep
= dependency{name, {semver::version::parse(low), semver::version::parse(high)}};
dds_log(trace, " Depends: {}", dep.to_string());
return dep;
}) //
| ranges::to_vector;
}

void catalog::import_json_str(std::string_view content) {
dds_log(trace, "Importing JSON string into catalog");
auto pkgs = parse_packages_json(content);

sqlite3::transaction_guard tr{_db};
for (const auto& pkg : pkgs) {
store(pkg);
}
}

void catalog::import_initial() {
sqlite3::transaction_guard tr{_db};
dds_log(info, "Restoring built-in initial catalog contents");
store_init_packages(_db, _stmt_cache);
}

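The deleted `ensure_migrated` above implements a pattern worth noting: schema versioning through a single-row meta table, where each numbered migration runs at most once and the stored version is bumped inside the same transaction. A condensed sketch of that control flow, reusing only the neo::sqlite3 and nlohmann::json calls already shown in this file (the `migrations` table of per-step functions is hypothetical):

    #include <neo/sqlite3/database.hpp>
    #include <neo/sqlite3/exec.hpp>
    #include <neo/sqlite3/single.hpp>
    #include <neo/sqlite3/transaction.hpp>
    #include <nlohmann/json.hpp>

    #include <string>
    #include <tuple>
    #include <vector>

    using migration_fn = void (*)(neo::sqlite3::database&);

    // migrations[0] upgrades schema version 0 -> 1, migrations[1] 1 -> 2, ...
    void apply_schema_migrations(neo::sqlite3::database& db,
                                 const std::vector<migration_fn>& migrations) {
        neo::sqlite3::transaction_guard tr{db};  // all steps commit or none do
        auto st = db.prepare("SELECT meta FROM dds_cat_meta");
        auto [meta_json] = neo::sqlite3::unpack_single<std::string>(st);
        auto meta = nlohmann::json::parse(meta_json);
        int version = meta["version"];
        for (std::size_t v = version; v < migrations.size(); ++v) {
            migrations[v](db);  // each step sees the previous step's schema
        }
        meta["version"] = migrations.size();
        exec(db, "UPDATE dds_cat_meta SET meta=?", std::forward_as_tuple(meta.dump()));
    }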
+0 -50  src/dds/catalog/catalog.hpp

@@ -1,50 +0,0 @@
#pragma once

#include <dds/deps.hpp>
#include <dds/package/id.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/glob.hpp>

#include "./package_info.hpp"

#include <neo/sqlite3/database.hpp>
#include <neo/sqlite3/statement.hpp>
#include <neo/sqlite3/statement_cache.hpp>
#include <neo/sqlite3/transaction.hpp>

#include <string>
#include <variant>
#include <vector>

namespace dds {

class catalog {
neo::sqlite3::database _db;
mutable neo::sqlite3::statement_cache _stmt_cache{_db};

explicit catalog(neo::sqlite3::database db);
catalog(const catalog&) = delete;

public:
catalog(catalog&&) = default;
catalog& operator=(catalog&&) = default;

static catalog open(const std::string& db_path);
static catalog open(path_ref db_path) { return open(db_path.string()); }

void store(const package_info& info);
std::optional<package_info> get(const package_id& id) const noexcept;

std::vector<package_id> all() const noexcept;
std::vector<package_id> by_name(std::string_view sv) const noexcept;
std::vector<dependency> dependencies_of(const package_id& pkg) const noexcept;

void import_initial();
void import_json_str(std::string_view json_str);
void import_json_file(path_ref json_path) {
auto content = dds::slurp_file(json_path);
import_json_str(content);
}
};

} // namespace dds

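One detail of the deleted class worth keeping in mind: `catalog` was a move-only handle. The copy constructor is deleted while the moves are defaulted, so a live database connection can be returned from `open` and handed around, but never silently duplicated. A stripped-down sketch of the idiom (names hypothetical):

    #include <utility>

    class db_handle {
        int fd_ = -1;

        explicit db_handle(int fd) : fd_(fd) {}
        db_handle(const db_handle&) = delete;  // no duplicated ownership

    public:
        db_handle(db_handle&&) = default;
        db_handle& operator=(db_handle&&) = default;

        static db_handle open(int fd) { return db_handle(fd); }
    };

    int main() {
        db_handle a = db_handle::open(3);
        db_handle b = std::move(a);  // fine: ownership transfers
        // db_handle c = b;          // would not compile: copying is deleted
        (void)b;
    }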
+0 -106  src/dds/catalog/catalog.test.cpp

@@ -1,106 +0,0 @@
#include <dds/catalog/catalog.hpp>

#include <catch2/catch.hpp>

using namespace std::literals;

TEST_CASE("Create a simple database") {
// Just create and run migrations on an in-memory database
auto repo = dds::catalog::open(":memory:"s);
}

TEST_CASE("Open a catalog in a non-ascii path") {
::setlocale(LC_ALL, ".utf8");
auto THIS_DIR = dds::fs::canonical(__FILE__).parent_path();
auto BUILD_DIR
= (THIS_DIR.parent_path().parent_path().parent_path() / "_build").lexically_normal();
auto subdir = BUILD_DIR / "Ю́рий Алексе́евич Гага́рин";
dds::fs::remove_all(subdir);
dds::catalog::open(subdir / "test.db");
dds::fs::remove_all(subdir);
}

class catalog_test_case {
public:
dds::catalog db = dds::catalog::open(":memory:"s);
};

TEST_CASE_METHOD(catalog_test_case, "Store a simple package") {
db.store(dds::package_info{
dds::package_id("foo", semver::version::parse("1.2.3")),
{},
"example",
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}},
});

auto pkgs = db.by_name("foo");
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].name == "foo");
CHECK(pkgs[0].version == semver::version::parse("1.2.3"));
auto info = db.get(pkgs[0]);
REQUIRE(info);
CHECK(info->ident == pkgs[0]);
CHECK(info->deps.empty());
CHECK(std::holds_alternative<dds::git_remote_listing>(info->remote));
CHECK(std::get<dds::git_remote_listing>(info->remote).ref == "master");

// Update the entry with a new git remote ref
CHECK_NOTHROW(db.store(dds::package_info{
dds::package_id("foo", semver::version::parse("1.2.3")),
{},
"example",
dds::git_remote_listing{"http://example.com", "develop", std::nullopt, {}},
}));
// The previous pkg_id is still a valid lookup key
info = db.get(pkgs[0]);
REQUIRE(info);
CHECK(std::get<dds::git_remote_listing>(info->remote).ref == "develop");
}

TEST_CASE_METHOD(catalog_test_case, "Package requirements") {
db.store(dds::package_info{
dds::package_id{"foo", semver::version::parse("1.2.3")},
{
{"bar", {semver::version::parse("1.2.3"), semver::version::parse("1.4.0")}},
{"baz", {semver::version::parse("5.3.0"), semver::version::parse("6.0.0")}},
},
"example",
dds::git_remote_listing{"http://example.com", "master", std::nullopt, {}},
});
auto pkgs = db.by_name("foo");
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].name == "foo");
auto deps = db.dependencies_of(pkgs[0]);
CHECK(deps.size() == 2);
CHECK(deps[0].name == "bar");
CHECK(deps[1].name == "baz");
}

TEST_CASE_METHOD(catalog_test_case, "Parse JSON repo") {
db.import_json_str(R"({
"version": 1,
"packages": {
"foo": {
"1.2.3": {
"depends": [
"bar~4.2.1"
],
"git": {
"url": "http://example.com",
"ref": "master"
}
}
}
}
})");
auto pkgs = db.by_name("foo");
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].name == "foo");
CHECK(pkgs[0].version == semver::version::parse("1.2.3"));
auto deps = db.dependencies_of(pkgs[0]);
REQUIRE(deps.size() == 1);
CHECK(deps[0].name == "bar");
CHECK(deps[0].versions
== dds::version_range_set{semver::version::parse("4.2.1"),
semver::version::parse("4.3.0")});
}

+0 -83  src/dds/catalog/get.cpp

@@ -1,83 +0,0 @@
#include "./get.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/error/errors.hpp>
#include <dds/repo/repo.hpp>
#include <dds/util/log.hpp>
#include <dds/util/parallel.hpp>

#include <neo/assert.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/transform.hpp>

using namespace dds;

namespace {

temporary_sdist do_pull_sdist(const package_info& listing, std::monostate) {
neo_assert_always(
invariant,
false,
"A package listing in the catalog has no defined remote from which to pull. This "
"shouldn't happen in normal usage. This will occur if the database has been "
"manually altered, or if DDS has a bug.",
listing.ident.to_string());
}

temporary_sdist do_pull_sdist(const package_info& listing, const git_remote_listing& git) {
auto tmpdir = dds::temporary_dir::create();

git.pull_to(listing.ident, tmpdir.path());

dds_log(info, "Create sdist from clone ...");
sdist_params params;
params.project_dir = tmpdir.path();
auto sd_tmp_dir = dds::temporary_dir::create();
params.dest_path = sd_tmp_dir.path();
params.force = true;
auto sd = create_sdist(params);
return {sd_tmp_dir, sd};
}

} // namespace

temporary_sdist dds::get_package_sdist(const package_info& pkg) {
auto tsd = std::visit([&](auto&& remote) { return do_pull_sdist(pkg, remote); }, pkg.remote);
if (!(tsd.sdist.manifest.pkg_id == pkg.ident)) {
throw_external_error<errc::sdist_ident_mismatch>(
"The package name@version in the generated source distribution does not match the name "
"listed in the remote listing file (expected '{}', but got '{}')",
pkg.ident.to_string(),
tsd.sdist.manifest.pkg_id.to_string());
}
return tsd;
}

void dds::get_all(const std::vector<package_id>& pkgs, repository& repo, const catalog& cat) {
std::mutex repo_mut;

auto absent_pkg_infos = pkgs //
| ranges::views::filter([&](auto pk) {
std::scoped_lock lk{repo_mut};
return !repo.find(pk);
})
| ranges::views::transform([&](auto id) {
auto info = cat.get(id);
neo_assert(invariant,
info.has_value(),
"No catalog entry for package id?",
id.to_string());
return *info;
});

auto okay = parallel_run(absent_pkg_infos, 8, [&](package_info inf) {
dds_log(info, "Download package: {}", inf.ident.to_string());
auto tsd = get_package_sdist(inf);
std::scoped_lock lk{repo_mut};
repo.add_sdist(tsd.sdist, if_exists::throw_exc);
});

if (!okay) {
throw_external_error<errc::dependency_resolve_failure>("Downloading of packages failed.");
}
}

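`get_all` above pairs a lazy filter/transform pipeline with a bounded worker pool: only packages missing from the local repository are fetched, and the mutex guards repository access rather than the slow download itself. A self-contained sketch of that shape, with plain std::thread standing in for dds's `parallel_run` helper (which, as used above, additionally reports per-task failures):

    #include <atomic>
    #include <cstddef>
    #include <mutex>
    #include <thread>
    #include <vector>

    int main() {
        std::vector<int> pending = {1, 2, 3, 4, 5, 6, 7, 8};
        std::vector<int> done;
        std::mutex done_mut;
        std::atomic<std::size_t> next{0};

        auto worker = [&] {
            for (;;) {
                const std::size_t i = next.fetch_add(1);
                if (i >= pending.size()) {
                    return;
                }
                const int result = pending[i] * 10;  // stands in for the download
                std::scoped_lock lk{done_mut};       // lock only to publish
                done.push_back(result);
            }
        };

        std::vector<std::thread> pool;
        for (int i = 0; i < 4; ++i) {
            pool.emplace_back(worker);
        }
        for (auto& t : pool) {
            t.join();
        }
        return done.size() == pending.size() ? 0 : 1;
    }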
+0 -16  src/dds/catalog/get.hpp

@@ -1,16 +0,0 @@
#pragma once

#include <dds/source/dist.hpp>
#include <dds/temp.hpp>

namespace dds {

class repository;
class catalog;
struct package_info;

temporary_sdist get_package_sdist(const package_info&);

void get_all(const std::vector<package_id>& pkgs, dds::repository& repo, const catalog& cat);

} // namespace dds

+0 -208  src/dds/catalog/import.cpp

@@ -1,208 +0,0 @@
#include "./import.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/log.hpp>

#include <fmt/core.h>
#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <semester/walk.hpp>

#include <optional>

using namespace dds;

template <typename KeyFunc, typename... Args>
struct any_key {
KeyFunc _key_fn;
semester::walk_seq<Args...> _seq;

any_key(KeyFunc&& kf, Args&&... args)
: _key_fn(kf)
, _seq(NEO_FWD(args)...) {}

template <typename Data>
semester::walk_result operator()(std::string_view key, Data&& dat) {
auto res = _key_fn(key);
if (res.rejected()) {
return res;
}
return _seq.invoke(NEO_FWD(dat));
}
};

template <typename KF, typename... Args>
any_key(KF&&, Args&&...) -> any_key<KF, Args...>;

namespace {

using require_obj = semester::require_type<json5::data::mapping_type>;
using require_array = semester::require_type<json5::data::array_type>;
using require_str = semester::require_type<std::string>;

template <typename... Args>
[[noreturn]] void import_error(Args&&... args) {
throw_user_error<dds::errc::invalid_catalog_json>(NEO_FWD(args)...);
}

git_remote_listing parse_git_remote(const json5::data& data) {
git_remote_listing git;

using namespace semester::walk_ops;

walk(data,
require_obj{"Git remote should be an object"},
mapping{required_key{"url",
"A git 'url' string is required",
require_str("Git URL should be a string"),
put_into(git.url)},
required_key{"ref",
"A git 'ref' is required, and must be a tag or branch name",
require_str("Git ref should be a string"),
put_into(git.ref)},
if_key{"auto-lib",
require_str("'auto-lib' should be a string"),
put_into(git.auto_lib,
[](std::string const& str) {
try {
return lm::split_usage_string(str);
} catch (const std::runtime_error& e) {
import_error("{}: {}", walk.path(), e.what());
}
})},
if_key{"transform",
require_array{"Expect an array of transforms"},
for_each{put_into(std::back_inserter(git.transforms), [](auto&& dat) {
try {
return fs_transformation::from_json(dat);
} catch (const semester::walk_error& e) {
import_error(e.what());
}
})}}});

return git;
}

package_info
parse_pkg_json_v1(std::string_view name, semver::version version, const json5::data& data) {
package_info ret;
ret.ident = package_id{std::string{name}, version};

using namespace semester::walk_ops;

auto make_dep = [&](std::string const& str) {
try {
return dependency::parse_depends_string(str);
} catch (std::runtime_error const& e) {
import_error("{}: {}", walk.path(), e.what());
}
};

auto check_one_remote = [&](auto&&) {
if (!semester::holds_alternative<std::monostate>(ret.remote)) {
return walk.reject("Cannot specify multiple remotes for a package");
}
return walk.pass;
};

walk(data,
mapping{if_key{"description",
require_str{"'description' should be a string"},
put_into{ret.description}},
if_key{"depends",
require_array{"'depends' must be an array of dependency strings"},
for_each{require_str{"Each dependency should be a string"},
put_into{std::back_inserter(ret.deps), make_dep}}},
if_key{
"git",
check_one_remote,
put_into(ret.remote, parse_git_remote),
}});

if (semester::holds_alternative<std::monostate>(ret.remote)) {
import_error("{}: Package listing for {} does not have any remote information",
walk.path(),
ret.ident.to_string());
}

return ret;
}

std::vector<package_info> parse_json_v1(const json5::data& data) {
std::vector<package_info> acc_pkgs;

std::string pkg_name;
semver::version pkg_version;
package_info dummy;

using namespace semester::walk_ops;

auto convert_pkg_obj
= [&](auto&& dat) { return parse_pkg_json_v1(pkg_name, pkg_version, dat); };

auto convert_version_str = [&](std::string_view str) {
try {
return semver::version::parse(str);
} catch (const semver::invalid_version& e) {
throw_user_error<errc::invalid_catalog_json>("{}: package '{}' version string '{}' is invalid: {}",
walk.path(),
pkg_name,
str,
e.what());
}
};

auto import_pkg_versions
= walk_seq{require_obj{"Package entries must be JSON objects"},
mapping{any_key{put_into(pkg_version, convert_version_str),
require_obj{"Package+version entries must be JSON objects"},
put_into{std::back_inserter(acc_pkgs), convert_pkg_obj}}}};

auto import_pkgs = walk_seq{require_obj{"'packages' should be a JSON object"},
mapping{any_key{put_into(pkg_name), import_pkg_versions}}};

walk(data,
mapping{
if_key{"version", just_accept},
required_key{"packages", "'packages' should be an object of packages", import_pkgs},
});

return acc_pkgs;
}

} // namespace

std::vector<package_info> dds::parse_packages_json(std::string_view content) {
json5::data data;
try {
dds_log(trace, "Parsing packages JSON data: {}", content);
data = json5::parse_data(content);
} catch (const json5::parse_error& e) {
throw_user_error<errc::invalid_catalog_json>("JSON5 syntax error: {}", e.what());
}

if (!data.is_object()) {
throw_user_error<errc::invalid_catalog_json>("Root of import JSON must be a JSON object");
}

auto& data_obj = data.as_object();
auto version_it = data_obj.find("version");
if (version_it == data_obj.end() || !version_it->second.is_number()) {
throw_user_error<errc::invalid_catalog_json>(
"Root JSON import requires a 'version' property");
}

double version = version_it->second.as_number();

try {
if (version == 1.0) {
dds_log(trace, "Processing JSON data as v1 data");
return parse_json_v1(data);
} else {
throw_user_error<errc::invalid_catalog_json>("Unknown catalog JSON version '{}'",
version);
}
} catch (const semester::walk_error& e) {
throw_user_error<errc::invalid_catalog_json>(e.what());
}
}

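The declarative `semester::walk` DSL used throughout this file composes small combinators: `require_type` validates a node's shape, `mapping`/`required_key`/`if_key` route object keys, and `put_into` stores the extracted value. A minimal sketch parsing `{name: <string>}` with the same combinators (struct and function names hypothetical):

    #include <json5/parse_data.hpp>
    #include <semester/walk.hpp>

    #include <string>

    struct person {
        std::string name;
    };

    person parse_person(const json5::data& data) {
        person p;
        using namespace semester::walk_ops;
        using require_obj = semester::require_type<json5::data::mapping_type>;
        using require_str = semester::require_type<std::string>;
        walk(data,
             require_obj{"Root must be a JSON object"},
             mapping{required_key{"name",
                                  "A 'name' string is required",
                                  require_str{"'name' must be a string"},
                                  put_into(p.name)}});
        return p;
    }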
+0 -9  src/dds/catalog/import.hpp

@@ -1,9 +0,0 @@
#pragma once

#include "./package_info.hpp"

namespace dds {

std::vector<package_info> parse_packages_json(std::string_view);

} // namespace dds

+0 -154  src/dds/catalog/import.test.cpp

@@ -1,154 +0,0 @@
#include "./import.hpp"

#include <dds/error/errors.hpp>

#include <catch2/catch.hpp>

TEST_CASE("An empty import is okay") {
// An empty JSON with no packages in it
auto pkgs = dds::parse_packages_json("{version: 1, packages: {}}");
CHECK(pkgs.empty());
}

TEST_CASE("Valid/invalid package JSON5") {
std::string_view bads[] = {
// Invalid JSON:
"",
// Should be an object
"[]",
// Missing keys
"{}",
// Missing "packages"
"{version: 1}",
// Bad version
"{version: 1.7, packages: {}}",
"{version: [], packages: {}}",
"{version: null, packages: {}}",
// 'packages' should be an object
"{version: 1, packages: []}",
"{version: 1, packages: null}",
"{version: 1, packages: 4}",
"{version: 1, packages: 'lol'}",
// Objects in 'packages' should be objects
"{version:1, packages:{foo:null}}",
"{version:1, packages:{foo:[]}}",
"{version:1, packages:{foo:9}}",
"{version:1, packages:{foo:'lol'}}",
// Objects in 'packages' should have version strings
"{version:1, packages:{foo:{'lol':{}}}}",
"{version:1, packages:{foo:{'1.2':{}}}}",
// No remote
"{version:1, packages:{foo:{'1.2.3':{}}}}",
// Bad empty git
"{version:1, packages:{foo:{'1.2.3':{git:{}}}}}",
// Git `url` and `ref` should be a string
"{version:1, packages:{foo:{'1.2.3':{git:{url:2, ref:''}}}}}",
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:2}}}}}",
// 'auto-lib' should be a usage string
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':3}}}}}",
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'ffasdf'}}}}}",
// 'transform' should be an array
R"(
{
version: 1,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: 'lol hi',
}
}}}
}
)",
};

for (auto bad : bads) {
INFO("Bad: " << bad);
CHECK_THROWS_AS(dds::parse_packages_json(bad),
dds::user_error<dds::errc::invalid_catalog_json>);
}

std::string_view goods[] = {
// Basic empty:
"{version:1, packages:{}}",
// No versions for 'foo' is weird, but okay
"{version:1, packages:{foo:{}}}",
// Basic package with minimum info:
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:''}}}}}",
// Minimal auto-lib:
"{version:1, packages:{foo:{'1.2.3':{git:{url:'', ref:'', 'auto-lib':'a/b'}}}}}",
// Empty transforms:
R"(
{
version: 1,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: [],
}
}}}
}
)",
// Basic transform:
R"(
{
version: 1,
packages: {foo: {'1.2.3': {
git: {
url: '',
ref: '',
'auto-lib': 'a/b',
transform: [{
copy: {
from: 'here',
to: 'there',
include: [
"*.c",
"*.cpp",
"*.h",
'*.txt'
]
}
}],
}
}}}
}
)",
};
for (auto good : goods) {
INFO("Parse: " << good);
CHECK_NOTHROW(dds::parse_packages_json(good));
}
}

TEST_CASE("Check a single object") {
// An empty JSON with no packages in it
auto pkgs = dds::parse_packages_json(R"({
version: 1,
packages: {
foo: {
'1.2.3': {
git: {
url: 'foo',
ref: 'fasdf',
'auto-lib': 'a/b',
}
}
}
}
})");
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].ident.name == "foo");
CHECK(pkgs[0].ident.to_string() == "foo@1.2.3");
CHECK(std::holds_alternative<dds::git_remote_listing>(pkgs[0].remote));

auto git = std::get<dds::git_remote_listing>(pkgs[0].remote);
CHECK(git.url == "foo");
CHECK(git.ref == "fasdf");
REQUIRE(git.auto_lib);
CHECK(git.auto_lib->namespace_ == "a");
CHECK(git.auto_lib->name == "b");
}

+0 -34  src/dds/catalog/init_catalog.cpp  (file diff suppressed because it is too large)


+0 -11  src/dds/catalog/init_catalog.hpp

@@ -1,11 +0,0 @@
#pragma once

#include "./package_info.hpp"

#include <vector>

namespace dds {

const std::vector<package_info>& init_catalog_packages() noexcept;

} // namespace dds

+0 -25  src/dds/catalog/package_info.hpp

@@ -1,25 +0,0 @@
#pragma once

#include "./remote/git.hpp"

#include <dds/deps.hpp>
#include <dds/package/id.hpp>
#include <dds/util/fs_transform.hpp>
#include <dds/util/glob.hpp>

#include <optional>
#include <string>
#include <variant>
#include <vector>

namespace dds {

struct package_info {
package_id ident;
std::vector<dependency> deps;
std::string description;

std::variant<std::monostate, git_remote_listing> remote;
};

} // namespace dds

+0 -42  src/dds/catalog/remote/git.cpp

@@ -1,42 +0,0 @@
#include "./git.hpp"

#include <dds/error/errors.hpp>
#include <dds/proc.hpp>
#include <dds/util/log.hpp>

#include <nlohmann/json.hpp>

void dds::git_remote_listing::pull_to(const dds::package_id& pid, dds::path_ref dest) const {
fs::remove_all(dest);
using namespace std::literals;
dds_log(info, "Clone Git repository [{}] (at {}) to [{}]", url, ref, dest.string());
auto command = {"git"s, "clone"s, "--depth=1"s, "--branch"s, ref, url, dest.generic_string()};
auto git_res = run_proc(command);
if (!git_res.okay()) {
throw_external_error<errc::git_clone_failure>(
"Git clone operation failed [Git command: {}] [Exited {}]:\n{}",
quote_command(command),
git_res.retc,
git_res.output);
}

for (const auto& tr : transforms) {
tr.apply_to(dest);
}

if (auto_lib.has_value()) {
dds_log(info, "Generating library data automatically");

auto pkg_strm = dds::open(dest / "package.json5", std::ios::binary | std::ios::out);
auto man_json = nlohmann::json::object();
man_json["name"] = pid.name;
man_json["version"] = pid.version.to_string();
man_json["namespace"] = auto_lib->namespace_;
pkg_strm << nlohmann::to_string(man_json);

auto lib_strm = dds::open(dest / "library.json5", std::ios::binary | std::ios::out);
auto lib_json = nlohmann::json::object();
lib_json["name"] = auto_lib->name;
lib_strm << nlohmann::to_string(lib_json);
}
}

+0 -24  src/dds/catalog/remote/git.hpp

@@ -1,24 +0,0 @@
#pragma once

#include <dds/catalog/get.hpp>
#include <dds/util/fs.hpp>
#include <dds/util/fs_transform.hpp>

#include <libman/package.hpp>

#include <optional>
#include <string>

namespace dds {

struct git_remote_listing {
std::string url;
std::string ref;
std::optional<lm::usage> auto_lib;

std::vector<fs_transformation> transforms;

void pull_to(const package_id& pid, path_ref path) const;
};

} // namespace dds

+54323 -3  src/dds/catch2_embeddead.generated.cpp  (file diff suppressed because it is too large)


+40 -0  src/dds/cli/cmd/build.cpp

@@ -0,0 +1,40 @@
#include "../options.hpp"

#include "./build_common.hpp"

#include <dds/build/builder.hpp>
#include <dds/error/errors.hpp>
#include <dds/pkg/db.hpp>
#include <dds/pkg/remote.hpp>
#include <dds/toolchain/from_json.hpp>

using namespace dds;

namespace dds::cli::cmd {

int build(const options& opts) {
if (!opts.build.add_repos.empty()) {
auto cat = opts.open_pkg_db();
for (auto& str : opts.build.add_repos) {
auto repo = pkg_remote::connect(str);
repo.store(cat.database());
}
}

if (opts.build.update_repos || !opts.build.add_repos.empty()) {
update_all_remotes(opts.open_pkg_db().database());
}

auto builder = create_project_builder(opts);
builder.build({
.out_root = opts.out_path.value_or(fs::current_path() / "_build"),
.existing_lm_index = opts.build.lm_index,
.emit_lmi = {},
.toolchain = opts.load_toolchain(),
.parallel_jobs = opts.jobs,
});

return 0;
}

} // namespace dds::cli::cmd

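The braced argument to `builder.build` above uses C++20 designated initializers: fields may be omitted (keeping their defaults), but the ones given must appear in declaration order. A tiny self-contained illustration (the type here is hypothetical):

    #include <string>

    struct build_params_like {
        std::string out_root;
        int parallel_jobs = 0;
        bool emit_lmi = false;
    };

    int main() {
        // .emit_lmi is omitted and keeps its default of false.
        build_params_like p{.out_root = "_build", .parallel_jobs = 6};
        return (p.parallel_jobs == 6 && !p.emit_lmi) ? 0 : 1;
    }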
+45 -0  src/dds/cli/cmd/build_common.cpp

@@ -0,0 +1,45 @@
#include "./build_common.hpp"

#include <dds/pkg/cache.hpp>
#include <dds/pkg/db.hpp>
#include <dds/pkg/get/get.hpp>

using namespace dds;

builder dds::cli::create_project_builder(const dds::cli::options& opts) {
sdist_build_params main_params = {
.subdir = "",
.build_tests = opts.build.want_tests,
.run_tests = opts.build.want_tests,
.build_apps = opts.build.want_apps,
.enable_warnings = !opts.disable_warnings,
};

auto man
= value_or(package_manifest::load_from_directory(opts.project_dir), package_manifest{});
auto cat_path = opts.pkg_db_dir.value_or(pkg_db::default_path());
auto repo_path = opts.pkg_cache_dir.value_or(pkg_cache::default_local_path());

builder builder;
if (!opts.build.lm_index.has_value()) {
auto cat = pkg_db::open(cat_path);
// Build the dependencies
pkg_cache::with_cache( //
repo_path,
pkg_cache_flags::write_lock | pkg_cache_flags::create_if_absent,
[&](pkg_cache repo) {
// Download dependencies
auto deps = repo.solve(man.dependencies, cat);
get_all(deps, repo, cat);
for (const pkg_id& pk : deps) {
auto sdist_ptr = repo.find(pk);
assert(sdist_ptr);
sdist_build_params deps_params;
deps_params.subdir = fs::path("_deps") / sdist_ptr->manifest.id.to_string();
builder.add(*sdist_ptr, deps_params);
}
});
}
builder.add(sdist{std::move(man), opts.project_dir}, main_params);
return builder;
}

+11 -0  src/dds/cli/cmd/build_common.hpp

@@ -0,0 +1,11 @@
#include "../options.hpp"

#include <dds/build/builder.hpp>

#include <functional>

namespace dds::cli {

dds::builder create_project_builder(const options& opts);

} // namespace dds::cli

+63 -0  src/dds/cli/cmd/build_deps.cpp

@@ -0,0 +1,63 @@
#include "../options.hpp"

#include <dds/build/builder.hpp>
#include <dds/build/params.hpp>
#include <dds/pkg/cache.hpp>
#include <dds/pkg/get/get.hpp>

#include <range/v3/action/join.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/concat.hpp>
#include <range/v3/view/transform.hpp>

namespace dds::cli::cmd {

int build_deps(const options& opts) {
dds::build_params params{
.out_root = opts.out_path.value_or(fs::current_path() / "_deps"),
.existing_lm_index = {},
.emit_lmi = opts.build.lm_index.value_or("INDEX.lmi"),
.toolchain = opts.load_toolchain(),
.parallel_jobs = opts.jobs,
};

dds::builder bd;
dds::sdist_build_params sdist_params;

auto all_file_deps = opts.build_deps.deps_files //
| ranges::views::transform([&](auto dep_fpath) {
dds_log(info, "Reading deps from {}", dep_fpath.string());
return dds::dependency_manifest::from_file(dep_fpath).dependencies;
})
| ranges::actions::join;

auto cmd_deps = ranges::views::transform(opts.build_deps.deps, [&](auto dep_str) {
return dds::dependency::parse_depends_string(dep_str);
});

auto all_deps = ranges::views::concat(all_file_deps, cmd_deps) | ranges::to_vector;

auto cat = opts.open_pkg_db();
dds::pkg_cache::with_cache( //
opts.pkg_cache_dir.value_or(pkg_cache::default_local_path()),
dds::pkg_cache_flags::write_lock | dds::pkg_cache_flags::create_if_absent,
[&](dds::pkg_cache repo) {
// Download dependencies
dds_log(info, "Loading {} dependencies", all_deps.size());
auto deps = repo.solve(all_deps, cat);
dds::get_all(deps, repo, cat);
for (const dds::pkg_id& pk : deps) {
auto sdist_ptr = repo.find(pk);
assert(sdist_ptr);
dds::sdist_build_params deps_params;
deps_params.subdir = sdist_ptr->manifest.id.to_string();
dds_log(info, "Dependency: {}", sdist_ptr->manifest.id.to_string());
bd.add(*sdist_ptr, deps_params);
}
});

bd.build(params);
return 0;
}

} // namespace dds::cli::cmd

+20 -0  src/dds/cli/cmd/compile_file.cpp

@@ -0,0 +1,20 @@
#include "../options.hpp"

#include "./build_common.hpp"

namespace dds::cli::cmd {

int compile_file(const options& opts) {
auto builder = create_project_builder(opts);
builder.compile_files(opts.compile_file.files,
{
.out_root = opts.out_path.value_or(fs::current_path() / "_build"),
.existing_lm_index = opts.build.lm_index,
.emit_lmi = {},
.toolchain = opts.load_toolchain(),
.parallel_jobs = opts.jobs,
});
return 0;
}

} // namespace dds::cli::cmd

+73 -0  src/dds/cli/cmd/pkg_get.cpp

@@ -0,0 +1,73 @@
#include "../options.hpp"

#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/error/nonesuch.hpp>
#include <dds/pkg/db.hpp>
#include <dds/pkg/get/get.hpp>
#include <dds/util/http/pool.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <json5/parse_data.hpp>
#include <neo/url.hpp>

namespace dds::cli::cmd {

static int _pkg_get(const options& opts) {
auto cat = opts.open_pkg_db();
for (const auto& item : opts.pkg.get.pkgs) {
auto id = pkg_id::parse(item);
auto info = *cat.get(id);
auto tsd = get_package_sdist(info);
auto dest = opts.out_path.value_or(fs::current_path()) / id.to_string();
dds_log(info, "Create sdist at {}", dest.string());
fs::remove_all(dest);
safe_rename(tsd.sdist.path, dest);
}
return 0;
}

int pkg_get(const options& opts) {
return boost::leaf::try_catch( //
[&] {
try {
return _pkg_get(opts);
} catch (...) {
dds::capture_exception();
}
},
[&](neo::url_validation_error url_err, dds::e_url_string bad_url) {
dds_log(error,
"Invalid package URL in the database [{}]: {}",
bad_url.value,
url_err.what());
return 1;
},
[&](const json5::parse_error& e, neo::url bad_url) {
dds_log(error,
"Error parsing JSON5 package document downloaded from [{}]: {}",
bad_url.to_string(),
e.what());
return 1;
},
[](dds::e_sqlite3_error_exc e) {
dds_log(error, "Error accessing the package database: {}", e.message);
return 1;
},
[](e_nonesuch nonesuch) -> int {
nonesuch.log_error("There is no entry in the package database for '{}'.");
write_error_marker("pkg-get-no-pkg-id-listing");
return 1;
},
[&](dds::e_system_error_exc e, dds::network_origin conn) {
dds_log(error,
"Error opening connection to [{}:{}]: {}",
conn.hostname,
conn.port,
e.message);
return 1;
});
}

} // namespace dds::cli::cmd

+57 -0  src/dds/cli/cmd/pkg_import.cpp

@@ -0,0 +1,57 @@
#include "../options.hpp"

#include <dds/pkg/cache.hpp>
#include <dds/sdist/dist.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/url/parse.hpp>

#include <iostream>
#include <string_view>

namespace dds::cli::cmd {
static int _pkg_import(const options& opts) {
return pkg_cache::with_cache( //
opts.pkg_cache_dir.value_or(pkg_cache::default_local_path()),
pkg_cache_flags::write_lock | pkg_cache_flags::create_if_absent,
[&](auto repo) {
for (std::string_view tgz_where : opts.pkg.import.items) {
neo_assertion_breadcrumbs("Importing sdist", tgz_where);
auto tmp_sd
= (tgz_where.starts_with("http://") || tgz_where.starts_with("https://"))
? download_expand_sdist_targz(tgz_where)
: expand_sdist_targz(tgz_where);
neo_assertion_breadcrumbs("Importing from temporary directory",
tmp_sd.tmpdir.path());
repo.add_sdist(tmp_sd.sdist, dds::if_exists(opts.if_exists));
}
if (opts.pkg.import.from_stdin) {
auto tmp_sd = dds::expand_sdist_from_istream(std::cin, "<stdin>");
repo.add_sdist(tmp_sd.sdist, dds::if_exists(opts.if_exists));
}
return 0;
});
}

int pkg_import(const options& opts) {
return boost::leaf::try_catch(
[&] {
try {
return _pkg_import(opts);
} catch (...) {
dds::capture_exception();
}
},
[&](const json5::parse_error& e) {
dds_log(error, "Error parsing JSON in package archive: {}", e.what());
return 1;
},
[](dds::e_sqlite3_error_exc e) {
dds_log(error, "Unexpected database error: {}", e.message);
return 1;
});
}
} // namespace dds::cli::cmd

+60 -0  src/dds/cli/cmd/pkg_ls.cpp

@@ -0,0 +1,60 @@
#include "../options.hpp"

#include <dds/pkg/cache.hpp>
#include <dds/sdist/dist.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <neo/assert.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/group_by.hpp>
#include <range/v3/view/transform.hpp>

#include <iostream>
#include <string_view>

namespace dds::cli::cmd {
static int _pkg_ls(const options& opts) {
auto list_contents = [&](pkg_cache repo) {
auto same_name
= [](auto&& a, auto&& b) { return a.manifest.id.name == b.manifest.id.name; };

auto all = repo.iter_sdists();
auto grp_by_name = all //
| ranges::views::group_by(same_name) //
| ranges::views::transform(ranges::to_vector) //
| ranges::views::transform([](auto&& grp) {
assert(grp.size() > 0);
return std::pair(grp[0].manifest.id.name, grp);
});

for (const auto& [name, grp] : grp_by_name) {
dds_log(info, "{}:", name);
for (const dds::sdist& sd : grp) {
dds_log(info, " - {}", sd.manifest.id.version.to_string());
}
}

return 0;
};

return dds::pkg_cache::with_cache(opts.pkg_cache_dir.value_or(pkg_cache::default_local_path()),
dds::pkg_cache_flags::read,
list_contents);
}

int pkg_ls(const options& opts) {
return boost::leaf::try_catch(
[&] {
try {
return _pkg_ls(opts);
} catch (...) {
dds::capture_exception();
}
},
[](dds::e_sqlite3_error_exc e) {
dds_log(error, "Unexpected database error: {}", e.message);
return 1;
});
}
} // namespace dds::cli::cmd

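The `group_by` view above relies on the input being ordered: `iter_sdists` must yield same-named packages adjacently, since range-v3's `group_by` only merges neighbouring elements that satisfy the predicate. A small standalone demonstration:

    #include <range/v3/range/conversion.hpp>
    #include <range/v3/view/group_by.hpp>

    #include <cstdio>
    #include <vector>

    int main() {
        std::vector<int> xs = {1, 1, 2, 2, 2, 3};
        auto groups = xs | ranges::views::group_by([](int a, int b) { return a == b; });
        for (auto&& g : groups) {
            auto v = g | ranges::to_vector;
            std::printf("%zu ", v.size());  // prints: 2 3 1
        }
    }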
+24 -0  src/dds/cli/cmd/pkg_repo_add.cpp

@@ -0,0 +1,24 @@
#include "../options.hpp"

#include "./pkg_repo_err_handle.hpp"

#include <dds/pkg/db.hpp>
#include <dds/pkg/remote.hpp>

namespace dds::cli::cmd {

static int _pkg_repo_add(const options& opts) {
auto cat = opts.open_pkg_db();
auto repo = pkg_remote::connect(opts.pkg.repo.add.url);
repo.store(cat.database());
if (opts.pkg.repo.add.update) {
repo.update_pkg_db(cat.database());
}
return 0;
}

int pkg_repo_add(const options& opts) {
return handle_pkg_repo_remote_errors([&] { return _pkg_repo_add(opts); });
}

} // namespace dds::cli::cmd

+75 -0  src/dds/cli/cmd/pkg_repo_err_handle.cpp

@@ -0,0 +1,75 @@
#include "./pkg_repo_err_handle.hpp"

#include "../options.hpp"

#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/error/nonesuch.hpp>
#include <dds/pkg/remote.hpp>
#include <dds/util/http/pool.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fansi/styled.hpp>
#include <json5/parse_data.hpp>
#include <neo/url.hpp>

using namespace fansi::literals;

int dds::cli::cmd::handle_pkg_repo_remote_errors(std::function<int()> fn) {
return boost::leaf::try_catch(
[&] {
try {
return fn();
} catch (...) {
dds::capture_exception();
}
},
[](neo::url_validation_error url_err, neo::url bad_url) {
dds_log(error, "Invalid URL [{}]: {}", bad_url.to_string(), url_err.what());
return 1;
},
[](dds::http_status_error err, dds::http_response_info resp, neo::url bad_url) {
dds_log(error,
"An HTTP error occurred while requesting [{}]: HTTP Status {} {} ({})",
bad_url.to_string(),
resp.status,
resp.status_message,
err.what());
return 1;
},
[](const json5::parse_error& e, neo::url bad_url) {
dds_log(error,
"Error parsing JSON downloaded from URL [.br.red[{}]`]: {}"_styled,
bad_url.to_string(),
e.what());
return 1;
},
[](dds::e_sqlite3_error_exc e, neo::url url) {
dds_log(error,
"Error accessing remote database [.br.red[{}]`]: {}"_styled,
url.to_string(),
e.message);
return 1;
},
[](dds::e_sqlite3_error_exc e) {
dds_log(error, "Unexpected database error: {}", e.message);
return 1;
},
[](dds::e_system_error_exc e, dds::network_origin conn) {
dds_log(error,
"Error communicating with [.br.red[{}://{}:{}]`]: {}"_styled,
conn.protocol,
conn.hostname,
conn.port,
e.message);
return 1;
},
[](matchv<pkg_repo_subcommand::remove>, e_nonesuch missing) {
missing.log_error(
"Cannot delete remote '.br.red[{}]', as no such remote repository is locally registered by that name."_styled);
write_error_marker("repo-rm-no-such-repo");
return 1;
});
}

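The structure of `handle_pkg_repo_remote_errors` is the standard boost::leaf shape used across these commands: the first callable performs the work (re-throwing through `capture_exception` so LEAF sees the error objects attached along the way), and each subsequent callable is a handler selected by the types of the arguments it asks for. A minimal sketch of that selection behaviour:

    #include <boost/leaf/handle_exception.hpp>

    #include <cstdio>
    #include <stdexcept>

    int run_guarded() {
        return boost::leaf::try_catch(
            []() -> int {
                throw std::runtime_error("boom");  // the "work" fails
            },
            [](const std::runtime_error& e) {
                // Chosen because the thrown exception matches this parameter.
                std::fprintf(stderr, "runtime error: %s\n", e.what());
                return 1;
            },
            [] {
                // Fallback when nothing more specific matches.
                return 2;
            });
    }

    int main() { return run_guarded() == 1 ? 0 : 1; }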
+9 -0  src/dds/cli/cmd/pkg_repo_err_handle.hpp

@@ -0,0 +1,9 @@
#pragma once

#include <functional>

namespace dds::cli::cmd {

int handle_pkg_repo_remote_errors(std::function<int()>);

} // namespace dds::cli::cmd

+33 -0  src/dds/cli/cmd/pkg_repo_ls.cpp

@@ -0,0 +1,33 @@
#include "../options.hpp"

#include "./pkg_repo_err_handle.hpp"

#include <dds/pkg/db.hpp>
#include <dds/pkg/remote.hpp>

#include <neo/sqlite3/iter_tuples.hpp>

namespace dds::cli::cmd {

static int _pkg_repo_ls(const options& opts) {
auto pkg_db = opts.open_pkg_db();
neo::sqlite3::database_ref db = pkg_db.database();

auto st = db.prepare("SELECT name, remote_url, db_mtime FROM dds_pkg_remotes");
auto tups = neo::sqlite3::iter_tuples<std::string, std::string, std::optional<std::string>>(st);
for (auto [name, remote_url, mtime] : tups) {
fmt::print("Remote '{}':\n", name);
fmt::print(" Updates URL: {}\n", remote_url);
if (mtime) {
fmt::print(" Last Modified: {}\n", *mtime);
}
fmt::print("\n");
}
return 0;
}

int pkg_repo_ls(const options& opts) {
return handle_pkg_repo_remote_errors([&] { return _pkg_repo_ls(opts); });
}

} // namespace dds::cli::cmd

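`iter_tuples` adapts a prepared statement into a range of typed rows, and a `std::optional` column type maps SQL NULL, as with `db_mtime` above. A small sketch against a hypothetical `people(name TEXT NOT NULL, nickname TEXT)` table, using the same calls:

    #include <neo/sqlite3/database.hpp>
    #include <neo/sqlite3/iter_tuples.hpp>

    #include <cstdio>
    #include <optional>
    #include <string>

    void print_people(neo::sqlite3::database_ref db) {
        auto st = db.prepare("SELECT name, nickname FROM people");
        auto rows = neo::sqlite3::iter_tuples<std::string, std::optional<std::string>>(st);
        for (auto [name, nickname] : rows) {
            std::printf("%s (%s)\n", name.c_str(), nickname ? nickname->c_str() : "none");
        }
    }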
+26 -0  src/dds/cli/cmd/pkg_repo_remove.cpp

@@ -0,0 +1,26 @@
#include "../options.hpp"

#include "./pkg_repo_err_handle.hpp"

#include <dds/pkg/db.hpp>
#include <dds/pkg/remote.hpp>
#include <dds/util/result.hpp>

namespace dds::cli::cmd {

static int _pkg_repo_remove(const options& opts) {
auto cat = opts.open_pkg_db();
for (auto&& rm_name : opts.pkg.repo.remove.names) {
dds::remove_remote(cat, rm_name);
}
return 0;
}

int pkg_repo_remove(const options& opts) {
return handle_pkg_repo_remote_errors([&] {
DDS_E_SCOPE(opts.pkg.repo.subcommand);
return _pkg_repo_remove(opts);
});
}

} // namespace dds::cli::cmd

+19 -0  src/dds/cli/cmd/pkg_repo_update.cpp

@@ -0,0 +1,19 @@
#include "../options.hpp"

#include "./pkg_repo_err_handle.hpp"

#include <dds/pkg/db.hpp>
#include <dds/pkg/remote.hpp>

namespace dds::cli::cmd {

static int _pkg_repo_update(const options& opts) {
update_all_remotes(opts.open_pkg_db().database());
return 0;
}

int pkg_repo_update(const options& opts) {
return handle_pkg_repo_remote_errors([&] { return _pkg_repo_update(opts); });
}

} // namespace dds::cli::cmd

+88 -0  src/dds/cli/cmd/repoman_add.cpp

@@ -0,0 +1,88 @@
#include "../options.hpp"

#include <dds/error/errors.hpp>
#include <dds/pkg/get/get.hpp>
#include <dds/pkg/listing.hpp>
#include <dds/repoman/repoman.hpp>
#include <dds/util/http/pool.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>
#include <neo/sqlite3/error.hpp>

namespace dds::cli::cmd {

static int _repoman_add(const options& opts) {
auto pkg_id = dds::pkg_id::parse(opts.repoman.add.pkg_id_str);
auto rpkg = any_remote_pkg::from_url(neo::url::parse(opts.repoman.add.url_str));
dds::pkg_listing add_info{
.ident = pkg_id,
.description = opts.repoman.add.description,
.remote_pkg = rpkg,
};
auto temp_sdist = get_package_sdist(add_info);

add_info.deps = temp_sdist.sdist.manifest.dependencies;

auto repo = repo_manager::open(opts.repoman.repo_dir);
repo.add_pkg(add_info, opts.repoman.add.url_str);
return 0;
}

int repoman_add(const options& opts) {
return boost::leaf::try_catch( //
[&] {
try {
return _repoman_add(opts);
} catch (...) {
dds::capture_exception();
}
},
[](user_error<errc::invalid_pkg_id>,
semver::invalid_version err,
dds::e_invalid_pkg_id_str idstr) -> int {
dds_log(error,
"Package ID string '{}' is invalid, because '{}' is not a valid semantic "
"version string",
idstr.value,
err.string());
write_error_marker("invalid-pkg-id-str-version");
throw;
},
[](user_error<errc::invalid_pkg_id>, dds::e_invalid_pkg_id_str idstr) -> int {
dds_log(error, "Invalid package ID string '{}'", idstr.value);
write_error_marker("invalid-pkg-id-str");
throw;
},
[](dds::e_sqlite3_error_exc,
boost::leaf::match<neo::sqlite3::errc, neo::sqlite3::errc::constraint_unique>,
dds::pkg_id pkid) {
dds_log(error, "Package {} is already present in the repository", pkid.to_string());
write_error_marker("dup-pkg-add");
return 1;
},
[](http_status_error, http_response_info resp, neo::url url) {
dds_log(error,
"Error resulted from HTTP request [{}]: {} {}",
url.to_string(),
resp.status,
resp.status_message);
return 1;
},
[](dds::user_error<errc::invalid_remote_url> e, neo::url url) -> int {
dds_log(error, "Invalid URL '{}': {}", url.to_string(), e.what());
write_error_marker("repoman-add-invalid-pkg-url");
throw;
},
[](dds::e_sqlite3_error_exc e, dds::e_repo_import_targz tgz) {
dds_log(error, "Database error while importing tar file {}: {}", tgz.path, e.message);
return 1;
},
[](dds::e_system_error_exc e, dds::e_open_repo_db db) {
dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
return 1;
});
}

} // namespace dds::cli::cmd

+57 -0  src/dds/cli/cmd/repoman_import.cpp

@@ -0,0 +1,57 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>
#include <neo/sqlite3/error.hpp>

namespace dds::cli::cmd {

static int _repoman_import(const options& opts) {
auto repo = repo_manager::open(opts.repoman.repo_dir);
for (auto pkg : opts.repoman.import.files) {
repo.import_targz(pkg);
}
return 0;
}

int repoman_import(const options& opts) {
return boost::leaf::try_catch( //
[&] {
try {
return _repoman_import(opts);
} catch (...) {
dds::capture_exception();
}
},
[](dds::e_sqlite3_error_exc,
boost::leaf::match<neo::sqlite3::errc, neo::sqlite3::errc::constraint_unique>,
dds::e_repo_import_targz tgz,
dds::pkg_id pkid) {
dds_log(error,
"Package {} (from {}) is already present in the repository",
pkid.to_string(),
tgz.path);
return 1;
},
[](dds::e_system_error_exc e, dds::e_repo_import_targz tgz) {
dds_log(error, "Failed to import file {}: {}", tgz.path, e.message);
return 1;
},
[](const std::runtime_error& e, dds::e_repo_import_targz tgz) {
dds_log(error, "Unknown error while importing file {}: {}", tgz.path, e.what());
return 1;
},
[](dds::e_sqlite3_error_exc e, dds::e_repo_import_targz tgz) {
dds_log(error, "Database error while importing tar file {}: {}", tgz.path, e.message);
return 1;
},
[](dds::e_system_error_exc e, dds::e_open_repo_db db) {
dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
return 1;
});
}

} // namespace dds::cli::cmd

+48 -0  src/dds/cli/cmd/repoman_init.cpp

@@ -0,0 +1,48 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>

namespace dds::cli::cmd {

static int _repoman_init(const options& opts) {
auto repo = repo_manager::create(opts.repoman.repo_dir, opts.repoman.init.name);
dds_log(info, "Created new repository '{}' in {}", repo.name(), repo.root());
return 0;
}

int repoman_init(const options& opts) {
return boost::leaf::try_catch( //
[&] {
try {
return _repoman_init(opts);
} catch (...) {
dds::capture_exception();
}
},
[](dds::e_sqlite3_error_exc e, dds::e_init_repo init, dds::e_init_repo_db init_db) {
dds_log(error,
"SQLite error while initializing repository in [{}] (SQLite database {}): {}",
init.path,
init_db.path,
e.message);
return 1;
},
[](dds::e_system_error_exc e, dds::e_open_repo_db db) {
dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
return 1;
},
[](dds::e_sqlite3_error_exc e, dds::e_init_repo init) {
dds_log(error,
"SQLite error while initializing repository in [{}]: {}",
init.path,
e.message);
return 1;
});
}

} // namespace dds::cli::cmd

+37 -0  src/dds/cli/cmd/repoman_ls.cpp

@@ -0,0 +1,37 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>

#include <iostream>

namespace dds::cli::cmd {

static int _repoman_ls(const options& opts) {
auto repo = repo_manager::open(opts.repoman.repo_dir);
for (auto id : repo.all_packages()) {
std::cout << id.to_string() << '\n';
}
return 0;
}

int repoman_ls(const options& opts) {
return boost::leaf::try_catch( //
[&] {
try {
return _repoman_ls(opts);
} catch (...) {
dds::capture_exception();
}
},
[](dds::e_system_error_exc e, dds::e_open_repo_db db) {
dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
return 1;
});
}

} // namespace dds::cli::cmd

+45 -0  src/dds/cli/cmd/repoman_remove.cpp

@@ -0,0 +1,45 @@
#include "../options.hpp"

#include <dds/repoman/repoman.hpp>
#include <dds/util/result.hpp>

#include <boost/leaf/handle_exception.hpp>
#include <fmt/ostream.h>
#include <neo/sqlite3/error.hpp>

namespace dds::cli::cmd {

static int _repoman_remove(const options& opts) {
auto repo = repo_manager::open(opts.repoman.repo_dir);
for (auto& str : opts.repoman.remove.pkgs) {
auto id = dds::pkg_id::parse(str);
repo.delete_package(id);
}
return 0;
}

int repoman_remove(const options& opts) {
return boost::leaf::try_catch( //
[&] {
try {
return _repoman_remove(opts);
} catch (...) {
dds::capture_exception();
}
},
[](dds::e_system_error_exc e, dds::e_repo_delete_path tgz, dds::pkg_id pkid) {
dds_log(error,
"Cannot delete requested package '{}' from repository {}: {}",
pkid.to_string(),
tgz.path,
e.message);
write_error_marker("repoman-rm-no-such-package");
return 1;
},
[](dds::e_system_error_exc e, dds::e_open_repo_db db) {
dds_log(error, "Error while opening repository database {}: {}", db.path, e.message);
return 1;
});
}

} // namespace dds::cli::cmd

+45 -0  src/dds/cli/cmd/sdist_create.cpp

@@ -0,0 +1,45 @@
#include "../options.hpp"

#include <dds/error/errors.hpp>
#include <dds/sdist/dist.hpp>

#include <boost/leaf/common.hpp>
#include <boost/leaf/handle_exception.hpp>
#include <fmt/core.h>

namespace dds::cli::cmd {

int sdist_create(const options& opts) {
dds::sdist_params params{
.project_dir = opts.project_dir,
.dest_path = {},
.force = opts.if_exists == if_exists::replace,
.include_apps = true,
.include_tests = true,
};
return boost::leaf::try_catch(
[&] {
auto pkg_man = package_manifest::load_from_directory(params.project_dir).value();
auto default_filename = fmt::format("{}.tar.gz", pkg_man.id.to_string());
auto filepath = opts.out_path.value_or(fs::current_path() / default_filename);
create_sdist_targz(filepath, params);
return 0;
},
[&](boost::leaf::bad_result, e_missing_file missing, e_human_message msg) {
dds_log(error,
"A required file is missing for creating a source distribution for [{}]",
params.project_dir.string());
dds_log(error, "Error: {}", msg.value);
dds_log(error, "Missing file: {}", missing.path.string());
write_error_marker("no-package-json5");
return 1;
},
[&](std::error_code ec, e_human_message msg, boost::leaf::e_file_name file) {
dds_log(error, "Error: {}", msg.value);
dds_log(error, "Failed to access file [{}]: {}", file.value, ec.message());
write_error_marker("failed-package-json5-scan");
return 1;
});
}

} // namespace dds::cli::cmd

+107 -0  src/dds/cli/dispatch_main.cpp

@@ -0,0 +1,107 @@
#include "./dispatch_main.hpp"

#include "./error_handler.hpp"
#include "./options.hpp"

#include <dds/util/paths.hpp>
#include <dds/util/result.hpp>

using namespace dds;

namespace dds::cli {

namespace cmd {
using command = int(const options&);

command build_deps;
command build;
command compile_file;
command pkg_get;
command pkg_import;
command pkg_ls;
command pkg_repo_add;
command pkg_repo_update;
command pkg_repo_ls;
command pkg_repo_remove;
command repoman_add;
command repoman_import;
command repoman_init;
command repoman_ls;
command repoman_remove;
command sdist_create;

} // namespace cmd

int dispatch_main(const options& opts) noexcept {
dds::log::current_log_level = opts.log_level;
return dds::handle_cli_errors([&] {
DDS_E_SCOPE(opts.subcommand);
switch (opts.subcommand) {
case subcommand::build:
return cmd::build(opts);
case subcommand::sdist: {
DDS_E_SCOPE(opts.sdist.subcommand);
switch (opts.sdist.subcommand) {
case sdist_subcommand::create:
return cmd::sdist_create(opts);
case sdist_subcommand::_none_:;
}
neo::unreachable();
}
case subcommand::pkg: {
DDS_E_SCOPE(opts.pkg.subcommand);
switch (opts.pkg.subcommand) {
case pkg_subcommand::ls:
return cmd::pkg_ls(opts);
case pkg_subcommand::get:
return cmd::pkg_get(opts);
case pkg_subcommand::import:
return cmd::pkg_import(opts);
case pkg_subcommand::repo: {
DDS_E_SCOPE(opts.pkg.repo.subcommand);
switch (opts.pkg.repo.subcommand) {
case pkg_repo_subcommand::add:
return cmd::pkg_repo_add(opts);
case pkg_repo_subcommand::update:
return cmd::pkg_repo_update(opts);
case pkg_repo_subcommand::ls:
return cmd::pkg_repo_ls(opts);
case pkg_repo_subcommand::remove:
return cmd::pkg_repo_remove(opts);
case pkg_repo_subcommand::_none_:;
}
neo::unreachable();
}
case pkg_subcommand::_none_:;
}
neo::unreachable();
}
case subcommand::repoman: {
DDS_E_SCOPE(opts.repoman.subcommand);
switch (opts.repoman.subcommand) {
case repoman_subcommand::import:
return cmd::repoman_import(opts);
case repoman_subcommand::add:
return cmd::repoman_add(opts);
case repoman_subcommand::init:
return cmd::repoman_init(opts);
case repoman_subcommand::remove:
return cmd::repoman_remove(opts);
case repoman_subcommand::ls:
return cmd::repoman_ls(opts);
case repoman_subcommand::_none_:;
}
neo::unreachable();
}
case subcommand::compile_file:
return cmd::compile_file(opts);
case subcommand::build_deps:
return cmd::build_deps(opts);
case subcommand::_none_:;
}
neo::unreachable();
return 6;
});
}

} // namespace dds::cli

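Each nesting level above re-applies `DDS_E_SCOPE` with the active subcommand, which (the assumption here) loads that enum value into the current boost::leaf error context so that handlers further up can select on it, as `matchv<pkg_repo_subcommand::remove>` does in pkg_repo_err_handle.cpp. A reduced sketch of that load-then-match mechanism using plain boost::leaf:

    #include <boost/leaf/handle_exception.hpp>
    #include <boost/leaf/on_error.hpp>

    #include <stdexcept>

    enum class verb { add, remove };

    int dispatch(verb v) {
        return boost::leaf::try_catch(
            [&]() -> int {
                auto load = boost::leaf::on_error(v);  // attach v if we fail
                throw std::runtime_error("nope");
            },
            [](const std::runtime_error&, boost::leaf::match<verb, verb::remove>) {
                return 2;  // only selected when the failing scope loaded verb::remove
            },
            [](const std::runtime_error&) { return 1; });
    }

    int main() { return dispatch(verb::remove) == 2 ? 0 : 1; }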
+9 -0  src/dds/cli/dispatch_main.hpp

@@ -0,0 +1,9 @@
#pragma once

namespace dds::cli {

struct options;

int dispatch_main(const options&) noexcept;

} // namespace dds::cli

+73 -0  src/dds/cli/error_handler.cpp

@@ -0,0 +1,73 @@
#include "./error_handler.hpp"
#include "./options.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>
#include <dds/util/signal.hpp>

#include <boost/leaf/common.hpp>
#include <boost/leaf/handle_error.hpp>
#include <boost/leaf/handle_exception.hpp>
#include <boost/leaf/result.hpp>
#include <fmt/ostream.h>
#include <json5/parse_data.hpp>
#include <neo/scope.hpp>
#include <neo/url/parse.hpp>

#include <fstream>

using namespace dds;

namespace {

auto handlers = std::tuple( //
[](neo::url_validation_error exc, e_url_string bad_url) {
dds_log(error, "Invalid URL '{}': {}", bad_url.value, exc.what());
return 1;
},
[](boost::leaf::catch_<error_base> exc,
json5::parse_error parse_err,
boost::leaf::e_file_name* maybe_fpath) {
dds_log(error, "{}", exc.value().what());
dds_log(error, "Invalid JSON5 was found: {}", parse_err.what());
if (maybe_fpath) {
dds_log(error, " (While reading from [{}])", maybe_fpath->value);
}
dds_log(error, "{}", exc.value().explanation());
write_error_marker("package-json5-parse-error");
return 1;
},
[](user_error<errc::test_failure> exc, matchv<cli::subcommand::build>) {
write_error_marker("build-failed-test-failed");
dds_log(error, "{}", exc.what());
dds_log(error, "{}", exc.explanation());
dds_log(error, "Refer: {}", exc.error_reference());
return 1;
},
[](boost::leaf::catch_<error_base> exc) {
dds_log(error, "{}", exc.value().what());
dds_log(error, "{}", exc.value().explanation());
dds_log(error, "Refer: {}", exc.value().error_reference());
return 1;
},
[](user_cancelled) {
dds_log(critical, "Operation cancelled by the user");
return 2;
},
[](e_system_error_exc exc, boost::leaf::verbose_diagnostic_info const& diag) {
dds_log(critical,
"An unhandled std::system_error arose. THIS IS A DDS BUG! Info: {}",
diag);
dds_log(critical, "Exception message from std::system_error: {}", exc.message);
return 42;
},
[](boost::leaf::verbose_diagnostic_info const& diag) {
dds_log(critical, "An unhandled error arose. THIS IS A DDS BUG! Info: {}", diag);
return 42;
});
} // namespace

int dds::handle_cli_errors(std::function<int()> fn) noexcept {
return boost::leaf::try_catch(fn, handlers);
}

+9 -0  src/dds/cli/error_handler.hpp

@@ -0,0 +1,9 @@
#pragma once

#include <functional>

namespace dds {

int handle_cli_errors(std::function<int()>) noexcept;

} // namespace dds

+479 -0  src/dds/cli/options.cpp

@@ -0,0 +1,479 @@
#include "./options.hpp"

#include <dds/error/errors.hpp>
#include <dds/pkg/db.hpp>
#include <dds/toolchain/from_json.hpp>
#include <dds/toolchain/toolchain.hpp>

#include <debate/enum.hpp>

using namespace dds;
using namespace debate;

namespace {

struct setup {
dds::cli::options& opts;

explicit setup(dds::cli::options& opts)
: opts(opts) {}

// Util argument common to a lot of operations
argument if_exists_arg{
.long_spellings = {"if-exists"},
.help = "What to do if the resource already exists",
.valname = "{replace,skip,fail}",
.action = put_into(opts.if_exists),
};

argument if_missing_arg{
.long_spellings = {"if-missing"},
.help = "What to do if the resource does not exist",
.valname = "{fail,ignore}",
.action = put_into(opts.if_missing),
};

argument toolchain_arg{
.long_spellings = {"toolchain"},
.short_spellings = {"t"},
.help = "The toolchain to use when building",
.valname = "<file-or-id>",
.action = put_into(opts.toolchain),
};

argument project_arg{
.long_spellings = {"project"},
.short_spellings = {"p"},
.help = "The project to build. If not given, uses the current working directory",
.valname = "<project-path>",
.action = put_into(opts.project_dir),
};

argument no_warn_arg{
.long_spellings = {"no-warn", "no-warnings"},
.help = "Disable build warnings",
.nargs = 0,
.action = store_true(opts.disable_warnings),
};

argument out_arg{
.long_spellings = {"out", "output"},
.short_spellings = {"o"},
.help = "Path to the output",
.valname = "<path>",
.action = put_into(opts.out_path),
};

argument lm_index_arg{
.long_spellings = {"libman-index"},
.help = "Path to a libman index to use",
.valname = "<lmi-path>",
.action = put_into(opts.build.lm_index),
};

argument jobs_arg{
.long_spellings = {"jobs"},
.short_spellings = {"j"},
.help = "Set the maximum number of parallel jobs to execute",
.valname = "<job-count>",
.action = put_into(opts.jobs),
};

argument repoman_repo_dir_arg{
.help = "The directory of the repository to manage",
.valname = "<repo-dir>",
.required = true,
.action = put_into(opts.repoman.repo_dir),
};

void do_setup(argument_parser& parser) noexcept {
parser.add_argument({
.long_spellings = {"log-level"},
.short_spellings = {"l"},
.help = ""
"Set the dds logging level. One of 'trace', 'debug', 'info', \n"
"'warn', 'error', 'critical', or 'silent'",
.valname = "<level>",
.action = put_into(opts.log_level),
});
parser.add_argument({
.long_spellings = {"data-dir"},
.help
= ""
"(Advanced) "
"Override dds's data directory. This is used for various caches and databases.\n"
"The default is a user-local directory that differs depending on platform.",
.valname = "<directory>",
.action = put_into(opts.data_dir),
});
parser.add_argument({
.long_spellings = {"pkg-cache-dir"},
.help = "(Advanced) Override dds's local package cache directory.",
.valname = "<directory>",
.action = put_into(opts.pkg_cache_dir),
});
parser.add_argument({
.long_spellings = {"pkg-db-path"},
.help = "(Advanced) Override dds's default package database path.",
.valname = "<database-path>",
.action = put_into(opts.pkg_db_dir),
});

setup_main_commands(parser.add_subparsers({
.description = "The operation to perform",
.action = put_into(opts.subcommand),
}));
}

void setup_main_commands(subparser_group& group) {
setup_build_cmd(group.add_parser({
.name = "build",
.help = "Build a project",
}));
setup_compile_file_cmd(group.add_parser({
.name = "compile-file",
.help = "Compile individual files in the project",
}));
setup_build_deps_cmd(group.add_parser({
.name = "build-deps",
.help = "Build a set of dependencies and generate a libman index",
}));
setup_pkg_cmd(group.add_parser({
.name = "pkg",
.help = "Manage packages and package remotes",
}));
setup_sdist_cmd(group.add_parser({
.name = "sdist",
.help = "Work with source distribution packages",
}));
setup_repoman_cmd(group.add_parser({
.name = "repoman",
.help = "Manage a dds package repository",
}));
}

void setup_build_cmd(argument_parser& build_cmd) {
build_cmd.add_argument(toolchain_arg.dup());
build_cmd.add_argument(project_arg.dup());
build_cmd.add_argument({
.long_spellings = {"no-tests"},
.help = "Do not build and run project tests",
.nargs = 0,
.action = debate::store_false(opts.build.want_tests),
});
build_cmd.add_argument({
.long_spellings = {"no-apps"},
.help = "Do not build project applications",
.nargs = 0,
.action = debate::store_false(opts.build.want_apps),
});
build_cmd.add_argument(no_warn_arg.dup());
build_cmd.add_argument(out_arg.dup()).help = "Directory where dds will write build results";

build_cmd.add_argument({
.long_spellings = {"add-repo"},
.help = ""
"Add remote repositories to the package database before building\n"
"(Implies --update-repos)",
.valname = "<repo-url>",
.can_repeat = true,
.action = debate::push_back_onto(opts.build.add_repos),
});
build_cmd.add_argument({
.long_spellings = {"update-repos"},
.short_spellings = {"U"},
.help = "Update package repositories before building",
.nargs = 0,
.action = debate::store_true(opts.build.update_repos),
});
build_cmd.add_argument(lm_index_arg.dup()).help
= "Path to a libman index file to use for loading project dependencies";
build_cmd.add_argument(jobs_arg.dup());
}

void setup_compile_file_cmd(argument_parser& compile_file_cmd) noexcept {
compile_file_cmd.add_argument(project_arg.dup());
compile_file_cmd.add_argument(toolchain_arg.dup());
compile_file_cmd.add_argument(no_warn_arg.dup()).help = "Disable compiler warnings";
compile_file_cmd.add_argument(jobs_arg.dup()).help
= "Set the maximum number of files to compile in parallel";
compile_file_cmd.add_argument(lm_index_arg.dup());
compile_file_cmd.add_argument(out_arg.dup());
compile_file_cmd.add_argument({
.help = "One or more source files to compile",
.valname = "<source-files>",
.can_repeat = true,
.action = debate::push_back_onto(opts.compile_file.files),
});
}

void setup_build_deps_cmd(argument_parser& build_deps_cmd) noexcept {
build_deps_cmd.add_argument(toolchain_arg.dup()).required = true;
build_deps_cmd.add_argument(jobs_arg.dup());
build_deps_cmd.add_argument(out_arg.dup());
build_deps_cmd.add_argument(lm_index_arg.dup()).help
= "Destination path for the generated libman index file";
build_deps_cmd.add_argument({
.long_spellings = {"deps-file"},
.short_spellings = {"d"},
.help = "Path to a JSON5 file listing dependencies",
.valname = "<deps-file>",
.can_repeat = true,
.action = debate::push_back_onto(opts.build_deps.deps_files),
});
build_deps_cmd.add_argument({
.help = "Dependency statement strings",
.valname = "<dependency>",
.can_repeat = true,
.action = debate::push_back_onto(opts.build_deps.deps),
});
}

void setup_pkg_cmd(argument_parser& pkg_cmd) {
auto& pkg_group = pkg_cmd.add_subparsers({
.valname = "<pkg-subcommand>",
.action = put_into(opts.pkg.subcommand),
});
pkg_group.add_parser({
.name = "ls",
.help = "List locally available packages",
});
setup_pkg_get_cmd(pkg_group.add_parser({
.name = "get",
.help = "Obtain a copy of a package from a remote",
}));
setup_pkg_init_db_cmd(pkg_group.add_parser({
.name = "init-db",
.help = "Initialize a new package database file (Path specified with '--pkg-db-path')",
}));
setup_pkg_import_cmd(pkg_group.add_parser({
.name = "import",
.help = "Import a source distribution archive into the local package cache",
}));
setup_pkg_repo_cmd(pkg_group.add_parser({
.name = "repo",
.help = "Manage package repositories",
}));
}

void setup_pkg_get_cmd(argument_parser& pkg_get_cmd) {
pkg_get_cmd.add_argument({
.valname = "<pkg-id>",
.can_repeat = true,
.action = push_back_onto(opts.pkg.get.pkgs),
});
pkg_get_cmd.add_argument(out_arg.dup()).help
= "Directory where obtained packages will be placed.\n"
"Default is the current working directory.";
}

void setup_pkg_init_db_cmd(argument_parser& pkg_init_db_cmd) {
pkg_init_db_cmd.add_argument(if_exists_arg.dup()).help
= "What to do if the database file already exists";
}

void setup_pkg_import_cmd(argument_parser& pkg_import_cmd) noexcept {
pkg_import_cmd.add_argument({
.long_spellings = {"stdin"},
.help = "Import a source distribution archive from standard input",
.nargs = 0,
.action = debate::store_true(opts.pkg.import.from_stdin),
});
pkg_import_cmd.add_argument(if_exists_arg.dup()).help
= "What to do if the package already exists in the local cache";
pkg_import_cmd.add_argument({
.help = "One or more paths/URLs to source distribution archives to import",
.valname = "<path-or-url>",
.can_repeat = true,
.action = debate::push_back_onto(opts.pkg.import.items),
});
}

void setup_pkg_repo_cmd(argument_parser& pkg_repo_cmd) noexcept {
auto& pkg_repo_grp = pkg_repo_cmd.add_subparsers({
.valname = "<pkg-repo-subcommand>",
.action = put_into(opts.pkg.repo.subcommand),
});
setup_pkg_repo_add_cmd(pkg_repo_grp.add_parser({
.name = "add",
.help = "Add a package repository",
}));
setup_pkg_repo_remove_cmd(pkg_repo_grp.add_parser({
.name = "remove",
.help = "Remove one or more package repositories",
}));

pkg_repo_grp.add_parser({
.name = "update",
.help = "Update package repository information",
});
pkg_repo_grp.add_parser({
.name = "ls",
.help = "List locally registered package repositories",
});
}

void setup_pkg_repo_add_cmd(argument_parser& pkg_repo_add_cmd) noexcept {
pkg_repo_add_cmd.add_argument({
.help = "URL of a repository to add",
.valname = "<url>",
.required = true,
.action = debate::put_into(opts.pkg.repo.add.url),
});
pkg_repo_add_cmd.add_argument({
.long_spellings = {"no-update"},
.help = "Do not immediately update data for the new package repository",
.nargs = 0,
.action = debate::store_false(opts.pkg.repo.add.update),
});
}

void setup_pkg_repo_remove_cmd(argument_parser& pkg_repo_remove_cmd) noexcept {
pkg_repo_remove_cmd.add_argument({
.help = "Name of one or more repositories to remove",
.valname = "<repo-name>",
.can_repeat = true,
.action = push_back_onto(opts.pkg.repo.remove.names),
});
pkg_repo_remove_cmd.add_argument(if_missing_arg.dup()).help
= "What to do if any of the named repositories do not exist";
}

void setup_sdist_cmd(argument_parser& sdist_cmd) noexcept {
auto& sdist_grp = sdist_cmd.add_subparsers({
.valname = "<sdist-subcommand>",
.action = put_into(opts.sdist.subcommand),
});
setup_sdist_create_cmd(sdist_grp.add_parser({
.name = "create",
.help = "Create a source distribution from a project tree",
}));
}

void setup_sdist_create_cmd(argument_parser& sdist_create_cmd) {
sdist_create_cmd.add_argument(project_arg.dup()).help
= "Path to the project for which to create a source distribution.\n"
"Default is the current working directory.";
sdist_create_cmd.add_argument(out_arg.dup()).help
= "Destination path for the source distributnion archive";
sdist_create_cmd.add_argument(if_exists_arg.dup()).help
= "What to do if the destination names an existing file";
}

void setup_repoman_cmd(argument_parser& repoman_cmd) {
auto& grp = repoman_cmd.add_subparsers({
.valname = "<repoman-subcommand>",
.action = put_into(opts.repoman.subcommand),
});

setup_repoman_init_cmd(grp.add_parser({
.name = "init",
.help = "Initialize a directory as a new repository",
}));
auto& ls_cmd = grp.add_parser({
.name = "ls",
.help = "List the contents of a package repository directory",
});
ls_cmd.add_argument(repoman_repo_dir_arg.dup());
setup_repoman_add_cmd(grp.add_parser({
.name = "add",
.help = "Add a package listing to the repository by URL",
}));
setup_repoman_import_cmd(grp.add_parser({
.name = "import",
.help = "Import a source distribution into the repository",
}));
setup_repoman_remove_cmd(grp.add_parser({
.name = "remove",
.help = "Remove packages from a package repository",
}));
}

void setup_repoman_init_cmd(argument_parser& repoman_init_cmd) {
repoman_init_cmd.add_argument(repoman_repo_dir_arg.dup());
repoman_init_cmd.add_argument(if_exists_arg.dup()).help
= "What to do if the directory exists and is already repository";
repoman_init_cmd.add_argument({
.long_spellings = {"name"},
.short_spellings = {"n"},
.help = "Specify the name of the new repository",
.valname = "<name>",
.action = put_into(opts.repoman.init.name),
});
}

void setup_repoman_import_cmd(argument_parser& repoman_import_cmd) {
repoman_import_cmd.add_argument(repoman_repo_dir_arg.dup());
repoman_import_cmd.add_argument({
.help = "Paths to source distribution archives to import",
.valname = "<sdist-file-path>",
.can_repeat = true,
.action = push_back_onto(opts.repoman.import.files),
});
}

void setup_repoman_add_cmd(argument_parser& repoman_add_cmd) {
repoman_add_cmd.add_argument(repoman_repo_dir_arg.dup());
repoman_add_cmd.add_argument({
.help = "The package ID of the package to add",
.valname = "<pkg-id>",
.required = true,
.action = put_into(opts.repoman.add.pkg_id_str),
});
repoman_add_cmd.add_argument({
.help = "URL to add to the repository",
.valname = "<url>",
.required = true,
.action = put_into(opts.repoman.add.url_str),
});
repoman_add_cmd.add_argument({
.long_spellings = {"description"},
.short_spellings = {"d"},
.action = put_into(opts.repoman.add.description),
});
}

void setup_repoman_remove_cmd(argument_parser& repoman_remove_cmd) {
repoman_remove_cmd.add_argument(repoman_repo_dir_arg.dup());
repoman_remove_cmd.add_argument({
.help = "One or more identifiers of packages to remove",
.valname = "<pkg-id>",
.can_repeat = true,
.action = push_back_onto(opts.repoman.remove.pkgs),
});
}
};

} // namespace

void cli::options::setup_parser(debate::argument_parser& parser) noexcept {
setup{*this}.do_setup(parser);
}

pkg_db dds::cli::options::open_pkg_db() const {
return pkg_db::open(this->pkg_db_dir.value_or(pkg_db::default_path()));
}

toolchain dds::cli::options::load_toolchain() const {
if (!toolchain) {
auto def = dds::toolchain::get_default();
if (!def) {
throw_user_error<errc::no_default_toolchain>();
}
return *def;
}
// Convert the given string to a toolchain
auto& tc_str = *toolchain;
if (tc_str.starts_with(":")) {
auto default_tc = tc_str.substr(1);
auto tc = dds::toolchain::get_builtin(default_tc);
if (!tc.has_value()) {
throw_user_error<
errc::invalid_builtin_toolchain>("Invalid built-in toolchain name '{}'",
default_tc);
}
return std::move(*tc);
} else {
return parse_toolchain_json5(slurp_file(tc_str));
}
}
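In short, a leading ':' names a built-in toolchain, and anything else is treated as a path to a JSON5 toolchain file; a sketch of the two call paths (the toolchain names are illustrative):

// dds build -t :gcc         → dds::toolchain::get_builtin("gcc")
// dds build -t my-tc.json5  → parse_toolchain_json5(slurp_file("my-tc.json5"))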

+267 -0 src/dds/cli/options.hpp

@@ -0,0 +1,267 @@
#pragma once

#include <dds/util/log.hpp>
#include <debate/argument_parser.hpp>

#include <filesystem>
#include <optional>
#include <string>
#include <vector>

namespace dds {

namespace fs = std::filesystem;
class pkg_db;
class toolchain;

namespace cli {

/**
* @brief Top-level dds subcommands
*/
enum class subcommand {
_none_,
build,
compile_file,
build_deps,
pkg,
sdist,
repoman,
};

/**
* @brief 'dds sdist' subcommands
*/
enum class sdist_subcommand {
_none_,
create,
};

/**
* @brief 'dds pkg' subcommands
*/
enum class pkg_subcommand {
_none_,
ls,
get,
import,
repo,
};

/**
* @brief 'dds pkg repo' subcommands
*/
enum class pkg_repo_subcommand {
_none_,
add,
remove,
update,
ls,
};

/**
* @brief 'dds repoman' subcommands
*
*/
enum class repoman_subcommand {
_none_,
init,
import,
add,
remove,
ls,
};

/**
* @brief Options for `--if-exists` on the CLI
*/
enum class if_exists {
replace,
fail,
ignore,
};

enum class if_missing {
fail,
ignore,
};

/**
* @brief Complete aggregate of all dds command-line options, and some utilities
*/
struct options {
using path = fs::path;
using opt_path = std::optional<fs::path>;
using string = std::string;
using opt_string = std::optional<std::string>;

// The `--data-dir` argument
opt_path data_dir;
// The `--pkg-cache-dir' argument
opt_path pkg_cache_dir;
// The `--pkg-db-dir` argument
opt_path pkg_db_dir;
// The `--log-level` argument
log::level log_level = log::level::info;

// The top-most selected subcommand
enum subcommand subcommand;

// Many subcommands use a '--project' argument, stored here, using the CWD as the default
path project_dir = fs::current_path();

// Compile and build commands with `--no-warnings`/`--no-warn`
bool disable_warnings = false;
// Compile and build commands' `--jobs` parameter
int jobs = 0;
// Compile and build commands' `--toolchain` option:
opt_string toolchain;
opt_path out_path;

// Shared `--if-exists` argument:
cli::if_exists if_exists = cli::if_exists::fail;
// Shared '--if-missing' argument:
cli::if_missing if_missing = cli::if_missing::fail;

/**
* @brief Open the package pkg_db based on the user-specified options.
* @return pkg_db
*/
pkg_db open_pkg_db() const;
/**
* @brief Load a dds toolchain as specified by the user, or a default.
* @return dds::toolchain
*/
dds::toolchain load_toolchain() const;

/**
* @brief Parameters specific to 'dds build'
*/
struct {
bool want_tests = true;
bool want_apps = true;
opt_path lm_index;
std::vector<string> add_repos;
bool update_repos = false;
} build;

/**
* @brief Parameters specific to 'dds compile-file'
*/
struct {
/// The files that the user has requested to be compiled
std::vector<fs::path> files;
} compile_file;

/**
* @brief Parameters specific to 'dds build-deps'
*/
struct {
/// Files listed with '--deps-file'
std::vector<fs::path> deps_files;
/// Dependency strings provided directly in the command-line
std::vector<string> deps;
} build_deps;

/**
* @brief Parameters and subcommands for 'dds pkg'
*
*/
struct {
/// The 'dds pkg' subcommand
pkg_subcommand subcommand;

/**
* @brief Parameters for 'dds pkg import'
*/
struct {
/// File paths or URLs of packages to import
std::vector<string> items;
/// Allow piping a package tarball in through stdin
bool from_stdin = false;
} import;

/**
* @brief Parameters for 'dds pkg repo'
*/
struct {
/// The 'pkg repo' subcommand
pkg_repo_subcommand subcommand;

/**
* @brief Parameters of 'dds pkg repo add'
*/
struct {
/// The repository URL
string url;
/// Whether we should update repo data after adding the repository
bool update = true;
} add;

/**
* @brief Parameters of 'dds pkg repo remove'
*/
struct {
/// Repositories to remove (by name)
std::vector<string> names;
} remove;
} repo;

/**
* @brief Parameters for 'dds pkg get'
*/
struct {
/// Package IDs to download
std::vector<string> pkgs;
} get;
} pkg;

struct {
sdist_subcommand subcommand;
} sdist;

/**
* @brief Parameters for 'dds repoman'
*/
struct {
/// Shared parameter between repoman subcommands: The directory we are acting upon
path repo_dir;

/// The actual operation we are performing on the repository dir
repoman_subcommand subcommand;

/// Options for 'dds repoman init'
struct {
/// The name of the new repository. If not provided, a random one will be generated
opt_string name;
} init;

/// Options for 'dds repoman import'
struct {
/// sdist tarball file paths to import into the repository
std::vector<fs::path> files;
} import;

/// Options for 'dds repoman add'
struct {
std::string pkg_id_str;
std::string url_str;
std::string description;
} add;

/// Options for 'dds repoman remove'
struct {
/// Package IDs of packages to remove
std::vector<string> pkgs;
} remove;
} repoman;

/**
* @brief Attach arguments and subcommands to the given argument parser, binding those arguments
* to the values in this object.
*/
void setup_parser(debate::argument_parser& parser) noexcept;
};

} // namespace cli
} // namespace dds
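Wiring the aggregate into a parser is then a two-step affair; a minimal sketch, assuming debate::argument_parser exposes a parse entry point (its exact spelling lives in debate/argument_parser.hpp, which is not part of this diff):

dds::cli::options       opts;
debate::argument_parser parser;
opts.setup_parser(parser);  // binds every flag and subcommand to members of `opts`
// parser.parse_args(args);  // assumed API; consult debate/argument_parser.hpp
if (opts.subcommand == dds::cli::subcommand::build) {
    auto tc = opts.load_toolchain();  // ':'-prefixed IDs select built-in toolchains
    // ...
}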

+27 -29 src/dds/db/database.cpp

@@ -14,13 +14,13 @@

using namespace dds;

namespace sqlite3 = neo::sqlite3;
using sqlite3::exec;
using namespace sqlite3::literals;
namespace nsql = neo::sqlite3;
using nsql::exec;
using namespace nsql::literals;

namespace {

void migrate_1(sqlite3::database& db) {
void migrate_1(nsql::database& db) {
db.exec(R"(
CREATE TABLE dds_files (
file_id INTEGER PRIMARY KEY,
@@ -51,16 +51,17 @@ void migrate_1(sqlite3::database& db) {
)");
}

void ensure_migrated(sqlite3::database& db) {
sqlite3::transaction_guard tr{db};
void ensure_migrated(nsql::database& db) {
db.exec(R"(
PRAGMA foreign_keys = 1;
CREATE TABLE IF NOT EXISTS dds_meta AS
WITH init (meta) AS (VALUES ('{"version": 0}'))
SELECT * FROM init;
)");
nsql::transaction_guard tr{db};

auto meta_st = db.prepare("SELECT meta FROM dds_meta");
auto [meta_json] = sqlite3::unpack_single<std::string>(meta_st);
auto [meta_json] = nsql::unpack_single<std::string>(meta_st);

auto meta = nlohmann::json::parse(meta_json);
if (!meta.is_object()) {
@@ -77,26 +78,26 @@ void ensure_migrated(sqlite3::database& db) {
migrate_1(db);
}
meta["version"] = 1;
exec(db, "UPDATE dds_meta SET meta=?", std::forward_as_tuple(meta.dump()));
exec(db.prepare("UPDATE dds_meta SET meta=?"), meta.dump());
}

} // namespace

database database::open(const std::string& db_path) {
auto db = sqlite3::database::open(db_path);
auto db = nsql::database::open(db_path);
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
} catch (const nsql::sqlite3_error& e) {
dds_log(
error,
"Failed to load the databsae. It appears to be invalid/corrupted. We'll delete it and "
"create a new one. The exception message is: {}",
e.what());
fs::remove(db_path);
db = sqlite3::database::open(db_path);
db = nsql::database::open(db_path);
try {
ensure_migrated(db);
} catch (const sqlite3::sqlite3_error& e) {
} catch (const nsql::sqlite3_error& e) {
dds_log(critical,
"Failed to apply database migrations to recovery database. This is a critical "
"error. The exception message is: {}",
@@ -107,25 +108,25 @@ database database::open(const std::string& db_path) {
return database(std::move(db));
}

database::database(sqlite3::database db)
database::database(nsql::database db)
: _db(std::move(db)) {}

std::int64_t database::_record_file(path_ref path_) {
auto path = fs::weakly_canonical(path_);
sqlite3::exec(_stmt_cache(R"(
nsql::exec(_stmt_cache(R"(
INSERT OR IGNORE INTO dds_files (path)
VALUES (?)
)"_sql),
std::forward_as_tuple(path.generic_string()));
path.generic_string());
auto& st = _stmt_cache(R"(
SELECT file_id
FROM dds_files
WHERE path = ?1
)"_sql);
st.reset();
auto str = path.generic_string();
st.bindings[1] = str;
auto [rowid] = sqlite3::unpack_single<std::int64_t>(st);
auto str = path.generic_string();
st.bindings()[1] = str;
auto [rowid] = nsql::unpack_single<std::int64_t>(st);
return rowid;
}

@@ -136,7 +137,7 @@ void database::record_dep(path_ref input, path_ref output, fs::file_time_type in
INSERT OR REPLACE INTO dds_deps (input_file_id, output_file_id, input_mtime)
VALUES (?, ?, ?)
)"_sql);
sqlite3::exec(st, std::forward_as_tuple(in_id, out_id, input_mtime.time_since_epoch().count()));
nsql::exec(st, in_id, out_id, input_mtime.time_since_epoch().count());
}

void database::store_file_command(path_ref file, const command_info& cmd) {
@@ -147,10 +148,7 @@ void database::store_file_command(path_ref file, const command_info& cmd) {
INTO dds_file_commands(file_id, command, output)
VALUES (?1, ?2, ?3)
)"_sql);
sqlite3::exec(st,
std::forward_as_tuple(file_id,
std::string_view(cmd.command),
std::string_view(cmd.output)));
nsql::exec(st, file_id, std::string_view(cmd.command), std::string_view(cmd.output));
}

void database::forget_inputs_of(path_ref file) {
@@ -163,7 +161,7 @@ void database::forget_inputs_of(path_ref file) {
DELETE FROM dds_deps
WHERE output_file_id IN id_to_delete
)"_sql);
sqlite3::exec(st, std::forward_as_tuple(fs::weakly_canonical(file).generic_string()));
nsql::exec(st, fs::weakly_canonical(file).generic_string());
}

std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_) const {
@@ -180,11 +178,11 @@ std::optional<std::vector<input_file_info>> database::inputs_of(path_ref file_)
WHERE output_file_id IN file
)"_sql);
st.reset();
st.bindings[1] = file.generic_string();
auto tup_iter = sqlite3::iter_tuples<std::string, std::int64_t>(st);
st.bindings()[1] = file.generic_string();
auto tup_iter = nsql::iter_tuples<std::string, std::int64_t>(st);

std::vector<input_file_info> ret;
for (auto& [path, mtime] : tup_iter) {
for (auto [path, mtime] : tup_iter) {
ret.emplace_back(
input_file_info{path, fs::file_time_type(fs::file_time_type::duration(mtime))});
}
@@ -208,8 +206,8 @@ std::optional<command_info> database::command_of(path_ref file_) const {
WHERE file_id IN file
)"_sql);
st.reset();
st.bindings[1] = file.generic_string();
auto opt_res = sqlite3::unpack_single_opt<std::string, std::string>(st);
st.bindings()[1] = file.generic_string();
auto opt_res = nsql::unpack_single_opt<std::string, std::string>(st);
if (!opt_res) {
return std::nullopt;
}

+1 -2 src/dds/dym.cpp

@@ -1,6 +1,7 @@
#include <dds/dym.hpp>

#include <dds/error/errors.hpp>
#include <dds/util/log.hpp>

#include <range/v3/algorithm/min_element.hpp>
#include <range/v3/view/cartesian_product.hpp>
@@ -10,8 +11,6 @@

using namespace dds;

thread_local dym_target* dym_target::_tls_current = nullptr;

std::size_t dds::lev_edit_distance(std::string_view a, std::string_view b) noexcept {
const auto n_rows = b.size() + 1;
const auto n_columns = a.size() + 1;

+0 -30 src/dds/dym.hpp

@@ -11,36 +11,6 @@ namespace dds {

std::size_t lev_edit_distance(std::string_view a, std::string_view b) noexcept;

class dym_target {
std::optional<std::string> _candidate;
dym_target* _tls_prev = nullptr;
static thread_local dym_target* _tls_current;

public:
dym_target()
: _tls_prev(_tls_current) {
_tls_current = this;
}
dym_target(const dym_target&) = delete;
~dym_target() { _tls_current = _tls_prev; }

template <typename Func>
static void fill(Func&& fn) noexcept {
if (_tls_current) {
_tls_current->_candidate = fn();
}
}

auto& candidate() const noexcept { return _candidate; }

std::string sentence_suffix() const noexcept {
if (_candidate) {
return " (Did you mean '" + *_candidate + "'?)";
}
return "";
}
};

template <typename Range>
std::optional<std::string> did_you_mean(std::string_view given, Range&& strings) noexcept {
auto cand = ranges::min_element(strings, ranges::less{}, [&](std::string_view candidate) {

+19 -5 src/dds/error/errors.cpp

@@ -34,9 +34,13 @@ std::string error_url_suffix(dds::errc ec) noexcept {
case errc::invalid_catalog_json:
return "invalid-catalog-json.html";
case errc::no_catalog_remote_info:
return "no-catalog-remote-info.html";
return "no-pkg-remote.html";
case errc::git_clone_failure:
return "git-clone-failure.html";
case errc::invalid_remote_url:
return "invalid-remote-url.html";
case errc::http_download_failure:
return "http-failure.html";
case errc::invalid_repo_transform:
return "invalid-repo-transform.html";
case errc::sdist_ident_mismatch:
@@ -163,14 +167,21 @@ Check the JSON schema and try your submission again.
)";
case errc::no_catalog_remote_info:
return R"(
The catalog entry requires information regarding the remote acquisition method.
Refer to the documentation for details.
There is no package remote with the given name
)";
case errc::git_clone_failure:
return R"(
dds tried to clone a repository using Git, but the clone operation failed.
There are a variety of possible causes. It is best to check the output from
Git in diagnosing this failure.
)";
case errc::invalid_remote_url:
return R"(The given package/remote URL is invalid)";
case errc::http_download_failure:
return R"(
There was a problem when trying to download data from an HTTP server. HTTP 40x
errors indicate problems on the client-side, and HTTP 50x errors indicate that
the server itself encountered an error.
)";
case errc::invalid_repo_transform:
return R"(
@@ -280,10 +291,13 @@ std::string_view dds::default_error_string(dds::errc ec) noexcept {
case errc::invalid_catalog_json:
return "The given catalog JSON data is not valid";
case errc::no_catalog_remote_info:
return "The catalog JSON is missing remote acquisition information for one or more\n"
"packages";
return "Tne named remote does not exist." BUG_STRING_SUFFIX;
case errc::git_clone_failure:
return "A git-clone operation failed.";
case errc::invalid_remote_url:
return "The given package/remote URL is not valid";
case errc::http_download_failure:
return "There was an error downloading data from an HTTP server.";
case errc::invalid_repo_transform:
return "A repository filesystem transformation is invalid";
case errc::sdist_ident_mismatch:

+24 -2 src/dds/error/errors.hpp

@@ -24,6 +24,8 @@ enum class errc {
no_catalog_remote_info,

git_clone_failure,
invalid_remote_url,
http_download_failure,
invalid_repo_transform,
sdist_ident_mismatch,
sdist_exists,
@@ -84,6 +86,16 @@ struct external_error : external_error_base {

using error_invalid_default_toolchain = user_error<errc::invalid_builtin_toolchain>;

template <errc ErrorCode, typename... Args>
auto make_user_error(std::string_view fmt_str, Args&&... args) {
return user_error<ErrorCode>(fmt::format(fmt_str, std::forward<Args>(args)...));
}

template <errc ErrorCode>
auto make_user_error() {
return user_error<ErrorCode>(std::string(default_error_string(ErrorCode)));
}

template <errc ErrorCode, typename... Args>
[[noreturn]] void throw_user_error(std::string_view fmt_str, Args&&... args) {
throw user_error<ErrorCode>(fmt::format(fmt_str, std::forward<Args>(args)...));
@@ -94,14 +106,24 @@ template <errc ErrorCode>
throw user_error<ErrorCode>(std::string(default_error_string(ErrorCode)));
}

template <errc ErrorCode, typename... Args>
auto make_external_error(std::string_view fmt_str, Args&&... args) {
return external_error<ErrorCode>(fmt::format(fmt_str, std::forward<Args>(args)...));
}

template <errc ErrorCode>
auto make_external_error() {
return external_error<ErrorCode>(std::string(default_error_string(ErrorCode)));
}

template <errc ErrorCode, typename... Args>
[[noreturn]] void throw_external_error(std::string_view fmt_str, Args&&... args) {
throw external_error<ErrorCode>(fmt::format(fmt_str, std::forward<Args>(args)...));
throw make_external_error<ErrorCode>(fmt::format(fmt_str, std::forward<Args>(args)...));
}

template <errc ErrorCode>
[[noreturn]] void throw_external_error() {
throw external_error<ErrorCode>(std::string(default_error_string(ErrorCode)));
throw make_external_error<ErrorCode>(std::string(default_error_string(ErrorCode)));
}

} // namespace dds

+15 -0 src/dds/error/nonesuch.cpp

@@ -0,0 +1,15 @@
#include "./nonesuch.hpp"

#include <dds/util/log.hpp>

#include <fansi/styled.hpp>

using namespace dds;
using namespace fansi::literals;

void e_nonesuch::log_error(std::string_view fmt) const noexcept {
dds_log(error, fmt, given);
if (nearest) {
dds_log(error, " (Did you mean '.br.yellow[{}]'?)"_styled, *nearest);
}
}

+19 -0 src/dds/error/nonesuch.hpp

@@ -0,0 +1,19 @@
#pragma once

#include <optional>
#include <string>

namespace dds {

struct e_nonesuch {
std::string given;
std::optional<std::string> nearest;

e_nonesuch(std::string_view gn, std::optional<std::string> nr) noexcept
: given{gn}
, nearest{nr} {}

void log_error(std::string_view fmt) const noexcept;
};

} // namespace dds
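A sketch of the intended pairing with did_you_mean() from dds/dym.hpp, mirroring its use in pkg/db.cpp below (the candidate list here is illustrative):

std::vector<std::string> known{"build", "pkg", "repoman"};
dds::e_nonesuch missing{"bild", dds::did_you_mean("bild", known)};
// Logs the error plus a styled "(Did you mean 'build'?)" suffix:
missing.log_error("No such subcommand '{}'");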

+17 -0 src/dds/error/on_error.hpp

@@ -0,0 +1,17 @@
#pragma once

#include <boost/leaf/on_error.hpp>

/**
* @brief Generate a callable object that returns the given expression.
*
* Use this as a parameter to leaf's error-loading APIs.
*/
#define DDS_E_ARG(...) ([&] { return __VA_ARGS__; })

/**
* @brief Generate a leaf::on_error object that loads the given expression into the currently
* in-flight error if the current scope is exited via exception or a bad result<>
*/
#define DDS_E_SCOPE(...) \
auto NEO_CONCAT(_err_info_, __LINE__) = boost::leaf::on_error(DDS_E_ARG(__VA_ARGS__))
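A usage sketch; e_url_string is the payload type matched by the handlers in error_handler.cpp (assumed to come from dds/util/result.hpp), and the surrounding function is illustrative:

#include <dds/error/on_error.hpp>
#include <dds/util/result.hpp>

#include <neo/url/parse.hpp>

void fetch_repo(const std::string& url_str) {
    // If this scope exits via an exception or a bad result<>, the URL string
    // is loaded into the in-flight error for the handlers to pick up.
    DDS_E_SCOPE(dds::e_url_string{url_str});
    auto url = neo::url::parse(url_str);  // may throw neo::url_validation_error
    // ... use `url` ...
}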

+12 -0 src/dds/error/result.hpp

@@ -0,0 +1,12 @@
#pragma once

#include "./result_fwd.hpp"

#include <boost/leaf/error.hpp>
#include <boost/leaf/result.hpp>

namespace dds {

using boost::leaf::new_error;

} // namespace dds

+14 -0 src/dds/error/result_fwd.hpp

@@ -0,0 +1,14 @@
#pragma once

namespace boost::leaf {

template <typename T>
class result;

} // namespace boost::leaf

namespace dds {

using boost::leaf::result;

} // namespace dds
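A sketch of the intended use; the function and its error payload are illustrative:

#include <dds/error/nonesuch.hpp>
#include <dds/error/result.hpp>

#include <string_view>

dds::result<int> parse_port(std::string_view s) {
    if (s.empty()) {
        // new_error loads the payload into the in-flight error and yields a
        // value convertible to result<T> in the error state.
        return dds::new_error(dds::e_nonesuch{s, std::nullopt});
    }
    return 8080;  // illustrative happy path
}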

+0 -32 src/dds/package/id.cpp

@@ -1,32 +0,0 @@
#include <dds/package/id.hpp>

#include <dds/error/errors.hpp>

#include <fmt/core.h>

#include <tuple>

using namespace dds;

package_id package_id::parse(std::string_view s) {
auto at_pos = s.find('@');
if (at_pos == s.npos) {
throw_user_error<errc::invalid_pkg_id>("Invalid package ID '{}'", s);
}

auto name = s.substr(0, at_pos);
auto ver_str = s.substr(at_pos + 1);

return {std::string(name), semver::version::parse(ver_str)};
}

package_id::package_id(std::string_view n, semver::version v)
: name(n)
, version(std::move(v)) {
if (name.find('@') != name.npos) {
throw_user_error<errc::invalid_pkg_name>(
"Invalid package name '{}' (The '@' character is not allowed)");
}
}

std::string package_id::to_string() const noexcept { return name + "@" + version.to_string(); }

src/dds/repo/repo.cpp → src/dds/pkg/cache.cpp

@@ -1,14 +1,14 @@
#include "./repo.hpp"
#include "./cache.hpp"

#include <dds/catalog/catalog.hpp>
#include <dds/error/errors.hpp>
#include <dds/pkg/db.hpp>
#include <dds/sdist/dist.hpp>
#include <dds/solve/solve.hpp>
#include <dds/source/dist.hpp>
#include <dds/util/log.hpp>
#include <dds/util/paths.hpp>
#include <dds/util/ranges.hpp>
#include <dds/util/string.hpp>

#include <neo/ref.hpp>
#include <range/v3/action/sort.hpp>
#include <range/v3/action/unique.hpp>
#include <range/v3/range/conversion.hpp>
@@ -20,16 +20,16 @@ using namespace dds;

using namespace ranges;

void repository::_log_blocking(path_ref dirpath) noexcept {
dds_log(warn, "Another process has the repository directory locked [{}]", dirpath.string());
dds_log(warn, "Waiting for repository to be released...");
void pkg_cache::_log_blocking(path_ref dirpath) noexcept {
dds_log(warn, "Another process has the package cache directory locked [{}]", dirpath.string());
dds_log(warn, "Waiting for cache to be released...");
}

void repository::_init_repo_dir(path_ref dirpath) noexcept { fs::create_directories(dirpath); }
void pkg_cache::_init_cache_dir(path_ref dirpath) noexcept { fs::create_directories(dirpath); }

fs::path repository::default_local_path() noexcept { return dds_data_dir() / "repo"; }
fs::path pkg_cache::default_local_path() noexcept { return dds_data_dir() / "pkg"; }

repository repository::_open_for_directory(bool writeable, path_ref dirpath) {
pkg_cache pkg_cache::_open_for_directory(bool writeable, path_ref dirpath) {
auto try_read_sdist = [](path_ref p) -> std::optional<sdist> {
if (starts_with(p.filename().string(), ".")) {
return std::nullopt;
@@ -47,10 +47,11 @@ repository repository::_open_for_directory(bool writeable, path_ref dirpath) {

auto entries =
// Get the top-level `name-version` dirs
view_safe(fs::directory_iterator(dirpath)) //
// // Convert each dir into an `sdist` object
fs::directory_iterator(dirpath) //
| neo::lref //
// Convert each dir into an `sdist` object
| ranges::views::transform(try_read_sdist) //
// // Drop items that failed to load
// Drop items that failed to load
| ranges::views::filter([](auto&& opt) { return opt.has_value(); }) //
| ranges::views::transform([](auto&& opt) { return *opt; }) //
| to<sdist_set>();
@@ -58,20 +59,20 @@ repository repository::_open_for_directory(bool writeable, path_ref dirpath) {
return {writeable, dirpath, std::move(entries)};
}

void repository::add_sdist(const sdist& sd, if_exists ife_action) {
void pkg_cache::add_sdist(const sdist& sd, if_exists ife_action) {
neo_assertion_breadcrumbs("Importing sdist archive", sd.manifest.id.to_string());
if (!_write_enabled) {
dds_log(
critical,
"DDS attempted to write into a repository that wasn't opened with a write-lock. This "
"is a hard bug and should be reported. For the safety and integrity of the local "
"repository, we'll hard-exit immediately.");
dds_log(critical,
"DDS attempted to write into a cache that wasn't opened with a write-lock. This "
"is a hard bug and should be reported. For the safety and integrity of the local "
"cache, we'll hard-exit immediately.");
std::terminate();
}
auto sd_dest = _root / sd.manifest.pkg_id.to_string();
auto sd_dest = _root / sd.manifest.id.to_string();
if (fs::exists(sd_dest)) {
auto msg = fmt::
format("Package '{}' (Importing from [{}]) is already available in the local repo",
sd.manifest.pkg_id.to_string(),
format("Package '{}' (Importing from [{}]) is already available in the local cache",
sd.manifest.id.to_string(),
sd.path.string());
if (ife_action == if_exists::throw_exc) {
throw_user_error<errc::sdist_exists>(msg);
@@ -94,10 +95,10 @@ void repository::add_sdist(const sdist& sd, if_exists ife_action) {
}
fs::rename(tmp_copy, sd_dest);
_sdists.insert(sdist::from_directory(sd_dest));
dds_log(info, "Source distribution '{}' successfully exported", sd.manifest.pkg_id.to_string());
dds_log(info, "Source distribution '{}' successfully exported", sd.manifest.id.to_string());
}

const sdist* repository::find(const package_id& pkg) const noexcept {
const sdist* pkg_cache::find(const pkg_id& pkg) const noexcept {
auto found = _sdists.find(pkg);
if (found == _sdists.end()) {
return nullptr;
@@ -105,22 +106,22 @@ const sdist* repository::find(const package_id& pkg) const noexcept {
return &*found;
}

std::vector<package_id> repository::solve(const std::vector<dependency>& deps,
const catalog& ctlg) const {
std::vector<pkg_id> pkg_cache::solve(const std::vector<dependency>& deps,
const pkg_db& ctlg) const {
return dds::solve(
deps,
[&](std::string_view name) -> std::vector<package_id> {
[&](std::string_view name) -> std::vector<pkg_id> {
auto mine = ranges::views::all(_sdists) //
| ranges::views::filter(
[&](const sdist& sd) { return sd.manifest.pkg_id.name == name; })
| ranges::views::transform([](const sdist& sd) { return sd.manifest.pkg_id; });
[&](const sdist& sd) { return sd.manifest.id.name == name; })
| ranges::views::transform([](const sdist& sd) { return sd.manifest.id; });
auto avail = ctlg.by_name(name);
auto all = ranges::views::concat(mine, avail) | ranges::to_vector;
ranges::sort(all, std::less<>{});
ranges::unique(all, std::less<>{});
ranges::sort(all, std::less{});
ranges::unique(all, std::less{});
return all;
},
[&](const package_id& pkg_id) {
[&](const pkg_id& pkg_id) {
auto found = find(pkg_id);
if (found) {
return found->manifest.dependencies;

src/dds/repo/repo.hpp → src/dds/pkg/cache.hpp

@@ -1,7 +1,7 @@
#pragma once

#include <dds/catalog/catalog.hpp>
#include <dds/source/dist.hpp>
#include <dds/pkg/db.hpp>
#include <dds/sdist/dist.hpp>
#include <dds/util/flock.hpp>
#include <dds/util/fs.hpp>

@@ -15,7 +15,7 @@

namespace dds {

enum repo_flags {
enum pkg_cache_flags {
none = 0b00,
read = none,
create_if_absent = 0b01,
@@ -28,40 +28,40 @@ enum class if_exists {
ignore,
};

inline repo_flags operator|(repo_flags a, repo_flags b) {
return static_cast<repo_flags>(int(a) | int(b));
inline pkg_cache_flags operator|(pkg_cache_flags a, pkg_cache_flags b) {
return static_cast<pkg_cache_flags>(int(a) | int(b));
}

class repository {
class pkg_cache {
using sdist_set = std::set<sdist, sdist_compare_t>;

bool _write_enabled = false;
fs::path _root;
sdist_set _sdists;

repository(bool writeable, path_ref p, sdist_set sds)
pkg_cache(bool writeable, path_ref p, sdist_set sds)
: _write_enabled(writeable)
, _root(p)
, _sdists(std::move(sds)) {}

static void _log_blocking(path_ref dir) noexcept;
static void _init_repo_dir(path_ref dir) noexcept;
static repository _open_for_directory(bool writeable, path_ref);
static void _log_blocking(path_ref dir) noexcept;
static void _init_cache_dir(path_ref dir) noexcept;
static pkg_cache _open_for_directory(bool writeable, path_ref);

public:
template <typename Func>
static decltype(auto) with_repository(path_ref dirpath, repo_flags flags, Func&& fn) {
static decltype(auto) with_cache(path_ref dirpath, pkg_cache_flags flags, Func&& fn) {
if (!fs::exists(dirpath)) {
if (flags & repo_flags::create_if_absent) {
_init_repo_dir(dirpath);
if (flags & pkg_cache_flags::create_if_absent) {
_init_cache_dir(dirpath);
}
}

shared_file_mutex mut{dirpath / ".dds-repo-lock"};
shared_file_mutex mut{dirpath / ".dds-cache-lock"};
std::shared_lock shared_lk{mut, std::defer_lock};
std::unique_lock excl_lk{mut, std::defer_lock};

bool writeable = (flags & repo_flags::write_lock) != repo_flags::none;
bool writeable = (flags & pkg_cache_flags::write_lock) != pkg_cache_flags::none;

if (writeable) {
if (!excl_lk.try_lock()) {
@@ -75,15 +75,15 @@ public:
}
}

auto repo = _open_for_directory(writeable, dirpath);
return std::invoke(NEO_FWD(fn), std::move(repo));
auto cache = _open_for_directory(writeable, dirpath);
return std::invoke(NEO_FWD(fn), std::move(cache));
}

static fs::path default_local_path() noexcept;

void add_sdist(const sdist&, if_exists = if_exists::throw_exc);

const sdist* find(const package_id& pk) const noexcept;
const sdist* find(const pkg_id& pk) const noexcept;

auto iter_sdists() const noexcept {
class ret {
@@ -99,7 +99,7 @@ public:
return r;
}

std::vector<package_id> solve(const std::vector<dependency>& deps, const catalog&) const;
std::vector<pkg_id> solve(const std::vector<dependency>& deps, const pkg_db&) const;
};

} // namespace dds

+380 -0 src/dds/pkg/db.cpp

@@ -0,0 +1,380 @@
#include "./db.hpp"

#include <dds/dym.hpp>
#include <dds/error/errors.hpp>
#include <dds/error/nonesuch.hpp>
#include <dds/solve/solve.hpp>
#include <dds/util/log.hpp>
#include <dds/util/paths.hpp>

#include <json5/parse_data.hpp>
#include <neo/assert.hpp>
#include <neo/concepts.hpp>
#include <neo/sqlite3/exec.hpp>
#include <neo/sqlite3/iter_tuples.hpp>
#include <neo/sqlite3/single.hpp>
#include <neo/sqlite3/transaction.hpp>
#include <nlohmann/json.hpp>
#include <range/v3/range/conversion.hpp>
#include <range/v3/view/join.hpp>
#include <range/v3/view/transform.hpp>

using namespace dds;

namespace nsql = neo::sqlite3;
using namespace neo::sqlite3::literals;

namespace {

void migrate_repodb_1(nsql::database& db) {
db.exec(R"(
CREATE TABLE dds_cat_pkgs (
pkg_id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
version TEXT NOT NULL,
git_url TEXT,
git_ref TEXT,
lm_name TEXT,
lm_namespace TEXT,
description TEXT NOT NULL,
UNIQUE(name, version),
CONSTRAINT has_source_info CHECK(
(
git_url NOT NULL
AND git_ref NOT NULL
)
= 1
),
CONSTRAINT valid_lm_info CHECK(
(
lm_name NOT NULL
AND lm_namespace NOT NULL
)
+
(
lm_name ISNULL
AND lm_namespace ISNULL
)
= 1
)
);

CREATE TABLE dds_cat_pkg_deps (
dep_id INTEGER PRIMARY KEY AUTOINCREMENT,
pkg_id INTEGER NOT NULL REFERENCES dds_cat_pkgs(pkg_id),
dep_name TEXT NOT NULL,
low TEXT NOT NULL,
high TEXT NOT NULL,
UNIQUE(pkg_id, dep_name)
);
)");
}

void migrate_repodb_2(nsql::database& db) {
db.exec(R"(
ALTER TABLE dds_cat_pkgs
ADD COLUMN repo_transform TEXT NOT NULL DEFAULT '[]'
)");
}

void migrate_repodb_3(nsql::database& db) {
db.exec(R"(
CREATE TABLE dds_pkg_remotes (
remote_id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL UNIQUE,
remote_url TEXT NOT NULL,
db_etag TEXT,
db_mtime TEXT
);

CREATE TABLE dds_pkgs (
pkg_id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
version TEXT NOT NULL,
description TEXT NOT NULL,
remote_url TEXT NOT NULL,
remote_id INTEGER
REFERENCES dds_pkg_remotes
ON DELETE CASCADE,
UNIQUE (name, version, remote_id)
);

INSERT INTO dds_pkgs(pkg_id,
name,
version,
description,
remote_url)
SELECT pkg_id,
name,
version,
description,
'git+' || git_url || (
CASE
WHEN lm_name ISNULL THEN ''
ELSE ('?lm=' || lm_namespace || '/' || lm_name)
END
) || '#' || git_ref
FROM dds_cat_pkgs;

CREATE TABLE dds_pkg_deps (
dep_id INTEGER PRIMARY KEY AUTOINCREMENT,
pkg_id INTEGER
NOT NULL
REFERENCES dds_pkgs(pkg_id)
ON DELETE CASCADE,
dep_name TEXT NOT NULL,
low TEXT NOT NULL,
high TEXT NOT NULL,
UNIQUE(pkg_id, dep_name)
);

INSERT INTO dds_pkg_deps SELECT * FROM dds_cat_pkg_deps;

DROP TABLE dds_cat_pkg_deps;
DROP TABLE dds_cat_pkgs;
)");
}

void do_store_pkg(neo::sqlite3::database& db,
neo::sqlite3::statement_cache& st_cache,
const pkg_listing& pkg) {
dds_log(debug, "Recording package {}@{}", pkg.ident.name, pkg.ident.version.to_string());
auto& store_pkg_st = st_cache(R"(
INSERT OR REPLACE INTO dds_pkgs
(name, version, remote_url, description)
VALUES
(?, ?, ?, ?)
)"_sql);
nsql::exec(store_pkg_st,
pkg.ident.name,
pkg.ident.version.to_string(),
pkg.remote_pkg.to_url_string(),
pkg.description);

auto db_pkg_id = db.last_insert_rowid();
auto& new_dep_st = st_cache(R"(
INSERT INTO dds_pkg_deps (
pkg_id,
dep_name,
low,
high
) VALUES (
?,
?,
?,
?
)
)"_sql);
for (const auto& dep : pkg.deps) {
new_dep_st.reset();
assert(dep.versions.num_intervals() == 1);
auto iv_1 = *dep.versions.iter_intervals().begin();
dds_log(trace, " Depends on: {}", dep.to_string());
nsql::exec(new_dep_st, db_pkg_id, dep.name, iv_1.low.to_string(), iv_1.high.to_string());
}
}

void ensure_migrated(nsql::database& db) {
db.exec(R"(
PRAGMA foreign_keys = 1;
CREATE TABLE IF NOT EXISTS dds_cat_meta AS
WITH init(meta) AS (VALUES ('{"version": 0}'))
SELECT * FROM init;
)");
nsql::transaction_guard tr{db};

auto meta_st = db.prepare("SELECT meta FROM dds_cat_meta");
auto [meta_json] = nsql::unpack_single<std::string>(meta_st);

auto meta = nlohmann::json::parse(meta_json);
if (!meta.is_object()) {
dds_log(critical, "Root of database dds_cat_meta cell should be a JSON object");
throw_external_error<errc::corrupted_catalog_db>();
}

auto version_ = meta["version"];
if (!version_.is_number_integer()) {
dds_log(critical, "'version' key in dds_cat_meta is not an integer");
throw_external_error<errc::corrupted_catalog_db>(
"The database metadata is invalid [bad dds_meta.version]");
}

constexpr int current_database_version = 3;

int version = version_;

if (version > current_database_version) {
dds_log(critical,
"Catalog version is {}, but we only support up to {}",
version,
current_database_version);
throw_external_error<errc::catalog_too_new>();
}

if (version < 1) {
dds_log(debug, "Applying pkg_db migration 1");
migrate_repodb_1(db);
}
if (version < 2) {
dds_log(debug, "Applying pkg_db migration 2");
migrate_repodb_2(db);
}
if (version < 3) {
dds_log(debug, "Applying pkg_db migration 3");
migrate_repodb_3(db);
}
meta["version"] = current_database_version;
exec(db.prepare("UPDATE dds_cat_meta SET meta=?"), meta.dump());
}

} // namespace

fs::path pkg_db::default_path() noexcept { return dds_data_dir() / "pkgs.db"; }

pkg_db pkg_db::open(const std::string& db_path) {
if (db_path != ":memory:") {
auto pardir = fs::weakly_canonical(db_path).parent_path();
fs::create_directories(pardir);
}
dds_log(debug, "Opening package database [{}]", db_path);
auto db = nsql::database::open(db_path);
try {
ensure_migrated(db);
} catch (const nsql::sqlite3_error& e) {
dds_log(critical,
"Failed to load the package database. It appears to be invalid/corrupted. The "
"exception message is: {}",
e.what());
throw_external_error<errc::corrupted_catalog_db>();
}
dds_log(trace, "Successfully opened database");
return pkg_db(std::move(db));
}

pkg_db::pkg_db(nsql::database db)
: _db(std::move(db)) {}

void pkg_db::store(const pkg_listing& pkg) {
nsql::transaction_guard tr{_db};
do_store_pkg(_db, _stmt_cache, pkg);
}

result<pkg_listing> pkg_db::get(const pkg_id& pk_id) const noexcept {
auto ver_str = pk_id.version.to_string();
dds_log(trace, "Lookup package {}@{}", pk_id.name, ver_str);
auto& st = _stmt_cache(R"(
SELECT
pkg_id,
name,
version,
remote_url,
description
FROM dds_pkgs
WHERE name = ?1 AND version = ?2
ORDER BY pkg_id DESC
)"_sql);
st.reset();
st.bindings() = std::forward_as_tuple(pk_id.name, ver_str);
auto ec = st.step(std::nothrow);
if (ec == nsql::errc::done) {
return new_error([&] {
auto all_ids = this->all();
auto id_strings
= ranges::views::transform(all_ids, [&](auto id) { return id.to_string(); });
return e_nonesuch{pk_id.to_string(), did_you_mean(pk_id.to_string(), id_strings)};
});
}
neo_assert_always(invariant,
ec == nsql::errc::row,
"Failed to pull a package from the database",
ec,
pk_id.to_string(),
nsql::error_category().message(int(ec)));

const auto& [pkg_id, name, version, remote_url, description]
= st.row().unpack<std::int64_t, std::string, std::string, std::string, std::string>();

ec = st.step(std::nothrow);
if (ec == nsql::errc::row) {
dds_log(warn,
"There is more than one entry for package {} in the database. One will be "
"chosen arbitrarily.",
pk_id.to_string());
}

neo_assert(invariant,
pk_id.name == name && pk_id.version == semver::version::parse(version),
"Package metadata does not match",
pk_id.to_string(),
name,
version);

auto deps = dependencies_of(pk_id);

auto info = pkg_listing{
.ident = pk_id,
.deps = std::move(deps),
.description = std::move(description),
.remote_pkg = any_remote_pkg::from_url(neo::url::parse(remote_url)),
};

return info;
}

auto pair_to_pkg_id = [](auto&& pair) {
const auto& [name, ver] = pair;
return pkg_id{name, semver::version::parse(ver)};
};

std::vector<pkg_id> pkg_db::all() const noexcept {
return nsql::exec_tuples<std::string, std::string>(
_stmt_cache("SELECT name, version FROM dds_pkgs"_sql))
| neo::lref //
| ranges::views::transform(pair_to_pkg_id) //
| ranges::to_vector;
}

std::vector<pkg_id> pkg_db::by_name(std::string_view sv) const noexcept {
return nsql::exec_tuples<std::string, std::string>( //
_stmt_cache(
R"(
SELECT name, version
FROM dds_pkgs
WHERE name = ?
ORDER BY pkg_id DESC
)"_sql),
sv) //
| neo::lref //
| ranges::views::transform(pair_to_pkg_id) //
| ranges::to_vector;
}

std::vector<dependency> pkg_db::dependencies_of(const pkg_id& pkg) const noexcept {
dds_log(trace, "Lookup dependencies of {}@{}", pkg.name, pkg.version.to_string());
return nsql::exec_tuples<std::string,
std::string,
std::string>( //
_stmt_cache(
R"(
WITH this_pkg_id AS (
SELECT pkg_id
FROM dds_pkgs
WHERE name = ? AND version = ?
)
SELECT dep_name, low, high
FROM dds_pkg_deps
WHERE pkg_id IN this_pkg_id
ORDER BY dep_name
)"_sql),
pkg.name,
pkg.version.to_string()) //
| neo::lref //
| ranges::views::transform([](auto&& pair) {
auto& [name, low, high] = pair;
auto dep
= dependency{name, {semver::version::parse(low), semver::version::parse(high)}};
dds_log(trace, " Depends: {}", dep.to_string());
return dep;
}) //
| ranges::to_vector;
}

+47 -0 src/dds/pkg/db.hpp

@@ -0,0 +1,47 @@
#pragma once

#include "./listing.hpp"

#include <dds/error/result.hpp>
#include <dds/util/fs.hpp>

#include <neo/sqlite3/database.hpp>
#include <neo/sqlite3/statement.hpp>
#include <neo/sqlite3/statement_cache.hpp>

#include <string>
#include <vector>

namespace dds {

struct dependency;
struct pkg_id;

class pkg_db {
neo::sqlite3::database _db;
mutable neo::sqlite3::statement_cache _stmt_cache{_db};

explicit pkg_db(neo::sqlite3::database db);
pkg_db(const pkg_db&) = delete;

public:
pkg_db(pkg_db&&) = default;
pkg_db& operator=(pkg_db&&) = default;

static pkg_db open(const std::string& db_path);
static pkg_db open(path_ref db_path) { return open(db_path.string()); }

static fs::path default_path() noexcept;

void store(const pkg_listing& info);
result<pkg_listing> get(const pkg_id& id) const noexcept;

std::vector<pkg_id> all() const noexcept;
std::vector<pkg_id> by_name(std::string_view sv) const noexcept;
std::vector<dependency> dependencies_of(const pkg_id& pkg) const noexcept;

auto& database() noexcept { return _db; }
auto& database() const noexcept { return _db; }
};

} // namespace dds

+75 -0 src/dds/pkg/db.test.cpp

@@ -0,0 +1,75 @@
#include <dds/pkg/db.hpp>

#include <catch2/catch.hpp>

using namespace std::literals;

TEST_CASE("Create a simple database") {
// Just create and run migrations on an in-memory database
auto repo = dds::pkg_db::open(":memory:"s);
}

TEST_CASE("Open a database in a non-ascii path") {
::setlocale(LC_ALL, ".utf8");
auto THIS_DIR = dds::fs::canonical(__FILE__).parent_path();
auto BUILD_DIR
= (THIS_DIR.parent_path().parent_path().parent_path() / "_build").lexically_normal();
auto subdir = BUILD_DIR / "Ю́рий Алексе́евич Гага́рин";
dds::fs::remove_all(subdir);
dds::pkg_db::open(subdir / "test.db");
dds::fs::remove_all(subdir);
}

struct pkg_db_test_case {
dds::pkg_db db = dds::pkg_db::open(":memory:"s);
};

TEST_CASE_METHOD(pkg_db_test_case, "Store a simple package") {
db.store(dds::pkg_listing{
dds::pkg_id{"foo", semver::version::parse("1.2.3")},
{},
"example",
dds::any_remote_pkg::from_url(neo::url::parse("git+http://example.com#master")),
});

auto pkgs = db.by_name("foo");
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].name == "foo");
CHECK(pkgs[0].version == semver::version::parse("1.2.3"));
auto info = db.get(pkgs[0]);
REQUIRE(info);
CHECK(info->ident == pkgs[0]);
CHECK(info->deps.empty());
CHECK(info->remote_pkg.to_url_string() == "git+http://example.com#master");

// Update the entry with a new git remote ref
CHECK_NOTHROW(db.store(dds::pkg_listing{
dds::pkg_id{"foo", semver::version::parse("1.2.3")},
{},
"example",
dds::any_remote_pkg::from_url(neo::url::parse("git+http://example.com#develop")),
}));
// The previous pkg_id is still a valid lookup key
info = db.get(pkgs[0]);
REQUIRE(info);
CHECK(info->remote_pkg.to_url_string() == "git+http://example.com#develop");
}

TEST_CASE_METHOD(pkg_db_test_case, "Package requirements") {
db.store(dds::pkg_listing{
dds::pkg_id{"foo", semver::version::parse("1.2.3")},
{
{"bar", {semver::version::parse("1.2.3"), semver::version::parse("1.4.0")}},
{"baz", {semver::version::parse("5.3.0"), semver::version::parse("6.0.0")}},
},
"example",
dds::any_remote_pkg::from_url(neo::url::parse("git+http://example.com#master")),
});
auto pkgs = db.by_name("foo");
REQUIRE(pkgs.size() == 1);
CHECK(pkgs[0].name == "foo");
auto deps = db.dependencies_of(pkgs[0]);
CHECK(deps.size() == 2);
CHECK(deps[0].name == "bar");
CHECK(deps[1].name == "baz");
}

+33 -0 src/dds/pkg/get/base.cpp

@@ -0,0 +1,33 @@
#include "./base.hpp"

#include <dds/pkg/id.hpp>
#include <dds/util/log.hpp>

#include <nlohmann/json.hpp>

using namespace dds;

// void remote_pkg_base::generate_auto_lib_files(const pkg_id& pid, path_ref root) const {
// if (auto_lib.has_value()) {
// dds_log(info, "Generating library data automatically");

// auto pkg_strm = open(root / "package.json5", std::ios::binary | std::ios::out);
// auto man_json = nlohmann::json::object();
// man_json["name"] = pid.name;
// man_json["version"] = pid.version.to_string();
// man_json["namespace"] = auto_lib->namespace_;
// pkg_strm << nlohmann::to_string(man_json);

// auto lib_strm = open(root / "library.json5", std::ios::binary | std::ios::out);
// auto lib_json = nlohmann::json::object();
// lib_json["name"] = auto_lib->name;
// lib_strm << nlohmann::to_string(lib_json);
// }
// }

void remote_pkg_base::get_sdist(path_ref dest) const { get_raw_directory(dest); }
void remote_pkg_base::get_raw_directory(path_ref dest) const { do_get_raw(dest); }

neo::url remote_pkg_base::to_url() const { return do_to_url(); }

std::string remote_pkg_base::to_url_string() const { return to_url().to_string(); }

+26 -0 src/dds/pkg/get/base.hpp

@@ -0,0 +1,26 @@
#pragma once

#include <libman/package.hpp>
#include <neo/concepts.hpp>
#include <neo/url.hpp>

#include <optional>
#include <vector>

namespace dds {

struct pkg_id;

class remote_pkg_base {
virtual void do_get_raw(path_ref dest) const = 0;
virtual neo::url do_to_url() const = 0;

public:
void get_sdist(path_ref dest) const;
void get_raw_directory(path_ref dest) const;

neo::url to_url() const;
std::string to_url_string() const;
};

} // namespace dds

+41 -0 src/dds/pkg/get/dds_http.cpp

@@ -0,0 +1,41 @@
#include "./dds_http.hpp"

#include "./http.hpp"

#include <fmt/core.h>

using namespace dds;

neo::url dds_http_remote_pkg::do_to_url() const {
auto ret = repo_url;
ret.scheme = "dds+" + ret.scheme;
ret.path = fmt::format("{}/{}", ret.path, pkg_id.to_string());
return ret;
}

dds_http_remote_pkg dds_http_remote_pkg::from_url(const neo::url& url) {
auto repo_url = url;
if (repo_url.scheme.starts_with("dds+")) {
repo_url.scheme = repo_url.scheme.substr(4);
} else if (repo_url.scheme.ends_with("+dds")) {
repo_url.scheme = repo_url.scheme.substr(0, repo_url.scheme.size() - 4);
} else {
// Nothing to trim
}

fs::path full_path = repo_url.path;
repo_url.path = full_path.parent_path().generic_string();
auto pkg_id = dds::pkg_id::parse(full_path.filename().string());

return {repo_url, pkg_id};
}

void dds_http_remote_pkg::do_get_raw(path_ref dest) const {
auto http_url = repo_url;
fs::path path = fs::path(repo_url.path) / "pkg" / pkg_id.name / pkg_id.version.to_string()
/ "sdist.tar.gz";
http_url.path = path.lexically_normal().generic_string();
http_remote_pkg http;
http.url = http_url;
http.get_raw_directory(dest);
}

+31 -0 src/dds/pkg/get/dds_http.hpp

@@ -0,0 +1,31 @@
#pragma once

#include "./base.hpp"

#include <dds/pkg/id.hpp>

#include <neo/url.hpp>

#include <string>
#include <string_view>

namespace dds {

class dds_http_remote_pkg : public remote_pkg_base {
void do_get_raw(path_ref) const override;
neo::url do_to_url() const override;

public:
neo::url repo_url;
dds::pkg_id pkg_id;

dds_http_remote_pkg() = default;

dds_http_remote_pkg(neo::url u, dds::pkg_id pid)
: repo_url(u)
, pkg_id(pid) {}

static dds_http_remote_pkg from_url(const neo::url& url);
};

} // namespace dds

+ 12
- 0
src/dds/pkg/get/dds_http.test.cpp View File

@@ -0,0 +1,12 @@
#include "./dds_http.hpp"

#include <catch2/catch.hpp>

TEST_CASE("Parse a URL") {
auto pkg = dds::dds_http_remote_pkg::from_url(
neo::url::parse("dds+http://foo.bar/repo-dir/egg@1.2.3"));
CHECK(pkg.repo_url.to_string() == "http://foo.bar/repo-dir");
CHECK(pkg.pkg_id.name == "egg");
CHECK(pkg.pkg_id.version.to_string() == "1.2.3");
CHECK(pkg.to_url_string() == "dds+http://foo.bar/repo-dir/egg@1.2.3");
}
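
The dds+http scheme is thin sugar over plain HTTP: the repo URL plus the
package ID fully determine the archive location. A sketch of that mapping
(the helper name is hypothetical; the format mirrors the path composition in
do_get_raw() above):

#include <dds/pkg/get/dds_http.hpp>

#include <fmt/core.h>

#include <string>

// Hypothetical helper: the archive URL that do_get_raw() ultimately requests.
std::string sdist_archive_url(const dds::dds_http_remote_pkg& pkg) {
return fmt::format("{}/pkg/{}/{}/sdist.tar.gz",
pkg.repo_url.to_string(),
pkg.pkg_id.name,
pkg.pkg_id.version.to_string());
}

// dds+http://foo.bar/repo-dir/egg@1.2.3
// -> http://foo.bar/repo-dir/pkg/egg/1.2.3/sdist.tar.gz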

+ 71
- 0
src/dds/pkg/get/get.cpp View File

@@ -0,0 +1,71 @@
#include "./get.hpp"

#include <dds/error/errors.hpp>
#include <dds/pkg/cache.hpp>
#include <dds/pkg/db.hpp>
#include <dds/util/log.hpp>
#include <dds/util/parallel.hpp>

#include <neo/assert.hpp>
#include <range/v3/view/filter.hpp>
#include <range/v3/view/transform.hpp>

using namespace dds;

namespace {

temporary_sdist do_pull_sdist(const any_remote_pkg& rpkg) {
auto tmpdir = dds::temporary_dir::create();

rpkg.get_sdist(tmpdir.path());

auto sd_tmp_dir = dds::temporary_dir::create();
sdist_params params{
.project_dir = tmpdir.path(),
.dest_path = sd_tmp_dir.path(),
.force = true,
};
auto sd = create_sdist(params);
return {sd_tmp_dir, sd};
}

} // namespace

temporary_sdist dds::get_package_sdist(const pkg_listing& pkg) {
auto tsd = do_pull_sdist(pkg.remote_pkg);
if (!(tsd.sdist.manifest.id == pkg.ident)) {
throw_external_error<errc::sdist_ident_mismatch>(
"The package name@version in the generated source distribution does not match the name "
"listed in the remote listing file (expected '{}', but got '{}')",
pkg.ident.to_string(),
tsd.sdist.manifest.id.to_string());
}
return tsd;
}

void dds::get_all(const std::vector<pkg_id>& pkgs, pkg_cache& repo, const pkg_db& cat) {
std::mutex repo_mut;

auto absent_pkg_infos
= pkgs //
| ranges::views::filter([&](auto pk) {
std::scoped_lock lk{repo_mut};
return !repo.find(pk);
})
| ranges::views::transform([&](auto id) {
auto info = cat.get(id);
neo_assert(invariant, !!info, "No database entry for package id?", id.to_string());
return *info;
});

auto okay = parallel_run(absent_pkg_infos, 8, [&](pkg_listing inf) {
dds_log(info, "Download package: {}", inf.ident.to_string());
auto tsd = get_package_sdist(inf);
std::scoped_lock lk{repo_mut};
repo.add_sdist(tsd.sdist, if_exists::throw_exc);
});

if (!okay) {
throw_external_error<errc::dependency_resolve_failure>("Downloading of packages failed.");
}
}
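
get_all() filters out packages already present in the cache, resolves the
rest to their remote listings in the database, and downloads up to eight of
them in parallel; the mutex serializes both the cache probes and the final
add_sdist() insertions. A hedged usage sketch (how the cache and database
handles are obtained is assumed here; the real call sites are the CLI
commands):

// Illustration only: fetch two packages into the cache if absent.
// `cache` is a dds::pkg_cache and `db` a dds::pkg_db, opened elsewhere.
std::vector<dds::pkg_id> wanted = {
dds::pkg_id::parse("egg@1.2.3"),
dds::pkg_id::parse("neo-fun@0.6.0"),
};
dds::get_all(wanted, cache, db); // throws if any download fails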

+ 16
- 0
src/dds/pkg/get/get.hpp View File

@@ -0,0 +1,16 @@
#pragma once

#include <dds/pkg/id.hpp>
#include <dds/sdist/dist.hpp>
#include <dds/temp.hpp>

#include <vector>

namespace dds {

class pkg_cache;
class pkg_db;
struct pkg_listing;

temporary_sdist get_package_sdist(const pkg_listing&);

void get_all(const std::vector<pkg_id>& pkgs, dds::pkg_cache& repo, const pkg_db& cat);

} // namespace dds

+ 59
- 0
src/dds/pkg/get/git.cpp View File

@@ -0,0 +1,59 @@
#include "./git.hpp"

#include <dds/error/errors.hpp>
#include <dds/proc.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <neo/url.hpp>
#include <neo/url/query.hpp>

using namespace dds;
using namespace std::literals;

git_remote_pkg git_remote_pkg::from_url(const neo::url& url) {
if (!url.fragment) {
BOOST_LEAF_THROW_EXCEPTION(
user_error<errc::invalid_remote_url>(
"Git URL requires a fragment specified the Git ref to clone"),
DDS_E_ARG(e_url_string{url.to_string()}));
}
git_remote_pkg ret;
ret.url = url;
if (url.scheme.starts_with("git+")) {
ret.url.scheme = url.scheme.substr(4);
} else if (url.scheme.ends_with("+git")) {
ret.url.scheme = url.scheme.substr(0, url.scheme.size() - 4);
} else {
// Leave the URL as-is
}
ret.ref = *url.fragment;
ret.url.fragment.reset();
return ret;
}

neo::url git_remote_pkg::do_to_url() const {
neo::url ret = url;
ret.fragment = ref;
if (ret.scheme != "git") {
ret.scheme = "git+" + ret.scheme;
}
return ret;
}

void git_remote_pkg::do_get_raw(path_ref dest) const {
fs::remove(dest);
dds_log(info, "Clone Git repository [{}] (at {}) to [{}]", url.to_string(), ref, dest.string());
auto command
= {"git"s, "clone"s, "--depth=1"s, "--branch"s, ref, url.to_string(), dest.string()};
auto git_res = run_proc(command);
if (!git_res.okay()) {
BOOST_LEAF_THROW_EXCEPTION(
make_external_error<errc::git_clone_failure>(
"Git clone operation failed [Git command: {}] [Exitted {}]:\n{}",
quote_command(command),
git_res.retc,
git_res.output),
url);
}
}

+ 22
- 0
src/dds/pkg/get/git.hpp View File

@@ -0,0 +1,22 @@
#pragma once

#include "./base.hpp"

#include <neo/url.hpp>

#include <string>

namespace dds {

class git_remote_pkg : public remote_pkg_base {
void do_get_raw(path_ref) const override;
neo::url do_to_url() const override;

public:
neo::url url;
std::string ref;

static git_remote_pkg from_url(const neo::url&);
};

} // namespace dds

+ 9
- 0
src/dds/pkg/get/git.test.cpp View File

@@ -0,0 +1,9 @@
#include "./git.hpp"

#include <catch2/catch.hpp>

TEST_CASE("Round-trip a URL") {
auto git = dds::git_remote_pkg::from_url(
neo::url::parse("http://github.com/vector-of-bool/neo-fun.git#0.4.0"));
CHECK(git.to_url_string() == "git+http://github.com/vector-of-bool/neo-fun.git#0.4.0");
}
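
The URL fragment carries the Git ref, and do_get_raw() performs a shallow,
single-branch clone of that ref. A sketch of the scheme handling (the URL is
illustrative):

auto pkg = dds::git_remote_pkg::from_url(
neo::url::parse("git+https://example.com/some/repo.git#main"));
// The "git+" prefix is stripped and the fragment becomes the ref:
// pkg.url -> "https://example.com/some/repo.git", pkg.ref -> "main"
// do_get_raw(dest) then runs, roughly:
// git clone --depth=1 --branch main https://example.com/some/repo.git <dest>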

+ 42
- 0
src/dds/pkg/get/github.cpp View File

@@ -0,0 +1,42 @@
#include "./github.hpp"

#include "./http.hpp"

#include <dds/error/errors.hpp>
#include <dds/util/result.hpp>

#include <fmt/format.h>
#include <range/v3/iterator/operations.hpp>

using namespace dds;

neo::url github_remote_pkg::do_to_url() const {
neo::url ret;
ret.scheme = "github";
ret.path = fmt::format("{}/{}/{}", owner, reponame, ref);
return ret;
}

void github_remote_pkg::do_get_raw(path_ref dest) const {
http_remote_pkg http;
auto new_url = fmt::format("https://github.com/{}/{}/archive/{}.tar.gz", owner, reponame, ref);
http.url = neo::url::parse(new_url);
http.strip_n_components = 1;
http.get_raw_directory(dest);
}

github_remote_pkg github_remote_pkg::from_url(const neo::url& url) {
fs::path path = url.path;
if (ranges::distance(path) != 3) {
BOOST_LEAF_THROW_EXCEPTION(make_user_error<errc::invalid_remote_url>(
"'github:' URLs should have a path with three segments"),
url);
}
github_remote_pkg ret;
// Split the three path elements as {owner}/{reponame}/{git-ref}
auto elem_iter = path.begin();
ret.owner = (*elem_iter++).generic_string();
ret.reponame = (*elem_iter++).generic_string();
ret.ref = (*elem_iter).generic_string();
return ret;
}

+ 24
- 0
src/dds/pkg/get/github.hpp View File

@@ -0,0 +1,24 @@
#pragma once

#include "./base.hpp"

#include <neo/url.hpp>

#include <string>
#include <string_view>

namespace dds {

class github_remote_pkg : public remote_pkg_base {
void do_get_raw(path_ref) const override;
neo::url do_to_url() const override;

public:
std::string owner;
std::string reponame;
std::string ref;

static github_remote_pkg from_url(const neo::url&);
};

} // namespace dds

+ 11
- 0
src/dds/pkg/get/github.test.cpp View File

@@ -0,0 +1,11 @@
#include "./github.hpp"

#include <catch2/catch.hpp>

TEST_CASE("Parse a github: URL") {
auto gh_pkg
= dds::github_remote_pkg::from_url(neo::url::parse("github:vector-of-bool/neo-fun/0.6.0"));
CHECK(gh_pkg.owner == "vector-of-bool");
CHECK(gh_pkg.reponame == "neo-fun");
CHECK(gh_pkg.ref == "0.6.0");
}
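
A github: URL is shorthand for downloading a GitHub source archive:
do_get_raw() rewrites it to the archive tarball URL and strips the single
top-level directory that GitHub places in its archives. For the URL in the
test above, the effective fetch is (derived from the format string in
github.cpp):

dds::github_remote_pkg gh;
gh.owner = "vector-of-bool";
gh.reponame = "neo-fun";
gh.ref = "0.6.0";
// gh.get_raw_directory(dest) downloads
// https://github.com/vector-of-bool/neo-fun/archive/0.6.0.tar.gz
// and expands it with strip_n_components = 1, dropping the
// "neo-fun-0.6.0/" prefix that GitHub archives contain.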

+ 123
- 0
src/dds/pkg/get/http.cpp View File

@@ -0,0 +1,123 @@
#include "./http.hpp"

#include <dds/error/errors.hpp>
#include <dds/temp.hpp>
#include <dds/util/http/pool.hpp>
#include <dds/util/log.hpp>
#include <dds/util/result.hpp>

#include <neo/io/stream/buffers.hpp>
#include <neo/io/stream/file.hpp>
#include <neo/tar/util.hpp>
#include <neo/url.hpp>
#include <neo/url/query.hpp>

#include <fstream>

using namespace dds;

void http_remote_pkg::do_get_raw(path_ref dest) const {
dds_log(trace, "Downloading remote package via HTTP from [{}]", url.to_string());

if (url.scheme != "http" && url.scheme != "https") {
dds_log(error, "Unsupported URL scheme '{}' (in [{}])", url.scheme, url.to_string());
BOOST_LEAF_THROW_EXCEPTION(user_error<errc::invalid_remote_url>(
"The given URL download is not supported. (Only 'http' and "
"'https' URLs are supported)"),
DDS_E_ARG(e_url_string{url.to_string()}));
}

neo_assert(invariant,
!!url.host,
"The given URL did not have a host part. This shouldn't be possible... Please file "
"a bug report.",
url.to_string());

// Create a temporary directory in which to download the archive
auto tdir = dds::temporary_dir::create();
// For ease of debugging, use the filename from the URL, if possible
auto fname = fs::path(url.path).filename();
if (fname.empty()) {
fname = "dds-download.tmp";
}
auto dl_path = tdir.path() / fname;
fs::create_directories(tdir.path());

// Download the file!
{
auto& pool = http_pool::thread_local_pool();
auto [client, resp] = pool.request(url);
auto dl_file = neo::file_stream::open(dl_path, neo::open_mode::write);
client.recv_body_into(resp, neo::stream_io_buffers{dl_file});
}

fs::create_directories(fs::absolute(dest));
dds_log(debug, "Expanding downloaded package archive into [{}]", dest.string());
std::ifstream infile{dl_path, std::ios::binary};
try {
neo::expand_directory_targz(
neo::expand_options{
.destination_directory = dest,
.input_name = dl_path.string(),
.strip_components = this->strip_n_components,
},
infile);
} catch (const std::runtime_error& err) {
throw_external_error<errc::invalid_remote_url>(
"The file downloaded from [{}] failed to extract (Inner error: {})",
url.to_string(),
err.what());
}
}

http_remote_pkg http_remote_pkg::from_url(const neo::url& url) {
neo_assert(expects,
url.scheme == neo::oper::any_of("http", "https"),
"Invalid URL for an HTTP remote",
url.to_string());

neo::url ret_url = url;
if (url.fragment) {
dds_log(warn,
"Fragment '{}' in URL [{}] will have no effect",
*url.fragment,
url.to_string());
ret_url.fragment.reset();
}

ret_url.query = {};

unsigned n_strpcmp = 0;

if (url.query) {
std::string query_acc;

neo::basic_query_string_view qsv{*url.query};
for (auto qstr : qsv) {
if (qstr.key_raw() == "__dds_strpcmp") {
n_strpcmp = static_cast<unsigned>(std::stoul(qstr.value_decoded()));
} else {
if (!query_acc.empty()) {
query_acc.push_back(';');
}
query_acc.append(qstr.string());
}
}
if (!query_acc.empty()) {
ret_url.query = query_acc;
}
}

return {ret_url, n_strpcmp};
}

neo::url http_remote_pkg::do_to_url() const {
auto ret_url = url;
if (strip_n_components != 0) {
auto strpcmp_param = fmt::format("__dds_strpcmp={}", strip_n_components);
if (ret_url.query) {
*ret_url.query += ";" + strpcmp_param;
} else {
ret_url.query = strpcmp_param;
}
}
return ret_url;
}
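
The private __dds_strpcmp query parameter is how strip_n_components survives
a round trip through a URL string: from_url() consumes it (removing it from
the stored query), and do_to_url() re-appends it whenever the count is
nonzero. A sketch (the URL is illustrative):

auto pkg = dds::http_remote_pkg::from_url(
neo::url::parse("https://example.com/pkgs/foo.tar.gz?__dds_strpcmp=1"));
// pkg.strip_n_components == 1, and the stored URL's query no longer
// mentions __dds_strpcmp. Converting back re-encodes it:
// pkg.to_url_string()
// -> "https://example.com/pkgs/foo.tar.gz?__dds_strpcmp=1"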

+ 29
- 0
src/dds/pkg/get/http.hpp View File

@@ -0,0 +1,29 @@
#pragma once

#include "./base.hpp"

#include <neo/url.hpp>

#include <string>
#include <string_view>

namespace dds {

class http_remote_pkg : public remote_pkg_base {
void do_get_raw(path_ref) const override;
neo::url do_to_url() const override;

public:
neo::url url;
unsigned strip_n_components = 0;

http_remote_pkg() = default;

http_remote_pkg(neo::url u, unsigned strpcmp)
: url(u)
, strip_n_components(strpcmp) {}

static http_remote_pkg from_url(const neo::url& url);
};

} // namespace dds

+ 0
- 0
src/dds/pkg/get/http.test.cpp View File


Some files were not shown because too many files changed in this diff
