.pytest_cache/
.vagrant/

## Generated by CI scripts:
# A copy of OpenSSL for Windows:
external/OpenSSL
.docker-ccache/
*.egg-info
*.stamp

.SILENT:

.PHONY: \
    docs docs-server docs-watch docs-sync-server linux-ci macos-ci \
    vagrant-freebsd-ci site alpine-static-ci _alpine-static-ci poetry-setup \
    full-ci dev-build release-build

_invalid:
	echo "Specify a target name to execute"
	exit 1

clean:
	rm -f -vr -- $(shell find -name __pycache__ -type d)
	rm -f -vr -- _build/ _prebuilt/
	rm -f -v -- $(shell find -name "*.stamp" -type f)

docs:
	sphinx-build -b html \
		--reload-delay 300 \
		--watch **/*.html

.poetry.stamp: poetry.lock
	poetry install --no-dev
	touch .poetry.stamp

poetry-setup: .poetry.stamp

full-ci: poetry-setup
	poetry run dds-ci --clean

dev-build: poetry-setup
	poetry run dds-ci --rapid

release-build: poetry-setup
	poetry run dds-ci --no-test
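
# Illustrative usage of the targets above: the `.poetry.stamp` rule pays the
# `poetry install` cost once, after which `dds-ci` runs inside Poetry's
# environment. For example:
#
#   make dev-build       # poetry run dds-ci --rapid
#   make release-build   # poetry run dds-ci --no-test
#   make full-ci         # poetry run dds-ci --clean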

macos-ci: full-ci
	mv _build/dds _build/dds-macos-x64

linux-ci: full-ci
	mv _build/dds _build/dds-linux-x64

_alpine-static-ci:
	poetry install --no-dev
	# Alpine Linux does not ship with ASan nor UBSan, so we can't use them in
	# our test build. Just use the same toolchain for both builds. CCache will
	# also speed this up.
	poetry run dds-ci \
		--bootstrap-with=lazy \
		--test-toolchain=tools/gcc-9-static-rel.jsonc \
		--main-toolchain=tools/gcc-9-static-rel.jsonc
	mv _build/dds _build/dds-linux-x64

alpine-static-ci:
	docker build \
		--build-arg DDS_USER_UID=$(shell id -u) \
		-t dds-builder \
		-f tools/Dockerfile.alpine \
		tools/
	docker run \
		-t --rm \
		-u $(shell id -u) \
		-v $(PWD):/host -w /host \
		--privileged \
		-e CCACHE_DIR=/host/.docker-ccache \
		dds-builder \
		make _alpine-static-ci

vagrant-freebsd-ci:
	vagrant up freebsd11
	vagrant rsync
	vagrant ssh freebsd11 -c '\
		cd /vagrant && \
		make full-ci \
		'
	mkdir -p _build/
	vagrant scp freebsd11:/vagrant/_build/dds _build/dds-freebsd-x64

steps:
  - pwsh: tools\get-win-openssl.ps1
    displayName: Get OpenSSL for Windows
  - script: python -m pip install poetry && poetry install --no-dev
    displayName: Install Python deps
  - script: |
      echo Loading VS environment
      call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\Common7\Tools\vsdevcmd" -arch=x64 || exit 1
      echo Executing Build and Tests
      reg add HKLM\SYSTEM\CurrentControlSet\Control\FileSystem /v LongPathsEnabled /t REG_DWORD /d 1 /f || exit 1
      poetry run dds-ci || exit 1
      move _build\dds.exe _build\dds-win-x64.exe || exit 1
    displayName: Build and Test
  - publish: _build\dds-win-x64.exe

    displayName: Get GCC 9
  - script: brew install openssl@1.1
    displayName: Install OpenSSL
  - script: python3 -m pip install poetry
    displayName: Get Python Dependencies
  - script: make macos-ci
    displayName: Build and Test
[[package]] | |||||
category = "main" | |||||
description = "apipkg: namespace control and lazy-import mechanism" | |||||
name = "apipkg" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "1.5" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "An abstract syntax tree for Python with inference support." | |||||
name = "astroid" | |||||
optional = false | |||||
python-versions = ">=3.5" | |||||
version = "2.4.2" | |||||
[package.dependencies] | |||||
lazy-object-proxy = ">=1.4.0,<1.5.0" | |||||
six = ">=1.12,<2.0" | |||||
wrapt = ">=1.11,<2.0" | |||||
[package.dependencies.typed-ast] | |||||
python = "<3.8" | |||||
version = ">=1.4.0,<1.5" | |||||
[[package]] | |||||
category = "main" | |||||
description = "Atomic file writes." | |||||
marker = "sys_platform == \"win32\"" | |||||
name = "atomicwrites" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "1.4.0" | |||||
[[package]] | |||||
category = "main" | |||||
description = "Classes Without Boilerplate" | |||||
name = "attrs" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "20.3.0" | |||||
[package.extras] | |||||
dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] | |||||
docs = ["furo", "sphinx", "zope.interface"] | |||||
tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] | |||||
tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "Cross-platform colored terminal text." | |||||
marker = "sys_platform == \"win32\"" | |||||
name = "colorama" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | |||||
version = "0.4.4" | |||||
[[package]] | |||||
category = "main" | |||||
description = "execnet: rapid multi-Python deployment" | |||||
name = "execnet" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "1.7.1" | |||||
[package.dependencies] | |||||
apipkg = ">=1.4" | |||||
[package.extras] | |||||
testing = ["pre-commit"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "Read metadata from Python packages" | |||||
marker = "python_version < \"3.8\"" | |||||
name = "importlib-metadata" | |||||
optional = false | |||||
python-versions = ">=3.6" | |||||
version = "3.1.1" | |||||
[package.dependencies] | |||||
zipp = ">=0.5" | |||||
[package.extras] | |||||
docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] | |||||
testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "iniconfig: brain-dead simple config-ini parsing" | |||||
name = "iniconfig" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "1.1.1" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "A Python utility / library to sort Python imports." | |||||
name = "isort" | |||||
optional = false | |||||
python-versions = ">=3.6,<4.0" | |||||
version = "5.6.4" | |||||
[package.extras] | |||||
colors = ["colorama (>=0.4.3,<0.5.0)"] | |||||
pipfile_deprecated_finder = ["pipreqs", "requirementslib"] | |||||
requirements_deprecated_finder = ["pipreqs", "pip-api"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "A Python implementation of the JSON5 data format." | |||||
name = "json5" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "0.9.5" | |||||
[package.extras] | |||||
dev = ["hypothesis"] | |||||
[[package]] | |||||
category = "dev" | |||||
description = "A fast and thorough lazy object proxy." | |||||
name = "lazy-object-proxy" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "1.4.3" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "McCabe checker, plugin for flake8" | |||||
name = "mccabe" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "0.6.1" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "Optional static typing for Python" | |||||
name = "mypy" | |||||
optional = false | |||||
python-versions = ">=3.5" | |||||
version = "0.790" | |||||
[package.dependencies] | |||||
mypy-extensions = ">=0.4.3,<0.5.0" | |||||
typed-ast = ">=1.4.0,<1.5.0" | |||||
typing-extensions = ">=3.7.4" | |||||
[package.extras] | |||||
dmypy = ["psutil (>=4.0)"] | |||||
[[package]] | |||||
category = "dev" | |||||
description = "Experimental type system extensions for programs checked with the mypy typechecker." | |||||
name = "mypy-extensions" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "0.4.3" | |||||
[[package]] | |||||
category = "main" | |||||
description = "Core utilities for Python packages" | |||||
name = "packaging" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "20.7" | |||||
[package.dependencies] | |||||
pyparsing = ">=2.0.2" | |||||
[[package]] | |||||
category = "main" | |||||
description = "plugin and hook calling mechanisms for python" | |||||
name = "pluggy" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "0.13.1" | |||||
[package.dependencies] | |||||
[package.dependencies.importlib-metadata] | |||||
python = "<3.8" | |||||
version = ">=0.12" | |||||
[package.extras] | |||||
dev = ["pre-commit", "tox"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "library with cross-python path, ini-parsing, io, code, log facilities" | |||||
name = "py" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "1.9.0" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "python code static checker" | |||||
name = "pylint" | |||||
optional = false | |||||
python-versions = ">=3.5.*" | |||||
version = "2.6.0" | |||||
[package.dependencies] | |||||
astroid = ">=2.4.0,<=2.5" | |||||
colorama = "*" | |||||
isort = ">=4.2.5,<6" | |||||
mccabe = ">=0.6,<0.7" | |||||
toml = ">=0.7.1" | |||||
[[package]] | |||||
category = "main" | |||||
description = "Python parsing module" | |||||
name = "pyparsing" | |||||
optional = false | |||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" | |||||
version = "2.4.7" | |||||
[[package]] | |||||
category = "main" | |||||
description = "pytest: simple powerful testing with Python" | |||||
name = "pytest" | |||||
optional = false | |||||
python-versions = ">=3.5" | |||||
version = "6.1.2" | |||||
[package.dependencies] | |||||
atomicwrites = ">=1.0" | |||||
attrs = ">=17.4.0" | |||||
colorama = "*" | |||||
iniconfig = "*" | |||||
packaging = "*" | |||||
pluggy = ">=0.12,<1.0" | |||||
py = ">=1.8.2" | |||||
toml = "*" | |||||
[package.dependencies.importlib-metadata] | |||||
python = "<3.8" | |||||
version = ">=0.12" | |||||
[package.extras] | |||||
checkqa_mypy = ["mypy (0.780)"] | |||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "Pytest support for asyncio." | |||||
name = "pytest-asyncio" | |||||
optional = false | |||||
python-versions = ">= 3.5" | |||||
version = "0.14.0" | |||||
[package.dependencies] | |||||
pytest = ">=5.4.0" | |||||
[package.extras] | |||||
testing = ["async-generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "run tests in isolated forked subprocesses" | |||||
name = "pytest-forked" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" | |||||
version = "1.3.0" | |||||
[package.dependencies] | |||||
py = "*" | |||||
pytest = ">=3.10" | |||||
[[package]] | |||||
category = "main" | |||||
description = "pytest xdist plugin for distributed testing and loop-on-failing modes" | |||||
name = "pytest-xdist" | |||||
optional = false | |||||
python-versions = ">=3.5" | |||||
version = "2.1.0" | |||||
[package.dependencies] | |||||
execnet = ">=1.1" | |||||
pytest = ">=6.0.0" | |||||
pytest-forked = "*" | |||||
[package.extras] | |||||
psutil = ["psutil (>=3.0)"] | |||||
testing = ["filelock"] | |||||
[[package]] | |||||
category = "dev" | |||||
description = "a python refactoring library..." | |||||
name = "rope" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "0.18.0" | |||||
[package.extras] | |||||
dev = ["pytest"] | |||||
[[package]] | |||||
category = "main" | |||||
description = "Python helper for Semantic Versioning (http://semver.org/)" | |||||
name = "semver" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | |||||
version = "2.13.0" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "Python 2 and 3 compatibility utilities" | |||||
name = "six" | |||||
optional = false | |||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" | |||||
version = "1.15.0" | |||||
[[package]] | |||||
category = "main" | |||||
description = "Python Library for Tom's Obvious, Minimal Language" | |||||
name = "toml" | |||||
optional = false | |||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" | |||||
version = "0.10.2" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "a fork of Python 2 and 3 ast modules with type comment support" | |||||
name = "typed-ast" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "1.4.1" | |||||
[[package]] | |||||
category = "main" | |||||
description = "Backported and Experimental Type Hints for Python 3.5+" | |||||
name = "typing-extensions" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "3.7.4.3" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "Module for decorators, wrappers and monkey patching." | |||||
name = "wrapt" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "1.12.1" | |||||
[[package]] | |||||
category = "dev" | |||||
description = "A formatter for Python code." | |||||
name = "yapf" | |||||
optional = false | |||||
python-versions = "*" | |||||
version = "0.30.0" | |||||
[[package]] | |||||
category = "main" | |||||
description = "Backport of pathlib-compatible object wrapper for zip files" | |||||
marker = "python_version < \"3.8\"" | |||||
name = "zipp" | |||||
optional = false | |||||
python-versions = ">=3.6" | |||||
version = "3.4.0" | |||||
[package.extras] | |||||
docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] | |||||
testing = ["pytest (>=3.5,<3.7.3 || >3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "jaraco.test (>=3.2.0)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] | |||||
[metadata] | |||||
content-hash = "bb7d048748c946ac4f6196a339a149d8060b048968853cb281d83207e324a61b" | |||||
python-versions = "^3.6" | |||||
[metadata.files] | |||||
apipkg = [ | |||||
{file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"}, | |||||
{file = "apipkg-1.5.tar.gz", hash = "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6"}, | |||||
] | |||||
astroid = [ | |||||
{file = "astroid-2.4.2-py3-none-any.whl", hash = "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"}, | |||||
{file = "astroid-2.4.2.tar.gz", hash = "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703"}, | |||||
] | |||||
atomicwrites = [ | |||||
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, | |||||
{file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, | |||||
] | |||||
attrs = [ | |||||
{file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, | |||||
{file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, | |||||
] | |||||
colorama = [ | |||||
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, | |||||
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, | |||||
] | |||||
execnet = [ | |||||
{file = "execnet-1.7.1-py2.py3-none-any.whl", hash = "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"}, | |||||
{file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"}, | |||||
] | |||||
importlib-metadata = [ | |||||
{file = "importlib_metadata-3.1.1-py3-none-any.whl", hash = "sha256:6112e21359ef8f344e7178aa5b72dc6e62b38b0d008e6d3cb212c5b84df72013"}, | |||||
{file = "importlib_metadata-3.1.1.tar.gz", hash = "sha256:b0c2d3b226157ae4517d9625decf63591461c66b3a808c2666d538946519d170"}, | |||||
] | |||||
iniconfig = [ | |||||
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, | |||||
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, | |||||
] | |||||
isort = [ | |||||
{file = "isort-5.6.4-py3-none-any.whl", hash = "sha256:dcab1d98b469a12a1a624ead220584391648790275560e1a43e54c5dceae65e7"}, | |||||
{file = "isort-5.6.4.tar.gz", hash = "sha256:dcaeec1b5f0eca77faea2a35ab790b4f3680ff75590bfcb7145986905aab2f58"}, | |||||
] | |||||
json5 = [ | |||||
{file = "json5-0.9.5-py2.py3-none-any.whl", hash = "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c"}, | |||||
{file = "json5-0.9.5.tar.gz", hash = "sha256:703cfee540790576b56a92e1c6aaa6c4b0d98971dc358ead83812aa4d06bdb96"}, | |||||
] | |||||
lazy-object-proxy = [ | |||||
{file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"}, | |||||
{file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"}, | |||||
] | |||||
mccabe = [ | |||||
{file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, | |||||
{file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, | |||||
] | |||||
mypy = [ | |||||
{file = "mypy-0.790-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:bd03b3cf666bff8d710d633d1c56ab7facbdc204d567715cb3b9f85c6e94f669"}, | |||||
{file = "mypy-0.790-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:2170492030f6faa537647d29945786d297e4862765f0b4ac5930ff62e300d802"}, | |||||
{file = "mypy-0.790-cp35-cp35m-win_amd64.whl", hash = "sha256:e86bdace26c5fe9cf8cb735e7cedfe7850ad92b327ac5d797c656717d2ca66de"}, | |||||
{file = "mypy-0.790-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e97e9c13d67fbe524be17e4d8025d51a7dca38f90de2e462243ab8ed8a9178d1"}, | |||||
{file = "mypy-0.790-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0d34d6b122597d48a36d6c59e35341f410d4abfa771d96d04ae2c468dd201abc"}, | |||||
{file = "mypy-0.790-cp36-cp36m-win_amd64.whl", hash = "sha256:72060bf64f290fb629bd4a67c707a66fd88ca26e413a91384b18db3876e57ed7"}, | |||||
{file = "mypy-0.790-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:eea260feb1830a627fb526d22fbb426b750d9f5a47b624e8d5e7e004359b219c"}, | |||||
{file = "mypy-0.790-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c614194e01c85bb2e551c421397e49afb2872c88b5830e3554f0519f9fb1c178"}, | |||||
{file = "mypy-0.790-cp37-cp37m-win_amd64.whl", hash = "sha256:0a0d102247c16ce93c97066443d11e2d36e6cc2a32d8ccc1f705268970479324"}, | |||||
{file = "mypy-0.790-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cf4e7bf7f1214826cf7333627cb2547c0db7e3078723227820d0a2490f117a01"}, | |||||
{file = "mypy-0.790-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:af4e9ff1834e565f1baa74ccf7ae2564ae38c8df2a85b057af1dbbc958eb6666"}, | |||||
{file = "mypy-0.790-cp38-cp38-win_amd64.whl", hash = "sha256:da56dedcd7cd502ccd3c5dddc656cb36113dd793ad466e894574125945653cea"}, | |||||
{file = "mypy-0.790-py3-none-any.whl", hash = "sha256:2842d4fbd1b12ab422346376aad03ff5d0805b706102e475e962370f874a5122"}, | |||||
{file = "mypy-0.790.tar.gz", hash = "sha256:2b21ba45ad9ef2e2eb88ce4aeadd0112d0f5026418324176fd494a6824b74975"}, | |||||
] | |||||
mypy-extensions = [ | |||||
{file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, | |||||
{file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, | |||||
] | |||||
packaging = [ | |||||
{file = "packaging-20.7-py2.py3-none-any.whl", hash = "sha256:eb41423378682dadb7166144a4926e443093863024de508ca5c9737d6bc08376"}, | |||||
{file = "packaging-20.7.tar.gz", hash = "sha256:05af3bb85d320377db281cf254ab050e1a7ebcbf5410685a9a407e18a1f81236"}, | |||||
] | |||||
pluggy = [ | |||||
{file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, | |||||
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, | |||||
] | |||||
py = [ | |||||
{file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, | |||||
{file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, | |||||
] | |||||
pylint = [ | |||||
{file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"}, | |||||
{file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"}, | |||||
] | |||||
pyparsing = [ | |||||
{file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, | |||||
{file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, | |||||
] | |||||
pytest = [ | |||||
{file = "pytest-6.1.2-py3-none-any.whl", hash = "sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe"}, | |||||
{file = "pytest-6.1.2.tar.gz", hash = "sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"}, | |||||
] | |||||
pytest-asyncio = [ | |||||
{file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"}, | |||||
{file = "pytest_asyncio-0.14.0-py3-none-any.whl", hash = "sha256:2eae1e34f6c68fc0a9dc12d4bea190483843ff4708d24277c41568d6b6044f1d"}, | |||||
] | |||||
pytest-forked = [ | |||||
{file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, | |||||
{file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, | |||||
] | |||||
pytest-xdist = [ | |||||
{file = "pytest-xdist-2.1.0.tar.gz", hash = "sha256:82d938f1a24186520e2d9d3a64ef7d9ac7ecdf1a0659e095d18e596b8cbd0672"}, | |||||
{file = "pytest_xdist-2.1.0-py3-none-any.whl", hash = "sha256:7c629016b3bb006b88ac68e2b31551e7becf173c76b977768848e2bbed594d90"}, | |||||
] | |||||
rope = [ | |||||
{file = "rope-0.18.0.tar.gz", hash = "sha256:786b5c38c530d4846aa68a42604f61b4e69a493390e3ca11b88df0fbfdc3ed04"}, | |||||
] | |||||
semver = [ | |||||
{file = "semver-2.13.0-py2.py3-none-any.whl", hash = "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4"}, | |||||
{file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, | |||||
] | |||||
six = [ | |||||
{file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, | |||||
{file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, | |||||
] | |||||
toml = [ | |||||
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, | |||||
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, | |||||
] | |||||
typed-ast = [ | |||||
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, | |||||
{file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, | |||||
{file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, | |||||
{file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, | |||||
{file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, | |||||
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, | |||||
{file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, | |||||
{file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, | |||||
{file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, | |||||
{file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, | |||||
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, | |||||
{file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, | |||||
{file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, | |||||
{file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, | |||||
{file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, | |||||
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, | |||||
{file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, | |||||
{file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, | |||||
{file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, | |||||
{file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, | |||||
{file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, | |||||
] | |||||
typing-extensions = [ | |||||
{file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, | |||||
{file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, | |||||
{file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, | |||||
] | |||||
wrapt = [ | |||||
{file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, | |||||
] | |||||
yapf = [ | |||||
{file = "yapf-0.30.0-py2.py3-none-any.whl", hash = "sha256:3abf61ba67cf603069710d30acbc88cfe565d907e16ad81429ae90ce9651e0c9"}, | |||||
{file = "yapf-0.30.0.tar.gz", hash = "sha256:3000abee4c28daebad55da6c85f3cd07b8062ce48e2e9943c8da1b9667d48427"}, | |||||
] | |||||
zipp = [ | |||||
{file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"}, | |||||
{file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"}, | |||||
] |

[tool.poetry]
name = "dds"
version = "0.0.0"
description = ""
authors = ["vector-of-bool <vectorofbool@gmail.com>"]
license = "MPL-2.0"
packages = [
    { include = "dds_ci", from = "tools/" },
]

[tool.poetry.dependencies]
python = "^3.6"
semver = "^2.13.0"
pytest = "^6.1.2"
pytest-xdist = "^2.1.0"
pytest-asyncio = "^0.14.0"
typing-extensions = "^3.7.4"
json5 = "^0.9.5"

[tool.poetry.dev-dependencies]
# Only needed for development
pylint = "^2.6.0"
yapf = "^0.30.0"
mypy = "^0.790"
rope = "^0.18.0"

[tool.poetry.scripts]
dds-ci = "dds_ci.main:start"

[build-system]
requires = ["poetry>=0.12"]
build-backend = "poetry.masonry.api"
""" | """ | ||||
A simple HTTP request handler that simply serves files from a directory given to the constructor. | A simple HTTP request handler that simply serves files from a directory given to the constructor. | ||||
""" | """ | ||||
def __init__(self, *args, **kwargs) -> None: | def __init__(self, *args, **kwargs) -> None: | ||||
self.dir = kwargs.pop('dir') | self.dir = kwargs.pop('dir') | ||||
super().__init__(*args, **kwargs) | super().__init__(*args, **kwargs) | ||||
""" | """ | ||||
A fixture handle to a dds HTTP repository, including a path and URL. | A fixture handle to a dds HTTP repository, including a path and URL. | ||||
""" | """ | ||||
def __init__(self, dds_exe: Path, info: ServerInfo) -> None: | def __init__(self, dds_exe: Path, info: ServerInfo) -> None: | ||||
self.server = info | self.server = info | ||||
self.url = info.base_url | self.url = info.base_url |

# Base build dependencies
RUN apk add "gcc=9.3.0-r2" "g++=9.3.0-r2" make python3 py3-pip \
    git openssl-libs-static openssl-dev ccache lld curl python3-dev

# We use version-qualified names for compiler executables
RUN ln -s $(type -P gcc) /usr/local/bin/gcc-9 && \
    ln -s $(type -P g++) /usr/local/bin/g++-9

# We want the UID in the container to match the UID on the outside, for minimal
# fuss with file permissions
ARG DDS_USER_UID=1000

RUN curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py \
    | env POETRY_HOME=/opt/poetry python3 -u - --no-modify-path && \
    ln -s /opt/poetry/bin/poetry /usr/local/bin/poetry && \
    chmod a+x /opt/poetry/bin/poetry && \
    adduser --disabled-password --uid=${DDS_USER_UID} dds

USER dds
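
# Illustrative build/run of this image, mirroring the Makefile's alpine-static-ci
# target (the `dds-builder` tag name is the one used there):
#
#   docker build --build-arg DDS_USER_UID=$(id -u) -t dds-builder -f tools/Dockerfile.alpine tools/
#   docker run -t --rm -u $(id -u) -v $PWD:/host -w /host --privileged \
#       -e CCACHE_DIR=/host/.docker-ccache dds-builder make _alpine-static-ci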

import argparse
from pathlib import Path
import subprocess
import os
from typing import Sequence, NamedTuple
import sys
import shutil


class BootstrapPhase(NamedTuple):
    ref: str
    nix_compiler: str
    win_compiler: str

    @property
    def platform_compiler(self):
        if os.name == 'nt':
            return self.win_compiler
        else:
            return self.nix_compiler


BOOTSTRAP_PHASES = [
    BootstrapPhase('bootstrap-p1.2', 'g++-8', 'cl.exe'),
    BootstrapPhase('bootstrap-p4.2', 'g++-8', 'cl.exe'),
    BootstrapPhase('bootstrap-p5.2', 'g++-9', 'cl.exe'),
    BootstrapPhase('0.1.0-alpha.3', 'g++-9', 'cl.exe'),
    BootstrapPhase('0.1.0-alpha.4', 'g++-9', 'cl.exe'),
]

HERE = Path(__file__).parent.absolute()
PROJECT_ROOT = HERE.parent
BUILD_DIR = PROJECT_ROOT / '_build'
BOOTSTRAP_BASE_DIR = BUILD_DIR / '_bootstrap'
PREBUILT_DIR = PROJECT_ROOT / '_prebuilt'
EXE_SUFFIX = '.exe' if os.name == 'nt' else ''


def _run_quiet(cmd, **kwargs) -> None:
    cmd = [str(s) for s in cmd]
    res = subprocess.run(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        **kwargs,
    )
    if res.returncode != 0:
        print(f'Subprocess command {cmd} failed '
              f'[{res.returncode}]:\n{res.stdout.decode()}')
        raise subprocess.CalledProcessError(res.returncode, cmd)


def _clone_bootstrap_phase(ref: str) -> Path:
    print(f'Clone revision: {ref}')
    bts_dir = BOOTSTRAP_BASE_DIR / ref
    if bts_dir.exists():
        shutil.rmtree(bts_dir)
    _run_quiet([
        'git',
        'clone',
        '--depth=1',
        f'--branch={ref}',
        f'file://{PROJECT_ROOT}',
        bts_dir,
    ])
    return bts_dir


def _build_bootstrap_phase(ph: BootstrapPhase, bts_dir: Path) -> None:
    print(f'Build revision: {ph.ref} [This may take a moment]')
    env = os.environ.copy()
    env['DDS_BOOTSTRAP_PREV_EXE'] = str(PREBUILT_DIR / f'dds{EXE_SUFFIX}')
    _run_quiet(
        [
            sys.executable,
            '-u',
            str(bts_dir / 'tools/build.py'),
            f'--cxx={ph.platform_compiler}',
        ],
        env=env,
        cwd=bts_dir,
    )


def _pull_executable(bts_dir: Path) -> Path:
    prebuild_dir = (PROJECT_ROOT / '_prebuilt')
    prebuild_dir.mkdir(exist_ok=True)
    generated = list(bts_dir.glob(f'_build/dds{EXE_SUFFIX}'))
    assert len(generated) == 1, repr(generated)
    exe, = generated
    dest = prebuild_dir / exe.name
    if dest.exists():
        dest.unlink()
    exe.rename(dest)
    return dest


def _run_boot_phase(phase: BootstrapPhase) -> Path:
    bts_dir = _clone_bootstrap_phase(phase.ref)
    _build_bootstrap_phase(phase, bts_dir)
    return _pull_executable(bts_dir)


def main() -> int:
    for idx, phase in enumerate(BOOTSTRAP_PHASES):
        print(f'Bootstrap phase [{idx+1}/{len(BOOTSTRAP_PHASES)}]')
        exe = _run_boot_phase(phase)
    print(f'A bootstrapped DDS executable has been generated: {exe}')
    return 0


if __name__ == "__main__":
    sys.exit(main())
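
# Illustrative invocation (this is how tools/ci.py launches the chained bootstrap):
#
#   $ python3 -u tools/bootstrap.py
#
# Each phase clones its tagged revision into _build/_bootstrap/<ref>, builds it
# with the executable produced by the previous phase (exposed through
# DDS_BOOTSTRAP_PREV_EXE), and copies the result into _prebuilt/.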


#!/usr/bin/env python3
import argparse
import os
from pathlib import Path
from typing import Sequence
import sys
import shutil

from dds_ci import paths
from self_build import self_build

ROOT = Path(__file__).parent.parent.absolute()
BUILD_DIR = ROOT / '_build'


def main(argv: Sequence[str]) -> int:
    # Prior versions of this script took a --cxx argument, but we don't care anymore
    parser = argparse.ArgumentParser()
    parser.add_argument('--cxx', help=argparse.SUPPRESS)
    parser.parse_args(argv)

    dds_bootstrap_env_key = 'DDS_BOOTSTRAP_PREV_EXE'
    if dds_bootstrap_env_key not in os.environ:
        raise RuntimeError('A previous-phase bootstrapped executable '
                           'must be available via $DDS_BOOTSTRAP_PREV_EXE')

    dds_exe = Path(os.environ[dds_bootstrap_env_key])
    if BUILD_DIR.exists():
        shutil.rmtree(BUILD_DIR)

    print(f'Using previously built DDS executable: {dds_exe}')
    if os.name == 'nt':
        tc_fpath = ROOT / 'tools/msvc.jsonc'
    elif sys.platform.startswith('freebsd'):
        tc_fpath = ROOT / 'tools/freebsd-gcc-9.jsonc'
    else:
        tc_fpath = ROOT / 'tools/gcc-9.jsonc'

    self_build(dds_exe,
               cat_json_path=ROOT / 'catalog.old.json',
               toolchain=str(tc_fpath))
    return 0


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))


import argparse
import os
import sys
import pytest
from pathlib import Path
from typing import Sequence, NamedTuple
import multiprocessing
import subprocess
import urllib.request
import shutil

from self_build import self_build, dds_build
from dds_ci import paths, proc


class CIOptions(NamedTuple):
    toolchain: str


def _do_bootstrap_build(opts: CIOptions) -> None:
    print('Bootstrapping by a local build of prior versions...')
    subprocess.check_call([
        sys.executable,
        '-u',
        str(paths.TOOLS_DIR / 'bootstrap.py'),
    ])


def _do_bootstrap_download() -> None:
    filename = {
        'win32': 'dds-win-x64.exe',
        'linux': 'dds-linux-x64',
        'darwin': 'dds-macos-x64',
        'freebsd11': 'dds-freebsd-x64',
        'freebsd12': 'dds-freebsd-x64',
    }.get(sys.platform)
    if filename is None:
        raise RuntimeError(f'We do not have a prebuilt DDS binary for the "{sys.platform}" platform')
    url = f'https://github.com/vector-of-bool/dds/releases/download/0.1.0-alpha.4/{filename}'

    print(f'Downloading prebuilt DDS executable: {url}')
    stream = urllib.request.urlopen(url)
    paths.PREBUILT_DDS.parent.mkdir(exist_ok=True, parents=True)
    with paths.PREBUILT_DDS.open('wb') as fd:
        while True:
            buf = stream.read(1024 * 4)
            if not buf:
                break
            fd.write(buf)

    if os.name != 'nt':
        # Mark the binary executable (chmod a+x). By default it won't be.
        mode = paths.PREBUILT_DDS.stat().st_mode
        mode |= 0b001_001_001
        paths.PREBUILT_DDS.chmod(mode)


def main(argv: Sequence[str]) -> int:
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-B',
        '--bootstrap-with',
        help='How are we to obtain a bootstrapped DDS executable?',
        choices=('download', 'build', 'skip'),
        required=True,
    )
    parser.add_argument(
        '--toolchain',
        '-T',
        help='The toolchain to use for the CI process',
        required=True,
    )
    parser.add_argument(
        '--build-only', action='store_true', help='Only build the `dds` executable. Skip second-phase and tests.')
    parser.add_argument(
        '--no-clean',
        action='store_false',
        dest='clean',
        help='Don\'t remove prior build/deps results',
    )
    args = parser.parse_args(argv)

    opts = CIOptions(toolchain=args.toolchain)

    if args.bootstrap_with == 'build':
        _do_bootstrap_build(opts)
    elif args.bootstrap_with == 'download':
        _do_bootstrap_download()
    elif args.bootstrap_with == 'skip':
        pass
    else:
        assert False, 'impossible'

    old_cat_path = paths.PREBUILT_DIR / 'catalog.db'
    if old_cat_path.is_file() and args.clean:
        old_cat_path.unlink()

    ci_repo_dir = paths.PREBUILT_DIR / 'ci-repo'
    if ci_repo_dir.exists() and args.clean:
        shutil.rmtree(ci_repo_dir)

    self_build(
        paths.PREBUILT_DDS,
        toolchain=opts.toolchain,
        cat_path=old_cat_path,
        cat_json_path=Path('old-catalog.json'),
        dds_flags=[('--repo-dir', ci_repo_dir)])
    print('Main build PASSED!')
    print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}')

    if args.build_only:
        print('`--build-only` was given, so second phase and tests will not execute')
        return 0

    print('Bootstrapping myself:')
    new_cat_path = paths.BUILD_DIR / 'catalog.db'
    new_repo_dir = paths.BUILD_DIR / 'ci-repo-2'
    if new_cat_path.is_file():
        new_cat_path.unlink()
    if new_repo_dir.is_dir():
        shutil.rmtree(new_repo_dir)
    dds_build(paths.CUR_BUILT_DDS,
              toolchain=opts.toolchain,
              more_flags=[
                  f'--repo-dir={new_repo_dir}',
                  f'--catalog={new_cat_path}',
                  '--add-repo=https://dds.pizza/repo',
              ])
    print('Bootstrap test PASSED!')

    basetemp = Path('/tmp/dds-ci')
    basetemp.mkdir(exist_ok=True, parents=True)
    return pytest.main([
        '-v',
        '--durations=10',
        '-n',
        str(multiprocessing.cpu_count() + 2),
        f'--basetemp={basetemp}',  # Force a top-level /tmp dir. On Windows this prevents paths from being too long
        'tests/',
    ])


if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
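
# Example invocations (illustrative; the toolchain files are the ones referenced
# by the CI targets elsewhere in this repository):
#
#   $ python3 -u tools/ci.py -B download -T tools/gcc-9-rel.jsonc
#   $ python3 -u tools/ci.py -B build -T tools/msvc.jsonc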

import enum
from pathlib import Path
from contextlib import contextmanager
from typing import Iterator, ContextManager
import sys
import urllib.request
import shutil
import tempfile

from . import paths
from .dds import DDSWrapper
from .paths import new_tempdir


class BootstrapMode(enum.Enum):
    """How should we bootstrap our prior DDS executable?"""
    #: Download one from GitHub
    Download = 'download'
    #: Build one from source
    Build = 'build'
    #: Skip bootstrapping. Assume it already exists.
    Skip = 'skip'
    #: If the prior executable exists, skip, otherwise download
    Lazy = 'lazy'
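
# Note: dds_ci.main passes the raw --bootstrap-with string straight to this enum
# (argparse's `type=BootstrapMode`), so members are looked up by value:
#
#   assert BootstrapMode('lazy') is BootstrapMode.Lazy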


def _do_bootstrap_download() -> Path:
    filename = {
        'win32': 'dds-win-x64.exe',
        'linux': 'dds-linux-x64',
        'darwin': 'dds-macos-x64',
        'freebsd11': 'dds-freebsd-x64',
        'freebsd12': 'dds-freebsd-x64',
    }.get(sys.platform)
    if filename is None:
        raise RuntimeError(f'We do not have a prebuilt DDS binary for the "{sys.platform}" platform')
    url = f'https://github.com/vector-of-bool/dds/releases/download/0.1.0-alpha.4/{filename}'

    print(f'Downloading prebuilt DDS executable: {url}')
    stream = urllib.request.urlopen(url)
    paths.PREBUILT_DDS.parent.mkdir(exist_ok=True, parents=True)
    with paths.PREBUILT_DDS.open('wb') as fd:
        while True:
            buf = stream.read(1024 * 4)
            if not buf:
                break
            fd.write(buf)

    if sys.platform != 'win32':
        # Mark the binary executable. By default it won't be.
        mode = paths.PREBUILT_DDS.stat().st_mode
        mode |= 0b001_001_001
        paths.PREBUILT_DDS.chmod(mode)

    return paths.PREBUILT_DDS


@contextmanager
def pin_exe(fpath: Path) -> Iterator[Path]:
    """
    Create a copy of 'fpath' at an unspecified location, and yield that path.
    This is needed if the executable would overwrite itself.
    """
    with new_tempdir() as tdir:
        tfile = tdir / 'previous-dds.exe'
        shutil.copy2(fpath, tfile)
        yield tfile
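
# Illustrative use (this is what get_bootstrap_exe below does): pinning lets the
# new build overwrite _prebuilt/dds while the pinned copy keeps running.
#
#   with pin_exe(paths.PREBUILT_DDS) as pinned:
#       dds = DDSWrapper(pinned)
#       # ... run builds with `dds`; the original file may now be replaced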


@contextmanager
def get_bootstrap_exe(mode: BootstrapMode) -> Iterator[DDSWrapper]:
    """Context manager that yields a DDSWrapper around a prior 'dds' executable"""
    if mode is BootstrapMode.Lazy:
        f = paths.PREBUILT_DDS
        if not f.exists():
            _do_bootstrap_download()
    elif mode is BootstrapMode.Download:
        f = _do_bootstrap_download()
    elif mode is BootstrapMode.Build:
        f = _do_bootstrap_build()
    elif mode is BootstrapMode.Skip:
        f = paths.PREBUILT_DDS

    with pin_exe(f) as dds:
        yield DDSWrapper(dds)


from argparse import ArgumentParser

from dds_ci import paths


def add_tc_arg(parser: ArgumentParser, *, required=True) -> None:
    parser.add_argument(
        '--toolchain',
        '-T',
        help='The DDS toolchain to use',
        required=required)


def add_dds_exe_arg(parser: ArgumentParser, *, required=True) -> None:
    parser.add_argument(
        '--exe',
        '-e',
        help='Path to a DDS executable to use',
        required=required)
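
# Illustrative use of these helpers in a small tool script (nothing beyond the
# two functions above is assumed):
#
#   parser = ArgumentParser()
#   add_tc_arg(parser)
#   add_dds_exe_arg(parser, required=False)
#   args = parser.parse_args(['-T', 'tools/gcc-9.jsonc'])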


from pathlib import Path
from typing import Optional
import multiprocessing
import shutil

from . import proc
from . import paths


class DDSWrapper:
    """
    Wraps a 'dds' executable with some convenience APIs that invoke various
    'dds' subcommands.
    """
    def __init__(self, path: Path) -> None:
        self.path = path
        self.repo_dir = paths.PREBUILT_DIR / 'ci-repo'
        self.catalog_path = paths.PREBUILT_DIR / 'ci-catalog.db'

    @property
    def catalog_path_arg(self):
        """The argument for --catalog"""
        return f'--catalog={self.catalog_path}'

    @property
    def repo_dir_arg(self):
        """The argument for --repo-dir"""
        return f'--repo-dir={self.repo_dir}'

    def clean(self, *, build_dir: Optional[Path] = None, repo=True, catalog=True):
        """
        Clean out prior executable output, including repos, catalog, and
        the build results at 'build_dir', if given.
        """
        if build_dir and build_dir.exists():
            shutil.rmtree(build_dir)
        if repo and self.repo_dir.exists():
            shutil.rmtree(self.repo_dir)
        if catalog and self.catalog_path.exists():
            self.catalog_path.unlink()

    def run(self, args: proc.CommandLine) -> None:
        """Execute the 'dds' executable with the given arguments"""
        proc.check_run([self.path, args])  # type: ignore

    def catalog_json_import(self, path: Path) -> None:
        """Run 'catalog import' to import the given JSON. Only applicable to older 'dds'"""
        self.run(['catalog', 'import', self.catalog_path_arg, f'--json={path}'])

    def build(self, *, toolchain: Path, root: Path, build_root: Optional[Path] = None, jobs: Optional[int] = None) -> None:
        """
        Run 'dds build' with the given arguments.

        :param toolchain: The toolchain to use for the build.
        :param root: The root project directory.
        :param build_root: The root directory where the output will be written.
        :param jobs: The number of jobs to use. Default is CPU-count + 2.
        """
        jobs = jobs or multiprocessing.cpu_count() + 2
        self.run([
            'build',
            f'--toolchain={toolchain}',
            self.repo_dir_arg,
            self.catalog_path_arg,
            f'--jobs={jobs}',
            f'--project-dir={root}',
            f'--out={build_root}',
        ])
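
# Illustrative usage, mirroring how dds_ci.main drives this wrapper (the
# toolchain file named here is an example, not something the class requires):
#
#   dds = DDSWrapper(paths.PREBUILT_DDS)
#   dds.clean(build_dir=paths.BUILD_DIR)
#   dds.build(toolchain=paths.TOOLS_DIR / 'gcc-9-rel.jsonc',
#             root=paths.PROJECT_ROOT,
#             build_root=paths.BUILD_DIR)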


import argparse
import json
from contextlib import contextmanager
import enum
import multiprocessing
import pytest
from pathlib import Path
from concurrent import futures
import sys
import os
from typing import NoReturn, Sequence, Optional, Iterator
from typing_extensions import Protocol
import subprocess

import json5

from . import paths
from .dds import DDSWrapper
from .bootstrap import BootstrapMode, get_bootstrap_exe


def make_argparser() -> argparse.ArgumentParser:
    """Create an argument parser for the dds-ci command-line"""
    parser = argparse.ArgumentParser()
    parser.add_argument('-B',
                        '--bootstrap-with',
                        help='How are we to obtain a bootstrapped DDS executable?',
                        metavar='{download,build,skip,lazy}',
                        type=BootstrapMode,
                        default=BootstrapMode.Lazy)
    parser.add_argument('--rapid', help='Run CI for fast development iterations', action='store_true')
    parser.add_argument('--test-toolchain',
                        '-TT',
                        type=Path,
                        metavar='<toolchain-file>',
                        help='The toolchain to use for the first build, which will be passed through the tests')
    parser.add_argument('--main-toolchain',
                        '-T',
                        type=Path,
                        dest='toolchain',
                        metavar='<toolchain-file>',
                        help='The toolchain to use for the final build')
    parser.add_argument('--jobs',
                        '-j',
                        type=int,
                        help='Number of parallel jobs to use when building and testing',
                        default=multiprocessing.cpu_count() + 2)
    parser.add_argument('--build-only', action='store_true', help='Only build the dds executable, do not run tests')
    parser.add_argument('--clean', action='store_true', help='Remove prior build/deps results before building')
    parser.add_argument('--no-test',
                        action='store_false',
                        dest='do_test',
                        help='Skip testing and just build the final result')
    return parser


class CommandArguments(Protocol):
    """
    The result of parsing argv with the dds-ci argument parser.
    """
    #: Whether the user wants us to clean results before building
    clean: bool
    #: The bootstrap method the user has requested
    bootstrap_with: BootstrapMode
    #: The toolchain to use when building the 'dds' executable that will be tested.
    test_toolchain: Optional[Path]
    #: The toolchain to use when building the main 'dds' executable to publish
    toolchain: Optional[Path]
    #: The maximum number of parallel jobs for build and test
    jobs: int
    #: Whether we should run the pytest tests
    do_test: bool
    #: Rapid-CI is for 'dds' development purposes
    rapid: bool


def parse_argv(argv: Sequence[str]) -> CommandArguments:
    """Parse the given dds-ci command-line argument list"""
    return make_argparser().parse_args(argv)


@contextmanager
def fixup_toolchain(json_file: Path) -> Iterator[Path]:
    """
    Augment the toolchain at the given path by adding 'ccache' or '-fuse-ld=lld'
    if those tools are available on the system. Yields a new toolchain file
    based on 'json_file'.
    """
    data = json5.loads(json_file.read_text())
    # Check if we can add ccache
    ccache = paths.find_exe('ccache')
    if ccache:
        print('Found ccache:', ccache)
        data['compiler_launcher'] = [str(ccache)]
    # Check for lld for use with GCC/Clang
    if paths.find_exe('ld.lld') and data.get('compiler_id') in ('gnu', 'clang'):
        print('Linking with `-fuse-ld=lld`')
        data.setdefault('link_flags', []).append('-fuse-ld=lld')
    # Save the new toolchain data
    with paths.new_tempdir() as tdir:
        new_json = tdir / json_file.name
        new_json.write_text(json.dumps(data))
        yield new_json
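
# Illustratively, for a toolchain file containing `"compiler_id": "gnu"`, with both
# ccache and ld.lld on PATH, the temporary copy written above would additionally
# contain something like:
#
#   "compiler_launcher": ["/usr/bin/ccache"],
#   "link_flags": [..., "-fuse-ld=lld"]
#
# (the ccache path is an example; whatever paths.find_exe() returns is used).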
def get_default_test_toolchain() -> Path: | |||||
""" | |||||
Get the default toolchain that should be used for dev and test based on the | |||||
host platform. | |||||
""" | |||||
if sys.platform == 'win32': | |||||
return paths.TOOLS_DIR / 'msvc-audit.jsonc' | |||||
elif sys.platform in 'linux': | |||||
return paths.TOOLS_DIR / 'gcc-9-audit.jsonc' | |||||
elif sys.platform == 'darwin': | |||||
return paths.TOOLS_DIR / 'gcc-9-audit-macos.jsonc' | |||||
else: | |||||
raise RuntimeError(f'Unable to determine the default toolchain (sys.platform is {sys.platform!r})') | |||||
def get_default_toolchain() -> Path: | |||||
""" | |||||
Get the default toolchain that should be used to generate the release executable | |||||
based on the host platform. | |||||
""" | |||||
if sys.platform == 'win32': | |||||
return paths.TOOLS_DIR / 'msvc-rel.jsonc' | |||||
elif sys.platform == 'linux': | |||||
return paths.TOOLS_DIR / 'gcc-9-rel.jsonc' | |||||
elif sys.platform == 'darwin': | |||||
return paths.TOOLS_DIR / 'gcc-9-rel-macos.jsonc' | |||||
else: | |||||
raise RuntimeError(f'Unable to determine the default toolchain (sys.platform is {sys.platform!r})') | |||||
def test_build(dds: DDSWrapper, args: CommandArguments) -> DDSWrapper: | |||||
""" | |||||
Execute the build that generates the test-mode executable. Uses the given 'dds' | |||||
to build the new dds. Returns a DDSWrapper around the generated test executable. | |||||
""" | |||||
test_tc = args.test_toolchain or get_default_test_toolchain() | |||||
build_dir = paths.BUILD_DIR / '_ci-test' | |||||
with fixup_toolchain(test_tc) as new_tc: | |||||
dds.build(toolchain=new_tc, root=paths.PROJECT_ROOT, build_root=build_dir, jobs=args.jobs) | |||||
return DDSWrapper(build_dir / ('dds' + paths.EXE_SUFFIX)) | |||||
def run_pytest(dds: DDSWrapper, args: CommandArguments) -> int: | |||||
""" | |||||
Execute pytest, testing against the given test-mode 'dds' executable. Returns | |||||
the exit code of pytest. | |||||
""" | |||||
basetemp = Path('/tmp/dds-ci') | |||||
basetemp.mkdir(exist_ok=True, parents=True) | |||||
return pytest.main([ | |||||
'-v', | |||||
'--durations=10', | |||||
'-n', | |||||
str(args.jobs), | |||||
f'--basetemp={basetemp}', | |||||
f'--dds-exe={dds.path}', | |||||
str(paths.PROJECT_ROOT / 'tests/'), | |||||
]) | |||||
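# The invocation above is roughly equivalent to running (paths illustrative):
#   pytest -v --durations=10 -n <jobs> --basetemp=/tmp/dds-ci --dds-exe=<test-built dds> tests/
# where '-n' is provided by pytest-xdist for parallel test execution and '--dds-exe'
# is this project's own pytest option for selecting the executable under test.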
def main_build(dds: DDSWrapper, args: CommandArguments) -> int: | |||||
""" | |||||
Execute the main build of dds using the given 'dds' executable to build itself. | |||||
""" | |||||
main_tc = args.toolchain or ( | |||||
# If we are in rapid-dev mode, use the test toolchain, which has audit/debug enabled | |||||
get_default_toolchain() if not args.rapid else get_default_test_toolchain()) | |||||
with fixup_toolchain(main_tc) as new_tc: | |||||
try: | |||||
dds.build(toolchain=new_tc, root=paths.PROJECT_ROOT, build_root=paths.BUILD_DIR, jobs=args.jobs) | |||||
except subprocess.CalledProcessError as e: | |||||
if args.rapid: | |||||
return e.returncode | |||||
raise | |||||
return 0 | |||||
def ci_with_dds(dds: DDSWrapper, args: CommandArguments) -> int: | |||||
""" | |||||
Execute CI using the given prior 'dds' executable. | |||||
""" | |||||
if args.clean: | |||||
dds.clean(build_dir=paths.BUILD_DIR) | |||||
dds.catalog_json_import(paths.PROJECT_ROOT / 'old-catalog.json') | |||||
pool = futures.ThreadPoolExecutor() | |||||
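# Placeholder future that simply resolves to 0, so the result-collection loop below
# works even when the test build and pytest run are skipped.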
test_fut = pool.submit(lambda: 0) | |||||
if args.do_test and not args.rapid: | |||||
test_dds = test_build(dds, args) | |||||
test_fut = pool.submit(lambda: run_pytest(test_dds, args)) | |||||
main_fut = pool.submit(lambda: main_build(dds, args)) | |||||
for fut in futures.as_completed({test_fut, main_fut}): | |||||
if fut.result(): | |||||
return fut.result() | |||||
return 0 | |||||
def main(argv: Sequence[str]) -> int: | |||||
args = parse_argv(argv) | |||||
with get_bootstrap_exe(args.bootstrap_with) as f: | |||||
return ci_with_dds(f, args) | |||||
def start(): | |||||
sys.exit(main(sys.argv[1:])) | |||||
if __name__ == "__main__": | |||||
start() |
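# Example invocations, assuming this module is exposed as the 'dds-ci' command
# (as its docstrings suggest); the flags shown are the ones defined by the parser above:
#   dds-ci --clean        # remove prior results, then run the full CI flow
#   dds-ci --no-test      # build the final executable but skip the pytest run
#   dds-ci --build-only   # only build the dds executable, do not run tests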
import os | import os | ||||
import shutil | |||||
import itertools | |||||
import tempfile | |||||
from contextlib import contextmanager | |||||
from pathlib import Path | from pathlib import Path | ||||
from typing import Iterator, Optional | |||||
TOOLS_DIR = Path(__file__).absolute().parent.parent | |||||
PROJECT_ROOT = TOOLS_DIR.parent | |||||
#: The root directory of the dds project | |||||
PROJECT_ROOT = Path(__file__).absolute().parent.parent.parent | |||||
#: The <repo>/tools directory | |||||
TOOLS_DIR = PROJECT_ROOT / 'tools' | |||||
#: The default build directory | |||||
BUILD_DIR = PROJECT_ROOT / '_build' | BUILD_DIR = PROJECT_ROOT / '_build' | ||||
#: The directory where prebuilt/bootstrapped results will go, and scratch space for the build | |||||
PREBUILT_DIR = PROJECT_ROOT / '_prebuilt' | PREBUILT_DIR = PROJECT_ROOT / '_prebuilt' | ||||
#: The suffix of executable files on this system | |||||
EXE_SUFFIX = '.exe' if os.name == 'nt' else '' | EXE_SUFFIX = '.exe' if os.name == 'nt' else '' | ||||
#: The path to the prebuilt 'dds' executable | |||||
PREBUILT_DDS = (PREBUILT_DIR / 'dds').with_suffix(EXE_SUFFIX) | PREBUILT_DDS = (PREBUILT_DIR / 'dds').with_suffix(EXE_SUFFIX) | ||||
#: The path to the main built 'dds' executable | |||||
CUR_BUILT_DDS = (BUILD_DIR / 'dds').with_suffix(EXE_SUFFIX) | CUR_BUILT_DDS = (BUILD_DIR / 'dds').with_suffix(EXE_SUFFIX) | ||||
EMBEDDED_REPO_DIR = PROJECT_ROOT / 'external/repo' | |||||
SELF_TEST_REPO_DIR = BUILD_DIR / '_self-repo' | |||||
@contextmanager | |||||
def new_tempdir() -> Iterator[Path]: | |||||
""" | |||||
Create and yield a new temporary directory, which will be destroyed on | |||||
context-manager exit | |||||
""" | |||||
tdir = Path(tempfile.mkdtemp()) | |||||
try: | |||||
yield tdir | |||||
finally: | |||||
shutil.rmtree(tdir) | |||||
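# Typical use (sketch):
#   with new_tempdir() as tdir:
#       (tdir / 'scratch.json').write_text('{}')
#   # the directory and its contents are removed when the block exits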
def find_exe(name: str) -> Optional[Path]: | |||||
""" | |||||
Find a file on the system by searching through the PATH environment variable. | |||||
""" | |||||
sep = ';' if os.name == 'nt' else ':' | |||||
paths = os.environ['PATH'].split(sep) | |||||
exts = os.environ['PATHEXT'].split(';') if os.name == 'nt' else [''] | |||||
for dirpath, ext in itertools.product(paths, exts): | |||||
cand = Path(dirpath) / (name + ext) | |||||
if cand.is_file(): | |||||
return cand | |||||
return None |
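# For example, find_exe('ccache') yields something like Path('/usr/bin/ccache') (path
# purely illustrative) when ccache is on PATH, and None when it is not.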
{ | |||||
"$schema": "../res/toolchain-schema.json", | |||||
"compiler_id": "gnu", | |||||
"c_compiler": "gcc-9", | |||||
"cxx_compiler": "g++-9", | |||||
"warning_flags": [ | |||||
"-Werror", | |||||
], | |||||
"flags": [ | |||||
"-I/usr/local/opt/openssl@1.1/include", | |||||
// NOTE: ASan/UBSan misbehave on macOS, so we aren't ready to use them in CI | |||||
// "-fsanitize=address,undefined", | |||||
], | |||||
"cxx_flags": [ | |||||
"-fconcepts", | |||||
"-std=c++2a", | |||||
], | |||||
"link_flags": [ | |||||
// "-fsanitize=address,undefined", | |||||
"/usr/local/opt/openssl@1.1/lib/libssl.a", | |||||
"/usr/local/opt/openssl@1.1/lib/libcrypto.a", | |||||
], | |||||
"debug": true | |||||
} |
"-Werror", | "-Werror", | ||||
], | ], | ||||
"flags": [ | "flags": [ | ||||
"-fsanitize=address,undefined" | |||||
"-fsanitize=address,undefined", | |||||
], | ], | ||||
"cxx_flags": [ | "cxx_flags": [ | ||||
"-fconcepts", | "-fconcepts", | ||||
"-std=c++2a", | "-std=c++2a", | ||||
], | ], | ||||
"link_flags": [ | "link_flags": [ | ||||
"-fuse-ld=lld", | |||||
"-fsanitize=address,undefined", | "-fsanitize=address,undefined", | ||||
"-l:libssl.a", | "-l:libssl.a", | ||||
"-l:libcrypto.a", | "-l:libcrypto.a", | ||||
"-ldl", | "-ldl", | ||||
], | ], | ||||
"debug": true, | |||||
"compiler_launcher": "ccache" | |||||
"debug": true | |||||
} | } |
REPO_ROOT = Path(__file__).resolve().absolute().parent.parent | REPO_ROOT = Path(__file__).resolve().absolute().parent.parent | ||||
def dds_exe() -> Path: | |||||
def _get_dds_exe() -> Path: | |||||
suffix = '.exe' if os.name == 'nt' else '' | suffix = '.exe' if os.name == 'nt' else '' | ||||
dirs = [REPO_ROOT / '_build', REPO_ROOT / '_prebuilt'] | dirs = [REPO_ROOT / '_build', REPO_ROOT / '_prebuilt'] | ||||
for d in dirs: | for d in dirs: | ||||
@classmethod | @classmethod | ||||
def parse_data(cls: Type[T], data: Any) -> T: | def parse_data(cls: Type[T], data: Any) -> T: | ||||
return cls( | |||||
frm=data.pop('from'), | |||||
to=data.pop('to'), | |||||
include=data.pop('include', []), | |||||
strip_components=data.pop('strip-components', 0), | |||||
exclude=data.pop('exclude', [])) | |||||
return cls(frm=data.pop('from'), | |||||
to=data.pop('to'), | |||||
include=data.pop('include', []), | |||||
strip_components=data.pop('strip-components', 0), | |||||
exclude=data.pop('exclude', [])) | |||||
def apply_to(self, p: Path) -> None: | def apply_to(self, p: Path) -> None: | ||||
src = p / self.frm | src = p / self.frm | ||||
deps = data.pop('depends', []) | deps = data.pop('depends', []) | ||||
desc = data.pop('description', '[No description]') | desc = data.pop('description', '[No description]') | ||||
remote = ForeignPackage.parse_data(data.pop('remote')) | remote = ForeignPackage.parse_data(data.pop('remote')) | ||||
return SpecPackage( | |||||
name, | |||||
VersionInfo.parse(version), | |||||
description=desc, | |||||
depends=[Dependency.parse(d) for d in deps], | |||||
remote=remote) | |||||
return SpecPackage(name, | |||||
VersionInfo.parse(version), | |||||
description=desc, | |||||
depends=[Dependency.parse(d) for d in deps], | |||||
remote=remote) | |||||
def iter_spec(path: Path) -> Iterable[SpecPackage]: | def iter_spec(path: Path) -> Iterable[SpecPackage]: | ||||
@contextmanager | @contextmanager | ||||
def spec_as_local_tgz(spec: SpecPackage) -> Iterator[Path]: | |||||
def spec_as_local_tgz(dds_exe: Path, spec: SpecPackage) -> Iterator[Path]: | |||||
with spec.remote.make_local_dir(spec.name, spec.version) as clone_dir: | with spec.remote.make_local_dir(spec.name, spec.version) as clone_dir: | ||||
out_tgz = clone_dir / 'sdist.tgz' | out_tgz = clone_dir / 'sdist.tgz' | ||||
check_call([str(dds_exe()), 'sdist', 'create', f'--project-dir={clone_dir}', f'--out={out_tgz}']) | |||||
check_call([str(dds_exe), 'sdist', 'create', f'--project-dir={clone_dir}', f'--out={out_tgz}']) | |||||
yield out_tgz | yield out_tgz | ||||
class Repository: | class Repository: | ||||
def __init__(self, path: Path) -> None: | |||||
def __init__(self, dds_exe: Path, path: Path) -> None: | |||||
self._path = path | self._path = path | ||||
self._dds_exe = dds_exe | |||||
self._import_lock = Lock() | self._import_lock = Lock() | ||||
@property | @property | ||||
return self._path / 'pkg' | return self._path / 'pkg' | ||||
@classmethod | @classmethod | ||||
def create(cls, dirpath: Path, name: str) -> 'Repository': | |||||
check_call([str(dds_exe()), 'repoman', 'init', str(dirpath), f'--name={name}']) | |||||
return Repository(dirpath) | |||||
def create(cls, dds_exe: Path, dirpath: Path, name: str) -> 'Repository': | |||||
check_call([str(dds_exe), 'repoman', 'init', str(dirpath), f'--name={name}']) | |||||
return Repository(dds_exe, dirpath) | |||||
@classmethod | @classmethod | ||||
def open(cls, dirpath: Path) -> 'Repository': | |||||
return Repository(dirpath) | |||||
def open(cls, dds_exe: Path, dirpath: Path) -> 'Repository': | |||||
return Repository(dds_exe, dirpath) | |||||
def import_tgz(self, path: Path) -> None: | def import_tgz(self, path: Path) -> None: | ||||
check_call([str(dds_exe()), 'repoman', 'import', str(self._path), str(path)]) | |||||
check_call([str(self._dds_exe), 'repoman', 'import', str(self._path), str(path)]) | |||||
def remove(self, name: str) -> None: | def remove(self, name: str) -> None: | ||||
check_call([str(dds_exe()), 'repoman', 'remove', str(self._path), name]) | |||||
check_call([str(self._dds_exe), 'repoman', 'remove', str(self._path), name]) | |||||
def spec_import(self, spec: Path) -> None: | def spec_import(self, spec: Path) -> None: | ||||
all_specs = iter_spec(spec) | all_specs = iter_spec(spec) | ||||
def _get_and_import(self, spec: SpecPackage) -> None: | def _get_and_import(self, spec: SpecPackage) -> None: | ||||
print(f'Import: {spec.name}@{spec.version}') | print(f'Import: {spec.name}@{spec.version}') | ||||
with spec_as_local_tgz(spec) as tgz: | |||||
with spec_as_local_tgz(self._dds_exe, spec) as tgz: | |||||
with self._import_lock: | with self._import_lock: | ||||
self.import_tgz(tgz) | self.import_tgz(tgz) | ||||
class Arguments(Protocol): | class Arguments(Protocol): | ||||
dir: Path | dir: Path | ||||
spec: Path | spec: Path | ||||
dds_exe: Path | |||||
def main(argv: Sequence[str]) -> int: | def main(argv: Sequence[str]) -> int: | ||||
parser = argparse.ArgumentParser() | parser = argparse.ArgumentParser() | ||||
parser.add_argument('--dds-exe', type=Path, help='Path to the dds executable to use', default=_get_dds_exe()) | |||||
parser.add_argument('--dir', '-d', help='Path to a repository to manage', required=True, type=Path) | parser.add_argument('--dir', '-d', help='Path to a repository to manage', required=True, type=Path) | ||||
parser.add_argument( | |||||
'--spec', | |||||
metavar='<spec-path>', | |||||
type=Path, | |||||
required=True, | |||||
help='Provide a JSON document specifying how to obtain and import some packages') | |||||
parser.add_argument('--spec', | |||||
metavar='<spec-path>', | |||||
type=Path, | |||||
required=True, | |||||
help='Provide a JSON document specifying how to obtain and import some packages') | |||||
args: Arguments = parser.parse_args(argv) | args: Arguments = parser.parse_args(argv) | ||||
repo = Repository.open(args.dir) | |||||
repo = Repository.open(args.dds_exe, args.dir) | |||||
repo.spec_import(args.spec) | repo.spec_import(args.spec) | ||||
return 0 | return 0 |
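# Example invocation (the script name and repository paths are illustrative; the flag
# names come from the parser above):
#   python <this-script>.py --dds-exe=_build/dds --dir=_local-repo --spec=packages.json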
{ | |||||
"$schema": "../res/toolchain-schema.json", | |||||
"compiler_id": "msvc", | |||||
"flags": [ | |||||
"/Zc:preprocessor", | |||||
"/Zc:__cplusplus", | |||||
"/std:c++latest", | |||||
"/DNOMINMAX", | |||||
// Work around quirks in LEAF | |||||
"/DBOOST_LEAF_CONSTEXPR=", | |||||
"/DBOOST_LEAF_STD_UNCAUGHT_EXCEPTIONS=1", | |||||
// OpenSSL headers: | |||||
"/Iexternal/OpenSSL/include", | |||||
], | |||||
"link_flags": [ | |||||
"rpcrt4.lib", | |||||
// Networking: | |||||
"Ws2_32.lib", | |||||
// Deps for OpenSSL: | |||||
"AdvApi32.lib", | |||||
"Crypt32.lib", | |||||
"User32.lib", | |||||
// Link in our external OpenSSL: | |||||
"/link", | |||||
"/LibPath:external/OpenSSL/lib", | |||||
"libssl.lib", | |||||
"libcrypto.lib", | |||||
], | |||||
"debug": true | |||||
} |
"libssl.lib", | "libssl.lib", | ||||
"libcrypto.lib", | "libcrypto.lib", | ||||
], | ], | ||||
// "debug": true, | |||||
"optimize": true | "optimize": true | ||||
} | } |
#!/usr/bin/env python3 | |||||
import argparse | |||||
from pathlib import Path | |||||
from typing import List, NamedTuple, Iterable, Optional | |||||
import shutil | |||||
import subprocess | |||||
import sys | |||||
from dds_ci import cli, proc | |||||
ROOT = Path(__file__).parent.parent.absolute() | |||||
def dds_build(exe: Path, *, toolchain: str, more_flags: proc.CommandLine = ()): | |||||
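# Build through a temporary copy of the executable: Windows refuses to let a running
# binary be replaced, and this build may overwrite the very 'dds' it was invoked with.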
new_exe = ROOT / '_dds.bootstrap-test.exe' | |||||
shutil.copy2(exe, new_exe) | |||||
try: | |||||
proc.check_run(new_exe, 'build', f'--toolchain={toolchain}', more_flags) | |||||
finally: | |||||
new_exe.unlink() | |||||
def self_build(exe: Path, | |||||
*, | |||||
toolchain: str, | |||||
lmi_path: Optional[Path] = None, | |||||
cat_path: Path = Path('_build/catalog.db'), | |||||
cat_json_path: Path = Path('catalog.json'), | |||||
dds_flags: proc.CommandLine = ()): | |||||
# Import the package catalog (from the JSON file) into the catalog database that the build will use | |||||
proc.check_run( | |||||
exe, | |||||
'catalog', | |||||
'import', | |||||
f'--catalog={cat_path}', | |||||
f'--json={cat_json_path}', | |||||
) | |||||
dds_build( | |||||
exe, | |||||
toolchain=toolchain, | |||||
more_flags=[ | |||||
('-I', lmi_path) if lmi_path else (), | |||||
f'--repo-dir={ROOT}/_build/ci-repo', | |||||
f'--catalog={cat_path}', | |||||
*dds_flags, | |||||
], | |||||
) | |||||
def main(argv: List[str]) -> int: | |||||
parser = argparse.ArgumentParser() | |||||
cli.add_tc_arg(parser) | |||||
cli.add_dds_exe_arg(parser) | |||||
args = parser.parse_args(argv) | |||||
self_build(Path(args.exe), toolchain=args.toolchain, dds_flags=['--full']) | |||||
return 0 | |||||
if __name__ == "__main__": | |||||
sys.exit(main(sys.argv[1:])) |
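# Programmatic usage sketch (the executable path is illustrative; the toolchain file is
# one of the audit toolchains referenced elsewhere in this repo's tools/ directory):
#   self_build(Path('_prebuilt/dds'), toolchain='tools/gcc-9-audit.jsonc', dds_flags=['--full'])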