echo "Site generated at _site/" | echo "Site generated at _site/" | ||||
# Type-check and lint the Python tooling and the test suite.
# NOTE: the find pattern must be quoted, otherwise the shell may expand
# `*.py` against the current directory before `find` ever sees it.
py-check:
	poetry run mypy tools/dds_ci $(shell find tests/ -name '*.py')
	poetry run pylint tools/dds_ci $(shell find tests/ -name '*.py')

# Auto-format the Python sources.
format:
	poetry run dds-format
# Test-suite package root. The repository's `tools/` directory is appended to
# sys.path so the `dds_ci` helper package is importable from the tests.
import sys
from pathlib import Path

sys.path.append(str(Path(__file__).absolute().parent.parent / 'tools'))

from .dds import DDS, DDSFixtureParams, scoped_dds, dds_fixture_conf, dds_fixture_conf_1
from .http import http_repo, RepoFixture

# Public API re-exported for test modules (`from tests import DDS`, ...).
__all__ = (
    'DDS',
    'DDSFixtureParams',
    'scoped_dds',
    'dds_fixture_conf',
    'dds_fixture_conf_1',
    'http_repo',
    'RepoFixture',
)
from tests import DDS | |||||
from tests.fileutil import set_contents | |||||
from dds_ci import paths | |||||
def test_lib_with_just_app(dds: DDS) -> None:
    """A project containing only a `*.main.cpp` file should yield an executable."""
    main_src = dds.source_root / 'src/foo.main.cpp'
    dds.scope.enter_context(set_contents(main_src, b'int main() {}'))
    dds.build()
    produced_exe = dds.build_dir / f'foo{paths.EXE_SUFFIX}'
    assert produced_exe.is_file()
from typing import ContextManager | |||||
from pathlib import Path | |||||
from tests import DDS | |||||
from tests.fileutil import ensure_dir, set_contents | |||||
def test_build_empty(dds: DDS) -> None:
    """Building a freshly created, empty source root should succeed."""
    root = dds.source_root
    assert not root.exists()
    dds.scope.enter_context(ensure_dir(root))
    dds.build()
def test_build_simple(dds: DDS) -> None:
    """A single translation unit with one free function builds cleanly."""
    src_file = dds.source_root / 'src/f.cpp'
    dds.scope.enter_context(set_contents(src_file, b'void foo() {}'))
    dds.build()
def basic_pkg_dds(dds: DDS) -> ContextManager[Path]:
    """Return a context manager that writes a minimal package.json5 into the project root."""
    manifest_content = b'''
    {
        name: 'test-pkg',
        version: '0.2.2',
        namespace: 'test',
    }
    '''
    return set_contents(dds.source_root / 'package.json5', manifest_content)
def test_empty_with_pkg_dds(dds: DDS) -> None:
    """A project with only a package manifest (no sources) should build."""
    manifest_ctx = basic_pkg_dds(dds)
    dds.scope.enter_context(manifest_ctx)
    dds.build()
def test_empty_with_lib_dds(dds: DDS) -> None:
    """A manifest-only project should also build when treated as a library."""
    manifest_ctx = basic_pkg_dds(dds)
    dds.scope.enter_context(manifest_ctx)
    dds.build()
def test_empty_sdist_create(dds: DDS) -> None:
    """`sdist create` should work on a manifest-only project."""
    manifest_ctx = basic_pkg_dds(dds)
    dds.scope.enter_context(manifest_ctx)
    dds.sdist_create()
def test_empty_sdist_export(dds: DDS) -> None:
    """`sdist export` should work on a manifest-only project."""
    manifest_ctx = basic_pkg_dds(dds)
    dds.scope.enter_context(manifest_ctx)
    dds.sdist_export()
from contextlib import ExitStack | |||||
from tests import DDS | |||||
def test_simple_lib(dds: DDS, scope: ExitStack) -> None:
    """Build a one-source library with explicit library and package manifests."""
    project_files = (
        ('src/foo.cpp', b'int the_answer() { return 42; }'),
        ('library.json5', b'''{
            name: 'TestLibrary',
        }'''),
        ('package.json5', b'''{
            name: 'TestProject',
            version: '0.0.0',
            namespace: 'test',
        }'''),
    )
    for relpath, content in project_files:
        scope.enter_context(dds.set_contents(relpath, content))

    dds.build(tests=True, apps=False, warnings=False)
    # A compilation database and a library named after the manifest must appear.
    assert (dds.build_dir / 'compile_commands.json').is_file()
    assert list(dds.build_dir.glob('libTestLibrary*')) != []
from contextlib import ExitStack | |||||
from tests import DDS | |||||
from tests.fileutil import set_contents | |||||
from dds_ci import paths | |||||
def test_lib_with_just_test(dds: DDS, scope: ExitStack) -> None:
    """A project containing only a `*.test.cpp` file should produce a test executable."""
    test_src = dds.source_root / 'src/foo.test.cpp'
    scope.enter_context(set_contents(test_src, b'int main() {}'))
    dds.build(tests=True, apps=False, warnings=False)
    test_exe = dds.build_dir / f'test/foo{paths.EXE_SUFFIX}'
    assert test_exe.is_file()
from tests import DDS | |||||
from tests.fileutil import ensure_dir | |||||
def test_create_catalog(dds: DDS) -> None:
    """`catalog create` should produce the catalog database file."""
    dds.scope.enter_context(ensure_dir(dds.build_dir))
    dds.catalog_create()
    db_path = dds.catalog_path
    assert db_path.is_file()
from tests.fileutil import ensure_dir | |||||
from tests import DDS | |||||
from tests.http import RepoFixture | |||||
def test_get(dds: DDS, http_repo: RepoFixture) -> None:
    """`catalog get` fetches a git-remote package and materializes its source dir."""
    catalog = {
        'version': 2,
        'packages': {
            'neo-sqlite3': {
                '0.3.0': {
                    'remote': {
                        'git': {
                            'url': 'https://github.com/vector-of-bool/neo-sqlite3.git',
                            'ref': '0.3.0',
                        }
                    }
                }
            }
        },
    }
    http_repo.import_json_data(catalog)
    dds.scope.enter_context(ensure_dir(dds.source_root))
    dds.repo_add(http_repo.url)
    dds.catalog_get('neo-sqlite3@0.3.0')
    fetched = dds.scratch_dir / 'neo-sqlite3@0.3.0'
    assert fetched.is_dir()
    assert (fetched / 'package.jsonc').is_file()
def test_get_http(dds: DDS, http_repo: RepoFixture) -> None:
    """`catalog get` fetches an http-remote archive with an auto-lib."""
    catalog = {
        'packages': {
            'cmcstl2': {
                '2020.2.24': {
                    'remote': {
                        'http': {
                            'url':
                            'https://github.com/CaseyCarter/cmcstl2/archive/684a96d527e4dc733897255c0177b784dc280980.tar.gz?dds_lm=cmc/stl2;',
                        },
                        'auto-lib': 'cmc/stl2',
                    }
                },
            },
        },
    }
    http_repo.import_json_data(catalog)
    dds.scope.enter_context(ensure_dir(dds.source_root))
    dds.repo_add(http_repo.url)
    dds.catalog_get('cmcstl2@2020.2.24')
    assert dds.scratch_dir.joinpath('cmcstl2@2020.2.24/include').is_dir()
# NOTE(review): this region contained unresolved merge residue — the old
# `dds`-fixture version and the new `ProjectOpener` version of both tests were
# interleaved, and `test_config_template` was defined twice. Resolved to the
# new ProjectOpener-based implementations.
from time import sleep

from dds_ci.testing import ProjectOpener


def test_config_template(project_opener: ProjectOpener) -> None:
    """The config-template output is generated once and left untouched by rebuilds."""
    proj = project_opener.open('copy_only')
    generated_fpath = proj.build_root / '__dds/gen/info.hpp'
    assert not generated_fpath.is_file()
    proj.build()
    assert generated_fpath.is_file()

    # Re-running the build must not rewrite the generated file: that would
    # invalidate the cache and force a false rebuild of dependents.
    start_time = generated_fpath.stat().st_mtime
    sleep(0.1)  # Wait just long enough to register a new stamp time
    proj.build()
    new_time = generated_fpath.stat().st_mtime
    assert new_time == start_time


def test_simple_substitution(project_opener: ProjectOpener) -> None:
    """A project using simple config-template substitutions builds successfully."""
    simple = project_opener.open('simple')
    simple.build()
from contextlib import ExitStack | |||||
from typing import Any, Callable, Iterator | |||||
from typing_extensions import Protocol | |||||
from typing import Any | |||||
from pathlib import Path | from pathlib import Path | ||||
import shutil | |||||
from subprocess import check_call | |||||
import pytest | import pytest | ||||
from _pytest.config import Config as PyTestConfig | |||||
from dds_ci import paths | |||||
from tests import scoped_dds, DDSFixtureParams, DDS | |||||
# Exposes the HTTP fixtures: | |||||
from .http import http_repo, http_tmp_dir_server # pylint: disable=unused-import | |||||
class TempPathFactory(Protocol):
    """Structural type for pytest's session-scoped `tmp_path_factory` fixture."""
    def mktemp(self, basename: str, numbered: bool = True) -> Path:
        ...
class PyTestConfig(Protocol):
    """Structural type exposing the part of pytest's Config we use."""
    def getoption(self, name: str) -> Any:
        ...
class TestRequest(Protocol):
    """Structural type for the pytest `request` fixture as used by `dds` below."""
    # Name of the fixture being instantiated
    fixturename: str
    # Fixture scope ('function', 'session', ...)
    scope: str
    config: PyTestConfig
    # Path of the test module that requested the fixture
    fspath: str
    # The test function itself
    function: Callable[..., Any]
    # Indirect parametrization payload (set via @dds_fixture_conf)
    param: DDSFixtureParams
@pytest.fixture(scope='session')
def dds_exe(pytestconfig: PyTestConfig) -> Path:
    """Path of the dds executable under test: CLI override or the locally built binary."""
    chosen = pytestconfig.getoption('--dds-exe')
    return Path(chosen or paths.CUR_BUILT_DDS)
# `pytest.yield_fixture` is deprecated (removed in pytest 6); plain `fixture`
# has supported yield-style fixtures since pytest 3, and the correct Iterator
# return annotation removes the need for `# type: ignore`.
@pytest.fixture(scope='session')
def dds_pizza_catalog(dds_exe: Path, tmp_path_factory: TempPathFactory) -> Iterator[Path]:
    """Session-scoped catalog database pre-populated from the dds.pizza repository."""
    tmpdir: Path = tmp_path_factory.mktemp(basename='dds-catalog')
    cat_path = tmpdir / 'catalog.db'
    check_call([str(dds_exe), 'repo', 'add', 'https://dds.pizza/repo', '--update', f'--catalog={cat_path}'])
    yield cat_path
# `pytest.yield_fixture` is deprecated (removed in pytest 6); use `fixture`.
@pytest.fixture()
def dds(request: TestRequest, dds_exe: Path, tmp_path: Path, worker_id: str, scope: ExitStack) -> Iterator[DDS]:
    """
    Per-test DDS wrapper.

    The project directory defaults to `<test-dir>/project` and can be
    redirected with @dds_fixture_conf / @dds_fixture_conf_1. Scratch state is
    cleaned up when the per-test `scope` ExitStack unwinds.
    """
    test_source_dir = Path(request.fspath).absolute().parent
    test_root = test_source_dir
    # If we are running in parallel (pytest-xdist), use a unique directory as
    # scratch space so that we aren't stomping on anyone else
    if worker_id != 'master':
        test_root = tmp_path / request.function.__name__
        shutil.copytree(test_source_dir, test_root)
    project_dir = test_root / 'project'
    # Check if we have a special configuration (indirect parametrization)
    if hasattr(request, 'param'):
        assert isinstance(request.param, DDSFixtureParams), \
            ('Using the `dds` fixture requires passing in indirect '
             'params. Use @dds_fixture_conf to configure the fixture')
        params: DDSFixtureParams = request.param
        project_dir = test_root / params.subdir
    # Create the instance. Auto-clean when we're done
    yield scope.enter_context(scoped_dds(dds_exe, test_root, project_dir))
# `pytest.yield_fixture` is deprecated (removed in pytest 6); use `fixture`.
@pytest.fixture()
def scope() -> Iterator[ExitStack]:
    """Per-test ExitStack; its registered contexts/callbacks run at teardown."""
    with ExitStack() as stack:
        yield stack
# Ensure the fixtures are registered with PyTest: | |||||
from dds_ci.testing.fixtures import * # pylint: disable=wildcard-import,unused-wildcard-import | |||||
from dds_ci.testing.http import * # pylint: disable=wildcard-import,unused-wildcard-import | |||||
def pytest_addoption(parser: Any) -> None: | def pytest_addoption(parser: Any) -> None: |
import os | |||||
import itertools | |||||
from contextlib import contextmanager, ExitStack | |||||
from pathlib import Path | |||||
from typing import Union, NamedTuple, ContextManager, Optional, Iterator, TypeVar | |||||
import shutil | |||||
import pytest | |||||
import _pytest | |||||
from dds_ci import proc, toolchain as tc_mod | |||||
from . import fileutil | |||||
T = TypeVar('T') | |||||
class DDS:
    """
    Wrapper that drives a `dds` executable for integration tests.

    All mutable state (package repo, catalog DB, build output) lives under
    `scratch_dir`, which is removed when the owning `scope` ExitStack unwinds.
    """
    def __init__(self, dds_exe: Path, test_dir: Path, project_dir: Path, scope: ExitStack) -> None:
        # Path of the dds executable under test
        self.dds_exe = dds_exe
        # Directory holding the test's support files (toolchains, catalogs)
        self.test_dir = test_dir
        # Root of the project dds operates on
        self.source_root = project_dir
        # Non-ASCII component deliberately exercises dds's Unicode path handling
        self.scratch_dir = project_dir / '_test_scratch/Ю́рий Алексе́евич Гага́рин'
        self.scope = scope
        # Remove the scratch dir automatically when the scope unwinds
        self.scope.callback(self.cleanup)

    @property
    def repo_dir(self) -> Path:
        """Local package repository directory."""
        return self.scratch_dir / 'repo'

    @property
    def catalog_path(self) -> Path:
        """Path of the catalog database file."""
        return self.scratch_dir / 'catalog.db'

    @property
    def deps_build_dir(self) -> Path:
        """Output directory used by `build-deps`."""
        return self.scratch_dir / 'deps-build'

    @property
    def build_dir(self) -> Path:
        """Output directory used by `build`."""
        return self.scratch_dir / 'build'

    @property
    def lmi_path(self) -> Path:
        """Path of the generated libman index."""
        return self.scratch_dir / 'INDEX.lmi'

    def cleanup(self) -> None:
        """Delete all scratch state created by this instance."""
        if self.scratch_dir.exists():
            shutil.rmtree(self.scratch_dir)

    def run_unchecked(self, cmd: proc.CommandLine, *, cwd: Optional[Path] = None) -> proc.ProcessResult:
        """Run dds with `cmd`, never raising on a nonzero exit status."""
        full_cmd = itertools.chain([self.dds_exe, '-ltrace'], cmd)
        # Defaults to the project root as the working directory
        return proc.run(full_cmd, cwd=cwd or self.source_root)  # type: ignore

    def run(self, cmd: proc.CommandLine, *, cwd: Optional[Path] = None, check: bool = True) -> proc.ProcessResult:
        """Run dds with `cmd`; raises on failure when `check` is True."""
        full_cmd = itertools.chain([self.dds_exe, '-ltrace'], cmd)
        return proc.run(full_cmd, cwd=cwd, check=check)  # type: ignore

    @property
    def repo_dir_arg(self) -> str:
        """`--repo-dir=...` CLI argument for this instance."""
        return f'--repo-dir={self.repo_dir}'

    @property
    def project_dir_arg(self) -> str:
        """`--project-dir=...` CLI argument for this instance."""
        return f'--project-dir={self.source_root}'

    @property
    def catalog_path_arg(self) -> str:
        """`--catalog=...` CLI argument for this instance."""
        return f'--catalog={self.catalog_path}'

    def build_deps(self, args: proc.CommandLine, *, toolchain: Optional[str] = None) -> proc.ProcessResult:
        """Run `dds build-deps` with this instance's catalog/repo and the given deps args."""
        return self.run([
            'build-deps',
            f'--toolchain={toolchain or tc_mod.get_default_test_toolchain()}',
            self.catalog_path_arg,
            self.repo_dir_arg,
            f'--out={self.deps_build_dir}',
            f'--lmi-path={self.lmi_path}',
            args,
        ])

    def repo_add(self, url: str) -> None:
        """Register a remote repository URL in the catalog and update it."""
        self.run(['repo', 'add', url, '--update', self.catalog_path_arg])

    def build(self,
              *,
              toolchain: Optional[str] = None,
              apps: bool = True,
              warnings: bool = True,
              catalog_path: Optional[Path] = None,
              tests: bool = True,
              more_args: proc.CommandLine = (),
              check: bool = True) -> proc.ProcessResult:
        """
        Run `dds build` on the project.

        :param toolchain: Toolchain file to use (default test toolchain if None).
        :param apps/tests/warnings: Toggle the corresponding `--no-*` flags.
        :param catalog_path: Alternate catalog DB (defaults to this instance's).
        :param more_args: Extra CLI arguments appended verbatim.
        :param check: Raise if the build fails.
        """
        catalog_path = catalog_path or self.catalog_path
        return self.run(
            [
                'build',
                f'--out={self.build_dir}',
                f'--toolchain={toolchain or tc_mod.get_default_test_toolchain()}',
                f'--catalog={catalog_path}',
                f'--repo-dir={self.repo_dir}',
                ['--no-tests'] if not tests else [],
                ['--no-apps'] if not apps else [],
                ['--no-warnings'] if not warnings else [],
                self.project_dir_arg,
                more_args,
            ],
            check=check,
        )

    def sdist_create(self) -> proc.ProcessResult:
        """Run `dds sdist create` with the build dir as the working directory."""
        self.build_dir.mkdir(exist_ok=True, parents=True)
        return self.run(['sdist', 'create', self.project_dir_arg], cwd=self.build_dir)

    def sdist_export(self) -> proc.ProcessResult:
        """Run `dds sdist export` into this instance's repo."""
        return self.run([
            'sdist',
            'export',
            self.project_dir_arg,
            self.repo_dir_arg,
        ])

    def repo_import(self, sdist: Path) -> proc.ProcessResult:
        """Import an sdist archive into this instance's repo."""
        return self.run(['repo', self.repo_dir_arg, 'import', sdist])

    def catalog_create(self) -> proc.ProcessResult:
        """Run `dds catalog create` to initialize the catalog database."""
        self.scratch_dir.mkdir(parents=True, exist_ok=True)
        return self.run(['catalog', 'create', f'--catalog={self.catalog_path}'], cwd=self.test_dir)

    def catalog_get(self, req: str) -> proc.ProcessResult:
        """Fetch a package (e.g. 'name@version') into the scratch directory."""
        return self.run([
            'catalog',
            'get',
            f'--catalog={self.catalog_path}',
            f'--out-dir={self.scratch_dir}',
            req,
        ])

    def set_contents(self, path: Union[str, Path], content: bytes) -> ContextManager[Path]:
        """Write `content` at `path` (relative to the source root); restores on exit."""
        return fileutil.set_contents(self.source_root / path, content)
@contextmanager
def scoped_dds(dds_exe: Path, test_dir: Path, project_dir: Path) -> Iterator[DDS]:
    """Yield a DDS wrapper whose scratch state is cleaned up when the context exits."""
    exe = dds_exe.with_suffix('.exe') if os.name == 'nt' else dds_exe
    with ExitStack() as stack:
        yield DDS(exe, test_dir, project_dir, stack)
class DDSFixtureParams(NamedTuple):
    """Indirect parametrization payload for the `dds` fixture."""
    # Identifier used in the pytest test id
    ident: str
    # Project subdirectory (relative to the test dir) used as the source root
    subdir: Union[Path, str]
def dds_fixture_conf(*argsets: DDSFixtureParams) -> _pytest.mark.MarkDecorator:
    """Parametrize the `dds` fixture with one or more project configurations."""
    params = [*argsets]
    idents = [p.ident for p in params]
    return pytest.mark.parametrize('dds', params, indirect=True, ids=idents)
def dds_fixture_conf_1(subdir: Union[Path, str]) -> _pytest.mark.MarkDecorator:
    """Parametrize the `dds` fixture with a single project subdirectory."""
    only = DDSFixtureParams(ident='only', subdir=subdir)
    return pytest.mark.parametrize('dds', [only], indirect=True, ids=['.'])
{ | |||||
"version": 2, | |||||
"packages": { | |||||
"neo-fun": { | |||||
"0.3.0": { | |||||
"remote": { | |||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/neo-fun.git", | |||||
"ref": "0.3.0" | |||||
} | |||||
} | |||||
} | |||||
} | |||||
} | |||||
} |
{ | |||||
depends: [ | |||||
'neo-fun+0.3.0' | |||||
], | |||||
} |
from tests import DDS | |||||
from tests.http import RepoFixture | |||||
def test_build_deps_from_file(dds: DDS, http_repo: RepoFixture) -> None:
    """`build-deps` resolves dependencies listed in a deps-file."""
    deps_dir = dds.deps_build_dir
    assert not deps_dir.is_dir()
    http_repo.import_json_file(dds.source_root / 'catalog.json')
    dds.repo_add(http_repo.url)
    dds.build_deps(['-d', dds.source_root / 'deps.json5'])
    assert (dds.scratch_dir / 'INDEX.lmi').is_file()
    assert (deps_dir / 'neo-fun@0.3.0').is_dir()
    assert (deps_dir / '_libman/neo-fun.lmp').is_file()
    assert (deps_dir / '_libman/neo/fun.lml').is_file()
def test_build_deps_from_cmd(dds: DDS, http_repo: RepoFixture) -> None:
    """`build-deps` resolves dependencies given directly on the command line."""
    deps_dir = dds.deps_build_dir
    assert not deps_dir.is_dir()
    http_repo.import_json_file(dds.source_root / 'catalog.json')
    dds.repo_add(http_repo.url)
    dds.build_deps(['neo-fun=0.3.0'])
    assert (dds.scratch_dir / 'INDEX.lmi').is_file()
    assert (deps_dir / 'neo-fun@0.3.0').is_dir()
    assert (deps_dir / '_libman/neo-fun.lmp').is_file()
    assert (deps_dir / '_libman/neo/fun.lml').is_file()
def test_multiple_deps(dds: DDS, http_repo: RepoFixture) -> None:
    """Overlapping version ranges of the same package resolve to a single version."""
    deps_dir = dds.deps_build_dir
    assert not deps_dir.is_dir()
    http_repo.import_json_file(dds.source_root / 'catalog.json')
    dds.repo_add(http_repo.url)
    dds.build_deps(['neo-fun^0.2.0', 'neo-fun~0.3.0'])
    assert (dds.scratch_dir / 'INDEX.lmi').is_file()
    assert (deps_dir / 'neo-fun@0.3.0').is_dir()
    assert (deps_dir / '_libman/neo-fun.lmp').is_file()
    assert (deps_dir / '_libman/neo/fun.lml').is_file()
import subprocess | |||||
from dds_ci import paths | |||||
from tests import DDS, DDSFixtureParams, dds_fixture_conf, dds_fixture_conf_1 | |||||
from tests.http import RepoFixture | |||||
# Run the decorated tests against both sample projects: one with a git-remote
# dependency and one with no dependencies at all.
dds_conf = dds_fixture_conf(
    DDSFixtureParams(ident='git-remote', subdir='git-remote'),
    DDSFixtureParams(ident='no-deps', subdir='no-deps'),
)
@dds_conf
def test_deps_build(dds: DDS, http_repo: RepoFixture) -> None:
    """A plain build should create the local package repository directory."""
    http_repo.import_json_file(dds.source_root / 'catalog.json')
    dds.repo_add(http_repo.url)
    repo = dds.repo_dir
    assert not repo.exists()
    dds.build()
    assert repo.exists(), '`Building` did not generate a repo directory'
@dds_fixture_conf_1('use-remote')
def test_use_nlohmann_json_remote(dds: DDS, http_repo: RepoFixture) -> None:
    """Fetch nlohmann-json from a remote, build an app against it, and run it."""
    http_repo.import_json_file(dds.source_root / 'catalog.json')
    dds.repo_add(http_repo.url)
    dds.build(apps=True)
    app_exe = dds.build_dir / f'app{paths.EXE_SUFFIX}'
    assert app_exe.is_file()
    subprocess.check_call([str(app_exe)])
{ | |||||
"version": 2, | |||||
"packages": { | |||||
"neo-fun": { | |||||
"0.3.2": { | |||||
"remote": { | |||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/neo-fun.git", | |||||
"ref": "0.3.2" | |||||
} | |||||
} | |||||
} | |||||
}, | |||||
"range-v3": { | |||||
"0.9.1": { | |||||
"remote": { | |||||
"auto-lib": "Niebler/range-v3", | |||||
"git": { | |||||
"url": "https://github.com/ericniebler/range-v3.git", | |||||
"ref": "0.9.1" | |||||
} | |||||
} | |||||
} | |||||
} | |||||
} | |||||
} |
{ | |||||
name: 'deps-test', | |||||
"namespace": "test", | |||||
version: '0.0.0', | |||||
depends: [ | |||||
'neo-fun@0.3.2', | |||||
'range-v3@0.9.1', | |||||
] | |||||
} |
{ | |||||
"version": 2, | |||||
"packages": {} | |||||
} |
{ | |||||
name: 'deps-test', | |||||
version: '0.0.0', | |||||
"namespace": "test", | |||||
} |
{ | |||||
"version": 2, | |||||
"packages": { | |||||
"cryptopp": { | |||||
"8.2.0": { | |||||
"remote": { | |||||
"git": { | |||||
"url": "https://github.com/weidai11/cryptopp.git", | |||||
"ref": "CRYPTOPP_8_2_0" | |||||
}, | |||||
"auto-lib": "cryptopp/cryptopp", | |||||
"transform": [ | |||||
{ | |||||
"move": { | |||||
"from": ".", | |||||
"to": "src/cryptopp", | |||||
"include": [ | |||||
"*.c", | |||||
"*.cpp", | |||||
"*.h" | |||||
] | |||||
} | |||||
} | |||||
] | |||||
} | |||||
} | |||||
} | |||||
} | |||||
} |
{ | |||||
name: 'use-cryptopp', | |||||
uses: ['cryptopp/cryptopp'] | |||||
} |
{ | |||||
name: 'use-cryptopp', | |||||
version: '1.0.0', | |||||
namespace: 'test', | |||||
depends: [ | |||||
'cryptopp@8.2.0' | |||||
] | |||||
} |
#include <cryptopp/osrng.h>

#include <string>

// Smoke test for a Crypto++ link: fill a 256-byte buffer from the OS RNG and
// check that it is not all zeroes. Exits 0 when some nonzero byte is found,
// 1 otherwise (an all-zero buffer would indicate the RNG call did nothing).
int main() {
    std::string arr;
    arr.resize(256);
    // `false` selects the non-blocking OS generator — assumed sufficient for
    // this smoke test; see CryptoPP::OS_GenerateRandomBlock docs to confirm.
    CryptoPP::OS_GenerateRandomBlock(false,
                                     reinterpret_cast<CryptoPP::byte*>(arr.data()),
                                     arr.size());
    for (auto b : arr) {
        if (b != '\x00') {
            return 0;
        }
    }
    return 1;
}
from tests import DDS | |||||
from tests.http import RepoFixture | |||||
import platform | |||||
import pytest | |||||
from dds_ci import proc, toolchain, paths | |||||
@pytest.mark.skipif(platform.system() == 'FreeBSD', reason='This one has trouble running on FreeBSD')
def test_get_build_use_cryptopp(dds: DDS, http_repo: RepoFixture) -> None:
    """Fetch, build, and execute a program that links against Crypto++."""
    http_repo.import_json_file(dds.source_root / 'catalog.json')
    dds.repo_add(http_repo.url)
    default_tc = toolchain.get_default_test_toolchain()
    tc_fname = 'gcc.tc.jsonc' if 'gcc' in default_tc.name else 'msvc.tc.jsonc'
    dds.build(toolchain=str(dds.test_dir / tc_fname))
    exe = (dds.build_dir / 'use-cryptopp').with_suffix(paths.EXE_SUFFIX)
    proc.check_run([exe])
{ | |||||
"version": 2, | |||||
"packages": { | |||||
"nlohmann-json": { | |||||
"3.7.1": { | |||||
"remote": { | |||||
"git": { | |||||
"url": "https://github.com/vector-of-bool/json.git", | |||||
"ref": "dds/3.7.1" | |||||
} | |||||
}, | |||||
"depends": [] | |||||
} | |||||
} | |||||
} | |||||
} |
{ | |||||
name: "dummy", | |||||
uses: [ | |||||
'nlohmann/json', | |||||
] | |||||
} |
{ | |||||
"name": "json-test", | |||||
"version": "0.0.0", | |||||
"namespace": "test", | |||||
"depends": [ | |||||
"nlohmann-json@3.7.1" | |||||
] | |||||
} |
#include <nlohmann/json.hpp>

// Smoke test for an nlohmann-json link: build a one-entry JSON object and
// exit 0 if it contains exactly one element, 12 otherwise.
int main() {
    nlohmann::json j = {
        {"foo", "bar"},
    };
    if (j.size() == 1) {
        return 0;
    }
    return 12;
}
from tests import DDS | |||||
from tests.http import RepoFixture | |||||
from dds_ci import proc, paths, toolchain | |||||
def test_get_build_use_spdlog(dds: DDS, http_repo: RepoFixture) -> None:
    """Fetch, build, and execute a program that links against spdlog."""
    http_repo.import_json_file(dds.source_root / 'catalog.json')
    dds.repo_add(http_repo.url)
    default_tc = toolchain.get_default_test_toolchain()
    tc_fname = 'gcc.tc.jsonc' if 'gcc' in default_tc.name else 'msvc.tc.jsonc'
    dds.build(toolchain=str(dds.test_dir / tc_fname), apps=True)
    exe = (dds.build_dir / 'use-spdlog').with_suffix(paths.EXE_SUFFIX)
    proc.check_run([exe])
from tests import DDS | |||||
from tests.fileutil import ensure_dir | |||||
def test_empty_dir(dds: DDS) -> None:
    """Building an existing-but-empty source directory should succeed."""
    root = dds.source_root
    with ensure_dir(root):
        dds.build()
from contextlib import contextmanager | |||||
from pathlib import Path | |||||
from typing import Iterator, Optional | |||||
import shutil | |||||
@contextmanager
def ensure_dir(dirpath: Path) -> Iterator[Path]:
    """
    Ensure that the given directory (and any parents) exist. When the context
    exits, removes any directories that were created.
    """
    dirpath = dirpath.absolute()
    if dirpath.exists():
        assert dirpath.is_dir(), f'Directory {dirpath} is a non-directory file'
        yield dirpath
        # Directory pre-existed: nothing for us to clean up.
        return

    # Create the directory and clean it up when we are done. Recursing on the
    # parent means any intermediate directories we create are also removed on
    # exit — innermost first, since the parent's context exits last.
    with ensure_dir(dirpath.parent):
        dirpath.mkdir()
        try:
            yield dirpath
        finally:
            shutil.rmtree(dirpath)
@contextmanager
def auto_delete(fpath: Path) -> Iterator[Path]:
    """Yield `fpath`, then delete the file (if it still exists) when the context exits."""
    try:
        yield fpath
    finally:
        if fpath.exists():
            fpath.unlink()
@contextmanager
def set_contents(fpath: Path, content: bytes) -> Iterator[Path]:
    """
    Write `content` to `fpath`, creating parent directories as needed. When
    the context exits, restore the file's previous content, or delete the
    file if it did not previously exist.
    """
    prev_content: Optional[bytes] = None
    if fpath.exists():
        # Fixed: the message was missing its `f` prefix and printed '{fpath}' literally.
        assert fpath.is_file(), f'File {fpath} exists and is not a regular file'
        prev_content = fpath.read_bytes()
    with ensure_dir(fpath.parent):
        fpath.write_bytes(content)
        try:
            yield fpath
        finally:
            if prev_content is None:
                fpath.unlink()
            else:
                fpath.write_bytes(prev_content)
{ | |||||
"compiler_id": "gnu", | |||||
"c_compiler": "gcc-9", | |||||
"cxx_compiler": "g++-9", | |||||
"cxx_version": "c++17", | |||||
"cxx_flags": [ | |||||
"-fconcepts" | |||||
] | |||||
} |
{ | |||||
"$schema": "../res/toolchain-schema.json", | |||||
"compiler_id": "msvc", | |||||
"cxx_flags": "/std:c++latest" | |||||
} |
from tests.dds import DDS, dds_fixture_conf_1 | |||||
@dds_fixture_conf_1('create')
def test_create_sdist(dds: DDS) -> None:
    """`sdist create` should produce a tarball named after the package."""
    dds.sdist_create()
    archive = dds.build_dir / 'foo@1.2.3.tar.gz'
    assert archive.is_file()
@dds_fixture_conf_1('create')
def test_export_sdist(dds: DDS) -> None:
    """`sdist export` should place the package directly into the repo."""
    dds.sdist_export()
    exported = dds.repo_dir / 'foo@1.2.3'
    assert exported.is_dir()
@dds_fixture_conf_1('create')
def test_import_sdist_archive(dds: DDS) -> None:
    """Importing a created sdist archive populates the repo, honoring exclusions."""
    repo_content_path = dds.repo_dir / 'foo@1.2.3'
    assert not repo_content_path.is_dir()
    dds.sdist_create()
    # Creating the archive alone must not touch the repo
    assert not repo_content_path.is_dir()
    dds.repo_import(dds.build_dir / 'foo@1.2.3.tar.gz')
    assert repo_content_path.is_dir()
    assert repo_content_path.joinpath('library.jsonc').is_file()
    # Excluded file will not be in the sdist:
    assert not repo_content_path.joinpath('other-file.txt').is_file()
import pytest | |||||
from subprocess import CalledProcessError | |||||
from dds_ci import paths | |||||
from dds_ci.testing import Project, PackageJSON | |||||
def test_build_empty(tmp_project: Project) -> None:
    """Check that dds can build a completely empty project directory without error."""
    tmp_project.build()
def test_lib_with_app_only(tmp_project: Project) -> None:
    """Test that dds can build a simple application"""
    tmp_project.write('src/foo.main.cpp', r'int main() {}')
    tmp_project.build()
    produced_exe = tmp_project.build_root / f'foo{paths.EXE_SUFFIX}'
    assert produced_exe.is_file()
def test_build_simple(tmp_project: Project) -> None:
    """
    Test that dds can build a simple library, and handles rebuilds correctly.
    """
    # A broken source must fail the build...
    tmp_project.write('src/f.cpp', 'syntax error')
    with pytest.raises(CalledProcessError):
        tmp_project.build()
    # ...a fixed source must then build...
    tmp_project.write('src/f.cpp', r'void f() {}')
    tmp_project.build()
    # ...and breaking it again must trigger a rebuild that fails again.
    tmp_project.write('src/f.cpp', r'bad again')
    with pytest.raises(CalledProcessError):
        tmp_project.build()
def test_simple_lib(tmp_project: Project) -> None:
    """
    Test that dds can build a simple library with some actual content, and that
    the manifest files will affect the output name.
    """
    tmp_project.write('src/foo.cpp', 'int the_answer() { return 42; }')
    tmp_project.package_json = {
        'name': 'TestProject',
        'version': '0.0.0',
        'namespace': 'test',
    }
    tmp_project.library_json = {'name': 'TestLibrary'}
    tmp_project.build()
    # The build must emit a compilation database and a library named after the
    # library manifest (not the package name).
    assert (tmp_project.build_root / 'compile_commands.json').is_file()
    assert list(tmp_project.build_root.glob('libTestLibrary.*')) != []
def test_lib_with_just_test(tmp_project: Project) -> None:
    """A project with only a `*.test.cpp` source should produce a test executable."""
    tmp_project.write('src/foo.test.cpp', 'int main() {}')
    tmp_project.build()
    test_exe = tmp_project.build_root / f'test/foo{paths.EXE_SUFFIX}'
    assert test_exe.is_file()
# Minimal valid package manifest shared by the manifest-only tests below.
TEST_PACKAGE: PackageJSON = {
    'name': 'test-pkg',
    'version': '0.2.2',
    'namespace': 'test',
}
def test_empty_with_pkg_dds(tmp_project: Project) -> None:
    """A manifest-only project (no sources) should build."""
    tmp_project.package_json = TEST_PACKAGE
    tmp_project.build()
def test_empty_with_lib_dds(tmp_project: Project) -> None:
    """A manifest-only project should also build as a library."""
    tmp_project.package_json = TEST_PACKAGE
    tmp_project.build()
def test_empty_sdist_create(tmp_project: Project) -> None:
    """`sdist create` should succeed for a manifest-only project."""
    tmp_project.package_json = TEST_PACKAGE
    tmp_project.sdist_create()
def test_empty_sdist_export(tmp_project: Project) -> None:
    """`sdist export` should succeed for a manifest-only project."""
    tmp_project.package_json = TEST_PACKAGE
    tmp_project.sdist_export()
import json | |||||
import pytest | |||||
from dds_ci.testing import RepoFixture, Project | |||||
# Catalog payload with a single git-remote package, used by the fixtures below.
SIMPLE_CATALOG = {
    "packages": {
        "neo-fun": {
            "0.3.0": {
                "remote": {
                    "git": {
                        "url": "https://github.com/vector-of-bool/neo-fun.git",
                        "ref": "0.3.0"
                    }
                }
            }
        }
    }
}
@pytest.fixture()
def test_repo(http_repo: RepoFixture) -> RepoFixture:
    """HTTP repo fixture pre-loaded with the simple neo-fun catalog."""
    http_repo.import_json_data(SIMPLE_CATALOG)
    return http_repo
@pytest.fixture()
def test_project(tmp_project: Project, test_repo: RepoFixture) -> Project:
    """Temporary project whose dds already has the test repo registered."""
    tmp_project.dds.repo_add(test_repo.url)
    return tmp_project
def test_from_file(test_project: Project) -> None:
    """build-deps using a file listing deps"""
    test_project.write('deps.json5', json.dumps({'depends': ['neo-fun+0.3.0']}))
    test_project.dds.build_deps(['-d', 'deps.json5'])
    root = test_project.root
    assert root.joinpath('INDEX.lmi').is_file()
    assert root.joinpath('_deps/neo-fun@0.3.0').is_dir()
    assert root.joinpath('_deps/_libman/neo-fun.lmp').is_file()
    assert root.joinpath('_deps/_libman/neo/fun.lml').is_file()
def test_from_cmd(test_project: Project) -> None:
    """build-deps using a command-line listing"""
    test_project.dds.build_deps(['neo-fun=0.3.0'])
    root = test_project.root
    assert root.joinpath('INDEX.lmi').is_file()
    assert root.joinpath('_deps/neo-fun@0.3.0').is_dir()
    assert root.joinpath('_deps/_libman/neo-fun.lmp').is_file()
    assert root.joinpath('_deps/_libman/neo/fun.lml').is_file()
def test_multiple_deps(test_project: Project) -> None:
    """build-deps with multiple deps resolves to a single version"""
    test_project.dds.build_deps(['neo-fun^0.2.0', 'neo-fun~0.3.0'])
    root = test_project.root
    assert root.joinpath('INDEX.lmi').is_file()
    assert root.joinpath('_deps/neo-fun@0.3.0').is_dir()
    assert root.joinpath('_deps/_libman/neo-fun.lmp').is_file()
    assert root.joinpath('_deps/_libman/neo/fun.lml').is_file()
from pathlib import Path | |||||
from dds_ci.testing import Project, RepoFixture | |||||
from dds_ci.dds import DDSWrapper | |||||
def test_catalog_create(dds_2: DDSWrapper, tmp_path: Path) -> None:
    """`catalog create` should materialize the catalog database file."""
    cat_db = tmp_path / 'catalog.db'
    assert not cat_db.is_file()
    dds_2.run(['catalog', 'create', '--catalog', cat_db])
    assert cat_db.is_file()
def test_catalog_get_git(http_repo: RepoFixture, tmp_project: Project) -> None:
    """`catalog get` fetches a git-remote package into the project root."""
    catalog = {
        'packages': {
            'neo-sqlite3': {
                '0.3.0': {
                    'remote': {
                        'git': {
                            'url': 'https://github.com/vector-of-bool/neo-sqlite3.git',
                            'ref': '0.3.0',
                        }
                    }
                }
            }
        },
    }
    http_repo.import_json_data(catalog)
    tmp_project.dds.repo_add(http_repo.url)
    tmp_project.dds.catalog_get('neo-sqlite3@0.3.0')
    fetched = tmp_project.root / 'neo-sqlite3@0.3.0'
    assert fetched.is_dir()
    assert (fetched / 'package.jsonc').is_file()
import pytest | import pytest | ||||
from tests import DDS | |||||
from dds_ci.testing import ProjectOpener, Project | |||||
from dds_ci import proc, paths | from dds_ci import proc, paths | ||||
## ############################################################################# | ## ############################################################################# | ||||
## detect file changes is a catastrophic bug! | ## detect file changes is a catastrophic bug! | ||||
def build_and_get_rc(dds: DDS) -> int: | |||||
dds.build() | |||||
app = dds.build_dir / ('app' + paths.EXE_SUFFIX) | |||||
@pytest.fixture() | |||||
def test_project(project_opener: ProjectOpener) -> Project: | |||||
return project_opener.open('projects/compile_deps') | |||||
def build_and_get_rc(proj: Project) -> int: | |||||
proj.build() | |||||
app = proj.build_root.joinpath('app' + paths.EXE_SUFFIX) | |||||
return proc.run([app]).returncode | return proc.run([app]).returncode | ||||
def test_simple_rebuild(dds: DDS) -> None: | |||||
def test_simple_rebuild(test_project: Project) -> None: | |||||
""" | """ | ||||
Check that changing a source file will update the resulting application. | Check that changing a source file will update the resulting application. | ||||
""" | """ | ||||
assert build_and_get_rc(dds) == 0 | |||||
dds.scope.enter_context( | |||||
dds.set_contents( | |||||
'src/1.cpp', | |||||
b''' | |||||
int value_1() { return 33; } | |||||
''', | |||||
)) | |||||
assert build_and_get_rc(test_project) == 0 | |||||
test_project.write('src/1.cpp', 'int value_1() { return 33; }') | |||||
# 33 - 32 = 1 | # 33 - 32 = 1 | ||||
assert build_and_get_rc(dds) == 1 | |||||
assert build_and_get_rc(test_project) == 1 | |||||
def test_rebuild_header_change(dds: DDS) -> None: | |||||
def test_rebuild_header_change(test_project: Project) -> None: | |||||
"""Change the content of the header which defines the values""" | """Change the content of the header which defines the values""" | ||||
assert build_and_get_rc(dds) == 0 | |||||
dds.scope.enter_context( | |||||
dds.set_contents( | |||||
'src/values.hpp', | |||||
b''' | |||||
const int first_value = 63; | |||||
const int second_value = 88; | |||||
''', | |||||
)) | |||||
assert build_and_get_rc(dds) == (88 - 63) | |||||
def test_partial_build_rebuild(dds: DDS) -> None: | |||||
assert build_and_get_rc(test_project) == 0 | |||||
test_project.write('src/values.hpp', ''' | |||||
const int first_value = 63; | |||||
const int second_value = 88; | |||||
''') | |||||
assert build_and_get_rc(test_project) == (88 - 63) | |||||
def test_partial_build_rebuild(test_project: Project) -> None: | |||||
""" | """ | ||||
Change the content of a header, but cause one user of that header to fail | Change the content of a header, but cause one user of that header to fail | ||||
compilation. The fact that compilation fails means it is still `out-of-date`, | compilation. The fact that compilation fails means it is still `out-of-date`, | ||||
and will need to be compiled after we have fixed it up. | and will need to be compiled after we have fixed it up. | ||||
""" | """ | ||||
assert build_and_get_rc(dds) == 0 | |||||
dds.scope.enter_context( | |||||
dds.set_contents( | |||||
'src/values.hpp', | |||||
b''' | |||||
const int first_value_q = 6; | |||||
const int second_value_q = 99; | |||||
''', | |||||
)) | |||||
assert build_and_get_rc(test_project) == 0 | |||||
test_project.write('src/values.hpp', ''' | |||||
const int first_value_q = 6; | |||||
const int second_value_q = 99; | |||||
''') | |||||
# Header now causes errors in 1.cpp and 2.cpp | # Header now causes errors in 1.cpp and 2.cpp | ||||
with pytest.raises(subprocess.CalledProcessError): | with pytest.raises(subprocess.CalledProcessError): | ||||
dds.build() | |||||
test_project.build() | |||||
# Fix 1.cpp | # Fix 1.cpp | ||||
dds.scope.enter_context( | |||||
dds.set_contents( | |||||
'src/1.cpp', | |||||
b''' | |||||
#include "./values.hpp" | |||||
int value_1() { return first_value_q; } | |||||
''', | |||||
)) | |||||
test_project.write('src/1.cpp', ''' | |||||
#include "./values.hpp" | |||||
int value_1() { return first_value_q; } | |||||
''') | |||||
# We will still see a failure, but now the DB will record the updated values.hpp | # We will still see a failure, but now the DB will record the updated values.hpp | ||||
with pytest.raises(subprocess.CalledProcessError): | with pytest.raises(subprocess.CalledProcessError): | ||||
dds.build() | |||||
test_project.build() | |||||
# Should should raise _again_, even though we've successfully compiled one | # Should should raise _again_, even though we've successfully compiled one | ||||
# of the two files with the changed `values.hpp`, because `2.cpp` still | # of the two files with the changed `values.hpp`, because `2.cpp` still | ||||
# has a pending update | # has a pending update | ||||
with pytest.raises(subprocess.CalledProcessError): | with pytest.raises(subprocess.CalledProcessError): | ||||
dds.build() | |||||
test_project.build() | |||||
dds.scope.enter_context( | |||||
dds.set_contents( | |||||
'src/2.cpp', | |||||
b''' | |||||
#include "./values.hpp" | |||||
test_project.write('src/2.cpp', ''' | |||||
#include "./values.hpp" | |||||
int value_2() { return second_value_q; } | |||||
''', | |||||
)) | |||||
int value_2() { return second_value_q; } | |||||
''') | |||||
# We should now compile and link to get the updated value | # We should now compile and link to get the updated value | ||||
assert build_and_get_rc(dds) == (99 - 6) | |||||
assert build_and_get_rc(test_project) == (99 - 6) |
from tests import DDS, dds_fixture_conf, DDSFixtureParams | |||||
from dds_ci import proc, paths | from dds_ci import proc, paths | ||||
from dds_ci.testing import ProjectOpener | |||||
@dds_fixture_conf( | |||||
DDSFixtureParams('main', 'main'), | |||||
DDSFixtureParams('custom-runner', 'custom-runner'), | |||||
) | |||||
def test_catch_testdriver(dds: DDS) -> None: | |||||
dds.build(tests=True) | |||||
test_exe = dds.build_dir / f'test/testlib/calc{paths.EXE_SUFFIX}' | |||||
assert test_exe.exists() | |||||
def test_main(project_opener: ProjectOpener) -> None: | |||||
proj = project_opener.open('main') | |||||
proj.build() | |||||
test_exe = proj.build_root.joinpath('test/testlib/calc' + paths.EXE_SUFFIX) | |||||
assert test_exe.is_file() | |||||
assert proc.run([test_exe]).returncode == 0 | |||||
def test_custom(project_opener: ProjectOpener) -> None: | |||||
proj = project_opener.open('custom-runner') | |||||
proj.build() | |||||
test_exe = proj.build_root.joinpath('test/testlib/calc' + paths.EXE_SUFFIX) | |||||
assert test_exe.is_file() | |||||
assert proc.run([test_exe]).returncode == 0 | assert proc.run([test_exe]).returncode == 0 |
import pytest | |||||
from dds_ci.testing import ProjectOpener, Project | |||||
@pytest.fixture()
def test_project(project_opener: ProjectOpener) -> Project:
    """Open a fresh copy of the 'projects/sdist' sample project for each test."""
    proj = project_opener.open('projects/sdist')
    return proj
def test_create_sdist(test_project: Project) -> None:
    """'sdist create' writes the source-distribution archive into the build root."""
    test_project.sdist_create()
    archive = test_project.build_root / 'foo@1.2.3.tar.gz'
    assert archive.is_file()
def test_export_sdist(test_project: Project) -> None:
    """'sdist export' places the package directly into the local repository."""
    test_project.sdist_export()
    exported = test_project.dds.repo_dir / 'foo@1.2.3'
    assert exported.is_dir()
def test_import_sdist_archive(test_project: Project) -> None:
    """'repo import' of a created sdist archive lands its contents in the repository."""
    imported_root = test_project.dds.repo_dir / 'foo@1.2.3'
    assert not imported_root.is_dir()
    test_project.sdist_create()
    # Creating the archive alone must not touch the repository.
    assert not imported_root.is_dir()
    test_project.dds.repo_import(test_project.build_root / 'foo@1.2.3.tar.gz')
    assert imported_root.is_dir()
    assert imported_root.joinpath('library.jsonc').is_file()
    # Excluded file will not be in the sdist:
    assert not imported_root.joinpath('other-file.txt').is_file()
from pathlib import Path | |||||
import platform | |||||
import pytest | |||||
from dds_ci.testing import RepoFixture, Project | |||||
from dds_ci import proc, toolchain, paths | |||||
# Catalog listing that fetches Crypto++ 8.2.0 from its upstream git repository.
# 'auto-lib' presumably names the namespace/name pair for the generated library
# (cryptopp ships no dds metadata — confirm against the dds catalog docs), and
# the 'transform' relocates the sources into a dds-style src/ layout.
CRYPTOPP_JSON = {
    "packages": {
        "cryptopp": {
            "8.2.0": {
                "remote": {
                    "git": {
                        "url": "https://github.com/weidai11/cryptopp.git",
                        "ref": "CRYPTOPP_8_2_0"
                    },
                    "auto-lib": "cryptopp/cryptopp",
                    "transform": [{
                        "move": {
                            "from": ".",
                            "to": "src/cryptopp",
                            "include": ["*.c", "*.cpp", "*.h"]
                        }
                    }]
                }
            }
        }
    }
}

# Minimal C++ program exercising the library: exits 0 as soon as the OS RNG
# yields a nonzero byte, 1 only if all 256 bytes came back zero.
APP_CPP = r'''
#include <cryptopp/osrng.h>

#include <string>

int main() {
    std::string arr;
    arr.resize(256);
    CryptoPP::OS_GenerateRandomBlock(false,
                                     reinterpret_cast<CryptoPP::byte*>(arr.data()),
                                     arr.size());
    for (auto b : arr) {
        if (b != '\x00') {
            return 0;
        }
    }
    return 1;
}
'''
@pytest.mark.skipif(platform.system() == 'FreeBSD', reason='This one has trouble running on FreeBSD')
def test_get_build_use_cryptopp(test_parent_dir: Path, tmp_project: Project, http_repo: RepoFixture) -> None:
    """Pull cryptopp from a test repository, build an app against it, and run it."""
    http_repo.import_json_data(CRYPTOPP_JSON)
    tmp_project.dds.repo_add(http_repo.url)
    tmp_project.package_json = {
        'name': 'usr-cryptopp',
        'version': '1.0.0',
        'namespace': 'test',
        'depends': ['cryptopp@8.2.0'],
    }
    tmp_project.library_json = {
        'name': 'use-cryptopp',
        'uses': ['cryptopp/cryptopp'],
    }
    # Pick the toolchain file matching the default test compiler family.
    if 'gcc' in toolchain.get_default_test_toolchain().name:
        tc_fname = 'gcc.tc.jsonc'
    else:
        tc_fname = 'msvc.tc.jsonc'
    tmp_project.write('src/use-cryptopp.main.cpp', APP_CPP)
    tmp_project.build(toolchain=test_parent_dir / tc_fname)
    # The built app must run successfully (exit code 0).
    app_exe = (tmp_project.build_root / 'use-cryptopp').with_suffix(paths.EXE_SUFFIX)
    proc.check_run([app_exe])
"cxx_version": 'c++17', | "cxx_version": 'c++17', | ||||
"cxx_compiler": 'g++-9', | "cxx_compiler": 'g++-9', | ||||
"flags": '-DSPDLOG_COMPILED_LIB', | "flags": '-DSPDLOG_COMPILED_LIB', | ||||
"link_flags": '-static-libgcc -static-libstdc++' | |||||
} | } |
from pathlib import Path | |||||
from dds_ci.testing import RepoFixture, ProjectOpener | |||||
from dds_ci import proc, paths, toolchain | |||||
def test_get_build_use_spdlog(test_parent_dir: Path, project_opener: ProjectOpener, http_repo: RepoFixture) -> None:
    """Build the sample 'project' against spdlog pulled from a test repository, then run it."""
    proj = project_opener.open('project')
    http_repo.import_json_file(proj.root / 'catalog.json')
    proj.dds.repo_add(http_repo.url)
    # Pick the toolchain file matching the default test compiler family.
    if 'gcc' in toolchain.get_default_test_toolchain().name:
        tc_fname = 'gcc.tc.jsonc'
    else:
        tc_fname = 'msvc.tc.jsonc'
    proj.build(toolchain=test_parent_dir / tc_fname)
    app_exe = (proj.build_root / 'use-spdlog').with_suffix(paths.EXE_SUFFIX)
    proc.check_run([app_exe])
from pathlib import Path | from pathlib import Path | ||||
from typing import Optional | from typing import Optional | ||||
from . import paths, proc | |||||
from . import paths, proc, toolchain as tc_mod | |||||
from dds_ci.util import Pathish | |||||
class DDSWrapper: | class DDSWrapper: | ||||
Wraps a 'dds' executable with some convenience APIs that invoke various | Wraps a 'dds' executable with some convenience APIs that invoke various | ||||
'dds' subcommands. | 'dds' subcommands. | ||||
""" | """ | ||||
def __init__(self, path: Path) -> None: | |||||
def __init__(self, | |||||
path: Path, | |||||
*, | |||||
repo_dir: Optional[Pathish] = None, | |||||
catalog_path: Optional[Pathish] = None, | |||||
default_cwd: Optional[Pathish] = None) -> None: | |||||
self.path = path | self.path = path | ||||
self.repo_dir = paths.PREBUILT_DIR / 'ci-repo' | |||||
self.catalog_path = paths.PREBUILT_DIR / 'ci-catalog.db' | |||||
self.repo_dir = Path(repo_dir or (paths.PREBUILT_DIR / 'ci-repo')) | |||||
self.catalog_path = Path(catalog_path or (self.repo_dir.parent / 'ci-catalog.db')) | |||||
self.default_cwd = default_cwd or Path.cwd() | |||||
def clone(self) -> 'DDSWrapper': | |||||
return DDSWrapper(self.path, | |||||
repo_dir=self.repo_dir, | |||||
catalog_path=self.catalog_path, | |||||
default_cwd=self.default_cwd) | |||||
@property | @property | ||||
def catalog_path_arg(self) -> str: | def catalog_path_arg(self) -> str: | ||||
"""The arguments for --repo-dir""" | """The arguments for --repo-dir""" | ||||
return f'--repo-dir={self.repo_dir}' | return f'--repo-dir={self.repo_dir}' | ||||
def set_repo_scratch(self, path: Pathish) -> None: | |||||
self.repo_dir = Path(path) / 'data' | |||||
self.catalog_path = Path(path) / 'catalog.db' | |||||
def clean(self, *, build_dir: Optional[Path] = None, repo: bool = True, catalog: bool = True) -> None: | def clean(self, *, build_dir: Optional[Path] = None, repo: bool = True, catalog: bool = True) -> None: | ||||
""" | """ | ||||
Clean out prior executable output, including repos, catalog, and | Clean out prior executable output, including repos, catalog, and | ||||
if catalog and self.catalog_path.exists(): | if catalog and self.catalog_path.exists(): | ||||
self.catalog_path.unlink() | self.catalog_path.unlink() | ||||
def run(self, args: proc.CommandLine) -> None: | |||||
def run(self, args: proc.CommandLine, *, cwd: Optional[Pathish] = None) -> None: | |||||
"""Execute the 'dds' executable with the given arguments""" | """Execute the 'dds' executable with the given arguments""" | ||||
proc.check_run([self.path, args]) | |||||
proc.check_run([self.path, args], cwd=cwd or self.default_cwd) | |||||
def catalog_json_import(self, path: Path) -> None: | def catalog_json_import(self, path: Path) -> None: | ||||
"""Run 'catalog import' to import the given JSON. Only applicable to older 'dds'""" | """Run 'catalog import' to import the given JSON. Only applicable to older 'dds'""" | ||||
self.run(['catalog', 'import', self.catalog_path_arg, f'--json={path}']) | self.run(['catalog', 'import', self.catalog_path_arg, f'--json={path}']) | ||||
def catalog_get(self, what: str) -> None: | |||||
self.run(['catalog', 'get', self.catalog_path_arg, what]) | |||||
def repo_add(self, url: str) -> None: | |||||
self.run(['repo', 'add', self.catalog_path_arg, url, '--update']) | |||||
def repo_import(self, sdist: Path) -> None: | |||||
self.run(['repo', self.repo_dir_arg, 'import', sdist]) | |||||
def build(self, | def build(self, | ||||
*, | *, | ||||
toolchain: Path, | |||||
root: Path, | root: Path, | ||||
toolchain: Optional[Path] = None, | |||||
build_root: Optional[Path] = None, | build_root: Optional[Path] = None, | ||||
jobs: Optional[int] = None) -> None: | jobs: Optional[int] = None) -> None: | ||||
""" | """ | ||||
:param build_root: The root directory where the output will be written. | :param build_root: The root directory where the output will be written. | ||||
:param jobs: The number of jobs to use. Default is CPU-count + 2 | :param jobs: The number of jobs to use. Default is CPU-count + 2 | ||||
""" | """ | ||||
toolchain = toolchain or tc_mod.get_default_test_toolchain() | |||||
jobs = jobs or multiprocessing.cpu_count() + 2 | jobs = jobs or multiprocessing.cpu_count() + 2 | ||||
self.run([ | self.run([ | ||||
'build', | 'build', | ||||
f'--project-dir={root}', | f'--project-dir={root}', | ||||
f'--out={build_root}', | f'--out={build_root}', | ||||
]) | ]) | ||||
def build_deps(self, args: proc.CommandLine, *, toolchain: Optional[Path] = None) -> None: | |||||
toolchain = toolchain or tc_mod.get_default_test_toolchain() | |||||
self.run([ | |||||
'build-deps', | |||||
f'--toolchain={toolchain}', | |||||
self.catalog_path_arg, | |||||
self.repo_dir_arg, | |||||
args, | |||||
]) |
import json | import json | ||||
import os | import os | ||||
from pathlib import Path | from pathlib import Path | ||||
from typing import Optional, Dict | |||||
from typing import Optional, Dict, Any | |||||
from typing_extensions import Protocol | from typing_extensions import Protocol | ||||
from . import paths | from . import paths | ||||
out: Optional[Path] | out: Optional[Path] | ||||
def gen_task_json_data() -> Dict: | |||||
def gen_task_json_data() -> Dict[str, Any]: | |||||
dds_ci_exe = paths.find_exe('dds-ci') | dds_ci_exe = paths.find_exe('dds-ci') | ||||
assert dds_ci_exe, 'Unable to find the dds-ci executable. This command should be run in a Poetry' | assert dds_ci_exe, 'Unable to find the dds-ci executable. This command should be run in a Poetry' | ||||
envs = {key: os.environ[key] | envs = {key: os.environ[key] | ||||
return task | return task | ||||
def generate_vsc_task(): | |||||
def generate_vsc_task() -> None: | |||||
parser = argparse.ArgumentParser() | parser = argparse.ArgumentParser() | ||||
parser.add_argument('--out', '-o', help='File to write into', type=Path) | parser.add_argument('--out', '-o', help='File to write into', type=Path) | ||||
args: Arguments = parser.parse_args() | args: Arguments = parser.parse_args() |
from pathlib import PurePath, Path | |||||
from pathlib import PurePath | |||||
from typing import Iterable, Union, Optional, Iterator | from typing import Iterable, Union, Optional, Iterator | ||||
from typing_extensions import Protocol | from typing_extensions import Protocol | ||||
import subprocess | import subprocess | ||||
from .util import Pathish | |||||
CommandLineArg = Union[str, PurePath, int, float] | CommandLineArg = Union[str, PurePath, int, float] | ||||
CommandLineArg1 = Union[CommandLineArg, Iterable[CommandLineArg]] | CommandLineArg1 = Union[CommandLineArg, Iterable[CommandLineArg]] | ||||
CommandLineArg2 = Union[CommandLineArg1, Iterable[CommandLineArg1]] | CommandLineArg2 = Union[CommandLineArg1, Iterable[CommandLineArg1]] | ||||
assert False, f'Invalid command line element: {repr(cmd)}' | assert False, f'Invalid command line element: {repr(cmd)}' | ||||
def run(*cmd: CommandLine, cwd: Optional[Path] = None, check: bool = False) -> ProcessResult: | |||||
def run(*cmd: CommandLine, cwd: Optional[Pathish] = None, check: bool = False) -> ProcessResult: | |||||
return subprocess.run( | return subprocess.run( | ||||
list(flatten_cmd(cmd)), | list(flatten_cmd(cmd)), | ||||
cwd=cwd, | cwd=cwd, | ||||
) | ) | ||||
def check_run(*cmd: CommandLine, cwd: Optional[Path] = None) -> ProcessResult: | |||||
def check_run(*cmd: CommandLine, cwd: Optional[Pathish] = None) -> ProcessResult: | |||||
return subprocess.run( | return subprocess.run( | ||||
list(flatten_cmd(cmd)), | list(flatten_cmd(cmd)), | ||||
cwd=cwd, | cwd=cwd, |
from .fixtures import Project, ProjectOpener, PackageJSON, LibraryJSON | |||||
from .http import RepoFixture | |||||
__all__ = ( | |||||
'Project', | |||||
'ProjectOpener', | |||||
'PackageJSON', | |||||
'LibraryJSON', | |||||
'RepoFixture', | |||||
) |
""" | |||||
Test fixtures used by DDS in pytest | |||||
""" | |||||
from pathlib import Path | |||||
import pytest | |||||
import json | |||||
import shutil | |||||
from typing import Sequence, cast, Optional | |||||
from typing_extensions import TypedDict | |||||
from _pytest.config import Config as PyTestConfig | |||||
from _pytest.tmpdir import TempPathFactory | |||||
from _pytest.fixtures import FixtureRequest | |||||
from dds_ci import toolchain, paths | |||||
from ..dds import DDSWrapper | |||||
from ..util import Pathish | |||||
tc_mod = toolchain | |||||
def ensure_absent(path: Pathish) -> None:
    """Delete *path* if it exists — recursively for a directory, unlink for a file.

    A nonexistent path is silently ignored, so this is safe to call as a
    cleanup finalizer.
    """
    target = Path(path)
    if target.is_dir():
        shutil.rmtree(target)
    elif target.exists():
        target.unlink()
class _PackageJSONRequired(TypedDict):
    """Keys that every package.jsonc document must carry."""
    # Package name, e.g. 'neo-fun'
    name: str
    # Root namespace owning the package's libraries
    namespace: str
    # Version string, e.g. '1.2.3'
    version: str


class PackageJSON(_PackageJSONRequired, total=False):
    """Shape of a package.jsonc document; 'depends' is optional."""
    # Dependency statements, e.g. 'neo-fun^0.4.0'
    depends: Sequence[str]
class _LibraryJSONRequired(TypedDict):
    """Keys that every library.jsonc document must carry."""
    # Library name within the package's namespace
    name: str


class LibraryJSON(_LibraryJSONRequired, total=False):
    """Shape of a library.jsonc document; 'uses' is optional."""
    # Libraries this one consumes, e.g. 'cryptopp/cryptopp'
    uses: Sequence[str]
class Project:
    """Handle on an on-disk dds project plus the DDSWrapper used to drive it."""
    def __init__(self, dirpath: Path, dds: DDSWrapper) -> None:
        # The dds invoker bound to this project (carries its own repo/catalog paths)
        self.dds = dds
        # Directory containing the project sources
        self.root = dirpath
        # Where build output is placed
        self.build_root = dirpath / '_build'

    @property
    def package_json(self) -> PackageJSON:
        """Parsed content of <root>/package.jsonc.

        NOTE(review): parsed with json.loads, so the on-disk file must be
        strict JSON despite the .jsonc extension — confirm for files with
        comments.
        """
        return cast(PackageJSON, json.loads(self.root.joinpath('package.jsonc').read_text()))

    @package_json.setter
    def package_json(self, data: PackageJSON) -> None:
        # Overwrites the whole file with pretty-printed JSON
        self.root.joinpath('package.jsonc').write_text(json.dumps(data, indent=2))

    @property
    def library_json(self) -> LibraryJSON:
        """Parsed content of <root>/library.jsonc (same strict-JSON caveat as package_json)."""
        return cast(LibraryJSON, json.loads(self.root.joinpath('library.jsonc').read_text()))

    @library_json.setter
    def library_json(self, data: LibraryJSON) -> None:
        self.root.joinpath('library.jsonc').write_text(json.dumps(data, indent=2))

    @property
    def project_dir_arg(self) -> str:
        """Argument for --project-dir"""
        return f'--project-dir={self.root}'

    def build(self, *, toolchain: Optional[Pathish] = None) -> None:
        """
        Execute 'dds build' on the project
        """
        # fixup_toolchain may augment the file (e.g. ccache/lld) and yields a
        # temporary toolchain path valid only inside the 'with' block.
        with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
            self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc)

    def sdist_create(self) -> None:
        """Run 'dds sdist create', writing the archive into build_root (the cwd of the run)."""
        self.build_root.mkdir(exist_ok=True, parents=True)
        self.dds.run(['sdist', 'create', self.project_dir_arg], cwd=self.build_root)

    def sdist_export(self) -> None:
        """Run 'dds sdist export' to place this project into the dds local repository."""
        self.dds.run(['sdist', 'export', self.dds.repo_dir_arg, self.project_dir_arg])

    def write(self, path: Pathish, content: str) -> Path:
        """Write *content* at *path* (relative paths resolve against the project root),
        creating parent directories as needed. Returns the absolute path written."""
        path = Path(path)
        if not path.is_absolute():
            path = self.root / path
        path.parent.mkdir(exist_ok=True, parents=True)
        path.write_text(content)
        return path
@pytest.fixture()
def test_parent_dir(request: FixtureRequest) -> Path:
    """Directory containing the test module that requested this fixture."""
    module_file = Path(request.fspath)
    return module_file.parent
class ProjectOpener():
    """Copies sample project directories into scratch space for a single test.

    Scratch placement depends on the pytest-xdist worker id: 'master'
    (presumably the non-xdist run — confirm against pytest-xdist docs) copies
    next to the test file and registers finalizers for cleanup, while real
    workers use per-worker temporary directories so parallel tests do not
    collide.
    """
    def __init__(self, dds: DDSWrapper, request: FixtureRequest, worker: str,
                 tmp_path_factory: TempPathFactory) -> None:
        self.dds = dds
        self._request = request
        self._worker_id = worker
        self._tmppath_fac = tmp_path_factory

    @property
    def test_name(self) -> str:
        """The name of the test that requested this opener"""
        return str(self._request.function.__name__)

    @property
    def test_dir(self) -> Path:
        """The directory that contains the test that requested this opener"""
        return Path(self._request.fspath).parent

    def open(self, dirpath: Pathish) -> Project:
        """Copy the project at *dirpath* (relative to the test's directory) into
        scratch space and return a Project driving it with a cloned DDSWrapper
        whose repo/catalog live in a fresh scratch area."""
        dirpath = Path(dirpath)
        if not dirpath.is_absolute():
            dirpath = self.test_dir / dirpath
        proj_copy = self.test_dir / '__test_project'
        if self._worker_id != 'master':
            # Parallel worker: isolate the copy in a per-worker temp dir
            proj_copy = self._tmppath_fac.mktemp('test-project-') / self.test_name
        else:
            # Single-process run: clean up the in-tree copy when the test ends
            self._request.addfinalizer(lambda: ensure_absent(proj_copy))

        shutil.copytree(dirpath, proj_copy)
        new_dds = self.dds.clone()

        if self._worker_id == 'master':
            repo_dir = self.test_dir / '__test_repo'
        else:
            repo_dir = self._tmppath_fac.mktemp('test-repo-') / self.test_name

        # Point the cloned dds at a scratch repo and run it from the project copy
        new_dds.set_repo_scratch(repo_dir)
        new_dds.default_cwd = proj_copy
        self._request.addfinalizer(lambda: ensure_absent(repo_dir))

        return Project(proj_copy, new_dds)
@pytest.fixture()
def project_opener(request: FixtureRequest, worker_id: str, dds_2: DDSWrapper,
                   tmp_path_factory: TempPathFactory) -> ProjectOpener:
    """A ProjectOpener bound to the current test and xdist worker."""
    return ProjectOpener(dds_2, request, worker_id, tmp_path_factory)
@pytest.fixture()
def tmp_project(request: FixtureRequest, worker_id: str, project_opener: ProjectOpener,
                tmp_path_factory: TempPathFactory) -> Project:
    """A Project opened on a brand-new empty directory."""
    if worker_id == 'master':
        # Non-xdist run: create the empty dir next to the test and clean it up after.
        proj_dir = project_opener.test_dir / '__test_project_empty'
        ensure_absent(proj_dir)
        proj_dir.mkdir()
        project = project_opener.open(proj_dir)
        request.addfinalizer(lambda: ensure_absent(proj_dir))
        return project
    # xdist worker: temp dirs are isolated per worker, no manual cleanup needed.
    proj_dir = tmp_path_factory.mktemp('temp-project')
    return project_opener.open(proj_dir)
@pytest.fixture(scope='session')
def dds_2(dds_exe: Path) -> DDSWrapper:
    """A session-wide DDSWrapper around the dds executable under test."""
    wrapper = DDSWrapper(dds_exe)
    return wrapper
@pytest.fixture(scope='session')
def dds_exe(pytestconfig: PyTestConfig) -> Path:
    """Path to the dds executable: --dds-exe if given, else the freshly built one."""
    return Path(pytestconfig.getoption('--dds-exe') or paths.CUR_BUILT_DDS)
httpd.shutdown() | httpd.shutdown() | ||||
@pytest.yield_fixture() # type: ignore | |||||
@pytest.fixture() | |||||
def http_tmp_dir_server(tmp_path: Path, unused_tcp_port: int) -> Iterator[ServerInfo]: | def http_tmp_dir_server(tmp_path: Path, unused_tcp_port: int) -> Iterator[ServerInfo]: | ||||
""" | """ | ||||
Creates an HTTP server that serves the contents of a new | Creates an HTTP server that serves the contents of a new | ||||
]) | ]) | ||||
@pytest.yield_fixture() # type: ignore | |||||
@pytest.fixture() | |||||
def http_repo(dds_exe: Path, http_tmp_dir_server: ServerInfo) -> Iterator[RepoFixture]: | def http_repo(dds_exe: Path, http_tmp_dir_server: ServerInfo) -> Iterator[RepoFixture]: | ||||
""" | """ | ||||
Fixture that creates a new empty dds repository and an HTTP server to serve | Fixture that creates a new empty dds repository and an HTTP server to serve |
import json5 | import json5 | ||||
from . import paths | from . import paths | ||||
from .util import Pathish | |||||
@contextmanager | @contextmanager | ||||
def fixup_toolchain(json_file: Path) -> Iterator[Path]: | |||||
def fixup_toolchain(json_file: Pathish) -> Iterator[Path]: | |||||
""" | """ | ||||
Augment the toolchain at the given path by adding 'ccache' or -fuse-ld=lld, | Augment the toolchain at the given path by adding 'ccache' or -fuse-ld=lld, | ||||
if those tools are available on the system. Yields a new toolchain file | if those tools are available on the system. Yields a new toolchain file | ||||
based on 'json_file' | based on 'json_file' | ||||
""" | """ | ||||
json_file = Path(json_file) | |||||
data = json5.loads(json_file.read_text()) | data = json5.loads(json_file.read_text()) | ||||
# Check if we can add ccache | # Check if we can add ccache | ||||
ccache = paths.find_exe('ccache') | ccache = paths.find_exe('ccache') | ||||
if ccache: | |||||
if ccache and data.get('compiler_id') in ('gnu', 'clang'): | |||||
print('Found ccache:', ccache) | print('Found ccache:', ccache) | ||||
data['compiler_launcher'] = [str(ccache)] | data['compiler_launcher'] = [str(ccache)] | ||||
# Check for lld for use with GCC/Clang | # Check for lld for use with GCC/Clang |
from pathlib import PurePath | |||||
from os import PathLike | |||||
from typing import Union | |||||
#: A path, string, or convertible-to-Path object. Functions accepting
#: ``Pathish`` should normalize it with ``Path(arg)`` before use.
Pathish = Union[PathLike, PurePath, str]