Browse source

Overhaul, refactor, and cleanup of test suite

default_compile_flags
vector-of-bool, 4 years ago
Parent
commit
6d2d869c2f
78 files changed with 615 additions and 865 deletions
  1. +2 -2  Makefile
  2. +0 -16  tests/__init__.py
  3. +0 -14  tests/basics/test_app_only.py
  4. +0 -46  tests/basics/test_basics.py
  5. +0 -30  tests/basics/test_simple.py
  6. +0 -15  tests/basics/test_test_only.py
  7. +0 -8  tests/catalog/create_test.py
  8. +0 -49  tests/catalog/get_test.py
  9. +0 -0  tests/config_template/copy_only/src/info.config.hpp
  10. +0 -0  tests/config_template/copy_only/src/info.test.cpp
  11. +0 -0  tests/config_template/simple/library.jsonc
  12. +0 -0  tests/config_template/simple/package.jsonc
  13. +0 -0  tests/config_template/simple/src/simple/config.config.hpp
  14. +0 -0  tests/config_template/simple/src/simple/simple.test.cpp
  15. +9 -9  tests/config_template/test_config_template.py
  16. +5 -72  tests/conftest.py
  17. +0 -163  tests/dds.py
  18. +0 -15  tests/deps/build-deps/project/catalog.json
  19. +0 -5  tests/deps/build-deps/project/deps.json5
  20. +0 -35  tests/deps/build-deps/test_build_deps.py
  21. +0 -30  tests/deps/do_test.py
  22. +0 -26  tests/deps/git-remote/catalog.json
  23. +0 -9  tests/deps/git-remote/package.json5
  24. +0 -4  tests/deps/no-deps/catalog.json
  25. +0 -5  tests/deps/no-deps/package.json5
  26. +0 -29  tests/deps/use-cryptopp/project/catalog.json
  27. +0 -4  tests/deps/use-cryptopp/project/library.json5
  28. +0 -8  tests/deps/use-cryptopp/project/package.json5
  29. +0 -17  tests/deps/use-cryptopp/project/src/use-cryptopp.main.cpp
  30. +0 -17  tests/deps/use-cryptopp/test_use_cryptopp.py
  31. +0 -16  tests/deps/use-remote/catalog.json
  32. +0 -6  tests/deps/use-remote/library.json5
  33. +0 -8  tests/deps/use-remote/package.json5
  34. +0 -8  tests/deps/use-remote/src/app.main.cpp
  35. +0 -13  tests/deps/use-spdlog/use_spdlog_test.py
  36. +0 -7  tests/errors/errors_test.py
  37. +0 -53  tests/fileutil.py
  38. +0 -9  tests/gcc-9.tc.jsonc
  39. +0 -5  tests/msvc.tc.jsonc
  40. +0 -0  tests/projects/compile_deps/src/1.cpp
  41. +0 -0  tests/projects/compile_deps/src/2.cpp
  42. +0 -0  tests/projects/compile_deps/src/app.main.cpp
  43. +0 -0  tests/projects/compile_deps/src/foo.hpp
  44. +0 -0  tests/projects/compile_deps/src/values.hpp
  45. +0 -0  tests/projects/sdist/include/header.h
  46. +0 -0  tests/projects/sdist/include/header.hpp
  47. +0 -0  tests/projects/sdist/library.jsonc
  48. +0 -0  tests/projects/sdist/other-file.txt
  49. +0 -0  tests/projects/sdist/package.json5
  50. +0 -0  tests/projects/sdist/src/foo.cpp
  51. +0 -27  tests/sdist/sdist_test.py
  52. +84 -0  tests/test_basics.py
  53. +60 -0  tests/test_build_deps.py
  54. +32 -0  tests/test_catalog.py
  55. +41 -58  tests/test_compile_deps.py
  56. +0 -0  tests/test_drivers/__init__.py
  57. +14 -9  tests/test_drivers/catch/test_catch.py
  58. +31 -0  tests/test_sdist.py
  59. +0 -0  tests/use-cryptopp/gcc.tc.jsonc
  60. +0 -0  tests/use-cryptopp/msvc.tc.jsonc
  61. +70 -0  tests/use-cryptopp/test_use_cryptopp.py
  62. +0 -1  tests/use-spdlog/gcc.tc.jsonc
  63. +0 -0  tests/use-spdlog/msvc.tc.jsonc
  64. +0 -0  tests/use-spdlog/project/catalog.json
  65. +0 -0  tests/use-spdlog/project/library.json5
  66. +0 -0  tests/use-spdlog/project/package.json5
  67. +0 -0  tests/use-spdlog/project/src/spdlog_user.cpp
  68. +0 -0  tests/use-spdlog/project/src/spdlog_user.hpp
  69. +0 -0  tests/use-spdlog/project/src/use-spdlog.main.cpp
  70. +13 -0  tests/use-spdlog/use_spdlog_test.py
  71. +44 -7  tools/dds_ci/dds.py
  72. +3 -3  tools/dds_ci/msvs.py
  73. +5 -3  tools/dds_ci/proc.py
  74. +10 -0  tools/dds_ci/testing/__init__.py
  75. +180 -0  tools/dds_ci/testing/fixtures.py
  76. +2 -2  tools/dds_ci/testing/http.py
  77. +4 -2  tools/dds_ci/toolchain.py
  78. +6 -0  tools/dds_ci/util.py

+ 2
- 2
Makefile View file

@@ -101,8 +101,8 @@ site: docs
echo "Site generated at _site/"

py-check:
poetry run mypy tools/dds_ci
poetry run pylint tools/dds_ci
poetry run mypy tools/dds_ci $(shell find tests/ -name *.py)
poetry run pylint tools/dds_ci $(shell find tests/ -name *.py)

format:
poetry run dds-format

+ 0
- 16
tests/__init__.py View file

@@ -1,16 +0,0 @@
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).absolute().parent.parent / 'tools'))

from .dds import DDS, DDSFixtureParams, scoped_dds, dds_fixture_conf, dds_fixture_conf_1
from .http import http_repo, RepoFixture

__all__ = (
'DDS',
'DDSFixtureParams',
'scoped_dds',
'dds_fixture_conf',
'dds_fixture_conf_1',
'http_repo',
'RepoFixture',
)

+ 0
- 14
tests/basics/test_app_only.py View file

@@ -1,14 +0,0 @@
from tests import DDS
from tests.fileutil import set_contents

from dds_ci import paths


def test_lib_with_just_app(dds: DDS) -> None:
dds.scope.enter_context(set_contents(
dds.source_root / 'src/foo.main.cpp',
b'int main() {}',
))

dds.build()
assert (dds.build_dir / f'foo{paths.EXE_SUFFIX}').is_file()

+ 0
- 46
tests/basics/test_basics.py View file

@@ -1,46 +0,0 @@
from typing import ContextManager
from pathlib import Path
from tests import DDS
from tests.fileutil import ensure_dir, set_contents


def test_build_empty(dds: DDS) -> None:
assert not dds.source_root.exists()
dds.scope.enter_context(ensure_dir(dds.source_root))
dds.build()


def test_build_simple(dds: DDS) -> None:
dds.scope.enter_context(set_contents(dds.source_root / 'src/f.cpp', b'void foo() {}'))
dds.build()


def basic_pkg_dds(dds: DDS) -> ContextManager[Path]:
return set_contents(
dds.source_root / 'package.json5', b'''
{
name: 'test-pkg',
version: '0.2.2',
namespace: 'test',
}
''')


def test_empty_with_pkg_dds(dds: DDS) -> None:
dds.scope.enter_context(basic_pkg_dds(dds))
dds.build()


def test_empty_with_lib_dds(dds: DDS) -> None:
dds.scope.enter_context(basic_pkg_dds(dds))
dds.build()


def test_empty_sdist_create(dds: DDS) -> None:
dds.scope.enter_context(basic_pkg_dds(dds))
dds.sdist_create()


def test_empty_sdist_export(dds: DDS) -> None:
dds.scope.enter_context(basic_pkg_dds(dds))
dds.sdist_export()

+ 0
- 30
tests/basics/test_simple.py View file

@@ -1,30 +0,0 @@
from contextlib import ExitStack
from tests import DDS


def test_simple_lib(dds: DDS, scope: ExitStack) -> None:
scope.enter_context(dds.set_contents(
'src/foo.cpp',
b'int the_answer() { return 42; }',
))

scope.enter_context(dds.set_contents(
'library.json5',
b'''{
name: 'TestLibrary',
}''',
))

scope.enter_context(
dds.set_contents(
'package.json5',
b'''{
name: 'TestProject',
version: '0.0.0',
namespace: 'test',
}''',
))

dds.build(tests=True, apps=False, warnings=False)
assert (dds.build_dir / 'compile_commands.json').is_file()
assert list(dds.build_dir.glob('libTestLibrary*')) != []

+ 0
- 15
tests/basics/test_test_only.py View file

@@ -1,15 +0,0 @@
from contextlib import ExitStack
from tests import DDS
from tests.fileutil import set_contents

from dds_ci import paths


def test_lib_with_just_test(dds: DDS, scope: ExitStack) -> None:
scope.enter_context(set_contents(
dds.source_root / 'src/foo.test.cpp',
b'int main() {}',
))

dds.build(tests=True, apps=False, warnings=False)
assert (dds.build_dir / f'test/foo{paths.EXE_SUFFIX}').is_file()

+ 0
- 8
tests/catalog/create_test.py View file

@@ -1,8 +0,0 @@
from tests import DDS
from tests.fileutil import ensure_dir


def test_create_catalog(dds: DDS) -> None:
dds.scope.enter_context(ensure_dir(dds.build_dir))
dds.catalog_create()
assert dds.catalog_path.is_file()

+ 0
- 49
tests/catalog/get_test.py View file

@@ -1,49 +0,0 @@
from tests.fileutil import ensure_dir
from tests import DDS
from tests.http import RepoFixture


def test_get(dds: DDS, http_repo: RepoFixture) -> None:
http_repo.import_json_data({
'version': 2,
'packages': {
'neo-sqlite3': {
'0.3.0': {
'remote': {
'git': {
'url': 'https://github.com/vector-of-bool/neo-sqlite3.git',
'ref': '0.3.0',
}
}
}
}
}
})

dds.scope.enter_context(ensure_dir(dds.source_root))
dds.repo_add(http_repo.url)
dds.catalog_get('neo-sqlite3@0.3.0')
assert (dds.scratch_dir / 'neo-sqlite3@0.3.0').is_dir()
assert (dds.scratch_dir / 'neo-sqlite3@0.3.0/package.jsonc').is_file()


def test_get_http(dds: DDS, http_repo: RepoFixture) -> None:
http_repo.import_json_data({
'packages': {
'cmcstl2': {
'2020.2.24': {
'remote': {
'http': {
'url':
'https://github.com/CaseyCarter/cmcstl2/archive/684a96d527e4dc733897255c0177b784dc280980.tar.gz?dds_lm=cmc/stl2;',
},
'auto-lib': 'cmc/stl2',
}
},
},
},
})
dds.scope.enter_context(ensure_dir(dds.source_root))
dds.repo_add(http_repo.url)
dds.catalog_get('cmcstl2@2020.2.24')
assert dds.scratch_dir.joinpath('cmcstl2@2020.2.24/include').is_dir()

tests/basics/config_template/copy_only/src/info.config.hpp → tests/config_template/copy_only/src/info.config.hpp View file


tests/basics/config_template/copy_only/src/info.test.cpp → tests/config_template/copy_only/src/info.test.cpp View file


tests/basics/config_template/simple/library.jsonc → tests/config_template/simple/library.jsonc View file


tests/basics/config_template/simple/package.jsonc → tests/config_template/simple/package.jsonc View file


tests/basics/config_template/simple/src/simple/config.config.hpp → tests/config_template/simple/src/simple/config.config.hpp View file


tests/basics/config_template/simple/src/simple/simple.test.cpp → tests/config_template/simple/src/simple/simple.test.cpp View file


tests/basics/config_template/test_config_template.py → tests/config_template/test_config_template.py View file

@@ -1,13 +1,13 @@
from time import sleep

from tests import DDS, dds_fixture_conf_1
from dds_ci.testing import ProjectOpener


@dds_fixture_conf_1('copy_only')
def test_config_template(dds: DDS) -> None:
generated_fpath = dds.build_dir / '__dds/gen/info.hpp'
def test_config_template(project_opener: ProjectOpener) -> None:
proj = project_opener.open('copy_only')
generated_fpath = proj.build_root / '__dds/gen/info.hpp'
assert not generated_fpath.is_file()
dds.build()
proj.build()
assert generated_fpath.is_file()

# Check that re-running the build will not update the generated file (the
@@ -15,11 +15,11 @@ def test_config_template(dds: DDS) -> None:
# cache and force a false-rebuild.)
start_time = generated_fpath.stat().st_mtime
sleep(0.1) # Wait just long enough to register a new stamp time
dds.build()
proj.build()
new_time = generated_fpath.stat().st_mtime
assert new_time == start_time


@dds_fixture_conf_1('simple')
def test_simple_substitution(dds: DDS) -> None:
dds.build()
def test_simple_substitution(project_opener: ProjectOpener) -> None:
simple = project_opener.open('simple')
simple.build()

+ 5
- 72
tests/conftest.py View file

@@ -1,79 +1,12 @@
from contextlib import ExitStack
from typing import Any, Callable, Iterator
from typing_extensions import Protocol
from typing import Any
from pathlib import Path
import shutil
from subprocess import check_call

import pytest
from _pytest.config import Config as PyTestConfig

from dds_ci import paths
from tests import scoped_dds, DDSFixtureParams, DDS
# Exposes the HTTP fixtures:
from .http import http_repo, http_tmp_dir_server # pylint: disable=unused-import


class TempPathFactory(Protocol):
def mktemp(self, basename: str, numbered: bool = True) -> Path:
...


class PyTestConfig(Protocol):
def getoption(self, name: str) -> Any:
...


class TestRequest(Protocol):
fixturename: str
scope: str
config: PyTestConfig
fspath: str
function: Callable[..., Any]
param: DDSFixtureParams


@pytest.fixture(scope='session')
def dds_exe(pytestconfig: PyTestConfig) -> Path:
opt = pytestconfig.getoption('--dds-exe') or paths.CUR_BUILT_DDS
return Path(opt)


@pytest.yield_fixture(scope='session') # type: ignore
def dds_pizza_catalog(dds_exe: Path, tmp_path_factory: TempPathFactory) -> Path:
tmpdir: Path = tmp_path_factory.mktemp(basename='dds-catalog')
cat_path = tmpdir / 'catalog.db'
check_call([str(dds_exe), 'repo', 'add', 'https://dds.pizza/repo', '--update', f'--catalog={cat_path}'])
yield cat_path


@pytest.yield_fixture # type: ignore
def dds(request: TestRequest, dds_exe: Path, tmp_path: Path, worker_id: str, scope: ExitStack) -> Iterator[DDS]:
test_source_dir = Path(request.fspath).absolute().parent
test_root = test_source_dir

# If we are running in parallel, use a unique directory as scratch
# space so that we aren't stomping on anyone else
if worker_id != 'master':
test_root = tmp_path / request.function.__name__
shutil.copytree(test_source_dir, test_root)

project_dir = test_root / 'project'
# Check if we have a special configuration
if hasattr(request, 'param'):
assert isinstance(request.param, DDSFixtureParams), \
('Using the `dds` fixture requires passing in indirect '
'params. Use @dds_fixture_conf to configure the fixture')
params: DDSFixtureParams = request.param
project_dir = test_root / params.subdir

# Create the instance. Auto-clean when we're done
yield scope.enter_context(scoped_dds(dds_exe, test_root, project_dir))


@pytest.yield_fixture # type: ignore
def scope() -> Iterator[ExitStack]:
with ExitStack() as scope:
yield scope
# Ensure the fixtures are registered with PyTest:
from dds_ci.testing.fixtures import * # pylint: disable=wildcard-import,unused-wildcard-import
from dds_ci.testing.http import * # pylint: disable=wildcard-import,unused-wildcard-import


def pytest_addoption(parser: Any) -> None:

+ 0
- 163
tests/dds.py View file

@@ -1,163 +0,0 @@
import os
import itertools
from contextlib import contextmanager, ExitStack
from pathlib import Path
from typing import Union, NamedTuple, ContextManager, Optional, Iterator, TypeVar
import shutil

import pytest
import _pytest

from dds_ci import proc, toolchain as tc_mod

from . import fileutil

T = TypeVar('T')


class DDS:
def __init__(self, dds_exe: Path, test_dir: Path, project_dir: Path, scope: ExitStack) -> None:
self.dds_exe = dds_exe
self.test_dir = test_dir
self.source_root = project_dir
self.scratch_dir = project_dir / '_test_scratch/Ю́рий Алексе́евич Гага́рин'
self.scope = scope
self.scope.callback(self.cleanup)

@property
def repo_dir(self) -> Path:
return self.scratch_dir / 'repo'

@property
def catalog_path(self) -> Path:
return self.scratch_dir / 'catalog.db'

@property
def deps_build_dir(self) -> Path:
return self.scratch_dir / 'deps-build'

@property
def build_dir(self) -> Path:
return self.scratch_dir / 'build'

@property
def lmi_path(self) -> Path:
return self.scratch_dir / 'INDEX.lmi'

def cleanup(self) -> None:
if self.scratch_dir.exists():
shutil.rmtree(self.scratch_dir)

def run_unchecked(self, cmd: proc.CommandLine, *, cwd: Optional[Path] = None) -> proc.ProcessResult:
full_cmd = itertools.chain([self.dds_exe, '-ltrace'], cmd)
return proc.run(full_cmd, cwd=cwd or self.source_root) # type: ignore

def run(self, cmd: proc.CommandLine, *, cwd: Optional[Path] = None, check: bool = True) -> proc.ProcessResult:
full_cmd = itertools.chain([self.dds_exe, '-ltrace'], cmd)
return proc.run(full_cmd, cwd=cwd, check=check) # type: ignore

@property
def repo_dir_arg(self) -> str:
return f'--repo-dir={self.repo_dir}'

@property
def project_dir_arg(self) -> str:
return f'--project-dir={self.source_root}'

@property
def catalog_path_arg(self) -> str:
return f'--catalog={self.catalog_path}'

def build_deps(self, args: proc.CommandLine, *, toolchain: Optional[str] = None) -> proc.ProcessResult:
return self.run([
'build-deps',
f'--toolchain={toolchain or tc_mod.get_default_test_toolchain()}',
self.catalog_path_arg,
self.repo_dir_arg,
f'--out={self.deps_build_dir}',
f'--lmi-path={self.lmi_path}',
args,
])

def repo_add(self, url: str) -> None:
self.run(['repo', 'add', url, '--update', self.catalog_path_arg])

def build(self,
*,
toolchain: Optional[str] = None,
apps: bool = True,
warnings: bool = True,
catalog_path: Optional[Path] = None,
tests: bool = True,
more_args: proc.CommandLine = (),
check: bool = True) -> proc.ProcessResult:
catalog_path = catalog_path or self.catalog_path
return self.run(
[
'build',
f'--out={self.build_dir}',
f'--toolchain={toolchain or tc_mod.get_default_test_toolchain()}',
f'--catalog={catalog_path}',
f'--repo-dir={self.repo_dir}',
['--no-tests'] if not tests else [],
['--no-apps'] if not apps else [],
['--no-warnings'] if not warnings else [],
self.project_dir_arg,
more_args,
],
check=check,
)

def sdist_create(self) -> proc.ProcessResult:
self.build_dir.mkdir(exist_ok=True, parents=True)
return self.run(['sdist', 'create', self.project_dir_arg], cwd=self.build_dir)

def sdist_export(self) -> proc.ProcessResult:
return self.run([
'sdist',
'export',
self.project_dir_arg,
self.repo_dir_arg,
])

def repo_import(self, sdist: Path) -> proc.ProcessResult:
return self.run(['repo', self.repo_dir_arg, 'import', sdist])

def catalog_create(self) -> proc.ProcessResult:
self.scratch_dir.mkdir(parents=True, exist_ok=True)
return self.run(['catalog', 'create', f'--catalog={self.catalog_path}'], cwd=self.test_dir)

def catalog_get(self, req: str) -> proc.ProcessResult:
return self.run([
'catalog',
'get',
f'--catalog={self.catalog_path}',
f'--out-dir={self.scratch_dir}',
req,
])

def set_contents(self, path: Union[str, Path], content: bytes) -> ContextManager[Path]:
return fileutil.set_contents(self.source_root / path, content)


@contextmanager
def scoped_dds(dds_exe: Path, test_dir: Path, project_dir: Path) -> Iterator[DDS]:
if os.name == 'nt':
dds_exe = dds_exe.with_suffix('.exe')
with ExitStack() as scope:
yield DDS(dds_exe, test_dir, project_dir, scope)


class DDSFixtureParams(NamedTuple):
ident: str
subdir: Union[Path, str]


def dds_fixture_conf(*argsets: DDSFixtureParams) -> _pytest.mark.MarkDecorator:
args = list(argsets)
return pytest.mark.parametrize('dds', args, indirect=True, ids=[p.ident for p in args])


def dds_fixture_conf_1(subdir: Union[Path, str]) -> _pytest.mark.MarkDecorator:
params = DDSFixtureParams(ident='only', subdir=subdir)
return pytest.mark.parametrize('dds', [params], indirect=True, ids=['.'])

+ 0
- 15
tests/deps/build-deps/project/catalog.json View file

@@ -1,15 +0,0 @@
{
"version": 2,
"packages": {
"neo-fun": {
"0.3.0": {
"remote": {
"git": {
"url": "https://github.com/vector-of-bool/neo-fun.git",
"ref": "0.3.0"
}
}
}
}
}
}

+ 0
- 5
tests/deps/build-deps/project/deps.json5 View file

@@ -1,5 +0,0 @@
{
depends: [
'neo-fun+0.3.0'
],
}

+ 0
- 35
tests/deps/build-deps/test_build_deps.py View file

@@ -1,35 +0,0 @@
from tests import DDS
from tests.http import RepoFixture


def test_build_deps_from_file(dds: DDS, http_repo: RepoFixture) -> None:
assert not dds.deps_build_dir.is_dir()
http_repo.import_json_file(dds.source_root / 'catalog.json')
dds.repo_add(http_repo.url)
dds.build_deps(['-d', dds.source_root / 'deps.json5'])
assert (dds.deps_build_dir / 'neo-fun@0.3.0').is_dir()
assert (dds.scratch_dir / 'INDEX.lmi').is_file()
assert (dds.deps_build_dir / '_libman/neo-fun.lmp').is_file()
assert (dds.deps_build_dir / '_libman/neo/fun.lml').is_file()


def test_build_deps_from_cmd(dds: DDS, http_repo: RepoFixture) -> None:
assert not dds.deps_build_dir.is_dir()
http_repo.import_json_file(dds.source_root / 'catalog.json')
dds.repo_add(http_repo.url)
dds.build_deps(['neo-fun=0.3.0'])
assert (dds.deps_build_dir / 'neo-fun@0.3.0').is_dir()
assert (dds.scratch_dir / 'INDEX.lmi').is_file()
assert (dds.deps_build_dir / '_libman/neo-fun.lmp').is_file()
assert (dds.deps_build_dir / '_libman/neo/fun.lml').is_file()


def test_multiple_deps(dds: DDS, http_repo: RepoFixture) -> None:
assert not dds.deps_build_dir.is_dir()
http_repo.import_json_file(dds.source_root / 'catalog.json')
dds.repo_add(http_repo.url)
dds.build_deps(['neo-fun^0.2.0', 'neo-fun~0.3.0'])
assert (dds.deps_build_dir / 'neo-fun@0.3.0').is_dir()
assert (dds.scratch_dir / 'INDEX.lmi').is_file()
assert (dds.deps_build_dir / '_libman/neo-fun.lmp').is_file()
assert (dds.deps_build_dir / '_libman/neo/fun.lml').is_file()

+ 0
- 30
tests/deps/do_test.py View file

@@ -1,30 +0,0 @@
import subprocess

from dds_ci import paths
from tests import DDS, DDSFixtureParams, dds_fixture_conf, dds_fixture_conf_1
from tests.http import RepoFixture

dds_conf = dds_fixture_conf(
DDSFixtureParams(ident='git-remote', subdir='git-remote'),
DDSFixtureParams(ident='no-deps', subdir='no-deps'),
)


@dds_conf
def test_deps_build(dds: DDS, http_repo: RepoFixture) -> None:
http_repo.import_json_file(dds.source_root / 'catalog.json')
dds.repo_add(http_repo.url)
assert not dds.repo_dir.exists()
dds.build()
assert dds.repo_dir.exists(), '`Building` did not generate a repo directory'


@dds_fixture_conf_1('use-remote')
def test_use_nlohmann_json_remote(dds: DDS, http_repo: RepoFixture) -> None:
http_repo.import_json_file(dds.source_root / 'catalog.json')
dds.repo_add(http_repo.url)
dds.build(apps=True)

app_exe = dds.build_dir / f'app{paths.EXE_SUFFIX}'
assert app_exe.is_file()
subprocess.check_call([str(app_exe)])

+ 0
- 26
tests/deps/git-remote/catalog.json View file

@@ -1,26 +0,0 @@
{
"version": 2,
"packages": {
"neo-fun": {
"0.3.2": {
"remote": {
"git": {
"url": "https://github.com/vector-of-bool/neo-fun.git",
"ref": "0.3.2"
}
}
}
},
"range-v3": {
"0.9.1": {
"remote": {
"auto-lib": "Niebler/range-v3",
"git": {
"url": "https://github.com/ericniebler/range-v3.git",
"ref": "0.9.1"
}
}
}
}
}
}

+ 0
- 9
tests/deps/git-remote/package.json5 View file

@@ -1,9 +0,0 @@
{
name: 'deps-test',
"namespace": "test",
version: '0.0.0',
depends: [
'neo-fun@0.3.2',
'range-v3@0.9.1',
]
}

+ 0
- 4
tests/deps/no-deps/catalog.json View file

@@ -1,4 +0,0 @@
{
"version": 2,
"packages": {}
}

+ 0
- 5
tests/deps/no-deps/package.json5 View file

@@ -1,5 +0,0 @@
{
name: 'deps-test',
version: '0.0.0',
"namespace": "test",
}

+ 0
- 29
tests/deps/use-cryptopp/project/catalog.json View file

@@ -1,29 +0,0 @@
{
"version": 2,
"packages": {
"cryptopp": {
"8.2.0": {
"remote": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0"
},
"auto-lib": "cryptopp/cryptopp",
"transform": [
{
"move": {
"from": ".",
"to": "src/cryptopp",
"include": [
"*.c",
"*.cpp",
"*.h"
]
}
}
]
}
}
}
}
}

+ 0
- 4
tests/deps/use-cryptopp/project/library.json5 View file

@@ -1,4 +0,0 @@
{
name: 'use-cryptopp',
uses: ['cryptopp/cryptopp']
}

+ 0
- 8
tests/deps/use-cryptopp/project/package.json5 View file

@@ -1,8 +0,0 @@
{
name: 'use-cryptopp',
version: '1.0.0',
namespace: 'test',
depends: [
'cryptopp@8.2.0'
]
}

+ 0
- 17
tests/deps/use-cryptopp/project/src/use-cryptopp.main.cpp View file

@@ -1,17 +0,0 @@
#include <cryptopp/osrng.h>

#include <string>

int main() {
std::string arr;
arr.resize(256);
CryptoPP::OS_GenerateRandomBlock(false,
reinterpret_cast<CryptoPP::byte*>(arr.data()),
arr.size());
for (auto b : arr) {
if (b != '\x00') {
return 0;
}
}
return 1;
}

+ 0
- 17
tests/deps/use-cryptopp/test_use_cryptopp.py View file

@@ -1,17 +0,0 @@
from tests import DDS
from tests.http import RepoFixture
import platform

import pytest

from dds_ci import proc, toolchain, paths


@pytest.mark.skipif(platform.system() == 'FreeBSD', reason='This one has trouble running on FreeBSD')
def test_get_build_use_cryptopp(dds: DDS, http_repo: RepoFixture) -> None:
http_repo.import_json_file(dds.source_root / 'catalog.json')
dds.repo_add(http_repo.url)
tc_fname = 'gcc.tc.jsonc' if 'gcc' in toolchain.get_default_test_toolchain().name else 'msvc.tc.jsonc'
tc = str(dds.test_dir / tc_fname)
dds.build(toolchain=tc)
proc.check_run([(dds.build_dir / 'use-cryptopp').with_suffix(paths.EXE_SUFFIX)])

+ 0
- 16
tests/deps/use-remote/catalog.json View file

@@ -1,16 +0,0 @@
{
"version": 2,
"packages": {
"nlohmann-json": {
"3.7.1": {
"remote": {
"git": {
"url": "https://github.com/vector-of-bool/json.git",
"ref": "dds/3.7.1"
}
},
"depends": []
}
}
}
}

+ 0
- 6
tests/deps/use-remote/library.json5 View file

@@ -1,6 +0,0 @@
{
name: "dummy",
uses: [
'nlohmann/json',
]
}

+ 0
- 8
tests/deps/use-remote/package.json5 View file

@@ -1,8 +0,0 @@
{
"name": "json-test",
"version": "0.0.0",
"namespace": "test",
"depends": [
"nlohmann-json@3.7.1"
]
}

+ 0
- 8
tests/deps/use-remote/src/app.main.cpp View file

@@ -1,8 +0,0 @@
#include <nlohmann/json.hpp>

int main() {
nlohmann::json j = {
{"foo", "bar"},
};
return j.size() == 1 ? 0 : 12;
}

+ 0
- 13
tests/deps/use-spdlog/use_spdlog_test.py View file

@@ -1,13 +0,0 @@
from tests import DDS
from tests.http import RepoFixture

from dds_ci import proc, paths, toolchain


def test_get_build_use_spdlog(dds: DDS, http_repo: RepoFixture) -> None:
http_repo.import_json_file(dds.source_root / 'catalog.json')
dds.repo_add(http_repo.url)
tc_fname = 'gcc.tc.jsonc' if 'gcc' in toolchain.get_default_test_toolchain().name else 'msvc.tc.jsonc'
tc = str(dds.test_dir / tc_fname)
dds.build(toolchain=tc, apps=True)
proc.check_run([(dds.build_dir / 'use-spdlog').with_suffix(paths.EXE_SUFFIX)])

+ 0
- 7
tests/errors/errors_test.py View file

@@ -1,7 +0,0 @@
from tests import DDS
from tests.fileutil import ensure_dir


def test_empty_dir(dds: DDS) -> None:
with ensure_dir(dds.source_root):
dds.build()

+ 0
- 53
tests/fileutil.py View file

@@ -1,53 +0,0 @@
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator, Optional

import shutil


@contextmanager
def ensure_dir(dirpath: Path) -> Iterator[Path]:
"""
Ensure that the given directory (and any parents) exist. When the context
exits, removes any directories that were created.
"""
dirpath = dirpath.absolute()
if dirpath.exists():
assert dirpath.is_dir(), f'Directory {dirpath} is a non-directory file'
yield dirpath
return

# Create the directory and clean it up when we are done
with ensure_dir(dirpath.parent):
dirpath.mkdir()
try:
yield dirpath
finally:
shutil.rmtree(dirpath)


@contextmanager
def auto_delete(fpath: Path) -> Iterator[Path]:
try:
yield fpath
finally:
if fpath.exists():
fpath.unlink()


@contextmanager
def set_contents(fpath: Path, content: bytes) -> Iterator[Path]:
prev_content: Optional[bytes] = None
if fpath.exists():
assert fpath.is_file(), 'File {fpath} exists and is not a regular file'
prev_content = fpath.read_bytes()

with ensure_dir(fpath.parent):
fpath.write_bytes(content)
try:
yield fpath
finally:
if prev_content is None:
fpath.unlink()
else:
fpath.write_bytes(prev_content)

+ 0
- 9
tests/gcc-9.tc.jsonc View file

@@ -1,9 +0,0 @@
{
"compiler_id": "gnu",
"c_compiler": "gcc-9",
"cxx_compiler": "g++-9",
"cxx_version": "c++17",
"cxx_flags": [
"-fconcepts"
]
}

+ 0
- 5
tests/msvc.tc.jsonc View file

@@ -1,5 +0,0 @@
{
"$schema": "../res/toolchain-schema.json",
"compiler_id": "msvc",
"cxx_flags": "/std:c++latest"
}

tests/db/project/src/1.cpp → tests/projects/compile_deps/src/1.cpp View file


tests/db/project/src/2.cpp → tests/projects/compile_deps/src/2.cpp View file


tests/db/project/src/app.main.cpp → tests/projects/compile_deps/src/app.main.cpp View file


tests/db/project/src/foo.hpp → tests/projects/compile_deps/src/foo.hpp View file


tests/db/project/src/values.hpp → tests/projects/compile_deps/src/values.hpp View file


tests/sdist/create/include/header.h → tests/projects/sdist/include/header.h View file


tests/sdist/create/include/header.hpp → tests/projects/sdist/include/header.hpp View file


tests/sdist/create/library.jsonc → tests/projects/sdist/library.jsonc View file


tests/sdist/create/other-file.txt → tests/projects/sdist/other-file.txt View file


tests/sdist/create/package.json5 → tests/projects/sdist/package.json5 View file


tests/sdist/create/src/foo.cpp → tests/projects/sdist/src/foo.cpp View file


+ 0
- 27
tests/sdist/sdist_test.py View file

@@ -1,27 +0,0 @@
from tests.dds import DDS, dds_fixture_conf_1


@dds_fixture_conf_1('create')
def test_create_sdist(dds: DDS) -> None:
dds.sdist_create()
sd_dir = dds.build_dir / 'foo@1.2.3.tar.gz'
assert sd_dir.is_file()


@dds_fixture_conf_1('create')
def test_export_sdist(dds: DDS) -> None:
dds.sdist_export()
assert (dds.repo_dir / 'foo@1.2.3').is_dir()


@dds_fixture_conf_1('create')
def test_import_sdist_archive(dds: DDS) -> None:
repo_content_path = dds.repo_dir / 'foo@1.2.3'
assert not repo_content_path.is_dir()
dds.sdist_create()
assert not repo_content_path.is_dir()
dds.repo_import(dds.build_dir / 'foo@1.2.3.tar.gz')
assert repo_content_path.is_dir()
assert repo_content_path.joinpath('library.jsonc').is_file()
# Excluded file will not be in the sdist:
assert not repo_content_path.joinpath('other-file.txt').is_file()

+ 84
- 0
tests/test_basics.py View file

@@ -0,0 +1,84 @@
import pytest
from subprocess import CalledProcessError

from dds_ci import paths
from dds_ci.testing import Project, PackageJSON


def test_build_empty(tmp_project: Project) -> None:
"""Check that dds is okay with building an empty project directory"""
tmp_project.build()


def test_lib_with_app_only(tmp_project: Project) -> None:
"""Test that dds can build a simple application"""
tmp_project.write('src/foo.main.cpp', r'int main() {}')
tmp_project.build()
assert (tmp_project.build_root / f'foo{paths.EXE_SUFFIX}').is_file()


def test_build_simple(tmp_project: Project) -> None:
"""
Test that dds can build a simple library, and handles rebuilds correctly.
"""
# Build a bad project
tmp_project.write('src/f.cpp', 'syntax error')
with pytest.raises(CalledProcessError):
tmp_project.build()
# Now we can build:
tmp_project.write('src/f.cpp', r'void f() {}')
tmp_project.build()
# Writing again will build again:
tmp_project.write('src/f.cpp', r'bad again')
with pytest.raises(CalledProcessError):
tmp_project.build()


def test_simple_lib(tmp_project: Project) -> None:
"""
Test that dds can build a simple library with some actual content, and that
the manifest files will affect the output name.
"""
tmp_project.write('src/foo.cpp', 'int the_answer() { return 42; }')
tmp_project.package_json = {
'name': 'TestProject',
'version': '0.0.0',
'namespace': 'test',
}
tmp_project.library_json = {'name': 'TestLibrary'}
tmp_project.build()
assert (tmp_project.build_root / 'compile_commands.json').is_file()
assert list(tmp_project.build_root.glob('libTestLibrary.*')) != []


def test_lib_with_just_test(tmp_project: Project) -> None:
tmp_project.write('src/foo.test.cpp', 'int main() {}')
tmp_project.build()
assert tmp_project.build_root.joinpath(f'test/foo{paths.EXE_SUFFIX}').is_file()


TEST_PACKAGE: PackageJSON = {
'name': 'test-pkg',
'version': '0.2.2',
'namespace': 'test',
}


def test_empty_with_pkg_dds(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.build()


def test_empty_with_lib_dds(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.build()


def test_empty_sdist_create(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.sdist_create()


def test_empty_sdist_export(tmp_project: Project) -> None:
tmp_project.package_json = TEST_PACKAGE
tmp_project.sdist_export()

+ 60
- 0
tests/test_build_deps.py View file

@@ -0,0 +1,60 @@
import json

import pytest

from dds_ci.testing import RepoFixture, Project

SIMPLE_CATALOG = {
"packages": {
"neo-fun": {
"0.3.0": {
"remote": {
"git": {
"url": "https://github.com/vector-of-bool/neo-fun.git",
"ref": "0.3.0"
}
}
}
}
}
}


@pytest.fixture()
def test_repo(http_repo: RepoFixture) -> RepoFixture:
http_repo.import_json_data(SIMPLE_CATALOG)
return http_repo


@pytest.fixture()
def test_project(tmp_project: Project, test_repo: RepoFixture) -> Project:
tmp_project.dds.repo_add(test_repo.url)
return tmp_project


def test_from_file(test_project: Project) -> None:
"""build-deps using a file listing deps"""
test_project.write('deps.json5', json.dumps({'depends': ['neo-fun+0.3.0']}))
test_project.dds.build_deps(['-d', 'deps.json5'])
assert test_project.root.joinpath('_deps/neo-fun@0.3.0').is_dir()
assert test_project.root.joinpath('_deps/_libman/neo-fun.lmp').is_file()
assert test_project.root.joinpath('_deps/_libman/neo/fun.lml').is_file()
assert test_project.root.joinpath('INDEX.lmi').is_file()


def test_from_cmd(test_project: Project) -> None:
"""build-deps using a command-line listing"""
test_project.dds.build_deps(['neo-fun=0.3.0'])
assert test_project.root.joinpath('_deps/neo-fun@0.3.0').is_dir()
assert test_project.root.joinpath('_deps/_libman/neo-fun.lmp').is_file()
assert test_project.root.joinpath('_deps/_libman/neo/fun.lml').is_file()
assert test_project.root.joinpath('INDEX.lmi').is_file()


def test_multiple_deps(test_project: Project) -> None:
"""build-deps with multiple deps resolves to a single version"""
test_project.dds.build_deps(['neo-fun^0.2.0', 'neo-fun~0.3.0'])
assert test_project.root.joinpath('_deps/neo-fun@0.3.0').is_dir()
assert test_project.root.joinpath('_deps/_libman/neo-fun.lmp').is_file()
assert test_project.root.joinpath('_deps/_libman/neo/fun.lml').is_file()
assert test_project.root.joinpath('INDEX.lmi').is_file()

+ 32
- 0
tests/test_catalog.py View file

@@ -0,0 +1,32 @@
from pathlib import Path

from dds_ci.testing import Project, RepoFixture
from dds_ci.dds import DDSWrapper


def test_catalog_create(dds_2: DDSWrapper, tmp_path: Path) -> None:
cat_db = tmp_path / 'catalog.db'
assert not cat_db.is_file()
dds_2.run(['catalog', 'create', '--catalog', cat_db])
assert cat_db.is_file()


def test_catalog_get_git(http_repo: RepoFixture, tmp_project: Project) -> None:
http_repo.import_json_data({
'packages': {
'neo-sqlite3': {
'0.3.0': {
'remote': {
'git': {
'url': 'https://github.com/vector-of-bool/neo-sqlite3.git',
'ref': '0.3.0',
}
}
}
}
}
})
tmp_project.dds.repo_add(http_repo.url)
tmp_project.dds.catalog_get('neo-sqlite3@0.3.0')
assert tmp_project.root.joinpath('neo-sqlite3@0.3.0').is_dir()
assert tmp_project.root.joinpath('neo-sqlite3@0.3.0/package.jsonc').is_file()

tests/db/test_compile_deps.py → tests/test_compile_deps.py View file

@@ -2,7 +2,7 @@ import subprocess

import pytest

from tests import DDS
from dds_ci.testing import ProjectOpener, Project
from dds_ci import proc, paths

## #############################################################################
@@ -18,88 +18,71 @@ from dds_ci import proc, paths
## detect file changes is a catastrophic bug!


def build_and_get_rc(dds: DDS) -> int:
dds.build()
app = dds.build_dir / ('app' + paths.EXE_SUFFIX)
@pytest.fixture()
def test_project(project_opener: ProjectOpener) -> Project:
return project_opener.open('projects/compile_deps')


def build_and_get_rc(proj: Project) -> int:
proj.build()
app = proj.build_root.joinpath('app' + paths.EXE_SUFFIX)
return proc.run([app]).returncode


def test_simple_rebuild(dds: DDS) -> None:
def test_simple_rebuild(test_project: Project) -> None:
"""
Check that changing a source file will update the resulting application.
"""
assert build_and_get_rc(dds) == 0
dds.scope.enter_context(
dds.set_contents(
'src/1.cpp',
b'''
int value_1() { return 33; }
''',
))
assert build_and_get_rc(test_project) == 0
test_project.write('src/1.cpp', 'int value_1() { return 33; }')
# 33 - 32 = 1
assert build_and_get_rc(dds) == 1
assert build_and_get_rc(test_project) == 1


def test_rebuild_header_change(dds: DDS) -> None:
def test_rebuild_header_change(test_project: Project) -> None:
"""Change the content of the header which defines the values"""
assert build_and_get_rc(dds) == 0
dds.scope.enter_context(
dds.set_contents(
'src/values.hpp',
b'''
const int first_value = 63;
const int second_value = 88;
''',
))
assert build_and_get_rc(dds) == (88 - 63)


def test_partial_build_rebuild(dds: DDS) -> None:
assert build_and_get_rc(test_project) == 0
test_project.write('src/values.hpp', '''
const int first_value = 63;
const int second_value = 88;
''')
assert build_and_get_rc(test_project) == (88 - 63)


def test_partial_build_rebuild(test_project: Project) -> None:
"""
Change the content of a header, but cause one user of that header to fail
compilation. The fact that compilation fails means it is still `out-of-date`,
and will need to be compiled after we have fixed it up.
"""
assert build_and_get_rc(dds) == 0
dds.scope.enter_context(
dds.set_contents(
'src/values.hpp',
b'''
const int first_value_q = 6;
const int second_value_q = 99;
''',
))
assert build_and_get_rc(test_project) == 0
test_project.write('src/values.hpp', '''
const int first_value_q = 6;
const int second_value_q = 99;
''')
# Header now causes errors in 1.cpp and 2.cpp
with pytest.raises(subprocess.CalledProcessError):
dds.build()
test_project.build()
# Fix 1.cpp
dds.scope.enter_context(
dds.set_contents(
'src/1.cpp',
b'''
#include "./values.hpp"

int value_1() { return first_value_q; }
''',
))
test_project.write('src/1.cpp', '''
#include "./values.hpp"

int value_1() { return first_value_q; }
''')
# We will still see a failure, but now the DB will record the updated values.hpp
with pytest.raises(subprocess.CalledProcessError):
dds.build()
test_project.build()

# Should raise _again_, even though we've successfully compiled one
# of the two files with the changed `values.hpp`, because `2.cpp` still
# has a pending update
with pytest.raises(subprocess.CalledProcessError):
dds.build()
test_project.build()

dds.scope.enter_context(
dds.set_contents(
'src/2.cpp',
b'''
#include "./values.hpp"
test_project.write('src/2.cpp', '''
#include "./values.hpp"

int value_2() { return second_value_q; }
''',
))
int value_2() { return second_value_q; }
''')
# We should now compile and link to get the updated value
assert build_and_get_rc(dds) == (99 - 6)
assert build_and_get_rc(test_project) == (99 - 6)

+ 0
- 0
tests/test_drivers/__init__.py View file


+ 14
- 9
tests/test_drivers/catch/test_catch.py View file

@@ -1,13 +1,18 @@
from tests import DDS, dds_fixture_conf, DDSFixtureParams
from dds_ci import proc, paths
from dds_ci.testing import ProjectOpener


@dds_fixture_conf(
DDSFixtureParams('main', 'main'),
DDSFixtureParams('custom-runner', 'custom-runner'),
)
def test_catch_testdriver(dds: DDS) -> None:
dds.build(tests=True)
test_exe = dds.build_dir / f'test/testlib/calc{paths.EXE_SUFFIX}'
assert test_exe.exists()
def test_main(project_opener: ProjectOpener) -> None:
proj = project_opener.open('main')
proj.build()
test_exe = proj.build_root.joinpath('test/testlib/calc' + paths.EXE_SUFFIX)
assert test_exe.is_file()
assert proc.run([test_exe]).returncode == 0


def test_custom(project_opener: ProjectOpener) -> None:
proj = project_opener.open('custom-runner')
proj.build()
test_exe = proj.build_root.joinpath('test/testlib/calc' + paths.EXE_SUFFIX)
assert test_exe.is_file()
assert proc.run([test_exe]).returncode == 0

+ 31
- 0
tests/test_sdist.py View file

@@ -0,0 +1,31 @@
import pytest

from dds_ci.testing import ProjectOpener, Project


@pytest.fixture()
def test_project(project_opener: ProjectOpener) -> Project:
return project_opener.open('projects/sdist')


def test_create_sdist(test_project: Project) -> None:
test_project.sdist_create()
sd_dir = test_project.build_root / 'foo@1.2.3.tar.gz'
assert sd_dir.is_file()


def test_export_sdist(test_project: Project) -> None:
test_project.sdist_export()
assert (test_project.dds.repo_dir / 'foo@1.2.3').is_dir()


def test_import_sdist_archive(test_project: Project) -> None:
repo_content_path = test_project.dds.repo_dir / 'foo@1.2.3'
assert not repo_content_path.is_dir()
test_project.sdist_create()
assert not repo_content_path.is_dir()
test_project.dds.repo_import(test_project.build_root / 'foo@1.2.3.tar.gz')
assert repo_content_path.is_dir()
assert repo_content_path.joinpath('library.jsonc').is_file()
# Excluded file will not be in the sdist:
assert not repo_content_path.joinpath('other-file.txt').is_file()

tests/deps/use-cryptopp/gcc.tc.jsonc → tests/use-cryptopp/gcc.tc.jsonc View file


tests/deps/use-cryptopp/msvc.tc.jsonc → tests/use-cryptopp/msvc.tc.jsonc View file


+ 70
- 0
tests/use-cryptopp/test_use_cryptopp.py View file

@@ -0,0 +1,70 @@
from pathlib import Path
import platform

import pytest

from dds_ci.testing import RepoFixture, Project
from dds_ci import proc, toolchain, paths

CRYPTOPP_JSON = {
"packages": {
"cryptopp": {
"8.2.0": {
"remote": {
"git": {
"url": "https://github.com/weidai11/cryptopp.git",
"ref": "CRYPTOPP_8_2_0"
},
"auto-lib": "cryptopp/cryptopp",
"transform": [{
"move": {
"from": ".",
"to": "src/cryptopp",
"include": ["*.c", "*.cpp", "*.h"]
}
}]
}
}
}
}
}

APP_CPP = r'''
#include <cryptopp/osrng.h>

#include <string>

int main() {
std::string arr;
arr.resize(256);
CryptoPP::OS_GenerateRandomBlock(false,
reinterpret_cast<CryptoPP::byte*>(arr.data()),
arr.size());
for (auto b : arr) {
if (b != '\x00') {
return 0;
}
}
return 1;
}
'''


@pytest.mark.skipif(platform.system() == 'FreeBSD', reason='This one has trouble running on FreeBSD')
def test_get_build_use_cryptopp(test_parent_dir: Path, tmp_project: Project, http_repo: RepoFixture) -> None:
http_repo.import_json_data(CRYPTOPP_JSON)
tmp_project.dds.repo_add(http_repo.url)
tmp_project.package_json = {
'name': 'usr-cryptopp',
'version': '1.0.0',
'namespace': 'test',
'depends': ['cryptopp@8.2.0'],
}
tmp_project.library_json = {
'name': 'use-cryptopp',
'uses': ['cryptopp/cryptopp'],
}
tc_fname = 'gcc.tc.jsonc' if 'gcc' in toolchain.get_default_test_toolchain().name else 'msvc.tc.jsonc'
tmp_project.write('src/use-cryptopp.main.cpp', APP_CPP)
tmp_project.build(toolchain=test_parent_dir / tc_fname)
proc.check_run([(tmp_project.build_root / 'use-cryptopp').with_suffix(paths.EXE_SUFFIX)])

tests/deps/use-spdlog/gcc.tc.jsonc → tests/use-spdlog/gcc.tc.jsonc View file

@@ -3,5 +3,4 @@
"cxx_version": 'c++17',
"cxx_compiler": 'g++-9',
"flags": '-DSPDLOG_COMPILED_LIB',
"link_flags": '-static-libgcc -static-libstdc++'
}

tests/deps/use-spdlog/msvc.tc.jsonc → tests/use-spdlog/msvc.tc.jsonc View file


tests/deps/use-spdlog/project/catalog.json → tests/use-spdlog/project/catalog.json View file


tests/deps/use-spdlog/project/library.json5 → tests/use-spdlog/project/library.json5 View file


tests/deps/use-spdlog/project/package.json5 → tests/use-spdlog/project/package.json5 View file


tests/deps/use-spdlog/project/src/spdlog_user.cpp → tests/use-spdlog/project/src/spdlog_user.cpp View file


tests/deps/use-spdlog/project/src/spdlog_user.hpp → tests/use-spdlog/project/src/spdlog_user.hpp View file


tests/deps/use-spdlog/project/src/use-spdlog.main.cpp → tests/use-spdlog/project/src/use-spdlog.main.cpp View file


+ 13
- 0
tests/use-spdlog/use_spdlog_test.py View file

@@ -0,0 +1,13 @@
from pathlib import Path

from dds_ci.testing import RepoFixture, ProjectOpener
from dds_ci import proc, paths, toolchain


def test_get_build_use_spdlog(test_parent_dir: Path, project_opener: ProjectOpener, http_repo: RepoFixture) -> None:
proj = project_opener.open('project')
http_repo.import_json_file(proj.root / 'catalog.json')
proj.dds.repo_add(http_repo.url)
tc_fname = 'gcc.tc.jsonc' if 'gcc' in toolchain.get_default_test_toolchain().name else 'msvc.tc.jsonc'
proj.build(toolchain=test_parent_dir / tc_fname)
proc.check_run([(proj.build_root / 'use-spdlog').with_suffix(paths.EXE_SUFFIX)])

+ 44
- 7
tools/dds_ci/dds.py View file

@@ -3,7 +3,8 @@ import shutil
from pathlib import Path
from typing import Optional

from . import paths, proc
from . import paths, proc, toolchain as tc_mod
from dds_ci.util import Pathish


class DDSWrapper:
@@ -11,10 +12,22 @@ class DDSWrapper:
Wraps a 'dds' executable with some convenience APIs that invoke various
'dds' subcommands.
"""
def __init__(self, path: Path) -> None:
def __init__(self,
path: Path,
*,
repo_dir: Optional[Pathish] = None,
catalog_path: Optional[Pathish] = None,
default_cwd: Optional[Pathish] = None) -> None:
self.path = path
self.repo_dir = paths.PREBUILT_DIR / 'ci-repo'
self.catalog_path = paths.PREBUILT_DIR / 'ci-catalog.db'
self.repo_dir = Path(repo_dir or (paths.PREBUILT_DIR / 'ci-repo'))
self.catalog_path = Path(catalog_path or (self.repo_dir.parent / 'ci-catalog.db'))
self.default_cwd = default_cwd or Path.cwd()

def clone(self) -> 'DDSWrapper':
return DDSWrapper(self.path,
repo_dir=self.repo_dir,
catalog_path=self.catalog_path,
default_cwd=self.default_cwd)

@property
def catalog_path_arg(self) -> str:
@@ -26,6 +39,10 @@ class DDSWrapper:
"""The arguments for --repo-dir"""
return f'--repo-dir={self.repo_dir}'

def set_repo_scratch(self, path: Pathish) -> None:
self.repo_dir = Path(path) / 'data'
self.catalog_path = Path(path) / 'catalog.db'

def clean(self, *, build_dir: Optional[Path] = None, repo: bool = True, catalog: bool = True) -> None:
"""
Clean out prior executable output, including repos, catalog, and
@@ -38,18 +55,27 @@ class DDSWrapper:
if catalog and self.catalog_path.exists():
self.catalog_path.unlink()

def run(self, args: proc.CommandLine) -> None:
def run(self, args: proc.CommandLine, *, cwd: Optional[Pathish] = None) -> None:
"""Execute the 'dds' executable with the given arguments"""
proc.check_run([self.path, args])
proc.check_run([self.path, args], cwd=cwd or self.default_cwd)

def catalog_json_import(self, path: Path) -> None:
"""Run 'catalog import' to import the given JSON. Only applicable to older 'dds'"""
self.run(['catalog', 'import', self.catalog_path_arg, f'--json={path}'])

def catalog_get(self, what: str) -> None:
self.run(['catalog', 'get', self.catalog_path_arg, what])

def repo_add(self, url: str) -> None:
self.run(['repo', 'add', self.catalog_path_arg, url, '--update'])

def repo_import(self, sdist: Path) -> None:
self.run(['repo', self.repo_dir_arg, 'import', sdist])

def build(self,
*,
toolchain: Path,
root: Path,
toolchain: Optional[Path] = None,
build_root: Optional[Path] = None,
jobs: Optional[int] = None) -> None:
"""
@@ -60,6 +86,7 @@ class DDSWrapper:
:param build_root: The root directory where the output will be written.
:param jobs: The number of jobs to use. Default is CPU-count + 2
"""
toolchain = toolchain or tc_mod.get_default_test_toolchain()
jobs = jobs or multiprocessing.cpu_count() + 2
self.run([
'build',
@@ -70,3 +97,13 @@ class DDSWrapper:
f'--project-dir={root}',
f'--out={build_root}',
])

def build_deps(self, args: proc.CommandLine, *, toolchain: Optional[Path] = None) -> None:
toolchain = toolchain or tc_mod.get_default_test_toolchain()
self.run([
'build-deps',
f'--toolchain={toolchain}',
self.catalog_path_arg,
self.repo_dir_arg,
args,
])
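
A minimal sketch of how the extended DDSWrapper API is meant to be driven; the executable path and scratch directory below are placeholders, not part of this commit:

from pathlib import Path

from dds_ci.dds import DDSWrapper

# Wrap a previously built dds executable (hypothetical path).
dds = DDSWrapper(Path('_build/dds'))

# Point the wrapper at an isolated repo/catalog scratch directory so that
# parallel test workers do not share state (hypothetical path).
dds.set_repo_scratch(Path('/tmp/dds-test-scratch'))

# repo_add and build_deps now live on the wrapper; build_deps falls back to
# the default test toolchain when none is given.
dds.repo_add('https://dds.pizza/repo')
dds.build_deps(['neo-fun=0.3.0'])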

+ 3
- 3
tools/dds_ci/msvs.py View file

@@ -2,7 +2,7 @@ import argparse
import json
import os
from pathlib import Path
from typing import Optional, Dict
from typing import Optional, Dict, Any
from typing_extensions import Protocol

from . import paths
@@ -12,7 +12,7 @@ class Arguments(Protocol):
out: Optional[Path]


def gen_task_json_data() -> Dict:
def gen_task_json_data() -> Dict[str, Any]:
dds_ci_exe = paths.find_exe('dds-ci')
assert dds_ci_exe, 'Unable to find the dds-ci executable. This command should be run in a Poetry'
envs = {key: os.environ[key]
@@ -40,7 +40,7 @@ def gen_task_json_data() -> Dict:
return task


def generate_vsc_task():
def generate_vsc_task() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--out', '-o', help='File to write into', type=Path)
args: Arguments = parser.parse_args()

+ 5
- 3
tools/dds_ci/proc.py View file

@@ -1,8 +1,10 @@
from pathlib import PurePath, Path
from pathlib import PurePath
from typing import Iterable, Union, Optional, Iterator
from typing_extensions import Protocol
import subprocess

from .util import Pathish

CommandLineArg = Union[str, PurePath, int, float]
CommandLineArg1 = Union[CommandLineArg, Iterable[CommandLineArg]]
CommandLineArg2 = Union[CommandLineArg1, Iterable[CommandLineArg1]]
@@ -36,7 +38,7 @@ def flatten_cmd(cmd: CommandLine) -> Iterable[str]:
assert False, f'Invalid command line element: {repr(cmd)}'


def run(*cmd: CommandLine, cwd: Optional[Path] = None, check: bool = False) -> ProcessResult:
def run(*cmd: CommandLine, cwd: Optional[Pathish] = None, check: bool = False) -> ProcessResult:
return subprocess.run(
list(flatten_cmd(cmd)),
cwd=cwd,
@@ -44,7 +46,7 @@ def run(*cmd: CommandLine, cwd: Optional[Path] = None, check: bool = False) -> P
)


def check_run(*cmd: CommandLine, cwd: Optional[Path] = None) -> ProcessResult:
def check_run(*cmd: CommandLine, cwd: Optional[Pathish] = None) -> ProcessResult:
return subprocess.run(
list(flatten_cmd(cmd)),
cwd=cwd,
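
For reference, a small sketch of the flattened command-line handling together with the widened cwd type; the 'echo' invocation is purely illustrative:

from pathlib import PurePath

from dds_ci import proc

# Command lines may mix strings, paths, numbers, and nested iterables;
# flatten_cmd() stringifies and splices them before calling subprocess.run.
res = proc.check_run(['echo', ['nested', PurePath('args')], 42], cwd='.')
print(res.returncode)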

+ 10
- 0
tools/dds_ci/testing/__init__.py View file

@@ -0,0 +1,10 @@
from .fixtures import Project, ProjectOpener, PackageJSON, LibraryJSON
from .http import RepoFixture

__all__ = (
'Project',
'ProjectOpener',
'PackageJSON',
'LibraryJSON',
'RepoFixture',
)

+ 180
- 0
tools/dds_ci/testing/fixtures.py View file

@@ -0,0 +1,180 @@
"""
Test fixtures used by DDS in pytest
"""

from pathlib import Path
import pytest
import json
import shutil
from typing import Sequence, cast, Optional
from typing_extensions import TypedDict

from _pytest.config import Config as PyTestConfig
from _pytest.tmpdir import TempPathFactory
from _pytest.fixtures import FixtureRequest

from dds_ci import toolchain, paths
from ..dds import DDSWrapper
from ..util import Pathish
tc_mod = toolchain


def ensure_absent(path: Pathish) -> None:
path = Path(path)
if path.is_dir():
shutil.rmtree(path)
elif path.exists():
path.unlink()
else:
# File does not exist, so we are safe to ignore it
pass


class _PackageJSONRequired(TypedDict):
name: str
namespace: str
version: str


class PackageJSON(_PackageJSONRequired, total=False):
depends: Sequence[str]


class _LibraryJSONRequired(TypedDict):
name: str


class LibraryJSON(_LibraryJSONRequired, total=False):
uses: Sequence[str]


class Project:
def __init__(self, dirpath: Path, dds: DDSWrapper) -> None:
self.dds = dds
self.root = dirpath
self.build_root = dirpath / '_build'

@property
def package_json(self) -> PackageJSON:
return cast(PackageJSON, json.loads(self.root.joinpath('package.jsonc').read_text()))

@package_json.setter
def package_json(self, data: PackageJSON) -> None:
self.root.joinpath('package.jsonc').write_text(json.dumps(data, indent=2))

@property
def library_json(self) -> LibraryJSON:
return cast(LibraryJSON, json.loads(self.root.joinpath('library.jsonc').read_text()))

@library_json.setter
def library_json(self, data: LibraryJSON) -> None:
self.root.joinpath('library.jsonc').write_text(json.dumps(data, indent=2))

@property
def project_dir_arg(self) -> str:
"""Argument for --project-dir"""
return f'--project-dir={self.root}'

def build(self, *, toolchain: Optional[Pathish] = None) -> None:
"""
Execute 'dds build' on the project
"""
with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc)

def sdist_create(self) -> None:
self.build_root.mkdir(exist_ok=True, parents=True)
self.dds.run(['sdist', 'create', self.project_dir_arg], cwd=self.build_root)

def sdist_export(self) -> None:
self.dds.run(['sdist', 'export', self.dds.repo_dir_arg, self.project_dir_arg])

def write(self, path: Pathish, content: str) -> Path:
path = Path(path)
if not path.is_absolute():
path = self.root / path
path.parent.mkdir(exist_ok=True, parents=True)
path.write_text(content)
return path


@pytest.fixture()
def test_parent_dir(request: FixtureRequest) -> Path:
return Path(request.fspath).parent


class ProjectOpener():
def __init__(self, dds: DDSWrapper, request: FixtureRequest, worker: str,
tmp_path_factory: TempPathFactory) -> None:
self.dds = dds
self._request = request
self._worker_id = worker
self._tmppath_fac = tmp_path_factory

@property
def test_name(self) -> str:
"""The name of the test that requested this opener"""
return str(self._request.function.__name__)

@property
def test_dir(self) -> Path:
"""The directory that contains the test that requested this opener"""
return Path(self._request.fspath).parent

def open(self, dirpath: Pathish) -> Project:
dirpath = Path(dirpath)
if not dirpath.is_absolute():
dirpath = self.test_dir / dirpath

proj_copy = self.test_dir / '__test_project'
if self._worker_id != 'master':
proj_copy = self._tmppath_fac.mktemp('test-project-') / self.test_name
else:
self._request.addfinalizer(lambda: ensure_absent(proj_copy))

shutil.copytree(dirpath, proj_copy)
new_dds = self.dds.clone()

if self._worker_id == 'master':
repo_dir = self.test_dir / '__test_repo'
else:
repo_dir = self._tmppath_fac.mktemp('test-repo-') / self.test_name

new_dds.set_repo_scratch(repo_dir)
new_dds.default_cwd = proj_copy
self._request.addfinalizer(lambda: ensure_absent(repo_dir))

return Project(proj_copy, new_dds)


@pytest.fixture()
def project_opener(request: FixtureRequest, worker_id: str, dds_2: DDSWrapper,
tmp_path_factory: TempPathFactory) -> ProjectOpener:
opener = ProjectOpener(dds_2, request, worker_id, tmp_path_factory)
return opener


@pytest.fixture()
def tmp_project(request: FixtureRequest, worker_id: str, project_opener: ProjectOpener,
tmp_path_factory: TempPathFactory) -> Project:
if worker_id != 'master':
proj_dir = tmp_path_factory.mktemp('temp-project')
return project_opener.open(proj_dir)

proj_dir = project_opener.test_dir / '__test_project_empty'
ensure_absent(proj_dir)
proj_dir.mkdir()
proj = project_opener.open(proj_dir)
request.addfinalizer(lambda: ensure_absent(proj_dir))
return proj


@pytest.fixture(scope='session')
def dds_2(dds_exe: Path) -> DDSWrapper:
return DDSWrapper(dds_exe)


@pytest.fixture(scope='session')
def dds_exe(pytestconfig: PyTestConfig) -> Path:
opt = pytestconfig.getoption('--dds-exe') or paths.CUR_BUILT_DDS
return Path(opt)
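
A short sketch of how tests consume these fixtures, mirroring the migrated tests earlier in this commit; the test names and the 'projects/sdist' path are illustrative:

from dds_ci.testing import Project, ProjectOpener


def test_tmp_project_sketch(tmp_project: Project) -> None:
    # An empty project in scratch space with its own DDSWrapper clone.
    tmp_project.package_json = {'name': 'sketch', 'version': '0.1.0', 'namespace': 'test'}
    tmp_project.write('src/answer.cpp', 'int answer() { return 42; }')
    tmp_project.build()


def test_project_opener_sketch(project_opener: ProjectOpener) -> None:
    # Copies a checked-in project that lives next to the test file into
    # scratch space before handing it back.
    proj = project_opener.open('projects/sdist')
    proj.sdist_create()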

tests/http.py → tools/dds_ci/testing/http.py View file

@@ -53,7 +53,7 @@ def run_http_server(dirpath: Path, port: int) -> Iterator[ServerInfo]:
httpd.shutdown()


@pytest.yield_fixture() # type: ignore
@pytest.fixture()
def http_tmp_dir_server(tmp_path: Path, unused_tcp_port: int) -> Iterator[ServerInfo]:
"""
Creates an HTTP server that serves the contents of a new
@@ -96,7 +96,7 @@ class RepoFixture:
])


@pytest.yield_fixture() # type: ignore
@pytest.fixture()
def http_repo(dds_exe: Path, http_tmp_dir_server: ServerInfo) -> Iterator[RepoFixture]:
"""
Fixture that creates a new empty dds repository and an HTTP server to serve
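
The change above swaps the deprecated pytest.yield_fixture decorator for plain pytest.fixture, which has handled generator-style fixtures since pytest 3.0. A generic sketch of the pattern, not code from this commit:

import pytest


@pytest.fixture()
def scratch_resource():
    resource = object()   # setup
    yield resource        # value handed to the test
    del resource          # teardown runs after the test finishes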

+ 4
- 2
tools/dds_ci/toolchain.py View file

@@ -8,19 +8,21 @@ import distro
import json5

from . import paths
from .util import Pathish


@contextmanager
def fixup_toolchain(json_file: Path) -> Iterator[Path]:
def fixup_toolchain(json_file: Pathish) -> Iterator[Path]:
"""
Augment the toolchain at the given path by adding 'ccache' or -fuse-ld=lld,
if those tools are available on the system. Yields a new toolchain file
based on 'json_file'
"""
json_file = Path(json_file)
data = json5.loads(json_file.read_text())
# Check if we can add ccache
ccache = paths.find_exe('ccache')
if ccache:
if ccache and data.get('compiler_id') in ('gnu', 'clang'):
print('Found ccache:', ccache)
data['compiler_launcher'] = [str(ccache)]
# Check for lld for use with GCC/Clang
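
A brief sketch of how fixup_toolchain() is used now that it accepts any Pathish value:

from dds_ci import toolchain

# Yields a (possibly rewritten) toolchain file with ccache/lld enabled when
# those tools are present; pass the resulting path on to dds.
with toolchain.fixup_toolchain(toolchain.get_default_test_toolchain()) as tc:
    print('building with toolchain:', tc)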

+ 6
- 0
tools/dds_ci/util.py View file

@@ -0,0 +1,6 @@
from pathlib import PurePath
from os import PathLike
from typing import Union

#: A path, string, or convertible-to-Path object
Pathish = Union[PathLike, PurePath, str]
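
A tiny usage sketch of the new Pathish alias: accept anything path-like and normalize it once at the boundary (the helper name is illustrative, not part of the commit):

from pathlib import Path

from dds_ci.util import Pathish


def scratch_file(base: Pathish, name: str) -> Path:
    # str, PurePath, and os.PathLike inputs all normalize the same way.
    return Path(base) / name


print(scratch_file('/tmp', 'catalog.db'))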
