
Everything passes mypy and pylint

default_compile_flags
vector-of-bool 3 years ago
parent commit: eb4d0acb6f
28 changed files with 441 additions and 245 deletions
  1. .pylintrc (+161 -0)
  2. mypy.ini (+6 -0)
  3. tests/__init__.py (+11 -1)
  4. tests/basics/config_template/test_config_template.py (+2 -3)
  5. tests/basics/test_app_only.py (+8 -8)
  6. tests/basics/test_basics.py (+10 -10)
  7. tests/basics/test_simple.py (+9 -12)
  8. tests/basics/test_test_only.py (+8 -7)
  9. tests/catalog/create_test.py (+2 -2)
  10. tests/catalog/get_test.py (+3 -5)
  11. tests/conftest.py (+39 -16)
  12. tests/db/test_compile_deps.py (+7 -8)
  13. tests/dds.py (+27 -46)
  14. tests/deps/build-deps/test_build_deps.py (+5 -5)
  15. tests/deps/deps_test.py (+4 -2)
  16. tests/deps/do_test.py (+4 -4)
  17. tests/deps/use-cryptopp/test_use_cryptopp.py (+4 -4)
  18. tests/deps/use-spdlog/use_spdlog_test.py (+4 -4)
  19. tests/errors/errors_test.py (+1 -1)
  20. tests/fileutil.py (+3 -2)
  21. tests/http.py (+10 -10)
  22. tests/sdist/sdist_test.py (+3 -3)
  23. tests/test_drivers/catch/test_catch.py (+3 -3)
  24. tools/dds_ci/bootstrap.py (+2 -3)
  25. tools/dds_ci/dds.py (+13 -8)
  26. tools/dds_ci/main.py (+7 -67)
  27. tools/dds_ci/proc.py (+24 -11)
  28. tools/dds_ci/toolchain.py (+61 -0)

.pylintrc (+161 -0)

[MASTER]

jobs=1
persistent=yes
suggestion-mode=yes
unsafe-load-any-extension=no

[MESSAGES CONTROL]

confidence=
disable=C,too-few-public-methods,redefined-outer-name
enable=c-extension-no-member


[REPORTS]

evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
output-format=colorized
reports=no
score=yes


[REFACTORING]

max-nested-blocks=5
never-returning-functions=optparse.Values,sys.exit


[BASIC]

argument-naming-style=snake_case
attr-naming-style=snake_case
class-attribute-naming-style=snake_case
class-naming-style=PascalCase
const-naming-style=UPPER_CASE
docstring-min-length=-1
function-naming-style=snake_case
# Good variable names which should always be accepted, separated by a comma
good-names=i,
j,
k,
ex,
Run,
fd,
_

include-naming-hint=no
inlinevar-naming-style=any
method-naming-style=snake_case
module-naming-style=snake_case
name-group=
no-docstring-rgx=^_
variable-naming-style=snake_case


[FORMAT]

expected-line-ending-format=LF
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
max-line-length=100
max-module-lines=1000
no-space-check=trailing-comma,
dict-separator
single-line-class-stmt=no
single-line-if-stmt=no


[LOGGING]

# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging


[MISCELLANEOUS]

# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO


[SIMILARITIES]

ignore-comments=yes
ignore-docstrings=yes
ignore-imports=no
min-similarity-lines=4


[SPELLING]

max-spelling-suggestions=4
spelling-dict=
spelling-ignore-words=
spelling-private-dict-file=
spelling-store-unknown-words=no


[TYPECHECK]

contextmanager-decorators=contextlib.contextmanager
generated-members=
ignore-mixin-members=yes
ignore-on-opaque-inference=yes
ignored-classes=optparse.Values,thread._local,_thread._local
ignored-modules=
missing-member-hint=yes
missing-member-hint-distance=1
missing-member-max-choices=1


[VARIABLES]

additional-builtins=
allow-global-unused-variables=yes
callbacks=cb_,
_cb
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
ignored-argument-names=_.*|^ignored_|^unused_
init-import=no
redefining-builtins-modules=six.moves,past.builtins,future.builtins


[CLASSES]

defining-attr-methods=__init__,__new__

exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
valid-classmethod-first-arg=cls
valid-metaclass-classmethod-first-arg=mcs


[DESIGN]

max-args=5
max-attributes=7
max-bool-expr=5
max-branches=12
max-locals=15
max-parents=7
max-public-methods=20
max-returns=6
max-statements=50
min-public-methods=2


[IMPORTS]

allow-wildcard-with-all=no
analyse-fallback-blocks=no
deprecated-modules=optparse,tkinter.tix
ext-import-graph=
import-graph=
int-import-graph=
known-standard-library=
known-third-party=enchant
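
The disable of redefined-outer-name above is what keeps pylint quiet about the pytest fixture style used throughout the test suite in this commit, where a test parameter deliberately shadows the fixture function of the same name. A minimal illustration (not part of this commit; the fixture body is made up):

    import pytest


    @pytest.fixture
    def dds_exe() -> str:
        # Hypothetical fixture, only to show the shadowing pattern.
        return 'dds'


    def test_uses_fixture(dds_exe: str) -> None:
        # The parameter shadows the fixture function above; without the
        # .pylintrc disable, pylint reports redefined-outer-name here.
        assert dds_exe == 'dds'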

mypy.ini (+6 -0)

[mypy]
strict=True
ignore_missing_imports=True
incremental=True
sqlite_cache=True
mypy_path = tools/
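
With strict=True, mypy enables checks such as disallow_untyped_defs, which is why nearly every test function in this commit gains an explicit -> None return annotation. A before/after sketch (illustrative only, mirroring the test_build_empty change below):

    from tests import DDS

    # Rejected under `mypy --strict`: the def carries no annotations at all.
    # def test_build_empty(dds):
    #     dds.build()

    # Accepted: both the parameter and the (None) return type are annotated.
    def test_build_empty(dds: DDS) -> None:
        dds.build()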

tests/__init__.py (+11 -1)

  sys.path.append(str(Path(__file__).absolute().parent.parent / 'tools'))

  from .dds import DDS, DDSFixtureParams, scoped_dds, dds_fixture_conf, dds_fixture_conf_1
  from .http import http_repo, RepoFixture

+ __all__ = (
+     'DDS',
+     'DDSFixtureParams',
+     'scoped_dds',
+     'dds_fixture_conf',
+     'dds_fixture_conf_1',
+     'http_repo',
+     'RepoFixture',
+ )

tests/basics/config_template/test_config_template.py (+2 -3)

- import pytest
  from time import sleep

  from tests import DDS, dds_fixture_conf_1


  @dds_fixture_conf_1('copy_only')
- def test_config_template(dds: DDS):
+ def test_config_template(dds: DDS) -> None:
      generated_fpath = dds.build_dir / '__dds/gen/info.hpp'
      assert not generated_fpath.is_file()
      dds.build()


  @dds_fixture_conf_1('simple')
- def test_simple_substitution(dds: DDS):
+ def test_simple_substitution(dds: DDS) -> None:
      dds.build()

tests/basics/test_app_only.py (+8 -8)

- from contextlib import ExitStack
  from tests import DDS
  from tests.fileutil import set_contents

+ from dds_ci import paths


- def test_lib_with_just_app(dds: DDS):
-     dds.scope.enter_context(
-         set_contents(
-             dds.source_root / 'src/foo.main.cpp',
-             b'int main() {}',
-         ))
+ def test_lib_with_just_app(dds: DDS) -> None:
+     dds.scope.enter_context(set_contents(
+         dds.source_root / 'src/foo.main.cpp',
+         b'int main() {}',
+     ))

      dds.build()
-     assert (dds.build_dir / f'foo{dds.exe_suffix}').is_file()
+     assert (dds.build_dir / f'foo{paths.EXE_SUFFIX}').is_file()

tests/basics/test_basics.py (+10 -10)

- from contextlib import contextmanager
+ from typing import ContextManager
+ from pathlib import Path
  from tests import DDS
  from tests.fileutil import ensure_dir, set_contents


- def test_build_empty(dds: DDS):
+ def test_build_empty(dds: DDS) -> None:
      assert not dds.source_root.exists()
      dds.scope.enter_context(ensure_dir(dds.source_root))
      dds.build()


- def test_build_simple(dds: DDS):
-     dds.scope.enter_context(
-         set_contents(dds.source_root / 'src/f.cpp', b'void foo() {}'))
+ def test_build_simple(dds: DDS) -> None:
+     dds.scope.enter_context(set_contents(dds.source_root / 'src/f.cpp', b'void foo() {}'))
      dds.build()


- def basic_pkg_dds(dds: DDS):
+ def basic_pkg_dds(dds: DDS) -> ContextManager[Path]:
      return set_contents(
          dds.source_root / 'package.json5', b'''
          {
      ''')


- def test_empty_with_pkg_dds(dds: DDS):
+ def test_empty_with_pkg_dds(dds: DDS) -> None:
      dds.scope.enter_context(basic_pkg_dds(dds))
      dds.build()


- def test_empty_with_lib_dds(dds: DDS):
+ def test_empty_with_lib_dds(dds: DDS) -> None:
      dds.scope.enter_context(basic_pkg_dds(dds))
      dds.build()


- def test_empty_sdist_create(dds: DDS):
+ def test_empty_sdist_create(dds: DDS) -> None:
      dds.scope.enter_context(basic_pkg_dds(dds))
      dds.sdist_create()


- def test_empty_sdist_export(dds: DDS):
+ def test_empty_sdist_export(dds: DDS) -> None:
      dds.scope.enter_context(basic_pkg_dds(dds))
      dds.sdist_export()

tests/basics/test_simple.py (+9 -12)

  from contextlib import ExitStack
  from tests import DDS
- from tests.fileutil import set_contents


- def test_simple_lib(dds: DDS, scope: ExitStack):
-     scope.enter_context(
-         dds.set_contents(
-             'src/foo.cpp',
-             b'int the_answer() { return 42; }',
-         ))
+ def test_simple_lib(dds: DDS, scope: ExitStack) -> None:
+     scope.enter_context(dds.set_contents(
+         'src/foo.cpp',
+         b'int the_answer() { return 42; }',
+     ))

-     scope.enter_context(
-         dds.set_contents(
-             'library.json5',
-             b'''{
+     scope.enter_context(dds.set_contents(
+         'library.json5',
+         b'''{
          name: 'TestLibrary',
          }''',
-         ))
+     ))

      scope.enter_context(
          dds.set_contents(

tests/basics/test_test_only.py (+8 -7)

  from tests import DDS
  from tests.fileutil import set_contents

+ from dds_ci import paths


- def test_lib_with_just_test(dds: DDS, scope: ExitStack):
-     scope.enter_context(
-         set_contents(
-             dds.source_root / 'src/foo.test.cpp',
-             b'int main() {}',
-         ))
+ def test_lib_with_just_test(dds: DDS, scope: ExitStack) -> None:
+     scope.enter_context(set_contents(
+         dds.source_root / 'src/foo.test.cpp',
+         b'int main() {}',
+     ))

      dds.build(tests=True, apps=False, warnings=False)
-     assert (dds.build_dir / f'test/foo{dds.exe_suffix}').is_file()
+     assert (dds.build_dir / f'test/foo{paths.EXE_SUFFIX}').is_file()

tests/catalog/create_test.py (+2 -2)

- from tests import dds, DDS
+ from tests import DDS
  from tests.fileutil import ensure_dir


- def test_create_catalog(dds: DDS):
+ def test_create_catalog(dds: DDS) -> None:
      dds.scope.enter_context(ensure_dir(dds.build_dir))
      dds.catalog_create()
      assert dds.catalog_path.is_file()

tests/catalog/get_test.py (+3 -5)

- import json

  from tests.fileutil import ensure_dir
- from tests import dds, DDS
+ from tests import DDS
  from tests.http import RepoFixture


- def test_get(dds: DDS, http_repo: RepoFixture):
+ def test_get(dds: DDS, http_repo: RepoFixture) -> None:
      http_repo.import_json_data({
          'version': 2,
          'packages': {
      assert (dds.scratch_dir / 'neo-sqlite3@0.3.0/package.jsonc').is_file()


- def test_get_http(dds: DDS, http_repo: RepoFixture):
+ def test_get_http(dds: DDS, http_repo: RepoFixture) -> None:
      http_repo.import_json_data({
          'packages': {
              'cmcstl2': {

tests/conftest.py (+39 -16)

  from contextlib import ExitStack
- from typing import Optional
+ from typing import Any, Callable, Iterator
+ from typing_extensions import Protocol
  from pathlib import Path
  import shutil
  from subprocess import check_call

  import pytest

- from tests import scoped_dds, DDSFixtureParams
- from .http import *  # Exposes the HTTP fixtures
+ from dds_ci import paths
+ from tests import scoped_dds, DDSFixtureParams, DDS
+ # Exposes the HTTP fixtures:
+ from .http import http_repo, http_tmp_dir_server  # pylint: disable=unused-import


+ class TempPathFactory(Protocol):
+     def mktemp(self, basename: str, numbered: bool = True) -> Path:
+         ...


+ class PyTestConfig(Protocol):
+     def getoption(self, name: str) -> Any:
+         ...


+ class TestRequest(Protocol):
+     fixturename: str
+     scope: str
+     config: PyTestConfig
+     fspath: str
+     function: Callable[..., Any]
+     param: DDSFixtureParams


  @pytest.fixture(scope='session')
- def dds_exe(pytestconfig) -> Path:
-     return Path(pytestconfig.getoption('--dds-exe'))
+ def dds_exe(pytestconfig: PyTestConfig) -> Path:
+     opt = pytestconfig.getoption('--dds-exe') or paths.CUR_BUILT_DDS
+     return Path(opt)


- @pytest.yield_fixture(scope='session')
- def dds_pizza_catalog(dds_exe: Path, tmp_path_factory) -> Path:
+ @pytest.yield_fixture(scope='session')  # type: ignore
+ def dds_pizza_catalog(dds_exe: Path, tmp_path_factory: TempPathFactory) -> Path:
      tmpdir: Path = tmp_path_factory.mktemp(basename='dds-catalog')
      cat_path = tmpdir / 'catalog.db'
      check_call([str(dds_exe), 'repo', 'add', 'https://dds.pizza/repo', '--update', f'--catalog={cat_path}'])
      yield cat_path


- @pytest.yield_fixture
- def dds(request, dds_exe: Path, tmp_path: Path, worker_id: str, scope: ExitStack):
+ @pytest.yield_fixture  # type: ignore
+ def dds(request: TestRequest, dds_exe: Path, tmp_path: Path, worker_id: str, scope: ExitStack) -> Iterator[DDS]:
      test_source_dir = Path(request.fspath).absolute().parent
      test_root = test_source_dir

      project_dir = test_root / params.subdir

      # Create the instance. Auto-clean when we're done
-     yield scope.enter_context(scoped_dds(dds_exe, test_root, project_dir, request.function.__name__))
+     yield scope.enter_context(scoped_dds(dds_exe, test_root, project_dir))


- @pytest.fixture
- def scope():
+ @pytest.yield_fixture  # type: ignore
+ def scope() -> Iterator[ExitStack]:
      with ExitStack() as scope:
          yield scope


- def pytest_addoption(parser):
+ def pytest_addoption(parser: Any) -> None:
      parser.addoption('--test-deps',
                       action='store_true',
                       default=False,
                       help='Run the exhaustive and intensive dds-deps tests')
-     parser.addoption('--dds-exe', help='Path to the dds executable under test', required=True, type=Path)
+     parser.addoption('--dds-exe', help='Path to the dds executable under test', type=Path)


- def pytest_configure(config):
+ def pytest_configure(config: Any) -> None:
      config.addinivalue_line('markers', 'deps_test: Deps tests are slow. Enable with --test-deps')


- def pytest_collection_modifyitems(config, items):
+ def pytest_collection_modifyitems(config: PyTestConfig, items: Any) -> None:
      if config.getoption('--test-deps'):
          return
      for item in items:
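
The new Protocol classes (TempPathFactory, PyTestConfig, TestRequest) are structural stand-ins for pytest objects that ship without type stubs here: any object with a matching attribute and method shape satisfies them, with no inheritance required. A minimal sketch (FakeConfig is invented for illustration):

    from typing import Any
    from typing_extensions import Protocol


    class PyTestConfig(Protocol):
        def getoption(self, name: str) -> Any:
            ...


    class FakeConfig:
        """Not derived from PyTestConfig, but structurally compatible."""
        def getoption(self, name: str) -> Any:
            return name == '--test-deps'


    def wants_deps_tests(config: PyTestConfig) -> bool:
        return bool(config.getoption('--test-deps'))


    assert wants_deps_tests(FakeConfig())  # accepted by mypy: the shapes match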

tests/db/test_compile_deps.py (+7 -8)

  import subprocess
- import time

  import pytest

- from tests import dds, DDS, dds_fixture_conf_1
- from dds_ci import proc
+ from tests import DDS
+ from dds_ci import proc, paths

  ## #############################################################################
  ## #############################################################################

  def build_and_get_rc(dds: DDS) -> int:
      dds.build()
-     app = dds.build_dir / ('app' + dds.exe_suffix)
-     return proc.run(app).returncode
+     app = dds.build_dir / ('app' + paths.EXE_SUFFIX)
+     return proc.run([app]).returncode


- def test_simple_rebuild(dds: DDS):
+ def test_simple_rebuild(dds: DDS) -> None:
      """
      Check that changing a source file will update the resulting application.
      """
      assert build_and_get_rc(dds) == 1


- def test_rebuild_header_change(dds: DDS):
+ def test_rebuild_header_change(dds: DDS) -> None:
      """Change the content of the header which defines the values"""
      assert build_and_get_rc(dds) == 0
      dds.scope.enter_context(
      assert build_and_get_rc(dds) == (88 - 63)


- def test_partial_build_rebuild(dds: DDS):
+ def test_partial_build_rebuild(dds: DDS) -> None:
      """
      Change the content of a header, but cause one user of that header to fail
      compilation. The fact that compilation fails means it is still `out-of-date`,
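
Note the switch from proc.run(app) to proc.run([app]) above: with the reworked proc.CommandLine protocol (see tools/dds_ci/proc.py later in this diff), a bare Path is not itself an iterable command line, so the executable is wrapped in a one-element list. A sketch of the call, with a made-up path:

    from pathlib import Path
    from dds_ci import proc

    app = Path('_build/app')   # illustrative path to a built test program
    result = proc.run([app])   # the list satisfies the CommandLine protocol
    print('exit code:', result.returncode)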

tests/dds.py (+27 -46)

  import itertools
  from contextlib import contextmanager, ExitStack
  from pathlib import Path
- from typing import Iterable, Union, Any, Dict, NamedTuple, ContextManager, Optional
- import subprocess
+ from typing import Union, NamedTuple, ContextManager, Optional, Iterator, TypeVar
  import shutil

  import pytest
+ import _pytest

- from dds_ci import proc
+ from dds_ci import proc, toolchain as tc_mod

  from . import fileutil

+ T = TypeVar('T')


  class DDS:
      def __init__(self, dds_exe: Path, test_dir: Path, project_dir: Path, scope: ExitStack) -> None:

      def lmi_path(self) -> Path:
          return self.scratch_dir / 'INDEX.lmi'

-     def cleanup(self):
+     def cleanup(self) -> None:
          if self.scratch_dir.exists():
              shutil.rmtree(self.scratch_dir)

-     def run_unchecked(self, cmd: proc.CommandLine, *, cwd: Path = None) -> subprocess.CompletedProcess:
+     def run_unchecked(self, cmd: proc.CommandLine, *, cwd: Optional[Path] = None) -> proc.ProcessResult:
          full_cmd = itertools.chain([self.dds_exe, '-ltrace'], cmd)
-         return proc.run(full_cmd, cwd=cwd or self.source_root)
+         return proc.run(full_cmd, cwd=cwd or self.source_root)  # type: ignore

-     def run(self, cmd: proc.CommandLine, *, cwd: Path = None, check=True) -> subprocess.CompletedProcess:
-         cmdline = list(proc.flatten_cmd(cmd))
-         res = self.run_unchecked(cmd, cwd=cwd)
-         if res.returncode != 0 and check:
-             raise subprocess.CalledProcessError(res.returncode, [self.dds_exe] + cmdline, res.stdout)
-         return res
+     def run(self, cmd: proc.CommandLine, *, cwd: Optional[Path] = None, check: bool = True) -> proc.ProcessResult:
+         full_cmd = itertools.chain([self.dds_exe, '-ltrace'], cmd)
+         return proc.run(full_cmd, cwd=cwd, check=check)  # type: ignore

      @property
      def repo_dir_arg(self) -> str:

      def catalog_path_arg(self) -> str:
          return f'--catalog={self.catalog_path}'

-     def build_deps(self, args: proc.CommandLine, *, toolchain: str = None) -> subprocess.CompletedProcess:
+     def build_deps(self, args: proc.CommandLine, *, toolchain: Optional[str] = None) -> proc.ProcessResult:
          return self.run([
              'build-deps',
-             f'--toolchain={toolchain or self.default_builtin_toolchain}',
+             f'--toolchain={toolchain or tc_mod.get_default_test_toolchain()}',
              self.catalog_path_arg,
              self.repo_dir_arg,
              f'--out={self.deps_build_dir}',

      def build(self,
                *,
-               toolchain: str = None,
+               toolchain: Optional[str] = None,
                apps: bool = True,
                warnings: bool = True,
                catalog_path: Optional[Path] = None,
                tests: bool = True,
-               more_args: proc.CommandLine = [],
-               check: bool = True) -> subprocess.CompletedProcess:
-         catalog_path = catalog_path or self.catalog_path.relative_to(self.source_root)
+               more_args: proc.CommandLine = (),
+               check: bool = True) -> proc.ProcessResult:
+         catalog_path = catalog_path or self.catalog_path
          return self.run(
              [
                  'build',
                  f'--out={self.build_dir}',
-                 f'--toolchain={toolchain or self.default_builtin_toolchain}',
+                 f'--toolchain={toolchain or tc_mod.get_default_test_toolchain()}',
                  f'--catalog={catalog_path}',
-                 f'--repo-dir={self.repo_dir.relative_to(self.source_root)}',
+                 f'--repo-dir={self.repo_dir}',
                  ['--no-tests'] if not tests else [],
                  ['--no-apps'] if not apps else [],
                  ['--no-warnings'] if not warnings else [],
              check=check,
          )

-     def sdist_create(self) -> subprocess.CompletedProcess:
+     def sdist_create(self) -> proc.ProcessResult:
          self.build_dir.mkdir(exist_ok=True, parents=True)
          return self.run(['sdist', 'create', self.project_dir_arg], cwd=self.build_dir)

-     def sdist_export(self) -> subprocess.CompletedProcess:
+     def sdist_export(self) -> proc.ProcessResult:
          return self.run([
              'sdist',
              'export',
              self.repo_dir_arg,
          ])

-     def repo_import(self, sdist: Path) -> subprocess.CompletedProcess:
+     def repo_import(self, sdist: Path) -> proc.ProcessResult:
          return self.run(['repo', self.repo_dir_arg, 'import', sdist])

-     @property
-     def default_builtin_toolchain(self) -> str:
-         if os.name == 'posix':
-             return str(Path(__file__).parent.joinpath('gcc-9.tc.jsonc'))
-         elif os.name == 'nt':
-             return str(Path(__file__).parent.joinpath('msvc.tc.jsonc'))
-         else:
-             raise RuntimeError(f'No default builtin toolchain defined for tests on platform "{os.name}"')
-
-     @property
-     def exe_suffix(self) -> str:
-         if os.name == 'posix':
-             return ''
-         elif os.name == 'nt':
-             return '.exe'
-         else:
-             raise RuntimeError(f'We don\'t know the executable suffix for the platform "{os.name}"')

-     def catalog_create(self) -> subprocess.CompletedProcess:
+     def catalog_create(self) -> proc.ProcessResult:
          self.scratch_dir.mkdir(parents=True, exist_ok=True)
          return self.run(['catalog', 'create', f'--catalog={self.catalog_path}'], cwd=self.test_dir)

-     def catalog_get(self, req: str) -> subprocess.CompletedProcess:
+     def catalog_get(self, req: str) -> proc.ProcessResult:
          return self.run([
              'catalog',
              'get',


  @contextmanager
- def scoped_dds(dds_exe: Path, test_dir: Path, project_dir: Path, name: str):
+ def scoped_dds(dds_exe: Path, test_dir: Path, project_dir: Path) -> Iterator[DDS]:
      if os.name == 'nt':
          dds_exe = dds_exe.with_suffix('.exe')
      with ExitStack() as scope:
          subdir: Union[Path, str]


- def dds_fixture_conf(*argsets: DDSFixtureParams):
+ def dds_fixture_conf(*argsets: DDSFixtureParams) -> _pytest.mark.MarkDecorator:
      args = list(argsets)
      return pytest.mark.parametrize('dds', args, indirect=True, ids=[p.ident for p in args])


- def dds_fixture_conf_1(subdir: Union[Path, str]):
+ def dds_fixture_conf_1(subdir: Union[Path, str]) -> _pytest.mark.MarkDecorator:
      params = DDSFixtureParams(ident='only', subdir=subdir)
      return pytest.mark.parametrize('dds', [params], indirect=True, ids=['.'])
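
For reference, the two decorators at the bottom of tests/dds.py are thin wrappers around pytest.mark.parametrize, and the tests elsewhere in this commit apply them like this (assembled from usages shown on this page, not new code):

    from tests import DDS, DDSFixtureParams, dds_fixture_conf, dds_fixture_conf_1


    @dds_fixture_conf_1('copy_only')
    def test_config_template(dds: DDS) -> None:
        # The 'dds' fixture is parametrized with the 'copy_only' project subdirectory.
        dds.build()


    @dds_fixture_conf(
        DDSFixtureParams('main', 'main'),
        DDSFixtureParams('custom-runner', 'custom-runner'),
    )
    def test_catch_testdriver(dds: DDS) -> None:
        dds.build(tests=True)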

tests/deps/build-deps/test_build_deps.py (+5 -5)

- from tests import dds, DDS
+ from tests import DDS
  from tests.http import RepoFixture


- def test_build_deps_from_file(dds: DDS, http_repo: RepoFixture):
+ def test_build_deps_from_file(dds: DDS, http_repo: RepoFixture) -> None:
      assert not dds.deps_build_dir.is_dir()
      http_repo.import_json_file(dds.source_root / 'catalog.json')
      dds.repo_add(http_repo.url)
-     dds.build_deps(['-d', 'deps.json5'])
+     dds.build_deps(['-d', dds.source_root / 'deps.json5'])
      assert (dds.deps_build_dir / 'neo-fun@0.3.0').is_dir()
      assert (dds.scratch_dir / 'INDEX.lmi').is_file()
      assert (dds.deps_build_dir / '_libman/neo-fun.lmp').is_file()
      assert (dds.deps_build_dir / '_libman/neo/fun.lml').is_file()


- def test_build_deps_from_cmd(dds: DDS, http_repo: RepoFixture):
+ def test_build_deps_from_cmd(dds: DDS, http_repo: RepoFixture) -> None:
      assert not dds.deps_build_dir.is_dir()
      http_repo.import_json_file(dds.source_root / 'catalog.json')
      dds.repo_add(http_repo.url)
      assert (dds.deps_build_dir / '_libman/neo/fun.lml').is_file()


- def test_multiple_deps(dds: DDS, http_repo: RepoFixture):
+ def test_multiple_deps(dds: DDS, http_repo: RepoFixture) -> None:
      assert not dds.deps_build_dir.is_dir()
      http_repo.import_json_file(dds.source_root / 'catalog.json')
      dds.repo_add(http_repo.url)

tests/deps/deps_test.py (+4 -2)

      usage: str
      source: str

-     def setup_root(self, dds: DDS):
+     def setup_root(self, dds: DDS) -> None:
          dds.scope.enter_context(
              fileutil.set_contents(
                  dds.source_root / 'package.json',
      return list(catalog_dict['packages'][pkg].keys())


- def add_cases(pkg: str, uses: str, versions: Sequence[str], source: str):
+ def add_cases(pkg: str, uses: str, versions: Sequence[str], source: str) -> None:
      if versions == ['auto']:
          versions = get_default_pkg_versions(pkg)
      for ver in versions:
          CASES.append(DepsCase(f'{pkg}@{ver}', uses, source))


+ # pylint: disable=pointless-string-statement

  # magic_enum tests
  """
  ## ## ### ###### #### ###### ######## ## ## ## ## ## ##

tests/deps/do_test.py (+4 -4)

- import pytest
  import subprocess

+ from dds_ci import paths
  from tests import DDS, DDSFixtureParams, dds_fixture_conf, dds_fixture_conf_1
  from tests.http import RepoFixture


  @dds_conf
- def test_deps_build(dds: DDS, http_repo: RepoFixture):
+ def test_deps_build(dds: DDS, http_repo: RepoFixture) -> None:
      http_repo.import_json_file(dds.source_root / 'catalog.json')
      dds.repo_add(http_repo.url)
      assert not dds.repo_dir.exists()


  @dds_fixture_conf_1('use-remote')
- def test_use_nlohmann_json_remote(dds: DDS, http_repo: RepoFixture):
+ def test_use_nlohmann_json_remote(dds: DDS, http_repo: RepoFixture) -> None:
      http_repo.import_json_file(dds.source_root / 'catalog.json')
      dds.repo_add(http_repo.url)
      dds.build(apps=True)

-     app_exe = dds.build_dir / f'app{dds.exe_suffix}'
+     app_exe = dds.build_dir / f'app{paths.EXE_SUFFIX}'
      assert app_exe.is_file()
      subprocess.check_call([str(app_exe)])

tests/deps/use-cryptopp/test_use_cryptopp.py (+4 -4)

  import pytest

- from dds_ci import proc
+ from dds_ci import proc, toolchain, paths


  @pytest.mark.skipif(platform.system() == 'FreeBSD', reason='This one has trouble running on FreeBSD')
- def test_get_build_use_cryptopp(dds: DDS, http_repo: RepoFixture):
+ def test_get_build_use_cryptopp(dds: DDS, http_repo: RepoFixture) -> None:
      http_repo.import_json_file(dds.source_root / 'catalog.json')
      dds.repo_add(http_repo.url)
-     tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc'
+     tc_fname = 'gcc.tc.jsonc' if 'gcc' in toolchain.get_default_test_toolchain().name else 'msvc.tc.jsonc'
      tc = str(dds.test_dir / tc_fname)
      dds.build(toolchain=tc)
-     proc.check_run((dds.build_dir / 'use-cryptopp').with_suffix(dds.exe_suffix))
+     proc.check_run([(dds.build_dir / 'use-cryptopp').with_suffix(paths.EXE_SUFFIX)])

tests/deps/use-spdlog/use_spdlog_test.py (+4 -4)

  from tests import DDS
  from tests.http import RepoFixture

- from dds_ci import proc
+ from dds_ci import proc, paths, toolchain


- def test_get_build_use_spdlog(dds: DDS, http_repo: RepoFixture):
+ def test_get_build_use_spdlog(dds: DDS, http_repo: RepoFixture) -> None:
      http_repo.import_json_file(dds.source_root / 'catalog.json')
      dds.repo_add(http_repo.url)
-     tc_fname = 'gcc.tc.jsonc' if 'gcc' in dds.default_builtin_toolchain else 'msvc.tc.jsonc'
+     tc_fname = 'gcc.tc.jsonc' if 'gcc' in toolchain.get_default_test_toolchain().name else 'msvc.tc.jsonc'
      tc = str(dds.test_dir / tc_fname)
      dds.build(toolchain=tc, apps=True)
-     proc.check_run((dds.build_dir / 'use-spdlog').with_suffix(dds.exe_suffix))
+     proc.check_run([(dds.build_dir / 'use-spdlog').with_suffix(paths.EXE_SUFFIX)])

tests/errors/errors_test.py (+1 -1)

  from tests.fileutil import ensure_dir


- def test_empty_dir(dds: DDS):
+ def test_empty_dir(dds: DDS) -> None:
      with ensure_dir(dds.source_root):
          dds.build()

tests/fileutil.py (+3 -2)

- from contextlib import contextmanager, ExitStack
+ from contextlib import contextmanager
  from pathlib import Path
- from typing import Iterator, Union, Optional
+ from typing import Iterator, Optional

  import shutil


  @contextmanager
  def ensure_dir(dirpath: Path) -> Iterator[Path]:
      """

tests/http.py (+10 -10)

  from contextlib import contextmanager
  import json
  from http.server import SimpleHTTPRequestHandler, HTTPServer
- from typing import NamedTuple
+ from typing import NamedTuple, Any, Iterator
  from concurrent.futures import ThreadPoolExecutor
  from functools import partial
  import tempfile

      """
      A simple HTTP request handler that simply serves files from a directory given to the constructor.
      """
-     def __init__(self, *args, **kwargs) -> None:
+     def __init__(self, *args: Any, **kwargs: Any) -> None:
          self.dir = kwargs.pop('dir')
          super().__init__(*args, **kwargs)

-     def translate_path(self, path) -> str:
+     def translate_path(self, path: str) -> str:
          # Convert the given URL path to a path relative to the directory we are serving
-         abspath = Path(super().translate_path(path))
+         abspath = Path(super().translate_path(path))  # type: ignore
          relpath = abspath.relative_to(Path.cwd())
          return str(self.dir / relpath)


  @contextmanager
- def run_http_server(dirpath: Path, port: int):
+ def run_http_server(dirpath: Path, port: int) -> Iterator[ServerInfo]:
      """
      Context manager that spawns an HTTP server that serves thegiven directory on
      the given TCP port.
      httpd.shutdown()


- @pytest.yield_fixture()
- def http_tmp_dir_server(tmp_path: Path, unused_tcp_port: int):
+ @pytest.yield_fixture()  # type: ignore
+ def http_tmp_dir_server(tmp_path: Path, unused_tcp_port: int) -> Iterator[ServerInfo]:
      """
      Creates an HTTP server that serves the contents of a new
      temporary directory.
          self.url = info.base_url
          self.dds_exe = dds_exe

-     def import_json_data(self, data) -> None:
+     def import_json_data(self, data: Any) -> None:
          """
          Import some packages into the repo for the given JSON data. Uses
          mkrepo.py
          ])


- @pytest.yield_fixture()
- def http_repo(dds_exe: Path, http_tmp_dir_server: ServerInfo):
+ @pytest.yield_fixture()  # type: ignore
+ def http_repo(dds_exe: Path, http_tmp_dir_server: ServerInfo) -> Iterator[RepoFixture]:
      """
      Fixture that creates a new empty dds repository and an HTTP server to serve
      it.
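
The RepoFixture/http_repo machinery above is what the catalog and deps tests lean on. The recurring pattern, as seen in tests/catalog/get_test.py and tests/deps/build-deps/test_build_deps.py, looks like this (the test name here is made up):

    from tests import DDS
    from tests.http import RepoFixture


    def test_uses_http_repo(dds: DDS, http_repo: RepoFixture) -> None:
        # Populate the temporary repository served over HTTP...
        http_repo.import_json_file(dds.source_root / 'catalog.json')
        # ...point the dds under test at it, then build against it.
        dds.repo_add(http_repo.url)
        dds.build()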

tests/sdist/sdist_test.py (+3 -3)

  @dds_fixture_conf_1('create')
- def test_create_sdist(dds: DDS):
+ def test_create_sdist(dds: DDS) -> None:
      dds.sdist_create()
      sd_dir = dds.build_dir / 'foo@1.2.3.tar.gz'
      assert sd_dir.is_file()


  @dds_fixture_conf_1('create')
- def test_export_sdist(dds: DDS):
+ def test_export_sdist(dds: DDS) -> None:
      dds.sdist_export()
      assert (dds.repo_dir / 'foo@1.2.3').is_dir()


  @dds_fixture_conf_1('create')
- def test_import_sdist_archive(dds: DDS):
+ def test_import_sdist_archive(dds: DDS) -> None:
      repo_content_path = dds.repo_dir / 'foo@1.2.3'
      assert not repo_content_path.is_dir()
      dds.sdist_create()

tests/test_drivers/catch/test_catch.py (+3 -3)

  from tests import DDS, dds_fixture_conf, DDSFixtureParams
- from dds_ci import proc
+ from dds_ci import proc, paths


  @dds_fixture_conf(
      DDSFixtureParams('main', 'main'),
      DDSFixtureParams('custom-runner', 'custom-runner'),
  )
- def test_catch_testdriver(dds: DDS):
+ def test_catch_testdriver(dds: DDS) -> None:
      dds.build(tests=True)
-     test_exe = dds.build_dir / f'test/testlib/calc{dds.exe_suffix}'
+     test_exe = dds.build_dir / f'test/testlib/calc{paths.EXE_SUFFIX}'
      assert test_exe.exists()
      assert proc.run([test_exe]).returncode == 0

tools/dds_ci/bootstrap.py (+2 -3)

  import enum
  from pathlib import Path
  from contextlib import contextmanager
- from typing import Iterator, ContextManager
+ from typing import Iterator
  import sys
  import urllib.request
  import shutil
- import tempfile

  from . import paths
  from .dds import DDSWrapper

      elif mode is BootstrapMode.Download:
          f = _do_bootstrap_download()
      elif mode is BootstrapMode.Build:
-         f = _do_bootstrap_build()
+         f = _do_bootstrap_build()  # type: ignore  # TODO
      elif mode is BootstrapMode.Skip:
          f = paths.PREBUILT_DDS



tools/dds_ci/dds.py (+13 -8)

- from pathlib import Path
  import multiprocessing
  import shutil
+ from pathlib import Path
+ from typing import Optional

- from . import proc
- from . import paths
+ from . import paths, proc


  class DDSWrapper:
          self.catalog_path = paths.PREBUILT_DIR / 'ci-catalog.db'

      @property
-     def catalog_path_arg(self):
+     def catalog_path_arg(self) -> str:
          """The arguments for --catalog"""
          return f'--catalog={self.catalog_path}'

      @property
-     def repo_dir_arg(self):
+     def repo_dir_arg(self) -> str:
          """The arguments for --repo-dir"""
          return f'--repo-dir={self.repo_dir}'

-     def clean(self, *, build_dir: Path = None, repo=True, catalog=True):
+     def clean(self, *, build_dir: Optional[Path] = None, repo: bool = True, catalog: bool = True) -> None:
          """
          Clean out prior executable output, including repos, catalog, and
          the build results at 'build_dir', if given.

      def run(self, args: proc.CommandLine) -> None:
          """Execute the 'dds' executable with the given arguments"""
-         proc.check_run([self.path, args])  # type: ignore
+         proc.check_run([self.path, args])

      def catalog_json_import(self, path: Path) -> None:
          """Run 'catalog import' to import the given JSON. Only applicable to older 'dds'"""
          self.run(['catalog', 'import', self.catalog_path_arg, f'--json={path}'])

-     def build(self, *, toolchain: Path, root: Path, build_root: Path = None, jobs: int = None) -> None:
+     def build(self,
+               *,
+               toolchain: Path,
+               root: Path,
+               build_root: Optional[Path] = None,
+               jobs: Optional[int] = None) -> None:
          """
          Run 'dds build' with the given arguments.



tools/dds_ci/main.py (+7 -67)

  import argparse
- import json
- from contextlib import contextmanager
- import enum
  import multiprocessing
  import pytest
  from pathlib import Path
  from concurrent import futures
  import sys
- import os
- from typing import NoReturn, Sequence, Optional, Iterator
+ from typing import NoReturn, Sequence, Optional
  from typing_extensions import Protocol
  import subprocess

- import json5

- from . import paths
+ from . import paths, toolchain
  from .dds import DDSWrapper
  from .bootstrap import BootstrapMode, get_bootstrap_exe

      return make_argparser().parse_args(argv)


- @contextmanager
- def fixup_toolchain(json_file: Path) -> Iterator[Path]:
-     """
-     Augment the toolchain at the given path by adding 'ccache' or -fuse-ld=lld,
-     if those tools are available on the system. Yields a new toolchain file
-     based on 'json_file'
-     """
-     data = json5.loads(json_file.read_text())
-     # Check if we can add ccache
-     ccache = paths.find_exe('ccache')
-     if ccache:
-         print('Found ccache:', ccache)
-         data['compiler_launcher'] = [str(ccache)]
-     # Check for lld for use with GCC/Clang
-     if paths.find_exe('ld.lld') and data.get('compiler_id') in ('gnu', 'clang'):
-         print('Linking with `-fuse-ld=lld`')
-         data.setdefault('link_flags', []).append('-fuse-ld=lld')
-     # Save the new toolchain data
-     with paths.new_tempdir() as tdir:
-         new_json = tdir / json_file.name
-         new_json.write_text(json.dumps(data))
-         yield new_json
-
-
- def get_default_test_toolchain() -> Path:
-     """
-     Get the default toolchain that should be used for dev and test based on the
-     host platform.
-     """
-     if sys.platform == 'win32':
-         return paths.TOOLS_DIR / 'msvc-audit.jsonc'
-     elif sys.platform in 'linux':
-         return paths.TOOLS_DIR / 'gcc-9-audit.jsonc'
-     elif sys.platform == 'darwin':
-         return paths.TOOLS_DIR / 'gcc-9-audit-macos.jsonc'
-     else:
-         raise RuntimeError(f'Unable to determine the default toolchain (sys.platform is {sys.platform!r})')
-
-
- def get_default_toolchain() -> Path:
-     """
-     Get the default toolchain that should be used to generate the release executable
-     based on the host platform.
-     """
-     if sys.platform == 'win32':
-         return paths.TOOLS_DIR / 'msvc-rel.jsonc'
-     elif sys.platform == 'linux':
-         return paths.TOOLS_DIR / 'gcc-9-rel.jsonc'
-     elif sys.platform == 'darwin':
-         return paths.TOOLS_DIR / 'gcc-9-rel-macos.jsonc'
-     else:
-         raise RuntimeError(f'Unable to determine the default toolchain (sys.platform is {sys.platform!r})')


  def test_build(dds: DDSWrapper, args: CommandArguments) -> DDSWrapper:
      """
      Execute the build that generates the test-mode executable. Uses the given 'dds'
      to build the new dds. Returns a DDSWrapper around the generated test executable.
      """
-     test_tc = args.test_toolchain or get_default_test_toolchain()
+     test_tc = args.test_toolchain or toolchain.get_default_test_toolchain()
      build_dir = paths.BUILD_DIR / '_ci-test'
-     with fixup_toolchain(test_tc) as new_tc:
+     with toolchain.fixup_toolchain(test_tc) as new_tc:
          dds.build(toolchain=new_tc, root=paths.PROJECT_ROOT, build_root=build_dir, jobs=args.jobs)
      return DDSWrapper(build_dir / ('dds' + paths.EXE_SUFFIX))

      """
      main_tc = args.toolchain or (
          # If we are in rapid-dev mode, use the test toolchain, which had audit/debug enabled
-         get_default_toolchain() if not args.rapid else get_default_test_toolchain())
-     with fixup_toolchain(main_tc) as new_tc:
+         toolchain.get_default_toolchain() if not args.rapid else toolchain.get_default_test_toolchain())
+     with toolchain.fixup_toolchain(main_tc) as new_tc:
          try:
              dds.build(toolchain=new_tc, root=paths.PROJECT_ROOT, build_root=paths.BUILD_DIR, jobs=args.jobs)
          except subprocess.CalledProcessError as e:
      return ci_with_dds(f, args)


- def start():
+ def start() -> NoReturn:
      sys.exit(main(sys.argv[1:]))





tools/dds_ci/proc.py (+24 -11)

  from pathlib import PurePath, Path
- from typing import Iterable, Union
+ from typing import Iterable, Union, Optional, Iterator
+ from typing_extensions import Protocol
  import subprocess

  CommandLineArg = Union[str, PurePath, int, float]
  CommandLineArg2 = Union[CommandLineArg1, Iterable[CommandLineArg1]]
  CommandLineArg3 = Union[CommandLineArg2, Iterable[CommandLineArg2]]
  CommandLineArg4 = Union[CommandLineArg3, Iterable[CommandLineArg3]]
- CommandLine = Union[CommandLineArg4, Iterable[CommandLineArg4]]


+ class CommandLine(Protocol):
+     def __iter__(self) -> Iterator[Union['CommandLine', CommandLineArg]]:
+         pass


+ # CommandLine = Union[CommandLineArg4, Iterable[CommandLineArg4]]


+ class ProcessResult(Protocol):
+     returncode: int
+     stdout: bytes


  def flatten_cmd(cmd: CommandLine) -> Iterable[str]:
      assert False, f'Invalid command line element: {repr(cmd)}'


- def run(*cmd: CommandLine, cwd: Path = None) -> subprocess.CompletedProcess:
+ def run(*cmd: CommandLine, cwd: Optional[Path] = None, check: bool = False) -> ProcessResult:
      return subprocess.run(
-         list(flatten_cmd(cmd)),  # type: ignore
+         list(flatten_cmd(cmd)),
          cwd=cwd,
+         check=check,
      )


- def check_run(*cmd: CommandLine,
-               cwd: Path = None) -> subprocess.CompletedProcess:
-     flat_cmd = list(flatten_cmd(cmd))  # type: ignore
-     res = run(flat_cmd, cwd=cwd)
-     if res.returncode != 0:
-         raise subprocess.CalledProcessError(res.returncode, flat_cmd)
-     return res
+ def check_run(*cmd: CommandLine, cwd: Optional[Path] = None) -> ProcessResult:
+     return subprocess.run(
+         list(flatten_cmd(cmd)),
+         cwd=cwd,
+         check=True,
+     )
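
The CommandLine protocol accepts arbitrarily nested iterables of strings, paths, and numbers, which is what lets call sites mix literals, f-strings, and conditional sub-lists freely (see the ['--no-tests'] if not tests else [] fragments in tests/dds.py). A usage sketch with made-up arguments:

    from pathlib import Path
    from dds_ci import proc

    build_dir = Path('_build')            # illustrative value
    extra = ['--no-tests', '--no-apps']   # nested list; flatten_cmd unrolls it

    result = proc.run(['dds', '-ltrace', ['build', f'--out={build_dir}', extra]], check=False)
    print(result.returncode)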

tools/dds_ci/toolchain.py (+61 -0)

import json
import sys
from contextlib import contextmanager
from pathlib import Path
from typing import Iterator

import json5

from . import paths


@contextmanager
def fixup_toolchain(json_file: Path) -> Iterator[Path]:
    """
    Augment the toolchain at the given path by adding 'ccache' or -fuse-ld=lld,
    if those tools are available on the system. Yields a new toolchain file
    based on 'json_file'
    """
    data = json5.loads(json_file.read_text())
    # Check if we can add ccache
    ccache = paths.find_exe('ccache')
    if ccache:
        print('Found ccache:', ccache)
        data['compiler_launcher'] = [str(ccache)]
    # Check for lld for use with GCC/Clang
    if paths.find_exe('ld.lld') and data.get('compiler_id') in ('gnu', 'clang'):
        print('Linking with `-fuse-ld=lld`')
        data.setdefault('link_flags', []).append('-fuse-ld=lld')
    # Save the new toolchain data
    with paths.new_tempdir() as tdir:
        new_json = tdir / json_file.name
        new_json.write_text(json.dumps(data))
        yield new_json


def get_default_test_toolchain() -> Path:
    """
    Get the default toolchain that should be used for dev and test based on the
    host platform.
    """
    if sys.platform == 'win32':
        return paths.TOOLS_DIR / 'msvc-audit.jsonc'
    if sys.platform in 'linux':
        return paths.TOOLS_DIR / 'gcc-9-audit.jsonc'
    if sys.platform == 'darwin':
        return paths.TOOLS_DIR / 'gcc-9-audit-macos.jsonc'
    raise RuntimeError(f'Unable to determine the default toolchain (sys.platform is {sys.platform!r})')


def get_default_toolchain() -> Path:
    """
    Get the default toolchain that should be used to generate the release executable
    based on the host platform.
    """
    if sys.platform == 'win32':
        return paths.TOOLS_DIR / 'msvc-rel.jsonc'
    if sys.platform == 'linux':
        return paths.TOOLS_DIR / 'gcc-9-rel.jsonc'
    if sys.platform == 'darwin':
        return paths.TOOLS_DIR / 'gcc-9-rel-macos.jsonc'
    raise RuntimeError(f'Unable to determine the default toolchain (sys.platform is {sys.platform!r})')
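
Usage, as tools/dds_ci/main.py now does it: pick the platform default (or a user-supplied file) and run the build through the temporary, augmented copy. A sketch with illustrative argument values:

    from dds_ci import paths, toolchain
    from dds_ci.dds import DDSWrapper

    dds = DDSWrapper(paths.PREBUILT_DDS)
    test_tc = toolchain.get_default_test_toolchain()
    with toolchain.fixup_toolchain(test_tc) as new_tc:
        # The yielded path lives in a temporary directory and already has
        # ccache / lld wired in when those tools are available.
        dds.build(toolchain=new_tc,
                  root=paths.PROJECT_ROOT,
                  build_root=paths.BUILD_DIR / '_ci-test',
                  jobs=6)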
