| {"out"}, | {"out"}, | ||||
| dds::fs::current_path() / "_build"}; | dds::fs::current_path() / "_build"}; | ||||
| args::ValueFlagList<std::string> add_repos{ | |||||
| cmd, | |||||
| "<repo-url>", | |||||
| "Add the given repositories to the catalog before executing (Implies '--update-repos')", | |||||
| {"add-repo"}}; | |||||
| args::Flag update_repos{cmd, | |||||
| "update-repos", | |||||
| "Update repositories before building", | |||||
| {"update-repos", 'U'}}; | |||||
    int run() {
        // Store any '--add-repo' URLs in the catalog before anything else runs
        if (!add_repos.Get().empty()) {
            auto cat = cat_path.open();
            for (auto& str : add_repos.Get()) {
                auto repo = dds::remote_repository::connect(str);
                repo.store(cat.database());
            }
        }

        // '--update-repos' (or any '--add-repo') triggers a full remote update
        if (update_repos.Get() || !add_repos.Get().empty()) {
            auto cat = cat_path.open();
            dds::update_all_remotes(cat.database());
        }

        dds::sdist_build_params main_params = {
            .subdir = "",
            .build_tests = !no_tests.Get(),
from typing import Optional
from pathlib import Path
import shutil
from subprocess import check_call

import pytest

from tests import scoped_dds, DDSFixtureParams


@pytest.fixture(scope='session')
def dds_exe() -> Path:
    return Path(__file__).absolute().parent.parent / '_build/dds'

@pytest.yield_fixture(scope='session')
def dds_pizza_catalog(dds_exe: Path, tmp_path_factory) -> Path:
    """Generate a session-scoped catalog database seeded from https://dds.pizza/repo."""
    tmpdir: Path = tmp_path_factory.mktemp(basename='dds-catalog')
    cat_path = tmpdir / 'catalog.db'
    check_call([str(dds_exe), 'repo', 'add', 'https://dds.pizza/repo', '--update', f'--catalog={cat_path}'])
    yield cat_path
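
# Sketch of a consuming test (illustrative only; the deps tests use this pattern):
#
#   def test_uses_pizza_repo(dds_pizza_catalog: Path, dds: DDS) -> None:
#       dds.build(catalog_path=dds_pizza_catalog)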

@pytest.yield_fixture
def dds(request, dds_exe: Path, tmp_path: Path, worker_id: str, scope: ExitStack):
    test_source_dir = Path(request.fspath).absolute().parent
    test_root = test_source_dir
    project_dir = test_root / params.subdir
    # Create the instance. Auto-clean when we're done
    yield scope.enter_context(scoped_dds(dds_exe, test_root, project_dir, request.function.__name__))

@pytest.fixture


def pytest_addoption(parser):
    parser.addoption(
        '--test-deps', action='store_true', default=False, help='Run the exhaustive and intensive dds-deps tests')


def pytest_configure(config):
    config.addinivalue_line('markers', 'deps_test: Deps tests are slow. Enable with --test-deps')

def pytest_collection_modifyitems(config, items):
            continue
        item.add_marker(
            pytest.mark.skip(
                reason='Exhaustive deps tests are slow and perform many Git clones. Use --test-deps to run them.'))
import itertools
from contextlib import contextmanager, ExitStack
from pathlib import Path
from typing import Iterable, Union, Any, Dict, NamedTuple, ContextManager, Optional
import subprocess
import shutil
            args,
        ])

    def repo_add(self, url: str) -> None:
        self.run(['repo', 'add', url, '--update', self.catalog_path_arg])
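    # Sketch of intended use (hypothetical URL):
    #   dds.repo_add('https://dds.pizza/repo')
    # runs `dds repo add <url> --update` against this instance's catalog database.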

    def build(self,
              *,
              toolchain: str = None,
              apps: bool = True,
              warnings: bool = True,
              catalog_path: Optional[Path] = None,
              tests: bool = True,
              more_args: proc.CommandLine = [],
              check: bool = True) -> subprocess.CompletedProcess:
        # Fall back to this instance's own catalog unless the caller gave one
        catalog_path = catalog_path or self.catalog_path.relative_to(self.source_root)
        return self.run(
            [
                'build',
                f'--out={self.build_dir}',
                f'--toolchain={toolchain or self.default_builtin_toolchain}',
                f'--catalog={catalog_path}',
                f'--repo-dir={self.repo_dir.relative_to(self.source_root)}',
                ['--no-tests'] if not tests else [],
                ['--no-apps'] if not apps else [],
                ['--no-warnings'] if not warnings else [],
                self.project_dir_arg,
                more_args,
            ],
            check=check,
        )

@contextmanager
def scoped_dds(dds_exe: Path, test_dir: Path, project_dir: Path, name: str):
    if os.name == 'nt':
        dds_exe = dds_exe.with_suffix('.exe')
    with ExitStack() as scope:
                    'depends': [self.dep],
                }).encode()))
        dds.scope.enter_context(
            fileutil.set_contents(dds.source_root / 'library.json',
                                  json.dumps({
                                      'name': 'test',
                                      'uses': [self.usage],
                                  }).encode()))
        dds.scope.enter_context(fileutil.set_contents(dds.source_root / 'src/test.test.cpp', self.source.encode()))

CASES: List[DepsCase] = []


def get_default_pkg_versions(pkg: str) -> Sequence[str]:
    """Read the versions of ``pkg`` listed in the top-level catalog.json."""
    catalog_json = Path(__file__).resolve().parent.parent.parent / 'catalog.json'
    catalog_dict = json.loads(catalog_json.read_text())
    return list(catalog_dict['packages'][pkg].keys())
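
# For example (illustrative values only): if catalog.json lists fmt 6.2.1 and
# 7.0.3, then get_default_pkg_versions('fmt') returns ['6.2.1', '7.0.3'].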
 ## ## ## ##
 ## ## ## ##
"""
add_cases('fmt', 'fmt/fmt', ['auto'], r'''
#include <fmt/core.h>
int main() {
 ## ## ## ## ## ## ## ## ## ##
###### ## ######## ######## ####### ######
"""
add_cases('spdlog', 'spdlog/spdlog', ['auto'], r'''
#include <spdlog/spdlog.h>
int main() {
@pytest.mark.deps_test
@pytest.mark.parametrize('case', CASES, ids=[c.dep for c in CASES])
def test_dep(case: DepsCase, dds_pizza_catalog: Path, dds: DDS) -> None:
    case.setup_root(dds)
    dds.build(catalog_path=dds_pizza_catalog)
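
# These cases are skipped by default: pytest_collection_modifyitems in
# conftest.py marks them, and they only run when `--test-deps` is given.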
import urllib.request
import shutil

from self_build import self_build, dds_build
from dds_ci import paths, proc
        'freebsd12': 'dds-freebsd-x64',
    }.get(sys.platform)
    if filename is None:
        raise RuntimeError(f'We do not have a prebuilt DDS binary for the "{sys.platform}" platform')
    url = f'https://github.com/vector-of-bool/dds/releases/download/0.1.0-alpha.4/{filename}'
    print(f'Downloading prebuilt DDS executable: {url}')
        required=True,
    )
    parser.add_argument(
        '--build-only', action='store_true', help='Only build the `dds` executable. Skip second-phase and tests.')
    parser.add_argument(
        '--no-clean',
        action='store_false',
        paths.PREBUILT_DDS,
        toolchain=opts.toolchain,
        cat_path=old_cat_path,
        # The prebuilt (alpha.4) dds predates the version-2 catalog format, so
        # the first phase imports the version-1 catalog.old.json
        cat_json_path=Path('catalog.old.json'),
        dds_flags=[('--repo-dir', ci_repo_dir)])
    print('Main build PASSED!')
    print(f'A `dds` executable has been generated: {paths.CUR_BUILT_DDS}')

    if args.build_only:
        print('`--build-only` was given, so second phase and tests will not execute')
        return 0
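
    # Second phase: the freshly built dds must be able to build dds itself,
    # this time resolving dependencies through the new repository system
    # (`--add-repo`) rather than importing a catalog JSON.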
    print('Bootstrapping myself:')
    new_cat_path = paths.BUILD_DIR / 'catalog.db'
    new_repo_dir = paths.BUILD_DIR / 'ci-repo-2'
    # Start the second phase from a clean catalog and repository directory
    if new_cat_path.is_file():
        new_cat_path.unlink()
    if new_repo_dir.is_dir():
        shutil.rmtree(new_repo_dir)
    dds_build(
        paths.CUR_BUILT_DDS,
        toolchain=opts.toolchain,
        more_flags=[
            f'--repo-dir={new_repo_dir}',
            f'--catalog={new_cat_path}',
            '--add-repo=https://dds.pizza/repo',
        ])
    print('Bootstrap test PASSED!')

    return pytest.main([
            d['auto-lib'] = self.auto_lib
        return d

    def to_dict_2(self) -> str:
        """Render this remote as a version-2 catalog URL string."""
        url = f'git+{self.url}'
        if self.auto_lib:
            url += f'?lm={self.auto_lib}'
        url += f'#{self.ref}'
        return url
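    # For example (hypothetical values), url='https://github.com/fmtlib/fmt.git',
    # auto_lib='fmt/fmt', ref='7.0.3' yields:
    #   'git+https://github.com/fmtlib/fmt.git?lm=fmt/fmt#7.0.3'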

RemoteInfo = Union[Git]

        ret['git'] = self.remote.to_dict()
        return ret

    def to_dict_2(self) -> dict:
        ret: dict = {
            'description': self.description,
            'depends': list(self.depends),
            'transform': [f.to_dict() for f in self.remote.transforms],
        }
        ret['url'] = self.remote.to_dict_2()
        return ret
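    # Illustrative version-2 entry (hypothetical values):
    #   {'description': 'A great library',
    #    'depends': [],
    #    'transform': [],
    #    'url': 'git+https://github.com/fmtlib/fmt.git?lm=fmt/fmt#7.0.3'}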

class VersionSet(NamedTuple):
    version: str

        raise RuntimeError(f'Request is outside of api.github.com [{url}]')
    resp = request.urlopen(req)
    if resp.status != 200:
        raise RuntimeError(f'Request to [{url}] failed [{resp.status} {resp.reason}]')
    return json5.loads(resp.read())
    return content


def _version_for_github_tag(pkg_name: str, desc: str, clone_url: str, tag) -> Version:
    print(f'Loading tag {tag["name"]}')
    commit = github_http_get(tag['commit']['url'])
    tree = github_http_get(commit['commit']['tree']['url'])
            package_json_fname = cand
            break
    else:
        raise RuntimeError(f'No package JSON5 file in tag {tag["name"]} for {pkg_name} (One of {tree_content.keys()})')

    package_json = json5.loads(_get_github_tree_file_content(tree_content[package_json_fname]['url']))
    version = package_json['version']
    if pkg_name != package_json['name']:
        raise RuntimeError(f'package name in repo "{package_json["name"]}" '
    elif depends is None:
        pairs = []
    else:
        raise RuntimeError(f'Unknown "depends" object from json file: {depends!r}')

    remote = Git(url=clone_url, ref=tag['name'])
    return Version(version, description=desc, depends=list(pairs), remote=remote)


def github_package(name: str, repo: str, want_tags: Iterable[str]) -> Package:
    missing_tags = set(want_tags) - set(t['name'] for t in avail_tags)
    if missing_tags:
        raise RuntimeError('One or more wanted tags do not exist in '
                           f'the repository "{repo}" (Missing: {missing_tags})')
    tag_items = (t for t in avail_tags if t['name'] in want_tags)
    versions = HTTP_POOL.map(lambda tag: _version_for_github_tag(name, desc, repo_data['clone_url'], tag), tag_items)
    return Package(name, list(versions))
                  *,
                  tag_fmt: str = '{}') -> Package:
    return Package(name, [
        Version(
            ver.version,
            description=description,
            remote=Git(git_url, tag_fmt.format(ver.version), auto_lib=auto_lib),
            depends=ver.depends) for ver in versions
    ])
                 transforms: Sequence[FSTransform] = (),
                 description='(No description was provided)') -> Package:
    return Package(name, [
        Version(
            ver,
            description='\n'.join(textwrap.wrap(description)),
            remote=Git(url=git_url, ref=tag_fmt.format(ver), auto_lib=auto_lib, transforms=transforms))
        for ver in versions
    ])


PACKAGES = [
    github_package('neo-buffer', 'vector-of-bool/neo-buffer',
                   ['0.2.1', '0.3.0', '0.4.0', '0.4.1', '0.4.2']),
    github_package('neo-compress', 'vector-of-bool/neo-compress', ['0.1.0', '0.1.1', '0.2.0']),
    github_package('neo-url', 'vector-of-bool/neo-url',
                   ['0.1.0', '0.1.1', '0.1.2', '0.2.0', '0.2.1', '0.2.2']),
    github_package('neo-sqlite3', 'vector-of-bool/neo-sqlite3',
    args = parser.parse_args()

    data = {
        'version': 2,
        'packages': {
            pkg.name: {ver.version: ver.to_dict_2()
                       for ver in pkg.versions}
            for pkg in PACKAGES
        }
    }
    old_data = {
        'version': 1,
        'packages': {pkg.name: {ver.version: ver.to_dict()
                                for ver in pkg.versions}
                     for pkg in PACKAGES}
    }
    # Emit both formats: catalog.json (version 2) for the new dds, and
    # catalog.old.json (version 1) for older executables such as the prebuilt
    # dds used by the first CI phase.
    Path('catalog.old.json').write_text(
        json.dumps(old_data, indent=2, sort_keys=True))
    Path('catalog.json').write_text(json.dumps(data, indent=2, sort_keys=True))
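
# Illustrative output shape (hypothetical package): catalog.json becomes
#   {"version": 2, "packages": {"fmt": {"7.0.3": {"url": "git+...#7.0.3", ...}}}}
# while catalog.old.json keeps version-1 entries keyed by {"git": {...}}.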
ROOT = Path(__file__).parent.parent.absolute()


def dds_build(exe: Path, *, toolchain: str, more_flags: proc.CommandLine = ()):
    # Copy the exe to another location, as Windows refuses to let a binary be
    # replaced while it is executing
    new_exe = ROOT / '_dds.bootstrap-test.exe'
    shutil.copy2(exe, new_exe)
    try:
        proc.check_run(new_exe, 'build', f'--toolchain={toolchain}', more_flags)
    finally:
        new_exe.unlink()
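
# Sketch of a call (values are illustrative):
#   dds_build(Path('_build/dds'), toolchain='gcc.tc.jsonc',
#             more_flags=['--add-repo=https://dds.pizza/repo'])
# builds the source tree using a temporary copy of the given dds executable.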

def self_build(exe: Path,
               *,
               toolchain: str,
               lmi_path: Path = None,
               cat_path: Path,
               cat_json_path: Path,
               dds_flags: proc.CommandLine = ()):
    proc.check_run(
        exe,
        'catalog',
        'import',
        f'--catalog={cat_path}',
        f'--json={cat_json_path}',
    )
    dds_build(
        exe,
        toolchain=toolchain,
        more_flags=[
            ('-I', lmi_path) if lmi_path else (),
            f'--repo-dir={ROOT}/_build/ci-repo',
            f'--catalog={cat_path}',
            *dds_flags,
        ],
    )


def main(argv: List[str]) -> int: