
Rename from some old names, and increase timeout on cryptopp build

default_compile_flags
vector-of-bool · 3 years ago
Commit bba5c46a5d
6 changed files with 35 additions and 31 deletions
  1. tests/test_build_deps.py (+1 -1)
  2. tests/test_pkg.py (+2 -2)
  3. tests/test_pkg_db.py (+7 -7)
  4. tests/use-cryptopp/test_use_cryptopp.py (+1 -1)
  5. tools/dds_ci/dds.py (+17 -17)
  6. tools/dds_ci/testing/fixtures.py (+7 -3)

tests/test_build_deps.py (+1 -1)

@@ -70,7 +70,7 @@ def test_cmake_simple(project_opener: ProjectOpener) -> None:
     proj.dds.run(
         [
             'build-deps',
-            proj.dds.repo_dir_arg,
+            proj.dds.cache_dir_arg,
             'foo@1.2.3',
             ('-t', ':gcc' if 'gcc' in toolchain.get_default_toolchain().name else ':msvc'),
             f'--cmake=libraries.cmake',

tests/test_pkg.py (+2 -2)

@@ -52,7 +52,7 @@ def test_import_sdist_stdin(_test_pkg: Tuple[Path, Project]) -> None:
     pipe = subprocess.Popen(
         list(proc.flatten_cmd([
             project.dds.path,
-            project.dds.repo_dir_arg,
+            project.dds.cache_dir_arg,
             'pkg',
             'import',
             '--stdin',
@@ -73,7 +73,7 @@ def test_import_sdist_stdin(_test_pkg: Tuple[Path, Project]) -> None:
 
 
 def test_import_sdist_dir(test_project: Project) -> None:
-    test_project.dds.run(['pkg', 'import', test_project.dds.repo_dir_arg, test_project.root])
+    test_project.dds.run(['pkg', 'import', test_project.dds.cache_dir_arg, test_project.root])
     _check_import(test_project.dds.repo_dir / 'foo@1.2.3')



tests/test_pkg_db.py (+7 -7)

@@ -39,7 +39,7 @@ def test_pkg_get(_test_repo: RepoServer, tmp_project: Project) -> None:
 def test_pkg_repo(_test_repo: RepoServer, tmp_project: Project) -> None:
     dds = tmp_project.dds
     dds.repo_add(_test_repo.url)
-    dds.run(['pkg', 'repo', dds.catalog_path_arg, 'ls'])
+    dds.run(['pkg', 'repo', dds.pkg_db_path_arg, 'ls'])
 
 
 def test_pkg_repo_rm(_test_repo: RepoServer, tmp_project: Project) -> None:
@@ -49,7 +49,7 @@ def test_pkg_repo_rm(_test_repo: RepoServer, tmp_project: Project) -> None:
     # Okay:
     tmp_project.dds.pkg_get('neo-sqlite3@0.3.0')
     # Remove the repo:
-    dds.run(['pkg', dds.catalog_path_arg, 'repo', 'ls'])
+    dds.run(['pkg', dds.pkg_db_path_arg, 'repo', 'ls'])
     dds.repo_remove(_test_repo.repo_name)
     # Cannot double-remove a repo:
     with expect_error_marker('repo-rm-no-such-repo'):
@@ -63,10 +63,10 @@ def test_pkg_search(_test_repo: RepoServer, tmp_project: Project) -> None:
     _test_repo.import_json_data(NEO_SQLITE_PKG_JSON)
     dds = tmp_project.dds
     with expect_error_marker('pkg-search-no-result'):
-        dds.run(['pkg', dds.catalog_path_arg, 'search'])
+        dds.run(['pkg', dds.pkg_db_path_arg, 'search'])
     dds.repo_add(_test_repo.url)
-    dds.run(['pkg', dds.catalog_path_arg, 'search'])
-    dds.run(['pkg', dds.catalog_path_arg, 'search', 'neo-sqlite3'])
-    dds.run(['pkg', dds.catalog_path_arg, 'search', 'neo-*'])
+    dds.run(['pkg', dds.pkg_db_path_arg, 'search'])
+    dds.run(['pkg', dds.pkg_db_path_arg, 'search', 'neo-sqlite3'])
+    dds.run(['pkg', dds.pkg_db_path_arg, 'search', 'neo-*'])
     with expect_error_marker('pkg-search-no-result'):
-        dds.run(['pkg', dds.catalog_path_arg, 'search', 'nonexistent'])
+        dds.run(['pkg', dds.pkg_db_path_arg, 'search', 'nonexistent'])

tests/use-cryptopp/test_use_cryptopp.py (+1 -1)

@@ -66,5 +66,5 @@ def test_get_build_use_cryptopp(test_parent_dir: Path, tmp_project: Project, htt
     }
     tc_fname = 'gcc.tc.jsonc' if 'gcc' in toolchain.get_default_test_toolchain().name else 'msvc.tc.jsonc'
     tmp_project.write('src/use-cryptopp.main.cpp', APP_CPP)
-    tmp_project.build(toolchain=test_parent_dir / tc_fname)
+    tmp_project.build(toolchain=test_parent_dir / tc_fname, timeout=60 * 10)
     proc.check_run([(tmp_project.build_root / 'use-cryptopp').with_suffix(paths.EXE_SUFFIX)])

tools/dds_ci/dds.py (+17 -17)

@@ -31,12 +31,12 @@ class DDSWrapper:
         return copy.deepcopy(self)
 
     @property
-    def catalog_path_arg(self) -> str:
+    def pkg_db_path_arg(self) -> str:
         """The arguments for --catalog"""
         return f'--catalog={self.pkg_db_path}'
 
     @property
-    def repo_dir_arg(self) -> str:
+    def cache_dir_arg(self) -> str:
         """The arguments for --repo-dir"""
         return f'--repo-dir={self.repo_dir}'
 
@@ -68,25 +68,25 @@ class DDSWrapper:
 
     def catalog_json_import(self, path: Path) -> None:
         """Run 'catalog import' to import the given JSON. Only applicable to older 'dds'"""
-        self.run(['catalog', 'import', self.catalog_path_arg, f'--json={path}'])
+        self.run(['catalog', 'import', self.pkg_db_path_arg, f'--json={path}'])
 
     def catalog_get(self, what: str) -> None:
-        self.run(['catalog', 'get', self.catalog_path_arg, what])
+        self.run(['catalog', 'get', self.pkg_db_path_arg, what])
 
     def pkg_get(self, what: str) -> None:
-        self.run(['pkg', 'get', self.catalog_path_arg, what])
+        self.run(['pkg', 'get', self.pkg_db_path_arg, what])
 
     def repo_add(self, url: str) -> None:
-        self.run(['pkg', 'repo', 'add', self.catalog_path_arg, url])
+        self.run(['pkg', 'repo', 'add', self.pkg_db_path_arg, url])
 
     def repo_remove(self, name: str) -> None:
-        self.run(['pkg', 'repo', 'remove', self.catalog_path_arg, name])
+        self.run(['pkg', 'repo', 'remove', self.pkg_db_path_arg, name])
 
     def repo_import(self, sdist: Path) -> None:
-        self.run(['repo', self.repo_dir_arg, 'import', sdist])
+        self.run(['repo', self.cache_dir_arg, 'import', sdist])
 
     def pkg_import(self, filepath: Pathish) -> None:
-        self.run(['pkg', 'import', filepath, self.repo_dir_arg])
+        self.run(['pkg', 'import', filepath, self.cache_dir_arg])
 
     def build(self,
               *,
@@ -110,8 +110,8 @@ class DDSWrapper:
             [
                 'build',
                 f'--toolchain={toolchain}',
-                self.repo_dir_arg,
-                self.catalog_path_arg,
+                self.cache_dir_arg,
+                self.pkg_db_path_arg,
                 f'--jobs={jobs}',
                 f'{self.project_dir_flag}={root}',
                 f'--out={build_root}',
@@ -132,8 +132,8 @@ class DDSWrapper:
         toolchain = toolchain or tc_mod.get_default_audit_toolchain()
         self.run([
             'compile-file',
-            self.catalog_path_arg,
-            self.repo_dir_arg,
+            self.pkg_db_path_arg,
+            self.cache_dir_arg,
             paths,
             f'--toolchain={toolchain}',
             f'{self.project_dir_flag}={project_dir}',
@@ -145,8 +145,8 @@ class DDSWrapper:
         self.run([
             'build-deps',
             f'--toolchain={toolchain}',
-            self.catalog_path_arg,
-            self.repo_dir_arg,
+            self.pkg_db_path_arg,
+            self.cache_dir_arg,
             args,
         ])
 
@@ -156,11 +156,11 @@ class NewDDSWrapper(DDSWrapper):
     Wraps the new 'dds' executable with some convenience APIs
     """
     @property
-    def repo_dir_arg(self) -> str:
+    def cache_dir_arg(self) -> str:
         return f'--pkg-cache-dir={self.repo_dir}'
 
     @property
-    def catalog_path_arg(self) -> str:
+    def pkg_db_path_arg(self) -> str:
         return f'--pkg-db-path={self.pkg_db_path}'
 
     @property
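For readers skimming the rename: on the base DDSWrapper (older 'dds') the renamed properties still expand to the same flag strings as before, while NewDDSWrapper overrides them with the new-style flags. A small self-contained sketch of the resulting strings, following the f-strings in this diff (the example paths are made up, not from this commit):

# Standalone sketch of the flag strings the renamed properties produce.
# The paths below are hypothetical.
from pathlib import Path

repo_dir = Path('/tmp/dds/pkg-cache')     # hypothetical cache directory
pkg_db_path = Path('/tmp/dds/pkg-db.db')  # hypothetical package database

# DDSWrapper (older 'dds'): same flags as before the rename
print(f'--repo-dir={repo_dir}')        # cache_dir_arg (was repo_dir_arg)
print(f'--catalog={pkg_db_path}')      # pkg_db_path_arg (was catalog_path_arg)

# NewDDSWrapper (newer 'dds'): new-style flag spellings
print(f'--pkg-cache-dir={repo_dir}')   # cache_dir_arg
print(f'--pkg-db-path={pkg_db_path}')  # pkg_db_path_arg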

tools/dds_ci/testing/fixtures.py (+7 -3)

@@ -75,12 +75,16 @@ class Project:
         """Argument for --project"""
         return f'--project={self.root}'
 
-    def build(self, *, toolchain: Optional[Pathish] = None) -> None:
+    def build(self, *, toolchain: Optional[Pathish] = None, timeout: Optional[int] = None) -> None:
         """
         Execute 'dds build' on the project
         """
         with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
-            self.dds.build(root=self.root, build_root=self.build_root, toolchain=tc, more_args=['-ldebug'])
+            self.dds.build(root=self.root,
+                           build_root=self.build_root,
+                           toolchain=tc,
+                           timeout=timeout,
+                           more_args=['-ldebug'])
 
     def compile_file(self, *paths: Pathish, toolchain: Optional[Pathish] = None) -> None:
         with tc_mod.fixup_toolchain(toolchain or tc_mod.get_default_test_toolchain()) as tc:
@@ -96,7 +100,7 @@ class Project:
         ], cwd=self.build_root)
 
     def sdist_export(self) -> None:
-        self.dds.run(['sdist', 'export', self.dds.repo_dir_arg, self.project_dir_arg])
+        self.dds.run(['sdist', 'export', self.dds.cache_dir_arg, self.project_dir_arg])
 
     def write(self, path: Pathish, content: str) -> Path:
         path = Path(path)
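Note on the new timeout parameter: Project.build() now forwards it to DDSWrapper.build(), which presumably hands it to the underlying process runner. A generic, self-contained sketch of that pattern (illustrative only, not the project's actual implementation; run_build is a made-up helper):

# Illustrative-only sketch of threading an optional timeout down to the
# process runner; not the actual dds_ci implementation.
import subprocess
from typing import Optional, Sequence


def run_build(cmd: Sequence[str], *, timeout: Optional[int] = None) -> None:
    # subprocess treats timeout=None as "wait forever", so the default keeps
    # the old behaviour; callers may opt in to a limit.
    subprocess.run(list(cmd), check=True, timeout=timeout)


# The cryptopp test above opts in to a ten-minute limit, e.g.:
# run_build(['dds', 'build', '--toolchain=gcc.tc.jsonc'], timeout=60 * 10)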
