@@ -0,0 +1,3 @@
+[style]
+based_on_style = pep8
+column_limit = 120
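(Editor's sketch, not part of the diff: how a style file like the one above gets picked up. This assumes the `yapf` package is installed and uses its public `FormatCode` API; the sample source string is invented, and recent yapf releases return a `(formatted, changed)` pair.)

```python
# Minimal sketch: format a snippet against the .style.yapf shown above.
from yapf.yapflib.yapf_api import FormatCode

SRC = "def f(a,b):\n    return package_for_tag(tag_name=a, clone_url=b)\n"

# style_config accepts a path to a yapf style file (or a named style).
formatted, changed = FormatCode(SRC, style_config='.style.yapf')
print(formatted)
```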
@@ -46,17 +46,13 @@ def test_import_json(dds: DDS):
             }
         },
     }
-    dds.scope.enter_context(
-        dds.set_contents(json_fpath,
-                         json.dumps(import_data).encode()))
+    dds.scope.enter_context(dds.set_contents(json_fpath, json.dumps(import_data).encode()))
     dds.catalog_import(json_fpath)
 @pytest.yield_fixture
 def http_import_server():
-    handler = partial(
-        DirectoryServingHTTPRequestHandler,
-        dir=Path.cwd() / 'data/http-test-1')
+    handler = partial(DirectoryServingHTTPRequestHandler, dir=Path.cwd() / 'data/http-test-1')
     addr = ('0.0.0.0', 8000)
     pool = ThreadPoolExecutor()
     with HTTPServer(addr, handler) as httpd:
@@ -69,33 +65,17 @@ def http_import_server():
 @pytest.yield_fixture
 def http_repo_server():
-    handler = partial(
-        DirectoryServingHTTPRequestHandler,
-        dir=Path.cwd() / 'data/test-repo-1')
+    handler = partial(DirectoryServingHTTPRequestHandler, dir=Path.cwd() / 'data/test-repo-1')
     addr = ('0.0.0.0', 4646)
     pool = ThreadPoolExecutor()
     with HTTPServer(addr, handler) as httpd:
         pool.submit(lambda: httpd.serve_forever(poll_interval=0.1))
         try:
-            yield
+            yield 'http://localhost:4646'
         finally:
             httpd.shutdown()
-def test_import_http(dds: DDS, http_import_server):
-    dds.repo_dir.mkdir(parents=True, exist_ok=True)
-    dds.run(
-        [
-            'repo',
-            dds.repo_dir_arg,
-            'import',
-            'http://localhost:8000/neo-buffer-0.4.2.tar.gz',
-        ],
-        cwd=dds.repo_dir,
-    )
-    assert dds.repo_dir.joinpath('neo-buffer@0.4.2').is_dir()
 def test_repo_add(dds: DDS, http_repo_server):
     dds.repo_dir.mkdir(parents=True, exist_ok=True)
     dds.run([
@@ -103,7 +83,7 @@ def test_repo_add(dds: DDS, http_repo_server):
         dds.repo_dir_arg,
         'add',
         dds.catalog_path_arg,
-        'http://localhost:4646',
+        http_repo_server,
         '--update',
     ])
-    # dds.build_deps(['neo-url@0.2.1'])
+    dds.build_deps(['neo-fun@0.6.0'])
@@ -14,8 +14,7 @@ from . import fileutil
 class DDS:
-    def __init__(self, dds_exe: Path, test_dir: Path, project_dir: Path,
-                 scope: ExitStack) -> None:
+    def __init__(self, dds_exe: Path, test_dir: Path, project_dir: Path, scope: ExitStack) -> None:
         self.dds_exe = dds_exe
         self.test_dir = test_dir
         self.source_root = project_dir
@@ -47,18 +46,15 @@ class DDS:
         if self.scratch_dir.exists():
             shutil.rmtree(self.scratch_dir)
-    def run_unchecked(self, cmd: proc.CommandLine, *,
-                      cwd: Path = None) -> subprocess.CompletedProcess:
+    def run_unchecked(self, cmd: proc.CommandLine, *, cwd: Path = None) -> subprocess.CompletedProcess:
         full_cmd = itertools.chain([self.dds_exe, '-ltrace'], cmd)
         return proc.run(full_cmd, cwd=cwd or self.source_root)
-    def run(self, cmd: proc.CommandLine, *, cwd: Path = None,
-            check=True) -> subprocess.CompletedProcess:
+    def run(self, cmd: proc.CommandLine, *, cwd: Path = None, check=True) -> subprocess.CompletedProcess:
         cmdline = list(proc.flatten_cmd(cmd))
         res = self.run_unchecked(cmd, cwd=cwd)
         if res.returncode != 0 and check:
-            raise subprocess.CalledProcessError(
-                res.returncode, [self.dds_exe] + cmdline, res.stdout)
+            raise subprocess.CalledProcessError(res.returncode, [self.dds_exe] + cmdline, res.stdout)
         return res
     @property
@@ -73,8 +69,7 @@ class DDS:
     def catalog_path_arg(self) -> str:
         return f'--catalog={self.catalog_path}'
-    def build_deps(self, args: proc.CommandLine, *,
-                   toolchain: str = None) -> subprocess.CompletedProcess:
+    def build_deps(self, args: proc.CommandLine, *, toolchain: str = None) -> subprocess.CompletedProcess:
         return self.run([
             'build-deps',
             f'--toolchain={toolchain or self.default_builtin_toolchain}',
@@ -109,8 +104,7 @@ class DDS:
     def sdist_create(self) -> subprocess.CompletedProcess:
         self.build_dir.mkdir(exist_ok=True, parents=True)
-        return self.run(['sdist', 'create', self.project_dir_arg],
-                        cwd=self.build_dir)
+        return self.run(['sdist', 'create', self.project_dir_arg], cwd=self.build_dir)
     def sdist_export(self) -> subprocess.CompletedProcess:
         return self.run([
@@ -126,13 +120,11 @@ class DDS:
     @property
     def default_builtin_toolchain(self) -> str:
         if os.name == 'posix':
-            return ':c++17:gcc-9'
+            return str(Path(__file__).parent.joinpath('gcc-9.tc.jsonc'))
         elif os.name == 'nt':
-            return ':c++17:msvc'
+            return str(Path(__file__).parent.joinpath('msvc.tc.jsonc'))
         else:
-            raise RuntimeError(
-                f'No default builtin toolchain defined for tests on platform "{os.name}"'
-            )
+            raise RuntimeError(f'No default builtin toolchain defined for tests on platform "{os.name}"')
     @property
     def exe_suffix(self) -> str:
@@ -141,15 +133,11 @@ class DDS:
         elif os.name == 'nt':
             return '.exe'
         else:
-            raise RuntimeError(
-                f'We don\'t know the executable suffix for the platform "{os.name}"'
-            )
+            raise RuntimeError(f'We don\'t know the executable suffix for the platform "{os.name}"')
     def catalog_create(self) -> subprocess.CompletedProcess:
         self.scratch_dir.mkdir(parents=True, exist_ok=True)
-        return self.run(
-            ['catalog', 'create', f'--catalog={self.catalog_path}'],
-            cwd=self.test_dir)
+        return self.run(['catalog', 'create', f'--catalog={self.catalog_path}'], cwd=self.test_dir)
     def catalog_import(self, json_path: Path) -> subprocess.CompletedProcess:
         self.scratch_dir.mkdir(parents=True, exist_ok=True)
@@ -169,8 +157,7 @@ class DDS:
             req,
         ])
-    def set_contents(self, path: Union[str, Path],
-                     content: bytes) -> ContextManager[Path]:
+    def set_contents(self, path: Union[str, Path], content: bytes) -> ContextManager[Path]:
         return fileutil.set_contents(self.source_root / path, content)
@@ -190,8 +177,7 @@ class DDSFixtureParams(NamedTuple):
 def dds_fixture_conf(*argsets: DDSFixtureParams):
     args = list(argsets)
-    return pytest.mark.parametrize(
-        'dds', args, indirect=True, ids=[p.ident for p in args])
+    return pytest.mark.parametrize('dds', args, indirect=True, ids=[p.ident for p in args])
 def dds_fixture_conf_1(subdir: Union[Path, str]):
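(Editor's sketch, not part of the diff: one way the reflowed `dds_fixture_conf` decorator above is consumed. The `ident` field comes from the hunk; the `subdir` field name and the test body are assumptions for illustration only.)

```python
# Hypothetical usage: parametrize the indirect `dds` fixture over two
# project directories, with `ident` used as the pytest id for each case.
@dds_fixture_conf(
    DDSFixtureParams(ident='simple', subdir='simple_project'),
    DDSFixtureParams(ident='nested', subdir='nested_project'),
)
def test_sdist_create(dds: DDS) -> None:
    dds.sdist_create()
```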
@@ -0,0 +1,9 @@
+{
+    "compiler_id": "gnu",
+    "c_compiler": "gcc-9",
+    "cxx_compiler": "g++-9",
+    "cxx_version": "c++17",
+    "cxx_flags": [
+        "-fconcepts"
+    ]
+}
@@ -0,0 +1,4 @@
+{
+    "$schema": "../res/toolchain-schema.json",
+    "compiler_id": "msvc"
+}
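(Editor's note, not part of the diff: the two toolchain files above are what the updated `default_builtin_toolchain` property now resolves to, and `build_deps` forwards the value as `--toolchain=<path>`. A minimal sketch of passing one explicitly; the test name is hypothetical and `dds` is the usual fixture.)

```python
from pathlib import Path

def test_build_deps_with_gcc9_toolchain(dds: DDS) -> None:
    # gcc-9.tc.jsonc sits next to the test helpers, per the new file above;
    # build_deps turns this into a --toolchain=<path> argument.
    tc_file = Path(__file__).parent / 'gcc-9.tc.jsonc'
    dds.build_deps(['neo-fun@0.6.0'], toolchain=str(tc_file))
```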
@@ -225,8 +225,10 @@ def _version_for_github_tag(pkg_name: str, desc: str, clone_url: str,
             f'Unknown "depends" object from json file: {depends!r}')
     remote = Git(url=clone_url, ref=tag['name'])
-    return Version(
-        version, description=desc, depends=list(pairs), remote=remote)
+    return Version(version,
+                   description=desc,
+                   depends=list(pairs),
+                   remote=remote)
 def github_package(name: str, repo: str, want_tags: Iterable[str]) -> Package:
@@ -244,8 +246,8 @@ def github_package(name: str, repo: str, want_tags: Iterable[str]) -> Package:
     tag_items = (t for t in avail_tags if t['name'] in want_tags)
     versions = HTTP_POOL.map(
-        lambda tag: _version_for_github_tag(name, desc, repo_data['clone_url'], tag),
-        tag_items)
+        lambda tag: _version_for_github_tag(name, desc, repo_data['clone_url'],
+                                            tag), tag_items)
     return Package(name, list(versions))
@@ -258,12 +260,11 @@ def simple_packages(name: str,
                     *,
                     tag_fmt: str = '{}') -> Package:
     return Package(name, [
-        Version(
-            ver.version,
-            description=description,
-            remote=Git(
-                git_url, tag_fmt.format(ver.version), auto_lib=auto_lib),
-            depends=ver.depends) for ver in versions
+        Version(ver.version,
+                description=description,
+                remote=Git(
+                    git_url, tag_fmt.format(ver.version), auto_lib=auto_lib),
+                depends=ver.depends) for ver in versions
     ])
@@ -276,14 +277,12 @@ def many_versions(name: str,
                   transforms: Sequence[FSTransform] = (),
                   description='(No description was provided)') -> Package:
     return Package(name, [
-        Version(
-            ver,
-            description='\n'.join(textwrap.wrap(description)),
-            remote=Git(
-                url=git_url,
-                ref=tag_fmt.format(ver),
-                auto_lib=auto_lib,
-                transforms=transforms)) for ver in versions
+        Version(ver,
+                description='\n'.join(textwrap.wrap(description)),
+                remote=Git(url=git_url,
+                           ref=tag_fmt.format(ver),
+                           auto_lib=auto_lib,
+                           transforms=transforms)) for ver in versions
     ])
@@ -298,9 +297,9 @@ PACKAGES = [
                    ['0.2.3', '0.3.0', '0.4.0', '0.4.1']),
     github_package('neo-fun', 'vector-of-bool/neo-fun', [
         '0.1.1', '0.2.0', '0.2.1', '0.3.0', '0.3.1', '0.3.2', '0.4.0', '0.4.1',
-        '0.4.2', '0.5.0', '0.5.1', '0.5.2', '0.5.3', '0.5.4', '0.5.5',
+        '0.4.2', '0.5.0', '0.5.1', '0.5.2', '0.5.3', '0.5.4', '0.5.5', '0.6.0',
     ]),
-    github_package('neo-io', 'vector-of-bool/neo-io', ['0.1.0']),
+    github_package('neo-io', 'vector-of-bool/neo-io', ['0.1.0', '0.1.1']),
     github_package('neo-http', 'vector-of-bool/neo-http', ['0.1.0']),
     github_package('neo-concepts', 'vector-of-bool/neo-concepts', (
         '0.2.2',
@@ -974,47 +973,3 @@ if __name__ == "__main__":
     Path('catalog.json').write_text(json_str)
     Path('catalog.old.json').write_text(
         json.dumps(old_data, indent=2, sort_keys=True))
-    cpp_template = textwrap.dedent(r'''
-        #include <dds/catalog/package_info.hpp>
-        #include <dds/catalog/init_catalog.hpp>
-        #include <dds/catalog/import.hpp>
-        #include <neo/gzip.hpp>
-        #include <neo/transform_io.hpp>
-        #include <neo/string_io.hpp>
-        #include <neo/inflate.hpp>
-        /**
-         * The following array of integers is generated and contains gzip-compressed
-         * JSON encoded initial catalog. MSVC can't handle string literals over
-         * 64k large, so we have to resort to using a regular char array:
-         */
-        static constexpr const unsigned char INIT_PACKAGES_CONTENT[] = {
-            @JSON@
-        };
-        const std::vector<dds::package_info>&
-        dds::init_catalog_packages() noexcept {
-            using std::nullopt;
-            static auto pkgs = []{
-                using namespace neo;
-                string_dynbuf_io str_out;
-                buffer_copy(str_out,
-                            buffer_transform_source{
-                                buffers_consumer(as_buffer(INIT_PACKAGES_CONTENT)),
-                                gzip_decompressor{inflate_decompressor{}}},
-                            @JSON_LEN@);
-                return dds::parse_packages_json(str_out.read_area_view());
-            }();
-            return pkgs;
-        }
-    ''')
-    json_small = json.dumps(data, sort_keys=True)
-    json_compr = gzip.compress(json_small.encode('utf-8'), compresslevel=9)
-    json_small_arr = ','.join(str(c) for c in json_compr)
-    cpp_content = cpp_template.replace('@JSON@', json_small_arr).replace(
-        '@JSON_LEN@', str(len(json_small)))
-    Path('src/dds/catalog/init_catalog.cpp').write_text(cpp_content)
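(Editor's illustration, not part of the diff: the generator code removed above embedded the catalog as gzip-compressed JSON in a C array and inflated it at startup. A small Python mirror of that round trip, using a stand-in `data` dict rather than the real catalog.)

```python
# Sketch of the removed embedding scheme: compress catalog JSON, render it as
# a comma-separated byte list (what @JSON@ was replaced with), then recover it.
import gzip
import json

data = {'packages': {}}  # stand-in for the real catalog dictionary
json_small = json.dumps(data, sort_keys=True)
json_compr = gzip.compress(json_small.encode('utf-8'), compresslevel=9)
array_body = ','.join(str(b) for b in json_compr)

# What the generated C++ did at runtime, mirrored in Python:
restored = json.loads(gzip.decompress(json_compr).decode('utf-8'))
assert restored == data
```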