} | } | ||||
}) | }) | ||||
dds.scope.enter_context(ensure_dir(dds.source_root)) | |||||
dds.repo_add(http_repo.url) | dds.repo_add(http_repo.url) | ||||
dds.catalog_get('neo-sqlite3@0.3.0') | dds.catalog_get('neo-sqlite3@0.3.0') | ||||
assert (dds.scratch_dir / 'neo-sqlite3@0.3.0').is_dir() | assert (dds.scratch_dir / 'neo-sqlite3@0.3.0').is_dir() |
the given TCP port. | the given TCP port. | ||||
""" | """ | ||||
handler = partial(DirectoryServingHTTPRequestHandler, dir=dirpath) | handler = partial(DirectoryServingHTTPRequestHandler, dir=dirpath) | ||||
addr = ('localhost', port) | |||||
addr = ('127.0.0.1', port) | |||||
pool = ThreadPoolExecutor() | pool = ThreadPoolExecutor() | ||||
with HTTPServer(addr, handler) as httpd: | with HTTPServer(addr, handler) as httpd: | ||||
pool.submit(lambda: httpd.serve_forever(poll_interval=0.1)) | pool.submit(lambda: httpd.serve_forever(poll_interval=0.1)) | ||||
try: | try: | ||||
yield ServerInfo(f'http://localhost:{port}', dirpath) | |||||
print('Serving at', addr) | |||||
yield ServerInfo(f'http://127.0.0.1:{port}', dirpath) | |||||
finally: | finally: | ||||
httpd.shutdown() | httpd.shutdown() | ||||
Import some packages into the repo for the given JSON data. Uses | Import some packages into the repo for the given JSON data. Uses | ||||
mkrepo.py | mkrepo.py | ||||
""" | """ | ||||
with tempfile.NamedTemporaryFile() as f: | |||||
with tempfile.NamedTemporaryFile(delete=False) as f: | |||||
f.write(json.dumps(data).encode()) | f.write(json.dumps(data).encode()) | ||||
f.flush() | |||||
f.close() | |||||
self.import_json_file(Path(f.name)) | self.import_json_file(Path(f.name)) | ||||
Path(f.name).unlink() | |||||
def import_json_file(self, fpath: Path) -> None: | def import_json_file(self, fpath: Path) -> None: | ||||
""" | """ |
{ | { | ||||
"$schema": "../res/toolchain-schema.json", | "$schema": "../res/toolchain-schema.json", | ||||
"compiler_id": "msvc" | |||||
"compiler_id": "msvc", | |||||
"cxx_flags": "/std:c++latest" | |||||
} | } |
import os | import os | ||||
import re | import re | ||||
import shutil | import shutil | ||||
import stat | |||||
import sys | import sys | ||||
import tarfile | import tarfile | ||||
import tempfile | import tempfile | ||||
if not self.only_matching: | if not self.only_matching: | ||||
# Remove everything | # Remove everything | ||||
if abspath.is_dir(): | if abspath.is_dir(): | ||||
shutil.rmtree(abspath) | |||||
better_rmtree(abspath) | |||||
else: | else: | ||||
abspath.unlink() | abspath.unlink() | ||||
return | return | ||||
items = glob_if_exists(abspath, pat) | items = glob_if_exists(abspath, pat) | ||||
for f in items: | for f in items: | ||||
if f.is_dir(): | if f.is_dir(): | ||||
shutil.rmtree(f) | |||||
better_rmtree(f) | |||||
else: | else: | ||||
f.unlink() | f.unlink() | ||||
check_call(['git', 'clone', '--quiet', self.url, f'--depth=1', f'--branch={self.ref}', str(tdir)]) | check_call(['git', 'clone', '--quiet', self.url, f'--depth=1', f'--branch={self.ref}', str(tdir)]) | ||||
yield tdir | yield tdir | ||||
finally: | finally: | ||||
shutil.rmtree(tdir) | |||||
better_rmtree(tdir) | |||||
class ForeignPackage(NamedTuple): | class ForeignPackage(NamedTuple): | ||||
yield SpecPackage.parse_data(name, version, defin) | yield SpecPackage.parse_data(name, version, defin) | ||||
def _on_rm_error_win32(fn, filepath, _exc_info): | |||||
p = Path(filepath) | |||||
p.chmod(stat.S_IWRITE) | |||||
p.unlink() | |||||
def better_rmtree(dir: Path) -> None:
    """Recursively delete *dir*, tolerating read-only entries on Windows.

    On non-Windows platforms this is a plain ``shutil.rmtree``; on Windows
    it installs an ``onerror`` handler that clears the read-only attribute
    and retries, since read-only files otherwise abort the removal.
    """
    if os.name != 'nt':
        shutil.rmtree(dir)
        return
    shutil.rmtree(dir, onerror=_on_rm_error_win32)
@contextmanager | @contextmanager | ||||
def http_dl_unpack(url: str) -> Iterator[Path]: | def http_dl_unpack(url: str) -> Iterator[Path]: | ||||
req = request.urlopen(url) | req = request.urlopen(url) | ||||
subdir = next(iter(Path(tdir).iterdir())) | subdir = next(iter(Path(tdir).iterdir())) | ||||
yield subdir | yield subdir | ||||
finally: | finally: | ||||
shutil.rmtree(tdir) | |||||
better_rmtree(tdir) | |||||
@contextmanager | @contextmanager |