buildcache push: improve printing (#36141)

Scott Wittenburg 2023-05-03 02:42:22 -06:00, committed by GitHub
parent 11aa2d721e
commit c7e60f441a
9 changed files with 284 additions and 224 deletions


@@ -24,7 +24,7 @@
import warnings
from contextlib import closing, contextmanager
from gzip import GzipFile
from typing import Union
from typing import List, NamedTuple, Optional, Union
from urllib.error import HTTPError, URLError
import ruamel.yaml as yaml
@@ -509,13 +509,11 @@ def _binary_index():
class NoOverwriteException(spack.error.SpackError):
"""
Raised when a file exists and must be overwritten.
"""
"""Raised when a file would be overwritten"""
def __init__(self, file_path):
super(NoOverwriteException, self).__init__(
'"{}" exists in buildcache. Use --force flag to overwrite.'.format(file_path)
f"Refusing to overwrite the following file: {file_path}"
)
@@ -1205,48 +1203,42 @@ def _do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo):
tar_add_metadata(tar, buildinfo_file_name(pkg_dir), buildinfo)
def _build_tarball(
spec,
out_url,
force=False,
relative=False,
unsigned=False,
allow_root=False,
key=None,
regenerate_index=False,
):
class PushOptions(NamedTuple):
#: Overwrite existing tarball/metadata files in buildcache
force: bool = False
#: Whether to use relative RPATHs
relative: bool = False
#: Allow absolute paths to package prefixes when creating a tarball
allow_root: bool = False
#: Regenerate indices after pushing
regenerate_index: bool = False
#: Whether to sign or not.
unsigned: bool = False
#: What key to use for signing
key: Optional[str] = None
def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
"""
Build a tarball from given spec and put it into the directory structure
used at the mirror (following <tarball_directory_name>).
This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
spec.json file already exists in the buildcache.
"""
if not spec.concrete:
raise ValueError("spec must be concrete to build tarball")
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
_build_tarball_in_stage_dir(
spec,
out_url,
stage_dir=tmpdir,
force=force,
relative=relative,
unsigned=unsigned,
allow_root=allow_root,
key=key,
regenerate_index=regenerate_index,
)
_build_tarball_in_stage_dir(spec, out_url, stage_dir=tmpdir, options=options)
def _build_tarball_in_stage_dir(
spec,
out_url,
stage_dir,
force=False,
relative=False,
unsigned=False,
allow_root=False,
key=None,
regenerate_index=False,
):
def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, options: PushOptions):
cache_prefix = build_cache_prefix(stage_dir)
tarfile_name = tarball_name(spec, ".spack")
tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
@@ -1256,7 +1248,7 @@ def _build_tarball_in_stage_dir(
mkdirp(tarfile_dir)
if web_util.url_exists(remote_spackfile_path):
if force:
if options.force:
web_util.remove_url(remote_spackfile_path)
else:
raise NoOverwriteException(url_util.format(remote_spackfile_path))
@@ -1276,7 +1268,7 @@ def _build_tarball_in_stage_dir(
remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path)
# If force and exists, overwrite. Otherwise raise exception on collision.
if force:
if options.force:
if web_util.url_exists(remote_specfile_path):
web_util.remove_url(remote_specfile_path)
if web_util.url_exists(remote_signed_specfile_path):
@@ -1293,7 +1285,7 @@ def _build_tarball_in_stage_dir(
# mode, Spack unfortunately *does* mutate rpaths and links ahead of time.
# For now, we only make a full copy of the spec prefix when in relative mode.
if relative:
if options.relative:
# tarfile is used because it preserves hardlinks etc. best.
binaries_dir = workdir
temp_tarfile_name = tarball_name(spec, ".tar")
@@ -1307,19 +1299,19 @@ def _build_tarball_in_stage_dir(
binaries_dir = spec.prefix
# create info for later relocation and create tar
buildinfo = get_buildinfo_dict(spec, relative)
buildinfo = get_buildinfo_dict(spec, options.relative)
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
if relative:
make_package_relative(workdir, spec, buildinfo, allow_root)
elif not allow_root:
if options.relative:
make_package_relative(workdir, spec, buildinfo, options.allow_root)
elif not options.allow_root:
ensure_package_relocatable(buildinfo, binaries_dir)
_do_create_tarball(tarfile_path, binaries_dir, pkg_dir, buildinfo)
# remove copy of install directory
if relative:
if options.relative:
shutil.rmtree(workdir)
# get the sha256 checksum of the tarball
@@ -1342,7 +1334,7 @@ def _build_tarball_in_stage_dir(
# This will be used to determine if the directory layout has changed.
buildinfo = {}
buildinfo["relative_prefix"] = os.path.relpath(spec.prefix, spack.store.layout.root)
buildinfo["relative_rpaths"] = relative
buildinfo["relative_rpaths"] = options.relative
spec_dict["buildinfo"] = buildinfo
with open(specfile_path, "w") as outfile:
@@ -1353,40 +1345,40 @@ def _build_tarball_in_stage_dir(
json.dump(spec_dict, outfile, indent=0, separators=(",", ":"))
# sign the tarball and spec file with gpg
if not unsigned:
key = select_signing_key(key)
sign_specfile(key, force, specfile_path)
if not options.unsigned:
key = select_signing_key(options.key)
sign_specfile(key, options.force, specfile_path)
# push tarball and signed spec json to remote mirror
web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
web_util.push_to_url(
signed_specfile_path if not unsigned else specfile_path,
remote_signed_specfile_path if not unsigned else remote_specfile_path,
signed_specfile_path if not options.unsigned else specfile_path,
remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
keep_original=False,
)
tty.debug('Buildcache for "{0}" written to \n {1}'.format(spec, remote_spackfile_path))
# push the key to the build cache's _pgp directory so it can be
# imported
if not unsigned:
push_keys(out_url, keys=[key], regenerate_index=regenerate_index, tmpdir=stage_dir)
if not options.unsigned:
push_keys(out_url, keys=[key], regenerate_index=options.regenerate_index, tmpdir=stage_dir)
# create an index.json for the build_cache directory so specs can be
# found
if regenerate_index:
if options.regenerate_index:
generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))
return None
def nodes_to_be_packaged(specs, root=True, dependencies=True):
def specs_to_be_packaged(
specs: List[Spec], root: bool = True, dependencies: bool = True
) -> List[Spec]:
"""Return the list of nodes to be packaged, given a list of specs.
Args:
specs (List[spack.spec.Spec]): list of root specs to be processed
root (bool): include the root of each spec in the nodes
dependencies (bool): include the dependencies of each
specs: list of root specs to be processed
root: include the root of each spec in the nodes
dependencies: include the dependencies of each
spec in the nodes
"""
if not root and not dependencies:
@@ -1404,32 +1396,26 @@ def nodes_to_be_packaged(specs, root=True, dependencies=True):
return list(filter(packageable, nodes))
def push(specs, push_url, include_root: bool = True, include_dependencies: bool = True, **kwargs):
"""Create a binary package for each of the specs passed as input and push them
to a given push URL.
def push(spec: Spec, mirror_url: str, options: PushOptions):
"""Create and push binary package for a single spec to the specified
mirror url.
Args:
specs (List[spack.spec.Spec]): installed specs to be packaged
push_url (str): url where to push the binary package
include_root (bool): include the root of each spec in the nodes
include_dependencies (bool): include the dependencies of each
spec in the nodes
**kwargs: TODO
spec: Spec to package and push
mirror_url: Desired destination url for binary package
options: Push options; see ``PushOptions``
Returns:
True if package was pushed, False otherwise.
"""
# Be explicit about the argument type
if type(include_root) != bool or type(include_dependencies) != bool:
raise ValueError("Expected include_root/include_dependencies to be True/False")
try:
push_or_raise(spec, mirror_url, options)
except NoOverwriteException as e:
warnings.warn(str(e))
return False
nodes = nodes_to_be_packaged(specs, root=include_root, dependencies=include_dependencies)
# TODO: This seems to be an easy target for task
# TODO: distribution using a parallel pool
for node in nodes:
try:
_build_tarball(node, push_url, **kwargs)
except NoOverwriteException as e:
warnings.warn(str(e))
return True
def try_verify(specfile_path):

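The new `PushOptions` / `push_or_raise` / `push` trio replaces the old kwargs-based `_build_tarball` entry point: callers fill in one options object and pick either the raising variant or the boolean-returning one. A minimal usage sketch, assuming a concrete spec and a throwaway local mirror (both are illustrative; neither appears in this diff):

    import spack.binary_distribution as bindist
    from spack.spec import Spec

    # Hypothetical inputs, for illustration only.
    spec = Spec("zlib").concretized()
    mirror_url = "file:///tmp/demo-mirror"

    options = bindist.PushOptions(force=False, unsigned=True, regenerate_index=True)

    # push() turns NoOverwriteException into a warning and a False return;
    # push_or_raise() lets the exception propagate to the caller.
    if not bindist.push(spec, mirror_url, options):
        print(f"{spec.name} is already in the buildcache; use force=True to overwrite")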

@@ -16,6 +16,8 @@
import tempfile
import time
import zipfile
from collections import namedtuple
from typing import List, Optional
from urllib.error import HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import HTTPHandler, Request, build_opener
@@ -33,6 +35,7 @@
import spack.mirror
import spack.paths
import spack.repo
import spack.spec
import spack.util.git
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
@@ -52,6 +55,8 @@
spack_gpg = spack.main.SpackCommand("gpg")
spack_compiler = spack.main.SpackCommand("compiler")
PushResult = namedtuple("PushResult", "success url")
class TemporaryDirectory(object):
def __init__(self):
@@ -1585,33 +1590,28 @@ def configure_compilers(compiler_action, scope=None):
return None
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
"""Unchecked version of the public API, for easier mocking"""
unsigned = not sign_binaries
tty.debug("Creating buildcache ({0})".format("unsigned" if unsigned else "signed"))
hashes = env.all_hashes() if env else None
matches = spack.store.specfile_matches(specfile_path, hashes=hashes)
push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
kwargs = {"force": True, "allow_root": True, "unsigned": unsigned}
bindist.push(matches, push_url, include_root=True, include_dependencies=False, **kwargs)
return bindist.push(
input_spec, push_url, bindist.PushOptions(force=True, allow_root=True, unsigned=unsigned)
)
def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
def push_mirror_contents(input_spec: spack.spec.Spec, mirror_url, sign_binaries):
"""Push one or more binary packages to the mirror.
Arguments:
env (spack.environment.Environment): Optional environment. If
provided, it is used to make sure binary package to push
exists in the environment.
specfile_path (str): Path to spec.json corresponding to built pkg
to push.
input_spec (spack.spec.Spec): Installed spec to push
mirror_url (str): Base url of target mirror
sign_binaries (bool): If True, spack will attempt to sign binary
package before pushing.
"""
try:
_push_mirror_contents(env, specfile_path, sign_binaries, mirror_url)
return _push_mirror_contents(input_spec, sign_binaries, mirror_url)
except Exception as inst:
# If the mirror we're pushing to is on S3 and there's some
# permissions problem, for example, we can't just target
@@ -1628,6 +1628,7 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
if any(x in err_msg for x in ["Access Denied", "InvalidAccessKeyId"]):
tty.msg("Permission problem writing to {0}".format(mirror_url))
tty.msg(err_msg)
return False
else:
raise inst
@@ -2131,39 +2132,50 @@ def process_command(name, commands, repro_dir):
return exit_code
def create_buildcache(**kwargs):
def create_buildcache(
input_spec: spack.spec.Spec,
*,
pr_pipeline: bool,
pipeline_mirror_url: Optional[str] = None,
buildcache_mirror_url: Optional[str] = None,
) -> List[PushResult]:
"""Create the buildcache at the provided mirror(s).
Arguments:
kwargs (dict): dictionary of arguments used to create the buildcache
input_spec: Installed spec to package and push
buildcache_mirror_url: URL for the buildcache mirror
pipeline_mirror_url: URL for the pipeline mirror
pr_pipeline: True if the CI job is for a PR
List of recognized keys:
* "env" (spack.environment.Environment): the active environment
* "buildcache_mirror_url" (str or None): URL for the buildcache mirror
* "pipeline_mirror_url" (str or None): URL for the pipeline mirror
* "pr_pipeline" (bool): True if the CI job is for a PR
* "json_path" (str): path the the spec's JSON file
Returns: A list of PushResults, indicating success or failure.
"""
env = kwargs.get("env")
buildcache_mirror_url = kwargs.get("buildcache_mirror_url")
pipeline_mirror_url = kwargs.get("pipeline_mirror_url")
pr_pipeline = kwargs.get("pr_pipeline")
json_path = kwargs.get("json_path")
sign_binaries = pr_pipeline is False and can_sign_binaries()
results = []
# Create buildcache in either the main remote mirror, or in the
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
push_mirror_contents(env, json_path, buildcache_mirror_url, sign_binaries)
results.append(
PushResult(
success=push_mirror_contents(input_spec, buildcache_mirror_url, sign_binaries),
url=buildcache_mirror_url,
)
)
# Create another copy of that buildcache in the per-pipeline
# temporary storage mirror (this is only done if either
# artifacts buildcache is enabled or a temporary storage url
# prefix is set)
if pipeline_mirror_url:
push_mirror_contents(env, json_path, pipeline_mirror_url, sign_binaries)
results.append(
PushResult(
success=push_mirror_contents(input_spec, pipeline_mirror_url, sign_binaries),
url=pipeline_mirror_url,
)
)
return results
def write_broken_spec(url, pkg_name, stack_name, job_url, pipeline_url, spec_dict):

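`create_buildcache` now returns one `PushResult` per target mirror instead of reporting nothing, so the caller decides how to announce success or failure (as `ci_rebuild` does later in this diff). A sketch of consuming the results, assuming `job_spec` is an installed concrete spec; the URL is a placeholder:

    import spack.ci as spack_ci

    results = spack_ci.create_buildcache(
        input_spec=job_spec,
        pr_pipeline=False,
        buildcache_mirror_url="file:///tmp/buildcache",  # placeholder
        pipeline_mirror_url=None,
    )
    for result in results:
        verb = "Pushed" if result.success else "Failed to push"
        print(f"{verb} {job_spec.name} to {result.url}")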

@@ -10,6 +10,8 @@
import tempfile
import llnl.util.tty as tty
import llnl.util.tty.color as clr
from llnl.util.lang import elide_list
import spack.binary_distribution as bindist
import spack.cmd
@@ -449,32 +451,70 @@ def create_fn(args):
# TODO: remove this in 0.21. If we have mirror_flag, the first
# spec is in the positional mirror arg due to argparse limitations.
specs = args.specs
input_specs = args.specs
if args.mirror_flag and args.mirror:
specs.insert(0, args.mirror)
input_specs.insert(0, args.mirror)
url = mirror.push_url
matches = _matching_specs(specs, args.spec_file)
msg = "Pushing binary packages to {0}/build_cache".format(url)
tty.msg(msg)
kwargs = {
"key": args.key,
"force": args.force,
"relative": args.rel,
"unsigned": args.unsigned,
"allow_root": args.allow_root,
"regenerate_index": args.rebuild_index,
}
bindist.push(
matches,
url,
include_root="package" in args.things_to_install,
include_dependencies="dependencies" in args.things_to_install,
**kwargs,
specs = bindist.specs_to_be_packaged(
_matching_specs(input_specs, args.spec_file),
root="package" in args.things_to_install,
dependencies="dependencies" in args.things_to_install,
)
# When pushing multiple specs, print the url once ahead of time, as well as how
# many specs are being pushed.
if len(specs) > 1:
tty.info(f"Selected {len(specs)} specs to push to {url}")
skipped = []
# tty printing
color = clr.get_color_when()
format_spec = lambda s: s.format("{name}{@version}{/hash:7}", color=color)
total_specs = len(specs)
digits = len(str(total_specs))
for i, spec in enumerate(specs):
try:
bindist.push_or_raise(
spec,
url,
bindist.PushOptions(
force=args.force,
relative=args.rel,
unsigned=args.unsigned,
allow_root=args.allow_root,
key=args.key,
regenerate_index=args.rebuild_index,
),
)
if total_specs > 1:
msg = f"[{i+1:{digits}}/{total_specs}] Pushed {format_spec(spec)}"
else:
msg = f"Pushed {format_spec(spec)} to {url}"
tty.info(msg)
except bindist.NoOverwriteException:
skipped.append(format_spec(spec))
if skipped:
if len(specs) == 1:
tty.info("The spec is already in the buildcache. Use --force to overwrite it.")
elif len(skipped) == len(specs):
tty.info("All specs are already in the buildcache. Use --force to overwite them.")
else:
tty.info(
"The following {} specs were skipped as they already exist in the buildcache:\n"
" {}\n"
" Use --force to overwrite them.".format(
len(skipped), ", ".join(elide_list(skipped, 5))
)
)
def install_fn(args):
"""install from a binary package"""

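The aligned progress counter above uses Python's nested format specifications: the field width is itself computed from the total count, so entries like `[ 9/12]` and `[10/12]` line up. A standalone illustration with stand-in names:

    names = ["zlib", "openssl", "cmake"]  # stand-ins for formatted spec names
    total = len(names)
    digits = len(str(total))
    for i, name in enumerate(names):
        # i+1 is right-aligned in a field `digits` characters wide
        print(f"[{i+1:{digits}}/{total}] Pushed {name}")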

@@ -9,6 +9,7 @@
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import llnl.util.tty.color as clr
import spack.binary_distribution as bindist
import spack.ci as spack_ci
@@ -670,13 +671,20 @@ def ci_rebuild(args):
# outside of the pipeline environment.
if install_exit_code == 0:
if buildcache_mirror_url or pipeline_mirror_url:
spack_ci.create_buildcache(
env=env,
for result in spack_ci.create_buildcache(
input_spec=job_spec,
buildcache_mirror_url=buildcache_mirror_url,
pipeline_mirror_url=pipeline_mirror_url,
pr_pipeline=spack_is_pr_pipeline,
json_path=job_spec_json_path,
)
):
msg = tty.msg if result.success else tty.warn
msg(
"{} {} to {}".format(
"Pushed" if result.success else "Failed to push",
job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
result.url,
)
)
# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.


@@ -889,73 +889,6 @@ def urlopen(request: urllib.request.Request):
fetcher.conditional_fetch()
@pytest.mark.parametrize(
"root,deps,expected",
[
(
True,
True,
[
"dttop",
"dtbuild1",
"dtbuild2",
"dtlink2",
"dtrun2",
"dtlink1",
"dtlink3",
"dtlink4",
"dtrun1",
"dtlink5",
"dtrun3",
"dtbuild3",
],
),
(
False,
True,
[
"dtbuild1",
"dtbuild2",
"dtlink2",
"dtrun2",
"dtlink1",
"dtlink3",
"dtlink4",
"dtrun1",
"dtlink5",
"dtrun3",
"dtbuild3",
],
),
(True, False, ["dttop"]),
(False, False, []),
],
)
def test_correct_specs_are_pushed(
root, deps, expected, default_mock_concretization, tmpdir, temporary_store, monkeypatch
):
# Concretize dttop and add it to the temporary database (without prefixes)
spec = default_mock_concretization("dttop")
temporary_store.db.add(spec, directory_layout=None)
# Create a mirror push url
push_url = spack.mirror.Mirror.from_local_path(str(tmpdir)).push_url
packages_to_push = []
def fake_build_tarball(node, push_url, **kwargs):
assert push_url == push_url
assert not kwargs
assert isinstance(node, Spec)
packages_to_push.append(node.name)
monkeypatch.setattr(bindist, "_build_tarball", fake_build_tarball)
bindist.push([spec], push_url, include_root=root, include_dependencies=deps)
assert packages_to_push == expected
def test_reproducible_tarball_is_reproducible(tmpdir):
p = tmpdir.mkdir("prefix")
p.mkdir("bin")


@@ -9,7 +9,7 @@
import pytest
import spack.binary_distribution
import spack.binary_distribution as bd
import spack.main
import spack.spec
import spack.util.url
@@ -26,22 +26,22 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmpdi
# Runs fine the first time, throws the second time
out_url = spack.util.url.path_to_file_url(str(tmpdir))
spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)
with pytest.raises(spack.binary_distribution.NoOverwriteException):
spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)
bd.push_or_raise(spec, out_url, bd.PushOptions(unsigned=True))
with pytest.raises(bd.NoOverwriteException):
bd.push_or_raise(spec, out_url, bd.PushOptions(unsigned=True))
# Should work fine with force=True
spack.binary_distribution._build_tarball(spec, out_url, force=True, unsigned=True)
bd.push_or_raise(spec, out_url, bd.PushOptions(force=True, unsigned=True))
# Remove the tarball and try again.
# This must *also* throw, because of the existing .spec.json file
os.remove(
os.path.join(
spack.binary_distribution.build_cache_prefix("."),
spack.binary_distribution.tarball_directory_name(spec),
spack.binary_distribution.tarball_name(spec, ".spack"),
bd.build_cache_prefix("."),
bd.tarball_directory_name(spec),
bd.tarball_name(spec, ".spack"),
)
)
with pytest.raises(spack.binary_distribution.NoOverwriteException):
spack.binary_distribution._build_tarball(spec, out_url, unsigned=True)
with pytest.raises(bd.NoOverwriteException):
bd.push_or_raise(spec, out_url, bd.PushOptions(unsigned=True))


@@ -476,16 +476,31 @@ def _fail(self, args):
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
# Monkeypatching ci method tested elsewhere to reduce number of methods
# that would need to be patched here.
monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c, d: None)
"""Test that create_buildcache returns a list of objects with the correct
keys and types."""
monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True)
args = {
"env": None,
"buildcache_mirror_url": "file://fake-url",
"pipeline_mirror_url": "file://fake-url",
}
ci.create_buildcache(**args)
results = ci.create_buildcache(
None,
pr_pipeline=True,
buildcache_mirror_url="file:///fake-url-one",
pipeline_mirror_url="file:///fake-url-two",
)
assert len(results) == 2
result1, result2 = results
assert result1.success
assert result1.url == "file:///fake-url-one"
assert result2.success
assert result2.url == "file:///fake-url-two"
results = ci.create_buildcache(
None, pr_pipeline=True, buildcache_mirror_url="file:///fake-url-one"
)
assert len(results) == 1
assert results[0].success
assert results[0].url == "file:///fake-url-one"
def test_ci_run_standalone_tests_missing_requirements(


@@ -267,3 +267,70 @@ def test_buildcache_create_install(
tarball = spack.binary_distribution.tarball_name(spec, ".spec.json")
assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball_path))
assert os.path.exists(os.path.join(str(tmpdir), "build_cache", tarball))
@pytest.mark.parametrize(
"things_to_install,expected",
[
(
"",
[
"dttop",
"dtbuild1",
"dtbuild2",
"dtlink2",
"dtrun2",
"dtlink1",
"dtlink3",
"dtlink4",
"dtrun1",
"dtlink5",
"dtrun3",
"dtbuild3",
],
),
(
"dependencies",
[
"dtbuild1",
"dtbuild2",
"dtlink2",
"dtrun2",
"dtlink1",
"dtlink3",
"dtlink4",
"dtrun1",
"dtlink5",
"dtrun3",
"dtbuild3",
],
),
("package", ["dttop"]),
],
)
def test_correct_specs_are_pushed(
things_to_install, expected, tmpdir, monkeypatch, default_mock_concretization, temporary_store
):
# Concretize dttop and add it to the temporary database (without prefixes)
spec = default_mock_concretization("dttop")
temporary_store.db.add(spec, directory_layout=None)
slash_hash = "/{0}".format(spec.dag_hash())
packages_to_push = []
def fake_push(node, push_url, options):
assert isinstance(node, Spec)
packages_to_push.append(node.name)
monkeypatch.setattr(spack.binary_distribution, "push_or_raise", fake_push)
buildcache_create_args = ["create", "-d", str(tmpdir), "--unsigned"]
if things_to_install != "":
buildcache_create_args.extend(["--only", things_to_install])
buildcache_create_args.extend([slash_hash])
buildcache(*buildcache_create_args)
assert packages_to_push == expected


@@ -1240,7 +1240,7 @@ def test_push_mirror_contents(
with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
with ev.read("test") as env:
with ev.read("test"):
concrete_spec = Spec("patchelf").concretized()
spec_json = concrete_spec.to_json(hash=ht.dag_hash)
json_path = str(tmpdir.join("spec.json"))
@@ -1249,8 +1249,7 @@
install_cmd("--add", "--keep-stage", json_path)
# env, spec, json_path, mirror_url, build_id, sign_binaries
ci.push_mirror_contents(env, json_path, mirror_url, True)
ci.push_mirror_contents(concrete_spec, mirror_url, True)
buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1348,7 +1347,7 @@ def failing_access(*args, **kwargs):
# Input doesn't matter, as we are faking exceptional output
url = "fakejunk"
ci.push_mirror_contents(None, None, url, None)
ci.push_mirror_contents(None, url, None)
captured = capsys.readouterr()
std_out = captured[0]