diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index bc3b0089a70..ceba2767bca 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -872,6 +872,8 @@ def _fetch_spec_from_mirror(spec_url): return Spec.from_dict(specfile_json) if spec_url.endswith(".json"): return Spec.from_json(spec_file_contents) + if spec_url.endswith(".yaml"): + return Spec.from_yaml(spec_file_contents) tp = multiprocessing.pool.ThreadPool(processes=concurrency) try: @@ -946,6 +948,8 @@ def file_read_method(file_path): "*.spec.json.sig", "--include", "*.spec.json", + "--include", + "*.spec.yaml", cache_prefix, tmpspecsdir, ] @@ -955,7 +959,7 @@ def file_read_method(file_path): "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir) ) aws(*sync_command_args, output=os.devnull, error=os.devnull) - file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"]) + file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"]) read_fn = file_read_method except Exception: tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch") @@ -991,7 +995,9 @@ def url_read_method(url): file_list = [ url_util.join(cache_prefix, entry) for entry in web_util.list_url(cache_prefix) - if entry.endswith("spec.json") or entry.endswith("spec.json.sig") + if entry.endswith(".yaml") + or entry.endswith("spec.json") + or entry.endswith("spec.json.sig") ] read_fn = url_read_method except KeyError as inst: @@ -1053,6 +1059,14 @@ def generate_package_index(cache_prefix, concurrency=32): tty.error("Unable to generate package index, {0}".format(err)) return + if any(x.endswith(".yaml") for x in file_list): + msg = ( + "The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for " + "this format will be removed in v0.20, please regenerate the build cache with a " + "recent Spack\n" + ).format(cache_prefix) + warnings.warn(msg) + 
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix)) tmpdir = tempfile.mkdtemp() @@ -1179,11 +1193,15 @@ def _build_tarball( specfile_name = tarball_name(spec, ".spec.json") specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name)) signed_specfile_path = "{0}.sig".format(specfile_path) + deprecated_specfile_path = specfile_path.replace(".spec.json", ".spec.yaml") remote_specfile_path = url_util.join( out_url, os.path.relpath(specfile_path, os.path.realpath(tmpdir)) ) remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path) + remote_specfile_path_deprecated = url_util.join( + out_url, os.path.relpath(deprecated_specfile_path, os.path.realpath(tmpdir)) + ) # If force and exists, overwrite. Otherwise raise exception on collision. if force: @@ -1191,8 +1209,12 @@ def _build_tarball( web_util.remove_url(remote_specfile_path) if web_util.url_exists(remote_signed_specfile_path): web_util.remove_url(remote_signed_specfile_path) - elif web_util.url_exists(remote_specfile_path) or web_util.url_exists( - remote_signed_specfile_path + if web_util.url_exists(remote_specfile_path_deprecated): + web_util.remove_url(remote_specfile_path_deprecated) + elif ( + web_util.url_exists(remote_specfile_path) + or web_util.url_exists(remote_signed_specfile_path) + or web_util.url_exists(remote_specfile_path_deprecated) ): raise NoOverwriteException(url_util.format(remote_specfile_path)) @@ -1248,10 +1270,12 @@ def _build_tarball( with open(spec_file, "r") as inputfile: content = inputfile.read() - if spec_file.endswith(".json"): + if spec_file.endswith(".yaml"): + spec_dict = yaml.load(content) + elif spec_file.endswith(".json"): spec_dict = sjson.load(content) else: - raise ValueError("{0} not a valid spec file type".format(spec_file)) + raise ValueError("{0} not a valid spec file type (json or yaml)".format(spec_file)) spec_dict["buildcache_layout_version"] = 1 bchecksum = {} bchecksum["hash_algorithm"] = "sha256" @@ 
-1472,7 +1496,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): # Assumes we care more about finding a spec file by preferred ext # than by mirrory priority. This can be made less complicated as # we remove support for deprecated spec formats and buildcache layouts. - for ext in ["json.sig", "json"]: + for ext in ["json.sig", "json", "yaml"]: for mirror_to_try in mirrors_to_try: specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext) spackfile_url = mirror_to_try["spackfile"] @@ -1509,6 +1533,13 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): # the remaining mirrors, looking for one we can use. tarball_stage = try_fetch(spackfile_url) if tarball_stage: + if ext == "yaml": + msg = ( + "Reading {} from mirror.\n\n\tThe YAML format for buildcaches is " + "deprecated and will be removed in v0.20\n" + ).format(spackfile_url) + warnings.warn(msg) + return { "tarball_stage": tarball_stage, "specfile_stage": local_specfile_stage, @@ -1748,6 +1779,8 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum spackfile_path = os.path.join(stagepath, spackfile_name) tarfile_name = tarball_name(spec, ".tar.gz") tarfile_path = os.path.join(extract_to, tarfile_name) + deprecated_yaml_name = tarball_name(spec, ".spec.yaml") + deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name) json_name = tarball_name(spec, ".spec.json") json_path = os.path.join(extract_to, json_name) with closing(tarfile.open(spackfile_path, "r")) as tar: @@ -1759,6 +1792,8 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum if os.path.exists(json_path): specfile_path = json_path + elif os.path.exists(deprecated_yaml_path): + specfile_path = deprecated_yaml_path else: raise ValueError("Cannot find spec file for {0}.".format(extract_to)) @@ -1805,8 +1840,10 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for content = inputfile.read() if 
specfile_path.endswith(".json.sig"): spec_dict = Spec.extract_json_from_clearsig(content) - else: + elif specfile_path.endswith(".json"): spec_dict = sjson.load(content) + else: + spec_dict = syaml.load(content) bchecksum = spec_dict["binary_cache_checksum"] filename = download_result["tarball_stage"].save_filename @@ -1818,7 +1855,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for or int(spec_dict["buildcache_layout_version"]) < 1 ): # Handle the older buildcache layout where the .spack file - # contains a spec json, maybe an .asc file (signature), + # contains a spec json/yaml, maybe an .asc file (signature), # and another tarball containing the actual install tree. tmpdir = tempfile.mkdtemp() try: @@ -1969,12 +2006,17 @@ def try_direct_fetch(spec, mirrors=None): """ Try to find the spec directly on the configured mirrors """ + deprecated_specfile_name = tarball_name(spec, ".spec.yaml") specfile_name = tarball_name(spec, ".spec.json") signed_specfile_name = tarball_name(spec, ".spec.json.sig") specfile_is_signed = False + specfile_is_json = True found_specs = [] for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values(): + buildcache_fetch_url_yaml = url_util.join( + mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name + ) buildcache_fetch_url_json = url_util.join( mirror.fetch_url, _build_cache_relative_path, specfile_name ) @@ -1988,19 +2030,28 @@ def try_direct_fetch(spec, mirrors=None): try: _, _, fs = web_util.read_from_url(buildcache_fetch_url_json) except (URLError, web_util.SpackWebError, HTTPError) as url_err_x: - tty.debug( - "Did not find {0} on {1}".format( - specfile_name, buildcache_fetch_url_signed_json - ), - url_err, - level=2, - ) - tty.debug( - "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json), - url_err_x, - level=2, - ) - continue + try: + _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml) + specfile_is_json = False + except (URLError, 
web_util.SpackWebError, HTTPError) as url_err_y: + tty.debug( + "Did not find {0} on {1}".format( + specfile_name, buildcache_fetch_url_signed_json + ), + url_err, + level=2, + ) + tty.debug( + "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json), + url_err_x, + level=2, + ) + tty.debug( + "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_yaml), + url_err_y, + level=2, + ) + continue specfile_contents = codecs.getreader("utf-8")(fs).read() # read the spec from the build cache file. All specs in build caches @@ -2009,8 +2060,10 @@ def try_direct_fetch(spec, mirrors=None): if specfile_is_signed: specfile_json = Spec.extract_json_from_clearsig(specfile_contents) fetched_spec = Spec.from_dict(specfile_json) - else: + elif specfile_is_json: fetched_spec = Spec.from_json(specfile_contents) + else: + fetched_spec = Spec.from_yaml(specfile_contents) fetched_spec._mark_concrete() found_specs.append( @@ -2221,7 +2274,7 @@ def needs_rebuild(spec, mirror_url): specfile_path = os.path.join(cache_prefix, specfile_name) # Only check for the presence of the json version of the spec. If the - # mirror only has the json version, or doesn't have the spec at all, we + # mirror only has the yaml version, or doesn't have the spec at all, we # need to rebuild. 
return not web_util.url_exists(specfile_path) @@ -2329,6 +2382,7 @@ def download_single_spec(concrete_spec, destination, mirror_url=None): "url": [ tarball_name(concrete_spec, ".spec.json.sig"), tarball_name(concrete_spec, ".spec.json"), + tarball_name(concrete_spec, ".spec.yaml"), ], "path": destination, "required": True, diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index dc9003d42ad..8497cf18415 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -6,6 +6,7 @@ import io import os import platform +import shutil import sys import urllib.error import urllib.request @@ -71,6 +72,16 @@ def test_mirror(mirror_dir): mirror_cmd("rm", "--scope=site", "test-mirror-func") +@pytest.fixture(scope="function") +def test_legacy_mirror(mutable_config, tmpdir): + mirror_dir = tmpdir.join("legacy_yaml_mirror") + shutil.copytree(legacy_mirror_dir, mirror_dir.strpath) + mirror_url = "file://%s" % mirror_dir + mirror_cmd("add", "--scope", "site", "test-legacy-yaml", mirror_url) + yield mirror_dir + mirror_cmd("rm", "--scope=site", "test-legacy-yaml") + + @pytest.fixture(scope="module") def config_directory(tmpdir_factory): tmpdir = tmpdir_factory.mktemp("test_configs") @@ -574,6 +585,19 @@ def test_update_sbang(tmpdir, test_mirror): uninstall_cmd("-y", "/%s" % new_spec.dag_hash()) +# Need one where the platform has been changed to the test platform. +def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config, mock_packages): + install_cmd( + "--no-check-signature", + "--cache-only", + "-f", + legacy_mirror_dir + + "/build_cache/test-debian6-core2-gcc-4.5.0-zlib-" + + "1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk.spec.yaml", + ) + uninstall_cmd("-y", "/t5mczux3tfqpxwmg7egp7axy2jvyulqk") + + def test_install_legacy_buildcache_layout(install_mockery_mutable_config): """Legacy buildcache layout involved a nested archive structure where the .spack file contained a repeated spec.json and another