gitlab: Do not use root_spec['pkg_name'] anymore (#33403)
* gitlab: Do not use root_spec['pkg_name'] anymore

  For a long time it was fine to index a concrete root spec with the name of a
  dependency in order to access the concrete dependency spec. Since pipelines
  started using `--use-buildcache dependencies:only,package:never` though, it
  has exposed a scheduling issue in how pipelines are generated. If a concrete
  root spec depends on two different hashes of `openssl` for example, indexing
  that root with just the package name is ambiguous, so we should no longer
  depend on that approach when scheduling jobs.

* env: make sure exactly one spec in env matches hash
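To make the ambiguity concrete: keying dependencies by package name can only hold one entry per name, while the label/hash keys the pipeline code generates keep distinct hashes apart. The following is a self-contained toy sketch (made-up names, versions, and hashes; not code from this commit):

    # Toy model only -- package names, versions, and hashes are made up.
    deps = [
        {"name": "openssl", "dag_hash": "abc1234", "spec": "openssl@1.1.1q"},
        {"name": "openssl", "dag_hash": "def5678", "spec": "openssl@3.0.5"},
    ]

    by_name = {d["name"]: d for d in deps}
    by_label = {"{0}/{1}".format(d["name"], d["dag_hash"][:7]): d for d in deps}

    print(len(by_name))   # 1 -- the second openssl silently replaced the first
    print(len(by_label))  # 2 -- each concrete dependency keeps its own entry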
@@ -43,7 +43,6 @@
 from spack.error import SpackError
 from spack.reporters.cdash import CDash
 from spack.reporters.cdash import build_stamp as cdash_build_stamp
-from spack.spec import Spec
 from spack.util.pattern import Bunch
 
 JOB_RETRY_CONDITIONS = [
@@ -143,13 +142,6 @@ def _get_spec_string(spec):
     return spec.format("".join(format_elements))
 
 
-def _format_root_spec(spec, main_phase, strip_compiler):
-    if main_phase is False and strip_compiler is True:
-        return "{0}@{1} arch={2}".format(spec.name, spec.version, spec.architecture)
-    else:
-        return spec.dag_hash()
-
-
 def _spec_deps_key(s):
     return "{0}/{1}".format(s.name, s.dag_hash(7))
 
@@ -175,8 +167,7 @@ def _get_spec_dependencies(
 
         for entry in specs:
             spec_labels[entry["label"]] = {
-                "spec": Spec(entry["spec"]),
-                "rootSpec": entry["root_spec"],
+                "spec": entry["spec"],
                 "needs_rebuild": entry["needs_rebuild"],
             }
 
@@ -203,7 +194,7 @@ def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
         and stages:
 
         spec_labels: A dictionary mapping the spec labels which are made of
-            (pkg-name/hash-prefix), to objects containing "rootSpec" and "spec"
+            (pkg-name/hash-prefix), to objects containing "spec" and "needs_rebuild"
             keys.  The root spec is the spec of which this spec is a dependency
             and the spec is the formatted spec string for this spec.
 
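For orientation, a spec_labels entry now looks roughly like this (a toy sketch using the made-up labels from the example below; not code from this commit):

    # Toy spec_labels mapping -- values are placeholders, not real Spec objects.
    spec_labels = {
        "readline/ip6aiun": {"spec": "<concrete Spec for readline>", "needs_rebuild": True},
        "ncurses/y43rifz": {"spec": "<concrete Spec for ncurses>", "needs_rebuild": False},
    }
    # The old "rootSpec" key is gone: each job is identified only by its
    # label (pkg-name/hash-prefix) and the DAG hash of the stored spec.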
@@ -318,17 +309,14 @@ def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None)
            ],
            "specs": [
                {
-                 "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
                  "spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-highs...",
                  "label": "readline/ip6aiun"
                },
                {
-                 "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
                  "spec": "ncurses@6.1%apple-clang@9.1.0 arch=darwin-highsi...",
                  "label": "ncurses/y43rifz"
                },
                {
-                 "root_spec": "readline@7.0%apple-clang@9.1.0 arch=darwin-...",
                  "spec": "pkgconf@1.5.4%apple-clang@9.1.0 arch=darwin-high...",
                  "label": "pkgconf/eg355zb"
                }
@@ -350,8 +338,6 @@ def append_dep(s, d):
         )
 
     for spec in spec_list:
-        root_spec = spec
-
         for s in spec.traverse(deptype=all):
             if s.external:
                 tty.msg("Will not stage external pkg: {0}".format(s))
@@ -363,8 +349,7 @@ def append_dep(s, d):
 
             skey = _spec_deps_key(s)
             spec_labels[skey] = {
-                "spec": _get_spec_string(s),
-                "root": root_spec,
+                "spec": s,
                 "needs_rebuild": not up_to_date_mirrors,
             }
 
@@ -381,7 +366,6 @@ def append_dep(s, d):
             {
                 "label": spec_label,
                 "spec": spec_holder["spec"],
-                "root_spec": spec_holder["root"],
                 "needs_rebuild": spec_holder["needs_rebuild"],
             }
         )
@@ -458,10 +442,6 @@ def _find_matching_config(spec, gitlab_ci):
     return runner_attributes if matched else None
 
 
-def _pkg_name_from_spec_label(spec_label):
-    return spec_label[: spec_label.index("/")]
-
-
 def _format_job_needs(
     phase_name,
     strip_compilers,
@@ -854,7 +834,6 @@ def generate_gitlab_ci_yaml(
         phase_name = phase["name"]
         strip_compilers = phase["strip-compilers"]
 
-        main_phase = _is_main_phase(phase_name)
         spec_labels, dependencies, stages = staged_phases[phase_name]
 
         for stage_jobs in stages:
@@ -864,9 +843,7 @@ def generate_gitlab_ci_yaml(
 
             for spec_label in stage_jobs:
                 spec_record = spec_labels[spec_label]
-                root_spec = spec_record["rootSpec"]
-                pkg_name = _pkg_name_from_spec_label(spec_label)
-                release_spec = root_spec[pkg_name]
+                release_spec = spec_record["spec"]
                 release_spec_dag_hash = release_spec.dag_hash()
 
                 if prune_untouched_packages:
@@ -936,7 +913,6 @@ def generate_gitlab_ci_yaml(
                         compiler_action = "INSTALL_MISSING"
 
                 job_vars = {
-                    "SPACK_ROOT_SPEC": _format_root_spec(root_spec, main_phase, strip_compilers),
                     "SPACK_JOB_SPEC_DAG_HASH": release_spec_dag_hash,
                     "SPACK_JOB_SPEC_PKG_NAME": release_spec.name,
                     "SPACK_COMPILER_ACTION": compiler_action,
@@ -953,9 +929,7 @@ def generate_gitlab_ci_yaml(
                         # purposes, so we only get the direct dependencies.
                         dep_jobs = []
                         for dep_label in dependencies[spec_label]:
-                            dep_pkg = _pkg_name_from_spec_label(dep_label)
-                            dep_root = spec_labels[dep_label]["rootSpec"]
-                            dep_jobs.append(dep_root[dep_pkg])
+                            dep_jobs.append(spec_labels[dep_label]["spec"])
 
                     job_dependencies.extend(
                         _format_job_needs(
@@ -1039,7 +1013,11 @@ def generate_gitlab_ci_yaml(
                             tty.debug(debug_msg)
 
                 if prune_dag and not rebuild_spec:
-                    tty.debug("Pruning {0}, does not need rebuild.".format(release_spec.name))
+                    tty.debug(
+                        "Pruning {0}/{1}, does not need rebuild.".format(
+                            release_spec.name, release_spec.dag_hash()
+                        )
+                    )
                     continue
 
                 if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
@@ -1482,64 +1460,6 @@ def configure_compilers(compiler_action, scope=None):
     return None
 
 
-def get_concrete_specs(env, root_spec, job_name, compiler_action):
-    """Build a dictionary of concrete specs relevant to a particular
-        rebuild job.  This includes the root spec and the spec to be
-        rebuilt (which could be the same).
-
-    Arguments:
-
-        env (spack.environment.Environment): Activated spack environment
-            used to get concrete root spec by hash in case compiler_action
-            is anthing other than FIND_ANY.
-        root_spec (str): If compiler_action is FIND_ANY root_spec is
-            a string representation which can be turned directly into
-            a spec, otherwise, it's a hash used to index the activated
-            spack environment.
-        job_name (str): Name of package to be built, used to index the
-            concrete root spec and produce the concrete spec to be
-            built.
-        compiler_action (str): Determines how to interpret the root_spec
-            parameter, either as a string representation as a hash.
-
-    Returns:
-
-    .. code-block:: JSON
-
-       {
-           "root": "<spec>",
-           "<job-pkg-name>": "<spec>",
-        }
-
-    """
-    spec_map = {
-        "root": None,
-    }
-
-    if compiler_action == "FIND_ANY":
-        # This corresponds to a bootstrapping phase where we need to
-        # rely on any available compiler to build the package (i.e. the
-        # compiler needed to be stripped from the spec when we generated
-        # the job), and thus we need to concretize the root spec again.
-        tty.debug("About to concretize {0}".format(root_spec))
-        concrete_root = Spec(root_spec).concretized()
-        tty.debug("Resulting concrete root: {0}".format(concrete_root))
-    else:
-        # in this case, either we're relying on Spack to install missing
-        # compiler bootstrapped in a previous phase, or else we only had one
-        # phase (like a site which already knows what compilers are available
-        # on it's runners), so we don't want to concretize that root spec
-        # again.  The reason we take this path in the first case (bootstrapped
-        # compiler), is that we can't concretize a spec at this point if we're
-        # going to ask spack to "install_missing_compilers".
-        concrete_root = env.specs_by_hash[root_spec]
-
-    spec_map["root"] = concrete_root
-    spec_map[job_name] = concrete_root[job_name]
-
-    return spec_map
-
-
 def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
     """Unchecked version of the public API, for easier mocking"""
     unsigned = not sign_binaries

@@ -277,8 +277,8 @@ def ci_rebuild(args):
     ci_pipeline_id = get_env_var("CI_PIPELINE_ID")
     ci_job_name = get_env_var("CI_JOB_NAME")
     signing_key = get_env_var("SPACK_SIGNING_KEY")
-    root_spec = get_env_var("SPACK_ROOT_SPEC")
     job_spec_pkg_name = get_env_var("SPACK_JOB_SPEC_PKG_NAME")
+    job_spec_dag_hash = get_env_var("SPACK_JOB_SPEC_DAG_HASH")
     compiler_action = get_env_var("SPACK_COMPILER_ACTION")
     spack_pipeline_type = get_env_var("SPACK_PIPELINE_TYPE")
     remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
@@ -297,7 +297,6 @@ def ci_rebuild(args):
 
     # Debug print some of the key environment variables we should have received
     tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
-    tty.debug("root_spec = {0}".format(root_spec))
     tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
     tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
     tty.debug("compiler_action = {0}".format(compiler_action))
@@ -360,10 +359,11 @@ def ci_rebuild(args):
             mirror_msg = "artifact buildcache enabled, mirror url: {0}".format(pipeline_mirror_url)
             tty.debug(mirror_msg)
 
-    # Whatever form of root_spec we got, use it to get a map giving us concrete
-    # specs for this job and all of its dependencies.
-    spec_map = spack_ci.get_concrete_specs(env, root_spec, job_spec_pkg_name, compiler_action)
-    job_spec = spec_map[job_spec_pkg_name]
+    # Get the concrete spec to be built by this job.
+    try:
+        job_spec = env.get_one_by_hash(job_spec_dag_hash)
+    except AssertionError:
+        tty.die("Could not find environment spec with hash {0}".format(job_spec_dag_hash))
 
     job_spec_json_file = "{0}.json".format(job_spec_pkg_name)
     job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
@@ -427,17 +427,11 @@ def ci_rebuild(args):
     with open(job_spec_json_path, "w") as fd:
         fd.write(job_spec.to_json(hash=ht.dag_hash))
 
-    # Write the concrete root spec json into the reproduction directory
-    root_spec_json_path = os.path.join(repro_dir, "root.json")
-    with open(root_spec_json_path, "w") as fd:
-        fd.write(spec_map["root"].to_json(hash=ht.dag_hash))
-
     # Write some other details to aid in reproduction into an artifact
     repro_file = os.path.join(repro_dir, "repro.json")
     repro_details = {
         "job_name": ci_job_name,
         "job_spec_json": job_spec_json_file,
-        "root_spec_json": "root.json",
         "ci_project_dir": ci_project_dir,
     }
     with open(repro_file, "w") as fd:

@@ -1819,6 +1819,14 @@ def get_by_hash(self, dag_hash):
                     matches[dep_hash] = spec
         return list(matches.values())
 
+    def get_one_by_hash(self, dag_hash):
+        """Returns the single spec from the environment which matches the
+        provided hash.  Raises an AssertionError if no specs match or if
+        more than one spec matches."""
+        hash_matches = self.get_by_hash(dag_hash)
+        assert len(hash_matches) == 1
+        return hash_matches[0]
+
     def matching_spec(self, spec):
         """
         Given a spec (likely not concretized), find a matching concretized

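A rough usage sketch of the new Environment.get_one_by_hash helper, mirroring how ci_rebuild resolves its job spec above (the hash value is hypothetical, and this assumes a Spack checkout with an activated environment whose lockfile contains that spec):

    # Sketch only -- not code from this commit.
    import spack.environment as ev

    env = ev.active_environment()  # None if no environment is active
    job_spec_dag_hash = "y43rifz..."  # hypothetical; in CI this comes from SPACK_JOB_SPEC_DAG_HASH

    try:
        # get_one_by_hash() asserts that exactly one spec in the
        # environment matches the given hash.
        job_spec = env.get_one_by_hash(job_spec_dag_hash)
    except AssertionError:
        raise SystemExit("no unique spec with hash {0}".format(job_spec_dag_hash))

    print(job_spec.name, job_spec.dag_hash())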
@@ -83,37 +83,6 @@ def assert_present(config):
     assert_present(last_config)
 
 
-@pytest.mark.skipif(sys.platform == "win32", reason="Not supported on Windows (yet)")
-def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages):
-    e = ev.create("test1")
-    e.add("dyninst")
-    e.concretize()
-
-    dyninst_hash = None
-    hash_dict = {}
-
-    with e as active_env:
-        for s in active_env.all_specs():
-            hash_dict[s.name] = s.dag_hash()
-            if s.name == "dyninst":
-                dyninst_hash = s.dag_hash()
-
-        assert dyninst_hash
-
-        spec_map = ci.get_concrete_specs(active_env, dyninst_hash, "dyninst", "NONE")
-        assert "root" in spec_map
-
-        concrete_root = spec_map["root"]
-        assert concrete_root.dag_hash() == dyninst_hash
-
-        s = spec.Spec("dyninst")
-        print("nonconc spec name: {0}".format(s.name))
-
-        spec_map = ci.get_concrete_specs(active_env, s.name, s.name, "FIND_ANY")
-
-        assert "root" in spec_map
-
-
 class FakeWebResponder(object):
     def __init__(self, response_code=200, content_to_read=[]):
         self._resp_code = response_code

@@ -885,7 +885,6 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
             "SPACK_CONCRETE_ENV_DIR": rebuild_env.env_dir.strpath,
             "CI_PIPELINE_ID": "7192",
             "SPACK_SIGNING_KEY": _signing_key(),
-            "SPACK_ROOT_SPEC": rebuild_env.root_spec_dag_hash,
             "SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash,
             "SPACK_JOB_SPEC_PKG_NAME": pkg_name,
             "SPACK_COMPILER_ACTION": "NONE",
@@ -1084,7 +1083,6 @@ def test_ci_nothing_to_rebuild(
                     "SPACK_JOB_TEST_DIR": "test_dir",
                     "SPACK_LOCAL_MIRROR_DIR": mirror_dir.strpath,
                     "SPACK_CONCRETE_ENV_DIR": tmpdir.strpath,
-                    "SPACK_ROOT_SPEC": root_spec_dag_hash,
                     "SPACK_JOB_SPEC_DAG_HASH": root_spec_dag_hash,
                     "SPACK_JOB_SPEC_PKG_NAME": "archive-files",
                     "SPACK_COMPILER_ACTION": "NONE",
@@ -1243,8 +1241,7 @@ def test_push_mirror_contents(
     with tmpdir.as_cwd():
         env_cmd("create", "test", "./spack.yaml")
         with ev.read("test") as env:
-            spec_map = ci.get_concrete_specs(env, "patchelf", "patchelf", "FIND_ANY")
-            concrete_spec = spec_map["patchelf"]
+            concrete_spec = Spec("patchelf").concretized()
             spec_json = concrete_spec.to_json(hash=ht.dag_hash)
             json_path = str(tmpdir.join("spec.json"))
             with open(json_path, "w") as ypfd:
@@ -1605,9 +1602,8 @@ def test_ci_rebuild_index(
 
     with tmpdir.as_cwd():
         env_cmd("create", "test", "./spack.yaml")
-        with ev.read("test") as env:
-            spec_map = ci.get_concrete_specs(env, "callpath", "callpath", "FIND_ANY")
-            concrete_spec = spec_map["callpath"]
+        with ev.read("test"):
+            concrete_spec = Spec("callpath").concretized()
             spec_json = concrete_spec.to_json(hash=ht.dag_hash)
             json_path = str(tmpdir.join("spec.json"))
             with open(json_path, "w") as ypfd:
@@ -2143,22 +2139,16 @@ def test_ci_reproduce(
             shutil.copyfile(env.manifest_path, os.path.join(working_dir.strpath, "spack.yaml"))
             shutil.copyfile(env.lock_path, os.path.join(working_dir.strpath, "spack.lock"))
 
-            root_spec = None
             job_spec = None
 
             for h, s in env.specs_by_hash.items():
                 if s.name == "archive-files":
-                    root_spec = s
                     job_spec = s
 
             job_spec_json_path = os.path.join(working_dir.strpath, "archivefiles.json")
             with open(job_spec_json_path, "w") as fd:
                 fd.write(job_spec.to_json(hash=ht.dag_hash))
 
-            root_spec_json_path = os.path.join(working_dir.strpath, "root.json")
-            with open(root_spec_json_path, "w") as fd:
-                fd.write(root_spec.to_json(hash=ht.dag_hash))
-
             artifacts_root = os.path.join(working_dir.strpath, "scratch_dir")
             pipeline_path = os.path.join(artifacts_root, "pipeline.yml")
 
@@ -2170,7 +2160,6 @@ def test_ci_reproduce(
             repro_details = {
                 "job_name": job_name,
                 "job_spec_json": "archivefiles.json",
-                "root_spec_json": "root.json",
                 "ci_project_dir": working_dir.strpath,
             }
             with open(repro_file, "w") as fd:
Scott Wittenburg