ci: remove compiler bootstrapping code (#38543)

Scott Wittenburg 2023-06-27 09:35:19 -06:00 committed by GitHub
parent 41582f76bd
commit b6b33cfe7a
5 changed files with 205 additions and 754 deletions

View File

@@ -28,7 +28,6 @@
import spack
import spack.binary_distribution as bindist
import spack.compilers as compilers
import spack.config as cfg
import spack.environment as ev
import spack.main
@@ -70,17 +69,10 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
return False
def _is_main_phase(phase_name):
return True if phase_name == "specs" else False
def get_job_name(phase, strip_compiler, spec, osarch, build_group):
def get_job_name(spec, osarch, build_group):
"""Given the necessary parts, format the gitlab job name
Arguments:
phase (str): Either 'specs' for the main phase, or the name of a
bootstrapping phase
strip_compiler (bool): Should compiler be stripped from job name
spec (spack.spec.Spec): Spec job will build
osarch: Architecture TODO: (this is a spack.spec.ArchSpec,
but sphinx doesn't recognize the type and fails).
@@ -93,12 +85,7 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
format_str = ""
format_args = []
if phase:
format_str += "({{{0}}})".format(item_idx)
format_args.append(phase)
item_idx += 1
format_str += " {{{0}}}".format(item_idx)
format_str += "{{{0}}}".format(item_idx)
format_args.append(spec.name)
item_idx += 1
@@ -110,10 +97,9 @@ def get_job_name(phase, strip_compiler, spec, osarch, build_group):
format_args.append(spec.version)
item_idx += 1
if _is_main_phase(phase) is True or strip_compiler is False:
format_str += " {{{0}}}".format(item_idx)
format_args.append(spec.compiler)
item_idx += 1
format_str += " {{{0}}}".format(item_idx)
format_args.append(spec.compiler)
item_idx += 1
format_str += " {{{0}}}".format(item_idx)
format_args.append(osarch)
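
With the bootstrap phases gone, the job name no longer carries a "(phase)" prefix or a conditional compiler segment. A hedged, standalone sketch of the simplified naming scheme follows; the field order mirrors the format_str logic above (name, version, compiler, architecture), while the build_group handling is an assumption not shown in this hunk:

def sketch_job_name(spec_name, version, compiler, osarch, build_group=None):
    # Assemble the space-separated fields visible in the format_str logic above.
    parts = [str(spec_name), str(version), str(compiler), str(osarch)]
    if build_group:
        # Hypothetical: the real treatment of build_group is outside this hunk.
        parts.append(str(build_group))
    return " ".join(parts)

print(sketch_job_name("zlib", "1.2.13", "gcc@11.3.0", "linux-ubuntu20.04-x86_64"))
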
@@ -371,14 +357,7 @@ def _spec_matches(spec, match_string):
def _format_job_needs(
phase_name,
strip_compilers,
dep_jobs,
osname,
build_group,
prune_dag,
stage_spec_dict,
enable_artifacts_buildcache,
dep_jobs, osname, build_group, prune_dag, stage_spec_dict, enable_artifacts_buildcache
):
needs_list = []
for dep_job in dep_jobs:
@@ -388,9 +367,7 @@ def _format_job_needs(
if not prune_dag or dep_spec_info["needs_rebuild"]:
needs_list.append(
{
"job": get_job_name(
phase_name, strip_compilers, dep_job, dep_job.architecture, build_group
),
"job": get_job_name(dep_job, dep_job.architecture, build_group),
"artifacts": enable_artifacts_buildcache,
}
)
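
The slimmed-down helper keeps the same filtering rule: a dependency lands in "needs" unless DAG pruning is enabled and that dependency does not itself need a rebuild. A minimal, self-contained sketch of the rule (plain strings and a dict stand in for the real Spec objects and staging data):

def sketch_format_job_needs(dep_jobs, prune_dag, needs_rebuild, enable_artifacts_buildcache):
    # dep_jobs: list of dependency job names; needs_rebuild: job name -> bool
    needs_list = []
    for dep_job in dep_jobs:
        if not prune_dag or needs_rebuild.get(dep_job, True):
            needs_list.append({"job": dep_job, "artifacts": enable_artifacts_buildcache})
    return needs_list

print(sketch_format_job_needs(["libelf", "libdwarf"], True, {"libelf": True, "libdwarf": False}, False))
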
@@ -490,17 +467,13 @@ def get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None):
return affected_specs
def _build_jobs(phases, staged_phases):
for phase in phases:
phase_name = phase["name"]
spec_labels, dependencies, stages = staged_phases[phase_name]
for stage_jobs in stages:
for spec_label in stage_jobs:
spec_record = spec_labels[spec_label]
release_spec = spec_record["spec"]
release_spec_dag_hash = release_spec.dag_hash()
yield release_spec, release_spec_dag_hash
def _build_jobs(spec_labels, stages):
for stage_jobs in stages:
for spec_label in stage_jobs:
spec_record = spec_labels[spec_label]
release_spec = spec_record["spec"]
release_spec_dag_hash = release_spec.dag_hash()
yield release_spec, release_spec_dag_hash
def _noop(x):
@@ -524,9 +497,9 @@ class SpackCI:
used by the CI generator(s).
"""
def __init__(self, ci_config, phases, staged_phases):
def __init__(self, ci_config, spec_labels, stages):
"""Given the information from the ci section of the config
and the job phases setup meta data needed for generating Spack
and the staged jobs, set up meta data needed for generating Spack
CI IR.
"""
@@ -541,9 +514,6 @@ def __init__(self, ci_config, phases, staged_phases):
"enable-artifacts-buildcache": self.ci_config.get(
"enable-artifacts-buildcache", False
),
"bootstrap": self.ci_config.get(
"bootstrap", []
), # This is deprecated and should be removed
"rebuild-index": self.ci_config.get("rebuild-index", True),
"broken-specs-url": self.ci_config.get("broken-specs-url", None),
"broken-tests-packages": self.ci_config.get("broken-tests-packages", []),
@@ -551,7 +521,7 @@ def __init__(self, ci_config, phases, staged_phases):
}
jobs = self.ir["jobs"]
for spec, dag_hash in _build_jobs(phases, staged_phases):
for spec, dag_hash in _build_jobs(spec_labels, stages):
jobs[dag_hash] = self.__init_job(spec)
for name in self.named_jobs:
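
SpackCI is now constructed directly from the staged job data rather than from per-phase structures. A hedged illustration of the shapes it consumes (labels, hashes, and spec placeholders below are fabricated; the real values come from stage_spec_jobs and hold concrete spack.spec.Spec objects):

# spec_labels maps a job label to a record holding the concrete spec and its
# rebuild status; stages groups labels that can be scheduled together.
spec_labels = {
    "libelf/aaaaaaa": {"spec": "<concrete libelf Spec>", "needs_rebuild": True},
    "libdwarf/bbbbbbb": {"spec": "<concrete libdwarf Spec>", "needs_rebuild": False},
}
stages = [["libelf/aaaaaaa"], ["libdwarf/bbbbbbb"]]
# spack_ci = SpackCI(ci_config, spec_labels, stages)  # as called later in this diff
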
@@ -873,25 +843,6 @@ def generate_gitlab_ci_yaml(
if "temporary-storage-url-prefix" in ci_config:
temp_storage_url_prefix = ci_config["temporary-storage-url-prefix"]
bootstrap_specs = []
phases = []
if "bootstrap" in ci_config:
for phase in ci_config["bootstrap"]:
try:
phase_name = phase.get("name")
strip_compilers = phase.get("compiler-agnostic")
except AttributeError:
phase_name = phase
strip_compilers = False
phases.append({"name": phase_name, "strip-compilers": strip_compilers})
for bs in env.spec_lists[phase_name]:
bootstrap_specs.append(
{"spec": bs, "phase-name": phase_name, "strip-compilers": strip_compilers}
)
phases.append({"name": "specs", "strip-compilers": False})
# If a remote mirror override (alternate buildcache destination) was
# specified, add it here in case it has already built hashes we might
# generate.
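
The deleted block above consumed the now-removed ci:bootstrap configuration, where each entry was either a bare definition-list name or an object that could mark the phase compiler-agnostic. A context-only sketch of that parsing rule (this flow no longer exists after this commit):

def sketch_parse_bootstrap_entry(phase):
    # Object entries carry a name and an optional compiler-agnostic flag;
    # plain strings fall back to (name, False) via the AttributeError path.
    try:
        return phase.get("name"), bool(phase.get("compiler-agnostic"))
    except AttributeError:
        return phase, False

assert sketch_parse_bootstrap_entry("bootstrap") == ("bootstrap", False)
assert sketch_parse_bootstrap_entry({"name": "bootstrap", "compiler-agnostic": True}) == ("bootstrap", True)
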
@@ -993,33 +944,17 @@ def generate_gitlab_ci_yaml(
except bindist.FetchCacheError as e:
tty.warn(e)
staged_phases = {}
try:
for phase in phases:
phase_name = phase["name"]
if phase_name == "specs":
# Anything in the "specs" of the environment are already
# concretized by the block at the top of this method, so we
# only need to find the concrete versions, and then avoid
# re-concretizing them needlessly later on.
concrete_phase_specs = [
concrete
for abstract, concrete in env.concretized_specs()
if abstract in env.spec_lists[phase_name]
]
else:
# Any specs lists in other definitions (but not in the
# "specs") of the environment are not yet concretized so we
# have to concretize them explicitly here.
concrete_phase_specs = env.spec_lists[phase_name]
with spack.concretize.disable_compiler_existence_check():
for phase_spec in concrete_phase_specs:
phase_spec.concretize()
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check,
)
concrete_env_specs = [
concrete
for abstract, concrete in env.concretized_specs()
if abstract in env.spec_lists["specs"]
]
spec_labels, dependencies, stages = stage_spec_jobs(
concrete_env_specs,
check_index_only=check_index_only,
mirrors_to_check=mirrors_to_check,
)
finally:
# Clean up remote mirror override if enabled
if remote_mirror_override:
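
The replacement stages only the environment's user "specs", which are already concrete at this point, so no extra concretization pass is needed. A hedged, standalone analogue of the selection above (the helper name is made up; env.concretized_specs() really yields (abstract, concrete) pairs):

def select_user_specs(concretized_pairs, user_spec_list):
    # Keep the concrete spec for every abstract spec that came from the
    # environment's "specs" list, mirroring the comprehension above.
    return [concrete for abstract, concrete in concretized_pairs if abstract in user_spec_list]

pairs = [("zlib", "zlib@1.2.13 (concrete)"), ("cmake", "cmake@3.26 (concrete)")]
print(select_user_specs(pairs, ["zlib"]))  # -> ['zlib@1.2.13 (concrete)']
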
@@ -1048,276 +983,189 @@ def generate_gitlab_ci_yaml(
else:
broken_spec_urls = web_util.list_url(broken_specs_url)
spack_ci = SpackCI(ci_config, phases, staged_phases)
spack_ci = SpackCI(ci_config, spec_labels, stages)
spack_ci_ir = spack_ci.generate_ir()
for phase in phases:
phase_name = phase["name"]
strip_compilers = phase["strip-compilers"]
for stage_jobs in stages:
stage_name = "stage-{0}".format(stage_id)
stage_names.append(stage_name)
stage_id += 1
spec_labels, dependencies, stages = staged_phases[phase_name]
for spec_label in stage_jobs:
spec_record = spec_labels[spec_label]
release_spec = spec_record["spec"]
release_spec_dag_hash = release_spec.dag_hash()
for stage_jobs in stages:
stage_name = "stage-{0}".format(stage_id)
stage_names.append(stage_name)
stage_id += 1
for spec_label in stage_jobs:
spec_record = spec_labels[spec_label]
release_spec = spec_record["spec"]
release_spec_dag_hash = release_spec.dag_hash()
if prune_untouched_packages:
if release_spec not in affected_specs:
tty.debug(
"Pruning {0}/{1}, untouched by change.".format(
release_spec.name, release_spec.dag_hash()[:7]
)
)
spec_record["needs_rebuild"] = False
continue
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
if not job_object:
tty.warn("No match found for {0}, skipping it".format(release_spec))
continue
if spack_pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
if spack_pipeline_type == "spack_protected_branch":
job_object["tags"].extend(["protected"])
elif spack_pipeline_type == "spack_pull_request":
job_object["tags"].extend(["public"])
if "script" not in job_object:
raise AttributeError
def main_script_replacements(cmd):
return cmd.replace("{env_dir}", rel_concrete_env_dir)
job_object["script"] = _unpack_script(
job_object["script"], op=main_script_replacements
)
if "before_script" in job_object:
job_object["before_script"] = _unpack_script(job_object["before_script"])
if "after_script" in job_object:
job_object["after_script"] = _unpack_script(job_object["after_script"])
osname = str(release_spec.architecture)
job_name = get_job_name(
phase_name, strip_compilers, release_spec, osname, build_group
)
compiler_action = "NONE"
if len(phases) > 1:
compiler_action = "FIND_ANY"
if _is_main_phase(phase_name):
compiler_action = "INSTALL_MISSING"
job_vars = job_object.setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
job_vars["SPACK_COMPILER_ACTION"] = compiler_action
job_object["needs"] = []
if spec_label in dependencies:
if enable_artifacts_buildcache:
# Get dependencies transitively, so they're all
# available in the artifacts buildcache.
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
else:
# In this case, "needs" is only used for scheduling
# purposes, so we only get the direct dependencies.
dep_jobs = []
for dep_label in dependencies[spec_label]:
dep_jobs.append(spec_labels[dep_label]["spec"])
job_object["needs"].extend(
_format_job_needs(
phase_name,
strip_compilers,
dep_jobs,
osname,
build_group,
prune_dag,
spec_labels,
enable_artifacts_buildcache,
)
)
rebuild_spec = spec_record["needs_rebuild"]
# This next section helps gitlab make sure the right
# bootstrapped compiler exists in the artifacts buildcache by
# creating an artificial dependency between this spec and its
# compiler. So, if we are in the main phase, and if the
# compiler we are supposed to use is listed in any of the
# bootstrap spec lists, then we will add more dependencies to
# the job (that compiler and maybe its dependencies as well).
if _is_main_phase(phase_name):
spec_arch_family = release_spec.architecture.target.microarchitecture.family
compiler_pkg_spec = compilers.pkg_spec_for_compiler(release_spec.compiler)
for bs in bootstrap_specs:
c_spec = bs["spec"]
bs_arch = c_spec.architecture
bs_arch_family = bs_arch.target.microarchitecture.family
if (
c_spec.intersects(compiler_pkg_spec)
and bs_arch_family == spec_arch_family
):
# We found the bootstrap compiler this release spec
# should be built with, so for DAG scheduling
# purposes, we will at least add the compiler spec
# to the jobs "needs". But if artifact buildcache
# is enabled, we'll have to add all transitive deps
# of the compiler as well.
# Here we check whether the bootstrapped compiler
# needs to be rebuilt. Until compilers are proper
# dependencies, we artificially force the spec to
# be rebuilt if the compiler targeted to build it
# needs to be rebuilt.
bs_specs, _, _ = staged_phases[bs["phase-name"]]
c_spec_key = _spec_deps_key(c_spec)
rbld_comp = bs_specs[c_spec_key]["needs_rebuild"]
rebuild_spec = rebuild_spec or rbld_comp
# Also update record so dependents do not fail to
# add this spec to their "needs"
spec_record["needs_rebuild"] = rebuild_spec
dep_jobs = [c_spec]
if enable_artifacts_buildcache:
dep_jobs = [d for d in c_spec.traverse(deptype=all)]
job_object["needs"].extend(
_format_job_needs(
bs["phase-name"],
bs["strip-compilers"],
dep_jobs,
str(bs_arch),
build_group,
prune_dag,
bs_specs,
enable_artifacts_buildcache,
)
)
else:
debug_msg = "".join(
[
"Considered compiler {0} for spec ",
"{1}, but rejected it either because it was ",
"not the compiler required by the spec, or ",
"because the target arch families of the ",
"spec and the compiler did not match",
]
).format(c_spec, release_spec)
tty.debug(debug_msg)
if prune_dag and not rebuild_spec and not copy_only_pipeline:
if prune_untouched_packages:
if release_spec not in affected_specs:
tty.debug(
"Pruning {0}/{1}, does not need rebuild.".format(
release_spec.name, release_spec.dag_hash()
"Pruning {0}/{1}, untouched by change.".format(
release_spec.name, release_spec.dag_hash()[:7]
)
)
spec_record["needs_rebuild"] = False
continue
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
known_broken_specs_encountered.append(release_spec_dag_hash)
job_object = spack_ci_ir["jobs"][release_spec_dag_hash]["attributes"]
# Only keep track of these if we are copying rebuilt cache entries
if spack_buildcache_copy:
# TODO: This assumes signed version of the spec
buildcache_copies[release_spec_dag_hash] = [
{
"src": url_util.join(
buildcache_copy_src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
"dest": url_util.join(
buildcache_copy_dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
},
{
"src": url_util.join(
buildcache_copy_src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
"dest": url_util.join(
buildcache_copy_dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
},
]
if not job_object:
tty.warn("No match found for {0}, skipping it".format(release_spec))
continue
if artifacts_root:
job_object["needs"].append(
{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
if spack_pipeline_type is not None:
# For spack pipelines "public" and "protected" are reserved tags
job_object["tags"] = _remove_reserved_tags(job_object.get("tags", []))
if spack_pipeline_type == "spack_protected_branch":
job_object["tags"].extend(["protected"])
elif spack_pipeline_type == "spack_pull_request":
job_object["tags"].extend(["public"])
if "script" not in job_object:
raise AttributeError
def main_script_replacements(cmd):
return cmd.replace("{env_dir}", rel_concrete_env_dir)
job_object["script"] = _unpack_script(
job_object["script"], op=main_script_replacements
)
if "before_script" in job_object:
job_object["before_script"] = _unpack_script(job_object["before_script"])
if "after_script" in job_object:
job_object["after_script"] = _unpack_script(job_object["after_script"])
osname = str(release_spec.architecture)
job_name = get_job_name(release_spec, osname, build_group)
job_vars = job_object.setdefault("variables", {})
job_vars["SPACK_JOB_SPEC_DAG_HASH"] = release_spec_dag_hash
job_vars["SPACK_JOB_SPEC_PKG_NAME"] = release_spec.name
job_object["needs"] = []
if spec_label in dependencies:
if enable_artifacts_buildcache:
# Get dependencies transitively, so they're all
# available in the artifacts buildcache.
dep_jobs = [d for d in release_spec.traverse(deptype=all, root=False)]
else:
# In this case, "needs" is only used for scheduling
# purposes, so we only get the direct dependencies.
dep_jobs = []
for dep_label in dependencies[spec_label]:
dep_jobs.append(spec_labels[dep_label]["spec"])
job_object["needs"].extend(
_format_job_needs(
dep_jobs,
osname,
build_group,
prune_dag,
spec_labels,
enable_artifacts_buildcache,
)
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
if cdash_handler:
cdash_handler.current_spec = release_spec
build_name = cdash_handler.build_name
all_job_names.append(build_name)
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
build_stamp = cdash_handler.build_stamp
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
job_object["artifacts"] = spack.config.merge_yaml(
job_object.get("artifacts", {}),
{
"when": "always",
"paths": [
rel_job_log_dir,
rel_job_repro_dir,
rel_job_test_dir,
rel_user_artifacts_dir,
],
},
)
if enable_artifacts_buildcache:
bc_root = os.path.join(local_mirror_dir, "build_cache")
job_object["artifacts"]["paths"].extend(
[
os.path.join(bc_root, p)
for p in [
bindist.tarball_name(release_spec, ".spec.json"),
bindist.tarball_directory_name(release_spec),
]
]
rebuild_spec = spec_record["needs_rebuild"]
if prune_dag and not rebuild_spec and not copy_only_pipeline:
tty.debug(
"Pruning {0}/{1}, does not need rebuild.".format(
release_spec.name, release_spec.dag_hash()
)
)
continue
job_object["stage"] = stage_name
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
job_object["interruptible"] = True
if broken_spec_urls is not None and release_spec_dag_hash in broken_spec_urls:
known_broken_specs_encountered.append(release_spec_dag_hash)
length_needs = len(job_object["needs"])
if length_needs > max_length_needs:
max_length_needs = length_needs
max_needs_job = job_name
# Only keep track of these if we are copying rebuilt cache entries
if spack_buildcache_copy:
# TODO: This assumes signed version of the spec
buildcache_copies[release_spec_dag_hash] = [
{
"src": url_util.join(
buildcache_copy_src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
"dest": url_util.join(
buildcache_copy_dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_name(release_spec, ".spec.json.sig"),
),
},
{
"src": url_util.join(
buildcache_copy_src_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
"dest": url_util.join(
buildcache_copy_dest_prefix,
bindist.build_cache_relative_path(),
bindist.tarball_path_name(release_spec, ".spack"),
),
},
]
if not copy_only_pipeline:
output_object[job_name] = job_object
job_id += 1
if artifacts_root:
job_object["needs"].append(
{"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
)
job_vars["SPACK_SPEC_NEEDS_REBUILD"] = str(rebuild_spec)
if cdash_handler:
cdash_handler.current_spec = release_spec
build_name = cdash_handler.build_name
all_job_names.append(build_name)
job_vars["SPACK_CDASH_BUILD_NAME"] = build_name
build_stamp = cdash_handler.build_stamp
job_vars["SPACK_CDASH_BUILD_STAMP"] = build_stamp
job_object["artifacts"] = spack.config.merge_yaml(
job_object.get("artifacts", {}),
{
"when": "always",
"paths": [
rel_job_log_dir,
rel_job_repro_dir,
rel_job_test_dir,
rel_user_artifacts_dir,
],
},
)
if enable_artifacts_buildcache:
bc_root = os.path.join(local_mirror_dir, "build_cache")
job_object["artifacts"]["paths"].extend(
[
os.path.join(bc_root, p)
for p in [
bindist.tarball_name(release_spec, ".spec.json"),
bindist.tarball_directory_name(release_spec),
]
]
)
job_object["stage"] = stage_name
job_object["retry"] = {"max": 2, "when": JOB_RETRY_CONDITIONS}
job_object["interruptible"] = True
length_needs = len(job_object["needs"])
if length_needs > max_length_needs:
max_length_needs = length_needs
max_needs_job = job_name
if not copy_only_pipeline:
output_object[job_name] = job_object
job_id += 1
if print_summary:
for phase in phases:
phase_name = phase["name"]
tty.msg('Stages for phase "{0}"'.format(phase_name))
phase_stages = staged_phases[phase_name]
_print_staging_summary(*phase_stages)
_print_staging_summary(spec_labels, dependencies, stages)
tty.debug("{0} build jobs generated in {1} stages".format(job_id, stage_id))
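
With the phase loop and the artificial bootstrapped-compiler dependencies gone, generation is a single pass over the stages. A heavily condensed, illustrative outline of the loop above (scripts, tags, variables, "needs" wiring, and buildcache copies are elided; the real code keys jobs by the formatted job name rather than the label):

def outline_generate_jobs(spec_labels, stages, spack_ci_ir):
    output = {}
    for stage_id, stage_jobs in enumerate(stages):
        stage_name = "stage-{0}".format(stage_id)
        for spec_label in stage_jobs:
            release_spec = spec_labels[spec_label]["spec"]
            job = spack_ci_ir["jobs"][release_spec.dag_hash()]["attributes"]
            if not job:
                continue  # no matching attributes for this spec, skip it
            job["stage"] = stage_name
            output[spec_label] = job
    return output
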
@@ -1576,44 +1424,6 @@ def can_verify_binaries():
return len(gpg_util.public_keys()) >= 1
def configure_compilers(compiler_action, scope=None):
"""Depending on the compiler_action parameter, either turn on the
install_missing_compilers config option, or find spack compilers,
or do nothing. This is used from rebuild jobs in bootstrapping
pipelines, where in the bootstrapping phase we would pass
FIND_ANY in case of compiler-agnostic bootstrapping, while in the
spec building phase we would pass INSTALL_MISSING in order to get
spack to use the compiler which was built in the previous phase and
is now sitting in the binary mirror.
Arguments:
compiler_action (str): 'FIND_ANY', 'INSTALL_MISSING' have meanings
described above. Any other value essentially results in a no-op.
scope (spack.config.ConfigScope): Optional. The scope in which to look for
compilers, in case 'FIND_ANY' was provided.
"""
if compiler_action == "INSTALL_MISSING":
tty.debug("Make sure bootstrapped compiler will be installed")
config = cfg.get("config")
config["install_missing_compilers"] = True
cfg.set("config", config)
elif compiler_action == "FIND_ANY":
tty.debug("Just find any available compiler")
find_args = ["find"]
if scope:
find_args.extend(["--scope", scope])
output = spack_compiler(*find_args)
tty.debug("spack compiler find")
tty.debug(output)
output = spack_compiler("list")
tty.debug("spack compiler list")
tty.debug(output)
else:
tty.debug("No compiler action to be taken")
return None
def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
"""Unchecked version of the public API, for easier mocking"""
unsigned = not sign_binaries

View File

@@ -274,7 +274,6 @@ def ci_rebuild(args):
signing_key = os.environ.get("SPACK_SIGNING_KEY")
job_spec_pkg_name = os.environ.get("SPACK_JOB_SPEC_PKG_NAME")
job_spec_dag_hash = os.environ.get("SPACK_JOB_SPEC_DAG_HASH")
compiler_action = os.environ.get("SPACK_COMPILER_ACTION")
spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE")
remote_mirror_override = os.environ.get("SPACK_REMOTE_MIRROR_OVERRIDE")
remote_mirror_url = os.environ.get("SPACK_REMOTE_MIRROR_URL")
@@ -295,7 +294,6 @@ def ci_rebuild(args):
tty.debug("pipeline_artifacts_dir = {0}".format(pipeline_artifacts_dir))
tty.debug("remote_mirror_url = {0}".format(remote_mirror_url))
tty.debug("job_spec_pkg_name = {0}".format(job_spec_pkg_name))
tty.debug("compiler_action = {0}".format(compiler_action))
# Query the environment manifest to find out whether we're reporting to a
# CDash instance, and if so, gather some information from the manifest to
@@ -411,14 +409,6 @@ def ci_rebuild(args):
if signing_key:
spack_ci.import_signing_key(signing_key)
# Depending on the specifics of this job, we might need to turn on the
# "config:install_missing compilers" option (to build this job spec
# with a bootstrapped compiler), or possibly run "spack compiler find"
# (to build a bootstrap compiler or one of its deps in a
# compiler-agnostic way), or maybe do nothing at all (to build a spec
# using a compiler already installed on the target system).
spack_ci.configure_compilers(compiler_action)
# Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
tty.debug("job concrete spec path: {0}".format(job_spec_json_path))

View File

@@ -134,23 +134,6 @@
core_shared_properties = union_dicts(
{
"pipeline-gen": pipeline_gen_schema,
"bootstrap": {
"type": "array",
"items": {
"anyOf": [
{"type": "string"},
{
"type": "object",
"additionalProperties": False,
"required": ["name"],
"properties": {
"name": {"type": "string"},
"compiler-agnostic": {"type": "boolean", "default": False},
},
},
]
},
},
"rebuild-index": {"type": "boolean"},
"broken-specs-url": {"type": "string"},
"broken-tests-packages": {"type": "array", "items": {"type": "string"}},

View File

@@ -46,31 +46,6 @@ def test_import_signing_key(mock_gnupghome):
ci.import_signing_key(signing_key)
def test_configure_compilers(mutable_config):
def assert_missing(config):
assert (
"install_missing_compilers" not in config
or config["install_missing_compilers"] is False
)
def assert_present(config):
assert (
"install_missing_compilers" in config and config["install_missing_compilers"] is True
)
original_config = spack.config.get("config")
assert_missing(original_config)
ci.configure_compilers("FIND_ANY", scope="site")
second_config = spack.config.get("config")
assert_missing(second_config)
ci.configure_compilers("INSTALL_MISSING")
last_config = spack.config.get("config")
assert_present(last_config)
class FakeWebResponder(object):
def __init__(self, response_code=200, content_to_read=[]):
self._resp_code = response_code
@@ -248,7 +223,7 @@ def test_ci_workarounds():
fake_root_spec = "x" * 544
fake_spack_ref = "x" * 40
common_variables = {"SPACK_COMPILER_ACTION": "NONE", "SPACK_IS_PR_PIPELINE": "False"}
common_variables = {"SPACK_IS_PR_PIPELINE": "False"}
common_before_script = [
'git clone "https://github.com/spack/spack"',

View File

@@ -17,7 +17,6 @@
import spack
import spack.binary_distribution
import spack.ci as ci
import spack.compilers as compilers
import spack.config
import spack.environment as ev
import spack.hash_types as ht
@@ -30,7 +29,7 @@
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.ci import schema as ci_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import CompilerSpec, Spec
from spack.spec import Spec
from spack.util.pattern import Bunch
config_cmd = spack.main.SpackCommand("config")
@@ -163,8 +162,6 @@ def test_ci_generate_with_env(
"""\
spack:
definitions:
- bootstrap:
- cmake@3.4.3
- old-gcc-pkgs:
- archive-files
- callpath
@@ -179,9 +176,6 @@ def test_ci_generate_with_env(
mirrors:
some-mirror: {0}
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
@@ -221,16 +215,10 @@ def test_ci_generate_with_env(
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
found_spec = False
for ci_key in yaml_contents.keys():
if "(bootstrap)" in ci_key:
found_spec = True
assert "cmake" in ci_key
assert found_spec
assert "stages" in yaml_contents
assert len(yaml_contents["stages"]) == 6
assert len(yaml_contents["stages"]) == 5
assert yaml_contents["stages"][0] == "stage-0"
assert yaml_contents["stages"][5] == "stage-rebuild-index"
assert yaml_contents["stages"][4] == "stage-rebuild-index"
assert "rebuild-index" in yaml_contents
rebuild_job = yaml_contents["rebuild-index"]
@@ -244,155 +232,6 @@ def test_ci_generate_with_env(
assert artifacts_root == "jobs_scratch_dir"
def _validate_needs_graph(yaml_contents, needs_graph, artifacts):
"""Validate the needs graph in the generate CI"""
# TODO: Fix the logic to catch errors where expected packages/needs are not
# found.
for job_name, job_def in yaml_contents.items():
for needs_def_name, needs_list in needs_graph.items():
if job_name.startswith(needs_def_name):
# check job needs against the expected needs definition
j_needs = job_def["needs"]
assert all(
[
job_needs["job"][: job_needs["job"].index("/")] in needs_list
for job_needs in j_needs
]
)
assert all(
[nl in [n["job"][: n["job"].index("/")] for n in j_needs] for nl in needs_list]
)
assert all([job_needs["artifacts"] == artifacts for job_needs in j_needs])
break
def test_ci_generate_bootstrap_gcc(
tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
):
"""Test that we can bootstrap a compiler and use it as the
compiler for a spec in the environment"""
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- bootstrap:
- gcc@3.0
specs:
- dyninst%gcc@=3.0
mirrors:
some-mirror: https://my.fake.mirror
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-aarch64
build-job:
tags:
- donotcare
- any-job:
tags:
- donotcare
"""
)
needs_graph = {
"(bootstrap) conflict": [],
"(bootstrap) gcc": ["(bootstrap) conflict"],
"(specs) libelf": ["(bootstrap) gcc"],
"(specs) libdwarf": ["(bootstrap) gcc", "(specs) libelf"],
"(specs) dyninst": ["(bootstrap) gcc", "(specs) libelf", "(specs) libdwarf"],
}
with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
_validate_needs_graph(yaml_contents, needs_graph, False)
def test_ci_generate_bootstrap_artifacts_buildcache(
tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment
):
"""Test that we can bootstrap a compiler when artifacts buildcache
is turned on"""
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- bootstrap:
- gcc@3.0
specs:
- dyninst%gcc@=3.0
mirrors:
some-mirror: https://my.fake.mirror
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-aarch64
build-job:
tags:
- donotcare
- any-job:
tags:
- donotcare
enable-artifacts-buildcache: True
"""
)
needs_graph = {
"(bootstrap) conflict": [],
"(bootstrap) gcc": ["(bootstrap) conflict"],
"(specs) libelf": ["(bootstrap) gcc", "(bootstrap) conflict"],
"(specs) libdwarf": ["(bootstrap) gcc", "(bootstrap) conflict", "(specs) libelf"],
"(specs) dyninst": [
"(bootstrap) gcc",
"(bootstrap) conflict",
"(specs) libelf",
"(specs) libdwarf",
],
}
with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as f:
contents = f.read()
yaml_contents = syaml.load(contents)
_validate_needs_graph(yaml_contents, needs_graph, True)
def test_ci_generate_with_env_missing_section(
tmpdir,
working_env,
@@ -889,7 +728,7 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env):
"SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash,
"SPACK_JOB_SPEC_PKG_NAME": pkg_name,
"SPACK_COMPILER_ACTION": "NONE",
"SPACK_CDASH_BUILD_NAME": "(specs) {0}".format(pkg_name),
"SPACK_CDASH_BUILD_NAME": pkg_name,
"SPACK_REMOTE_MIRROR_URL": rebuild_env.mirror_url,
"SPACK_PIPELINE_TYPE": "spack_protected_branch",
"CI_JOB_URL": rebuild_env.ci_job_url,
@@ -1283,7 +1122,7 @@ def test_push_mirror_contents(
found_spec_job = False
for ci_key in yaml_contents.keys():
if "(specs) patchelf" in ci_key:
if "patchelf" in ci_key:
the_elt = yaml_contents[ci_key]
assert "variables" in the_elt
job_vars = the_elt["variables"]
@@ -1457,7 +1296,7 @@ def test_ci_generate_override_runner_attrs(
assert global_vars["SPACK_CHECKOUT_VERSION"] == "12ad69eb1"
for ci_key in yaml_contents.keys():
if "(specs) a" in ci_key:
if ci_key.startswith("a"):
# Make sure a's attributes override variables, and all the
# scripts. Also, make sure the 'toplevel' tag doesn't
# appear twice, but that a's specific extra tag does appear
@@ -1477,7 +1316,7 @@
assert the_elt["script"][0] == "custom main step"
assert len(the_elt["after_script"]) == 1
assert the_elt["after_script"][0] == "custom post step one"
if "(specs) dependency-install" in ci_key:
if "dependency-install" in ci_key:
# Since the dependency-install match omits any
# runner-attributes, make sure it inherited all the
# top-level attributes.
@@ -1495,7 +1334,7 @@
assert the_elt["script"][0] == "main step"
assert len(the_elt["after_script"]) == 1
assert the_elt["after_script"][0] == "post step one"
if "(specs) flatten-deps" in ci_key:
if "flatten-deps" in ci_key:
# The flatten-deps match specifies that we keep the two
# top level variables, but add a third specific one. It
# also adds a custom tag which should be combined with
@@ -1554,9 +1393,10 @@ def test_ci_generate_with_workarounds(
yaml_contents = syaml.load(contents)
found_one = False
non_rebuild_keys = ["workflow", "stages", "variables", "rebuild-index"]
for ci_key in yaml_contents.keys():
if ci_key.startswith("(specs) "):
if ci_key not in non_rebuild_keys:
found_one = True
job_obj = yaml_contents[ci_key]
assert "needs" not in job_obj
@@ -1623,140 +1463,6 @@ def test_ci_rebuild_index(
jsonschema.validate(index_object, db_idx_schema)
def test_ci_generate_bootstrap_prune_dag(
install_mockery_mutable_config,
mock_packages,
mock_fetch,
mock_archive,
mutable_config,
monkeypatch,
tmpdir,
mutable_mock_env_path,
ci_base_environment,
):
"""Test compiler bootstrapping with DAG pruning. Specifically, make
sure that if we detect the bootstrapped compiler needs to be rebuilt,
we ensure the spec we want to build with that compiler is scheduled
for rebuild as well."""
# Create a temp mirror directory for buildcache usage
mirror_dir = tmpdir.join("mirror_dir")
mirror_url = "file://{0}".format(mirror_dir.strpath)
# Install a compiler, because we want to put it in a buildcache
install_cmd("gcc@=12.2.0%gcc@10.2.1")
# Put installed compiler in the buildcache
buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "gcc@12.2.0%gcc@10.2.1")
# Now uninstall the compiler
uninstall_cmd("-y", "gcc@12.2.0%gcc@10.2.1")
monkeypatch.setattr(spack.concretize.Concretizer, "check_for_compiler_existence", False)
spack.config.set("config:install_missing_compilers", True)
assert CompilerSpec("gcc@=12.2.0") not in compilers.all_compiler_specs()
# Configure the mirror where we put that buildcache w/ the compiler
mirror_cmd("add", "test-mirror", mirror_url)
install_cmd("--no-check-signature", "b%gcc@=12.2.0")
# Put spec built with installed compiler in the buildcache
buildcache_cmd("push", "-u", "-a", "-f", mirror_dir.strpath, "b%gcc@12.2.0")
# Now uninstall the spec
uninstall_cmd("-y", "b%gcc@12.2.0")
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
f.write(
"""\
spack:
definitions:
- bootstrap:
- gcc@=12.2.0%gcc@10.2.1
specs:
- b%gcc@12.2.0
mirrors:
atestm: {0}
ci:
bootstrap:
- name: bootstrap
compiler-agnostic: true
pipeline-gen:
- submapping:
- match:
- arch=test-debian6-x86_64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-core2
build-job:
tags:
- meh
- match:
- arch=test-debian6-aarch64
build-job:
tags:
- donotcare
- match:
- arch=test-debian6-m1
build-job:
tags:
- meh
""".format(
mirror_url
)
)
# Without this monkeypatch, pipeline generation process would think that
# nothing in the environment needs rebuilding. With the monkeypatch, the
# process sees the compiler as needing a rebuild, which should then result
# in the specs built with that compiler needing a rebuild too.
def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
if spec.name == "gcc":
return []
else:
return [{"spec": spec, "mirror_url": mirror_url}]
with tmpdir.as_cwd():
env_cmd("create", "test", "./spack.yaml")
outputfile = str(tmpdir.join(".gitlab-ci.yml"))
with ev.read("test"):
ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as of:
yaml_contents = of.read()
original_yaml_contents = syaml.load(yaml_contents)
# without the monkeypatch, everything appears up to date and no
# rebuild jobs are generated.
assert original_yaml_contents
assert "no-specs-to-rebuild" in original_yaml_contents
monkeypatch.setattr(
spack.binary_distribution, "get_mirrors_for_spec", fake_get_mirrors_for_spec
)
ci_cmd("generate", "--output-file", outputfile)
with open(outputfile) as of:
yaml_contents = of.read()
new_yaml_contents = syaml.load(yaml_contents)
assert new_yaml_contents
# This 'needs' graph reflects that even though specs 'a' and 'b' do
# not otherwise need to be rebuilt (thanks to DAG pruning), they
# both end up in the generated pipeline because the compiler they
# depend on is bootstrapped, and *does* need to be rebuilt.
needs_graph = {"(bootstrap) gcc": [], "(specs) b": ["(bootstrap) gcc"]}
_validate_needs_graph(new_yaml_contents, needs_graph, False)
def test_ci_get_stack_changed(mock_git_repo, monkeypatch):
"""Test that we can detect the change to .gitlab-ci.yml in a
mock spack git repo."""
@@ -1828,7 +1534,7 @@ def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"):
generated_hashes = []
for ci_key in yaml_contents.keys():
if ci_key.startswith("(specs)"):
if "variables" in yaml_contents[ci_key]:
generated_hashes.append(
yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"]
)
@@ -2240,9 +1946,7 @@ def test_ci_reproduce(
ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root)
target_name = spack.platforms.test.Test.default
job_name = ci.get_job_name(
"specs", False, job_spec, "test-debian6-%s" % target_name, None
)
job_name = ci.get_job_name(job_spec, "test-debian6-%s" % target_name, None)
repro_file = os.path.join(working_dir.strpath, "repro.json")
repro_details = {
@@ -2309,8 +2013,6 @@ def test_cmd_first_line():
legacy_spack_yaml_contents = """
spack:
definitions:
- bootstrap:
- cmake@3.4.3
- old-gcc-pkgs:
- archive-files
- callpath
@@ -2325,9 +2027,6 @@ def test_cmd_first_line():
mirrors:
test-mirror: file:///some/fake/mirror
{0}:
bootstrap:
- name: bootstrap
compiler-agnostic: true
match_behavior: first
mappings:
- match:
@@ -2379,16 +2078,10 @@ def test_gitlab_ci_deprecated(
contents = f.read()
yaml_contents = syaml.load(contents)
found_spec = False
for ci_key in yaml_contents.keys():
if "(bootstrap)" in ci_key:
found_spec = True
assert "cmake" in ci_key
assert found_spec
assert "stages" in yaml_contents
assert len(yaml_contents["stages"]) == 6
assert len(yaml_contents["stages"]) == 5
assert yaml_contents["stages"][0] == "stage-0"
assert yaml_contents["stages"][5] == "stage-rebuild-index"
assert yaml_contents["stages"][4] == "stage-rebuild-index"
assert "rebuild-index" in yaml_contents
rebuild_job = yaml_contents["rebuild-index"]