spack ci: remove relate-CDash-builds functionality (#29950)

gitlab ci: Remove code for relating CDash builds

Relating CDash builds to their dependencies was a seldom-used feature. Removing
it will make it easier for us to reorganize our CDash projects & build groups in the
future by eliminating the need to keep track of CDash build ids in our binary mirrors.
Zack Galbreath 2022-04-14 12:42:30 -04:00 committed by GitHub
parent f5520de4bd
commit dec3e31e60
8 changed files with 17 additions and 360 deletions
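
For context, the feature removed here worked roughly as sketched below, condensed from the code deleted in this commit: each CI job registered its build with CDash, pushed the returned build id to the binary mirror as a .cdashid file, and then related its own build to the builds of its dependencies. The function below is illustrative only, not part of Spack's API.

import json
from urllib.request import Request, urlopen

def relate_to_dependencies(cdash_url, project, job_build_id, dep_build_ids):
    # For each dependency build id (previously read back from a .cdashid file
    # stored next to the tarball in the binary mirror), tell CDash that the
    # current job's build "depends on" the dependency's build.
    headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
    for dep_build_id in dep_build_ids:
        payload = json.dumps({
            'project': project,
            'buildid': job_build_id,
            'relatedid': dep_build_id,
            'relationship': 'depends on',
        }).encode('utf-8')
        request = Request('{0}/api/v1/relateBuilds.php'.format(cdash_url),
                          data=payload, headers=headers)
        urlopen(request)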

View File

@@ -2065,14 +2065,13 @@ def download_buildcache_entry(file_descriptions, mirror_url=None):
def download_single_spec(
concrete_spec, destination, require_cdashid=False, mirror_url=None
concrete_spec, destination, mirror_url=None
):
"""Download the buildcache files for a single concrete spec.
Args:
concrete_spec: concrete spec to be downloaded
destination (str): path where to put the downloaded buildcache
require_cdashid (bool): if False the `.cdashid` file is optional
mirror_url (str): url of the mirror from which to download
"""
tarfile_name = tarball_name(concrete_spec, '.spack')
@@ -2090,10 +2089,6 @@ def download_single_spec(
tarball_name(concrete_spec, '.spec.yaml')],
'path': destination,
'required': True,
}, {
'url': [tarball_name(concrete_spec, '.cdashid')],
'path': destination,
'required': require_cdashid,
},
]
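
A minimal usage sketch of the updated signature; the spec name, destination path, and mirror URL below are hypothetical:

import spack.binary_distribution as bindist
import spack.spec

# Hypothetical concrete spec and download location.
concrete_spec = spack.spec.Spec('zlib').concretized()
ok = bindist.download_single_spec(
    concrete_spec, '/tmp/downloaded_buildcache',
    mirror_url='https://mirror.example.com')
if not ok:
    print('download failed')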

View File

@@ -5,7 +5,6 @@
import base64
import copy
import datetime
import json
import os
import re
@@ -1026,16 +1025,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
cdash_build_name = get_cdash_build_name(
release_spec, build_group)
all_job_names.append(cdash_build_name)
related_builds = [] # Used for relating CDash builds
if spec_label in dependencies:
related_builds = (
[spec_labels[d]['spec'].name
for d in dependencies[spec_label]])
job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
job_vars['SPACK_RELATED_BUILDS_CDASH'] = ';'.join(
sorted(related_builds))
variables.update(job_vars)
@@ -1050,7 +1040,6 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
local_mirror_dir, 'build_cache')
artifact_paths.extend([os.path.join(bc_root, p) for p in [
bindist.tarball_name(release_spec, '.spec.json'),
bindist.tarball_name(release_spec, '.cdashid'),
bindist.tarball_directory_name(release_spec),
]])
@@ -1340,11 +1329,9 @@ def configure_compilers(compiler_action, scope=None):
return None
def get_concrete_specs(env, root_spec, job_name, related_builds,
compiler_action):
def get_concrete_specs(env, root_spec, job_name, compiler_action):
spec_map = {
'root': None,
'deps': {},
}
if compiler_action == 'FIND_ANY':
@@ -1368,161 +1355,9 @@ def get_concrete_specs(env, root_spec, job_name, related_builds,
spec_map['root'] = concrete_root
spec_map[job_name] = concrete_root[job_name]
if related_builds:
for dep_job_name in related_builds.split(';'):
spec_map['deps'][dep_job_name] = concrete_root[dep_job_name]
return spec_map
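
With the related_builds parameter gone, callers pass only the environment, the root spec, the job name, and the compiler action. A minimal sketch, assuming an active environment that contains dyninst (names mirror the tests further down):

import spack.ci as ci
import spack.environment as ev

with ev.read('test') as env:
    # With compiler_action 'FIND_ANY' the root spec may be a plain package
    # name; with 'NONE' it is expected to be a concrete root build hash.
    spec_map = ci.get_concrete_specs(env, 'dyninst', 'dyninst', 'FIND_ANY')
    concrete_root = spec_map['root']   # concrete root spec
    job_spec = spec_map['dyninst']     # concrete spec for this job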
def register_cdash_build(build_name, base_url, project, site, track):
url = base_url + '/api/v1/addBuild.php'
time_stamp = datetime.datetime.now().strftime('%Y%m%d-%H%M')
build_id = None
build_stamp = '{0}-{1}'.format(time_stamp, track)
payload = {
"project": project,
"site": site,
"name": build_name,
"stamp": build_stamp,
}
tty.debug('Registering cdash build to {0}, payload:'.format(url))
tty.debug(payload)
enc_data = json.dumps(payload).encode('utf-8')
headers = {
'Content-Type': 'application/json',
}
opener = build_opener(HTTPHandler)
request = Request(url, data=enc_data, headers=headers)
try:
response = opener.open(request)
response_code = response.getcode()
if response_code != 200 and response_code != 201:
msg = 'Adding build failed (response code = {0}'.format(response_code)
tty.warn(msg)
return (None, None)
response_text = response.read()
response_json = json.loads(response_text)
build_id = response_json['buildid']
except Exception as e:
print("Registering build in CDash failed: {0}".format(e))
return (build_id, build_stamp)
def relate_cdash_builds(spec_map, cdash_base_url, job_build_id, cdash_project,
cdashids_mirror_urls):
if not job_build_id:
return
dep_map = spec_map['deps']
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
}
cdash_api_url = '{0}/api/v1/relateBuilds.php'.format(cdash_base_url)
for dep_pkg_name in dep_map:
tty.debug('Fetching cdashid file for {0}'.format(dep_pkg_name))
dep_spec = dep_map[dep_pkg_name]
dep_build_id = None
for url in cdashids_mirror_urls:
try:
if url:
dep_build_id = read_cdashid_from_mirror(dep_spec, url)
break
except web_util.SpackWebError:
tty.debug('Did not find cdashid for {0} on {1}'.format(
dep_pkg_name, url))
else:
tty.warn('Did not find cdashid for {0} anywhere'.format(
dep_pkg_name))
return
payload = {
"project": cdash_project,
"buildid": job_build_id,
"relatedid": dep_build_id,
"relationship": "depends on"
}
enc_data = json.dumps(payload).encode('utf-8')
opener = build_opener(HTTPHandler)
request = Request(cdash_api_url, data=enc_data, headers=headers)
try:
response = opener.open(request)
response_code = response.getcode()
if response_code != 200 and response_code != 201:
msg = 'Relate builds ({0} -> {1}) failed (resp code = {2})'.format(
job_build_id, dep_build_id, response_code)
tty.warn(msg)
return
response_text = response.read()
tty.debug('Relate builds response: {0}'.format(response_text))
except Exception as e:
print("Relating builds in CDash failed: {0}".format(e))
def write_cdashid_to_mirror(cdashid, spec, mirror_url):
if not spec.concrete:
tty.die('Can only write cdashid for concrete spec to mirror')
with TemporaryDirectory() as tmpdir:
local_cdash_path = os.path.join(tmpdir, 'job.cdashid')
with open(local_cdash_path, 'w') as fd:
fd.write(cdashid)
buildcache_name = bindist.tarball_name(spec, '')
cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
remote_url = os.path.join(
mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
tty.debug('pushing cdashid to url')
tty.debug(' local file path: {0}'.format(local_cdash_path))
tty.debug(' remote url: {0}'.format(remote_url))
try:
web_util.push_to_url(local_cdash_path, remote_url)
except Exception as inst:
# No matter what went wrong here, don't allow the pipeline to fail
# just because there was an issue storing the cdashid on the mirror
msg = 'Failed to write cdashid {0} to mirror {1}'.format(
cdashid, mirror_url)
tty.warn(inst)
tty.warn(msg)
def read_cdashid_from_mirror(spec, mirror_url):
if not spec.concrete:
tty.die('Can only read cdashid for concrete spec from mirror')
buildcache_name = bindist.tarball_name(spec, '')
cdashid_file_name = '{0}.cdashid'.format(buildcache_name)
url = os.path.join(
mirror_url, bindist.build_cache_relative_path(), cdashid_file_name)
resp_url, resp_headers, response = web_util.read_from_url(url)
contents = response.fp.read()
return int(contents)
def _push_mirror_contents(env, specfile_path, sign_binaries, mirror_url):
"""Unchecked version of the public API, for easier mocking"""
unsigned = not sign_binaries
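
The write_cdashid_to_mirror and read_cdashid_from_mirror helpers removed above are what kept CDash build ids in binary mirrors; the object they wrote and read sat next to the rest of the buildcache metadata. A sketch of that layout, with a hypothetical mirror URL and buildcache name:

import os

mirror_url = 'https://mirror.example.com'  # hypothetical
buildcache_name = 'linux-ubuntu18.04-x86_64-gcc-7.5.0-zlib-1.2.12-abcdef'  # hypothetical
cdashid_url = os.path.join(mirror_url, 'build_cache',
                           buildcache_name + '.cdashid')
# The object stored at cdashid_url contained nothing but the CDash build id
# as text (e.g. '42'), which read_cdashid_from_mirror() parsed with int().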

View File

@@ -308,8 +308,7 @@ def optimizer(yaml):
# try factoring out commonly repeated portions
common_job = {
'variables': {
'SPACK_COMPILER_ACTION': 'NONE',
'SPACK_RELATED_BUILDS_CDASH': ''
'SPACK_COMPILER_ACTION': 'NONE'
},
'after_script': ['rm -rf "./spack"'],

View File

@@ -180,9 +180,6 @@ def setup_parser(subparser):
download.add_argument(
'-p', '--path', default=None,
help="Path to directory where tarball should be downloaded")
download.add_argument(
'-c', '--require-cdashid', action='store_true', default=False,
help="Require .cdashid file to be downloaded with buildcache entry")
download.set_defaults(func=download_fn)
# Get buildcache name
@@ -440,9 +437,7 @@ def download_fn(args):
"""Download buildcache entry from a remote mirror to local folder. This
command uses the process exit code to indicate its result, specifically,
a non-zero exit code indicates that the command failed to download at
least one of the required buildcache components. Normally, just the
tarball and .spec.json files are required, but if the --require-cdashid
argument was provided, then a .cdashid file is also required."""
least one of the required buildcache components."""
if not args.spec and not args.spec_file:
tty.msg('No specs provided, exiting.')
sys.exit(0)
@@ -452,9 +447,7 @@ def download_fn(args):
sys.exit(0)
spec = _concrete_spec_from_args(args)
result = bindist.download_single_spec(
spec, args.path, require_cdashid=args.require_cdashid
)
result = bindist.download_single_spec(spec, args.path)
if not result:
sys.exit(1)
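
With the -c/--require-cdashid flag gone, a download fetches just the tarball and the spec file. A minimal sketch using the command programmatically, as the tests below do; the spec file path and destination directory are hypothetical:

from spack.main import SpackCommand

buildcache = SpackCommand('buildcache')
# Hypothetical spec file and destination directory.
buildcache('download', '--spec-file', './spec.json', '--path', './downloads')
# The destination now holds two files per spec: the .spack tarball and the
# .spec.json metadata (previously a .cdashid file could also be required).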
@@ -560,11 +553,6 @@ def copy_fn(args):
specfile_src_path_yaml = os.path.join(args.base_dir, specfile_rel_path)
specfile_dest_path_yaml = os.path.join(dest_root_path, specfile_rel_path)
cdashidfile_rel_path = os.path.join(
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)
# Make sure directory structure exists before attempting to copy
os.makedirs(os.path.dirname(tarball_dest_path))
@@ -578,11 +566,6 @@ def copy_fn(args):
tty.msg('Copying {0}'.format(specfile_rel_path_yaml))
shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
# Copy the cdashid file (if exists) to the destination mirror
if os.path.exists(cdashid_src_path):
tty.msg('Copying {0}'.format(cdashidfile_rel_path))
shutil.copyfile(cdashid_src_path, cdashid_dest_path)
def sync_fn(args):
""" Syncs binaries (and associated metadata) from one mirror to another.
@@ -667,8 +650,6 @@ def sync_fn(args):
build_cache_dir, bindist.tarball_name(s, '.spec.yaml')),
os.path.join(
build_cache_dir, bindist.tarball_name(s, '.spec.json')),
os.path.join(
build_cache_dir, bindist.tarball_name(s, '.cdashid'))
])
tmpdir = tempfile.mkdtemp()

View File

@@ -196,7 +196,6 @@ def ci_rebuild(args):
job_spec_pkg_name = get_env_var('SPACK_JOB_SPEC_PKG_NAME')
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
@@ -236,7 +235,6 @@ def ci_rebuild(args):
tty.debug('cdash_project_enc = {0}'.format(cdash_project_enc))
tty.debug('cdash_build_name = {0}'.format(cdash_build_name))
tty.debug('cdash_site = {0}'.format(cdash_site))
tty.debug('related_builds = {0}'.format(related_builds))
tty.debug('job_spec_buildgroup = {0}'.format(job_spec_buildgroup))
# Is this a pipeline run on a spack PR or a merge to develop? It might
@@ -279,7 +277,7 @@ def ci_rebuild(args):
# Whatever form of root_spec we got, use it to get a map giving us concrete
# specs for this job and all of its dependencies.
spec_map = spack_ci.get_concrete_specs(
env, root_spec, job_spec_pkg_name, related_builds, compiler_action)
env, root_spec, job_spec_pkg_name, compiler_action)
job_spec = spec_map[job_spec_pkg_name]
job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
@@ -375,9 +373,6 @@ def ci_rebuild(args):
pipeline_mirror_url,
cfg.default_modify_scope())
cdash_build_id = None
cdash_build_stamp = None
# Check configured mirrors for a built spec with a matching full hash
matches = bindist.get_mirrors_for_spec(
job_spec, full_hash_match=True, index_only=False)
@@ -402,7 +397,6 @@ def ci_rebuild(args):
bindist.download_single_spec(
job_spec,
build_cache_dir,
require_cdashid=False,
mirror_url=matching_mirror
)
@@ -429,16 +423,8 @@ def ci_rebuild(args):
if not verify_binaries:
install_args.append('--no-check-signature')
# If CDash reporting is enabled, we first register this build with
# the specified CDash instance, then relate the build to those of
# its dependencies.
if enable_cdash:
tty.debug('CDash: Registering build')
(cdash_build_id,
cdash_build_stamp) = spack_ci.register_cdash_build(
cdash_build_name, cdash_base_url, cdash_project,
cdash_site, job_spec_buildgroup)
# Add additional arguments to `spack install` for CDash reporting.
cdash_upload_url = '{0}/submit.php?project={1}'.format(
cdash_base_url, cdash_project_enc)
@@ -446,15 +432,9 @@ def ci_rebuild(args):
'--cdash-upload-url', cdash_upload_url,
'--cdash-build', cdash_build_name,
'--cdash-site', cdash_site,
'--cdash-buildstamp', cdash_build_stamp,
'--cdash-track', job_spec_buildgroup,
])
if cdash_build_id is not None:
tty.debug('CDash: Relating build with dependency builds')
spack_ci.relate_cdash_builds(
spec_map, cdash_base_url, cdash_build_id, cdash_project,
[pipeline_mirror_url, pr_mirror_url, remote_mirror_url])
# A compiler action of 'FIND_ANY' means we are building a bootstrap
# compiler or one of its deps.
# TODO: when compilers are dependencies, we should include --no-add
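
Net effect for a CDash-enabled rebuild: the install command keeps only the reporting flags below, passing the build group via --cdash-track instead of a pre-registered --cdash-buildstamp, and no build is related to its dependencies afterwards. Values here are placeholders:

install_args = [
    'install',
    '--cdash-upload-url', 'https://cdash.example.com/submit.php?project=Spack',
    '--cdash-build', '(specs) archive-files',
    '--cdash-site', 'cloud-gitlab',
    '--cdash-track', 'Experimental',
]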
@@ -562,12 +542,6 @@ def ci_rebuild(args):
env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
)
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
cdash_build_id, buildcache_mirror_url))
spack_ci.write_cdashid_to_mirror(
cdash_build_id, job_spec, buildcache_mirror_url)
# Create another copy of that buildcache in the per-pipeline
# temporary storage mirror (this is only done if either
# artifacts buildcache is enabled or a temporary storage url
@@ -577,12 +551,6 @@ def ci_rebuild(args):
env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
)
if cdash_build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
cdash_build_id, pipeline_mirror_url))
spack_ci.write_cdashid_to_mirror(
cdash_build_id, job_spec, pipeline_mirror_url)
# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:

View File

@@ -4,7 +4,6 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import itertools as it
import json
import os
import sys
@@ -18,7 +17,6 @@
import spack.config as cfg
import spack.environment as ev
import spack.error
import spack.main as spack_main
import spack.paths as spack_paths
import spack.spec as spec
import spack.util.gpg
@@ -104,25 +102,20 @@ def test_get_concrete_specs(config, mutable_mock_env_path, mock_packages):
assert(dyninst_hash)
dep_builds = 'libdwarf;libelf'
spec_map = ci.get_concrete_specs(
active_env, dyninst_hash, 'dyninst', dep_builds, 'NONE')
assert('root' in spec_map and 'deps' in spec_map)
active_env, dyninst_hash, 'dyninst', 'NONE')
assert 'root' in spec_map
concrete_root = spec_map['root']
assert(concrete_root.build_hash() == dyninst_hash)
concrete_deps = spec_map['deps']
for key, obj in concrete_deps.items():
assert(obj.build_hash() == hash_dict[key])
s = spec.Spec('dyninst')
print('nonconc spec name: {0}'.format(s.name))
spec_map = ci.get_concrete_specs(
active_env, s.name, s.name, dep_builds, 'FIND_ANY')
active_env, s.name, s.name, 'FIND_ANY')
assert('root' in spec_map and 'deps' in spec_map)
assert 'root' in spec_map
class FakeWebResponder(object):
@@ -156,114 +149,6 @@ def read(self, length=None):
return None
@pytest.mark.maybeslow
def test_register_cdash_build(monkeypatch):
build_name = 'Some pkg'
base_url = 'http://cdash.fake.org'
project = 'spack'
site = 'spacktests'
track = 'Experimental'
response_obj = {
'buildid': 42
}
fake_responder = FakeWebResponder(
content_to_read=[json.dumps(response_obj)])
monkeypatch.setattr(ci, 'build_opener', lambda handler: fake_responder)
build_id, build_stamp = ci.register_cdash_build(
build_name, base_url, project, site, track)
assert(build_id == 42)
@pytest.mark.skipif(sys.platform == 'win32',
reason="Not supported on Windows (yet)")
def test_relate_cdash_builds(config, mutable_mock_env_path, mock_packages,
monkeypatch, capfd):
e = ev.create('test1')
e.add('dyninst')
e.concretize()
dyninst_hash = None
hash_dict = {}
with e as active_env:
for s in active_env.all_specs():
hash_dict[s.name] = s.build_hash()
if s.name == 'dyninst':
dyninst_hash = s.build_hash()
assert(dyninst_hash)
dep_builds = 'libdwarf;libelf'
spec_map = ci.get_concrete_specs(
active_env, dyninst_hash, 'dyninst', dep_builds, 'NONE')
assert('root' in spec_map and 'deps' in spec_map)
cdash_api_url = 'http://cdash.fake.org'
job_build_id = '42'
cdash_project = 'spack'
cdashids_mirror_url = 'https://my.fake.mirror'
dep_cdash_ids = {
'libdwarf': 1,
'libelf': 2
}
monkeypatch.setattr(ci, 'read_cdashid_from_mirror',
lambda s, u: dep_cdash_ids.pop(s.name))
fake_responder = FakeWebResponder(
content_to_read=['libdwarf', 'libelf'])
monkeypatch.setattr(ci, 'build_opener', lambda handler: fake_responder)
ci.relate_cdash_builds(spec_map, cdash_api_url, job_build_id,
cdash_project, [cdashids_mirror_url])
assert(not dep_cdash_ids)
dep_cdash_ids = {
'libdwarf': 1,
'libelf': 2
}
fake_responder._resp_code = 400
ci.relate_cdash_builds(spec_map, cdash_api_url, job_build_id,
cdash_project, [cdashids_mirror_url])
out, err = capfd.readouterr()
assert('Warning: Relate builds' in err)
assert('failed' in err)
dep_cdash_ids = {}
# Just make sure passing None for build id doesn't result in any
# calls to "read_cdashid_from_mirror"
ci.relate_cdash_builds(spec_map, cdash_api_url, None, cdash_project,
[cdashids_mirror_url])
@pytest.mark.skipif(sys.platform == 'win32',
reason="Not supported on Windows (yet)")
def test_read_write_cdash_ids(config, tmp_scope, tmpdir, mock_packages):
working_dir = tmpdir.join('working_dir')
mirror_dir = working_dir.join('mirror')
mirror_url = 'file://{0}'.format(mirror_dir.strpath)
mirror_cmd = spack_main.SpackCommand('mirror')
mirror_cmd('add', '--scope', tmp_scope, 'test_mirror', mirror_url)
mock_spec = spec.Spec('archive-files').concretized()
orig_cdashid = '42'
ci.write_cdashid_to_mirror(orig_cdashid, mock_spec, mirror_url)
# Now read it back
read_cdashid = ci.read_cdashid_from_mirror(mock_spec, mirror_url)
assert(str(read_cdashid) == orig_cdashid)
def test_download_and_extract_artifacts(tmpdir, monkeypatch, working_env):
os.environ.update({
'GITLAB_PRIVATE_TOKEN': 'faketoken',
@@ -425,7 +310,6 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize,
'jobs_scratch_dir',
'cdash_report',
name + '.spec.json',
name + '.cdashid',
name
],
'when': 'always'

View File

@@ -743,8 +743,6 @@ def test_ci_rebuild(tmpdir, mutable_mock_env_path,
def fake_cdash_register(build_name, base_url, project, site, track):
return ('fakebuildid', 'fakestamp')
monkeypatch.setattr(ci, 'register_cdash_build', fake_cdash_register)
monkeypatch.setattr(spack.cmd.ci, 'CI_REBUILD_INSTALL_BASE_ARGS', [
'notcommand'
])
@@ -767,7 +765,6 @@ def fake_cdash_register(build_name, base_url, project, site, track):
'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
'SPACK_COMPILER_ACTION': 'NONE',
'SPACK_CDASH_BUILD_NAME': '(specs) archive-files',
'SPACK_RELATED_BUILDS_CDASH': '',
'SPACK_REMOTE_MIRROR_URL': mirror_url,
'SPACK_PIPELINE_TYPE': 'spack_protected_branch',
'CI_JOB_URL': ci_job_url,
@@ -940,7 +937,7 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
env_cmd('create', 'test', './spack.yaml')
with ev.read('test') as env:
spec_map = ci.get_concrete_specs(
env, 'patchelf', 'patchelf', '', 'FIND_ANY')
env, 'patchelf', 'patchelf', 'FIND_ANY')
concrete_spec = spec_map['patchelf']
spec_json = concrete_spec.to_json(hash=ht.build_hash)
json_path = str(tmpdir.join('spec.json'))
@@ -952,8 +949,6 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
# env, spec, json_path, mirror_url, build_id, sign_binaries
ci.push_mirror_contents(env, json_path, mirror_url, True)
ci.write_cdashid_to_mirror('42', concrete_spec, mirror_url)
buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')
# Now test the --prune-dag (default) option of spack ci generate
@@ -1035,10 +1030,10 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
if not os.path.exists(dl_dir.strpath):
os.makedirs(dl_dir.strpath)
buildcache_cmd('download', '--spec-file', json_path, '--path',
dl_dir.strpath, '--require-cdashid')
dl_dir.strpath)
dl_dir_list = os.listdir(dl_dir.strpath)
assert(len(dl_dir_list) == 3)
assert(len(dl_dir_list) == 2)
def test_push_mirror_contents_exceptions(monkeypatch, capsys):
@@ -1285,7 +1280,7 @@ def test_ci_rebuild_index(tmpdir, mutable_mock_env_path,
env_cmd('create', 'test', './spack.yaml')
with ev.read('test') as env:
spec_map = ci.get_concrete_specs(
env, 'callpath', 'callpath', '', 'FIND_ANY')
env, 'callpath', 'callpath', 'FIND_ANY')
concrete_spec = spec_map['callpath']
spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
yaml_path = str(tmpdir.join('spec.yaml'))

View File

@@ -548,7 +548,7 @@ _spack_buildcache_check() {
}
_spack_buildcache_download() {
SPACK_COMPREPLY="-h --help -s --spec --spec-file -p --path -c --require-cdashid"
SPACK_COMPREPLY="-h --help -s --spec --spec-file -p --path"
}
_spack_buildcache_get_buildcache_name() {