add workaround for gitlab ci needs limit (#17219)
* add workaround for gitlab ci needs limit
* fix style/address review comments
* convert filter obj to list
* update command completion
* remove dict comprehension
* add workaround tests
* fix sorting issue between disparate types
* add indices to format
commit 7c54aa2eb0 (parent 096bd69a94)
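Context for reviewers: GitLab CI enforces a hard cap on the number of entries a job may list under needs: (50 by default at the time of this change), so pipelines generated for large environments can fail validation outright. The workaround added here is opt-in: with --dependencies, each job's needs: list is rewritten into a plain dependencies: list, trading DAG scheduling for stage-by-stage ordering, which is not subject to the same cap.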
lib/spack/spack/ci.py

@@ -450,7 +450,7 @@ def format_job_needs(phase_name, strip_compilers, dep_jobs,
 def generate_gitlab_ci_yaml(env, print_summary, output_file,
                             custom_spack_repo=None, custom_spack_ref=None,
-                            run_optimizer=False):
+                            run_optimizer=False, use_dependencies=False):
     # FIXME: What's the difference between one that opens with 'spack'
     # and one that opens with 'env'? This will only handle the former.
     with spack.concretize.disable_compiler_existence_check():
@@ -794,6 +794,11 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
         import spack.ci_optimization as ci_opt
         sorted_output = ci_opt.optimizer(sorted_output)

+    # TODO(opadron): remove this or refactor
+    if use_dependencies:
+        import spack.ci_needs_workaround as cinw
+        sorted_output = cinw.needs_to_dependencies(sorted_output)
+
     with open(output_file, 'w') as outf:
         outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
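Note that the conversion runs as a final pass over the fully generated (and, when --optimize is given, optimized) job dictionary, immediately before it is serialized to YAML.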
new file: lib/spack/spack/ci_needs_workaround.py (47 lines)
@@ -0,0 +1,47 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import collections
+
+try:
+    # dynamically import to keep vermin from complaining
+    collections_abc = __import__('collections.abc')
+except ImportError:
+    collections_abc = collections
+
+
+get_job_name = lambda needs_entry: (
+    needs_entry.get('job') if (
+        isinstance(needs_entry, collections_abc.Mapping) and
+        needs_entry.get('artifacts', True))
+
+    else
+
+    needs_entry if isinstance(needs_entry, str)
+
+    else None)
+
+
+def convert_job(job_entry):
+    if not isinstance(job_entry, collections_abc.Mapping):
+        return job_entry
+
+    needs = job_entry.get('needs')
+    if needs is None:
+        return job_entry
+
+    new_job = {}
+    new_job.update(job_entry)
+    del new_job['needs']
+
+    new_job['dependencies'] = list(filter(
+        (lambda x: x is not None),
+        (get_job_name(needs_entry) for needs_entry in needs)))
+
+    return new_job
+
+
+def needs_to_dependencies(yaml):
+    return dict((k, convert_job(v)) for k, v in yaml.items())
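To make the conversion concrete, here is a minimal sketch of the module in action. The input dict is hypothetical, shaped like the job entries generate_gitlab_ci_yaml emits; only needs_to_dependencies comes from the new module:

    import spack.ci_needs_workaround as cinw

    jobs = {
        'pkg-b': {
            'stage': 'stage-1',
            'script': ['spack ci rebuild'],
            # 'needs' engages GitLab's DAG scheduler; these lists are what
            # run into the size limit on large environments
            'needs': [{'job': 'pkg-a', 'artifacts': True}],
        },
    }

    converted = cinw.needs_to_dependencies(jobs)

    # 'needs' is replaced by a plain 'dependencies' list; entries with
    # 'artifacts': False are filtered out, since 'dependencies' only
    # controls artifact download between stage-ordered jobs
    assert converted['pkg-b']['dependencies'] == ['pkg-a']
    assert 'needs' not in converted['pkg-b']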
lib/spack/spack/ci_optimization.py

@@ -190,10 +190,10 @@ def print_delta(name, old, new, applied=None):
         applied = (new <= old)

     print('\n'.join((
-        '{} {}:',
-        ' before: {: 10d}',
-        ' after : {: 10d}',
-        ' delta : {:+10d} ({:=+3d}.{}%)',
+        '{0} {1}:',
+        ' before: {2: 10d}',
+        ' after : {3: 10d}',
+        ' delta : {4:+10d} ({5:=+3d}.{6}%)',
     )).format(
         name,
         ('+' if applied else 'x'),
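The switch from auto-numbered to explicitly indexed format fields (the "add indices to format" item in the commit message) is presumably for Python 2.6 compatibility, which Spack still supported at the time: auto-numbered '{}' fields only exist on Python >= 2.7 / 3.1. A quick illustration:

    '{} {}:'.format('specs', 'total')    # ValueError on Python 2.6
    '{0} {1}:'.format('specs', 'total')  # works on 2.6 and later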
lib/spack/spack/cmd/ci.py

@@ -55,10 +55,14 @@ def setup_parser(subparser):
         "should be checked out as a step in each generated job. " +
         "This argument is ignored if no --spack-repo is provided.")
     generate.add_argument(
-        '--optimize', action='store_true',
+        '--optimize', action='store_true', default=False,
         help="(Experimental) run the generated document through a series of "
              "optimization passes designed to reduce the size of the "
              "generated file.")
+    generate.add_argument(
+        '--dependencies', action='store_true', default=False,
+        help="(Experimental) disable DAG scheduling; use "
+             ' "plain" dependencies.')
     generate.set_defaults(func=ci_generate)

     # Check a spec against mirror. Rebuild, create buildcache and push to
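With this in place, the workaround is enabled via spack ci generate --dependencies; the flag defaults to off, so existing pipelines keep DAG scheduling through needs: unless a user opts in.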
@@ -81,6 +85,7 @@ def ci_generate(args):
     spack_repo = args.spack_repo
     spack_ref = args.spack_ref
     run_optimizer = args.optimize
+    use_dependencies = args.dependencies

     if not output_file:
         gen_ci_dir = os.getcwd()
@@ -93,7 +98,8 @@ def ci_generate(args):
     # Generate the jobs
     spack_ci.generate_gitlab_ci_yaml(
         env, True, output_file, spack_repo, spack_ref,
-        run_optimizer=run_optimizer)
+        run_optimizer=run_optimizer,
+        use_dependencies=use_dependencies)

     if copy_yaml_to:
         copy_to_dir = os.path.dirname(copy_yaml_to)
lib/spack/spack/test/ci.py

@@ -15,6 +15,17 @@
 import spack.util.web as web_util
 import spack.util.gpg

+import spack.ci_optimization as ci_opt
+import spack.ci_needs_workaround as cinw
+import spack.util.spack_yaml as syaml
+import itertools as it
+import collections
+try:
+    # dynamically import to keep vermin from complaining
+    collections_abc = __import__('collections.abc')
+except ImportError:
+    collections_abc = collections
+

 @pytest.fixture
 def tmp_scope():
@@ -166,3 +177,162 @@ def test_read_write_cdash_ids(config, tmp_scope, tmpdir, mock_packages):
     read_cdashid = ci.read_cdashid_from_mirror(mock_spec, mirror_url)

     assert(str(read_cdashid) == orig_cdashid)
+
+
+def test_ci_workarounds():
+    fake_root_spec = 'x' * 544
+    fake_spack_ref = 'x' * 40
+
+    common_variables = {
+        'SPACK_COMPILER_ACTION': 'NONE',
+        'SPACK_IS_PR_PIPELINE': 'False',
+    }
+
+    common_script = ['spack ci rebuild']
+
+    common_before_script = [
+        'git clone "https://github.com/spack/spack"',
+        ' && '.join((
+            'pushd ./spack',
+            'git checkout "{ref}"'.format(ref=fake_spack_ref),
+            'popd')),
+        '. "./spack/share/spack/setup-env.sh"'
+    ]
+
+    def make_build_job(name, deps, stage, use_artifact_buildcache, optimize,
+                       use_dependencies):
+        variables = common_variables.copy()
+        variables['SPACK_JOB_SPEC_PKG_NAME'] = name
+
+        result = {
+            'stage': stage,
+            'tags': ['tag-0', 'tag-1'],
+            'artifacts': {
+                'paths': [
+                    'jobs_scratch_dir',
+                    'cdash_report',
+                    name + '.spec.yaml',
+                    name + '.cdashid',
+                    name
+                ],
+                'when': 'always'
+            },
+            'retry': {'max': 2, 'when': ['always']},
+            'after_script': ['rm -rf "./spack"'],
+            'image': {'name': 'spack/centos7', 'entrypoint': ['']},
+        }
+
+        if optimize:
+            result['extends'] = ['.c0', '.c1', '.c2']
+        else:
+            variables['SPACK_ROOT_SPEC'] = fake_root_spec
+            result['script'] = common_script
+            result['before_script'] = common_before_script
+
+        result['variables'] = variables
+
+        if use_dependencies:
+            result['dependencies'] = (
+                list(deps) if use_artifact_buildcache
+                else [])
+        else:
+            result['needs'] = [
+                {'job': dep, 'artifacts': use_artifact_buildcache}
+                for dep in deps]
+
+        return {name: result}
+
+    def make_rebuild_index_job(
+            use_artifact_buildcache, optimize, use_dependencies):
+
+        result = {
+            'stage': 'stage-rebuild-index',
+            'script': 'spack buildcache update-index -d s3://mirror',
+            'tags': ['tag-0', 'tag-1'],
+            'image': {'name': 'spack/centos7', 'entrypoint': ['']},
+            'after_script': ['rm -rf "./spack"'],
+        }
+
+        if optimize:
+            result['extends'] = '.c1'
+        else:
+            result['before_script'] = common_before_script
+
+        return {'rebuild-index': result}
+
+    def make_factored_jobs(optimize):
+        return {
+            '.c0': {'script': common_script},
+            '.c1': {'before_script': common_before_script},
+            '.c2': {'variables': {'SPACK_ROOT_SPEC': fake_root_spec}}
+        } if optimize else {}
+
+    def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
+        result = {}
+
+        result.update(make_build_job(
+            'pkg-a', [], 'stage-0', use_artifact_buildcache, optimize,
+            use_dependencies))
+
+        result.update(make_build_job(
+            'pkg-b', ['pkg-a'], 'stage-1', use_artifact_buildcache, optimize,
+            use_dependencies))
+
+        result.update(make_build_job(
+            'pkg-c', ['pkg-a', 'pkg-b'], 'stage-2', use_artifact_buildcache,
+            optimize, use_dependencies))
+
+        result.update(make_rebuild_index_job(
+            use_artifact_buildcache, optimize, use_dependencies))
+
+        result.update(make_factored_jobs(optimize))
+
+        return result
+
+    def sort_yaml_obj(obj):
+        if isinstance(obj, collections_abc.Mapping):
+            result = syaml.syaml_dict()
+            for k in sorted(obj.keys(), key=str):
+                result[k] = sort_yaml_obj(obj[k])
+            return result
+
+        if (isinstance(obj, collections_abc.Sequence) and
+                not isinstance(obj, str)):
+            return syaml.syaml_list(sorted(
+                (sort_yaml_obj(x) for x in obj), key=str))
+
+        return obj
+
+    # test every combination of:
+    #     use artifact buildcache: true or false
+    #     run optimization pass: true or false
+    #     convert needs to dependencies: true or false
+    for use_ab in (False, True):
+        original = make_yaml_obj(
+            use_artifact_buildcache=use_ab,
+            optimize=False,
+            use_dependencies=False)
+
+        for opt, deps in it.product(*(((False, True),) * 2)):
+            # neither optimizing nor converting needs->dependencies
+            if not (opt or deps):
+                # therefore, nothing to test
+                continue
+
+            predicted = make_yaml_obj(
+                use_artifact_buildcache=use_ab,
+                optimize=opt,
+                use_dependencies=deps)
+
+            actual = original.copy()
+            if opt:
+                actual = ci_opt.optimizer(actual)
+            if deps:
+                actual = cinw.needs_to_dependencies(actual)
+
+            predicted = syaml.dump_config(
+                sort_yaml_obj(predicted), default_flow_style=True)
+            actual = syaml.dump_config(
+                sort_yaml_obj(actual), default_flow_style=True)
+
+            assert(predicted == actual)
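The test builds the expected pipeline by hand for every combination of artifact buildcache, optimization, and needs-to-dependencies conversion, then deep-sorts both the prediction and the transformed output before comparing their YAML dumps. Sorting with key=str appears to be what addresses the "fix sorting issue between disparate types" item in the commit message: Python 3 refuses to order mixed-type keys (e.g. str against int) directly, so everything is compared by its string form.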
share/spack/spack-completion.bash

@@ -474,7 +474,7 @@ _spack_ci() {
 }

 _spack_ci_generate() {
-    SPACK_COMPREPLY="-h --help --output-file --copy-to --spack-repo --spack-ref --optimize"
+    SPACK_COMPREPLY="-h --help --output-file --copy-to --spack-repo --spack-ref --optimize --dependencies"
 }

 _spack_ci_rebuild() {