Merge tag 'v0.15.0' into features/shared
@@ -165,7 +165,7 @@ of environments:
# Extra instructions
extra_instructions:
final: |
RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc
RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc

# Labels for the image
labels:
@@ -167,15 +167,6 @@ Any directory can be treated as an environment if it contains a file

$ spack env activate -d /path/to/directory

Spack commands that are environment sensitive will also act on the
environment any time the current working directory contains a
``spack.yaml`` file. Changing working directory to a directory
containing a ``spack.yaml`` file is equivalent to the command:

.. code-block:: console

$ spack env activate -d /path/to/dir --without-view

Anonymous specs can be created in place using the command:

.. code-block:: console
@@ -45,6 +45,7 @@ for setting up a build pipeline are as follows:
tags:
- <custom-tag>
script:
- spack env activate .
- spack ci generate
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
artifacts:
@@ -384,6 +385,7 @@ a custom spack and make sure the generated rebuild jobs will clone it too:
- git clone ${SPACK_REPO} --branch ${SPACK_REF}
- . ./spack/share/spack/setup-env.sh
script:
- spack env activate .
- spack ci generate
--spack-repo ${SPACK_REPO} --spack-ref ${SPACK_REF}
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/pipeline.yml"
@@ -5,7 +5,7 @@

#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 14, 2)
spack_version_info = (0, 15, 0)

#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)
@@ -25,6 +25,7 @@

import spack.cmd
import spack.config as config
import spack.database as spack_db
import spack.fetch_strategy as fs
import spack.util.gpg
import spack.relocate as relocate
@@ -32,7 +33,6 @@
import spack.mirror
import spack.util.url as url_util
import spack.util.web as web_util

from spack.spec import Spec
from spack.stage import Stage
from spack.util.gpg import Gpg
@@ -282,31 +282,47 @@ def sign_tarball(key, force, specfile_path):
def generate_package_index(cache_prefix):
"""Create the build cache index page.

Creates (or replaces) the "index.html" page at the location given in
Creates (or replaces) the "index.json" page at the location given in
cache_prefix. This page contains a link for each binary package (*.yaml)
and public key (*.key) under cache_prefix.
"""
tmpdir = tempfile.mkdtemp()
try:
index_html_path = os.path.join(tmpdir, 'index.html')
file_list = (
entry
for entry in web_util.list_url(cache_prefix)
if (entry.endswith('.yaml')
or entry.endswith('.key')))
db_root_dir = os.path.join(tmpdir, 'db_root')
db = spack_db.Database(None, db_dir=db_root_dir,
enable_transaction_locking=False,
record_fields=['spec', 'ref_count'])

with open(index_html_path, 'w') as f:
f.write(BUILD_CACHE_INDEX_TEMPLATE.format(
title='Spack Package Index',
path_list='\n'.join(
BUILD_CACHE_INDEX_ENTRY_TEMPLATE.format(path=path)
for path in file_list)))
file_list = (
entry
for entry in web_util.list_url(cache_prefix)
if entry.endswith('.yaml'))

tty.debug('Retrieving spec.yaml files from {0} to build index'.format(
cache_prefix))
for file_path in file_list:
try:
yaml_url = url_util.join(cache_prefix, file_path)
tty.debug('fetching {0}'.format(yaml_url))
_, _, yaml_file = web_util.read_from_url(yaml_url)
yaml_contents = codecs.getreader('utf-8')(yaml_file).read()
# yaml_obj = syaml.load(yaml_contents)
# s = Spec.from_yaml(yaml_obj)
s = Spec.from_yaml(yaml_contents)
db.add(s, None)
except (URLError, web_util.SpackWebError) as url_err:
tty.error('Error reading spec.yaml: {0}'.format(file_path))
tty.error(url_err)

try:
index_json_path = os.path.join(db_root_dir, 'index.json')
with open(index_json_path, 'w') as f:
db._write_to_file(f)

web_util.push_to_url(
index_html_path,
url_util.join(cache_prefix, 'index.html'),
index_json_path,
url_util.join(cache_prefix, 'index.json'),
keep_original=False,
extra_args={'ContentType': 'text/html'})
extra_args={'ContentType': 'application/json'})
finally:
shutil.rmtree(tmpdir)
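For orientation, the index.json that generate_package_index pushes is a database-style document restricted to the 'spec' and 'ref_count' record fields. A minimal sketch of its shape, written as a Python literal with purely illustrative hash, database-version, and package values (the real layout is defined by the database_index schema added later in this commit):

.. code-block:: python

    # Hypothetical excerpt of a build cache index.json, shown as a Python dict.
    # The hash, database version, and package details are illustrative only.
    index = {
        'database': {
            'version': '5',
            'installs': {
                '0123456789abcdef0123456789abcdef': {
                    'spec': {'zlib': {'version': '1.2.11'}},  # trimmed node dict
                    'ref_count': 0,
                },
            },
        },
    }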
@@ -825,49 +841,55 @@ def get_spec(spec=None, force=False):
return try_download_specs(urls=urls, force=force)


def get_specs(force=False, allarch=False):
def get_specs(allarch=False):
"""
Get spec.yaml's for build caches available on mirror
"""
global _cached_specs
arch = architecture.Arch(architecture.platform(),
'default_os', 'default_target')
arch_pattern = ('([^-]*-[^-]*-[^-]*)')
if not allarch:
arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os)

regex_pattern = '%s(.*)(spec.yaml$)' % (arch_pattern)
arch_re = re.compile(regex_pattern)

if not spack.mirror.MirrorCollection():
tty.debug("No Spack mirrors are currently configured")
return {}

urls = set()
for mirror in spack.mirror.MirrorCollection().values():
fetch_url_build_cache = url_util.join(
mirror.fetch_url, _build_cache_relative_path)

mirror_dir = url_util.local_file_path(fetch_url_build_cache)
if mirror_dir:
tty.msg("Finding buildcaches in %s" % mirror_dir)
if os.path.exists(mirror_dir):
files = os.listdir(mirror_dir)
for file in files:
m = arch_re.search(file)
if m:
link = url_util.join(fetch_url_build_cache, file)
urls.add(link)
else:
tty.msg("Finding buildcaches at %s" %
url_util.format(fetch_url_build_cache))
p, links = web_util.spider(
url_util.join(fetch_url_build_cache, 'index.html'))
for link in links:
m = arch_re.search(link)
if m:
urls.add(link)
tty.msg("Finding buildcaches at %s" %
url_util.format(fetch_url_build_cache))

return try_download_specs(urls=urls, force=force)
index_url = url_util.join(fetch_url_build_cache, 'index.json')

try:
_, _, file_stream = web_util.read_from_url(
index_url, 'application/json')
index_object = codecs.getreader('utf-8')(file_stream).read()
except (URLError, web_util.SpackWebError) as url_err:
tty.error('Failed to read index {0}'.format(index_url))
tty.debug(url_err)
# Just return whatever specs we may already have cached
return _cached_specs

tmpdir = tempfile.mkdtemp()
index_file_path = os.path.join(tmpdir, 'index.json')
with open(index_file_path, 'w') as fd:
fd.write(index_object)

db_root_dir = os.path.join(tmpdir, 'db_root')
db = spack_db.Database(None, db_dir=db_root_dir,
enable_transaction_locking=False)

db._read_from_file(index_file_path)
spec_list = db.query_local(installed=False)

for indexed_spec in spec_list:
spec_arch = architecture.arch_for_spec(indexed_spec.architecture)
if (allarch is True or spec_arch == arch):
_cached_specs.add(indexed_spec)

return _cached_specs


def get_keys(install=False, trust=False, force=False):
@@ -198,6 +198,9 @@ def set_compiler_environment_variables(pkg, env):
compiler = pkg.compiler
spec = pkg.spec

# Make sure the executables for this compiler exist
compiler.verify_executables()

# Set compiler variables used by CMake and autotools
assert all(key in compiler.link_paths for key in (
'cc', 'cxx', 'f77', 'fc'))
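The verify_executables() call above raises rather than returning a status. A minimal sketch of how a caller might surface that failure, assuming the reworked CompilerAccessError from this commit (the check_compiler helper is hypothetical, not Spack API):

.. code-block:: python

    from spack.compiler import CompilerAccessError

    def check_compiler(compiler):
        # Fail early, before any build steps run, if cc/cxx/f77/fc are
        # missing or not executable for the current user.
        try:
            compiler.verify_executables()
        except CompilerAccessError as err:
            # The error message names the compiler spec and the bad paths.
            print('compiler not usable: {0}'.format(err))
            raise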
@@ -118,13 +118,15 @@ def _do_patch_config_files(self):
config_file = 'config.{0}'.format(config_name)
if os.path.exists(config_file):
# First search the top-level source directory
my_config_files[config_name] = config_file
my_config_files[config_name] = os.path.join(
self.configure_directory, config_file)
else:
# Then search in all sub directories recursively.
# We would like to use AC_CONFIG_AUX_DIR, but not all packages
# ship with their configure.in or configure.ac.
config_path = next((os.path.join(r, f)
for r, ds, fs in os.walk('.') for f in fs
for r, ds, fs in os.walk(
self.configure_directory) for f in fs
if f == config_file), None)
my_config_files[config_name] = config_path
@@ -612,7 +612,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
if 'enable-debug-messages' in gitlab_ci:
debug_flag = '-d '

job_scripts = ['spack {0}ci rebuild'.format(debug_flag)]
job_scripts = [
'spack env activate .',
'spack {0}ci rebuild'.format(debug_flag),
]

compiler_action = 'NONE'
if len(phases) > 1:
@@ -1025,9 +1028,9 @@ def read_cdashid_from_mirror(spec, mirror_url):
def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id):
if mirror_url:
tty.debug('Creating buildcache')
buildcache._createtarball(env, yaml_path, None, True, False,
mirror_url, None, True, False, False, True,
False)
buildcache._createtarball(env, spec_yaml=yaml_path, add_deps=False,
output_location=mirror_url, force=True,
allow_root=True)
if build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
build_id, mirror_url))
@@ -17,6 +17,19 @@
import spack.util.spack_yaml as syaml


def sort_yaml_obj(obj):
if isinstance(obj, collections_abc.Mapping):
return syaml.syaml_dict(
(k, sort_yaml_obj(v))
for k, v in
sorted(obj.items(), key=(lambda item: str(item[0]))))

if isinstance(obj, collections_abc.Sequence) and not isinstance(obj, str):
return syaml.syaml_list(sort_yaml_obj(x) for x in obj)

return obj


def matches(obj, proto):
"""Returns True if the test object "obj" matches the prototype object
"proto".
@@ -235,8 +248,10 @@ def try_optimization_pass(name, yaml, optimization_pass, *args, **kwargs):
# pass was not applied
return (yaml, new_yaml, False, other_results)

pre_size = len(syaml.dump_config(yaml, default_flow_style=True))
post_size = len(syaml.dump_config(new_yaml, default_flow_style=True))
pre_size = len(syaml.dump_config(
sort_yaml_obj(yaml), default_flow_style=True))
post_size = len(syaml.dump_config(
sort_yaml_obj(new_yaml), default_flow_style=True))

# pass makes the size worse: not applying
applied = (post_size <= pre_size)
@@ -281,7 +296,7 @@ def build_histogram(iterator, key):
continue

value_hash = hashlib.sha1()
value_hash.update(syaml.dump_config(val).encode())
value_hash.update(syaml.dump_config(sort_yaml_obj(val)).encode())
value_hash = value_hash.hexdigest()

buckets[value_hash] += 1
@@ -292,7 +307,8 @@ def build_histogram(iterator, key):


def optimizer(yaml):
original_size = len(syaml.dump_config(yaml, default_flow_style=True))
original_size = len(syaml.dump_config(
sort_yaml_obj(yaml), default_flow_style=True))

# try factoring out commonly repeated portions
common_job = {
@@ -369,7 +385,8 @@ def optimizer(yaml):
common_subobject,
{'variables': {'SPACK_ROOT_SPEC': spec}})

new_size = len(syaml.dump_config(yaml, default_flow_style=True))
new_size = len(syaml.dump_config(
sort_yaml_obj(yaml), default_flow_style=True))

print('\n')
print_delta('overall summary', original_size, new_size)
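sort_yaml_obj exists so that the size and hash comparisons above do not depend on mapping key order. A standalone sketch of the same idea using plain dicts and json rather than Spack's syaml types:

.. code-block:: python

    import json

    def sort_obj(obj):
        # Recursively order mapping keys so that logically equal objects
        # serialize to identical strings.
        if isinstance(obj, dict):
            return {k: sort_obj(obj[k]) for k in sorted(obj, key=str)}
        if isinstance(obj, list):
            return [sort_obj(x) for x in obj]
        return obj

    a = {'script': ['spack ci rebuild'], 'tags': ['docker']}
    b = {'tags': ['docker'], 'script': ['spack ci rebuild']}
    assert json.dumps(sort_obj(a)) == json.dumps(sort_obj(b))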
@@ -1,45 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import spack.cmd.configure as cfg
import llnl.util.tty as tty

from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.cmake import CMakePackage
from spack.build_systems.qmake import QMakePackage
from spack.build_systems.scons import SConsPackage
from spack.build_systems.waf import WafPackage
from spack.build_systems.python import PythonPackage
from spack.build_systems.perl import PerlPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.sip import SIPPackage

description = 'DEPRECATED: stops at build stage when installing a package'
section = "build"
level = "long"


build_system_to_phase = {
AutotoolsPackage: 'build',
CMakePackage: 'build',
QMakePackage: 'build',
SConsPackage: 'build',
WafPackage: 'build',
PythonPackage: 'build',
PerlPackage: 'build',
MesonPackage: 'build',
SIPPackage: 'build',
}


def setup_parser(subparser):
cfg.setup_parser(subparser)


def build(parser, args):
tty.warn("This command is deprecated. Use `spack install --until` to"
" select an end phase instead. The `spack build` command will be"
" removed in a future version of Spack")
cfg._stop_at_phase_during_install(args, build, build_system_to_phase)
@@ -68,9 +68,9 @@ def setup_parser(subparser):
type=str,
help="URL of the mirror where " +
"buildcaches will be written.")
create.add_argument('--no-rebuild-index', action='store_true',
default=False, help="skip rebuilding index after " +
"building package(s)")
create.add_argument('--rebuild-index', action='store_true',
default=False, help="Regenerate buildcache index " +
"after building package(s)")
create.add_argument('-y', '--spec-yaml', default=None,
help='Create buildcache entry for spec from yaml file')
create.add_argument('--only', default='package,dependencies',
@@ -108,8 +108,6 @@ def setup_parser(subparser):
action='store_true',
dest='variants',
help='show variants in output (can be long)')
listcache.add_argument('-f', '--force', action='store_true',
help="force new download of specs")
listcache.add_argument('-a', '--allarch', action='store_true',
help="list specs for all available architectures" +
" instead of default platform and OS")
@@ -291,7 +289,7 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
specs_from_cli = []
has_errors = False
allarch = other_arch
specs = bindist.get_specs(force, allarch)
specs = bindist.get_specs(allarch)
for pkg in pkgs:
matches = []
tty.msg("buildcache spec(s) matching %s \n" % pkg)
@@ -323,9 +321,10 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
return specs_from_cli


def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
output_location, key, force, rel, unsigned, allow_root,
no_rebuild_index):
def _createtarball(env, spec_yaml=None, packages=None, add_spec=True,
add_deps=True, output_location=os.getcwd(),
signing_key=None, force=False, make_relative=False,
unsigned=False, allow_root=False, rebuild_index=False):
if spec_yaml:
packages = set()
with open(spec_yaml, 'r') as fd:
@@ -355,10 +354,6 @@ def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
msg = 'Buildcache files will be output to %s/build_cache' % outdir
tty.msg(msg)

signkey = None
if key:
signkey = key

matches = find_matching_specs(pkgs, env=env)

if matches:
@@ -398,9 +393,9 @@ def _createtarball(env, spec_yaml, packages, add_spec, add_deps,

for spec in specs:
tty.debug('creating binary cache file for package %s ' % spec.format())
bindist.build_tarball(spec, outdir, force, rel,
unsigned, allow_root, signkey,
not no_rebuild_index)
bindist.build_tarball(spec, outdir, force, make_relative,
unsigned, allow_root, signing_key,
rebuild_index)


def createtarball(args):
@@ -447,9 +442,12 @@ def createtarball(args):
add_spec = ('package' in args.things_to_install)
add_deps = ('dependencies' in args.things_to_install)

_createtarball(env, args.spec_yaml, args.specs, add_spec, add_deps,
output_location, args.key, args.force, args.rel,
args.unsigned, args.allow_root, args.no_rebuild_index)
_createtarball(env, spec_yaml=args.spec_yaml, packages=args.specs,
add_spec=add_spec, add_deps=add_deps,
output_location=output_location, signing_key=args.key,
force=args.force, make_relative=args.rel,
unsigned=args.unsigned, allow_root=args.allow_root,
rebuild_index=args.rebuild_index)


def installtarball(args):
@@ -458,8 +456,7 @@ def installtarball(args):
tty.die("build cache file installation requires" +
" at least one package spec argument")
pkgs = set(args.specs)
matches = match_downloaded_specs(pkgs, args.multiple, args.force,
args.otherarch)
matches = match_downloaded_specs(pkgs, args.multiple, args.otherarch)

for match in matches:
install_tarball(match, args)
@@ -491,7 +488,7 @@ def install_tarball(spec, args):

def listspecs(args):
"""list binary packages available from mirrors"""
specs = bindist.get_specs(args.force, args.allarch)
specs = bindist.get_specs(args.allarch)
if args.specs:
constraints = set(args.specs)
specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
@@ -38,7 +38,7 @@ def setup_parser(subparser):
generate = subparsers.add_parser('generate', help=ci_generate.__doc__)
generate.add_argument(
'--output-file', default=None,
help="Absolute path to file where generated jobs file should be " +
help="Path to file where generated jobs file should be " +
"written. The default is .gitlab-ci.yml in the root of the " +
"repository.")
generate.add_argument(
@@ -88,10 +88,10 @@ def ci_generate(args):
use_dependencies = args.dependencies

if not output_file:
gen_ci_dir = os.getcwd()
output_file = os.path.join(gen_ci_dir, '.gitlab-ci.yml')
output_file = os.path.abspath(".gitlab-ci.yml")
else:
gen_ci_dir = os.path.dirname(output_file)
output_file_path = os.path.abspath(output_file)
gen_ci_dir = os.path.dirname(output_file_path)
if not os.path.exists(gen_ci_dir):
os.makedirs(gen_ci_dir)
@@ -37,7 +37,7 @@ def setup_parser(subparser):
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
find_parser.add_argument(
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
default=spack.config.default_modify_scope('compilers'),
help="configuration scope to modify")

# Remove
@@ -49,7 +49,7 @@ def setup_parser(subparser):
remove_parser.add_argument('compiler_spec')
remove_parser.add_argument(
'--scope', choices=scopes, metavar=scopes_metavar,
default=spack.config.default_modify_scope(),
default=spack.config.default_modify_scope('compilers'),
help="configuration scope to modify")

# List
@@ -1,85 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import argparse

import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.install as inst

from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.cmake import CMakePackage
from spack.build_systems.qmake import QMakePackage
from spack.build_systems.waf import WafPackage
from spack.build_systems.perl import PerlPackage
from spack.build_systems.intel import IntelPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.sip import SIPPackage

description = 'DEPRECATED: stage and configure a package but do not install'
section = "build"
level = "long"


build_system_to_phase = {
AutotoolsPackage: 'configure',
CMakePackage: 'cmake',
QMakePackage: 'qmake',
WafPackage: 'configure',
PerlPackage: 'configure',
IntelPackage: 'configure',
MesonPackage: 'meson',
SIPPackage: 'configure',
}


def setup_parser(subparser):
subparser.add_argument(
'-v', '--verbose',
action='store_true',
help="print additional output during builds"
)
arguments.add_common_arguments(subparser, ['spec'])


def _stop_at_phase_during_install(args, calling_fn, phase_mapping):
if not args.package:
tty.die("configure requires at least one package argument")

# TODO: to be refactored with code in install
specs = spack.cmd.parse_specs(args.package, concretize=True)
if len(specs) != 1:
tty.error('only one spec can be installed at a time.')
spec = specs.pop()
pkg = spec.package
try:
key = [cls for cls in phase_mapping if isinstance(pkg, cls)].pop()
phase = phase_mapping[key]
# Install package dependencies if needed
parser = argparse.ArgumentParser()
inst.setup_parser(parser)
tty.msg('Checking dependencies for {0}'.format(args.spec[0]))
cli_args = ['-v'] if args.verbose else []
install_args = parser.parse_args(cli_args + ['--only=dependencies'])
install_args.spec = args.spec
inst.install(parser, install_args)
# Install package and stop at the given phase
cli_args = ['-v'] if args.verbose else []
install_args = parser.parse_args(cli_args + ['--only=package'])
install_args.spec = args.spec
inst.install(parser, install_args, stop_at=phase)
except IndexError:
tty.error(
'Package {0} has no {1} phase, or its {1} phase is not separated from install'.format( # NOQA: ignore=E501
spec.name, calling_fn.__name__)
)


def configure(parser, args):
tty.warn("This command is deprecated. Use `spack install --until` to"
" select an end phase instead. The `spack configure` command will"
" be removed in a future version of Spack.")
_stop_at_phase_during_install(args, configure, build_system_to_phase)
@@ -1,20 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.cmd.dev_build
import llnl.util.tty as tty

description = "DEPRECATED: do-it-yourself: build from local source directory"
section = "build"
level = "long"


def setup_parser(subparser):
spack.cmd.dev_build.setup_parser(subparser)


def diy(self, args):
tty.warn("`spack diy` has been renamed to `spack dev-build`."
"The `diy` command will be removed in a future version of Spack")
spack.cmd.dev_build.dev_build(self, args)
@@ -52,6 +52,9 @@ def env_activate_setup_parser(subparser):
shells.add_argument(
'--csh', action='store_const', dest='shell', const='csh',
help="print csh commands to activate the environment")
shells.add_argument(
'--fish', action='store_const', dest='shell', const='fish',
help="print fish commands to activate the environment")

view_options = subparser.add_mutually_exclusive_group()
view_options.add_argument(
@@ -127,6 +130,9 @@ def env_deactivate_setup_parser(subparser):
shells.add_argument(
'--csh', action='store_const', dest='shell', const='csh',
help="print csh commands to deactivate the environment")
shells.add_argument(
'--fish', action='store_const', dest='shell', const='fish',
help="print fish commands to activate the environment")


def env_deactivate(args):
@@ -32,6 +32,9 @@ def setup_parser(subparser):
shells.add_argument(
'--csh', action='store_const', dest='shell', const='csh',
help="print csh commands to load the package")
shells.add_argument(
'--fish', action='store_const', dest='shell', const='fish',
help="print fish commands to load the package")

subparser.add_argument(
'--first',
@@ -39,13 +39,6 @@ def setup_parser(subparser):
arguments.add_common_arguments(cd_group, ['clean', 'dirty'])


def spack_transitive_include_path():
return ';'.join(
os.path.join(dep, 'include')
for dep in os.environ['SPACK_DEPENDENCIES'].split(os.pathsep)
)


def write_spconfig(package, dirty):
# Set-up the environment
spack.build_environment.setup_package(package, dirty)
@@ -57,8 +50,8 @@ def write_spconfig(package, dirty):
paths = os.environ['PATH'].split(':')
paths = [item for item in paths if 'spack/env' not in item]
env['PATH'] = ':'.join(paths)
env['SPACK_TRANSITIVE_INCLUDE_PATH'] = spack_transitive_include_path()
env['CMAKE_PREFIX_PATH'] = os.environ['CMAKE_PREFIX_PATH']
env['SPACK_INCLUDE_DIRS'] = os.environ['SPACK_INCLUDE_DIRS']
env['CC'] = os.environ['SPACK_CC']
env['CXX'] = os.environ['SPACK_CXX']
env['FC'] = os.environ['SPACK_FC']
@@ -84,7 +77,7 @@ def cmdlist(str):
if name.find('PATH') < 0:
fout.write('env[%s] = %s\n' % (repr(name), repr(val)))
else:
if name == 'SPACK_TRANSITIVE_INCLUDE_PATH':
if name == 'SPACK_INCLUDE_DIRS':
sep = ';'
else:
sep = ':'
@@ -31,6 +31,9 @@ def setup_parser(subparser):
shells.add_argument(
'--csh', action='store_const', dest='shell', const='csh',
help="print csh commands to activate the environment")
shells.add_argument(
'--fish', action='store_const', dest='shell', const='fish',
help="print fish commands to load the package")

subparser.add_argument('-a', '--all', action='store_true',
help='unload all loaded Spack packages.')
@@ -1,214 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# TODO: This will be merged into the buildcache command once
# everything is working.

import os
import re
import sys

try:
import boto3
import botocore
have_boto3_support = True
except ImportError:
have_boto3_support = False

import llnl.util.tty as tty

from spack.error import SpackError
import spack.tengine as template_engine
from spack.spec import Spec


import spack.binary_distribution as bindist


description = "temporary command to upload buildcaches to 's3.spack.io'"
section = "packaging"
level = "long"


def setup_parser(subparser):
setup_parser.parser = subparser
subparsers = subparser.add_subparsers(help='upload-s3 sub-commands')

# sub-command to upload a built spec to s3
spec = subparsers.add_parser('spec', help=upload_spec.__doc__)

spec.add_argument('-s', '--spec', default=None,
help='Spec to upload')

spec.add_argument('-y', '--spec-yaml', default=None,
help='Path to spec yaml file containing spec to upload')

spec.add_argument('-b', '--base-dir', default=None,
help='Path to root of buildcaches')

spec.add_argument('-e', '--endpoint-url',
default='https://s3.spack.io', help='URL of mirror')

spec.set_defaults(func=upload_spec)

# sub-command to update the index of a buildcache on s3
index = subparsers.add_parser('index', help=update_index.__doc__)

index.add_argument('-e', '--endpoint-url',
default='https://s3.spack.io', help='URL of mirror')

index.set_defaults(func=update_index)


def get_s3_session(endpoint_url):
if not have_boto3_support:
raise SpackError('boto3 module not available')

session = boto3.Session()
s3 = session.resource('s3', endpoint_url=endpoint_url)

bucket_names = []
for bucket in s3.buckets.all():
bucket_names.append(bucket.name)

if len(bucket_names) > 1:
raise SpackError('More than one bucket associated with credentials')

bucket_name = bucket_names[0]

return s3, bucket_name


def update_index(args):
"""Update the index of an s3 buildcache"""
s3, bucket_name = get_s3_session(args.endpoint_url)

bucket = s3.Bucket(bucket_name)
exists = True

try:
s3.meta.client.head_bucket(Bucket=bucket_name)
except botocore.exceptions.ClientError as e:
# If a client error is thrown, then check that it was a 404 error.
# If it was a 404 error, then the bucket does not exist.
error_code = e.response['Error']['Code']
if error_code == '404':
exists = False

if not exists:
tty.error('S3 bucket "{0}" does not exist'.format(bucket_name))
sys.exit(1)

build_cache_dir = os.path.join(
'mirror', bindist.build_cache_relative_path())

spec_yaml_regex = re.compile('{0}/(.+\\.spec\\.yaml)$'.format(
build_cache_dir))
spack_regex = re.compile('{0}/([^/]+)/.+\\.spack$'.format(
build_cache_dir))

top_level_keys = set()

for key in bucket.objects.all():
m = spec_yaml_regex.search(key.key)
if m:
top_level_keys.add(m.group(1))
print(m.group(1))
continue

m = spack_regex.search(key.key)
if m:
top_level_keys.add(m.group(1))
print(m.group(1))
continue

index_data = {
'top_level_keys': top_level_keys,
}

env = template_engine.make_environment()
template_dir = 'misc'
index_template = os.path.join(template_dir, 'buildcache_index.html')
t = env.get_template(index_template)
contents = t.render(index_data)

index_key = os.path.join(build_cache_dir, 'index.html')

tty.debug('Generated index:')
tty.debug(contents)
tty.debug('Pushing it to {0} -> {1}'.format(bucket_name, index_key))

s3_obj = s3.Object(bucket_name, index_key)
s3_obj.put(Body=contents, ACL='public-read')


def upload_spec(args):
"""Upload a spec to s3 bucket"""
if not args.spec and not args.spec_yaml:
tty.error('Cannot upload spec without spec arg or path to spec yaml')
sys.exit(1)

if not args.base_dir:
tty.error('No base directory for buildcache specified')
sys.exit(1)

if args.spec:
try:
spec = Spec(args.spec)
spec.concretize()
except Exception as e:
tty.debug(e)
tty.error('Unable to concrectize spec from string {0}'.format(
args.spec))
sys.exit(1)
else:
try:
with open(args.spec_yaml, 'r') as fd:
spec = Spec.from_yaml(fd.read())
except Exception as e:
tty.debug(e)
tty.error('Unable to concrectize spec from yaml {0}'.format(
args.spec_yaml))
sys.exit(1)

s3, bucket_name = get_s3_session(args.endpoint_url)

build_cache_dir = bindist.build_cache_relative_path()

tarball_key = os.path.join(
build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
tarball_path = os.path.join(args.base_dir, tarball_key)

specfile_key = os.path.join(
build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
specfile_path = os.path.join(args.base_dir, specfile_key)

cdashidfile_key = os.path.join(
build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
cdashidfile_path = os.path.join(args.base_dir, cdashidfile_key)

tty.msg('Uploading {0}'.format(tarball_key))
s3.meta.client.upload_file(
tarball_path, bucket_name,
os.path.join('mirror', tarball_key),
ExtraArgs={'ACL': 'public-read'})

tty.msg('Uploading {0}'.format(specfile_key))
s3.meta.client.upload_file(
specfile_path, bucket_name,
os.path.join('mirror', specfile_key),
ExtraArgs={'ACL': 'public-read'})

if os.path.exists(cdashidfile_path):
tty.msg('Uploading {0}'.format(cdashidfile_key))
s3.meta.client.upload_file(
cdashidfile_path, bucket_name,
os.path.join('mirror', cdashidfile_key),
ExtraArgs={'ACL': 'public-read'})


def upload_s3(parser, args):
if args.func:
args.func(args)
@@ -27,12 +27,6 @@
__all__ = ['Compiler']


def _verify_executables(*paths):
for path in paths:
if not os.path.isfile(path) and os.access(path, os.X_OK):
raise CompilerAccessError(path)


@llnl.util.lang.memoized
def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
"""Invokes the compiler at a given path passing a single
@@ -158,6 +152,10 @@ def _parse_non_system_link_dirs(string):
"""
link_dirs = _parse_link_paths(string)

# Remove directories that do not exist. Some versions of the Cray compiler
# report nonexistent directories
link_dirs = [d for d in link_dirs if os.path.isdir(d)]

# Return set of directories containing needed compiler libs, minus
# system paths. Note that 'filter_system_paths' only checks for an
# exact match, while 'in_system_subdirectory' checks if a path contains
@@ -271,20 +269,16 @@ def __init__(self, cspec, operating_system, target,
self.extra_rpaths = extra_rpaths
self.enable_implicit_rpaths = enable_implicit_rpaths

def check(exe):
if exe is None:
return None
_verify_executables(exe)
return exe

self.cc = check(paths[0])
self.cxx = check(paths[1])
self.cc = paths[0]
self.cxx = paths[1]
self.f77 = None
self.fc = None
if len(paths) > 2:
self.f77 = check(paths[2])
self.f77 = paths[2]
if len(paths) == 3:
self.fc = self.f77
else:
self.fc = check(paths[3])
self.fc = paths[3]

self.environment = environment
self.extra_rpaths = extra_rpaths or []
@@ -298,6 +292,31 @@ def check(exe):
if value is not None:
self.flags[flag] = tokenize_flags(value)

def verify_executables(self):
"""Raise an error if any of the compiler executables is not valid.

This method confirms that for all of the compilers (cc, cxx, f77, fc)
that have paths, those paths exist and are executable by the current
user.
Raises a CompilerAccessError if any of the non-null paths for the
compiler are not accessible.
"""
def accessible_exe(exe):
# compilers may contain executable names (on Cray or user edited)
if not os.path.isabs(exe):
exe = spack.util.executable.which_string(exe)
if not exe:
return False
return os.path.isfile(exe) and os.access(exe, os.X_OK)

# setup environment before verifying in case we have executable names
# instead of absolute paths
with self._compiler_environment():
missing = [cmp for cmp in (self.cc, self.cxx, self.f77, self.fc)
if cmp and not accessible_exe(cmp)]
if missing:
raise CompilerAccessError(self, missing)

@property
def version(self):
return self.spec.version
@@ -575,10 +594,10 @@ def _compiler_environment(self):


class CompilerAccessError(spack.error.SpackError):

def __init__(self, path):
super(CompilerAccessError, self).__init__(
"'%s' is not a valid compiler." % path)
def __init__(self, compiler, paths):
msg = "Compiler '%s' has executables that are missing" % compiler.spec
msg += " or are not executable: %s" % paths
super(CompilerAccessError, self).__init__(msg)


class InvalidCompilerError(spack.error.SpackError):
@@ -91,16 +91,24 @@ def c11_flag(self):

@property
def cc_pic_flag(self):
if self.is_clang_based:
return "-fPIC"
return "-h PIC"

@property
def cxx_pic_flag(self):
if self.is_clang_based:
return "-fPIC"
return "-h PIC"

@property
def f77_pic_flag(self):
if self.is_clang_based:
return "-fPIC"
return "-h PIC"

@property
def fc_pic_flag(self):
if self.is_clang_based:
return "-fPIC"
return "-h PIC"
@@ -11,7 +11,8 @@
"0.14.0": "0.14.0",
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.14.3": "0.14.3"
"0.15": "0.15",
"0.15.0": "0.15.0"
}
},
"ubuntu:16.04": {
@@ -26,7 +27,8 @@
"0.14.0": "0.14.0",
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.14.3": "0.14.3"
"0.15": "0.15",
"0.15.0": "0.15.0"
}
},
"centos:7": {
@@ -41,7 +43,8 @@
"0.14.0": "0.14.0",
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.14.3": "0.14.3"
"0.15": "0.15",
"0.15.0": "0.15.0"
}
},
"centos:6": {
@@ -56,7 +59,8 @@
"0.14.0": "0.14.0",
"0.14.1": "0.14.1",
"0.14.2": "0.14.2",
"0.14.3": "0.14.3"
"0.15": "0.15",
"0.15.0": "0.15.0"
}
}
}
}
@@ -48,6 +48,12 @@
from spack.util.crypto import bit_length
from spack.version import Version


@contextlib.contextmanager
def nullcontext(*args, **kwargs):
yield


# TODO: Provide an API automatically retyring a build after detecting and
# TODO: clearing a failure.

@@ -87,6 +93,17 @@
# Types of dependencies tracked by the database
_tracked_deps = ('link', 'run')

# Default list of fields written for each install record
default_install_record_fields = [
'spec',
'ref_count',
'path',
'installed',
'explicit',
'installation_time',
'deprecated_for',
]


def _now():
"""Returns the time since the epoch"""
@@ -187,17 +204,17 @@ def install_type_matches(self, installed):
else:
return InstallStatuses.MISSING in installed

def to_dict(self):
rec_dict = {
'spec': self.spec.to_node_dict(),
'path': self.path,
'installed': self.installed,
'ref_count': self.ref_count,
'explicit': self.explicit,
'installation_time': self.installation_time,
}
if self.deprecated_for:
rec_dict.update({'deprecated_for': self.deprecated_for})
def to_dict(self, include_fields=default_install_record_fields):
rec_dict = {}

for field_name in include_fields:
if field_name == 'spec':
rec_dict.update({'spec': self.spec.to_node_dict()})
elif field_name == 'deprecated_for' and self.deprecated_for:
rec_dict.update({'deprecated_for': self.deprecated_for})
else:
rec_dict.update({field_name: getattr(self, field_name)})

return rec_dict

@classmethod
@@ -206,9 +223,12 @@ def from_dict(cls, spec, dictionary):
d.pop('spec', None)

# Old databases may have "None" for path for externals
if d['path'] == 'None':
if 'path' not in d or d['path'] == 'None':
d['path'] = None

if 'installed' not in d:
d['installed'] = False

return InstallRecord(spec, **d)


@@ -275,7 +295,8 @@ class Database(object):
_prefix_failures = {}

def __init__(self, root, db_dir=None, upstream_dbs=None,
is_upstream=False):
is_upstream=False, enable_transaction_locking=True,
record_fields=default_install_record_fields):
"""Create a Database for Spack installations under ``root``.

A Database is a cache of Specs data from ``$prefix/spec.yaml``
@@ -293,6 +314,12 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
Caller may optionally provide a custom ``db_dir`` parameter
where data will be stored. This is intended to be used for
testing the Database class.

This class supports writing buildcache index files, in which case
certain fields are not needed in each install record, and no
transaction locking is required. To use this feature, provide
``enable_transaction_locking=False``, and specify a list of needed
fields in ``record_fields``.
"""
self.root = root

@@ -375,14 +402,23 @@ def __init__(self, root, db_dir=None, upstream_dbs=None,
# message)
self._fail_when_missing_deps = False

if enable_transaction_locking:
self._write_transaction_impl = lk.WriteTransaction
self._read_transaction_impl = lk.ReadTransaction
else:
self._write_transaction_impl = nullcontext
self._read_transaction_impl = nullcontext

self._record_fields = record_fields

def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return lk.WriteTransaction(
return self._write_transaction_impl(
self.lock, acquire=self._read, release=self._write)

def read_transaction(self):
"""Get a read lock context manager for use in a `with` block."""
return lk.ReadTransaction(self.lock, acquire=self._read)
return self._read_transaction_impl(self.lock, acquire=self._read)

def _failed_spec_path(self, spec):
"""Return the path to the spec's failure file, which may not exist."""
@@ -592,7 +628,8 @@ def _write_to_file(self, stream):
This function does not do any locking or transactions.
"""
# map from per-spec hash code to installation record.
installs = dict((k, v.to_dict()) for k, v in self._data.items())
installs = dict((k, v.to_dict(include_fields=self._record_fields))
for k, v in self._data.items())

# database includes installation list and version.

@@ -726,7 +763,8 @@ def check(cond, msg):

self.reindex(spack.store.layout)
installs = dict(
(k, v.to_dict()) for k, v in self._data.items()
(k, v.to_dict(include_fields=self._record_fields))
for k, v in self._data.items()
)

def invalid_record(hash_key, error):
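The nullcontext helper and the _write_transaction_impl/_read_transaction_impl indirection above let the same transaction-shaped code run with locking disabled. A standalone sketch of the pattern (the locked_transaction and write_index names are hypothetical, not Spack API):

.. code-block:: python

    import contextlib

    @contextlib.contextmanager
    def nullcontext(*args, **kwargs):
        # No-op stand-in accepting the same arguments a real transaction takes.
        yield

    @contextlib.contextmanager
    def locked_transaction(lock_name):
        # Placeholder for a real lock-backed transaction.
        print('acquire', lock_name)
        yield
        print('release', lock_name)

    def write_index(use_locking):
        # Pick the transaction implementation once, then use it uniformly.
        txn = locked_transaction if use_locking else nullcontext
        with txn('db.lock'):
            pass  # write index.json here

    write_index(use_locking=False)  # as in throwaway buildcache index databases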
@@ -115,7 +115,7 @@ def activate(
use_env_repo (bool): use the packages exactly as they appear in the
environment's repository
add_view (bool): generate commands to add view to path variables
shell (string): One of `sh`, `csh`.
shell (string): One of `sh`, `csh`, `fish`.
prompt (string): string to add to the users prompt, or None

Returns:
@@ -141,6 +141,19 @@ def activate(
cmds += 'if (! $?SPACK_OLD_PROMPT ) '
cmds += 'setenv SPACK_OLD_PROMPT "${prompt}";\n'
cmds += 'set prompt="%s ${prompt}";\n' % prompt
elif shell == 'fish':
if os.getenv('TERM') and 'color' in os.getenv('TERM') and prompt:
prompt = colorize('@G{%s} ' % prompt, color=True)

cmds += 'set -gx SPACK_ENV %s;\n' % env.path
cmds += 'function despacktivate;\n'
cmds += ' spack env deactivate;\n'
cmds += 'end;\n'
#
# NOTE: We're not changing the fish_prompt function (which is fish's
# solution to the PS1 variable) here. This is a bit fiddly, and easy to
# screw up => spend time reasearching a solution. Feedback welcome.
#
else:
if os.getenv('TERM') and 'color' in os.getenv('TERM') and prompt:
prompt = colorize('@G{%s} ' % prompt, color=True)
@@ -156,6 +169,12 @@ def activate(
cmds += 'fi;\n'
cmds += 'export PS1="%s ${PS1}";\n' % prompt

#
# NOTE in the fish-shell: Path variables are a special kind of variable
# used to support colon-delimited path lists including PATH, CDPATH,
# MANPATH, PYTHONPATH, etc. All variables that end in PATH (case-sensitive)
# become PATH variables.
#
if add_view and default_view_name in env.views:
with spack.store.db.read_transaction():
cmds += env.add_default_view_to_shell(shell)
@@ -167,7 +186,7 @@ def deactivate(shell='sh'):
"""Undo any configuration or repo settings modified by ``activate()``.

Arguments:
shell (string): One of `sh`, `csh`. Shell style to use.
shell (string): One of `sh`, `csh`, `fish`. Shell style to use.

Returns:
(string): shell commands for `shell` to undo environment variables
@@ -191,6 +210,12 @@ def deactivate(shell='sh'):
cmds += 'set prompt="$SPACK_OLD_PROMPT" && '
cmds += 'unsetenv SPACK_OLD_PROMPT;\n'
cmds += 'unalias despacktivate;\n'
elif shell == 'fish':
cmds += 'set -e SPACK_ENV;\n'
cmds += 'functions -e despacktivate;\n'
#
# NOTE: Not changing fish_prompt (above) => no need to restore it here.
#
else:
cmds += 'if [ ! -z ${SPACK_ENV+x} ]; then\n'
cmds += 'unset SPACK_ENV; export SPACK_ENV;\n'
@@ -247,18 +272,13 @@ def find_environment(args):
# at env_dir (env and env_dir are mutually exclusive)
env = getattr(args, 'env_dir', None)

# if no argument, look for a manifest file
# if no argument, look for the environment variable
if not env:
if os.path.exists(manifest_name):
env = os.getcwd()
env = os.environ.get(spack_env_var)

# if no env, env_dir, or manifest try the environment
# nothing was set; there's no active environment
if not env:
env = os.environ.get(spack_env_var)

# nothing was set; there's no active environment
if not env:
return None
return None

# if we get here, env isn't the name of a spack environment; it has
# to be a path to an environment, or there is something wrong.
@@ -146,7 +146,8 @@ def detect_version(self, detect_version_args):
compiler_cls.PrgEnv_compiler
)
matches = re.findall(version_regex, output)
version = tuple(version for _, version in matches)
version = tuple(version for _, version in matches
if 'classic' not in version)
compiler_id = detect_version_args.id
value = detect_version_args._replace(
id=compiler_id._replace(version=version)
@@ -5,8 +5,11 @@

import contextlib
import os
import re

import llnl.util.filesystem as fs
import llnl.util.lang
import llnl.util.tty as tty

from spack.operating_systems.linux_distro import LinuxDistro
from spack.util.environment import get_path
@@ -60,6 +63,43 @@ def compiler_search_paths(self):
This prevents from detecting Cray compiler wrappers and avoids
possible false detections.
"""
import spack.compilers

with unload_programming_environment():
search_paths = fs.search_paths_for_executables(*get_path('PATH'))
return search_paths
search_paths = get_path('PATH')

extract_path_re = re.compile(r'prepend-path[\s]*PATH[\s]*([/\w\.:-]*)')

for compiler_cls in spack.compilers.all_compiler_types():
# Check if the compiler class is supported on Cray
prg_env = getattr(compiler_cls, 'PrgEnv', None)
compiler_module = getattr(compiler_cls, 'PrgEnv_compiler', None)
if not (prg_env and compiler_module):
continue

# It is supported, check which versions are available
output = module('avail', compiler_cls.PrgEnv_compiler)
version_regex = r'({0})/([\d\.]+[\d]-?[\w]*)'.format(
compiler_cls.PrgEnv_compiler
)
matches = re.findall(version_regex, output)
versions = tuple(version for _, version in matches
if 'classic' not in version)

# Now inspect the modules and add to paths
msg = "[CRAY FE] Detected FE compiler [name={0}, versions={1}]"
tty.debug(msg.format(compiler_module, versions))
for v in versions:
try:
current_module = compiler_module + '/' + v
out = module('show', current_module)
match = extract_path_re.search(out)
search_paths += match.group(1).split(':')
except Exception as e:
msg = ("[CRAY FE] An unexpected error occurred while "
"detecting FE compiler [compiler={0}, "
" version={1}, error={2}]")
tty.debug(msg.format(compiler_cls.name, v, str(e)))

search_paths = list(llnl.util.lang.dedupe(search_paths))
return fs.search_paths_for_executables(*search_paths)
@@ -54,6 +54,7 @@ def __init__(self):
'10.13': 'highsierra',
'10.14': 'mojave',
'10.15': 'catalina',
'11.0': 'bigsur',
}

mac_ver = str(macos_version().up_to(2))
lib/spack/spack/schema/buildcache_spec.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for a buildcache spec.yaml file

.. literalinclude:: _spack_root/lib/spack/spack/schema/buildcache_spec.py
:lines: 14-
"""
import spack.schema.spec


schema = {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack buildcache spec.yaml schema',
'type': 'object',
# 'additionalProperties': True,
'properties': {
'buildinfo': {
'type': 'object',
'additionalProperties': False,
'required': ['relative_prefix'],
'properties': {
'relative_prefix': {'type': 'string'},
'relative_rpaths': {'type': 'boolean'},
},
},
'full_hash': {'type': 'string'},
'spec': {
'type': 'array',
'items': spack.schema.spec.properties,
},
'binary_cache_checksum': {
'type': 'object',
'properties': {
'hash_algorithm': {'type': 'string'},
'hash': {'type': 'string'},
},
},
},
}
@@ -31,7 +31,8 @@
'type': 'string',
'enum': [
'develop',
'0.14', '0.14.0', '0.14.1', '0.14.2', '0.14.3'
'0.14', '0.14.0', '0.14.1', '0.14.2',
'0.15', '0.15.0',
]
}
},
lib/spack/spack/schema/database_index.py (new file, 58 lines)
@@ -0,0 +1,58 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for database index.json file

.. literalinclude:: _spack_root/lib/spack/spack/schema/database_index.py
:lines: 36-
"""
import spack.schema.spec

# spack.schema.spec.properties

#: Full schema with metadata
schema = {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack spec schema',
'type': 'object',
'required': ['database'],
'additionalProperties': False,
'properties': {
'database': {
'type': 'object',
'required': ['installs', 'version'],
'additionalProperties': False,
'properties': {
'installs': {
'type': 'object',
'patternProperties': {
r'^[\w\d]{32}$': {
'type': 'object',
'properties': {
'spec': spack.schema.spec.properties,
'path': {
'oneOf': [
{'type': 'string'},
{'type': 'null'},
],
},
'installed': {'type': 'boolean'},
'ref_count': {
'type': 'integer',
'minimum': 0,
},
'explicit': {'type': 'boolean'},
'installation_time': {
'type': 'number',
}
},
},
},
},
'version': {'type': 'string'},
}
},
},
}
159
lib/spack/spack/schema/spec.py
Normal file
159
lib/spack/spack/schema/spec.py
Normal file
@@ -0,0 +1,159 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Schema for a spec found in spec.yaml or database index.json files

.. literalinclude:: _spack_root/lib/spack/spack/schema/spec.py
   :lines: 13-
"""


target = {
    'oneOf': [
        {
            'type': 'string',
        }, {
            'type': 'object',
            'additionalProperties': False,
            'required': [
                'name',
                'vendor',
                'features',
                'generation',
                'parents',
            ],
            'properties': {
                'name': {'type': 'string'},
                'vendor': {'type': 'string'},
                'features': {
                    'type': 'array',
                    'items': {'type': 'string'},
                },
                'generation': {'type': 'integer'},
                'parents': {
                    'type': 'array',
                    'items': {'type': 'string'},
                },
            },
        },
    ],
}

arch = {
    'type': 'object',
    'additionalProperties': False,
    'properties': {
        'platform': {},
        'platform_os': {},
        'target': target,
    },
}

dependencies = {
    'type': 'object',
    'patternProperties': {
        r'\w[\w-]*': {  # package name
            'type': 'object',
            'properties': {
                'hash': {'type': 'string'},
                'type': {
                    'type': 'array',
                    'items': {'type': 'string'},
                },
            },
        },
    },
}

#: Properties for inclusion in other schemas
properties = {
    r'\w[\w-]*': {  # package name
        'type': 'object',
        'additionalProperties': False,
        'required': [
            'version',
            'arch',
            'compiler',
            'namespace',
            'parameters',
        ],
        'properties': {
            'hash': {'type': 'string'},
            'version': {
                'oneOf': [
                    {'type': 'string'},
                    {'type': 'number'},
                ],
            },
            'arch': arch,
            'compiler': {
                'type': 'object',
                'additionalProperties': False,
                'properties': {
                    'name': {'type': 'string'},
                    'version': {'type': 'string'},
                },
            },
            'namespace': {'type': 'string'},
            'parameters': {
                'type': 'object',
                'required': [
                    'cflags',
                    'cppflags',
                    'cxxflags',
                    'fflags',
                    'ldflags',
                    'ldlibs',
                ],
                'additionalProperties': True,
                'properties': {
                    'patches': {
                        'type': 'array',
                        'items': {'type': 'string'},
                    },
                    'cflags': {
                        'type': 'array',
                        'items': {'type': 'string'},
                    },
                    'cppflags': {
                        'type': 'array',
                        'items': {'type': 'string'},
                    },
                    'cxxflags': {
                        'type': 'array',
                        'items': {'type': 'string'},
                    },
                    'fflags': {
                        'type': 'array',
                        'items': {'type': 'string'},
                    },
                    'ldflags': {
                        'type': 'array',
                        'items': {'type': 'string'},
                    },
                    'ldlib': {
                        'type': 'array',
                        'items': {'type': 'string'},
                    },
                },
            },
            'patches': {
                'type': 'array',
                'items': {},
            },
            'dependencies': dependencies,
        },
    },
}


#: Full schema with metadata
schema = {
    '$schema': 'http://json-schema.org/schema#',
    'title': 'Spack spec schema',
    'type': 'object',
    'additionalProperties': False,
    'patternProperties': properties,
}
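
To make the shape accepted by spack.schema.spec concrete, here is a minimal mapping that validates against the full schema above. A sketch only: the package name, versions, and target values are made up and carry no meaning beyond matching the schema; it is not part of the change:

from jsonschema import validate

from spack.schema.spec import schema

example = {
    'zlib': {
        'version': '1.2.11',
        'arch': {
            'platform': 'linux',
            'platform_os': 'ubuntu18.04',
            'target': 'x86_64',
        },
        'compiler': {'name': 'gcc', 'version': '9.3.0'},
        'namespace': 'builtin',
        'parameters': {
            'cflags': [], 'cppflags': [], 'cxxflags': [],
            'fflags': [], 'ldflags': [], 'ldlibs': [],
        },
    },
}

# 'zlib' matches the package-name pattern and every required key is present,
# so this raises no ValidationError.
validate(example, schema)
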
@@ -188,8 +188,6 @@ def test_ci_workarounds():
'SPACK_IS_PR_PIPELINE': 'False',
}

common_script = ['spack ci rebuild']

common_before_script = [
'git clone "https://github.com/spack/spack"',
' && '.join((
@@ -219,14 +217,14 @@ def make_build_job(name, deps, stage, use_artifact_buildcache, optimize,
},
'retry': {'max': 2, 'when': ['always']},
'after_script': ['rm -rf "./spack"'],
'image': {'name': 'spack/centos7', 'entrypoint': ['']},
'script': ['spack ci rebuild'],
'image': {'name': 'spack/centos7', 'entrypoint': ['']}
}

if optimize:
result['extends'] = ['.c0', '.c1', '.c2']
result['extends'] = ['.c0', '.c1']
else:
variables['SPACK_ROOT_SPEC'] = fake_root_spec
result['script'] = common_script
result['before_script'] = common_before_script

result['variables'] = variables
@@ -254,7 +252,7 @@ def make_rebuild_index_job(
}

if optimize:
result['extends'] = '.c1'
result['extends'] = '.c0'
else:
result['before_script'] = common_before_script

@@ -262,11 +260,16 @@ def make_rebuild_index_job(

def make_factored_jobs(optimize):
return {
'.c0': {'script': common_script},
'.c1': {'before_script': common_before_script},
'.c2': {'variables': {'SPACK_ROOT_SPEC': fake_root_spec}}
'.c0': {'before_script': common_before_script},
'.c1': {'variables': {'SPACK_ROOT_SPEC': fake_root_spec}}
} if optimize else {}

def make_stage_list(num_build_stages):
return {
'stages': (
['-'.join(('stage', str(i))) for i in range(num_build_stages)]
+ ['stage-rebuild-index'])}

def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):
result = {}

@@ -287,22 +290,10 @@ def make_yaml_obj(use_artifact_buildcache, optimize, use_dependencies):

result.update(make_factored_jobs(optimize))

result.update(make_stage_list(3))

return result

def sort_yaml_obj(obj):
if isinstance(obj, collections_abc.Mapping):
result = syaml.syaml_dict()
for k in sorted(obj.keys(), key=str):
result[k] = sort_yaml_obj(obj[k])
return result

if (isinstance(obj, collections_abc.Sequence) and
not isinstance(obj, str)):
return syaml.syaml_list(sorted(
(sort_yaml_obj(x) for x in obj), key=str))

return obj

# test every combination of:
# use artifact buildcache: true or false
# run optimization pass: true or false
@@ -331,8 +322,8 @@ def sort_yaml_obj(obj):
actual = cinw.needs_to_dependencies(actual)

predicted = syaml.dump_config(
sort_yaml_obj(predicted), default_flow_style=True)
ci_opt.sort_yaml_obj(predicted), default_flow_style=True)
actual = syaml.dump_config(
sort_yaml_obj(actual), default_flow_style=True)
ci_opt.sort_yaml_obj(actual), default_flow_style=True)

assert(predicted == actual)

@@ -24,7 +24,7 @@
def mock_get_specs(database, monkeypatch):
specs = database.query_local()
monkeypatch.setattr(
spack.binary_distribution, 'get_specs', lambda x, y: specs
spack.binary_distribution, 'get_specs', lambda x: specs
)


@@ -4,8 +4,10 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import filecmp
import json
import os
import pytest
from jsonschema import validate

import spack
import spack.ci as ci
@@ -15,6 +17,8 @@
from spack.main import SpackCommand
import spack.paths as spack_paths
import spack.repo as repo
from spack.schema.buildcache_spec import schema as spec_yaml_schema
from spack.schema.database_index import schema as db_idx_schema
from spack.spec import Spec
from spack.util.mock_package import MockPackageMultiRepo
import spack.util.executable as exe
@@ -717,10 +721,28 @@ def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
ci.push_mirror_contents(
env, concrete_spec, yaml_path, mirror_url, '42')

buildcache_list_output = buildcache_cmd('list', output=str)
buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')

# Test generating buildcache index while we have bin mirror
buildcache_cmd('update-index', '--mirror-url', mirror_url)
index_path = os.path.join(buildcache_path, 'index.json')
with open(index_path) as idx_fd:
index_object = json.load(idx_fd)
validate(index_object, db_idx_schema)

# Now that index is regenerated, validate "buildcache list" output
buildcache_list_output = buildcache_cmd('list', output=str)
assert('patchelf' in buildcache_list_output)

# Also test buildcache_spec schema
bc_files_list = os.listdir(buildcache_path)
for file_name in bc_files_list:
if file_name.endswith('.spec.yaml'):
spec_yaml_path = os.path.join(buildcache_path, file_name)
with open(spec_yaml_path) as yaml_fd:
yaml_object = syaml.load(yaml_fd)
validate(yaml_object, spec_yaml_schema)

logs_dir = working_dir.join('logs_dir')
if not os.path.exists(logs_dir.strpath):
os.makedirs(logs_dir.strpath)
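
The index-regeneration flow this test exercises can be driven the same way from Python through SpackCommand. A minimal sketch under stated assumptions: the mirror URL is hypothetical and is assumed to already be registered as a Spack mirror; it is not part of the change:

from spack.main import SpackCommand

buildcache = SpackCommand('buildcache')

# Rebuild build_cache/index.json for the mirror, then show what it advertises.
buildcache('update-index', '--mirror-url', 'file:///tmp/test-mirror')
print(buildcache('list', output=str))
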
@@ -159,7 +159,13 @@ def __init__(self):
default_compiler_entry['paths']['f77']],
environment={})

_get_compiler_link_paths = Compiler._get_compiler_link_paths
def _get_compiler_link_paths(self, paths):
# Mock os.path.isdir so the link paths don't have to exist
old_isdir = os.path.isdir
os.path.isdir = lambda x: True
ret = super(MockCompiler, self)._get_compiler_link_paths(paths)
os.path.isdir = old_isdir
return ret

@property
def name(self):
@@ -222,6 +228,7 @@ def call_compiler(exe, *args, **kwargs):
('f77', 'fflags'),
('f77', 'cppflags'),
])
@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths(monkeypatch, exe, flagname):
# create fake compiler that emits mock verbose output
compiler = MockCompiler()
@@ -261,6 +268,7 @@ def test_get_compiler_link_paths_no_verbose_flag():
assert dirs == []


@pytest.mark.enable_compiler_link_paths
def test_get_compiler_link_paths_load_env(working_env, monkeypatch, tmpdir):
gcc = str(tmpdir.join('gcc'))
with open(gcc, 'w') as f:
@@ -377,6 +385,10 @@ def test_cce_flags():
supported_flag_test("cxx_pic_flag", "-h PIC", "cce@1.0")
supported_flag_test("f77_pic_flag", "-h PIC", "cce@1.0")
supported_flag_test("fc_pic_flag", "-h PIC", "cce@1.0")
supported_flag_test("cc_pic_flag", "-fPIC", "cce@9.1.0")
supported_flag_test("cxx_pic_flag", "-fPIC", "cce@9.1.0")
supported_flag_test("f77_pic_flag", "-fPIC", "cce@9.1.0")
supported_flag_test("fc_pic_flag", "-fPIC", "cce@9.1.0")
supported_flag_test("debug_flags", ['-g', '-G0', '-G1', '-G2', '-Gfast'],
'cce@1.0')

@@ -823,3 +835,33 @@ class MockPackage(object):
pkg = MockPackage()
with pytest.raises(OSError):
compiler.setup_custom_environment(pkg, env)


@pytest.mark.enable_compiler_verification
def test_compiler_executable_verification_raises(tmpdir):
compiler = MockCompiler()
compiler.cc = '/this/path/does/not/exist'

with pytest.raises(spack.compiler.CompilerAccessError):
compiler.verify_executables()


@pytest.mark.enable_compiler_verification
def test_compiler_executable_verification_success(tmpdir):
def prepare_executable(name):
real = str(tmpdir.join('cc').ensure())
fs.set_executable(real)
setattr(compiler, name, real)

# setup mock compiler with real paths
compiler = MockCompiler()
for name in ('cc', 'cxx', 'f77', 'fc'):
prepare_executable(name)

# testing that this doesn't raise an error because the paths exist and
# are executable
compiler.verify_executables()

# Test that null entries don't fail
compiler.cc = None
compiler.verify_executables()

@@ -4,6 +4,9 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Test detection of compiler version"""
import pytest
import os

import llnl.util.filesystem as fs

import spack.compilers.arm
import spack.compilers.cce
@@ -16,6 +19,9 @@
import spack.compilers.xl
import spack.compilers.xl_r

from spack.operating_systems.cray_frontend import CrayFrontend
import spack.util.module_cmd


@pytest.mark.parametrize('version_str,expected_version', [
('Arm C/C++/Fortran Compiler version 19.0 (build number 73) (based on LLVM 7.0.2)\n' # NOQA
@@ -189,3 +195,41 @@ def test_xl_version_detection(version_str, expected_version):

version = spack.compilers.xl_r.XlR.extract_version_from_output(version_str)
assert version == expected_version


@pytest.mark.parametrize('compiler,version', [
('gcc', '8.1.0'),
('gcc', '1.0.0-foo'),
('pgi', '19.1'),
('pgi', '19.1a'),
('intel', '9.0.0'),
('intel', '0.0.0-foobar')
])
def test_cray_frontend_compiler_detection(
compiler, version, tmpdir, monkeypatch, working_env
):
"""Test that the Cray frontend properly finds compilers from modules"""
# setup the fake compiler directory
compiler_dir = tmpdir.join(compiler)
compiler_exe = compiler_dir.join('cc').ensure()
fs.set_executable(str(compiler_exe))

# mock modules
def _module(cmd, *args):
module_name = '%s/%s' % (compiler, version)
module_contents = 'prepend-path PATH %s' % compiler_dir
if cmd == 'avail':
return module_name if compiler in args[0] else ''
if cmd == 'show':
return module_contents if module_name in args else ''
monkeypatch.setattr(spack.operating_systems.cray_frontend, 'module',
_module)

# remove PATH variable
os.environ.pop('PATH', None)

# get a CrayFrontend object
cray_fe_os = CrayFrontend()

paths = cray_fe_os.compiler_search_paths
assert paths == [str(compiler_dir)]

@@ -70,6 +70,25 @@ def clean_user_environment():
ev.activate(active)


#
# Disable checks on compiler executable existence
#
@pytest.fixture(scope='function', autouse=True)
def mock_compiler_executable_verification(request, monkeypatch):
"""Mock the compiler executable verification to allow missing executables.

This fixture can be disabled for tests of the compiler verification
functionality by::

@pytest.mark.enable_compiler_verification

If a test is marked in that way this is a no-op."""
if 'enable_compiler_verification' not in request.keywords:
monkeypatch.setattr(spack.compiler.Compiler,
'verify_executables',
lambda x: None)


# Hooks to add command line options or set other custom behaviors.
# They must be placed here to be found by pytest. See:
#
@@ -600,18 +619,26 @@ def dirs_with_libfiles(tmpdir_factory):


@pytest.fixture(scope='function', autouse=True)
def disable_compiler_execution(monkeypatch):
def noop(*args):
return []
def disable_compiler_execution(monkeypatch, request):
"""
This fixture can be disabled for tests of the compiler link path
functionality by::

# Compiler.determine_implicit_rpaths actually runs the compiler. So this
# replaces that function with a noop that simulates finding no implicit
# RPATHs
monkeypatch.setattr(
spack.compiler.Compiler,
'_get_compiler_link_paths',
noop
)
@pytest.mark.enable_compiler_link_paths

If a test is marked in that way this is a no-op."""
if 'enable_compiler_link_paths' not in request.keywords:
def noop(*args):
return []

# Compiler.determine_implicit_rpaths actually runs the compiler. So
# replace that function with a noop that simulates finding no implicit
# RPATHs
monkeypatch.setattr(
spack.compiler.Compiler,
'_get_compiler_link_paths',
noop
)


@pytest.fixture(scope='function')

@@ -21,6 +21,8 @@
_use_uuid = False
pass

from jsonschema import validate

import llnl.util.lock as lk
from llnl.util.tty.colify import colify

@@ -31,6 +33,7 @@
import spack.spec
from spack.util.mock_package import MockPackageMultiRepo
from spack.util.executable import Executable
from spack.schema.database_index import schema


pytestmark = pytest.mark.db
@@ -438,6 +441,10 @@ def test_005_db_exists(database):
assert os.path.exists(str(index_file))
assert os.path.exists(str(lock_file))

with open(index_file) as fd:
index_object = json.load(fd)
validate(index_object, schema)


def test_010_all_install_sanity(database):
"""Ensure that the install layout reflects what we think it does."""
@@ -730,6 +737,8 @@ def test_old_external_entries_prefix(mutable_database):
with open(spack.store.db._index_path, 'r') as f:
db_obj = json.loads(f.read())

validate(db_obj, schema)

s = spack.spec.Spec('externaltool')
s.concretize()


@@ -2,7 +2,7 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import pytest
import os

import spack.paths
@@ -13,6 +13,13 @@
'compiler_verbose_output')


@pytest.fixture(autouse=True)
def allow_nonexistent_paths(monkeypatch):
# Allow nonexistent paths to be detected as part of the output
# for testing purposes.
monkeypatch.setattr(os.path, 'isdir', lambda x: True)


def check_link_paths(filename, paths):
with open(os.path.join(datadir, filename)) as file:
output = file.read()

@@ -108,6 +108,8 @@ def test_buildcache(mock_archive, tmpdir):
else:
create_args.insert(create_args.index('-a'), '-u')

create_args.insert(create_args.index('-a'), '--rebuild-index')

args = parser.parse_args(create_args)
buildcache.buildcache(parser, args)
# trigger overwrite warning
@@ -165,7 +167,7 @@ def test_buildcache(mock_archive, tmpdir):
args = parser.parse_args(['list'])
buildcache.buildcache(parser, args)

args = parser.parse_args(['list', '-f'])
args = parser.parse_args(['list'])
buildcache.buildcache(parser, args)

args = parser.parse_args(['list', 'trivial'])

@@ -32,12 +32,14 @@
_shell_set_strings = {
'sh': 'export {0}={1};\n',
'csh': 'setenv {0} {1};\n',
'fish': 'set -gx {0} {1};\n'
}


_shell_unset_strings = {
'sh': 'unset {0};\n',
'csh': 'unsetenv {0};\n',
'fish': 'set -e {0};\n',
}