Compare commits


8 Commits

Author          SHA1        Message                                Date
Gregory Becker  26e38d9b26  flake                                  2021-02-19 01:21:54 -08:00
Gregory Becker  f279ce2e7f  update completion                      2021-02-19 01:14:58 -08:00
Gregory Becker  3ac497dfec  add capability to pull subcomponents   2021-02-19 01:10:40 -08:00
Gregory Becker  9810572411  run git from proper directory          2021-02-19 00:48:35 -08:00
Gregory Becker  d4895435f1  update bash completions                2021-02-19 00:37:22 -08:00
Gregory Becker  ae7ca04997  flake                                  2021-02-19 00:36:28 -08:00
Gregory Becker  29562596c3  fixup imports                          2021-02-19 00:36:28 -08:00
Gregory Becker  892dd4d97f  add spack checkout command             2021-02-19 00:36:28 -08:00
215 changed files with 730 additions and 5417 deletions

View File

@@ -165,24 +165,3 @@ jobs:
- uses: codecov/codecov-action@v1
with:
flags: unittests,linux,clingo
clingo-cffi:
# Test for the clingo based solver (using clingo-cffi)
runs-on: ubuntu-latest
container: spack/github-actions:clingo-cffi
steps:
- name: Run unit tests
run: |
whoami && echo PWD=$PWD && echo HOME=$HOME && echo SPACK_TEST_SOLVER=$SPACK_TEST_SOLVER
python3 -c "import clingo; print(hasattr(clingo.Symbol, '_rep'), clingo.__version__)"
git clone https://github.com/spack/spack.git && cd spack
git fetch origin ${{ github.ref }}:test-branch
git checkout test-branch
. share/spack/setup-env.sh
spack compiler find
spack solve mpileaks%gcc
coverage run $(which spack) unit-test -v
coverage combine
coverage xml
- uses: codecov/codecov-action@v1
with:
flags: unittests,linux,clingo

View File

@@ -1,32 +1,3 @@
# v0.16.1 (2021-02-22)
This minor release includes a new feature and associated fixes:
* intel-oneapi support through new packages (#20411, #20686, #20693, #20717,
#20732, #20808, #21377, #21448)
This release also contains bug fixes/enhancements for:
* HIP/ROCm support (#19715, #20095)
* concretization (#19988, #20020, #20082, #20086, #20099, #20102, #20128,
#20182, #20193, #20194, #20196, #20203, #20247, #20259, #20307, #20362,
#20383, #20423, #20473, #20506, #20507, #20604, #20638, #20649, #20677,
#20680, #20790)
* environment install reporting fix (#20004)
* avoid import in ABI compatibility info (#20236)
* restore ability of dev-build to skip patches (#20351)
* spack find -d spec grouping (#20028)
* spack smoke test support (#19987, #20298)
* macOS fixes (#20038, #21662)
* abstract spec comparisons (#20341)
* continuous integration (#17563)
* performance improvements for binary relocation (#19690, #20768)
* additional sanity checks for variants in builtin packages (#20373)
* do not pollute auto-generated configuration files with empty lists or
dicts (#20526)
plus assorted documentation (#20021, #20174) and package bug fixes/enhancements
(#19617, #19933, #19986, #20006, #20097, #20198, #20794, #20906, #21411).
# v0.16.0 (2020-11-18)
`v0.16.0` is a major feature release.

View File

@@ -25,6 +25,16 @@ modules:
- MANPATH
share/aclocal:
- ACLOCAL_PATH
lib:
- LIBRARY_PATH
lib64:
- LIBRARY_PATH
include:
- C_INCLUDE_PATH
- CPLUS_INCLUDE_PATH
# The INCLUDE env variable specifies paths to look for
# .mod files for Intel Fortran compilers
- INCLUDE
lib/pkgconfig:
- PKG_CONFIG_PATH
lib64/pkgconfig:

View File

@@ -441,8 +441,8 @@ Environments can include files with either relative or absolute
paths. Inline configurations take precedence over included
configurations, so you don't have to change shared configuration files
to make small changes to an individual Environment. Included configs
listed earlier will have higher precedence, as the included configs are
applied in reverse order.
listed later will have higher precedence, as the included configs are
applied in order.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Manually Editing the Specs List

View File

@@ -1,11 +1,11 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
#: major, minor, patch version for Spack, in a tuple
spack_version_info = (0, 16, 1)
spack_version_info = (0, 16, 0)
#: String containing Spack version joined with .'s
spack_version = '.'.join(str(v) for v in spack_version_info)

View File

@@ -866,8 +866,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file, prune_dag=False,
'retry': {
'max': 2,
'when': JOB_RETRY_CONDITIONS,
},
'interruptible': True
}
}
length_needs = len(job_dependencies)

View File

@@ -181,19 +181,6 @@ def parse_specs(args, **kwargs):
raise spack.error.SpackError(msg)
def matching_spec_from_env(spec):
"""
Returns a concrete spec, matching what is available in the environment.
If no matching spec is found in the environment (or if no environment is
active), this will return the given spec but concretized.
"""
env = spack.environment.get_env({}, cmd_name)
if env:
return env.matching_spec(spec) or spec.concretized()
else:
return spec.concretized()
def elide_list(line_list, max_num=10):
"""Takes a long list and limits it to a smaller number of elements,
replacing intervening elements with '...'. For example::
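(The hunk above ends mid-docstring, before the elide_list example.) As a hypothetical, illustration-only sketch of the behavior the docstring describes (keep the head of the list, replace the middle with '...', keep the final element), not the elided original example:

def elide_list_sketch(line_list, max_num=10):
    # Keep the first max_num - 1 items, insert '...', keep the last item.
    if len(line_list) > max_num:
        return line_list[:max_num - 1] + ['...'] + line_list[-1:]
    return line_list

print(elide_list_sketch(list(range(8)), max_num=4))  # [0, 1, 2, '...', 7]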

View File

@@ -0,0 +1,108 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import llnl.util.filesystem as fs
import llnl.util.tty as tty
import spack.environment as ev
import spack.paths
import spack.repo
from spack.util.executable import which
_SPACK_UPSTREAM = 'https://github.com/spack/spack'
description = "update the spack prefix or subcomponent to a git ref"
section = "admin"
level = "long"
git = None
def setup_parser(subparser):
subparser.add_argument(
'-r', '--remote', action='store', default='origin',
help="name of the git remote from which to fetch")
subparser.add_argument(
'--url', action='store', default=None,
help="url to use if the remote does not already exist")
subrepo_subparser = subparser.add_mutually_exclusive_group()
subrepo_subparser.add_argument(
'--env', action='store', default=None,
help="checkout an environment instead of the Spack source")
subrepo_subparser.add_argument(
'--repo', action='store', default=None,
help="checkout a Spack repo instead of the Spack source")
subparser.add_argument(
'ref', help="git reference to checkout")
def fetch_remote(remote, url):
# Ensure we have the appropriate remote configured
remotes = git('remote', output=str, error=str).split('\n')
if remote in remotes:
remote_url = git('remote', 'get-url', remote, output=str, error=str)
remote_url = remote_url.strip('\n')
if url and remote_url != url:
msg = "Git url %s does not match given url %s" % (remote_url, url)
msg += " for remote '%s'. Either use the git url or" % remote
msg += " specify a new remote name for the new url."
tty.die(msg)
elif not url:
msg = "Spack requires url to checkout from unknown remote %s" % remote
tty.die(msg)
else:
git('remote', 'add', remote, url)
git('fetch', remote)
def known_commit_or_tag(ref):
# No need to fetch for tags and commits if we have the ref already
# Fetch on other types rather than failing here because a tree ref could
# be ambiguous with a commit ref after fetching
ref_type = git('cat-file', '-t', ref,
output=str, error=str, fail_on_error=False).strip('\n ')
return ref_type in ('commit', 'tag')
def checkout(parser, args):
remote = args.remote or 'origin'
url = args.url
ref = args.ref
global git # make git available to called methods
git = which('git', required=True)
work_dir = spack.paths.prefix
# Set the appropriate workdir if we are modifying an environment or repo
# instead of Spack itself
if args.env:
if ev.exists(args.env):
work_dir = ev.read(args.env).path
elif ev.is_env_dir(args.env):
work_dir = ev.Environment(args.env).path
else:
raise ValueError("'%s' is not a valid Spack environment." %
args.env)
elif args.repo:
if args.repo in spack.repo.path.by_namespace:
work_dir = spack.repo.path.by_namespace[args.repo]
else:
raise ValueError("'%s' is not a valid Spack repo namespace." %
args.repo)
with fs.working_dir(work_dir):
# Always fetch branches
# branches includes an empty string; since ref cannot be an empty string,
# this does not cause a bug and fixing it reduces code legibility
# materialize the result of map() so both membership checks below see the
# full branch list under Python 3
branches = list(map(lambda b: b.strip('* '),
git('branch', output=str, error=str).split('\n')))
if ref in branches or not known_commit_or_tag(ref):
fetch_remote(remote, url)
# For branches, ensure we're getting the version from the correct remote
full_ref = '%s/%s' % (remote, ref) if ref in branches else ref
git('checkout', full_ref)
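As a pure-Python illustration (not part of the commit) of the decision the command above makes before running git checkout: branches always trigger a fetch and are qualified with the remote name, while refs that are not already known commits or tags also trigger a fetch but are checked out as given. The function and example values below are hypothetical.

def resolve_checkout_target(ref, remote, branches, is_known_commit_or_tag):
    """Return (needs_fetch, ref handed to git checkout), mirroring the logic above."""
    needs_fetch = ref in branches or not is_known_commit_or_tag
    # Branches are qualified with the remote so the right copy is checked out.
    full_ref = '%s/%s' % (remote, ref) if ref in branches else ref
    return needs_fetch, full_ref

print(resolve_checkout_target('develop', 'origin', ['develop'], False))
# (True, 'origin/develop')
print(resolve_checkout_target('v0.16.1', 'origin', ['develop'], True))
# (False, 'v0.16.1')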

View File

@@ -53,13 +53,11 @@ def emulate_env_utility(cmd_name, context, args):
spec = args.spec[0]
cmd = args.spec[1:]
specs = spack.cmd.parse_specs(spec, concretize=False)
specs = spack.cmd.parse_specs(spec, concretize=True)
if len(specs) > 1:
tty.die("spack %s only takes one spec." % cmd_name)
spec = specs[0]
spec = spack.cmd.matching_spec_from_env(spec)
build_environment.setup_package(spec.package, args.dirty, context)
if args.dump:

View File

@@ -17,8 +17,6 @@
import spack.schema.packages
import spack.util.spack_yaml as syaml
from spack.util.editor import editor
import spack.store
import spack.repo
description = "get and set configuration options"
section = "config"
@@ -75,16 +73,6 @@ def setup_parser(subparser):
help="file from which to set all config values"
)
prefer_upstream_parser = sp.add_parser(
'prefer-upstream',
help='set package preferences from upstream')
prefer_upstream_parser.add_argument(
'--local', action='store_true', default=False,
help="Set packages preferences based on local installs, rather "
"than upstream."
)
remove_parser = sp.add_parser('remove', aliases=['rm'],
help='remove configuration parameters')
remove_parser.add_argument(
@@ -443,79 +431,6 @@ def config_revert(args):
tty.msg(msg.format(cfg_file))
def config_prefer_upstream(args):
"""Generate a packages config based on the configuration of all upstream
installs."""
scope = args.scope
if scope is None:
scope = spack.config.default_modify_scope('packages')
all_specs = set(spack.store.db.query(installed=True))
local_specs = set(spack.store.db.query_local(installed=True))
pref_specs = local_specs if args.local else all_specs - local_specs
conflicting_variants = set()
pkgs = {}
for spec in pref_specs:
# Collect all the upstream compilers and versions for this package.
pkg = pkgs.get(spec.name, {
'version': [],
'compiler': [],
})
pkgs[spec.name] = pkg
# We have no existing variant if this is our first added version.
existing_variants = pkg.get('variants',
None if not pkg['version'] else '')
version = spec.version.string
if version not in pkg['version']:
pkg['version'].append(version)
compiler = str(spec.compiler)
if compiler not in pkg['compiler']:
pkg['compiler'].append(compiler)
# Get and list all the variants that differ from the default.
variants = []
for var_name, variant in spec.variants.items():
if (var_name in ['patches']
or var_name not in spec.package.variants):
continue
if variant.value != spec.package.variants[var_name].default:
variants.append(str(variant))
variants.sort()
variants = ' '.join(variants)
if spec.name not in conflicting_variants:
# Only specify the variants if there's a single variant
# set across all versions/compilers.
if existing_variants is not None and existing_variants != variants:
conflicting_variants.add(spec.name)
pkg.pop('variants', None)
elif variants:
pkg['variants'] = variants
if conflicting_variants:
tty.warn(
"The following packages have multiple conflicting upstream "
"specs. You may have to specify, by "
"concretized hash, which spec you want when building "
"packages that depend on them:\n - {0}"
.format("\n - ".join(sorted(conflicting_variants))))
# Simply write the config to the specified file.
existing = spack.config.get('packages', scope=scope)
new = spack.config.merge_yaml(existing, pkgs)
spack.config.set('packages', new, scope)
config_file = spack.config.config.get_config_filename(scope, section)
tty.msg("Updated config at {0}".format(config_file))
def config(parser, args):
action = {
'get': config_get,
@@ -526,7 +441,6 @@ def config(parser, args):
'rm': config_remove,
'remove': config_remove,
'update': config_update,
'revert': config_revert,
'prefer-upstream': config_prefer_upstream,
'revert': config_revert
}
action[args.config_command](args)

View File

@@ -8,7 +8,6 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
import spack.repo
description = "fetch archives for packages"
@@ -19,54 +18,25 @@
def setup_parser(subparser):
arguments.add_common_arguments(subparser, ['no_checksum', 'deprecated'])
subparser.add_argument(
"-m",
"--missing",
action="store_true",
help="fetch only missing (not yet installed) dependencies",
)
'-m', '--missing', action='store_true',
help="fetch only missing (not yet installed) dependencies")
subparser.add_argument(
"-D",
"--dependencies",
action="store_true",
help="also fetch all dependencies",
)
arguments.add_common_arguments(subparser, ["specs"])
subparser.epilog = (
"With an active environment, the specs "
"parameter can be omitted. In this case all (uninstalled"
", in case of --missing) specs from the environment are fetched"
)
'-D', '--dependencies', action='store_true',
help="also fetch all dependencies")
arguments.add_common_arguments(subparser, ['specs'])
def fetch(parser, args):
if args.specs:
specs = spack.cmd.parse_specs(args.specs, concretize=True)
else:
# No specs were given explicitly, check if we are in an
# environment. If yes, check the missing argument, if yes
# fetch all uninstalled specs from it otherwise fetch all.
# If we are also not in an environment, complain to the
# user that we don't know what to do.
env = ev.get_env(args, "fetch")
if env:
if args.missing:
specs = env.uninstalled_specs()
else:
specs = env.all_specs()
if specs == []:
tty.die(
"No uninstalled specs in environment. Did you "
"run `spack concretize` yet?"
)
else:
tty.die("fetch requires at least one spec argument")
if not args.specs:
tty.die("fetch requires at least one package argument")
if args.no_checksum:
spack.config.set("config:checksum", False, scope="command_line")
spack.config.set('config:checksum', False, scope='command_line')
if args.deprecated:
spack.config.set('config:deprecated', True, scope='command_line')
specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
if args.missing or args.dependencies:
for s in spec.traverse():

View File

@@ -98,8 +98,9 @@ def location(parser, args):
print(spack.repo.path.dirname_for_package_name(spec.name))
else:
spec = spack.cmd.matching_spec_from_env(spec)
pkg = spec.package
# These versions need concretized specs.
spec.concretize()
pkg = spack.repo.get(spec)
if args.stage_dir:
print(pkg.stage.path)

View File

@@ -29,7 +29,7 @@ class Oneapi(Compiler):
PrgEnv_compiler = 'oneapi'
version_argument = '--version'
version_regex = r'(?:(?:oneAPI DPC\+\+ Compiler)|(?:\(IFORT\))) (\S+)'
version_regex = r'(?:(?:oneAPI DPC\+\+ Compiler)|(?:ifx \(IFORT\))) (\S+)'
@property
def verbose_flag(self):

View File

@@ -553,7 +553,7 @@ def get_config(self, section, scope=None):
If ``scope`` is ``None`` or not provided, return the merged contents
of all of Spack's configuration scopes. If ``scope`` is provided,
return only the configuration as specified in that scope.
return only the confiugration as specified in that scope.
This strips off the top-level name from the YAML section. That is, for a
YAML config file that looks like this::

View File

@@ -15,7 +15,6 @@
from llnl.util.filesystem import mkdirp
import spack.config
import spack.hash_types as ht
import spack.spec
from spack.error import SpackError
@@ -243,9 +242,7 @@ def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, 'w') as f:
# The hash in the projection is the DAG hash, but we write out the
# full provenance by full hash so it's available if we want it later
spec.to_yaml(f, hash=ht.full_hash)
spec.to_yaml(f)
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
@@ -347,13 +344,7 @@ def check_installed(self, spec):
#
# TODO: remove this when we do better concretization and don't
# ignore build-only deps in hashes.
elif (installed_spec.copy(deps=('link', 'run')) ==
spec.copy(deps=('link', 'run'))):
# The directory layout prefix is based on the dag hash, so among
# specs with differing full-hash but matching dag-hash, only one
# may be installed. This means for example that for two instances
# that differ only in CMake version used to build, only one will
# be installed.
elif installed_spec == spec.copy(deps=('link', 'run')):
return path
if spec.dag_hash() == installed_spec.dag_hash():

View File

@@ -1397,21 +1397,6 @@ def _install_log_links(self, spec):
os.remove(build_log_link)
os.symlink(spec.package.build_log_path, build_log_link)
def uninstalled_specs(self):
"""Return a list of all uninstalled (and non-dev) specs."""
# Do the installed check across all specs within a single
# DB read transaction to reduce time spent in lock acquisition.
uninstalled_specs = []
with spack.store.db.read_transaction():
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
if not spec.package.installed or (
spec.satisfies('dev_path=*') or
spec.satisfies('^dev_path=*')
):
uninstalled_specs.append(spec)
return uninstalled_specs
def install_all(self, args=None, **install_args):
"""Install all concretized specs in an environment.
@@ -1422,13 +1407,22 @@ def install_all(self, args=None, **install_args):
args (Namespace): argparse namespace with command arguments
install_args (dict): keyword install arguments
"""
tty.debug('Assessing installation status of environment packages')
# If "spack install" is invoked repeatedly for a large environment
# where all specs are already installed, the operation can take
# a large amount of time due to repeatedly acquiring and releasing
# locks, this does an initial check across all specs within a single
# DB read transaction to reduce time spent in this case.
specs_to_install = self.uninstalled_specs()
tty.debug('Assessing installation status of environment packages')
specs_to_install = []
with spack.store.db.read_transaction():
for concretized_hash in self.concretized_order:
spec = self.specs_by_hash[concretized_hash]
if not spec.package.installed or (
spec.satisfies('dev_path=*') or
spec.satisfies('^dev_path=*')
):
# If it's a dev build it could need to be reinstalled
specs_to_install.append(spec)
if not specs_to_install:
tty.msg('All of the packages are already installed')
@@ -1515,67 +1509,6 @@ def concretized_specs(self):
for s, h in zip(self.concretized_user_specs, self.concretized_order):
yield (s, self.specs_by_hash[h])
def matching_spec(self, spec):
"""
Given a spec (likely not concretized), find a matching concretized
spec in the environment.
The matching spec does not have to be installed in the environment,
but must be concrete (specs added with `spack add` without an
intervening `spack concretize` will not be matched).
If there is a single root spec that matches the provided spec or a
single dependency spec that matches the provided spec, then the
concretized instance of that spec will be returned.
If multiple root specs match the provided spec, or no root specs match
and multiple dependency specs match, then this raises an error
and reports all matching specs.
"""
# Root specs will be keyed by concrete spec, value abstract
# Dependency-only specs will have value None
matches = {}
for user_spec, concretized_user_spec in self.concretized_specs():
if concretized_user_spec.satisfies(spec):
matches[concretized_user_spec] = user_spec
for dep_spec in concretized_user_spec.traverse(root=False):
if dep_spec.satisfies(spec):
# Don't overwrite the abstract spec if present
# If not present already, set to None
matches[dep_spec] = matches.get(dep_spec, None)
if not matches:
return None
elif len(matches) == 1:
return list(matches.keys())[0]
root_matches = dict((concrete, abstract)
for concrete, abstract in matches.items()
if abstract)
if len(root_matches) == 1:
return root_matches[0][1]
# More than one spec matched, and either multiple roots matched or
# none of the matches were roots
# If multiple root specs match, it is assumed that the abstract
# spec will most-succinctly summarize the difference between them
# (and the user can enter one of these to disambiguate)
match_strings = []
fmt_str = '{hash:7} ' + spack.spec.default_format
for concrete, abstract in matches.items():
if abstract:
s = 'Root spec %s\n %s' % (abstract, concrete.format(fmt_str))
else:
s = 'Dependency spec\n %s' % concrete.format(fmt_str)
match_strings.append(s)
matches_str = '\n'.join(match_strings)
msg = ("{0} matches multiple specs in the environment {1}: \n"
"{2}".format(str(spec), self.name, matches_str))
raise SpackEnvironmentError(msg)
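A pure-Python sketch (illustration only, not part of the diff) of the selection rules the matching_spec docstring above lays out: a single overall match wins, otherwise a single root match wins, otherwise the lookup is ambiguous. Per the docstring, the concrete spec is what gets returned; the spec strings below are hypothetical stand-ins.

def select_match(matches):
    """matches maps each concrete matching spec to its abstract root spec,
    or to None when it only matched as a dependency."""
    if not matches:
        return None
    if len(matches) == 1:
        return next(iter(matches))
    root_matches = {c: a for c, a in matches.items() if a is not None}
    if len(root_matches) == 1:
        return next(iter(root_matches))
    raise ValueError('spec matches multiple specs in the environment')

print(select_match({'zlib@1.2.11': None}))                      # zlib@1.2.11
print(select_match({'hdf5+mpi': 'hdf5', 'openmpi@4.0': None}))  # hdf5+mpi
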
def removed_specs(self):
"""Tuples of (user spec, concrete spec) for all specs that will be
removed on next concretize."""

View File

@@ -326,8 +326,6 @@ def _existing_url(self, url):
# Telling curl to fetch the first byte (-r 0-0) is supposed to be
# portable.
curl_args = ['--stderr', '-', '-s', '-f', '-r', '0-0', url]
if not spack.config.get('config:verify_ssl'):
curl_args.append('-k')
_ = curl(*curl_args, fail_on_error=False, output=os.devnull)
return curl.returncode == 0
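A rough standard-library equivalent of the curl probe above (illustration only, not Spack code): request the first byte and treat any successful response as proof the URL exists, optionally skipping certificate verification the way curl's -k flag does.

import ssl
import urllib.error
import urllib.request

def url_exists(url, verify_ssl=True, timeout=10):
    context = ssl.create_default_context()
    if not verify_ssl:  # same effect as passing -k to curl
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
    request = urllib.request.Request(url, headers={'Range': 'bytes=0-0'})
    try:
        with urllib.request.urlopen(request, timeout=timeout, context=context):
            return True
    except (urllib.error.URLError, OSError, ValueError):
        return False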

View File

@@ -18,9 +18,6 @@ class SpecHashDescriptor(object):
We currently use different hashes for different use cases.
"""
hash_types = ('_dag_hash', '_build_hash', '_full_hash')
def __init__(self, deptype=('link', 'run'), package_hash=False, attr=None):
self.deptype = dp.canonical_deptype(deptype)
self.package_hash = package_hash
@@ -38,5 +35,5 @@ def __init__(self, deptype=('link', 'run'), package_hash=False, attr=None):
#: Full hash used in build pipelines to determine when to rebuild packages.
full_hash = SpecHashDescriptor(
deptype=('build', 'link', 'run'), package_hash=True, attr='_full_hash')
full_hash = SpecHashDescriptor(deptype=('link', 'run'), package_hash=True,
attr='_full_hash')

View File

@@ -1526,16 +1526,10 @@ def content_hash(self, content=None):
hash_content.extend(':'.join((p.sha256, str(p.level))).encode('utf-8')
for p in self.spec.patches)
hash_content.append(package_hash(self.spec, content))
b32_hash = base64.b32encode(
return base64.b32encode(
hashlib.sha256(bytes().join(
sorted(hash_content))).digest()).lower()
# convert from bytes if running python 3
if sys.version_info[0] >= 3:
b32_hash = b32_hash.decode('utf-8')
return b32_hash
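A minimal standalone sketch of why the removed branch above decoded the digest: on Python 3, base64.b32encode returns bytes, so the older code converted the result to str before returning it.

import base64
import hashlib
import sys

digest = hashlib.sha256(b'example-content').digest()
b32_hash = base64.b32encode(digest).lower()  # bytes on Python 3
if sys.version_info[0] >= 3:
    b32_hash = b32_hash.decode('utf-8')      # matches the removed code path
print(b32_hash)
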
def _has_make_target(self, target):
"""Checks to see if 'target' is a valid target in a Makefile.

View File

@@ -18,8 +18,6 @@
try:
import clingo
# There may be a better way to detect this
clingo_cffi = hasattr(clingo.Symbol, '_rep')
except ImportError:
clingo = None # type: ignore
@@ -121,11 +119,11 @@ def __call__(self, *args):
def symbol(self, positive=True):
def argify(arg):
if isinstance(arg, bool):
return clingo.String(str(arg))
return str(arg)
elif isinstance(arg, int):
return clingo.Number(arg)
return arg
else:
return clingo.String(str(arg))
return str(arg)
return clingo.Function(
self.name, [argify(arg) for arg in self.args], positive=positive)
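A minimal sketch of the bool/int/str conversion symbol() performs above, assuming the clingo Python module is importable; bool is tested before int because True and False are also instances of int in Python.

import clingo

def argify(arg):
    if isinstance(arg, bool):
        return clingo.String(str(arg))
    elif isinstance(arg, int):
        return clingo.Number(arg)
    return clingo.String(str(arg))

sym = clingo.Function('version_satisfies',
                      [argify('mpileaks'), argify(3), argify(True)])
print(sym)  # e.g. version_satisfies("mpileaks",3,"True")
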
@@ -320,26 +318,18 @@ def solve(
def on_model(model):
models.append((model.cost, model.symbols(shown=True, terms=True)))
solve_kwargs = {"assumptions": self.assumptions,
"on_model": on_model,
"on_core": cores.append}
if clingo_cffi:
solve_kwargs["on_unsat"] = cores.append
solve_result = self.control.solve(**solve_kwargs)
solve_result = self.control.solve(
assumptions=self.assumptions,
on_model=on_model,
on_core=cores.append
)
timer.phase("solve")
# once done, construct the solve result
result.satisfiable = solve_result.satisfiable
def stringify(x):
if clingo_cffi:
# Clingo w/ CFFI will throw an exception on failure
try:
return x.string
except RuntimeError:
return str(x)
else:
return x.string or str(x)
return x.string or str(x)
if result.satisfiable:
builder = SpecBuilder(specs)
@@ -657,15 +647,11 @@ def _condition_facts(
self.gen.fact(cond_fn(condition_id, pkg_name, dep_spec.name))
# conditions that trigger the condition
conditions = self.checked_spec_clauses(
named_cond, body=True, required_from=pkg_name
)
conditions = self.spec_clauses(named_cond, body=True)
for pred in conditions:
self.gen.fact(require_fn(condition_id, pred.name, *pred.args))
imposed_constraints = self.checked_spec_clauses(
dep_spec, required_from=pkg_name
)
imposed_constraints = self.spec_clauses(dep_spec)
for pred in imposed_constraints:
# imposed "node"-like conditions are no-ops
if pred.name in ("node", "virtual_node"):
@@ -871,20 +857,6 @@ def flag_defaults(self):
self.gen.fact(fn.compiler_version_flag(
compiler.name, compiler.version, name, flag))
def checked_spec_clauses(self, *args, **kwargs):
"""Wrap a call to spec clauses into a try/except block that raise
a comprehensible error message in case of failure.
"""
requestor = kwargs.pop('required_from', None)
try:
clauses = self.spec_clauses(*args, **kwargs)
except RuntimeError as exc:
msg = str(exc)
if requestor:
msg += ' [required from package "{0}"]'.format(requestor)
raise RuntimeError(msg)
return clauses
def spec_clauses(self, spec, body=False, transitive=True):
"""Return a list of clauses for a spec mandates are true.
@@ -953,14 +925,9 @@ class Body(object):
# validate variant value
reserved_names = spack.directives.reserved_names
if not spec.virtual and vname not in reserved_names:
try:
variant_def = spec.package.variants[vname]
except KeyError:
msg = 'variant "{0}" not found in package "{1}"'
raise RuntimeError(msg.format(vname, spec.name))
else:
variant_def.validate_or_raise(variant, spec.package)
if (not spec.virtual and vname not in reserved_names):
variant_def = spec.package.variants[vname]
variant_def.validate_or_raise(variant, spec.package)
clauses.append(f.variant_value(spec.name, vname, value))

View File

@@ -1088,12 +1088,6 @@ def __init__(self, spec_like=None,
# external specs. None signal that it was not set yet.
self.extra_attributes = None
# This attribute holds the original build copy of the spec if it is
# deployed differently than it was built. None signals that the spec
# is deployed "as built."
# Build spec should be the actual build spec unless marked dirty.
self._build_spec = None
if isinstance(spec_like, six.string_types):
spec_list = SpecParser(self).parse(spec_like)
if len(spec_list) > 1:
@@ -1308,13 +1302,6 @@ def concrete(self):
"""
return self._concrete
@property
def spliced(self):
"""Returns whether or not this Spec is being deployed as built i.e.
whether or not this Spec has ever been spliced.
"""
return any(s.build_spec is not s for s in self.traverse(root=True))
def traverse(self, **kwargs):
direction = kwargs.get('direction', 'children')
depth = kwargs.get('depth', False)
@@ -2278,8 +2265,6 @@ def _expand_virtual_packages(self, concretizer):
# If replacement is external then trim the dependencies
if replacement.external:
if (spec._dependencies):
for dep in spec.dependencies():
del dep._dependents[spec.name]
changed = True
spec._dependencies = DependencyMap()
replacement._dependencies = DependencyMap()
@@ -2566,13 +2551,7 @@ def _mark_concrete(self, value=True):
Only for internal use -- client code should use "concretize"
unless there is a need to force a spec to be concrete.
"""
# if set to false, clear out all hashes (set to None or remove attr)
# may need to change references to respect None
for s in self.traverse():
if (not value) and s.concrete and s.package.installed:
continue
elif not value:
s.clear_cached_hashes()
s._mark_root_concrete(value)
def concretized(self, tests=False):
@@ -3386,7 +3365,6 @@ def _dup(self, other, deps=True, cleardeps=True, caches=None):
self.compiler_flags = other.compiler_flags.copy()
self.compiler_flags.spec = self
self.variants = other.variants.copy()
self._build_spec = other._build_spec
# FIXME: we manage _patches_in_order_of_appearance specially here
# to keep it from leaking out of spec.py, but we should figure
@@ -4247,89 +4225,6 @@ def target(self):
# to give to the attribute the appropriate comparison semantic
return self.architecture.target.microarchitecture
@property
def build_spec(self):
return self._build_spec or self
@build_spec.setter
def build_spec(self, value):
self._build_spec = value
def splice(self, other, transitive):
"""Splices dependency "other" into this ("target") Spec, and return the
result as a concrete Spec.
If transitive, then other and its dependencies will be extrapolated to
a list of Specs and spliced in accordingly.
For example, let there exist a dependency graph as follows:
T
| \
Z<-H
In this example, Spec T depends on H and Z, and H also depends on Z.
Suppose, however, that we wish to use a differently-built H, known as
H'. This function will splice in the new H' in one of two ways:
1. transitively, where H' depends on the Z' it was built with, and the
new T* also directly depends on this new Z', or
2. intransitively, where the new T* and H' both depend on the original
Z.
Since the Spec returned by this splicing function is no longer deployed
the same way it was built, any such changes are tracked by setting the
build_spec to point to the corresponding dependency from the original
Spec.
TODO: Extend this for non-concrete Specs.
"""
assert self.concrete
assert other.concrete
assert other.name in self
# Multiple unique specs with the same name will collide, so the
# _dependents of these specs should not be trusted.
# Variants may also be ignored here for now...
if transitive:
self_nodes = dict((s.name, s.copy(deps=False))
for s in self.traverse(root=True)
if s.name not in other)
other_nodes = dict((s.name, s.copy(deps=False))
for s in other.traverse(root=True))
else:
# If we're not doing a transitive splice, then we only want the
# root of other.
self_nodes = dict((s.name, s.copy(deps=False))
for s in self.traverse(root=True)
if s.name != other.name)
other_nodes = {other.name: other.copy(deps=False)}
nodes = other_nodes.copy()
nodes.update(self_nodes)
for name in nodes:
if name in self_nodes:
dependencies = self[name]._dependencies
for dep in dependencies:
nodes[name]._add_dependency(nodes[dep],
dependencies[dep].deptypes)
if any(dep not in self_nodes for dep in dependencies):
nodes[name].build_spec = self[name].build_spec
else:
dependencies = other[name]._dependencies
for dep in dependencies:
nodes[name]._add_dependency(nodes[dep],
dependencies[dep].deptypes)
if any(dep not in other_nodes for dep in dependencies):
nodes[name].build_spec = other[name].build_spec
# Clear cached hashes
nodes[self.name].clear_cached_hashes()
return nodes[self.name]
def clear_cached_hashes(self):
"""
Clears all cached hashes in a Spec, while preserving other properties.
"""
for attr in ht.SpecHashDescriptor.hash_types:
if hasattr(self, attr):
setattr(self, attr, None)
class LazySpecCache(collections.defaultdict):
"""Cache for Specs that uses a spec_like as key, and computes lazily

View File

@@ -12,7 +12,6 @@
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.config
import spack.environment as ev
@pytest.fixture()
@@ -82,40 +81,3 @@ def test_parse_spec_flags_with_spaces(
assert all(x not in s.variants for x in unexpected_variants)
assert all(x in s.variants for x in expected_variants)
@pytest.mark.usefixtures('config')
def test_match_spec_env(mock_packages, mutable_mock_env_path):
"""
Concretize a spec with non-default options in an environment. Make
sure that when we ask for a matching spec when the environment is
active that we get the instance concretized in the environment.
"""
# Initial sanity check: we are planning on choosing a non-default
# value, so make sure that is in fact not the default.
check_defaults = spack.cmd.parse_specs(['a'], concretize=True)[0]
assert not check_defaults.satisfies('foobar=baz')
e = ev.create('test')
e.add('a foobar=baz')
e.concretize()
with e:
env_spec = spack.cmd.matching_spec_from_env(
spack.cmd.parse_specs(['a'])[0])
assert env_spec.satisfies('foobar=baz')
assert env_spec.concrete
@pytest.mark.usefixtures('config')
def test_multiple_env_match_raises_error(mock_packages, mutable_mock_env_path):
e = ev.create('test')
e.add('a foobar=baz')
e.add('a foobar=fee')
e.concretize()
with e:
with pytest.raises(
spack.environment.SpackEnvironmentError) as exc_info:
spack.cmd.matching_spec_from_env(spack.cmd.parse_specs(['a'])[0])
assert 'matches multiple specs' in exc_info.value.message

View File

@@ -11,9 +11,6 @@
import spack.environment as ev
import spack.main
import spack.util.spack_yaml as syaml
import spack.spec
import spack.database
import spack.store
config = spack.main.SpackCommand('config')
env = spack.main.SpackCommand('env')
@@ -648,50 +645,3 @@ def check_config_updated(data):
assert isinstance(data['install_tree'], dict)
assert data['install_tree']['root'] == '/fake/path'
assert data['install_tree']['projections'] == {'all': '{name}-{version}'}
def test_config_prefer_upstream(tmpdir_factory, install_mockery, mock_fetch,
mutable_config, gen_mock_layout, monkeypatch):
"""Check that when a dependency package is recorded as installed in
an upstream database, it is not reinstalled.
"""
mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))
prepared_db = spack.database.Database(mock_db_root)
upstream_layout = gen_mock_layout('/a/')
for spec in [
'hdf5 +mpi',
'hdf5 ~mpi',
'boost+debug~icu+graph',
'dependency-install',
'patch']:
dep = spack.spec.Spec(spec)
dep.concretize()
prepared_db.add(dep, upstream_layout)
downstream_db_root = str(
tmpdir_factory.mktemp('mock_downstream_db_root'))
db_for_test = spack.database.Database(
downstream_db_root, upstream_dbs=[prepared_db])
monkeypatch.setattr(spack.store, 'db', db_for_test)
output = config('prefer-upstream')
scope = spack.config.default_modify_scope('packages')
cfg_file = spack.config.config.get_config_filename(scope, 'packages')
packages = syaml.load(open(cfg_file))['packages']
# Make sure only the non-default variants are set.
assert packages['boost'] == {
'compiler': ['gcc@4.5.0'],
'variants': '+debug +graph',
'version': ['1.63.0']}
assert packages['dependency-install'] == {
'compiler': ['gcc@4.5.0'], 'version': ['2.0']}
# Ensure that neither variant gets listed for hdf5, since they conflict
assert packages['hdf5'] == {
'compiler': ['gcc@4.5.0'], 'version': ['2.3']}
# Make sure a message about the conflicting hdf5's was given.
assert '- hdf5' in output

View File

@@ -139,19 +139,6 @@ def test_concretize():
assert any(x.name == 'mpileaks' for x in env_specs)
def test_env_uninstalled_specs(install_mockery, mock_fetch):
e = ev.create('test')
e.add('cmake-client')
e.concretize()
assert any(s.name == 'cmake-client' for s in e.uninstalled_specs())
e.install_all()
assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs())
e.add('mpileaks')
e.concretize()
assert not any(s.name == 'cmake-client' for s in e.uninstalled_specs())
assert any(s.name == 'mpileaks' for s in e.uninstalled_specs())
def test_env_install_all(install_mockery, mock_fetch):
e = ev.create('test')
e.add('cmake-client')

View File

@@ -1,48 +0,0 @@
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import pytest
import spack.environment as ev
from spack.main import SpackCommand, SpackCommandError
# everything here uses the mock_env_path
pytestmark = pytest.mark.usefixtures(
"mutable_mock_env_path", "config", "mutable_mock_repo"
)
@pytest.mark.disable_clean_stage_check
def test_fetch_in_env(
tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
SpackCommand("env")("create", "test")
with ev.read("test"):
SpackCommand("add")("python")
with pytest.raises(SpackCommandError):
SpackCommand("fetch")()
SpackCommand("concretize")()
SpackCommand("fetch")()
@pytest.mark.disable_clean_stage_check
def test_fetch_single_spec(
tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
SpackCommand("fetch")("mpileaks")
@pytest.mark.disable_clean_stage_check
def test_fetch_multiple_specs(
tmpdir, mock_archive, mock_stage, mock_fetch, install_mockery
):
SpackCommand("fetch")("mpileaks", "gcc@10.2.0", "python")
def test_fetch_no_argument():
with pytest.raises(SpackCommandError):
SpackCommand("fetch")()

View File

@@ -925,35 +925,6 @@ def test_compiler_constraint_with_external_package(
assert s.external
assert s.satisfies(expected)
@pytest.mark.regression('20976')
@pytest.mark.parametrize('compiler,spec_str,expected,xfailold', [
('gcc', 'external-common-python %clang',
'%clang ^external-common-openssl%gcc ^external-common-gdbm%clang', False),
('clang', 'external-common-python',
'%clang ^external-common-openssl%clang ^external-common-gdbm%clang', True)
])
def test_compiler_in_nonbuildable_external_package(
self, compiler, spec_str, expected, xfailold
):
"""Check that the compiler of a non-buildable external package does not
spread to other dependencies, unless no other compiler is specified."""
packages_yaml = {
'external-common-openssl': {
'externals': [
{'spec': 'external-common-openssl@1.1.1i%' + compiler,
'prefix': '/usr'}
],
'buildable': False
}
}
spack.config.set('packages', packages_yaml)
s = Spec(spec_str).concretized()
if xfailold and spack.config.get('config:concretizer') == 'original':
pytest.xfail('This only works on the ASP-based concretizer')
assert s.satisfies(expected)
assert 'external-common-perl' not in [d.name for d in s.dependencies()]
def test_external_packages_have_consistent_hash(self):
if spack.config.get('config:concretizer') == 'original':
pytest.skip('This test needs the ASP-based concretizer')
@@ -1127,15 +1098,3 @@ def test_concretization_of_test_dependencies(self):
# dependency type declared to infer that the dependency holds.
s = Spec('test-dep-with-imposed-conditions').concretized()
assert 'c' not in s
@pytest.mark.parametrize('spec_str', [
'wrong-variant-in-conflicts',
'wrong-variant-in-depends-on'
])
def test_error_message_for_inconsistent_variants(self, spec_str):
if spack.config.get('config:concretizer') == 'original':
pytest.xfail('Known failure of the original concretizer')
s = Spec(spec_str)
with pytest.raises(RuntimeError, match='not found in package'):
s.concretize()

View File

@@ -985,74 +985,6 @@ def test_forwarding_of_architecture_attributes(self):
assert 'avx512' not in spec.target
assert spec.target < 'broadwell'
@pytest.mark.parametrize('transitive', [True, False])
def test_splice(self, transitive):
# Tests the new splice function in Spec using a somewhat simple case
# with a variant with a conditional dependency.
# TODO: Test being able to splice in different provider for a virtual.
# Example: mvapich for mpich.
spec = Spec('splice-t')
dep = Spec('splice-h+foo')
spec.concretize()
dep.concretize()
# Sanity checking that these are not the same thing.
assert dep.dag_hash() != spec['splice-h'].dag_hash()
assert dep.build_hash() != spec['splice-h'].build_hash()
# Do the splice.
out = spec.splice(dep, transitive)
# Returned spec should still be concrete.
assert out.concrete
# Traverse the spec and assert that all dependencies are accounted for.
for node in spec.traverse():
assert node.name in out
# If the splice worked, then the full hash of the spliced dep should
# now match the full hash of the build spec of the dependency from the
# returned spec.
out_h_build = out['splice-h'].build_spec
assert out_h_build.full_hash() == dep.full_hash()
# Transitivity should determine whether the transitive dependency was
# changed.
expected_z = dep['splice-z'] if transitive else spec['splice-z']
assert out['splice-z'].full_hash() == expected_z.full_hash()
# Sanity check build spec of out should be the original spec.
assert (out['splice-t'].build_spec.full_hash() ==
spec['splice-t'].full_hash())
# Finally, the spec should know it's been spliced:
assert out.spliced
@pytest.mark.parametrize('transitive', [True, False])
def test_splice_input_unchanged(self, transitive):
spec = Spec('splice-t').concretized()
dep = Spec('splice-h+foo').concretized()
orig_spec_hash = spec.full_hash()
orig_dep_hash = dep.full_hash()
spec.splice(dep, transitive)
# Post-splice, dag hash should still be different; no changes should be
# made to these specs.
assert spec.full_hash() == orig_spec_hash
assert dep.full_hash() == orig_dep_hash
@pytest.mark.parametrize('transitive', [True, False])
def test_splice_subsequent(self, transitive):
spec = Spec('splice-t')
dep = Spec('splice-h+foo')
spec.concretize()
dep.concretize()
out = spec.splice(dep, transitive)
# Now we attempt a second splice.
dep = Spec('splice-z+bar')
dep.concretize()
# Transitivity shouldn't matter since Splice Z has no dependencies.
out2 = out.splice(dep, transitive)
assert out2.concrete
assert out2['splice-z'].build_hash() != spec['splice-z'].build_hash()
assert out2['splice-z'].build_hash() != out['splice-z'].build_hash()
assert out2['splice-z'].full_hash() != spec['splice-z'].full_hash()
assert out2['splice-z'].full_hash() != out['splice-z'].full_hash()
assert (out2['splice-t'].build_spec.full_hash() ==
spec['splice-t'].full_hash())
assert out2.spliced
@pytest.mark.parametrize('spec,constraint,expected_result', [
('libelf target=haswell', 'target=broadwell', False),
('libelf target=haswell', 'target=haswell', True),

View File

@@ -130,13 +130,8 @@ def test_to_record_dict(mock_packages, config):
assert record[key] == value
@pytest.mark.parametrize("hash_type", [
ht.dag_hash,
ht.build_hash,
ht.full_hash
])
def test_ordered_read_not_required_for_consistent_dag_hash(
hash_type, config, mock_packages
config, mock_packages
):
"""Make sure ordered serialization isn't required to preserve hashes.
@@ -153,15 +148,15 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
#
# Dict & corresponding YAML & JSON from the original spec.
#
spec_dict = spec.to_dict(hash=hash_type)
spec_yaml = spec.to_yaml(hash=hash_type)
spec_json = spec.to_json(hash=hash_type)
spec_dict = spec.to_dict()
spec_yaml = spec.to_yaml()
spec_json = spec.to_json()
#
# Make a spec with reversed OrderedDicts for every
# OrderedDict in the original.
#
reversed_spec_dict = reverse_all_dicts(spec.to_dict(hash=hash_type))
reversed_spec_dict = reverse_all_dicts(spec.to_dict())
#
# Dump to YAML and JSON
@@ -195,13 +190,11 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
reversed_json_string
)
# Strip spec if we stripped the yaml
spec = spec.copy(deps=hash_type.deptype)
# TODO: remove this when build deps are in provenance.
spec = spec.copy(deps=('link', 'run'))
# specs are equal to the original
assert spec == round_trip_yaml_spec
assert spec == round_trip_json_spec
assert spec == round_trip_reversed_yaml_spec
assert spec == round_trip_reversed_json_spec
assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
@@ -211,18 +204,16 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
assert spec.dag_hash() == round_trip_json_spec.dag_hash()
assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
# full_hashes are equal if we round-tripped by build_hash or full_hash
if hash_type in (ht.build_hash, ht.full_hash):
spec.concretize()
round_trip_yaml_spec.concretize()
round_trip_json_spec.concretize()
round_trip_reversed_yaml_spec.concretize()
round_trip_reversed_json_spec.concretize()
assert spec.full_hash() == round_trip_yaml_spec.full_hash()
assert spec.full_hash() == round_trip_json_spec.full_hash()
assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
# full_hashes are equal
spec.concretize()
round_trip_yaml_spec.concretize()
round_trip_json_spec.concretize()
round_trip_reversed_yaml_spec.concretize()
round_trip_reversed_json_spec.concretize()
assert spec.full_hash() == round_trip_yaml_spec.full_hash()
assert spec.full_hash() == round_trip_json_spec.full_hash()
assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
@pytest.mark.parametrize("module", [

View File

@@ -155,7 +155,7 @@ def path_from_modules(modules):
if candidate_path and not os.path.exists(candidate_path):
msg = ("Extracted path from module does not exist "
"[module={0}, path={1}]")
"[module={0}, path={0}]")
tty.warn(msg.format(module_name, candidate_path))
# If anything is found, then it's the best choice. This means

View File

@@ -7,17 +7,17 @@ stages: [generate, build]
- spack --version
- cd share/spack/gitlab/cloud_e4s_pipelines/stacks/e4s
- spack env activate --without-view .
- spack ci generate --check-index-only
- spack -d ci generate --check-index-only
--output-file "${CI_PROJECT_DIR}/jobs_scratch_dir/e4s_pipeline.yml"
artifacts:
paths:
- "${CI_PROJECT_DIR}/jobs_scratch_dir/e4s_pipeline.yml"
tags: ["spack", "public", "medium"]
interruptible: true
e4s-pr-generate:
only:
- /^github\/pr[\d]+_.*$/
- move-pr-stack-into-spack
extends: .generate
before_script:
- export SPACK_PR_BRANCH=${CI_COMMIT_REF_NAME}
@@ -33,6 +33,7 @@ e4s-develop-generate:
e4s-pr-build:
only:
- /^github\/pr[\d]+_.*$/
- move-pr-stack-into-spack
stage: build
trigger:
include:

View File

@@ -88,10 +88,10 @@ spack:
- raja
- rempi
- scr
- slate ^openblas@0.3.6 threads=openmp ^cuda@10.2.89
- slate ^openblas@0.3.5 threads=openmp ^cuda@10.2.89
- slepc
- stc
- strumpack ~slate ^openblas@0.3.6 threads=openmp
- strumpack ~slate
- sundials
- superlu
- superlu-dist

View File

@@ -109,6 +109,7 @@ succeeds spack -m load b
fails spack -m load -l
# test a variable MacOS clears and one it doesn't for recursive loads
contains "export LD_LIBRARY_PATH=$(spack -m location -i a)/lib:$(spack -m location -i b)/lib" spack -m load --sh a
contains "export LIBRARY_PATH=$(spack -m location -i a)/lib:$(spack -m location -i b)/lib" spack -m load --sh a
succeeds spack -m load --only dependencies a
succeeds spack -m load --only package a
fails spack -m load d

View File

@@ -333,7 +333,7 @@ _spack() {
then
SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
else
SPACK_COMPREPLY="activate add arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
SPACK_COMPREPLY="activate add arch blame build-env buildcache cd checkout checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mark mirror module patch pkg providers pydoc python reindex remove rm repo resource restage solve spec stage style test test-env tutorial undevelop uninstall unit-test unload url verify versions view"
fi
}
@@ -459,6 +459,15 @@ _spack_cd() {
fi
}
_spack_checkout() {
if $list_options
then
SPACK_COMPREPLY="-h --help -r --remote --url --env --repo"
else
SPACK_COMPREPLY=""
fi
}
_spack_checksum() {
if $list_options
then
@@ -587,7 +596,7 @@ _spack_config() {
then
SPACK_COMPREPLY="-h --help --scope"
else
SPACK_COMPREPLY="get blame edit list add prefer-upstream remove rm update revert"
SPACK_COMPREPLY="get blame edit list add remove rm update revert"
fi
}
@@ -631,10 +640,6 @@ _spack_config_add() {
fi
}
_spack_config_prefer_upstream() {
SPACK_COMPREPLY="-h --help --local"
}
_spack_config_remove() {
if $list_options
then

View File

@@ -1,13 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class ExternalCommonGdbm(Package):
homepage = "http://www.gnu.org.ua/software/gdbm/gdbm.html"
url = "https://ftpmirror.gnu.org/gdbm/gdbm-1.18.1.tar.gz"
version('1.18.1', 'be78e48cdfc1a7ad90efff146dce6cfe')

View File

@@ -1,14 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class ExternalCommonOpenssl(Package):
homepage = "http://www.openssl.org"
url = "http://www.openssl.org/source/openssl-1.1.1i.tar.gz"
version('1.1.1i', 'be78e48cdfc1a7ad90efff146dce6cfe')
depends_on('external-common-perl')

View File

@@ -1,14 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class ExternalCommonPerl(Package):
homepage = "http://www.perl.org"
url = "http://www.cpan.org/src/5.0/perl-5.32.0.tar.gz"
version('5.32.0', 'be78e48cdfc1a7ad90efff146dce6cfe')
depends_on('external-common-gdbm')

View File

@@ -1,15 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class ExternalCommonPython(Package):
homepage = "http://www.python.org"
url = "http://www.python.org/ftp/python/3.8.7/Python-3.8.7.tgz"
version('3.8.7', 'be78e48cdfc1a7ad90efff146dce6cfe')
depends_on('external-common-openssl')
depends_on('external-common-gdbm')

View File

@@ -1,22 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class SpliceH(AutotoolsPackage):
"""Simple package with one optional dependency"""
homepage = "http://www.example.com"
url = "http://www.example.com/splice-h-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
variant('foo', default=False, description='nope')
variant('bar', default=False, description='nope')
variant('baz', default=False, description='nope')
depends_on('splice-z')
depends_on('splice-z+foo', when='+foo')

View File

@@ -1,18 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class SpliceT(AutotoolsPackage):
"""Simple package with one optional dependency"""
homepage = "http://www.example.com"
url = "http://www.example.com/splice-t-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
depends_on('splice-h')
depends_on('splice-z')

View File

@@ -1,18 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class SpliceZ(AutotoolsPackage):
"""Simple package with one optional dependency"""
homepage = "http://www.example.com"
url = "http://www.example.com/splice-z-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
variant('foo', default=False, description='nope')
variant('bar', default=False, description='nope')

View File

@@ -1,13 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class WrongVariantInConflicts(Package):
"""This package has a wrong variant spelled in a conflict."""
homepage = "http://www.example.com"
url = "http://www.example.com/b-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
conflicts('+foo', when='@1.0')

View File

@@ -1,13 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class WrongVariantInDependsOn(Package):
"""This package has a wrong variant spelled in a depends_on."""
homepage = "http://www.example.com"
url = "http://www.example.com/b-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
depends_on('b+doesnotexist')

View File

@@ -7,7 +7,7 @@
from spack import *
class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
class Aluminum(CMakePackage, CudaPackage):
"""Aluminum provides a generic interface to high-performance
communication libraries, with a focus on allreduce
algorithms. Blocking and non-blocking algorithms and GPU-aware
@@ -38,19 +38,13 @@ class Aluminum(CMakePackage, CudaPackage, ROCmPackage):
' communication of accelerator data')
variant('cuda_rma', default=False, description='Builds with support for CUDA intra-node '
' Put/Get and IPC RMA functionality')
variant('rccl', default=False, description='Builds with support for NCCL communication lib')
depends_on('cmake@3.17.0:', type='build')
depends_on('mpi')
depends_on('nccl', when='+nccl')
depends_on('hwloc@1.11:')
depends_on('hwloc +cuda +nvml', when='+cuda')
depends_on('hwloc@2.3.0:', when='+rocm')
depends_on('cub', when='@:0.1,0.6.0: +cuda ^cuda@:10.99')
depends_on('hipcub', when='@:0.1,0.6.0: +rocm')
conflicts('~cuda', when='+cuda_rma', msg='CUDA RMA support requires CUDA')
conflicts('+cuda', when='+rocm', msg='CUDA and ROCm support are mutually exclusive')
generator = 'Ninja'
depends_on('ninja', type='build')
@@ -60,8 +54,7 @@ def cmake_args(self):
args = [
'-DCMAKE_CXX_STANDARD=14',
'-DALUMINUM_ENABLE_CUDA:BOOL=%s' % ('+cuda' in spec),
'-DALUMINUM_ENABLE_NCCL:BOOL=%s' % ('+nccl' in spec or '+rccl' in spec),
'-DALUMINUM_ENABLE_ROCM:BOOL=%s' % ('+rocm' in spec)]
'-DALUMINUM_ENABLE_NCCL:BOOL=%s' % ('+nccl' in spec)]
if '+cuda' in spec:
args.append('-DCMAKE_CUDA_STANDARD=14')
@@ -89,16 +82,4 @@ def cmake_args(self):
args.extend([
'-DOpenMP_DIR={0}'.format(clang_root)])
if '+rocm' in spec:
args.extend([
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix),
'-DHIP_CXX_COMPILER={0}'.format(self.spec['hip'].hipcc)])
archs = self.spec.variants['amdgpu_target'].value
if archs != 'none':
arch_str = ",".join(archs)
args.append(
'-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
' -g -fsized-deallocation -fPIC'.format(arch_str)
)
return args

View File

@@ -1,111 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import itertools
def process_amrex_constraints():
"""Map constraints when building with external AMReX"""
a1 = ['+', '~']
a2 = ['mpi', 'hypre', 'cuda']
a3 = [[x + y for x in a1] for y in a2]
for k in itertools.product(*a3):
if '+cuda' in k:
for arch in CudaPackage.cuda_arch_values:
yield ''.join(k) + " cuda_arch=%s" % arch
else:
yield ''.join(k)
class AmrWind(CMakePackage, CudaPackage):
"""AMR-Wind is a massively parallel, block-structured adaptive-mesh,
incompressible flow solver for wind turbine and wind farm simulations."""
homepage = "https://github.com/Exawind/amr-wind"
git = "https://github.com/exawind/amr-wind.git"
maintainers = ['sayerhs', 'jrood-nrel', 'michaeljbrazell']
tags = ['ecp', 'ecp-apps']
version('main', branch='main', submodules=True)
variant('shared', default=True,
description='Build shared libraries')
variant('unit', default=True,
description='Build unit tests')
variant('tests', default=True,
description='Activate regression tests')
variant('mpi', default=True,
description='Enable MPI support')
variant('openmp', default=False,
description='Enable OpenMP for CPU builds')
variant('netcdf', default=True,
description='Enable NetCDF support')
variant('hypre', default=True,
description='Enable Hypre integration')
variant('masa', default=False,
description='Enable MASA integration')
variant('openfast', default=False,
description='Enable OpenFAST integration')
variant('internal-amrex', default=True,
description='Use AMRex submodule to build')
variant('fortran', default=False,
description='Build fortran interfaces')
conflicts('+openmp', when='+cuda')
depends_on('mpi', when='+mpi')
for opt in process_amrex_constraints():
dopt = '+particles' + opt
if '+hypre' in dopt:
dopt = "+fortran" + dopt
depends_on('amrex@develop' + dopt, when='~internal-amrex' + opt)
depends_on('hypre+mpi+int64~cuda@2.20.0:', when='+mpi~cuda+hypre')
depends_on('hypre~mpi+int64~cuda@2.20.0:', when='~mpi~cuda+hypre')
for arch in CudaPackage.cuda_arch_values:
depends_on('hypre+mpi~int64+cuda cuda_arch=%s @2.20.0:' % arch,
when='+mpi+cuda+hypre cuda_arch=%s' % arch)
depends_on('hypre~mpi~int64+cuda cuda_arch=%s @2.20.0:' % arch,
when='~mpi+cuda+hypre cuda_arch=%s' % arch)
depends_on('netcdf-c', when='+netcdf')
depends_on('masa', when='+masa')
depends_on('openfast+cxx', when='+openfast')
def cmake_args(self):
define = CMakePackage.define
vs = ["mpi", "cuda", "openmp", "netcdf", "hypre", "masa",
"openfast", "tests", "fortran"]
args = [
self.define_from_variant("AMR_WIND_ENABLE_%s" % v.upper(), v)
for v in vs
]
args += [
define('CMAKE_EXPORT_COMPILE_COMMANDS', True),
define('AMR_WIND_ENABLE_ALL_WARNINGS', True),
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
self.define_from_variant('AMR_WIND_TEST_WITH_FCOMPARE', 'tests'),
]
if '+cuda' in self.spec:
amrex_arch = ['{0:.1f}'.format(float(i) / 10.0)
for i in self.spec.variants['cuda_arch'].value]
if amrex_arch:
args.append(define('AMReX_CUDA_ARCH', amrex_arch))
if '+internal-amrex' in self.spec:
args.append(self.define('AMR_WIND_USE_INTERNAL_AMREX', True))
else:
args += [
self.define('AMR_WIND_USE_INTERNAL_AMREX', False),
self.define('AMReX_ROOT', self.spec['amrex'].prefix)
]
return args
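process_amrex_constraints() at the top of the removed file above enumerates every +/~ combination of mpi, hypre and cuda, expanding +cuda once per CUDA architecture, so the class can emit one depends_on('amrex@develop ...') line per combination. A standalone sketch of the same enumeration, runnable outside Spack (the architecture list is an assumption standing in for CudaPackage.cuda_arch_values):

import itertools

CUDA_ARCH_VALUES = ('70', '80')  # placeholder for CudaPackage.cuda_arch_values

def enumerate_constraints():
    """Yield every sign combination of the three variants, expanded per CUDA arch."""
    signs = ['+', '~']
    names = ['mpi', 'hypre', 'cuda']
    per_variant = [[s + n for s in signs] for n in names]
    for combo in itertools.product(*per_variant):
        if '+cuda' in combo:
            for arch in CUDA_ARCH_VALUES:
                yield ''.join(combo) + ' cuda_arch=%s' % arch
        else:
            yield ''.join(combo)

print(list(enumerate_constraints()))  # '+mpi+hypre+cuda cuda_arch=70', ..., '~mpi~hypre~cuda'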

View File

@@ -68,8 +68,6 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
description='Enable Hypre interfaces')
variant('petsc', default=False,
description='Enable PETSc interfaces')
variant('pic', default=False,
description='Enable PIC')
# Build dependencies
depends_on('mpi', when='+mpi')
@@ -83,8 +81,6 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
depends_on('cmake@3.17:', type='build', when='^cuda @11:')
depends_on('hdf5@1.10.4: +mpi', when='+hdf5')
depends_on('rocrand', type='build', when='+rocm')
depends_on('hypre', type='link', when='+hypre')
depends_on('petsc', type='link', when='+petsc')
conflicts('%apple-clang')
conflicts('%clang')
@@ -97,14 +93,14 @@ class Amrex(CMakePackage, CudaPackage, ROCmPackage):
msg='AMReX HDF5 support needs AMReX newer than version 20.06')
conflicts('+hypre', when='@:20.06',
msg='AMReX Hypre support needs AMReX newer than version 20.06')
conflicts('+hypre', when='@:20.07 ~fortran',
msg='AMReX < 20.08 needs the Fortran API (+fortran) for Hypre support')
conflicts('+hypre', when='~fortran',
msg='AMReX Hypre support needs AMReX Fortran API (+fortran)')
conflicts('+hypre', when='~linear_solvers',
msg='AMReX Hypre support needs variant +linear_solvers')
conflicts('+petsc', when='@:20.06',
msg='PETSc support needs AMReX newer than version 20.06')
conflicts('+petsc', when='@:20.07 ~fortran',
msg='AMReX < 20.08 needs the Fortran API (+fortran) for PETSc support')
msg='AMReX PETSc support needs AMReX newer than version 20.06')
conflicts('+petsc', when='~fortran',
msg='AMReX PETSc support needs AMReX Fortran API (+fortran)')
conflicts('+petsc', when='~linear_solvers',
msg='AMReX PETSc support needs variant +linear_solvers')
conflicts('+cuda', when='@:19.08',
@@ -135,50 +131,6 @@ def get_cuda_arch_string(self, values):
vf = tuple(float(x) / 10.0 for x in values)
return ';'.join(str(x) for x in vf)
#
# For versions > 20.11
#
@when('@20.12:,develop')
def cmake_args(self):
args = [
'-DUSE_XSDK_DEFAULTS=ON',
self.define_from_variant('AMReX_SPACEDIM', 'dimensions'),
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
self.define_from_variant('AMReX_MPI', 'mpi'),
self.define_from_variant('AMReX_OMP', 'openmp'),
'-DXSDK_PRECISION:STRING=%s' %
self.spec.variants['precision'].value.upper(),
self.define_from_variant('XSDK_ENABLE_Fortran', 'fortran'),
self.define_from_variant('AMReX_FORTRAN_INTERFACES', 'fortran'),
self.define_from_variant('AMReX_EB', 'eb'),
self.define_from_variant('AMReX_LINEAR_SOLVERS',
'linear_solvers'),
self.define_from_variant('AMReX_AMRDATA', 'amrdata'),
self.define_from_variant('AMReX_PARTICLES', 'particles'),
self.define_from_variant('AMReX_HDF5', 'hdf5'),
self.define_from_variant('AMReX_HYPRE', 'hypre'),
self.define_from_variant('AMReX_PETSC', 'petsc'),
self.define_from_variant('AMReX_PIC', 'pic'),
]
if self.spec.satisfies('%fj'):
args.append('-DCMAKE_Fortran_MODDIR_FLAG=-M')
if '+cuda' in self.spec:
args.append('-DAMReX_GPU_BACKEND=CUDA')
args.append('-DAMReX_CUDA_ERROR_CAPTURE_THIS=ON')
args.append('-DAMReX_CUDA_ERROR_CROSS_EXECUTION_SPACE_CALL=ON')
cuda_arch = self.spec.variants['cuda_arch'].value
args.append('-DCUDA_ARCH=' + self.get_cuda_arch_string(cuda_arch))
if '+rocm' in self.spec:
args.append('-DCMAKE_CXX_COMPILER={0}'.format(self.spec['hip'].hipcc))
args.append('-DAMReX_GPU_BACKEND=HIP')
targets = self.spec.variants['amdgpu_target'].value
args.append('-DAMReX_AMD_ARCH=' + ';'.join(str(x) for x in targets))
return args
#
# For versions <= 20.11
#
@@ -214,3 +166,46 @@ def cmake_args(self):
args.append('-DCUDA_ARCH=' + self.get_cuda_arch_string(cuda_arch))
return args
#
# For versions > 20.11
#
@when('@20.12:')
def cmake_args(self):
args = [
'-DUSE_XSDK_DEFAULTS=ON',
self.define_from_variant('AMReX_SPACEDIM', 'dimensions'),
self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
self.define_from_variant('AMReX_MPI', 'mpi'),
self.define_from_variant('AMReX_OMP', 'openmp'),
'-DXSDK_PRECISION:STRING=%s' %
self.spec.variants['precision'].value.upper(),
self.define_from_variant('XSDK_ENABLE_Fortran', 'fortran'),
self.define_from_variant('AMReX_FORTRAN_INTERFACES', 'fortran'),
self.define_from_variant('AMReX_EB', 'eb'),
self.define_from_variant('AMReX_LINEAR_SOLVERS',
'linear_solvers'),
self.define_from_variant('AMReX_AMRDATA', 'amrdata'),
self.define_from_variant('AMReX_PARTICLES', 'particles'),
self.define_from_variant('AMReX_HDF5', 'hdf5'),
self.define_from_variant('AMReX_HYPRE', 'hypre'),
self.define_from_variant('AMReX_PETSC', 'petsc'),
]
if self.spec.satisfies('%fj'):
args.append('-DCMAKE_Fortran_MODDIR_FLAG=-M')
if '+cuda' in self.spec:
args.append('-DAMReX_GPU_BACKEND=CUDA')
args.append('-DAMReX_CUDA_ERROR_CAPTURE_THIS=ON')
args.append('-DAMReX_CUDA_ERROR_CROSS_EXECUTION_SPACE_CALL=ON')
cuda_arch = self.spec.variants['cuda_arch'].value
args.append('-DCUDA_ARCH=' + self.get_cuda_arch_string(cuda_arch))
if '+rocm' in self.spec:
args.append('-DCMAKE_CXX_COMPILER={0}'.format(self.spec['hip'].hipcc))
args.append('-DAMReX_GPU_BACKEND=HIP')
targets = self.spec.variants['amdgpu_target'].value
args.append('-DAMReX_AMD_ARCH=' + ';'.join(str(x) for x in targets))
return args
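Both versions of cmake_args in the hunks above call get_cuda_arch_string to convert Spack's cuda_arch values into the semicolon-separated list AMReX expects. A small standalone sketch of that conversion (the body mirrors the method shown at the top of this diff; the sample input is an assumption):

def get_cuda_arch_string(values):
    """Convert cuda_arch values like ('70', '80') into '7.0;8.0'."""
    vf = tuple(float(x) / 10.0 for x in values)
    return ';'.join(str(x) for x in vf)

print(get_cuda_arch_string(('70', '80')))  # -> 7.0;8.0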

View File

@@ -11,33 +11,11 @@ class Asciidoc(AutotoolsPackage):
pages and other small to medium sized documents."""
homepage = "http://asciidoc.org"
# This URL always works, but yields a different checksum
url = "https://github.com/asciidoc-py/asciidoc-py/archive/8.6.10.tar.gz"
git = "https://github.com/asciidoc-py/asciidoc-py.git"
url = "http://downloads.sourceforge.net/project/asciidoc/asciidoc/8.6.9/asciidoc-8.6.9.tar.gz"
version('master', branch='master')
version('9.1.0', sha256='5056c20157349f8dc74f005b6e88ccbf1078c4e26068876f13ca3d1d7d045fe7')
version('9.0.5', sha256='edc8328c3682a8568172656f6fc309b189f65219a49517966c7ea144cb25f8b2')
version('9.0.4', sha256='fb0e683ae6a4baf34a8969c3af764ca729526196576729ee9275b9f39fd8b79c')
version('9.0.3', sha256='b6ef4accd7959f51b532ab4d3aaa211e15f18fd544c4c3cc3ed712f5590a50de')
version('9.0.2', sha256='93fbe32d56380afee2f26389d8ebfdf33de72536449d53308120d3c20d2c1e17')
version('8.6.10', sha256='22d6793d4f48cefb4a6963853212a214591a591ece1bcbc56af3c67c642003ea')
version('8.6.9', sha256='45e95bed1e341980f7de0a66fdc467090956fe55d4625bdad8057cd926e0c6c6')
version('8.6.9', sha256='78db9d0567c8ab6570a6eff7ffdf84eadd91f2dfc0a92a2d0105d323cab4e1f0')
depends_on('libxml2', type=('build', 'run'))
depends_on('libxslt', type=('build', 'run'))
depends_on('docbook-xml', type=('build', 'run'))
depends_on('docbook-xsl', type=('build', 'run'))
depends_on('python@2.3.0:2.7.99', when='@:8.6.9', type=('build', 'run'))
depends_on('python@3.5:', when='@9.0.2:', type=('build', 'run'))
@when('@:8.6.9')
def install(self, spec, prefix):
# Old release demands python2
mpythpath = spec['python'].command.path
exes = ['asciidoc', 'a2x']
for exe in exes:
fthfile = FileFilter(exe + '.py')
fthfile.filter('#!/usr/bin/env python', '#!' + mpythpath)
make('install')
depends_on('libxml2')
depends_on('libxslt')
depends_on('docbook-xml')
depends_on('docbook-xsl')

View File

@@ -1,25 +0,0 @@
From 310ecb91e619192fd7097a24889485526315667a Mon Sep 17 00:00:00 2001
From: Harmen Stoppels <harmenstoppels@gmail.com>
Date: Thu, 25 Feb 2021 16:32:30 +0100
Subject: [PATCH] remove smth
---
dist/Makefile.in | 2 ++
1 file changed, 2 insertions(+)
diff --git a/dist/Makefile.in b/dist/Makefile.in
index e90c0c2..d041d47 100644
--- a/dist/Makefile.in
+++ b/dist/Makefile.in
@@ -1160,6 +1160,8 @@ DOCLIST=api_reference articles bdb-sql collections csharp \
gsg gsg_db_rep gsg_txn index.html installation java \
license porting programmer_reference upgrading
+DOCLIST=index.html
+
install_docs:
@echo "Installing documentation: $(DESTDIR)$(docdir) ..."
@test -d $(DESTDIR)$(docdir) || \
--
2.25.1

View File

@@ -12,22 +12,15 @@ class BerkeleyDb(AutotoolsPackage):
url = "http://download.oracle.com/berkeley-db/db-18.1.40.tar.gz"
version("18.1.40", sha256="0cecb2ef0c67b166de93732769abdeba0555086d51de1090df325e18ee8da9c8")
version('18.1.32', sha256='fa1fe7de9ba91ad472c25d026f931802597c29f28ae951960685cde487c8d654', deprecated=True)
version('18.1.32', sha256='fa1fe7de9ba91ad472c25d026f931802597c29f28ae951960685cde487c8d654')
version('6.2.32', sha256='a9c5e2b004a5777aa03510cfe5cd766a4a3b777713406b02809c17c8e0e7a8fb')
version('6.1.29', sha256='b3c18180e4160d97dd197ba1d37c19f6ea2ec91d31bbfaf8972d99ba097af17d')
version('6.0.35', sha256='24421affa8ae436fe427ae4f5f2d1634da83d3d55a5ad6354a98eeedb825de55', deprecated=True)
version('6.0.35', sha256='24421affa8ae436fe427ae4f5f2d1634da83d3d55a5ad6354a98eeedb825de55')
version('5.3.28', sha256='e0a992d740709892e81f9d93f06daf305cf73fb81b545afe72478043172c3628')
variant('docs', default=False)
configure_directory = 'dist'
build_directory = 'build_unix'
patch("drop-docs.patch", when='~docs')
conflicts('%clang@7:', when='@5.3.28')
conflicts('%gcc@8:', when='@5.3.28')
def patch(self):
# some of the docs are missing in 18.1.40
if self.spec.satisfies("@18.1.40"):

View File

@@ -29,13 +29,8 @@ class Blaspp(CMakePackage, CudaPackage):
depends_on('cmake@3.15.0:', type='build')
depends_on('blas')
# only supported with clingo solver: virtual dependency preferences
# depends_on('openblas threads=openmp', when='+openmp ^openblas')
# BLASpp tests will fail when using openblas > 0.3.5 without multithreading support
# locking is only supported in openblas 0.3.7+
conflicts('^openblas@0.3.6 threads=none', msg='BLASpp requires a threadsafe openblas')
conflicts('^openblas@0.3.7: ~locking', msg='BLASpp requires a threadsafe openblas')
conflicts('^openblas@0.3.6: threads=none', msg='BLASpp requires openblas multithreading support')
def cmake_args(self):
spec = self.spec

View File

@@ -14,7 +14,6 @@ class CBlosc(CMakePackage):
homepage = "http://www.blosc.org"
url = "https://github.com/Blosc/c-blosc/archive/v1.11.1.tar.gz"
version('1.21.0', sha256='b0ef4fda82a1d9cbd11e0f4b9685abf14372db51703c595ecd4d76001a8b342d')
version('1.17.0', sha256='75d98c752b8cf0d4a6380a3089d56523f175b0afa2d0cf724a1bd0a1a8f975a4')
version('1.16.3', sha256='bec56cb0956725beb93d50478e918aca09f489f1bfe543dbd3087827a7344396')
version('1.15.0', sha256='dbbb01f9fedcdf2c2ff73296353a9253f44ce9de89c081cbd8146170dce2ba8f')

View File

@@ -71,7 +71,7 @@ class Cdo(AutotoolsPackage):
depends_on('proj@:7', when='@1.9.7+proj')
depends_on('proj@5:', when='@1.9.8:+proj')
depends_on('curl', when='+curl')
depends_on('fftw-api@3:', when='+fftw3')
depends_on('fftw@3:', when='+fftw3')
depends_on('magics', when='+magics')
depends_on('uuid')

View File

@@ -16,8 +16,6 @@ class Cgns(CMakePackage):
url = "https://github.com/CGNS/CGNS/archive/v3.3.0.tar.gz"
git = "https://github.com/CGNS/CGNS"
parallel = False
version('develop', branch='develop')
version('master', branch='master')
version('4.1.1', sha256='055d345c3569df3ae832fb2611cd7e0bc61d56da41b2be1533407e949581e226')

View File

@@ -103,10 +103,6 @@ class Cmake(Package):
version('3.0.2', sha256='6b4ea61eadbbd9bec0ccb383c29d1f4496eacc121ef7acf37c7a24777805693e')
version('2.8.10.2', sha256='ce524fb39da06ee6d47534bbcec6e0b50422e18b62abc4781a4ba72ea2910eb1')
variant('build_type', default='Release',
description='CMake build type',
values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel'))
# Revert the change that introduced a regression when parsing mpi link
# flags, see: https://gitlab.kitware.com/cmake/cmake/issues/19516
patch('cmake-revert-findmpi-link-flag-list.patch', when='@3.15.0')
@@ -244,8 +240,8 @@ def bootstrap_args(self):
# Now for CMake arguments to pass after the initial bootstrap
args.append('--')
args.append('-DCMAKE_BUILD_TYPE={0}'.format(
self.spec.variants['build_type'].value))
# Make sure to create an optimized release build
args.append('-DCMAKE_BUILD_TYPE=Release')
# Install CMake correctly, even if `spack install` runs
# inside a ctest environment

View File

@@ -98,8 +98,4 @@ def test(self):
exe_name = 'cpmd.x'
opts.append(test_file)
opts.append(test_dir)
expected = ['2 1 H O 1.84444 0.97604',
'3 1 H O 1.84444 0.97604',
'2 1 3 H O H 103.8663'
]
self.run_test(exe_name, options=opts, expected=expected)
self.run_test(exe_name, options=opts)

View File

@@ -23,10 +23,6 @@
# format returned by platform.system() and 'arch' by platform.machine()
_versions = {
'11.2.1': {
'Linux-aarch64': ('4b322fa6477d1a2cd2f2f526fa520c0f90bef2c264ef8435cb016bebb5456c5e', 'https://developer.download.nvidia.com/compute/cuda/11.2.1/local_installers/cuda_11.2.1_460.32.03_linux_sbsa.run'),
'Linux-x86_64': ('1da98cb897cc5f58a7445a4a66ca4f6926867706cb3af58a669cdcd8dc3d17c8', 'https://developer.download.nvidia.com/compute/cuda/11.2.1/local_installers/cuda_11.2.1_460.32.03_linux.run'),
'Linux-ppc64le': ('b3e8b6cd76872deb3acd050d32e197bc1c655e142b169070f0f9753680461a3f', 'https://developer.download.nvidia.com/compute/cuda/11.2.1/local_installers/cuda_11.2.1_460.32.03_linux_ppc64le.run')},
'11.2.0': {
'Linux-aarch64': ('c11dc274660e9b47b0f25ca66861a7406246a7191f1b04d0710515fcac0fa6cd', 'https://developer.download.nvidia.com/compute/cuda/11.2.0/local_installers/cuda_11.2.0_460.27.04_linux_sbsa.run'),
'Linux-x86_64': ('9c50283241ac325d3085289ed9b9c170531369de41165ce271352d4a898cbdce', 'https://developer.download.nvidia.com/compute/cuda/11.2.0/local_installers/cuda_11.2.0_460.27.04_linux.run'),
@@ -40,12 +36,12 @@
'Linux-x86_64': ('858cbab091fde94556a249b9580fadff55a46eafbcb4d4a741d2dcd358ab94a5', 'https://developer.download.nvidia.com/compute/cuda/11.1.0/local_installers/cuda_11.1.0_455.23.05_linux.run'),
'Linux-ppc64le': ('a561e6f7f659bc4100e4713523b0b8aad6b36aa77fac847f6423e7780c750064', 'https://developer.download.nvidia.com/compute/cuda/11.1.0/local_installers/cuda_11.1.0_455.23.05_linux_ppc64le.run')},
'11.0.2': {
'Linux-aarch64': ('23851e30f7c47a1baad92891abde0adbc783de5962c7480b9725198ceacda4a0', 'https://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux_sbsa.run'),
'Linux-x86_64': ('48247ada0e3f106051029ae8f70fbd0c238040f58b0880e55026374a959a69c1', 'https://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux.run'),
'Linux-ppc64le': ('db06d0f3fbf6f7aa1f106fc921ad1c86162210a26e8cb65b171c5240a3bf75da', 'https://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux_ppc64le.run')},
'Linux-aarch64': ('23851e30f7c47a1baad92891abde0adbc783de5962c7480b9725198ceacda4a0', 'http://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux_sbsa.run'),
'Linux-x86_64': ('48247ada0e3f106051029ae8f70fbd0c238040f58b0880e55026374a959a69c1', 'http://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux.run'),
'Linux-ppc64le': ('db06d0f3fbf6f7aa1f106fc921ad1c86162210a26e8cb65b171c5240a3bf75da', 'http://developer.download.nvidia.com/compute/cuda/11.0.2/local_installers/cuda_11.0.2_450.51.05_linux_ppc64le.run')},
'10.2.89': {
'Linux-x86_64': ('560d07fdcf4a46717f2242948cd4f92c5f9b6fc7eae10dd996614da913d5ca11', 'https://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda_10.2.89_440.33.01_linux.run'),
'Linux-ppc64le': ('5227774fcb8b10bd2d8714f0a716a75d7a2df240a9f2a49beb76710b1c0fc619', 'https://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda_10.2.89_440.33.01_linux_ppc64le.run')},
'Linux-x86_64': ('560d07fdcf4a46717f2242948cd4f92c5f9b6fc7eae10dd996614da913d5ca11', 'http://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda_10.2.89_440.33.01_linux.run'),
'Linux-ppc64le': ('5227774fcb8b10bd2d8714f0a716a75d7a2df240a9f2a49beb76710b1c0fc619', 'http://developer.download.nvidia.com/compute/cuda/10.2/Prod/local_installers/cuda_10.2.89_440.33.01_linux_ppc64le.run')},
'10.1.243': {
'Linux-x86_64': ('e7c22dc21278eb1b82f34a60ad7640b41ad3943d929bebda3008b72536855d31', 'https://developer.download.nvidia.com/compute/cuda/10.1/Prod/local_installers/cuda_10.1.243_418.87.00_linux.run'),
'Linux-ppc64le': ('b198002eef010bab9e745ae98e47567c955d00cf34cc8f8d2f0a6feb810523bf', 'https://developer.download.nvidia.com/compute/cuda/10.1/Prod/local_installers/cuda_10.1.243_418.87.00_linux_ppc64le.run')},
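The _versions table above keys each CUDA release by an 'OS-arch' string built from platform.system() and platform.machine(), storing a (sha256, url) pair per platform. A hedged sketch of how such a table is typically consumed to register only the entries matching the current platform (the loop is an illustration, not a quote of the cuda package):

import platform

# Trimmed stand-in for the _versions table shown above.
_versions = {
    '11.2.1': {
        'Linux-x86_64': ('1da98cb897cc5f58a7445a4a66ca4f6926867706cb3af58a669cdcd8dc3d17c8',
                         'https://developer.download.nvidia.com/compute/cuda/11.2.1/local_installers/cuda_11.2.1_460.32.03_linux.run'),
    },
}

key = '{0}-{1}'.format(platform.system(), platform.machine())
for ver, platforms in _versions.items():
    if key in platforms:
        sha, url = platforms[key]
        # In a package.py this would typically become: version(ver, sha256=sha, url=url)
        print(ver, sha, url)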

View File

@@ -9,10 +9,11 @@
class Davix(CMakePackage):
"""High-performance file management over WebDAV/HTTP."""
homepage = "https://davix.web.cern.ch/davix/docs/devel/index.html"
url = "https://github.com/cern-fts/davix/releases/download/R_0_7_6/davix-0.7.6.tar.gz"
homepage = "https://dmc.web.cern.ch/projects/davix"
url = "http://grid-deployment.web.cern.ch/grid-deployment/dms/lcgutil/tar/davix/0.6.8/davix-0.6.8.tar.gz"
list_url = "http://grid-deployment.web.cern.ch/grid-deployment/dms/lcgutil/tar/davix/"
list_depth = 1
version('0.7.6', sha256='a2e7fdff29f7ba247a3bcdb08ab1db6d6ed745de2d3971b46526986caf360673')
version('0.7.5', sha256='d920ca976846875d83af4dc50c99280bb3741fcf8351d5733453e70fa5fe6fc8')
version('0.7.3', sha256='cd46276e72c6a0da1e2ad30eb66ec509a4c023687767c62a66713fa8c23d328a')
version('0.6.9', sha256='fbd97eb5fdf82ca48770d06bf8e2805b35f23255478aa381a9d25a49eb98e348')

View File

@@ -19,4 +19,3 @@ class Dbow2(CMakePackage):
depends_on('opencv+calib3d+core+features2d+highgui+imgproc')
depends_on('boost')
depends_on('dlib')
depends_on('eigen', type='link')

View File

@@ -7,7 +7,7 @@
from spack import *
class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage):
class Dihydrogen(CMakePackage, CudaPackage):
"""DiHydrogen is the second version of the Hydrogen fork of the
well-known distributed linear algebra library,
Elemental. DiHydrogen aims to be a basic distributed
@@ -77,16 +77,10 @@ class Dihydrogen(CMakePackage, CudaPackage, ROCmPackage):
# Add Aluminum variants
depends_on('aluminum +cuda +nccl +ht +cuda_rma', when='+al +cuda')
depends_on('aluminum +rocm +rccl +ht', when='+al +rocm')
for arch in CudaPackage.cuda_arch_values:
depends_on('aluminum cuda_arch=%s' % arch, when='+al +cuda cuda_arch=%s' % arch)
# variants +rocm and amdgpu_targets are not automatically passed to
# dependencies, so do it manually.
for val in ROCmPackage.amdgpu_targets:
depends_on('aluminum amdgpu_target=%s' % val, when='amdgpu_target=%s' % val)
depends_on('cuda', when=('+cuda' or '+legacy'))
depends_on('cudnn', when=('+cuda' or '+legacy'))
depends_on('cub', when='^cuda@:10.99')
@@ -196,18 +190,6 @@ def cmake_args(self):
'-DOpenMP_libomp_LIBRARY={0}/lib/libomp.dylib'.format(
clang_root)])
if '+rocm' in spec:
args.extend([
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix),
'-DHIP_CXX_COMPILER={0}'.format(self.spec['hip'].hipcc)])
archs = self.spec.variants['amdgpu_target'].value
if archs != 'none':
arch_str = ",".join(archs)
args.append(
'-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
' -g -fsized-deallocation -fPIC'.format(arch_str)
)
return args
def setup_build_environment(self, env):

View File

@@ -1,29 +0,0 @@
--- a/configure.ac
+++ c/configure.ac
@@ -440,7 +440,7 @@ else
FFLAGS=${I_BASE}" -Ofast "${I_ARCH}
elif test x"${FC_ACT}" = x"gfortran"; then
- FFLAGS=${G_BASE}" -O3 "${G_ARCH}
+ FFLAGS=${G_BASE}" -O3 -fopenmp "${G_ARCH}
elif test x"${FC_ACT}" = x"mpifrtpx"; then
FFLAGS=${F_BASE}" -Kfast,ocl,simd,swp "${F_ARCH}
@@ -465,7 +465,7 @@ else
FCFLAGS=${I_BASE}" -Ofast "${I_ARCH}
elif test x"${FC_ACT}" = x"gfortran"; then
- FCFLAGS=${G_BASE}" -O3 "${G_ARCH}
+ FCFLAGS=${G_BASE}" -O3 -fopenmp "${G_ARCH}
elif test x"${FC_ACT}" = x"mpifrtpx"; then
FCFLAGS=${F_BASE}" -Kfast,ocl,simd,swp "${F_ARCH}
@@ -486,7 +486,7 @@ if test x"${FC_ACT}" = x"ifort"; then
FFLAGS0=${I_BASE}" -O0 "
elif test x"${FC_ACT}" = x"gfortran"; then
- FFLAGS0=${G_BASE}" -O0"
+ FFLAGS0=${G_BASE}" -O0 -fopenmp "
elif test x"${FC_ACT}" = x"mpifrtpx"; then
FFLAGS0="-Kopenmp,openmp_ordered_reduction -Cpp"

View File

@@ -1,4 +1,4 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
@@ -25,10 +25,8 @@ class Eigenexa(AutotoolsPackage):
depends_on("scalapack")
patch("fj_compiler.patch", when="%fj")
patch("gcc_compiler.patch", when="%gcc")
parallel = False
force_autoreconf = True
def setup_build_environment(self, env):
env.set("FC", self.spec["mpi"].mpifc, force=True)
@@ -49,21 +47,3 @@ def setup_build_environment(self, env):
)
),
)
@run_after('install')
def cache_test_sources(self):
self.cache_extra_test_sources("benchmark")
def test(self):
test_dir = self.test_suite.current_test_data_dir
exe_name = join_path(test_dir, "run-test.sh")
mpi_name = self.spec["mpi"].prefix.bin.mpirun
test_file = join_path(
self.install_test_root, "benchmark", "eigenexa_benchmark"
)
input_file = join_path(self.install_test_root, "benchmark", "IN")
opts = [exe_name, mpi_name, '-n', '1', test_file, '-f', input_file]
env["OMP_NUM_THREADS"] = "1"
self.run_test(
"sh", options=opts, expected="EigenExa Test Passed !", work_dir=test_dir
)

View File

@@ -1,36 +0,0 @@
#!/bin/bash
grep -q "Benchmark completed" $@
if [ $? -eq 1 ]; then
echo "EigenExa Test Failed !"
exit 1
fi
while read line;do
case $line in
*'Eigenvalue Relative Error'* )
tmp=$line
read line
if [[ "$line" != '|w| is too small, so it is not severe.' ]]; then
echo "${tmp}" >> output.txt
fi
;;
*'Eigenvalue Absolute Error'* )
tmp=$line
read line
if [ "$line" != 'Do not mind it. Condition number is too large.' ] && \
[ "$line" != 'Do not mind it. Relative error is small enough' ]; then
echo "${tmp}" >> output.txt
fi
;;
*'Residual Error Test'* | *'Orthogonality Test'* )
echo "${line}" >> output.txt ;;
esac
done < $@
grep -q "FAILED" output.txt
if [ $? -eq 1 ]; then
echo "EigenExa Test Passed !"
elif [ $? -eq 0 ]; then
echo "EigenExa Test Failed !"
fi

View File

@@ -1,4 +0,0 @@
#!/bin/bash
$@ 2>&1 | tee run_test_results.txt
result-check.sh run_test_results.txt

View File

@@ -10,11 +10,10 @@ class EnvironmentModules(Package):
"""
homepage = 'https://cea-hpc.github.io/modules/'
url = 'https://github.com/cea-hpc/modules/releases/download/v4.7.0/modules-4.7.0.tar.gz'
url = 'https://github.com/cea-hpc/modules/releases/download/v4.6.1/modules-4.6.1.tar.gz'
maintainers = ['xdelaruelle']
version('4.7.0', sha256='3ab0a649e23b4dd00963e4cae60e573b449194ecb4035c5ce487330b272b4d06')
version('4.6.1', sha256='3445df39abe5838b94552b53e7dbff56ada8347b9fdc6c04a72297d5b04af76f')
version('4.6.0', sha256='b42b14bb696bf1075ade1ecaefe7735dbe411db4c29031a1dae549435eafa946')
version('4.5.3', sha256='7cbd9c61e6dcd82a3f81b5ced92c3cf84ecc5489639bdfc94869256383a2c915')

View File

@@ -6,68 +6,37 @@
from spack import *
class Evtgen(CMakePackage):
class Evtgen(AutotoolsPackage):
""" EvtGen is a Monte Carlo event generator that simulates
the decays of heavy flavour particles, primarily B and D mesons. """
homepage = "https://evtgen.hepforge.org/"
url = "https://evtgen.hepforge.org/downloads?f=EvtGen-02.00.00.tar.gz"
url = "http://lcgpackages.web.cern.ch/lcgpackages/tarFiles/sources/MCGeneratorsTarFiles/evtgen-R01-07-00.tar.gz"
tags = ['hep']
maintainers = ['vvolkl']
version('02.00.00', sha256='02372308e1261b8369d10538a3aa65fe60728ab343fcb64b224dac7313deb719')
# switched to cmake in 02.00.00
version('01.07.00', sha256='2648f1e2be5f11568d589d2079f22f589c283a2960390bbdb8d9d7f71bc9c014', deprecated=True)
version('02-00-00', sha256='02372308e1261b8369d10538a3aa65fe60728ab343fcb64b224dac7313deb719')
version('01-07-00', sha256='2648f1e2be5f11568d589d2079f22f589c283a2960390bbdb8d9d7f71bc9c014', preferred=True)
variant('pythia8', default=True, description='Build with pythia8')
variant('tauola', default=False, description='Build with tauola')
variant('photos', default=False, description='Build with photos')
variant('hepmc3', default=False, description='Link with hepmc3 (instead of hepmc)')
patch("g2c.patch", when='@01.07.00')
patch("g2c.patch")
depends_on('hepmc', when='~hepmc3')
depends_on('hepmc3', when='+hepmc3')
depends_on('hepmc@:2.99.99')
depends_on("pythia8", when="+pythia8")
depends_on("tauola~hepmc3", when="+tauola~hepmc3")
depends_on("photos~hepmc3", when="+photos~hepmc3")
depends_on("tauola+hepmc3", when="+tauola+hepmc3")
depends_on("photos+hepmc3", when="+photos+hepmc3")
depends_on("tauola", when="+tauola")
depends_on("photos", when="+photos")
conflicts("^pythia8+evtgen", when="+pythia8",
msg="Building pythia with evtgen bindings and "
"evtgen with pythia bindings results in a circular dependency "
"that cannot be resolved at the moment! "
"Use evtgen+pythia8^pythia8~evtgen.")
conflicts('+hepmc3', when='@:01.99.99',
msg='hepmc3 support was added in 02.00.00')
def cmake_args(self):
args = []
args.append(self.define_from_variant('EVTGEN_PYTHIA', 'pythia8'))
args.append(self.define_from_variant('EVTGEN_TAUOLA', 'tauola'))
args.append(self.define_from_variant('EVTGEN_PHOTOS', 'photos'))
args.append(self.define_from_variant('EVTGEN_HEPMC3', 'hepmc3'))
return args
# Taken from AutotoolsPackage
def configure(self, spec, prefix):
"""Runs configure with the arguments specified in
:py:meth:`~.AutotoolsPackage.configure_args`
and an appropriately set prefix.
"""
options = getattr(self, 'configure_flag_args', [])
options += ['--prefix={0}'.format(prefix)]
options += self.configure_args()
with working_dir(self.build_directory, create=True):
inspect.getmodule(self).configure(*options)
@when('@:01.99.99')
def configure_args(self):
args = []
@@ -81,23 +50,11 @@ def configure_args(self):
return args
@when('@:01.99.99')
def cmake(self, spec, prefix):
pass
@when('@:01.99.99')
def build(self, spec, prefix):
self.configure(spec, prefix)
# avoid parallel compilation errors
# due to libext_shared depending on lib_shared
with working_dir(self.build_directory):
make('lib_shared')
make('all')
@when('@:01.99.99')
def install(self, spec, prefix):
with working_dir(self.build_directory):
make('install')
make('lib_shared')
make('all')
def setup_run_environment(self, env):
env.set("EVTGEN", self.prefix.share)

View File

@@ -54,7 +54,7 @@ class Fastjet(AutotoolsPackage):
variant('auto-ptr', default=False, description='Use auto_ptr')
variant('atlas', default=False, description='Patch to make random generator thread_local')
patch('atlas.patch', when='+atlas', level=0)
patch('atlas.patch', when='+atlas')
def configure_args(self):
extra_args = ["--enable-allplugins"]

View File

@@ -176,10 +176,4 @@ def flag_handler(self, name, flags):
flags = []
flags.append('-Wno-error=stringop-truncation')
if self.spec.satisfies('%gcc@8:') and \
self.spec.satisfies('@0.23.0'):
if flags is None:
flags = []
flags.append('-Wno-error=maybe-uninitialized')
return (flags, None, None)

View File

@@ -75,7 +75,7 @@ class FontUtil(AutotoolsPackage, XorgPackage):
conflicts('fonts=font-bh-ttf', when='platform=cray')
conflicts('fonts=font-bh-ttf', when='arch=linux-rhel7-broadwell')
if f != 'font-bh-ttf':
if f != 'font-bh-tff':
default_fonts.append(f)
fonts.append(f)

View File

@@ -13,7 +13,6 @@ class Form(AutotoolsPackage):
url = "https://github.com/vermaseren/form/archive/v4.2.1.tar.gz"
version('4.2.1', sha256='6f32c7470d00e8ab6934dc352f5a78e29290146a00e5775f8cd5fef7810bbbb8')
version('4.1-20131025', sha256='caece2c6e605ccf32eb3612c4ed5c9257a7a62824ad219c5e46b6d00066f1ba6')
depends_on('autoconf', type='build')
depends_on('automake', type='build')

View File

@@ -12,9 +12,6 @@ class FuseOverlayfs(AutotoolsPackage):
homepage = "https://github.com/containers/fuse-overlayfs"
url = "https://github.com/containers/fuse-overlayfs/archive/v1.1.2.tar.gz"
version('1.4.0', sha256='7e5666aef4f2047e6a5202d6438b08c2d314dba5b40e431014e7dbb8168d9018')
version('1.3.0', sha256='91e78a93aac7698c65083deea04952bc86af6abbb0830785ef1dd4a8707ad8bf')
version('1.2.0', sha256='5df218732244059057686194b0e1fef66fb822d4087db48af88e1bc29bb1afde')
version('1.1.2', sha256='1c0fa67f806c44d5c51f4bce02fdcb546137a2688a8de76d93d07b79defc9cac')
version('1.1.1', sha256='9a1c4221a82059fd9686dd8b519d432bae126c08f9d891fb722bcb51ba4933ec')
version('1.1.0', sha256='060168c2d5a8c6cc768b4542eba9953b7ff4a31f94bfb2e05b3d1051390838b1')

View File

@@ -1,342 +0,0 @@
--- a/configure.ac
+++ b/configure.ac
@@ -124,11 +124,7 @@ AC_PROG_F77($genesis_default_fc)
# check actual compiler
AC_MSG_CHECKING([for actual compiler])
-if test x"${FC}" = x"mpifrtpx"; then
- VER_OPT="-V"
-else
- VER_OPT="--version"
-fi
+VER_OPT="--version"
fc_ver_info=$(${FC} -E ${VER_OPT})
for vtok in ${fc_ver_info} ; do
@@ -158,9 +154,13 @@ for vtok in ${fc_ver_info} ; do
done
if test -z "${FC_ACT}"; then
- FC_ACT=${FC}
+ FC_ACT=`basename ${FC}`
fi
+
+
+
+
AC_MSG_RESULT([$FC_ACT])
version_warning=yes
@@ -256,7 +256,7 @@ for vtok in ${cc_ver_info} ; do
done
if test -z "${CC_ACT}"; then
- CC_ACT=${CC}
+ CC_ACT=`basename ${CC}`
fi
AC_MSG_RESULT([$CC_ACT])
@@ -322,18 +322,12 @@ if test x"${enable_openmp}" = x"yes"; th
elif test x"${FC_ACT}" = x"pgf90"; then
OPT_OPENMP="-mp"
- elif test x"${FC_ACT}" = x"frtpx"; then
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
OPT_OPENMP="-Kopenmp"
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
OPT_OPENMP="-Kopenmp"
- elif test x"${FC_ACT}" = x"frt"; then
- OPT_OPENMP="-KOMP"
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- OPT_OPENMP="-KOMP"
-
fi
fi
@@ -439,7 +433,8 @@ fi
AC_ARG_ENABLE([parallel_IO],
[AS_HELP_STRING([--enable-parallel_IO], [enable parallel_IO (defalt: no)])],
[], [enable_parallel_IO=no])
-if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx"; then
+if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx" -o \
+ x"${FC_ACT}" = x"frt" -o x"${FC_ACT}" = x"mpifrt"; then
enable_parallel_IO=no
fi
#if test x"${mainly_single}" = x"yes"; then
@@ -742,22 +737,16 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-O2 -fastsse -Mipa=fast,inline -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
- elif test x"${FC_ACT}" = x"frtpx"; then
- FCFLAGS="-m -Kvisimpact -Kocl -Kswp"
-
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS="-m -Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
- FCFLAGS_FFTE_KERNEL_FPP__PGI__SP=" -m -Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
- FCFLAGS_FPP__PGI_NOOPT="-m -Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_PAIRLIST="-m -Kocl -Kfast,openmp,parallel,simd=2,nounroll,swp_strong,noalias=s -Nlst=t -Koptmsg=2"
- FCFLAGS_DIHEDRAL="-m -xsp_energy_dihedrals_mod.calculate_dihedral_2 -Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS=" -m -Kocl -Kfast,openmp,parallel,simd=2,swp_strong,noalias=s -Nlst=t -Koptmsg=2"
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ FCFLAGS="-Kocl"
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-m -Am -Kfast"
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-m -Am -Kfast"
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS="-Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
+ FCFLAGS_FFTE_KERNEL_FPP__PGI__SP="-Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
+ FCFLAGS_FPP__PGI_NOOPT="-Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_PAIRLIST="-Kocl -Kfast,openmp,parallel,simd=2,nounroll,swp_strong,noalias=s -Nlst=t -Koptmsg=2"
+ FCFLAGS_DIHEDRAL="-xsp_energy_dihedrals_mod.calculate_dihedral_2 -Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS="-Kocl -Kfast,openmp,parallel,simd=2,swp_strong,noalias=s -Nlst=t -Koptmsg=2"
fi
@@ -776,22 +765,16 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
- elif test x"${FC_ACT}" = x"frtpx"; then
- FCFLAGS="-m -Kocl "
-
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS=" -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS=" -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_PAIRLIST="-m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_FFTE_KERNEL_FPP__PGI__SP=" -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_FPP__PGI_NOOPT=" -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_DIHEDRAL=" -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
-
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-m "
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ FCFLAGS="-Kocl "
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-m "
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS=" -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_PAIRLIST="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_FFTE_KERNEL_FPP__PGI__SP="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_FPP__PGI_NOOPT="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_DIHEDRAL="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
fi
@@ -813,22 +796,16 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-g -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
- elif test x"${FC_ACT}" = x"frtpx"; then
- FCFLAGS="-g -m -Kocl -Kopenmp"
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ FCFLAGS="-g -Kocl -Kopenmp"
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_PAIRLIST=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_FFTE_KERNEL_FPP__PGI__SP=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_FPP__PGI_NOOPT=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_DIHEDRAL=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
-
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-g -m "
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-g -m "
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_PAIRLIST=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_FFTE_KERNEL_FPP__PGI__SP=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_FPP__PGI_NOOPT=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_DIHEDRAL=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
fi
@@ -850,23 +827,23 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-O0 -g -m64 -Msmartalloc=huge"
- elif test x"${FC_ACT}" = x"frtpx"; then
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
# FCFLAGS="-O0 -g -Hs -Hu -Ha"
FCFLAGS="-O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_PAIRLIST=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- FCFLAGS_FFTE_KERNEL_FPP__PGI__SP=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- FCFLAGS_FPP__PGI_NOOPT=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- FCFLAGS_DIHEDRAL=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_PAIRLIST=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ FCFLAGS_FFTE_KERNEL_FPP__PGI__SP=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ FCFLAGS_FPP__PGI_NOOPT=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ FCFLAGS_DIHEDRAL=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-O0 -g"
+ #elif test x"${FC_ACT}" = x"frt"; then
+ # FCFLAGS="-O0 -g"
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-O0 -g"
+ #elif test x"${FC_ACT}" = x"mpifrt"; then
+ # FCFLAGS="-O0 -g"
fi
else
@@ -930,17 +907,11 @@ else
elif test x"${CC_ACT}" = x"pgcc"; then
CFLAGS="-O3 -fastsse -Mipa=fast,inline -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
- elif test x"${CC_ACT}" = x"fccpx"; then
- CFLAGS="-m -Kvisimpact -Kocl -Kswp"
-
- elif test x"${CC_ACT}" = x"mpifccpx"; then
- CFLAGS="-m -Kvisimpact -Kocl -Kswp"
+ elif test x"${CC_ACT}" = x"fccpx" -o x"${CC_ACT}" = x"fcc"; then
+ CFLAGS="-Kocl"
- elif test x"${CC_ACT}" = x"fcc"; then
- CFLAGS="-m -Am -Kfast"
-
- elif test x"${CC_ACT}" = x"mpifcc"; then
- CFLAGS="-m -Am -Kfast"
+ elif test x"${CC_ACT}" = x"mpifccpx" -o x"${CC_ACT}" = x"mpifcc"; then
+ CFLAGS="-Kocl"
fi
@@ -959,17 +930,11 @@ else
elif test x"${CC_ACT}" = x"pgcc"; then
CFLAGS="-fastsse -Mipa=fast,inline -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
- elif test x"${CC_ACT}" = x"fccpx"; then
- CFLAGS="-m -Kocl"
-
- elif test x"${CC_ACT}" = x"mpifccpx"; then
- CFLAGS="-m -Kocl"
-
- elif test x"${CC_ACT}" = x"fcc"; then
- CFLAGS="-m -Am -Kfast"
+ elif test x"${CC_ACT}" = x"fccpx" -o x"${CC_ACT}" = x"fcc"; then
+ CFLAGS="-Kocl"
- elif test x"${CC_ACT}" = x"mpifcc"; then
- CFLAGS="-m -Am -Kfast"
+ elif test x"${CC_ACT}" = x"mpifccpx" -o x"${CC_ACT}" = x"mpifcc"; then
+ CFLAGS="-Kocl"
fi
@@ -991,17 +956,11 @@ else
elif test x"${CC_ACT}" = x"pgcc"; then
CFLAGS="-g -fastsse -Mipa=fast,inline -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
- elif test x"${CC_ACT}" = x"fccpx"; then
- CFLAGS="-g -m -Kocl"
+ elif test x"${CC_ACT}" = x"fccpx" -o x"${CC_ACT}" = x"fcc"; then
+ CFLAGS="-g -Kocl"
- elif test x"${CC_ACT}" = x"mpifccpx"; then
- CFLAGS="-g -m -Kocl"
-
- elif test x"${CC_ACT}" = x"fcc"; then
- CFLAGS="-g -m -Am -Kfast"
-
- elif test x"${CC_ACT}" = x"mpifcc"; then
- CFLAGS="-g -m -Am -Kfast"
+ elif test x"${CC_ACT}" = x"mpifccpx" -o x"${CC_ACT}" = x"mpifcc"; then
+ CFLAGS="-g -Kocl"
fi
@@ -1023,19 +982,13 @@ else
elif test x"${CC_ACT}" = x"pgcc"; then
CFLAGS="-O0 -g -m64 -Msmartalloc=huge"
- elif test x"${CC_ACT}" = x"fccpx"; then
+ elif test x"${CC_ACT}" = x"fccpx" -o x"${CC_ACT}" = x"fcc"; then
# CFLAGS="-O0 -g -Hs -Hu -Ha"
CFLAGS="-O0 -g -Kocl"
- elif test x"${CC_ACT}" = x"mpifccpx"; then
+ elif test x"${CC_ACT}" = x"mpifccpx" -o x"${CC_ACT}" = x"mpifcc"; then
CFLAGS="-O0 -g -Kocl"
- elif test x"${CC_ACT}" = x"fcc"; then
- CFLAGS="-O0 -g"
-
- elif test x"${CC_ACT}" = x"mpifcc"; then
- CFLAGS="-O0 -g"
-
fi
fi
@@ -1056,17 +1009,11 @@ if test -z "${LDFLAGS}"; then
LDFLAGS=""
#LDFLAGS="-march=native -fuse-linker-plugin"
- elif test x"${FC_ACT}" = x"frtpx"; then
- LDFLAGS="-SSL2 -Kparallel -Nfjomplib"
-
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- LDFLAGS="-SSL2BLAMP -Kparallel -Nfjomplib"
-
- elif test x"${FC_ACT}" = x"frt"; then
- LDFLAGS="-KSPARC64VII,V9FMADD -SSL2"
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ LDFLAGS="-Kparallel -Kopenmp -Nlibomp"
- elif test x"${FC_ACT}" = x"mpifrt"; then
- LDFLAGS="-KSPARC64VII,V9FMADD -SSL2"
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ LDFLAGS="-Kparallel -Kopenmp -Nlibomp"
fi
fi
@@ -1101,7 +1048,9 @@ AC_SUBST(PPFLAGS)
# USEKCOMP
AM_CONDITIONAL(USEKCOMP, test x"${FC_ACT}" = x"frtpx" -o \
- x"${FC_ACT}" = x"mpifrtpx")
+ x"${FC_ACT}" = x"mpifrtpx" -o \
+ x"${FC_ACT}" = x"frt" -o \
+ x"${FC_ACT}" = x"mpifrt")
# KCOMP preprocess macro
AC_ARG_ENABLE([pktimer],
@@ -1130,7 +1079,8 @@ AC_ARG_WITH([fj_timer_2],
[use FJ-timer in main loop (default: not use)])],
[], [with_fj_timer_2=no])
-if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx"; then
+if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx" -o \
+ x"${FC_ACT}" = x"frt" -o x"${FC_ACT}" = x"mpifrt"; then
AC_DEFINE(KCOMP, 1, [defined if K-computer compiler is used.])
DEFINED_VARIABLES+=" -DKCOMP"
@@ -1148,7 +1098,7 @@ if test x"${FC_ACT}" = x"frtpx" -o x"${F
mainly_pktimer=no
if test x"${enable_pktimer}" = x"yes"; then
- if test x"${FC_ACT}" = x"mpifrtpx"; then
+ if test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
AC_DEFINE(PKTIMER, 1, [defined if PKTIMER is used.])
DEFINED_VARIABLES+=" -DPKTIMER"
mainly_pktimer=yes

View File

@@ -1,286 +0,0 @@
--- a/configure.ac
+++ b/configure.ac
@@ -90,11 +90,7 @@ AC_PROG_F77($genesis_default_fc)
# check actual compiler
AC_MSG_CHECKING([for actual compiler])
-if test x"${FC}" = x"mpifrtpx"; then
- VER_OPT="-V"
-else
- VER_OPT="--version"
-fi
+VER_OPT="--version"
fc_ver_info=$(${FC} -E ${VER_OPT})
for vtok in ${fc_ver_info} ; do
@@ -124,7 +120,7 @@ for vtok in ${fc_ver_info} ; do
done
if test -z "${FC_ACT}"; then
- FC_ACT=${FC}
+ FC_ACT=`basename ${FC}`
fi
AC_MSG_RESULT([$FC_ACT])
@@ -228,7 +224,7 @@ for vtok in ${cc_ver_info} ; do
done
if test -z "${CC_ACT}"; then
- CC_ACT=${CC}
+ CC_ACT=`basename ${CC}`
fi
AC_MSG_RESULT([$CC_ACT])
@@ -294,18 +290,12 @@ if test x"${enable_openmp}" = x"yes"; th
elif test x"${FC_ACT}" = x"pgf90"; then
OPT_OPENMP="-mp"
- elif test x"${FC_ACT}" = x"frtpx"; then
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
OPT_OPENMP="-Kopenmp"
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
OPT_OPENMP="-Kopenmp"
- elif test x"${FC_ACT}" = x"frt"; then
- OPT_OPENMP="-KOMP"
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- OPT_OPENMP="-KOMP"
-
fi
fi
@@ -411,7 +401,8 @@ fi
AC_ARG_ENABLE([parallel_IO],
[AS_HELP_STRING([--enable-parallel_IO], [enable parallel_IO (defalt: yes(intel)/no(fujitsu))])],
[], [enable_parallel_IO=yes])
-if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx"; then
+if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx" -o \
+ x"${FC_ACT}" = x"frt" -o x"${FC_ACT}" = x"mpifrt"; then
enable_parallel_IO=no
fi
if test x"${enable_single}" = x"yes"; then
@@ -639,19 +630,13 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-O2 -fastsse -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge -Mbackslash"
- elif test x"${FC_ACT}" = x"frtpx"; then
- FCFLAGS="-m -Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_NONBOND="-m -O3 -Kocl -Kfsimple,noreduction,fp_contract,fp_relaxed,fz,ilfunc,mfunc,omitfp,simd_packed_promotion,openmp,parallel,simd=2 -Nlst=t -Koptmsg=2"
-
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS="-m -Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_NONBOND="-m -O3 -Kocl -Kfsimple,noreduction,fp_contract,fp_relaxed,fz,ilfunc,mfunc,omitfp,simd_packed_promotion,openmp,parallel,simd=2 -Nlst=t -Koptmsg=2"
-
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-m -Am -Kfast"
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-m -Am -Kfast"
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ FCFLAGS="-Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_NONBOND="-O3 -Kocl -Kfsimple,noreduction,fp_contract,fp_relaxed,fz,ilfunc,mfunc,omitfp,simd_packed_promotion,openmp,parallel,simd=2 -Nlst=t -Koptmsg=2"
+
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS="-Kocl -Kfast,openmp,parallel,simd=2,noalias -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_NONBOND="-O3 -Kocl -Kfsimple,noreduction,fp_contract,fp_relaxed,fz,ilfunc,mfunc,omitfp,simd_packed_promotion,openmp,parallel,simd=2 -Nlst=t -Koptmsg=2"
fi
@@ -670,20 +655,14 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge -Mbackslash"
- elif test x"${FC_ACT}" = x"frtpx"; then
- FCFLAGS="-m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_NONBOND="-m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
-
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ FCFLAGS="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_NONBOND="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS="-m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_NONBOND="-m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-m "
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-m "
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_NONBOND="-Kocl -Kopenmp -Nlst=t -Koptmsg=2"
fi
@@ -705,19 +684,13 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-g -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge -Mbackslash"
- elif test x"${FC_ACT}" = x"frtpx"; then
- FCFLAGS="-g -m -Kocl -Kopenmp"
- FCFLAGS_KERNELS_NONBOND=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
-
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_NONBOND=" -g -m -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
-
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-g -m "
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-g -m "
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ FCFLAGS="-g -Kocl -Kopenmp"
+ FCFLAGS_KERNELS_NONBOND=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_NONBOND=" -g -Kocl -Kopenmp -Nlst=t -Koptmsg=2"
fi
@@ -739,18 +712,12 @@ else
elif test x"${FC_ACT}" = x"pgf90"; then
FCFLAGS="-O0 -g -m64 -Msmartalloc=huge"
- elif test x"${FC_ACT}" = x"frtpx"; then
- FCFLAGS=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- FCFLAGS_KERNELS_NONBOND=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
-
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- FCFLAGS_KERNELS_NONBOND=" -O0 -g -m -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ FCFLAGS=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
+ FCFLAGS_KERNELS_NONBOND=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
- elif test x"${FC_ACT}" = x"frt"; then
- FCFLAGS="-O0 -g"
-
- elif test x"${FC_ACT}" = x"mpifrt"; then
- FCFLAGS="-O0 -g"
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ FCFLAGS_KERNELS_NONBOND=" -O0 -g -Kocl -Kopenmp -Hs -Hu -Ha -Nlst=t -Koptmsg=2"
fi
else
@@ -804,16 +771,16 @@ else
CFLAGS="-O3 -fastsse -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
elif test x"${CC_ACT}" = x"fccpx"; then
- CFLAGS="-m -Kvisimpact -Kocl -Kswp"
+ CFLAGS="-Kvisimpact -Kocl -Kswp"
elif test x"${CC_ACT}" = x"mpifccpx"; then
- CFLAGS="-m -Kvisimpact -Kocl -Kswp"
+ CFLAGS="-Kvisimpact -Kocl -Kswp"
elif test x"${CC_ACT}" = x"fcc"; then
- CFLAGS="-m -Am -Kfast"
+ CFLAGS="-Am -Kfast"
elif test x"${CC_ACT}" = x"mpifcc"; then
- CFLAGS="-m -Am -Kfast"
+ CFLAGS="-Am -Kfast"
fi
@@ -833,16 +800,16 @@ else
CFLAGS="-fastsse -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
elif test x"${CC_ACT}" = x"fccpx"; then
- CFLAGS="-m -Kocl"
+ CFLAGS=" -Kocl"
elif test x"${CC_ACT}" = x"mpifccpx"; then
- CFLAGS="-m -Kocl"
+ CFLAGS=" -Kocl"
elif test x"${CC_ACT}" = x"fcc"; then
- CFLAGS="-m -Am -Kfast"
+ CFLAGS=" -Am -Kfast"
elif test x"${CC_ACT}" = x"mpifcc"; then
- CFLAGS="-m -Am -Kfast"
+ CFLAGS=" -Am -Kfast"
fi
@@ -865,16 +832,16 @@ else
CFLAGS="-g -fastsse -m64 -Mflushz -pc 64 -mcmodel=medium -Msmartalloc=huge"
elif test x"${CC_ACT}" = x"fccpx"; then
- CFLAGS="-g -m -Kocl"
+ CFLAGS="-g -Kocl"
elif test x"${CC_ACT}" = x"mpifccpx"; then
- CFLAGS="-g -m -Kocl"
+ CFLAGS="-g -Kocl"
elif test x"${CC_ACT}" = x"fcc"; then
- CFLAGS="-g -m -Am -Kfast"
+ CFLAGS="-g -Am -Kfast"
elif test x"${CC_ACT}" = x"mpifcc"; then
- CFLAGS="-g -m -Am -Kfast"
+ CFLAGS="-g -Am -Kfast"
fi
@@ -928,17 +895,11 @@ if test -z "${LDFLAGS}"; then
LDFLAGS=""
#LDFLAGS="-march=native -fuse-linker-plugin"
- elif test x"${FC_ACT}" = x"frtpx"; then
- LDFLAGS="-SSL2 -Kparallel -Nfjomplib"
-
- elif test x"${FC_ACT}" = x"mpifrtpx"; then
- LDFLAGS="-SSL2BLAMP -Kparallel -Nfjomplib"
-
- elif test x"${FC_ACT}" = x"frt"; then
- LDFLAGS="-KSPARC64VII,V9FMADD -SSL2"
+ elif test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"frt"; then
+ LDFLAGS="-Kparallel -Nfjomplib -Nlibomp"
- elif test x"${FC_ACT}" = x"mpifrt"; then
- LDFLAGS="-KSPARC64VII,V9FMADD -SSL2"
+ elif test x"${FC_ACT}" = x"mpifrtpx" -o x"${FC_ACT}" = x"mpifrt"; then
+ LDFLAGS="-Kparallel -Nfjomplib -Nlibomp"
fi
fi
@@ -973,10 +934,13 @@ AC_SUBST(PPFLAGS)
# USEKCOMP
AM_CONDITIONAL(USEKCOMP, test x"${FC_ACT}" = x"frtpx" -o \
- x"${FC_ACT}" = x"mpifrtpx")
+ x"${FC_ACT}" = x"mpifrtpx" -o \
+ x"${FC_ACT}" = x"frt" -o \
+ x"${FC_ACT}" = x"mpifrt")
# KCOMP preprocess macro
-if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx"; then
+if test x"${FC_ACT}" = x"frtpx" -o x"${FC_ACT}" = x"mpifrtpx" -o \
+ x"${FC_ACT}" = x"frt" -o x"${FC_ACT}" = x"mpifrt"; then
AC_DEFINE(KCOMP, 1, [defined if K-computer compiler is used.])
DEFINED_VARIABLES+=" -DKCOMP"
fi
@@ -1049,13 +1013,13 @@ if test x"${IS_RICC}" == x"yes"; then
if test -n "${RICC_FCFLAGS}"; then
FCFLAGS=${RICC_FCFLAGS}
else
- FCFLAGS="-pc -m -omp"
+ FCFLAGS="-pc -omp"
fi
if test -n "${RICC_CFLAGS}"; then
CFLAGS=${RICC_CFLAGS}
else
- CFLAGS="-pc -m -omp"
+ CFLAGS="-pc -omp"
fi
if test -n "${RICC_PPFLAGS}"; then

View File

@@ -1,112 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Genesis(AutotoolsPackage, CudaPackage):
"""GENESIS is a Molecular dynamics and modeling software
for bimolecular systems such as proteins, lipids, glycans,
and their complexes.
"""
homepage = "https://www.r-ccs.riken.jp/labs/cbrt/"
url = "https://www.r-ccs.riken.jp/labs/cbrt/wp-content/uploads/2020/09/genesis-1.5.1.tar.bz2"
git = "https://github.com/genesis-release-r-ccs/genesis-2.0.git"
version("master", branch="master")
version(
"1.5.1",
sha256="62a453a573c36779484b4ffed2dfa56ea03dfe1308d631b33ef03f733259b3ac",
url="https://www.r-ccs.riken.jp/labs/cbrt/wp-content/uploads/2020/09/genesis-1.5.1.tar.bz2",
)
resource(
when="@1.5.1",
name="user_guide",
url="https://www.r-ccs.riken.jp/labs/cbrt/wp-content/uploads/2019/10/GENESIS-1.4.0.pdf",
sha256="da2c3f8bfa1e93adb992d3cfce09fb45d8d447a94f9a4f884ac834ea7279b9c7",
expand=False,
placement="doc",
)
variant("openmp", default=True, description="Enable OpenMP.")
variant("single", default=False, description="Enable single precision.")
variant("hmdisk", default=False, description="Enable huge molecule on hard disk.")
conflicts("%apple-clang", when="+openmp")
depends_on("autoconf", type="build", when="@1.5.1 %fj")
depends_on("autoconf", type="build", when="@master")
depends_on("automake", type="build", when="@1.5.1 %fj")
depends_on("automake", type="build", when="@master")
depends_on("libtool", type="build", when="@1.5.1 %fj")
depends_on("libtool", type="build", when="@master")
depends_on("m4", type="build", when="@1.5.1 %fj")
depends_on("m4", type="build", when="@master")
depends_on("mpi", type=("build", "run"))
depends_on("lapack")
depends_on("python@2.6.9:2.8.0", type=("build", "run"), when="@master")
patch("fj_compiler.patch", when="@master %fj")
patch("fj_compiler_1.5.1.patch", when="@1.5.1 %fj")
parallel = False
@property
def force_autoreconf(self):
# Run autoreconf due to build system patch
return self.spec.satisfies("@1.5.1 %fj")
def configure_args(self):
spec = self.spec
options = []
options.extend(self.enable_or_disable("openmp"))
options.extend(self.enable_or_disable("single"))
options.extend(self.enable_or_disable("hmdisk"))
if "+cuda" in spec:
options.append("--enable-gpu")
options.append("--with-cuda=%s" % spec["cuda"].prefix)
else:
options.append("--disable-gpu")
if spec.target == "a64fx" and self.spec.satisfies("@master %fj"):
options.append("--host=Fugaku")
return options
def setup_build_environment(self, env):
env.set("FC", self.spec["mpi"].mpifc, force=True)
env.set("F77", self.spec["mpi"].mpif77, force=True)
env.set("CC", self.spec["mpi"].mpicc, force=True)
env.set("CXX", self.spec["mpi"].mpicxx, force=True)
env.set("LAPACK_LIBS", self.spec["lapack"].libs.ld_flags)
if "+cuda" in self.spec:
cuda_arch = self.spec.variants["cuda_arch"].value
cuda_gencode = " ".join(self.cuda_flags(cuda_arch))
env.set("NVCCFLAGS", cuda_gencode)
def install(self, spec, prefix):
make("install")
install_tree("doc", prefix.share.doc)
@run_after("install")
def cache_test_sources(self):
if self.spec.satisfies("@master"):
self.cache_extra_test_sources(["tests"])
def test(self):
if self.spec.satisfies("@master"):
exe_name = self.spec["python"].command.path
test_name = join_path(
self.install_test_root, "tests", "regression_test", "test.py"
)
bin_name = join_path(self.prefix.bin, "spdyn")
opts = [
test_name,
self.spec["mpi"].prefix.bin.mpirun + " -np 8 " + bin_name,
]
env["OMP_NUM_THREADS"] = "1"
self.run_test(exe_name, options=opts, expected="Passed 53 / 53")
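In setup_build_environment of the removed package above, the cuda_arch variant values are passed through self.cuda_flags() and joined into a single NVCCFLAGS string. A hedged sketch of that kind of per-architecture code-generation flag (the exact spelling produced by CudaPackage.cuda_flags is assumed here, not quoted):

def cuda_gencode_flags(cuda_arch):
    """One code-generation flag per requested compute capability, e.g. ('70', '80')."""
    return ['--generate-code arch=compute_{0},code=sm_{0}'.format(a) for a in cuda_arch]

print(' '.join(cuda_gencode_flags(('70', '80'))))  # joined the way the package builds NVCCFLAGS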

View File

@@ -15,7 +15,6 @@ class Gperftools(AutotoolsPackage):
homepage = "https://github.com/gperftools/gperftools"
url = "https://github.com/gperftools/gperftools/releases/download/gperftools-2.7/gperftools-2.7.tar.gz"
version('2.8.1', sha256='12f07a8ba447f12a3ae15e6e3a6ad74de35163b787c0c7b76288d7395f2f74e0')
version('2.7', sha256='1ee8c8699a0eff6b6a203e59b43330536b22bbcbe6448f54c7091e5efb0763c9')
version('2.4', sha256='982a37226eb42f40714e26b8076815d5ea677a422fb52ff8bfca3704d9c30a2d')
version('2.3', sha256='093452ad45d639093c144b4ec732a3417e8ee1f3744f2b0f8d45c996223385ce')

View File

@@ -50,7 +50,6 @@ def configure_args(self):
args.append('CFLAGS=-g -O0 -w')
args.append('CXXFLAGS=-g -O0 -w -std=c++14')
args.append('LDFLAGS=-g -O0 -w')
args.append('--with-xerces=' + self.spec['xerces-c'].prefix)
return args

View File

@@ -25,7 +25,7 @@ class Harfbuzz(AutotoolsPackage):
depends_on("glib")
depends_on("icu4c")
depends_on("freetype")
depends_on("cairo+pdf+ft")
depends_on("cairo")
depends_on("zlib")
depends_on("graphite2", when='+graphite2')
@@ -40,21 +40,12 @@ def url_for_version(self, version):
return url.format(version)
# Function borrowed from superlu
def flag_handler(self, name, flags):
flags = list(flags)
if name == 'cxxflags':
flags.append(self.compiler.cxx11_flag)
if name == 'cflags':
if '%pgi' not in self.spec and self.spec.satisfies('%gcc@:5.1'):
flags.append('-std=gnu99')
return (None, None, flags)
def configure_args(self):
args = []
# disable building of gtk-doc files following #9771
args.append('--disable-gtk-doc-html')
true = which('true')
args.append('CXXFLAGS={0}'.format(self.compiler.cxx11_flag))
args.append('GTKDOC_CHECK={0}'.format(true))
args.append('GTKDOC_CHECK_PATH={0}'.format(true))
args.append('GTKDOC_MKPDF={0}'.format(true))
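The flag_handler above (noted as borrowed from superlu) appends the compiler's C++11 flag to cxxflags and, for older GCC when not using PGI, -std=gnu99 to cflags, then returns the result in the third slot of the tuple so the flags reach the build system. A standalone sketch with the compiler checks replaced by plain booleans (illustration only):

def flag_handler(name, flags, cxx11_flag='-std=c++11', old_gcc=True, pgi=False):
    # Mirror of the conditional additions shown in the hunk above.
    flags = list(flags)
    if name == 'cxxflags':
        flags.append(cxx11_flag)
    if name == 'cflags' and not pgi and old_gcc:
        flags.append('-std=gnu99')
    return (None, None, flags)  # (injected, env, build-system) per Spack's convention

print(flag_handler('cflags', ['-O2']))  # -> (None, None, ['-O2', '-std=gnu99'])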

View File

@@ -22,11 +22,6 @@ class Hdf5(AutotoolsPackage):
test_requires_compiler = True
# We rely on the *.la files to be removed and, therefore, do not try to make
# sure that they are correct. The following is a precaution against someone
# blindly changing the value to True, either here or in the baseclass.
install_libtool_archives = False
version('develop', branch='develop')
version('1.12.0', sha256='a62dcb276658cb78e6795dd29bf926ed7a9bc4edf6e77025cd2c689a8f97c17a')
@@ -234,28 +229,40 @@ def fortran_check(self):
msg = 'cannot build a Fortran variant without a Fortran compiler'
raise RuntimeError(msg)
def with_or_without_szip(self, activated):
return '--{0}-szlib'.format('with' if activated else 'without')
def configure_args(self):
# Always enable this option. This does not actually enable any
# features: it only *allows* the user to specify certain
# combinations of other arguments. Enabling it just skips a
# sanity check in configure, so this doesn't merit a variant.
extra_args = ['--enable-unsupported',
'--enable-symbols=yes',
'--with-zlib']
extra_args = ['--enable-unsupported']
extra_args += ['--enable-symbols=yes']
extra_args += self.enable_or_disable('threadsafe')
extra_args += self.enable_or_disable('cxx')
extra_args += self.enable_or_disable('hl')
extra_args += self.enable_or_disable('fortran')
extra_args += self.enable_or_disable('java')
extra_args += self.with_or_without('szip')
api = self.spec.variants['api'].value
if api != 'none':
extra_args.append('--with-default-api-version=' + api)
if '+szip' in self.spec:
szip_spec = self.spec['szip']
# The configure script of HDF5 accepts a comma-separated tuple of
# two paths: the first one points to the directory with include
# files, the second one points to the directory with library files.
# If the second path is not specified, the configure script assumes
# that it is prefix/lib. However, the correct directory
# might be prefix/lib64. This is not a problem when the build is
# done with Spack's compiler wrapper, but it makes the Libtool
# files (*.la) invalid, which makes it problematic to use the
# installed library outside of a Spack environment.
extra_args.append('--with-szlib=%s,%s' %
(szip_spec.headers.directories[0],
szip_spec.libs.directories[0]))
else:
extra_args.append('--without-szlib')
if self.spec.satisfies('@1.10:'):
if '+debug' in self.spec:
extra_args.append('--enable-build-mode=debug')
@@ -301,6 +308,8 @@ def configure_args(self):
if '+fortran' in self.spec:
extra_args.append('FC=%s' % self.spec['mpi'].mpifc)
extra_args.append('--with-zlib=%s' % self.spec['zlib'].prefix)
return extra_args
@run_after('configure')

View File

@@ -37,8 +37,7 @@ class Hip(CMakePackage):
depends_on('rocm-device-libs@' + ver, type=('build', 'link', 'run'), when='@' + ver)
depends_on('rocminfo@' + ver, type=('build', 'run'), when='@' + ver)
# hipcc likes to add `-lnuma` by default :(
# ref https://github.com/ROCm-Developer-Tools/HIP/pull/2202
# Notice: most likely this will only be a hard dependency on 3.7.0
depends_on('numactl', when='@3.7.0:')
# Note: the ROCm ecosystem expects `lib/` and `bin/` folders with symlinks
@@ -53,107 +52,74 @@ class Hip(CMakePackage):
patch('0002-Fix-detection-of-HIP_CLANG_ROOT.patch', when='@:3.9.0')
# See https://github.com/ROCm-Developer-Tools/HIP/pull/2218
patch('0003-Improve-compilation-without-git-repo.3.7.0.patch', when='@3.7.0:3.9.0')
patch('0003-Improve-compilation-without-git-repo.3.9.0.patch', when='@3.9.0')
patch('0003-Improve-compilation-without-git-repo.3.10.0.patch', when='@3.10.0:4.0.0')
# See https://github.com/ROCm-Developer-Tools/HIP/pull/2219
patch('0004-Drop-clang-rt-builtins-linking-on-hip-host.3.7.0.patch', when='@3.7.0:3.9.0')
patch('0004-Drop-clang-rt-builtins-linking-on-hip-host.3.9.0.patch', when='@3.9.0')
patch('0004-Drop-clang-rt-builtins-linking-on-hip-host.3.10.0.patch', when='@3.10.0:4.0.0')
def get_paths(self):
def get_rocm_prefix_info(self):
# External packages in Spack do not currently contain dependency
# information. External installations of hip therefore must compute
# necessary paths to other rocm components by relative paths. This
# assumes all components are installed under a single umbrella
# directory. Manual edits to `fallback_prefix` may be necessary if this
# assumption does not hold.
if self.spec.external:
# For external packages we only assume the `hip` prefix is known,
# because spack does not set prefixes of dependencies of externals.
# We assume self.spec.prefix is /opt/rocm-x.y.z/hip and rocm has a
# default installation with everything installed under
# /opt/rocm-x.y.z
rocm_prefix = Prefix(os.path.dirname(self.spec.prefix))
if not os.path.isdir(rocm_prefix):
# typically, self.spec.prefix is /opt/rocm/hip, so fallback_prefix
# will be /opt/rocm. The rocminfo executable is usually
# found at /opt/rocm/bin/rocminfo.
fallback_prefix = Prefix(os.path.dirname(self.spec.prefix))
if not os.path.isdir(fallback_prefix):
msg = "Could not determine prefix for other rocm components\n"
msg += "Either report a bug at github.com/spack/spack or "
msg += "manually edit rocm_prefix in the package file as "
msg += "manually edit fallback_prefix in the package file as "
msg += "a workaround."
raise RuntimeError(msg)
paths = {
'rocm-path': rocm_prefix,
'llvm-amdgpu': rocm_prefix.llvm,
'hsa-rocr-dev': rocm_prefix.hsa,
'rocminfo': rocm_prefix,
'rocm-device-libs': rocm_prefix
return {
'rocm-path': fallback_prefix,
'llvm-amdgpu': fallback_prefix.llvm,
'hsa-rocr-dev': fallback_prefix.hsa,
'rocminfo': fallback_prefix.bin,
'rocm-device-libs': fallback_prefix.lib,
'device_lib_path': fallback_prefix.lib
}
else:
paths = {
'rocm-path': self.spec.prefix,
'llvm-amdgpu': self.spec['llvm-amdgpu'].prefix,
'hsa-rocr-dev': self.spec['hsa-rocr-dev'].prefix,
'rocminfo': self.spec['rocminfo'].prefix,
'rocm-device-libs': self.spec['rocm-device-libs'].prefix
}
# `device_lib_path` is the path to the bitcode directory
if '@:3.8.0' in self.spec:
paths['device_lib_path'] = paths['rocm-device-libs'].lib
else:
paths['device_lib_path'] = paths['rocm-device-libs'].amdgcn.bitcode
return paths
mydict = dict((name, self.spec[name].prefix)
for name in ('llvm-amdgpu', 'hsa-rocr-dev',
'rocminfo', 'rocm-device-libs'))
mydict['rocm-path'] = self.spec.prefix
if '@:3.8.0' in self.spec:
device_lib_path = mydict['rocm-device-libs'].lib
else:
device_lib_path = mydict['rocm-device-libs'].amdgcn.bitcode
mydict['device_lib_path'] = device_lib_path
return mydict
def set_variables(self, env):
# Note: do not use self.spec[name] here, since not all dependencies
# have defined prefixes when hip is marked as external.
paths = self.get_paths()
# Indirection for dependency paths because hip may be an external in
# Spack. See block comment on get_rocm_prefix_info .
# Used in hipcc, but only useful when hip is external, since only then
# there is a common prefix /opt/rocm-x.y.z.
env.set('ROCM_PATH', paths['rocm-path'])
# NOTE: DO NOT PUT LOGIC LIKE self.spec[name] in this function!!!!!
# It DOES NOT WORK FOR EXTERNAL PACKAGES!!!! See get_rocm_prefix_info
rocm_prefixes = self.get_rocm_prefix_info()
# hipcc recognizes HIP_PLATFORM == hcc and HIP_COMPILER == clang, even
# though below we specified HIP_PLATFORM=rocclr and HIP_COMPILER=clang
# in the CMake args.
env.set('HIP_PLATFORM', 'hcc')
env.set('ROCM_PATH', rocm_prefixes['rocm-path'])
env.set('HIP_COMPILER', 'clang')
# bin directory where clang++ resides
env.set('HIP_CLANG_PATH', paths['llvm-amdgpu'].bin)
# Path to hsa-rocr-dev prefix used by hipcc.
env.set('HSA_PATH', paths['hsa-rocr-dev'])
# This is a variable that does not exist in hipcc but was introduced
# in a patch of ours since 3.5.0 to locate rocm_agent_enumerator:
# https://github.com/ROCm-Developer-Tools/HIP/pull/2138
env.set('ROCMINFO_PATH', paths['rocminfo'])
# This one is used in hipcc to run `hipcc --hip-device-lib-path=...`
env.set('DEVICE_LIB_PATH', paths['device_lib_path'])
# And this is used in clang whenever the --hip-device-lib-path is not
# used (e.g. when clang is invoked directly)
env.set('HIP_DEVICE_LIB_PATH', paths['device_lib_path'])
# Just the prefix of hip (used in hipcc)
env.set('HIP_PATH', paths['rocm-path'])
# Used in comgr and seems necessary when using the JIT compiler, e.g.
# hiprtcCreateProgram:
env.set('HIP_PLATFORM', 'hcc')
env.set('HIP_CLANG_PATH', rocm_prefixes['llvm-amdgpu'].bin)
env.set('HSA_PATH', rocm_prefixes['hsa-rocr-dev'])
env.set('ROCMINFO_PATH', rocm_prefixes['rocminfo'])
env.set('DEVICE_LIB_PATH', rocm_prefixes['device_lib_path'])
env.set('HIP_PATH', rocm_prefixes['rocm-path'])
# this is used in comgr, see the following file:
# https://github.com/RadeonOpenCompute/ROCm-CompilerSupport/blob/rocm-4.0.0/lib/comgr/src/comgr-env.cpp
env.set('LLVM_PATH', paths['llvm-amdgpu'])
# Finally we have to set --rocm-path=<prefix> ourselves, which is not
# the same as --hip-device-lib-path (set by hipcc). It's used to set
# default bin, include and lib folders in clang. If it's not set it is
# inferred from the clang install dir (and they try to find
# /opt/rocm again...). If this path is set, there is no strict checking
# and parsing of the <prefix>/bin/.hipVersion file. Let's just set this
# to the hip prefix directory for non-external builds so that the
# bin/.hipVersion file can still be parsed.
# See also https://github.com/ROCm-Developer-Tools/HIP/issues/2223
if '@3.8.0:' in self.spec:
env.append_path('HIPCC_COMPILE_FLAGS_APPEND',
'--rocm-path={0}'.format(paths['rocm-path']),
separator=' ')
# it's necessary at runtime when using hiprtcCreateProgram and such
env.set('LLVM_PATH', rocm_prefixes['llvm-amdgpu'])
env.set('HIPCC_COMPILE_FLAGS_APPEND',
'--rocm-path={0}'.format(rocm_prefixes['device_lib_path']))
def setup_run_environment(self, env):
self.set_variables(env)
@@ -209,15 +175,11 @@ def flag_handler(self, name, flags):
def cmake_args(self):
args = [
self.define('HIP_COMPILER', 'clang'),
self.define('HIP_PLATFORM', 'rocclr'),
self.define('HSA_PATH', self.spec['hsa-rocr-dev'].prefix),
self.define('HIP_RUNTIME', 'ROCclr'),
'-DHIP_COMPILER=clang',
'-DHIP_PLATFORM=rocclr',
'-DHSA_PATH={0}'.format(self.spec['hsa-rocr-dev'].prefix),
'-DHIP_RUNTIME=ROCclr',
'-DLIBROCclr_STATIC_DIR={0}/lib'.format
(self.spec['hip-rocclr'].prefix)
]
# LIBROCclr_STATIC_DIR is unused from 3.6.0 and above
if '@3.5.0' in self.spec:
args.append(self.define('LIBROCclr_STATIC_DIR',
self.spec['hip-rocclr'].prefix.lib))
return args
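To illustrate the external-prefix fallback described above: for a hypothetical external hip registered at /opt/rocm-4.0.0/hip, the sibling component paths are derived from the parent directory. A Spack-free sketch using plain os.path in place of Spack's Prefix helper (the layout is assumed, matching the attributes used above):

import os

hip_prefix = '/opt/rocm-4.0.0/hip'             # hypothetical external prefix
fallback_prefix = os.path.dirname(hip_prefix)  # /opt/rocm-4.0.0

prefixes = {
    'rocm-path': fallback_prefix,
    'llvm-amdgpu': os.path.join(fallback_prefix, 'llvm'),   # clang++ under llvm/bin
    'hsa-rocr-dev': os.path.join(fallback_prefix, 'hsa'),
    'rocminfo': os.path.join(fallback_prefix, 'bin'),
    'device_lib_path': os.path.join(fallback_prefix, 'lib'),
}
for name, path in sorted(prefixes.items()):
    print(name, '->', path)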

View File

@@ -30,7 +30,6 @@ class Hwloc(AutotoolsPackage):
maintainers = ['bgoglin']
version('master', branch='master')
version('2.4.1', sha256='4267fe1193a8989f3ab7563a7499e047e77e33fed8f4dec16822a7aebcf78459')
version('2.4.0', sha256='30404065dc1d6872b0181269d0bb2424fbbc6e3b0a80491aa373109554006544')
version('2.3.0', sha256='155480620c98b43ddf9ca66a6c318b363ca24acb5ff0683af9d25d9324f59836')
version('2.2.0', sha256='2defba03ddd91761b858cbbdc2e3a6e27b44e94696dbfa21380191328485a433')

View File

@@ -7,7 +7,7 @@
from spack import *
class Hydrogen(CMakePackage, CudaPackage, ROCmPackage):
class Hydrogen(CMakePackage, CudaPackage):
"""Hydrogen: Distributed-memory dense and sparse-direct linear algebra
and optimization library. Based on the Elemental library."""
@@ -64,13 +64,11 @@ class Hydrogen(CMakePackage, CudaPackage, ROCmPackage):
description='Builds with support for FP16 precision data types')
conflicts('~openmp', when='+omp_taskloops')
conflicts('+cuda', when='+rocm', msg='CUDA and ROCm support are mutually exclusive')
depends_on('cmake@3.17.0:', type='build')
depends_on('mpi')
depends_on('hwloc@1.11:')
depends_on('hwloc +cuda +nvml', when='+cuda')
depends_on('hwloc@2.3.0:', when='+rocm')
# Note that #1712 forces us to enumerate the different blas variants
depends_on('openblas', when='blas=openblas')
@@ -98,16 +96,10 @@ class Hydrogen(CMakePackage, CudaPackage, ROCmPackage):
# Add Aluminum variants
depends_on('aluminum +cuda +nccl +ht +cuda_rma', when='+al +cuda')
depends_on('aluminum +rocm +rccl +ht', when='+al +rocm')
for arch in CudaPackage.cuda_arch_values:
depends_on('aluminum cuda_arch=%s' % arch, when='+al +cuda cuda_arch=%s' % arch)
# variants +rocm and amdgpu_targets are not automatically passed to
# dependencies, so do it manually.
for val in ROCmPackage.amdgpu_targets:
depends_on('aluminum amdgpu_target=%s' % val, when='+al +rocm amdgpu_target=%s' % val)
# Note that this forces us to use OpenBLAS until #1712 is fixed
depends_on('lapack', when='blas=openblas ~openmp_blas')
@@ -118,7 +110,6 @@ class Hydrogen(CMakePackage, CudaPackage, ROCmPackage):
depends_on('cuda', when='+cuda')
depends_on('cub', when='^cuda@:10.99')
depends_on('hipcub', when='+rocm')
depends_on('half', when='+half')
depends_on('llvm-openmp', when='%apple-clang +openmp')
@@ -152,9 +143,8 @@ def cmake_args(self):
'-DHydrogen_ENABLE_MPC:BOOL=%s' % ('+mpfr' in spec),
'-DHydrogen_GENERAL_LAPACK_FALLBACK=ON',
'-DHydrogen_ENABLE_ALUMINUM=%s' % ('+al' in spec),
'-DHydrogen_ENABLE_CUB=%s' % ('+cuda' in spec or '+rocm' in spec),
'-DHydrogen_ENABLE_CUB=%s' % ('+cuda' in spec),
'-DHydrogen_ENABLE_CUDA=%s' % ('+cuda' in spec),
'-DHydrogen_ENABLE_ROCM=%s' % ('+rocm' in spec),
'-DHydrogen_ENABLE_TESTING=%s' % ('+test' in spec),
'-DHydrogen_ENABLE_HALF=%s' % ('+half' in spec),
'-DHydrogen_ENABLE_GPU_FP16=%s' % enable_gpu_fp16,
@@ -163,18 +153,6 @@ def cmake_args(self):
if '+cuda' in spec:
args.append('-DCMAKE_CUDA_STANDARD=14')
if '+rocm' in spec:
args.extend([
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix),
'-DHIP_CXX_COMPILER={0}'.format(self.spec['hip'].hipcc)])
archs = self.spec.variants['amdgpu_target'].value
if archs != 'none':
arch_str = ",".join(archs)
args.append(
'-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
' -g -fsized-deallocation -fPIC'.format(arch_str)
)
# Add support for OS X to find OpenMP (LLVM installed via brew)
if self.spec.satisfies('%clang +openmp platform=darwin'):
clang = self.compiler.cc

View File

@@ -8,7 +8,7 @@
import sys
class Hypre(Package, CudaPackage):
class Hypre(Package):
"""Hypre is a library of high performance preconditioners that
features parallel multigrid methods for both structured and
unstructured grid problems."""
@@ -78,8 +78,6 @@ class Hypre(Package, CudaPackage):
depends_on("lapack")
depends_on('superlu-dist', when='+superlu-dist+mpi')
conflicts('+cuda', when='+int64')
# Patch to build shared libraries on Darwin does not apply to
# versions before 2.13.0
conflicts("+shared@:2.12.99 platform=darwin")
@@ -102,8 +100,7 @@ def url_for_version(self, version):
return url.format(version)
def _configure_args(self):
spec = self.spec
def install(self, spec, prefix):
# Note: --with-(lapack|blas)_libs= needs space separated list of names
lapack = spec['lapack'].libs
blas = spec['blas'].libs
@@ -165,40 +162,6 @@ def _configure_args(self):
else:
configure_args.append("--disable-debug")
if '+cuda' in self.spec:
configure_args.extend([
'--with-cuda',
'--enable-curand',
'--enable-cub'
])
else:
configure_args.extend([
'--without-cuda',
'--disable-curand',
'--disable-cub'
])
return configure_args
def setup_build_environment(self, env):
if '+mpi' in self.spec:
env.set('CC', self.spec['mpi'].mpicc)
env.set('CXX', self.spec['mpi'].mpicxx)
env.set('F77', self.spec['mpi'].mpif77)
if '+cuda' in self.spec:
env.set('CUDA_HOME', self.spec['cuda'].prefix)
env.set('CUDA_PATH', self.spec['cuda'].prefix)
cuda_arch = self.spec.variants['cuda_arch'].value
if cuda_arch:
arch_sorted = list(sorted(cuda_arch, reverse=True))
env.set('HYPRE_CUDA_SM', arch_sorted[0])
# In CUDA builds hypre currently doesn't handle flags correctly
env.append_flags(
'CXXFLAGS', '-O2' if '~debug' in self.spec else '-g')
def install(self, spec, prefix):
configure_args = self._configure_args()
# Hypre's source is staged under ./src so we'll have to manually
# cd into it.
with working_dir("src"):
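The CUDA branch of setup_build_environment above exports a single architecture: HYPRE_CUDA_SM gets the numerically highest entry of the cuda_arch variant (for two-digit arch strings, lexicographic and numeric order coincide). A tiny standalone sketch with a hypothetical variant value:

# Hypothetical multi-valued cuda_arch variant.
cuda_arch = ('60', '70', '75')

arch_sorted = list(sorted(cuda_arch, reverse=True))
print(arch_sorted[0])  # 75 -> exported as HYPRE_CUDA_SM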

View File

@@ -18,7 +18,6 @@ class Julia(Package):
maintainers = ['glennpj']
version('master', branch='master')
version('1.5.3', sha256='fb69337ca037576758547c7eed9ae8f153a9c052318327b6b7f1917408c14d91')
version('1.5.2', sha256='850aed3fe39057488ec633f29af705f5ada87e3058fd65e48ad26f91b713a19a')
version('1.5.1', sha256='1d0debfccfc7cd07047aa862dd2b1a96f7438932da1f5feff6c1033a63f9b1d4')
version('1.5.0', sha256='4a6ffadc8dd04ca0b7fdef6ae203d0af38185e57b78f7c0b972c4707354a6d1b')

View File

@@ -19,8 +19,6 @@ class Kcov(CMakePackage):
depends_on('cmake@2.8.4:', type='build')
depends_on('zlib')
depends_on('curl')
depends_on('elfutils')
depends_on('binutils +libiberty', type='link')
def cmake_args(self):
# Necessary at least on macOS, fixes linking error to LLDB

View File

@@ -7,7 +7,7 @@
from spack import *
class Lbann(CMakePackage, CudaPackage, ROCmPackage):
class Lbann(CMakePackage, CudaPackage):
"""LBANN: Livermore Big Artificial Neural Network Toolkit. A distributed
memory, HPC-optimized, model and data parallel training toolkit for deep
neural networks."""
@@ -42,7 +42,7 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage):
'(note that for v0.99 conduit is required)')
variant('deterministic', default=False,
description='Builds with support for deterministic execution')
variant('dihydrogen', default=True,
variant('dihydrogen', default=False,
description='Builds with support for DiHydrogen Tensor Library')
variant('distconv', default=False,
description='Builds with support for spatial, filter, or channel '
@@ -64,19 +64,15 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage):
variant('vtune', default=False, description='Builds with support for Intel VTune')
variant('onednn', default=False, description='Support for OneDNN')
variant('nvshmem', default=False, description='Support for NVSHMEM')
variant('python', default=True, description='Support for Python extensions (e.g. Data Reader)')
variant('python_dr', default=False, description='Support for generic Python Data Reader')
variant('pfe', default=True, description='Python Frontend for generating and launching models')
variant('boost', default=False, description='Enable callbacks that use Boost libraries')
# Variant Conflicts
conflicts('@:0.90,0.99:', when='~conduit')
conflicts('@0.90:0.101.99', when='+fft')
conflicts('@:0.90,0.101.99:', when='~dihydrogen')
conflicts('~cuda', when='+nvprof')
conflicts('~hwloc', when='+al')
conflicts('~cuda', when='+nvshmem')
conflicts('+cuda', when='+rocm', msg='CUDA and ROCm support are mutually exclusive')
conflicts('+extras', when='~pfe', msg='Python extras require the Python front end support')
depends_on('cmake@3.17.0:', type='build')
@@ -93,8 +89,6 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage):
depends_on('hydrogen +cuda', when='+cuda')
depends_on('hydrogen ~half', when='~half')
depends_on('hydrogen +half', when='+half')
depends_on('hydrogen ~rocm', when='~rocm')
depends_on('hydrogen +rocm', when='+rocm')
depends_on('hydrogen build_type=Debug', when='build_type=Debug')
# Older versions depended on Elemental not Hydrogen
@@ -109,7 +103,6 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage):
# Add Aluminum variants
depends_on('aluminum +cuda +nccl +ht +cuda_rma', when='+al +cuda')
depends_on('aluminum +rocm +rccl +ht', when='+al +rocm')
depends_on('dihydrogen +openmp', when='+dihydrogen')
depends_on('dihydrogen ~cuda', when='+dihydrogen ~cuda')
@@ -121,8 +114,6 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage):
depends_on('dihydrogen +half', when='+dihydrogen +half')
depends_on('dihydrogen ~nvshmem', when='+dihydrogen ~nvshmem')
depends_on('dihydrogen +nvshmem', when='+dihydrogen +nvshmem')
depends_on('dihydrogen ~rocm', when='+dihydrogen ~rocm')
depends_on('dihydrogen +rocm', when='+dihydrogen +rocm')
depends_on('dihydrogen@0.1', when='@0.101:0.101.99 +dihydrogen')
depends_on('dihydrogen@:0.0,0.2:', when='@:0.90,0.102: +dihydrogen')
conflicts('~dihydrogen', when='+distconv')
@@ -133,22 +124,13 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage):
depends_on('dihydrogen cuda_arch=%s' % arch, when='+dihydrogen cuda_arch=%s' % arch)
depends_on('nccl cuda_arch=%s' % arch, when='+cuda cuda_arch=%s' % arch)
# variants +rocm and amdgpu_targets are not automatically passed to
# dependencies, so do it manually.
for val in ROCmPackage.amdgpu_targets:
depends_on('hydrogen amdgpu_target=%s' % val, when='amdgpu_target=%s' % val)
depends_on('aluminum amdgpu_target=%s' % val, when='+al amdgpu_target=%s' % val)
depends_on('dihydrogen amdgpu_target=%s' % val, when='+dihydrogen amdgpu_target=%s' % val)
depends_on('cudnn', when='@0.90:0.100.99 +cuda')
depends_on('cudnn@8.0.2:', when='@:0.90,0.101: +cuda')
depends_on('cub', when='@0.94:0.98.2 +cuda ^cuda@:10.99')
depends_on('hipcub', when='+rocm')
depends_on('mpi')
depends_on('hwloc@1.11:', when='@:0.90,0.102: +hwloc')
depends_on('hwloc@1.11:1.11.99', when='@0.95:0.101.99 +hwloc')
depends_on('hwloc +cuda +nvml', when='+cuda')
depends_on('hwloc@2.3.0:', when='+rocm')
depends_on('half', when='+half')
@@ -170,28 +152,19 @@ class Lbann(CMakePackage, CudaPackage, ROCmPackage):
depends_on('conduit@0.4.0: +hdf5~hdf5_compat', when='@0.94:0.99 +conduit')
depends_on('conduit@0.4.0: +hdf5~hdf5_compat', when='@:0.90,0.99:')
# LBANN can use Python in two modes 1) as part of an extensible framework
# and 2) to drive the front end model creation and launch
# Core library support for Python Data Reader and extensible interface
depends_on('python@3: +shared', type=('run'), when='@:0.90,0.99: +python')
extends("python", when='+python')
# Python front end and possible extra packages
depends_on('python@3: +shared', type=('build', 'run'), when='@:0.90,0.99: +pfe')
extends("python", when='+pfe')
depends_on('py-setuptools', type='build', when='+pfe')
depends_on('py-argparse', type='run', when='@:0.90,0.99: ^python@:2.6 +pfe')
depends_on('py-configparser', type='run', when='@:0.90,0.99: +pfe +extras')
depends_on('py-graphviz@0.10.1:', type='run', when='@:0.90,0.99: +pfe +extras')
depends_on('py-matplotlib@3.0.0:', type='run', when='@:0.90,0.99: +pfe +extras')
depends_on('py-numpy@1.16.0:', type=('build', 'run'), when='@:0.90,0.99: +pfe +extras')
depends_on('py-onnx@1.3.0:', type='run', when='@:0.90,0.99: +pfe +extras')
depends_on('py-pandas@0.24.1:', type='run', when='@:0.90,0.99: +pfe +extras')
depends_on('py-texttable@1.4.0:', type='run', when='@:0.90,0.99: +pfe +extras')
depends_on('py-pytest', type='test', when='@:0.90,0.99: +pfe')
depends_on('py-protobuf+cpp@3.10.0', type=('build', 'run'), when='@:0.90,0.99: +pfe')
depends_on('python@3: +shared', type=('build', 'run'), when='@:0.90,0.99:')
extends("python")
depends_on('py-setuptools', type='build')
depends_on('py-argparse', type='run', when='@:0.90,0.99: ^python@:2.6')
depends_on('py-configparser', type='run', when='@:0.90,0.99: +extras')
depends_on('py-graphviz@0.10.1:', type='run', when='@:0.90,0.99: +extras')
depends_on('py-matplotlib@3.0.0:', type='run', when='@:0.90,0.99: +extras')
depends_on('py-numpy@1.16.0:', type=('build', 'run'), when='@:0.90,0.99: +extras')
depends_on('py-onnx@1.3.0:', type='run', when='@:0.90,0.99: +extras')
depends_on('py-pandas@0.24.1:', type='run', when='@:0.90,0.99: +extras')
depends_on('py-texttable@1.4.0:', type='run', when='@:0.90,0.99: +extras')
depends_on('py-pytest', type='test', when='@:0.90,0.99:')
depends_on('py-protobuf+cpp@3.10.0', type=('build', 'run'), when='@:0.90,0.99:')
depends_on('protobuf+shared@3.10.0', when='@:0.90,0.99:')
depends_on('py-breathe', type='build', when='+docs')
@@ -262,13 +235,14 @@ def cmake_args(self):
'-DLBANN_DETERMINISTIC:BOOL=%s' % ('+deterministic' in spec),
'-DLBANN_WITH_HWLOC=%s' % ('+hwloc' in spec),
'-DLBANN_WITH_ALUMINUM:BOOL=%s' % ('+al' in spec),
'-DLBANN_WITH_BOOST:BOOL=%s' % ('+boost' in spec),
'-DLBANN_WITH_CONDUIT:BOOL=%s' % ('+conduit' in spec),
'-DLBANN_WITH_CUDA:BOOL=%s' % ('+cuda' in spec),
'-DLBANN_WITH_CUDNN:BOOL=%s' % ('+cuda' in spec),
'-DLBANN_WITH_NVSHMEM:BOOL=%s' % ('+nvshmem' in spec),
'-DLBANN_WITH_FFT:BOOL=%s' % ('+fft' in spec),
'-DLBANN_WITH_ONEDNN:BOOL=%s' % ('+onednn' in spec),
'-DLBANN_WITH_EMBEDDED_PYTHON:BOOL=%s' % ('+python' in spec),
'-DLBANN_WITH_PYTHON_FRONTEND:BOOL=%s' % ('+pfe' in spec),
'-DLBANN_WITH_EMBEDDED_PYTHON:BOOL=%s' % ('+python_dr' in spec),
'-DLBANN_WITH_PYTHON:BOOL=%s' % ('+pfe' in spec),
'-DLBANN_WITH_TBINF=OFF',
'-DLBANN_WITH_UNIT_TESTING:BOOL=%s' % (self.run_tests),
'-DLBANN_WITH_VISION:BOOL=%s' % ('+vision' in spec),
@@ -348,18 +322,6 @@ def cmake_args(self):
args.append(
'-DLBANN_WITH_DISTCONV:BOOL=%s' % ('+distconv' in spec))
if '+rocm' in spec:
args.extend([
'-DHIP_ROOT_DIR={0}'.format(spec['hip'].prefix),
'-DHIP_CXX_COMPILER={0}'.format(self.spec['hip'].hipcc)])
archs = self.spec.variants['amdgpu_target'].value
if archs != 'none':
arch_str = ",".join(archs)
args.append(
'-DHIP_HIPCC_FLAGS=--amdgpu-target={0}'
' -g -fsized-deallocation -fPIC -std=c++17'.format(arch_str)
)
return args
@when('@0.91:0.93')
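For reference, the ROCm branch of cmake_args above folds the multi-valued amdgpu_target variant into one comma-separated --amdgpu-target flag. A standalone sketch with hypothetical targets:

# Hypothetical amdgpu_target variant value.
archs = ('gfx906', 'gfx908')

arch_str = ','.join(archs)
flag = '-DHIP_HIPCC_FLAGS=--amdgpu-target={0} -g -fsized-deallocation -fPIC -std=c++17'.format(arch_str)
print(flag)  # -DHIP_HIPCC_FLAGS=--amdgpu-target=gfx906,gfx908 -g -fsized-deallocation -fPIC -std=c++17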

View File

@@ -1,22 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libblastrampoline(MakefilePackage):
"""Using PLT trampolines to provide a BLAS and LAPACK demuxing library."""
homepage = "https://github.com/staticfloat/libblastrampoline"
git = "https://github.com/staticfloat/libblastrampoline.git"
version('3.0.0', commit='7bb259a69e5bad0adb55171b2bee164a30ce2e91')
version('2.2.0', commit='45f4a20ffdba5d368db66d71885312f5f73c2dc7')
build_directory = 'src'
def install(self, spec, prefix):
with working_dir(self.build_directory):
make('prefix={0}'.format(prefix), 'install')

View File

@@ -20,7 +20,6 @@ class Libcircle(AutotoolsPackage):
depends_on('mpi')
depends_on('pkgconfig', type='build')
depends_on('libpciaccess', type='link')
@when('@master')
def autoreconf(self, spec, prefix):

View File

@@ -13,21 +13,6 @@ class Libfuse(MesonPackage):
homepage = "https://github.com/libfuse/libfuse"
url = "https://github.com/libfuse/libfuse/archive/fuse-3.9.3.tar.gz"
version('3.10.2', sha256='a16f93cc083264afd0d2958a0dc88f24c6c5d40a9f3842c645b1909e13edb75f')
version('3.10.1', sha256='d8954e7b4c022c651aa80db3bb4a161437dd285cd5f1a23d0e25f055dcebe00d')
version('3.10.0', sha256='52bbb52035f7eeaa54d139e21805d357f848f6e02ac956831d04988165a92c7b')
version('3.9.4', sha256='9e076ae757a09cac9ce1beb50b3361ae83a831e5abc0f1bf5cdf771cd1320338')
version('3.9.3', sha256='0f8f7ad9cc6667c6751efa425dd0a665dcc9d75f0b7fc0cb5b85141a514110e9')
version('3.9.2', sha256='b4409255cbda6f6975ca330f5b04cb335b823a95ddd8c812c3d224ec53478fc0')
variant('useroot', default=False)
def meson_args(self):
args = []
if '+useroot' in self.spec:
args.append('-Duseroot=true')
else:
args.append('-Duseroot=false')
return args
version('3.9.4', sha256='9e076ae757a09cac9ce1beb50b3361ae83a831e5abc0f1bf5cdf771cd1320338')
version('3.9.3', sha256='0f8f7ad9cc6667c6751efa425dd0a665dcc9d75f0b7fc0cb5b85141a514110e9')
version('3.9.2', sha256='b4409255cbda6f6975ca330f5b04cb335b823a95ddd8c812c3d224ec53478fc0')

View File

@@ -12,9 +12,8 @@ class Libjpeg(AutotoolsPackage):
alongside various utilities for handling JPEG data."""
homepage = "http://www.ijg.org"
url = "http://www.ijg.org/files/jpegsrc.v9d.tar.gz"
url = "http://www.ijg.org/files/jpegsrc.v9c.tar.gz"
version('9d', sha256='6c434a3be59f8f62425b2e3c077e785c9ce30ee5874ea1c270e843f273ba71ee')
version('9c', sha256='650250979303a649e21f87b5ccd02672af1ea6954b911342ea491f351ceb7122')
version('9b', sha256='240fd398da741669bf3c90366f58452ea59041cacc741a489b99f2f6a0bad052')
version('9a', sha256='3a753ea48d917945dd54a2d97de388aa06ca2eb1066cbfdc6652036349fe05a7')

View File

@@ -24,7 +24,6 @@ class Libpulsar(CMakePackage):
depends_on('pkg-config')
depends_on('openssl')
depends_on('cmake @3.14:', type='build')
depends_on('curl', type=('build', 'link'))
root_cmakelists_dir = 'pulsar-client-cpp'

View File

@@ -1,44 +0,0 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Libqrencode(AutotoolsPackage):
"""libqrencode - a fast and compact QR Code encoding library."""
homepage = "https://fukuchi.org/works/qrencode/"
url = "https://github.com/fukuchi/libqrencode/archive/v4.1.1.tar.gz"
git = "https://github.com/fukuchi/libqrencode.git"
maintainers = ['cessenat']
version('master', branch='master')
version('4.1.1', sha256='5385bc1b8c2f20f3b91d258bf8ccc8cf62023935df2d2676b5b67049f31a049c')
depends_on('autoconf', type='build')
depends_on('automake', type='build')
# We assume a reasonably recent libtool is necessary
depends_on('libtool@2.4.2:', type='build')
depends_on('m4', type='build')
depends_on('pkgconfig', type='build')
# https://fukuchi.org/works/qrencode/ requires libpng-dev
depends_on('libpng@1.2.0:', type='link')
def autoreconf(self, spec, prefix):
# We also had to call autoreconf:
# https://stackoverflow.com/questions/3096989/libtool-version-mismatch-error
# It appears $LIBTOOLIZE --force --automake --copy is not necessary
args = [
'autoreconf --force --install',
]
with open('autogen.sh', 'a') as fh:
fh.write('\n'.join(args))
# https://fukuchi.org/works/qrencode/
# If there is no "configure" script in the source code directory,
# run "autogen.sh" at first to generate it - this is mandatory if
# you downloaded the source from GitHub
Executable('./autogen.sh')()

View File

@@ -27,8 +27,6 @@ class Libxc(AutotoolsPackage, CudaPackage):
conflicts('+shared +cuda', msg='Only ~shared supported with +cuda')
conflicts('+cuda', when='@:4', msg='CUDA support only in libxc 5.0.0 and above')
depends_on('perl', type='build')
patch('0001-Bugfix-avoid-implicit-pointer-cast-to-make-libxc-com.patch', when='@5.0.0')
patch('0002-Mark-xc_erfcx-a-GPU_FUNCTION.patch', when='@5.0.0')

View File

@@ -410,12 +410,12 @@ def cmake_args(self):
if "+python" in spec and "+lldb" in spec:
cmake_args.append("-DLLDB_USE_SYSTEM_SIX:Bool=TRUE")
if "+lldb" in spec and spec.satisfies("@10.0.0:,doe"):
cmake_args.append("-DLLDB_ENABLE_PYTHON:Bool={0}".format(
'ON' if '+python' in spec else 'OFF'))
if "+lldb" in spec and spec.satisfies("@:9.9.9"):
cmake_args.append("-DLLDB_DISABLE_PYTHON:Bool={0}".format(
'ON' if '~python' in spec else 'OFF'))
if "+lldb" in spec and spec.satisfies("@10.0.0:"):
cmake_args.append("-DLLDB_ENABLE_PYTHON:Bool={0}".format(
'ON' if '+python' in spec else 'OFF'))
if "+gold" in spec:
cmake_args.append(

View File

@@ -36,6 +36,8 @@ class Mapnik(AutotoolsPackage):
depends_on('sqlite+rtree', type=('build', 'link', 'run'))
depends_on('libwebp')
conflicts('%gcc@9.0.0:')
def setup_build_environment(self, env):
spec = self.spec
env.set('GDAL_DATA', spec['gdal'].prefix.share.gdal)

View File

@@ -14,7 +14,6 @@ class Masurca(Package):
homepage = "http://www.genome.umd.edu/masurca.html"
url = "https://github.com/alekseyzimin/masurca/releases/download/v3.3.1/MaSuRCA-3.3.1.tar.gz"
version('4.0.1', sha256='68628acaf3681d09288b48a35fec7909b347b84494fb26c84051942256299870')
version('3.3.1', sha256='587d0ee2c6b9fbd3436ca2a9001e19f251b677757fe5e88e7f94a0664231e020')
version('3.2.9', sha256='795ad4bd42e15cf3ef2e5329aa7e4f2cdeb7e186ce2e350a45127e319db2904b')
@@ -31,12 +30,7 @@ def patch(self):
m = join_path('global-1', makefile)
filter_file('-minline-all-stringops', '', m)
def setup_build_environment(self, env):
if '@4:' in self.spec:
env.set('DEST', self.prefix)
def install(self, spec, prefix):
installer = Executable('./install.sh')
installer()
if '@:4' in self.spec:
install_tree('.', prefix)
install_tree('.', prefix)

View File

@@ -10,26 +10,25 @@ class Mcutils(MakefilePackage):
"""A collection of routines for classification and manipulation of
particle physics simulated HepMC event records."""
homepage = "https://gitlab.com/hepcedar/mcutils"
git = "https://gitlab.com/hepcedar/mcutils.git"
homepage = "https://bitbucket.org/andybuckley/mcutils"
url = "https://bitbucket.org/andybuckley/mcutils/get/mcutils-1.3.4.tar.gz"
tags = ['hep']
version('1.3.5', tag='mcutils-1.3.5')
version('1.3.4', tag='mcutils-1.3.4')
version('1.3.3', tag='mcutils-1.3.3')
version('1.3.2', tag='mcutils-1.3.2')
version('1.3.1', tag='mcutils-1.3.1')
version('1.3.0', tag='mcutils-1.3.0')
version('1.2.1', tag='mcutils-1.2.1')
version('1.2.0', tag='mcutils-1.2.0')
version('1.1.2', tag='mcutils-1.1.2')
version('1.1.1', tag='mcutils-1.1.1')
version('1.1.0', tag='mcutils-1.1.0')
version('1.0.3', tag='mcutils-1.0.3')
version('1.0.2', tag='mcutils-1.0.2')
version('1.0.1', tag='mcutils-1.0.1')
version('1.0.0', tag='mcutils-1.0.0')
version('1.3.4', sha256='0bf9795cc248871ab2b663d2eef647311eacaea4982997950096de68747e65a3')
version('1.3.3', sha256='bfb2f0e0e6de358928436f309f3f1b084d3d652073c440f262de878332116ecb')
version('1.3.2', sha256='e17d417e8d4f8d17a6879ea18dcd2cd76e161d37eae08b84893504d1b08f9708')
version('1.3.1', sha256='081263ee6844fccedad780e6a2fbaf1ad0073a6706bc4b34109050b72c2c4b27')
version('1.3.0', sha256='20a89ce536547dc8f56e7779a3ec8cfe9987edb1646009ecfc682ff1ddf0277b')
version('1.2.1', sha256='004325be41925d97e711ffe4311d9c8aa8e88873541bcc1a385d2e1ce1d17a96')
version('1.2.0', sha256='f9589d45bff06d8c8742d35d78d1ed570a0d181fd7ee5d6f97ab9e48f0ee32f4')
version('1.1.2', sha256='5a5781caf2d81c21f4b040a1d31975c354526bcf7c8c9067543f7303c8155844')
version('1.1.1', sha256='3e5c47d2264886613fc9423b020cf50dc7031a02b752da3a84f794c36ba7443a')
version('1.1.0', sha256='96fc2586430032ed4b378edb02150c5c9db405e1767dbf847ffe9ac043daf6e9')
version('1.0.3', sha256='b5bec5a4b2146b6987b351d632119c3b4c449c2ee53ae0ddc8cb1d3672907df5')
version('1.0.2', sha256='74e2c381f5f3719888b15a2e00075051bb2b84b3d73633d429818a77de66ca7c')
version('1.0.1', sha256='bb884a4cfb56b5139c08df0be554466e504e9c46096a858f904d659894a62131')
version('1.0.0', sha256='d08dea19fb42b1846e0a7134e2347648b037bf82b2d75086d018734bc2996b06')
depends_on('heputils', when='@1.1.0:')

View File

@@ -16,7 +16,6 @@ class Meson(PythonPackage):
maintainers = ['michaelkuhn']
version('0.57.1', sha256='0c043c9b5350e9087cd4f6becf6c0d10b1d618ca3f919e0dcca2cdf342360d5d')
version('0.57.0', sha256='fd26a27c1a509240c668ebd29d280649d9239cf8684ead51d5cb499d1e1188bd')
version('0.56.2', sha256='aaae961c3413033789248ffe6762589e80b6cf487c334d0b808e31a32c48f35f')
version('0.56.0', sha256='a9ca7adf66dc69fbb7e583f7c7aef16b9fe56ec2874a3d58747e69a3affdf300')

View File

@@ -1,18 +0,0 @@
--- a/src/interface/makefile
+++ b/src/interface/makefile
@@ -45,10 +45,11 @@ lammps-nnp:
tar -xzf $(LAMMPS_VERSION).tar.gz && mv lammps-$(LAMMPS_VERSION) lammps-nnp
ln -s $(PROJECT_DIR)/../../ lammps-nnp/lib/nnp
cp -r ./LAMMPS/src/USER-NNP lammps-nnp/src/
- sed -i "s/^CC .*$$/CC = $(PROJECT_MPICC)/" lammps-nnp/src/MAKE/Makefile.mpi
- sed -i "s/^CCFLAGS .*$$/CCFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)/" lammps-nnp/src/MAKE/Makefile.mpi
- sed -i "s/^LINK .*$$/LINK = $(PROJECT_MPICC)/" lammps-nnp/src/MAKE/Makefile.mpi
- sed -i "s/^LINKFLAGS .*$$/LINKFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)/" lammps-nnp/src/MAKE/Makefile.mpi
+ sed -i "s/final(/final2(/g" lammps-nnp/src/hashlittle.cpp
+ sed -i "s|^CC .*$$|CC = $(PROJECT_MPICC)|" lammps-nnp/src/MAKE/Makefile.mpi
+ sed -i "s|^CCFLAGS .*$$|CCFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)|" lammps-nnp/src/MAKE/Makefile.mpi
+ sed -i "s|^LINK .*$$|LINK = $(PROJECT_MPICC)|" lammps-nnp/src/MAKE/Makefile.mpi
+ sed -i "s|^LINKFLAGS .*$$|LINKFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)|" lammps-nnp/src/MAKE/Makefile.mpi
if [ "$(MODE)" = "test" ]; then \
sed -i "/^CCFLAGS =/ s/$$/ $(PROJECT_DEBUG) $(PROJECT_TEST)/" lammps-nnp/src/MAKE/Makefile.mpi; \
sed -i "/^LINKFLAGS =/ s/$$/ $(PROJECT_DEBUG) $(PROJECT_TEST)/" lammps-nnp/src/MAKE/Makefile.mpi; \

View File

@@ -1,17 +0,0 @@
--- a/src/interface/makefile
+++ b/src/interface/makefile
@@ -46,10 +46,10 @@ lammps-nnp:
tar -xzf $(LAMMPS_VERSION).tar.gz && mv lammps-$(LAMMPS_VERSION) lammps-nnp
ln -s $(PROJECT_DIR)/../../ lammps-nnp/lib/nnp
cp -r ./LAMMPS/src/USER-NNP lammps-nnp/src/
- @sed -i.bak "s/^CC .*$$/CC = $(PROJECT_MPICC)/" lammps-nnp/src/MAKE/Makefile.mpi
- @sed -i.bak "s/^CCFLAGS .*$$/CCFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)/" lammps-nnp/src/MAKE/Makefile.mpi
- @sed -i.bak "s/^LINK .*$$/LINK = $(PROJECT_MPICC)/" lammps-nnp/src/MAKE/Makefile.mpi
- @sed -i.bak "s/^LINKFLAGS .*$$/LINKFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)/" lammps-nnp/src/MAKE/Makefile.mpi
+ @sed -i.bak "s|^CC .*$$|CC = $(PROJECT_MPICC)|" lammps-nnp/src/MAKE/Makefile.mpi
+ @sed -i.bak "s|^CCFLAGS .*$$|CCFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)|" lammps-nnp/src/MAKE/Makefile.mpi
+ @sed -i.bak "s|^LINK .*$$|LINK = $(PROJECT_MPICC)|" lammps-nnp/src/MAKE/Makefile.mpi
+ @sed -i.bak "s|^LINKFLAGS .*$$|LINKFLAGS = $(PROJECT_CFLAGS) $(PROJECT_CFLAGS_MPI)|" lammps-nnp/src/MAKE/Makefile.mpi
if [ "$(MODE)" = "test" ]; then \
sed -i.bak "/^CCFLAGS =/ s/$$/ $(LAMMPS_DEBUG) $(PROJECT_TEST)/" lammps-nnp/src/MAKE/Makefile.mpi; \
sed -i.bak "/^LINKFLAGS =/ s/$$/ $(LAMMPS_DEBUG) $(PROJECT_TEST)/" lammps-nnp/src/MAKE/Makefile.mpi; \

View File

@@ -1,30 +0,0 @@
--- a/src/libnnp/makefile
+++ b/src/libnnp/makefile
@@ -72,11 +72,11 @@ headers: version
version:
@$(eval GIT_VERSION = $(shell git describe --tags --always))
- @sed -i.bak -E "s/(NNP_GIT_VERSION) .*/\1 \"$(GIT_VERSION)\"/" version.h
+ @sed -i.bak -E "s|(NNP_GIT_VERSION) .*|\1 \"$(GIT_VERSION)\"|" version.h
@$(eval GIT_REV = $(shell git rev-parse HEAD))
- @sed -i.bak -E "s/(NNP_GIT_REV) .*/\1 \"$(GIT_REV)\"/" version.h
+ @sed -i.bak -E "s|(NNP_GIT_REV) .*|\1 \"$(GIT_REV)\"|" version.h
@$(eval GIT_BRANCH = $(shell git rev-parse --abbrev-ref HEAD))
- @sed -i.bak -E "s/(NNP_GIT_BRANCH) .*/\1 \"$(GIT_BRANCH)\"/" version.h
+ @sed -i.bak -E "s|(NNP_GIT_BRANCH) .*|\1 \"$(GIT_BRANCH)\"|" version.h
@rm version.h.bak
$(LIB).so: CFLAGS+= -fPIC
@@ -97,9 +97,9 @@ clean: clean-version clean-headers
$(RM) $(PROJECT_LIB)/$(LIB).so $(PROJECT_LIB)/$(LIB).a
clean-version:
- @sed -i.bak -E "s/(NNP_GIT_VERSION) .*/\1 \"\"/" version.h
- @sed -i.bak -E "s/(NNP_GIT_REV) .*/\1 \"\"/" version.h
- @sed -i.bak -E "s/(NNP_GIT_BRANCH) .*/\1 \"\"/" version.h
+ @sed -i.bak -E "s|(NNP_GIT_VERSION) .*|\1 \"\"|" version.h
+ @sed -i.bak -E "s|(NNP_GIT_REV) .*|\1 \"\"|" version.h
+ @sed -i.bak -E "s|(NNP_GIT_BRANCH) .*|\1 \"\"|" version.h
@rm version.h.bak
clean-headers:
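The n2p2 patches above mainly switch sed's substitution delimiter from / to |, because the values being spliced in (compiler paths, flag lists) can themselves contain slashes. A small Python sketch of the failure mode, with a hypothetical compiler path:

# Hypothetical MPI compiler path containing slashes.
mpicc = '/usr/tce/packages/mvapich2/bin/mpicc'

bad = 's/^CC .*$/CC = {0}/'.format(mpicc)   # the path's slashes collide with the delimiter
good = 's|^CC .*$|CC = {0}|'.format(mpicc)  # '|' does not occur in the path

print(bad)   # sed typically rejects this (extra delimiters in the replacement)
print(good)  # s|^CC .*$|CC = /usr/tce/packages/mvapich2/bin/mpicc|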

Some files were not shown because too many files have changed in this diff.