From 0ce4eef2562fde2163bad9fc45695308ab7a1d3a Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Mon, 27 Jan 2020 14:55:09 -0600 Subject: [PATCH 001/178] Only set tcl default. Remove lmod default. (#14640) --- lib/spack/spack/test/data/config/config.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/spack/spack/test/data/config/config.yaml b/lib/spack/spack/test/data/config/config.yaml index 31e5e36bada..0cf0d32cb0e 100644 --- a/lib/spack/spack/test/data/config/config.yaml +++ b/lib/spack/spack/test/data/config/config.yaml @@ -14,4 +14,3 @@ config: dirty: false module_roots: tcl: $spack/share/spack/modules - lmod: $spack/share/spack/lmod From e01c39019c696230d0dee0c1c59d79bd6d099ef6 Mon Sep 17 00:00:00 2001 From: Joe Koning Date: Mon, 27 Jan 2020 15:26:43 -0800 Subject: [PATCH 002/178] Add the py-merlinwf package (#14622) * Add the py-merlinwf package * Fix importlib-resources package name for spack naming convention. * Add build to dependencies and add updated versions. * Remove pytest-runner dependency. * Fix typo. * Add the py-tabulate dependency. * Add sha256 for version 1.0.0 * Change to maestro version 1.1.5. * Increase to version 1.0.4. * Bump maestrowf version and prepare for new pypi version. * Add sha256sum for version 1.1.5 * Add version 1.1.1. Update maestrowf version to 1.1.7 * Add versions 1.0.5, 1.1.0, 1.1.1 and potential 1.2.0. * Add version 1.2.0 and when on maestrowf@1.1.6. * Add version 1.2.2 , remove 1.2.1 and 1.1.0. * Update var/spack/repos/builtin/packages/py-merlinwf/package.py Co-Authored-By: Adam J. Stewart * Update var/spack/repos/builtin/packages/py-merlinwf/package.py Co-Authored-By: Adam J. Stewart * Remove mysql variant until new mysql interface module is enabled. The mysql code may be removed. Co-authored-by: Adam J. 
Stewart --- .../builtin/packages/py-merlinwf/package.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 var/spack/repos/builtin/packages/py-merlinwf/package.py diff --git a/var/spack/repos/builtin/packages/py-merlinwf/package.py b/var/spack/repos/builtin/packages/py-merlinwf/package.py new file mode 100644 index 00000000000..2082e779978 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-merlinwf/package.py @@ -0,0 +1,38 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class PyMerlinwf(PythonPackage): + """Merlin Workflow for HPC.""" + + homepage = "https://github.com/LLNL/merlin" + url = "https://pypi.io/packages/source/m/merlinwf/merlinwf-1.2.3.tar.gz" + git = "https://github.com/LLNL/merlin.git" + + version('1.2.3', sha256='6b13a315f3e8e2894ea05d9cc072639f02eaf71ae0fdbd2bafebd1c20c8470ab') + version('1.1.1', sha256='306055a987e42a79ce348a3f9d71293ed8a9b7f5909c26b6fd233d6a176fff6d') + version('1.0.5', sha256='d66f50eac84ff9d7aa484f2d9655dc60f0352196d333284d81b6623a6f0aa180') + version('master', branch='master') + version('develop', branch='develop') + + depends_on('python@3.6:', type=('build', 'run')) + depends_on('py-setuptools', type=('build', 'run')) + + depends_on('py-pytest', type='test') + + depends_on('py-cached-property', type=('build', 'run')) + depends_on('py-celery@4.3.0:+redis', type=('build', 'run')) + depends_on('py-coloredlogs@10.0:', type=('build', 'run')) + depends_on('py-cryptography', type=('build', 'run')) + depends_on('py-importlib-resources', when="^python@3.0:3.6.99", type=('build', 'run')) + depends_on('py-maestrowf@1.1.7dev0:', when="@1.2.0:", type=('build', 'run')) + depends_on('py-maestrowf@1.1.6:', when="@:1.1.99", type=('build', 'run')) + depends_on('py-numpy', type=('build', 'run')) + depends_on('py-parse', type=('build', 
'run')) + depends_on('py-psutil@5.1.0:', type=('build', 'run')) + depends_on('py-pyyaml@5.1.2:', type=('build', 'run')) + depends_on('py-tabulate', type=('build', 'run')) From d0523ca0871dd9e232136f6a0bfdcdef3ea9424e Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Mon, 27 Jan 2020 20:04:48 -0600 Subject: [PATCH 003/178] Follow the example of spack arch (#14642) --- lib/spack/spack/binary_distribution.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 64ce23a8d63..fd89a80fa81 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -33,6 +33,7 @@ from spack.spec import Spec from spack.stage import Stage from spack.util.gpg import Gpg +import spack.architecture as architecture _build_cache_relative_path = 'build_cache' @@ -669,6 +670,9 @@ def get_specs(force=False, use_arch=False): """ global _cached_specs + arch = architecture.Arch(architecture.platform(), + 'default_os', 'default_target') + if _cached_specs: tty.debug("Using previously-retrieved specs") return _cached_specs @@ -691,8 +695,8 @@ def get_specs(force=False, use_arch=False): if re.search('spec.yaml', file): link = url_util.join(fetch_url_build_cache, file) if use_arch and re.search('%s-%s' % - (spack.architecture.platform, - spack.architecture.os), + (arch.platform, + arch.os), file): urls.add(link) else: @@ -705,8 +709,8 @@ def get_specs(force=False, use_arch=False): for link in links: if re.search("spec.yaml", link): if use_arch and re.search('%s-%s' % - (spack.architecture.platform, - spack.architecture.os), + (arch.platform, + arch.os), link): urls.add(link) else: From 7badd69d1e652e470eb2b856db97402b9b072d0e Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Mon, 27 Jan 2020 20:10:01 -0600 Subject: [PATCH 004/178] Package source ID cannot be determined when the url can't be extrapolated for older version. 
(#14237) --- lib/spack/spack/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index cc13017f4c8..6856e3a3973 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1262,7 +1262,10 @@ def content_hash(self, content=None): raise spack.error.SpackError(err_msg) hash_content = list() - source_id = fs.for_package_version(self, self.version).source_id() + try: + source_id = fs.for_package_version(self, self.version).source_id() + except fs.ExtrapolationError: + source_id = None if not source_id: # TODO? in cases where a digest or source_id isn't available, # should this attempt to download the source and set one? This From d86816bc1acc1cd8f9abb054ec92fe493ea6ae50 Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Mon, 27 Jan 2020 21:25:23 -0500 Subject: [PATCH 005/178] Fix: hash-based references to upstream specs (#14629) Spack commands referring to upstream-installed specs by hash have been broken since 6b619da (merged September 2019), which added a new Database function specifically for parsing hashes from command-line specs; this function was inappropriately attempting to acquire locks on upstream databases. This PR updates the offending function to avoid locking upstream databases and also updates associated tests to catch regression errors: the upstream database created for these tests was not explicitly set as an upstream (i.e. initialized with upstream=True) so it was not guarding against inappropriate accesses. 
--- lib/spack/spack/database.py | 52 ++++++++++++++++++-------------- lib/spack/spack/test/database.py | 40 +++++++++++++++--------- 2 files changed, 56 insertions(+), 36 deletions(-) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index 22fc266b5b6..c06d1ae5463 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -1118,7 +1118,27 @@ def activated_extensions_for(self, extendee_spec, extensions_layout=None): continue # TODO: conditional way to do this instead of catching exceptions - def get_by_hash_local(self, dag_hash, default=None, installed=any): + def _get_by_hash_local(self, dag_hash, default=None, installed=any): + # hash is a full hash and is in the data somewhere + if dag_hash in self._data: + rec = self._data[dag_hash] + if rec.install_type_matches(installed): + return [rec.spec] + else: + return default + + # check if hash is a prefix of some installed (or previously + # installed) spec. + matches = [record.spec for h, record in self._data.items() + if h.startswith(dag_hash) and + record.install_type_matches(installed)] + if matches: + return matches + + # nothing found + return default + + def get_by_hash_local(self, *args, **kwargs): """Look up a spec in *this DB* by DAG hash, or by a DAG hash prefix. Arguments: @@ -1142,24 +1162,7 @@ def get_by_hash_local(self, dag_hash, default=None, installed=any): """ with self.read_transaction(): - # hash is a full hash and is in the data somewhere - if dag_hash in self._data: - rec = self._data[dag_hash] - if rec.install_type_matches(installed): - return [rec.spec] - else: - return default - - # check if hash is a prefix of some installed (or previously - # installed) spec. 
- matches = [record.spec for h, record in self._data.items() - if h.startswith(dag_hash) and - record.install_type_matches(installed)] - if matches: - return matches - - # nothing found - return default + return self._get_by_hash_local(*args, **kwargs) def get_by_hash(self, dag_hash, default=None, installed=any): """Look up a spec by DAG hash, or by a DAG hash prefix. @@ -1184,9 +1187,14 @@ def get_by_hash(self, dag_hash, default=None, installed=any): (list): a list of specs matching the hash or hash prefix """ - search_path = [self] + self.upstream_dbs - for db in search_path: - spec = db.get_by_hash_local( + + spec = self.get_by_hash_local( + dag_hash, default=default, installed=installed) + if spec is not None: + return spec + + for upstream_db in self.upstream_dbs: + spec = upstream_db._get_by_hash_local( dag_hash, default=default, installed=installed) if spec is not None: return spec diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index 24a219a491a..a2b9677ec69 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -41,10 +41,11 @@ def test_store(tmpdir): @pytest.fixture() def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout): mock_db_root = str(tmpdir_factory.mktemp('mock_db_root')) - upstream_db = spack.database.Database(mock_db_root) + upstream_write_db = spack.database.Database(mock_db_root) + upstream_db = spack.database.Database(mock_db_root, is_upstream=True) # Generate initial DB file to avoid reindex - with open(upstream_db._index_path, 'w') as db_file: - upstream_db._write_to_file(db_file) + with open(upstream_write_db._index_path, 'w') as db_file: + upstream_write_db._write_to_file(db_file) upstream_layout = gen_mock_layout('/a/') downstream_db_root = str( @@ -55,13 +56,14 @@ def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout): downstream_db._write_to_file(db_file) downstream_layout = gen_mock_layout('/b/') - yield upstream_db, upstream_layout, downstream_db, 
downstream_layout + yield upstream_write_db, upstream_db, upstream_layout,\ + downstream_db, downstream_layout @pytest.mark.usefixtures('config') def test_installed_upstream(upstream_and_downstream_db): - upstream_db, upstream_layout, downstream_db, downstream_layout = ( - upstream_and_downstream_db) + upstream_write_db, upstream_db, upstream_layout,\ + downstream_db, downstream_layout = (upstream_and_downstream_db) default = ('build', 'link') x = MockPackage('x', [], []) @@ -75,7 +77,14 @@ def test_installed_upstream(upstream_and_downstream_db): spec.concretize() for dep in spec.traverse(root=False): - upstream_db.add(dep, upstream_layout) + upstream_write_db.add(dep, upstream_layout) + upstream_db._read() + + for dep in spec.traverse(root=False): + record = downstream_db.get_by_hash(dep.dag_hash()) + assert record is not None + with pytest.raises(spack.database.ForbiddenLockError): + record = upstream_db.get_by_hash(dep.dag_hash()) new_spec = spack.spec.Spec('w') new_spec.concretize() @@ -96,8 +105,8 @@ def test_installed_upstream(upstream_and_downstream_db): @pytest.mark.usefixtures('config') def test_removed_upstream_dep(upstream_and_downstream_db): - upstream_db, upstream_layout, downstream_db, downstream_layout = ( - upstream_and_downstream_db) + upstream_write_db, upstream_db, upstream_layout,\ + downstream_db, downstream_layout = (upstream_and_downstream_db) default = ('build', 'link') z = MockPackage('z', [], []) @@ -108,13 +117,15 @@ def test_removed_upstream_dep(upstream_and_downstream_db): spec = spack.spec.Spec('y') spec.concretize() - upstream_db.add(spec['z'], upstream_layout) + upstream_write_db.add(spec['z'], upstream_layout) + upstream_db._read() new_spec = spack.spec.Spec('y') new_spec.concretize() downstream_db.add(new_spec, downstream_layout) - upstream_db.remove(new_spec['z']) + upstream_write_db.remove(new_spec['z']) + upstream_db._read() new_downstream = spack.database.Database( downstream_db.root, upstream_dbs=[upstream_db]) @@ -129,8 
+140,8 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db): DB. When a package is recorded as installed in both, the results should refer to the downstream DB. """ - upstream_db, upstream_layout, downstream_db, downstream_layout = ( - upstream_and_downstream_db) + upstream_write_db, upstream_db, upstream_layout,\ + downstream_db, downstream_layout = (upstream_and_downstream_db) x = MockPackage('x', [], []) mock_repo = MockPackageMultiRepo([x]) @@ -141,7 +152,8 @@ def test_add_to_upstream_after_downstream(upstream_and_downstream_db): downstream_db.add(spec, downstream_layout) - upstream_db.add(spec, upstream_layout) + upstream_write_db.add(spec, upstream_layout) + upstream_db._read() upstream, record = downstream_db.query_by_spec_hash(spec.dag_hash()) # Even though the package is recorded as installed in the upstream DB, From 6ab2c3caa3437a47ddb3d2c96322fa091663cc25 Mon Sep 17 00:00:00 2001 From: Adam Moody Date: Mon, 27 Jan 2020 20:33:26 -0800 Subject: [PATCH 006/178] mpifileutils: add v0.10 (#14644) Signed-off-by: Adam Moody --- .../repos/builtin/packages/mpifileutils/package.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/mpifileutils/package.py b/var/spack/repos/builtin/packages/mpifileutils/package.py index 686174b1b0e..30fc9825054 100644 --- a/var/spack/repos/builtin/packages/mpifileutils/package.py +++ b/var/spack/repos/builtin/packages/mpifileutils/package.py @@ -21,13 +21,14 @@ class Mpifileutils(Package): git = "https://github.com/hpc/mpifileutils.git" version('develop', branch='master') + version('0.10', sha256='5a71a9acd9841c3c258fc0eaea942f18abcb40098714cc90462b57696c07e3c5') version('0.9.1', sha256='15a22450f86b15e7dc4730950b880fda3ef6f59ac82af0b268674d272aa61c69') - version('0.9', sha256='1b8250af01aae91c985ca5d61521bfaa4564e46efa15cee65cd0f82cf5a2bcfb') + version('0.9', sha256='1b8250af01aae91c985ca5d61521bfaa4564e46efa15cee65cd0f82cf5a2bcfb') 
conflicts('platform=darwin') depends_on('mpi') - depends_on('libcircle') + depends_on('libcircle@0.3:') # need precise version of dtcmp, since DTCMP_Segmented_exscan added # in v1.0.3 but renamed in v1.1.0 and later @@ -44,14 +45,14 @@ class Mpifileutils(Package): variant('lustre', default=False, description="Enable optimizations and features for Lustre") - variant('experimental', default=False, - description="Install experimental tools") - conflicts('+experimental', when='@:0.6') - variant('gpfs', default=False, description="Enable optimizations and features for GPFS") conflicts('+gpfs', when='@:0.8.1') + variant('experimental', default=False, + description="Install experimental tools") + conflicts('+experimental', when='@:0.6') + def cmake_args(self): args = std_cmake_args args.append('-DCMAKE_INSTALL_PREFIX=%s' % self.spec.prefix) From f58004e4369a97d514bc143284f423aeaead3964 Mon Sep 17 00:00:00 2001 From: Owen Solberg Date: Mon, 27 Jan 2020 18:49:53 -1000 Subject: [PATCH 007/178] fix spack env loads example (#14558) --- lib/spack/docs/environments.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst index 5c1089351b8..336d574bd7f 100644 --- a/lib/spack/docs/environments.rst +++ b/lib/spack/docs/environments.rst @@ -382,11 +382,12 @@ the Environment. Loading ^^^^^^^ -Once an environment has been installed, the following creates a load script for it: +Once an environment has been installed, the following creates a load +script for it: .. code-block:: console - $ spack env myenv loads -r + $ spack env loads -r This creates a file called ``loads`` in the environment directory. 
Sourcing that file in Bash will make the environment available to the From 3f5bed2e36640798c6d1d7097885e8411931f1a6 Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Mon, 27 Jan 2020 23:53:52 -0500 Subject: [PATCH 008/178] make the new 'spack load' faster (#14628) before, a 'time spack load singularity' 4.129u 0.346s 0:04.47 99.7% 0+0k 0+8io 0pf+0w after, a 'time spack load singularity' 0.844u 0.319s 0:01.16 99.1% 0+0k 0+16io 0pf+0w --- lib/spack/spack/cmd/load.py | 23 +++++++++++++---------- 1 file changed, 13 insertions(+), 10 deletions(-) diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py index 80c7263a7af..09f3fd31ee9 100644 --- a/lib/spack/spack/cmd/load.py +++ b/lib/spack/spack/cmd/load.py @@ -12,6 +12,7 @@ import spack.environment as ev import spack.util.environment import spack.user_environment as uenv +import spack.store description = "add package to the user environment" section = "user environment" @@ -63,15 +64,17 @@ def load(parser, args): tty.msg(*msg) return 1 - if 'dependencies' in args.things_to_load: - include_roots = 'package' in args.things_to_load - specs = [dep for spec in specs - for dep in spec.traverse(root=include_roots, order='post')] + with spack.store.db.read_transaction(): + if 'dependencies' in args.things_to_load: + include_roots = 'package' in args.things_to_load + specs = [dep for spec in specs + for dep in + spec.traverse(root=include_roots, order='post')] - env_mod = spack.util.environment.EnvironmentModifications() - for spec in specs: - env_mod.extend(uenv.environment_modifications_for_spec(spec)) - env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash()) - cmds = env_mod.shell_modifications(args.shell) + env_mod = spack.util.environment.EnvironmentModifications() + for spec in specs: + env_mod.extend(uenv.environment_modifications_for_spec(spec)) + env_mod.prepend_path(uenv.spack_loaded_hashes_var, spec.dag_hash()) + cmds = env_mod.shell_modifications(args.shell) - sys.stdout.write(cmds) + 
sys.stdout.write(cmds) From 12b0340d2fc110f21bf6691a3046405bbea3df64 Mon Sep 17 00:00:00 2001 From: Christoph Junghans Date: Tue, 28 Jan 2020 10:34:32 -0700 Subject: [PATCH 009/178] ninja: add v1.10.0 (#14647) --- var/spack/repos/builtin/packages/ninja/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/ninja/package.py b/var/spack/repos/builtin/packages/ninja/package.py index 48dbc01a67e..41e2a9d8d99 100644 --- a/var/spack/repos/builtin/packages/ninja/package.py +++ b/var/spack/repos/builtin/packages/ninja/package.py @@ -17,6 +17,7 @@ class Ninja(Package): git = "https://github.com/Kitware/ninja.git" version('kitware', branch='features-for-fortran') + version('1.10.0', sha256='3810318b08489435f8efc19c05525e80a993af5a55baa0dfeae0465a9d45f99f') version('1.9.0', sha256='5d7ec75828f8d3fd1a0c2f31b5b0cea780cdfe1031359228c428c1a48bfcd5b9') version('1.8.2', sha256='86b8700c3d0880c2b44c2ff67ce42774aaf8c28cbf57725cb881569288c1c6f4') version('1.7.2', sha256='2edda0a5421ace3cf428309211270772dd35a91af60c96f93f90df6bc41b16d9') From 67c6d99219f5a49c68baf9020880410ded85413a Mon Sep 17 00:00:00 2001 From: iarspider Date: Tue, 28 Jan 2020 21:15:29 +0100 Subject: [PATCH 010/178] Fix for #14148 (#14595) * Dirty hack to fix #14148 * A better way of checking if a package is taken from system * Update var/spack/repos/builtin/packages/qt/package.py Co-Authored-By: Adam J. Stewart * Update qt/package.py Co-authored-by: Adam J. 
Stewart --- .../repos/builtin/packages/qt/package.py | 48 +++++++++++-------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index f443c7800fd..1a463c50195 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -323,8 +323,8 @@ def common_config_args(self): '-release', '-confirm-license', '-openssl-linked', - '{0}'.format(openssl.libs.search_flags), - '{0}'.format(openssl.headers.include_flags), + openssl.libs.search_flags, + openssl.headers.include_flags, '-optimized-qmake', '-no-pch', ] @@ -351,7 +351,7 @@ def common_config_args(self): sqlite = self.spec['sqlite'] config_args.extend([ '-system-sqlite', - '-R', '{0}'.format(sqlite.prefix.lib), + '-R', sqlite.prefix.lib, ]) else: comps = ['db2', 'ibase', 'oci', 'tds', 'mysql', 'odbc', 'psql', @@ -369,19 +369,19 @@ def common_config_args(self): harfbuzz = self.spec['harfbuzz'] config_args.extend([ '-system-harfbuzz', - '{0}'.format(harfbuzz.libs.search_flags), - '{0}'.format(harfbuzz.headers.include_flags), + harfbuzz.libs.search_flags, + harfbuzz.headers.include_flags, '-system-pcre', - '{0}'.format(pcre.libs.search_flags), - '{0}'.format(pcre.headers.include_flags) + pcre.libs.search_flags, + pcre.headers.include_flags ]) if self.spec.satisfies('@5.7:'): dc = self.spec['double-conversion'] config_args.extend([ '-system-doubleconversion', - '{0}'.format(dc.libs.search_flags), - '{0}'.format(dc.headers.include_flags) + dc.libs.search_flags, + dc.headers.include_flags ]) if '@:5.7.1' in self.spec: @@ -389,19 +389,27 @@ def common_config_args(self): else: # FIXME: those could work for other versions png = self.spec['libpng'] + config_args.append('-system-libpng') + if not png.external: + config_args.extend([ + png.libs.search_flags, + png.headers.include_flags + ]) + jpeg = self.spec['jpeg'] + config_args.append('-system-libjpeg') + if not 
jpeg.external: + config_args.extend([ + jpeg.libs.search_flags, + jpeg.headers.include_flags, + ]) zlib = self.spec['zlib'] - config_args.extend([ - '-system-libpng', - '{0}'.format(png.libs.search_flags), - '{0}'.format(png.headers.include_flags), - '-system-libjpeg', - '{0}'.format(jpeg.libs.search_flags), - '{0}'.format(jpeg.headers.include_flags), - '-system-zlib', - '{0}'.format(zlib.libs.search_flags), - '{0}'.format(zlib.headers.include_flags) - ]) + config_args.append('-system-zlib') + if not zlib.external: + config_args.extend([ + zlib.libs.search_flags, + zlib.headers.include_flags + ]) if '@:5.7.0' in self.spec: config_args.extend([ From 52ab2421bb5aeba38ec47bbb88b75d21d8793387 Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Tue, 28 Jan 2020 12:49:26 -0800 Subject: [PATCH 011/178] Fix handling of filter_file exceptions (#14651) --- lib/spack/llnl/util/filesystem.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index d525884ee97..273840a424f 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -201,7 +201,6 @@ def groupid_to_group(x): output_file.writelines(input_file.readlines()) except BaseException: - os.remove(tmp_filename) # clean up the original file on failure. shutil.move(backup_filename, filename) raise From 48a12c8773295f95caa08a77bb660a7ed221995c Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Tue, 28 Jan 2020 14:57:06 -0600 Subject: [PATCH 012/178] Note about Intel compiler segfault with long paths (#14652) This PR adds a note about segfaults with the Intel compiler when the install paths are long and the dependencies many. 
--- lib/spack/docs/config_yaml.rst | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/lib/spack/docs/config_yaml.rst b/lib/spack/docs/config_yaml.rst index a64ab272823..865845b2743 100644 --- a/lib/spack/docs/config_yaml.rst +++ b/lib/spack/docs/config_yaml.rst @@ -30,11 +30,21 @@ Default is ``$spack/opt/spack``. ``install_hash_length`` and ``install_path_scheme`` --------------------------------------------------- -The default Spack installation path can be very long and can create -problems for scripts with hardcoded shebangs. There are two parameters -to help with that. Firstly, the ``install_hash_length`` parameter can -set the length of the hash in the installation path from 1 to 32. The -default path uses the full 32 characters. +The default Spack installation path can be very long and can create problems +for scripts with hardcoded shebangs. Additionally, when using the Intel +compiler, and if there is also a long list of dependencies, the compiler may +segfault. If you see the following: + + .. code-block:: console + + : internal error: ** The compiler has encountered an unexpected problem. + ** Segmentation violation signal raised. ** + Access violation or stack overflow. Please contact Intel Support for assistance. + +it may be because variables containing dependency specs may be too long. There +are two parameters to help with long path names. Firstly, the +``install_hash_length`` parameter can set the length of the hash in the +installation path from 1 to 32. The default path uses the full 32 characters. Secondly, it is also possible to modify the entire installation scheme. 
By default Spack uses From 16da648d03564871ade5fcce09dc45171b706b8c Mon Sep 17 00:00:00 2001 From: Ben Morgan Date: Tue, 28 Jan 2020 20:57:25 +0000 Subject: [PATCH 013/178] intel-tbb: Fix install names on Darwin (#14650) * intel-tbb: Fix install names on Darwin Intel-TBB's libraries on Darwin are installed with "@rpath" prefixed to their install names. This was found to cause issues building the root package on Darwin due to libtbb not being found when running some of the generated tools linking to it. Follow example from other packages with the same issue and fixup up install names for intel-tbb post install. * intel-tbb: fix flake8 errors --- var/spack/repos/builtin/packages/intel-tbb/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index d299f7f9fb5..2cbda4f5a42 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -7,6 +7,7 @@ import glob import inspect import platform +import sys class IntelTbb(Package): @@ -201,3 +202,9 @@ def install(self, spec, prefix): 'tbb_config_generator.cmake') with working_dir(join_path(self.stage.source_path, 'cmake')): inspect.getmodule(self).cmake(*cmake_args) + + @run_after('install') + def darwin_fix(self): + # Replace @rpath in ids with full path + if sys.platform == 'darwin': + fix_darwin_install_name(self.prefix.lib) From da091c0cf5eff1a9254c3a4988ed54f34f263cae Mon Sep 17 00:00:00 2001 From: noguchi-k <55966120+noguchi-k@users.noreply.github.com> Date: Wed, 29 Jan 2020 05:59:23 +0900 Subject: [PATCH 014/178] med: add a space between literal and identifier (#14646) --- .../builtin/packages/med/add_space.patch | 20 +++++++++++++++++++ .../repos/builtin/packages/med/package.py | 3 +++ 2 files changed, 23 insertions(+) create mode 100644 var/spack/repos/builtin/packages/med/add_space.patch diff --git 
a/var/spack/repos/builtin/packages/med/add_space.patch b/var/spack/repos/builtin/packages/med/add_space.patch new file mode 100644 index 00000000000..e7577425296 --- /dev/null +++ b/var/spack/repos/builtin/packages/med/add_space.patch @@ -0,0 +1,20 @@ +--- spack-src/tools/medimport/2.3.1/MED231champRefInfoEtRenMaabuilt.cxx.org 2020-01-28 16:02:49.656440054 +0900 ++++ spack-src/tools/medimport/2.3.1/MED231champRefInfoEtRenMaabuilt.cxx 2020-01-28 16:03:45.222261190 +0900 +@@ -95,7 +95,7 @@ + SSCRUTE(maai); goto ERROR; + } + if ( MAJ_231_232_chaine(maai,maaf) ) { +- fprintf(stdout," >>> Normalisation du nom de maillage par défaut [%s] associé au champ [%s] pour (n°dt,n°it) ("IFORMAT","IFORMAT")\n",maai,champ,numdt,numo); ++ fprintf(stdout," >>> Normalisation du nom de maillage par défaut [%s] associé au champ [%s] pour (n°dt,n°it) (" IFORMAT "," IFORMAT ")\n",maai,champ,numdt,numo); + ret = _MEDattrStringEcrire(datagroup2,MED_NOM_MAI,MED_TAILLE_NOM,maaf); + EXIT_IF(ret < 0,"Renommage du maillage en",maaf); + fprintf(stdout," >>> Normalisation du nom du maillage par défaut [%s] ... OK ... 
\n",maaf); +@@ -120,7 +120,7 @@ + + if ( MAJ_231_232_chaine(maa,maaf) ) { + +- fprintf(stdout," >>> Normalisation du nom de maillage [%s] associé au champ [%s] pour (n°dt,n°it) ("IFORMAT","IFORMAT")\n",maa,champ,numdt,numo); ++ fprintf(stdout," >>> Normalisation du nom de maillage [%s] associé au champ [%s] pour (n°dt,n°it) (" IFORMAT "," IFORMAT ")\n",maa,champ,numdt,numo); + /* on accede au maillage */ + strcpy(chemini,chemin); + strcat(chemini,maa); diff --git a/var/spack/repos/builtin/packages/med/package.py b/var/spack/repos/builtin/packages/med/package.py index 591a31e9233..3b416c02742 100644 --- a/var/spack/repos/builtin/packages/med/package.py +++ b/var/spack/repos/builtin/packages/med/package.py @@ -22,6 +22,9 @@ class Med(CMakePackage): depends_on('mpi') depends_on('hdf5@:1.8.19+mpi') + # C++11 requires a space between literal and identifier + patch('add_space.patch') + # FIXME This is minimal installation. def cmake_args(self): From 635fc62de0574cfd1a9ba5d697575f00a26cfb34 Mon Sep 17 00:00:00 2001 From: noguchi-k <55966120+noguchi-k@users.noreply.github.com> Date: Wed, 29 Jan 2020 05:59:59 +0900 Subject: [PATCH 015/178] powerapi: add a space between literal and identifier (#14645) --- .../builtin/packages/powerapi/add_space.patch | 186 ++++++++++++++++++ .../builtin/packages/powerapi/package.py | 3 + 2 files changed, 189 insertions(+) create mode 100644 var/spack/repos/builtin/packages/powerapi/add_space.patch diff --git a/var/spack/repos/builtin/packages/powerapi/add_space.patch b/var/spack/repos/builtin/packages/powerapi/add_space.patch new file mode 100644 index 00000000000..307abafb295 --- /dev/null +++ b/var/spack/repos/builtin/packages/powerapi/add_space.patch @@ -0,0 +1,186 @@ +diff -ur spack-src.org/src/pwr/deviceStat.cc spack-src/src/pwr/deviceStat.cc +--- spack-src.org/src/pwr/deviceStat.cc 2020-01-28 11:00:47.690108007 +0900 ++++ spack-src/src/pwr/deviceStat.cc 2020-01-28 11:04:02.630472034 +0900 +@@ -47,7 +47,7 @@ + + int 
DeviceStat::startObj( ) { + int retval = m_obj->attrStartLog( m_attrName ); +- DBGX("%s time=%"PRIu64" sec\n",objTypeToString(m_obj->type()),m_startTime); ++ DBGX("%s time=%" PRIu64 " sec\n",objTypeToString(m_obj->type()),m_startTime); + return retval; + } + +@@ -64,7 +64,7 @@ + return PWR_RET_SUCCESS; + } + int DeviceStat::stopObj( ) { +- DBGX("%s time=%"PRIu64" sec\n",objTypeToString(m_obj->type()),m_startTime); ++ DBGX("%s time=%" PRIu64 " sec\n",objTypeToString(m_obj->type()),m_startTime); + int retval = m_obj->attrStopLog( m_attrName ); + return retval; + } +@@ -85,7 +85,7 @@ + int DeviceStat::stop( ) { + m_isLogging = false; + m_stopTime = getTime(); +- DBGX("time=%"PRIu64" sec\n",m_stopTime); ++ DBGX("time=%" PRIu64 " sec\n",m_stopTime); + if ( m_obj ) { + return stopObj(); + } else { +@@ -135,7 +135,7 @@ + + statTimes->start = timeStamp; + statTimes->stop = timeStamp + nSamples * m_period * 1000000000 ; +- DBGX("actual: start=%lf stop=%lf count=%"PRIu32"\n", ++ DBGX("actual: start=%lf stop=%lf count=%" PRIu32 "\n", + (double) timeStamp/1000000000, + (double) statTimes->stop/1000000000, nSamples); + +@@ -146,7 +146,7 @@ + statTimes->instant = statTimes->start + pos * m_period * 1000000000; + } + +- DBGX("actual: start=%lf stop=%lf instant=%lf count=%"PRIu32"\n", ++ DBGX("actual: start=%lf stop=%lf instant=%lf count=%" PRIu32 "\n", + (double) timeStamp/1000000000, + (double) statTimes->stop/1000000000, + (double) statTimes->instant/1000000000,nSamples); +diff -ur spack-src.org/src/pwr/distRequest.cc spack-src/src/pwr/distRequest.cc +--- spack-src.org/src/pwr/distRequest.cc 2020-01-28 11:00:47.690108007 +0900 ++++ spack-src/src/pwr/distRequest.cc 2020-01-28 11:05:11.687685948 +0900 +@@ -56,7 +56,7 @@ + // can we use "this" + void DistRequest::getSamples( DistCommReq* req, CommGetSamplesRespEvent* ev ) + { +- DBGX("count %"PRIu32"\n",ev->count); ++ DBGX("count %" PRIu32 "\n",ev->count); + + #if 0 + // FIX ME +@@ -67,7 +67,7 @@ + for ( unsigned i = 0; i< 
ev->count; i++ ) { + ((uint64_t*)value[0])[i] = ev->data[i]; + } +- DBGX("start time %"PRIu64", samples %"PRIu32"n",*timeStamp[0], ev->count); ++ DBGX("start time %" PRIu64 ", samples %" PRIu32 "n",*timeStamp[0], ev->count); + *timeStamp[0] = ev->startTime; + m_commReqs.erase( req ); + } +diff -ur spack-src.org/tools/pwrdaemon/router/commCreateEvent.cc spack-src/tools/pwrdaemon/router/commCreateEvent.cc +--- spack-src.org/tools/pwrdaemon/router/commCreateEvent.cc 2020-01-28 11:00:47.730112186 +0900 ++++ spack-src/tools/pwrdaemon/router/commCreateEvent.cc 2020-01-28 11:12:35.644062899 +0900 +@@ -21,7 +21,7 @@ + bool RtrCommCreateEvent::process( EventGenerator* _rtr, EventChannel* ec ) { + Router& rtr = *static_cast(_rtr); + Router::Client* client = rtr.getClient( ec ); +- DBGX("id=%"PRIx64"\n",commID); ++ DBGX("id=%" PRIx64 "\n",commID); + + client->addComm( commID, this ); + +diff -ur spack-src.org/tools/pwrdaemon/router/commGetSamplesEvent.h spack-src/tools/pwrdaemon/router/commGetSamplesEvent.h +--- spack-src.org/tools/pwrdaemon/router/commGetSamplesEvent.h 2020-01-28 11:00:47.730112186 +0900 ++++ spack-src/tools/pwrdaemon/router/commGetSamplesEvent.h 2020-01-28 11:07:20.211111872 +0900 +@@ -45,7 +45,7 @@ + info->resp->id = id; + id = (EventId) info; + +- DBGX("commID=%" PRIx64 " eventId=%"PRIx64" new eventId=%p\n", ++ DBGX("commID=%" PRIx64 " eventId=%" PRIx64 " new eventId=%p\n", + commID, id, info ); + + for ( unsigned int i=0; i < commList.size(); i++ ) { +diff -ur spack-src.org/tools/pwrdaemon/router/commLogEvents.h spack-src/tools/pwrdaemon/router/commLogEvents.h +--- spack-src.org/tools/pwrdaemon/router/commLogEvents.h 2020-01-28 11:00:47.730112186 +0900 ++++ spack-src/tools/pwrdaemon/router/commLogEvents.h 2020-01-28 11:09:28.254487649 +0900 +@@ -46,7 +46,7 @@ + info->resp->id = id; + id = (EventId) info; + +- DBGX("commID=%"PRIu64" eventId=%" PRIx64 " new eventId=%p\n", ++ DBGX("commID=%" PRIu64 " eventId=%" PRIx64 " new eventId=%p\n", + commID, id, info 
); + + for ( unsigned int i=0; i < commList.size(); i++ ) { +diff -ur spack-src.org/tools/pwrdaemon/router/commReqEvent.h spack-src/tools/pwrdaemon/router/commReqEvent.h +--- spack-src.org/tools/pwrdaemon/router/commReqEvent.h 2020-01-28 11:00:47.730112186 +0900 ++++ spack-src/tools/pwrdaemon/router/commReqEvent.h 2020-01-28 11:09:59.347735739 +0900 +@@ -35,7 +35,7 @@ + + CommReqInfo* info = new CommReqInfo; + +- DBGX("commID=%"PRIx64" eventId=%"PRIx64" new eventId=%p\n", ++ DBGX("commID=%" PRIx64 " eventId=%" PRIx64 " new eventId=%p\n", + commID, id, info ); + + info->src = ec; +diff -ur spack-src.org/tools/pwrdaemon/router/commRespEvent.h spack-src/tools/pwrdaemon/router/commRespEvent.h +--- spack-src.org/tools/pwrdaemon/router/commRespEvent.h 2020-01-28 11:00:47.730112186 +0900 ++++ spack-src/tools/pwrdaemon/router/commRespEvent.h 2020-01-28 11:10:43.862385866 +0900 +@@ -33,7 +33,7 @@ + + bool process( EventGenerator* _rtr, EventChannel* ) { + +- DBGX("id=%p status=%"PRIi32" grpIndex=%" PRIu64 "\n", ++ DBGX("id=%p status=%" PRIi32 " grpIndex=%" PRIu64 "\n", + (void*)id, status, grpIndex ); + + CommReqInfo* info = (CommReqInfo*) id; +@@ -44,7 +44,7 @@ + + CommRespEvent* resp = static_cast(info->resp); + if ( Get == info->ev->op ) { +- DBGX("index %"PRIu64" is ready, num attrs %zu\n", ++ DBGX("index %" PRIu64 " is ready, num attrs %zu\n", + grpIndex, info->valueOp.size() ); + resp->timeStamp[grpIndex].resize( info->valueOp.size() ); + resp->value[grpIndex].resize( info->valueOp.size() ); +diff -ur spack-src.org/tools/pwrdaemon/router/rtrRouterEvent.h spack-src/tools/pwrdaemon/router/rtrRouterEvent.h +--- spack-src.org/tools/pwrdaemon/router/rtrRouterEvent.h 2020-01-28 11:00:47.730112186 +0900 ++++ spack-src/tools/pwrdaemon/router/rtrRouterEvent.h 2020-01-28 11:11:05.824680110 +0900 +@@ -25,7 +25,7 @@ + + bool process( EventGenerator* _rtr, EventChannel* ec ) { + Router& rtr = *static_cast(_rtr); +- DBGX("dest=%"PRIx64"\n",dest); ++ DBGX("dest=%" PRIx64 "\n",dest); 
+ rtr.sendEvent( dest, this ); + + return true; +diff -ur spack-src.org/tools/pwrdaemon/server/commCreateEvent.h spack-src/tools/pwrdaemon/server/commCreateEvent.h +--- spack-src.org/tools/pwrdaemon/server/commCreateEvent.h 2020-01-28 11:00:47.740113231 +0900 ++++ spack-src/tools/pwrdaemon/server/commCreateEvent.h 2020-01-28 11:13:00.426651760 +0900 +@@ -21,7 +21,7 @@ + bool process( EventGenerator* gen, EventChannel* ) { + Server& info = *static_cast(gen); + +- DBGX("commID=%"PRIx64"\n", commID); ++ DBGX("commID=%" PRIx64 "\n", commID); + + CommInfo& cInfo = info.m_commMap[commID]; + cInfo.objects.resize( members.size() ); +diff -ur spack-src.org/tools/pwrdaemon/server/commDestroyEvent.h spack-src/tools/pwrdaemon/server/commDestroyEvent.h +--- spack-src.org/tools/pwrdaemon/server/commDestroyEvent.h 2020-01-28 11:00:47.740113231 +0900 ++++ spack-src/tools/pwrdaemon/server/commDestroyEvent.h 2020-01-28 11:11:42.438504895 +0900 +@@ -24,7 +24,7 @@ + bool process( EventGenerator* gen, EventChannel* ) { + Server& info = *static_cast(gen); + +- DBGX("commID=%"PRIx64"\n", commID); ++ DBGX("commID=%" PRIx64 "\n", commID); + + // We have a bunch of PWR_Obj hanging off of the comm. + // How/should we clean them up? 
+diff -ur spack-src.org/tools/pwrdaemon/server/commReqEvent.h spack-src/tools/pwrdaemon/server/commReqEvent.h +--- spack-src.org/tools/pwrdaemon/server/commReqEvent.h 2020-01-28 11:00:47.740113231 +0900 ++++ spack-src/tools/pwrdaemon/server/commReqEvent.h 2020-01-28 11:12:06.881058234 +0900 +@@ -31,7 +31,7 @@ + + PWR_Obj obj = m_info->m_commMap[commID].objects[0]; + +- DBGX("commID=%"PRIx64" grpIndex=%"PRIu64"\n",commID, grpIndex); ++ DBGX("commID=%" PRIx64 " grpIndex=%" PRIu64 "\n",commID, grpIndex); + char name[100]; + PWR_ObjGetName(obj,name,100); + diff --git a/var/spack/repos/builtin/packages/powerapi/package.py b/var/spack/repos/builtin/packages/powerapi/package.py index 929bfc45157..ccb8ffe780f 100644 --- a/var/spack/repos/builtin/packages/powerapi/package.py +++ b/var/spack/repos/builtin/packages/powerapi/package.py @@ -26,6 +26,9 @@ class Powerapi(AutotoolsPackage): depends_on('hwloc', when='+hwloc') depends_on('mpi', when='+mpi') + # C++11 requires a space between literal and identifier. + patch('add_space.patch') + def autoreconf(self, spec, prefix): bash = which('bash') bash('./autogen.sh') From 2b6106524adaef29917edb033b0734411d8e0d40 Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Tue, 28 Jan 2020 13:01:15 -0800 Subject: [PATCH 016/178] openPMD-api: With ADIOS2 by Default (#14643) Replace the deprecated ADIOS1 backend default with ADIOS2 default. Disable sz since we do not need it and it conflicts with supported version ranges between ADIOS2 and ADIOS1 if someone enables both. 
--- .../repos/builtin/packages/openpmd-api/package.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/openpmd-api/package.py b/var/spack/repos/builtin/packages/openpmd-api/package.py index 31dfe541c1d..f05caa68f5d 100644 --- a/var/spack/repos/builtin/packages/openpmd-api/package.py +++ b/var/spack/repos/builtin/packages/openpmd-api/package.py @@ -26,9 +26,9 @@ class OpenpmdApi(CMakePackage): description='Enable parallel I/O') variant('hdf5', default=True, description='Enable HDF5 support') - variant('adios1', default=True, + variant('adios1', default=False, description='Enable ADIOS1 support') - variant('adios2', default=False, + variant('adios2', default=True, description='Enable ADIOS2 support') variant('python', default=False, description='Enable Python bindings') @@ -40,9 +40,9 @@ class OpenpmdApi(CMakePackage): depends_on('hdf5@1.8.13:', when='+hdf5') depends_on('hdf5@1.8.13: ~mpi', when='~mpi +hdf5') depends_on('hdf5@1.8.13: +mpi', when='+mpi +hdf5') - depends_on('adios@1.13.1:', when='+adios1') - depends_on('adios@1.13.1: ~mpi', when='~mpi +adios1') - depends_on('adios@1.13.1: +mpi', when='+mpi +adios1') + depends_on('adios@1.13.1: ~sz', when='+adios1') + depends_on('adios@1.13.1: ~mpi ~sz', when='~mpi +adios1') + depends_on('adios@1.13.1: +mpi ~sz', when='+mpi +adios1') depends_on('adios2@2.5.0:', when='+adios2') depends_on('adios2@2.5.0: ~mpi', when='~mpi +adios2') depends_on('adios2@2.5.0: +mpi', when='+mpi +adios2') From f17ce36da2a7baafbab82633b511cfc2ce81408c Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Tue, 28 Jan 2020 13:01:50 -0800 Subject: [PATCH 017/178] Fixed path for CEREAL and Protobuf (#14641) --- var/spack/repos/builtin/packages/lbann/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index 7d4615a65a2..ca7d03703c3 100644 --- 
a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -159,7 +159,10 @@ def cmake_args(self): '-DLBANN_WITH_TBINF=OFF', '-DLBANN_WITH_VTUNE:BOOL=%s' % ('+vtune' in spec), '-DLBANN_DATATYPE={0}'.format(spec.variants['dtype'].value), - '-DLBANN_VERBOSE=0']) + '-DLBANN_VERBOSE=0', + '-DCEREAL_DIR={0}'.format(spec['cereal'].prefix), + # protobuf is included by py-protobuf+cpp + '-DProtobuf_DIR={0}'.format(spec['protobuf'].prefix)]) if self.spec.satisfies('@:0.90') or self.spec.satisfies('@0.95:'): args.extend([ From 94def872ee41ebfeb0736745d031b64304dc2983 Mon Sep 17 00:00:00 2001 From: Matthias Diener Date: Tue, 28 Jan 2020 15:02:07 -0600 Subject: [PATCH 018/178] Moreutils: add new package (#14653) * moreutils: add new package * fix flake8 --- .../builtin/packages/moreutils/package.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 var/spack/repos/builtin/packages/moreutils/package.py diff --git a/var/spack/repos/builtin/packages/moreutils/package.py b/var/spack/repos/builtin/packages/moreutils/package.py new file mode 100644 index 00000000000..96e081c92ad --- /dev/null +++ b/var/spack/repos/builtin/packages/moreutils/package.py @@ -0,0 +1,30 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class Moreutils(MakefilePackage): + """Additional Unix utilities. 
This is a growing collection of the Unix + tools that nobody thought to write long ago, when Unix was young.""" + + homepage = "https://joeyh.name/code/moreutils" + url = "https://deb.debian.org/debian/pool/main/m/moreutils/moreutils_0.63.orig.tar.xz" + + maintainers = ['matthiasdiener'] + + version('0.63', sha256='01f0b331e07e62c70d58c2dabbb68f5c4ddae4ee6f2d8f070fd1e316108af72c') + + depends_on('perl', type='build') + depends_on('docbook-xsl', type='build') + depends_on('libxml2', type='build') + + def edit(self, spec, prefix): + isutf8_makefile = FileFilter('is_utf8/Makefile') + isutf8_makefile.filter('CC = .*', '') + + env['DOCBOOKXSL'] = spec['docbook-xsl'].prefix + env['PREFIX'] = self.prefix From 0605fc155772d59c026a084e5dace6b7de360f0e Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Tue, 28 Jan 2020 15:03:13 -0600 Subject: [PATCH 019/178] Build graphite2 with Intel compiler (#14636) This PR sets the definition of REGPARM when building with the Intel compiler. --- var/spack/repos/builtin/packages/graphite2/package.py | 2 ++ .../repos/builtin/packages/graphite2/regparm.patch | 11 +++++++++++ 2 files changed, 13 insertions(+) create mode 100644 var/spack/repos/builtin/packages/graphite2/regparm.patch diff --git a/var/spack/repos/builtin/packages/graphite2/package.py b/var/spack/repos/builtin/packages/graphite2/package.py index 173eb102421..544f3a53d35 100644 --- a/var/spack/repos/builtin/packages/graphite2/package.py +++ b/var/spack/repos/builtin/packages/graphite2/package.py @@ -16,3 +16,5 @@ class Graphite2(CMakePackage): url = "https://github.com/silnrsi/graphite/releases/download/1.3.13/graphite2-1.3.13.tgz" version('1.3.13', sha256='dd63e169b0d3cf954b397c122551ab9343e0696fb2045e1b326db0202d875f06') + + patch('regparm.patch') diff --git a/var/spack/repos/builtin/packages/graphite2/regparm.patch b/var/spack/repos/builtin/packages/graphite2/regparm.patch new file mode 100644 index 00000000000..0f6ca1ec482 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/graphite2/regparm.patch @@ -0,0 +1,11 @@ +--- a/src/inc/Machine.h 2018-12-20 00:28:50.000000000 -0600 ++++ b/src/inc/Machine.h 2020-01-26 19:15:29.965965418 -0600 +@@ -46,7 +46,7 @@ + #endif + #else + #define HOT __attribute__((hot)) +-#if defined(__x86_64) ++#if defined(__x86_64) && !defined(__INTEL_COMPILER) + #define REGPARM(n) __attribute__((hot, regparm(n))) + #else + #define REGPARM(n) From 492b600d29b12edc0ad654414887484e51492cf4 Mon Sep 17 00:00:00 2001 From: t-karatsu <49965247+t-karatsu@users.noreply.github.com> Date: Wed, 29 Jan 2020 06:06:37 +0900 Subject: [PATCH 020/178] =?UTF-8?q?diffutils:=20Changed=20the=20handling?= =?UTF-8?q?=20of=20undeclared=20functions=20from=20warning=20=E2=80=A6=20(?= =?UTF-8?q?#14593)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * diffutils: Changed the handling of undeclared functions from warning to error. * diffutils: Change the handling of warnings or error * Delete '-Werror=implicit-function-declaration' * Add '-Qunused-arguments' --- var/spack/repos/builtin/packages/diffutils/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/diffutils/package.py b/var/spack/repos/builtin/packages/diffutils/package.py index b87c9174d9c..c8f2dca83f3 100644 --- a/var/spack/repos/builtin/packages/diffutils/package.py +++ b/var/spack/repos/builtin/packages/diffutils/package.py @@ -19,3 +19,8 @@ class Diffutils(AutotoolsPackage, GNUMirrorPackage): build_directory = 'spack-build' depends_on('libiconv') + + def setup_build_environment(self, env): + if self.spec.satisfies('%fj'): + env.append_flags('CFLAGS', + '-Qunused-arguments') From ea0a549db380c0e0dee322a9f980dae02b0e05e2 Mon Sep 17 00:00:00 2001 From: noguchi-k <55966120+noguchi-k@users.noreply.github.com> Date: Wed, 29 Jan 2020 06:08:35 +0900 Subject: [PATCH 021/178] pcma: set return value and change return type of function (#14579) --- 
.../packages/pcma/fix_return_type_err.patch | 42 +++++++++++++++++++ .../repos/builtin/packages/pcma/package.py | 3 ++ 2 files changed, 45 insertions(+) create mode 100644 var/spack/repos/builtin/packages/pcma/fix_return_type_err.patch diff --git a/var/spack/repos/builtin/packages/pcma/fix_return_type_err.patch b/var/spack/repos/builtin/packages/pcma/fix_return_type_err.patch new file mode 100644 index 00000000000..1b34a360138 --- /dev/null +++ b/var/spack/repos/builtin/packages/pcma/fix_return_type_err.patch @@ -0,0 +1,42 @@ +diff -ur spack-src.org/alcomp2.c spack-src/alcomp2.c +--- spack-src.org/alcomp2.c 2020-01-21 15:13:59.483665899 +0900 ++++ spack-src/alcomp2.c 2020-01-21 15:15:25.702732353 +0900 +@@ -155,7 +155,7 @@ + int *ScoreOverColumn (int colScore, int flag1, int flag2, int flag3, int flag4, int flag5, int flag6, int *column_Score); + + void *ReadRef (char *inputfile); +-int CompareAlnVsReferenceAln (int *apos1, int *apos2, int *aposref1, int *aposref2, int start_ref1, int start_ref2, int end_ref1, int end_ref2 /* , int coverage1, int coverage2, int accuracy1, int accuracy2*/ ); ++void CompareAlnVsReferenceAln (int *apos1, int *apos2, int *aposref1, int *aposref2, int start_ref1, int start_ref2, int end_ref1, int end_ref2 /* , int coverage1, int coverage2, int accuracy1, int accuracy2*/ ); + + double p_dayhoff[]={0, 0.0143859, 0.0384319, 0.0352742, 0.0195027, 0.086209, 0.048466, 0.0708956, 0.0866279, 0.0193078, + 0.0832518, 0.0457631, 0.0610127, 0.0695179, 0.0390894, 0.0367281, 0.0570451, 0.0580589, 0.0244313, 0.043972, 0.0620286}; +@@ -1738,7 +1738,7 @@ + int **ali; + readali(filename); + +-if (flag_errread==1) return; ++if (flag_errread==1) return NULL; + + ali=ali_char2int(aseq,start_num,start_seq); + return ali; +@@ -3247,7 +3247,7 @@ + reflen_nogp = pos; + } + +-int CompareAlnVsReferenceAln (int *apos1, int *apos2, int *aposref1, int *aposref2, int start_ref1, int start_ref2, int end_ref1, int end_ref2 /*, int coverage1, int coverage2, int 
accuracy1, int accuracy2 */) ++void CompareAlnVsReferenceAln (int *apos1, int *apos2, int *aposref1, int *aposref2, int start_ref1, int start_ref2, int end_ref1, int end_ref2 /*, int coverage1, int coverage2, int accuracy1, int accuracy2 */) + { + int i,j,k; + int start_ali1, start_ali2, end_ali1, end_ali2; +diff -ur spack-src.org/interface.c spack-src/interface.c +--- spack-src.org/interface.c 2020-01-21 15:13:59.483665899 +0900 ++++ spack-src/interface.c 2020-01-21 15:14:23.466183088 +0900 +@@ -202,7 +202,7 @@ + match[i] = 0; + } + +- if(args[0]==NULL) return; ++ if(args[0]==NULL) return 0; + + params[0]=(char *)ckalloc((strlen(args[0])+1)*sizeof(char)); + if (args[0][0]!=COMMANDSEP) diff --git a/var/spack/repos/builtin/packages/pcma/package.py b/var/spack/repos/builtin/packages/pcma/package.py index 30c4eb7c29e..efde640c07e 100644 --- a/var/spack/repos/builtin/packages/pcma/package.py +++ b/var/spack/repos/builtin/packages/pcma/package.py @@ -22,3 +22,6 @@ def edit(self, spec, prefix): def install(self, spec, prefix): mkdirp(prefix.bin) install('pcma', prefix.bin) + + # set return value and change return type of function [-Wreturn-type] + patch('fix_return_type_err.patch') From 2b0b340aabc77dd24c2c3158dec979a6352130f4 Mon Sep 17 00:00:00 2001 From: darmac Date: Wed, 29 Jan 2020 05:12:40 +0800 Subject: [PATCH 022/178] racon: support aarch64 and fix install error (#14529) * racon: support aarch64 and fix install error * add aarch64 patch for racon --- .../builtin/packages/racon/aarch64.patch | 2806 +++++++++++++++++ .../repos/builtin/packages/racon/package.py | 6 +- 2 files changed, 2808 insertions(+), 4 deletions(-) create mode 100644 var/spack/repos/builtin/packages/racon/aarch64.patch diff --git a/var/spack/repos/builtin/packages/racon/aarch64.patch b/var/spack/repos/builtin/packages/racon/aarch64.patch new file mode 100644 index 00000000000..332be1179d0 --- /dev/null +++ b/var/spack/repos/builtin/packages/racon/aarch64.patch @@ -0,0 +1,2806 @@ +diff --git 
a/vendor/spoa/include/arch/aarch64/sse2neon.h b/vendor/spoa/include/arch/aarch64/sse2neon.h +new file mode 100644 +index 0000000..1477ae1 +--- /dev/null ++++ b/vendor/spoa/include/arch/aarch64/sse2neon.h +@@ -0,0 +1,2783 @@ ++#ifndef SSE2NEON_H ++#define SSE2NEON_H ++ ++// This header file provides a simple API translation layer ++// between SSE intrinsics to their corresponding Arm/Aarch64 NEON versions ++// ++// This header file does not yet translate all of the SSE intrinsics. ++// ++// Contributors to this work are: ++// John W. Ratcliff ++// Brandon Rowlett ++// Ken Fast ++// Eric van Beurden ++// Alexander Potylitsin ++// Hasindu Gamaarachchi ++// Jim Huang ++// Mark Cheng ++// Malcolm James MacLeod ++ ++/* ++ * The MIT license: ++ * ++ * Permission is hereby granted, free of charge, to any person obtaining a copy ++ * of this software and associated documentation files (the "Software"), to deal ++ * in the Software without restriction, including without limitation the rights ++ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell ++ * copies of the Software, and to permit persons to whom the Software is ++ * furnished to do so, subject to the following conditions: ++ * ++ * The above copyright notice and this permission notice shall be included in ++ * all copies or substantial portions of the Software. ++ * ++ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR ++ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, ++ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE ++ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER ++ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, ++ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE ++ * SOFTWARE. 
++ */ ++ ++#if defined(__GNUC__) || defined(__clang__) ++ ++#pragma push_macro("FORCE_INLINE") ++#pragma push_macro("ALIGN_STRUCT") ++#define FORCE_INLINE static inline __attribute__((always_inline)) ++#define ALIGN_STRUCT(x) __attribute__((aligned(x))) ++ ++#else ++ ++#error "Macro name collisions may happens with unknown compiler" ++#ifdef FORCE_INLINE ++#undef FORCE_INLINE ++#endif ++#define FORCE_INLINE static inline ++#ifndef ALIGN_STRUCT ++#define ALIGN_STRUCT(x) __declspec(align(x)) ++#endif ++ ++#endif ++ ++#include ++ ++#include "arm_neon.h" ++ ++/** ++ * MACRO for shuffle parameter for _mm_shuffle_ps(). ++ * Argument fp3 is a digit[0123] that represents the fp from argument "b" ++ * of mm_shuffle_ps that will be placed in fp3 of result. fp2 is the same ++ * for fp2 in result. fp1 is a digit[0123] that represents the fp from ++ * argument "a" of mm_shuffle_ps that will be places in fp1 of result. ++ * fp0 is the same for fp0 of result. ++ */ ++#define _MM_SHUFFLE(fp3, fp2, fp1, fp0) \ ++ (((fp3) << 6) | ((fp2) << 4) | ((fp1) << 2) | ((fp0))) ++ ++/* indicate immediate constant argument in a given range */ ++#define __constrange(a, b) const ++ ++typedef float32x2_t __m64; ++typedef float32x4_t __m128; ++typedef int32x4_t __m128i; ++ ++// ****************************************** ++// type-safe casting between types ++// ****************************************** ++ ++#define vreinterpretq_m128_f16(x) vreinterpretq_f32_f16(x) ++#define vreinterpretq_m128_f32(x) (x) ++#define vreinterpretq_m128_f64(x) vreinterpretq_f32_f64(x) ++ ++#define vreinterpretq_m128_u8(x) vreinterpretq_f32_u8(x) ++#define vreinterpretq_m128_u16(x) vreinterpretq_f32_u16(x) ++#define vreinterpretq_m128_u32(x) vreinterpretq_f32_u32(x) ++#define vreinterpretq_m128_u64(x) vreinterpretq_f32_u64(x) ++ ++#define vreinterpretq_m128_s8(x) vreinterpretq_f32_s8(x) ++#define vreinterpretq_m128_s16(x) vreinterpretq_f32_s16(x) ++#define vreinterpretq_m128_s32(x) vreinterpretq_f32_s32(x) ++#define 
vreinterpretq_m128_s64(x) vreinterpretq_f32_s64(x) ++ ++#define vreinterpretq_f16_m128(x) vreinterpretq_f16_f32(x) ++#define vreinterpretq_f32_m128(x) (x) ++#define vreinterpretq_f64_m128(x) vreinterpretq_f64_f32(x) ++ ++#define vreinterpretq_u8_m128(x) vreinterpretq_u8_f32(x) ++#define vreinterpretq_u16_m128(x) vreinterpretq_u16_f32(x) ++#define vreinterpretq_u32_m128(x) vreinterpretq_u32_f32(x) ++#define vreinterpretq_u64_m128(x) vreinterpretq_u64_f32(x) ++ ++#define vreinterpretq_s8_m128(x) vreinterpretq_s8_f32(x) ++#define vreinterpretq_s16_m128(x) vreinterpretq_s16_f32(x) ++#define vreinterpretq_s32_m128(x) vreinterpretq_s32_f32(x) ++#define vreinterpretq_s64_m128(x) vreinterpretq_s64_f32(x) ++ ++#define vreinterpretq_m128i_s8(x) vreinterpretq_s32_s8(x) ++#define vreinterpretq_m128i_s16(x) vreinterpretq_s32_s16(x) ++#define vreinterpretq_m128i_s32(x) (x) ++#define vreinterpretq_m128i_s64(x) vreinterpretq_s32_s64(x) ++ ++#define vreinterpretq_m128i_u8(x) vreinterpretq_s32_u8(x) ++#define vreinterpretq_m128i_u16(x) vreinterpretq_s32_u16(x) ++#define vreinterpretq_m128i_u32(x) vreinterpretq_s32_u32(x) ++#define vreinterpretq_m128i_u64(x) vreinterpretq_s32_u64(x) ++ ++#define vreinterpretq_s8_m128i(x) vreinterpretq_s8_s32(x) ++#define vreinterpretq_s16_m128i(x) vreinterpretq_s16_s32(x) ++#define vreinterpretq_s32_m128i(x) (x) ++#define vreinterpretq_s64_m128i(x) vreinterpretq_s64_s32(x) ++ ++#define vreinterpretq_u8_m128i(x) vreinterpretq_u8_s32(x) ++#define vreinterpretq_u16_m128i(x) vreinterpretq_u16_s32(x) ++#define vreinterpretq_u32_m128i(x) vreinterpretq_u32_s32(x) ++#define vreinterpretq_u64_m128i(x) vreinterpretq_u64_s32(x) ++ ++// A struct is defined in this header file called 'SIMDVec' which can be used ++// by applications which attempt to access the contents of an _m128 struct ++// directly. 
It is important to note that accessing the __m128 struct directly ++// is bad coding practice by Microsoft: @see: ++// https://msdn.microsoft.com/en-us/library/ayeb3ayc.aspx ++// ++// However, some legacy source code may try to access the contents of an __m128 ++// struct directly so the developer can use the SIMDVec as an alias for it. Any ++// casting must be done manually by the developer, as you cannot cast or ++// otherwise alias the base NEON data type for intrinsic operations. ++// ++// union intended to allow direct access to an __m128 variable using the names ++// that the MSVC compiler provides. This union should really only be used when ++// trying to access the members of the vector as integer values. GCC/clang ++// allow native access to the float members through a simple array access ++// operator (in C since 4.6, in C++ since 4.8). ++// ++// Ideally direct accesses to SIMD vectors should not be used since it can cause ++// a performance hit. If it really is needed however, the original __m128 ++// variable can be aliased with a pointer to this union and used to access ++// individual components. The use of this union should be hidden behind a macro ++// that is used throughout the codebase to access the members instead of always ++// declaring this type of variable. ++typedef union ALIGN_STRUCT(16) SIMDVec { ++ float ++ m128_f32[4]; // as floats - do not to use this. Added for convenience. ++ int8_t m128_i8[16]; // as signed 8-bit integers. ++ int16_t m128_i16[8]; // as signed 16-bit integers. ++ int32_t m128_i32[4]; // as signed 32-bit integers. ++ int64_t m128_i64[2]; // as signed 64-bit integers. ++ uint8_t m128_u8[16]; // as unsigned 8-bit integers. ++ uint16_t m128_u16[8]; // as unsigned 16-bit integers. ++ uint32_t m128_u32[4]; // as unsigned 32-bit integers. ++ uint64_t m128_u64[2]; // as unsigned 64-bit integers. 
++} SIMDVec; ++ ++// casting using SIMDVec ++#define vreinterpretq_nth_u64_m128i(x, n) (((SIMDVec *) &x)->m128_u64[n]) ++#define vreinterpretq_nth_u32_m128i(x, n) (((SIMDVec *) &x)->m128_u32[n]) ++ ++ ++// ****************************************** ++// Backwards compatibility for compilers with lack of specific type support ++// ****************************************** ++ ++// Older gcc does not define vld1q_u8_x4 type ++#if defined(__GNUC__) && !defined(__clang__) ++#if __GNUC__ < 9 || (__GNUC__ == 9 && (__GNUC_MINOR__ <= 2)) ++FORCE_INLINE uint8x16x4_t vld1q_u8_x4(const uint8_t *p) ++{ ++ uint8x16x4_t ret; ++ ret.val[0] = vld1q_u8(p + 0); ++ ret.val[1] = vld1q_u8(p + 16); ++ ret.val[2] = vld1q_u8(p + 32); ++ ret.val[3] = vld1q_u8(p + 48); ++ return ret; ++} ++#endif ++#endif ++ ++ ++// ****************************************** ++// Set/get methods ++// ****************************************** ++ ++// Loads one cache line of data from address p to a location closer to the ++// processor. https://msdn.microsoft.com/en-us/library/84szxsww(v=vs.100).aspx ++FORCE_INLINE void _mm_prefetch(const void *p, int i) ++{ ++ __builtin_prefetch(p); ++} ++ ++// extracts the lower order floating point value from the parameter : ++// https://msdn.microsoft.com/en-us/library/bb514059%28v=vs.120%29.aspx?f=255&MSPPError=-2147217396 ++FORCE_INLINE float _mm_cvtss_f32(__m128 a) ++{ ++ return vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); ++} ++ ++// Sets the 128-bit value to zero ++// https://msdn.microsoft.com/en-us/library/vstudio/ys7dw0kh(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_setzero_si128(void) ++{ ++ return vreinterpretq_m128i_s32(vdupq_n_s32(0)); ++} ++ ++// Clears the four single-precision, floating-point values. ++// https://msdn.microsoft.com/en-us/library/vstudio/tk1t2tbz(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_setzero_ps(void) ++{ ++ return vreinterpretq_m128_f32(vdupq_n_f32(0)); ++} ++ ++// Sets the four single-precision, floating-point values to w. 
++// ++// r0 := r1 := r2 := r3 := w ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_set1_ps(float _w) ++{ ++ return vreinterpretq_m128_f32(vdupq_n_f32(_w)); ++} ++ ++// Sets the four single-precision, floating-point values to w. ++// https://msdn.microsoft.com/en-us/library/vstudio/2x1se8ha(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_set_ps1(float _w) ++{ ++ return vreinterpretq_m128_f32(vdupq_n_f32(_w)); ++} ++ ++// Sets the four single-precision, floating-point values to the four inputs. ++// https://msdn.microsoft.com/en-us/library/vstudio/afh0zf75(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_set_ps(float w, float z, float y, float x) ++{ ++ float __attribute__((aligned(16))) data[4] = {x, y, z, w}; ++ return vreinterpretq_m128_f32(vld1q_f32(data)); ++} ++ ++// Sets the four single-precision, floating-point values to the four inputs in ++// reverse order. ++// https://msdn.microsoft.com/en-us/library/vstudio/d2172ct3(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_setr_ps(float w, float z, float y, float x) ++{ ++ float __attribute__((aligned(16))) data[4] = {w, z, y, x}; ++ return vreinterpretq_m128_f32(vld1q_f32(data)); ++} ++ ++// Sets the 8 signed 16-bit integer values in reverse order. ++// ++// Return Value ++// r0 := w0 ++// r1 := w1 ++// ... 
++// r7 := w7 ++FORCE_INLINE __m128i _mm_setr_epi16(short w0, ++ short w1, ++ short w2, ++ short w3, ++ short w4, ++ short w5, ++ short w6, ++ short w7) ++{ ++ int16_t __attribute__((aligned(16))) ++ data[8] = {w0, w1, w2, w3, w4, w5, w6, w7}; ++ return vreinterpretq_m128i_s16(vld1q_s16((int16_t *) data)); ++} ++ ++// Sets the 4 signed 32-bit integer values in reverse order ++// https://technet.microsoft.com/en-us/library/security/27yb3ee5(v=vs.90).aspx ++FORCE_INLINE __m128i _mm_setr_epi32(int i3, int i2, int i1, int i0) ++{ ++ int32_t __attribute__((aligned(16))) data[4] = {i3, i2, i1, i0}; ++ return vreinterpretq_m128i_s32(vld1q_s32(data)); ++} ++ ++// Sets the 16 signed 8-bit integer values to b. ++// ++// r0 := b ++// r1 := b ++// ... ++// r15 := b ++// ++// https://msdn.microsoft.com/en-us/library/6e14xhyf(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_set1_epi8(char w) ++{ ++ return vreinterpretq_m128i_s8(vdupq_n_s8(w)); ++} ++ ++// Sets the 8 signed 16-bit integer values to w. ++// ++// r0 := w ++// r1 := w ++// ... ++// r7 := w ++// ++// https://msdn.microsoft.com/en-us/library/k0ya3x0e(v=vs.90).aspx ++FORCE_INLINE __m128i _mm_set1_epi16(short w) ++{ ++ return vreinterpretq_m128i_s16(vdupq_n_s16(w)); ++} ++ ++// Sets the 16 signed 8-bit integer values. ++// https://msdn.microsoft.com/en-us/library/x0cx8zd3(v=vs.90).aspx ++FORCE_INLINE __m128i _mm_set_epi8(char b15, ++ char b14, ++ char b13, ++ char b12, ++ char b11, ++ char b10, ++ char b9, ++ char b8, ++ char b7, ++ char b6, ++ char b5, ++ char b4, ++ char b3, ++ char b2, ++ char b1, ++ char b0) ++{ ++ int8_t __attribute__((aligned(16))) ++ data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3, ++ (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7, ++ (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11, ++ (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15}; ++ return (__m128i) vld1q_s8(data); ++} ++ ++// Sets the 8 signed 16-bit integer values. 
++// https://msdn.microsoft.com/en-au/library/3e0fek84(v=vs.90).aspx ++FORCE_INLINE __m128i _mm_set_epi16(short i7, ++ short i6, ++ short i5, ++ short i4, ++ short i3, ++ short i2, ++ short i1, ++ short i0) ++{ ++ int16_t __attribute__((aligned(16))) ++ data[8] = {i0, i1, i2, i3, i4, i5, i6, i7}; ++ return vreinterpretq_m128i_s16(vld1q_s16(data)); ++} ++ ++// Sets the 16 signed 8-bit integer values in reverse order. ++// https://msdn.microsoft.com/en-us/library/2khb9c7k(v=vs.90).aspx ++FORCE_INLINE __m128i _mm_setr_epi8(char b0, ++ char b1, ++ char b2, ++ char b3, ++ char b4, ++ char b5, ++ char b6, ++ char b7, ++ char b8, ++ char b9, ++ char b10, ++ char b11, ++ char b12, ++ char b13, ++ char b14, ++ char b15) ++{ ++ int8_t __attribute__((aligned(16))) ++ data[16] = {(int8_t) b0, (int8_t) b1, (int8_t) b2, (int8_t) b3, ++ (int8_t) b4, (int8_t) b5, (int8_t) b6, (int8_t) b7, ++ (int8_t) b8, (int8_t) b9, (int8_t) b10, (int8_t) b11, ++ (int8_t) b12, (int8_t) b13, (int8_t) b14, (int8_t) b15}; ++ return (__m128i) vld1q_s8(data); ++} ++ ++// Sets the 4 signed 32-bit integer values to i. ++// ++// r0 := i ++// r1 := i ++// r2 := i ++// r3 := I ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/h4xscxat(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_set1_epi32(int _i) ++{ ++ return vreinterpretq_m128i_s32(vdupq_n_s32(_i)); ++} ++ ++// Sets the 4 signed 64-bit integer values to i. ++// https://msdn.microsoft.com/en-us/library/vstudio/h4xscxat(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_set1_epi64(int64_t _i) ++{ ++ return vreinterpretq_m128i_s64(vdupq_n_s64(_i)); ++} ++ ++// Sets the 4 signed 32-bit integer values. 
++// https://msdn.microsoft.com/en-us/library/vstudio/019beekt(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_set_epi32(int i3, int i2, int i1, int i0) ++{ ++ int32_t __attribute__((aligned(16))) data[4] = {i0, i1, i2, i3}; ++ return vreinterpretq_m128i_s32(vld1q_s32(data)); ++} ++ ++// Returns the __m128i structure with its two 64-bit integer values ++// initialized to the values of the two 64-bit integers passed in. ++// https://msdn.microsoft.com/en-us/library/dk2sdw0h(v=vs.120).aspx ++FORCE_INLINE __m128i _mm_set_epi64x(int64_t i1, int64_t i2) ++{ ++ int64_t __attribute__((aligned(16))) data[2] = {i2, i1}; ++ return vreinterpretq_m128i_s64(vld1q_s64(data)); ++} ++ ++// Stores four single-precision, floating-point values. ++// https://msdn.microsoft.com/en-us/library/vstudio/s3h4ay6y(v=vs.100).aspx ++FORCE_INLINE void _mm_store_ps(float *p, __m128 a) ++{ ++ vst1q_f32(p, vreinterpretq_f32_m128(a)); ++} ++ ++// Stores four single-precision, floating-point values. ++// https://msdn.microsoft.com/en-us/library/44e30x22(v=vs.100).aspx ++FORCE_INLINE void _mm_storeu_ps(float *p, __m128 a) ++{ ++ vst1q_f32(p, vreinterpretq_f32_m128(a)); ++} ++ ++// Stores four 32-bit integer values as (as a __m128i value) at the address p. ++// https://msdn.microsoft.com/en-us/library/vstudio/edk11s13(v=vs.100).aspx ++FORCE_INLINE void _mm_store_si128(__m128i *p, __m128i a) ++{ ++ vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a)); ++} ++ ++// Stores four 32-bit integer values as (as a __m128i value) at the address p. ++// https://msdn.microsoft.com/en-us/library/vstudio/edk11s13(v=vs.100).aspx ++FORCE_INLINE void _mm_storeu_si128(__m128i *p, __m128i a) ++{ ++ vst1q_s32((int32_t *) p, vreinterpretq_s32_m128i(a)); ++} ++ ++// Stores the lower single - precision, floating - point value. 
++// https://msdn.microsoft.com/en-us/library/tzz10fbx(v=vs.100).aspx ++FORCE_INLINE void _mm_store_ss(float *p, __m128 a) ++{ ++ vst1q_lane_f32(p, vreinterpretq_f32_m128(a), 0); ++} ++ ++// Reads the lower 64 bits of b and stores them into the lower 64 bits of a. ++// https://msdn.microsoft.com/en-us/library/hhwf428f%28v=vs.90%29.aspx ++FORCE_INLINE void _mm_storel_epi64(__m128i *a, __m128i b) ++{ ++ uint64x1_t hi = vget_high_u64(vreinterpretq_u64_m128i(*a)); ++ uint64x1_t lo = vget_low_u64(vreinterpretq_u64_m128i(b)); ++ *a = vreinterpretq_m128i_u64(vcombine_u64(lo, hi)); ++} ++ ++// Stores the lower two single-precision floating point values of a to the ++// address p. ++// ++// *p0 := b0 ++// *p1 := b1 ++// ++// https://msdn.microsoft.com/en-us/library/h54t98ks(v=vs.90).aspx ++FORCE_INLINE void _mm_storel_pi(__m64 *p, __m128 a) ++{ ++ *p = vget_low_f32(a); ++} ++ ++// Loads a single single-precision, floating-point value, copying it into all ++// four words ++// https://msdn.microsoft.com/en-us/library/vstudio/5cdkf716(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_load1_ps(const float *p) ++{ ++ return vreinterpretq_m128_f32(vld1q_dup_f32(p)); ++} ++#define _mm_load_ps1 _mm_load1_ps ++ ++// Sets the lower two single-precision, floating-point values with 64 ++// bits of data loaded from the address p; the upper two values are passed ++// through from a. ++// ++// Return Value ++// r0 := *p0 ++// r1 := *p1 ++// r2 := a2 ++// r3 := a3 ++// ++// https://msdn.microsoft.com/en-us/library/s57cyak2(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_loadl_pi(__m128 a, __m64 const *b) ++{ ++ return vreinterpretq_m128_f32( ++ vcombine_f32(vld1_f32((const float32_t *) b), vget_high_f32(a))); ++} ++ ++// Loads four single-precision, floating-point values. 
++// https://msdn.microsoft.com/en-us/library/vstudio/zzd50xxt(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_load_ps(const float *p) ++{ ++ return vreinterpretq_m128_f32(vld1q_f32(p)); ++} ++ ++// Loads four single-precision, floating-point values. ++// https://msdn.microsoft.com/en-us/library/x1b16s7z%28v=vs.90%29.aspx ++FORCE_INLINE __m128 _mm_loadu_ps(const float *p) ++{ ++ // for neon, alignment doesn't matter, so _mm_load_ps and _mm_loadu_ps are ++ // equivalent for neon ++ return vreinterpretq_m128_f32(vld1q_f32(p)); ++} ++ ++// Loads an single - precision, floating - point value into the low word and ++// clears the upper three words. ++// https://msdn.microsoft.com/en-us/library/548bb9h4%28v=vs.90%29.aspx ++FORCE_INLINE __m128 _mm_load_ss(const float *p) ++{ ++ return vreinterpretq_m128_f32(vsetq_lane_f32(*p, vdupq_n_f32(0), 0)); ++} ++ ++FORCE_INLINE __m128i _mm_loadl_epi64(__m128i const *p) ++{ ++ /* Load the lower 64 bits of the value pointed to by p into the ++ * lower 64 bits of the result, zeroing the upper 64 bits of the result. ++ */ ++ return vcombine_s32(vld1_s32((int32_t const *) p), vcreate_s32(0)); ++} ++ ++// ****************************************** ++// Logic/Binary operations ++// ****************************************** ++ ++// Compares for inequality. ++// https://msdn.microsoft.com/en-us/library/sf44thbx(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_cmpneq_ps(__m128 a, __m128 b) ++{ ++ return vreinterpretq_m128_u32(vmvnq_u32( ++ vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)))); ++} ++ ++// Computes the bitwise AND-NOT of the four single-precision, floating-point ++// values of a and b. 
++// ++// r0 := ~a0 & b0 ++// r1 := ~a1 & b1 ++// r2 := ~a2 & b2 ++// r3 := ~a3 & b3 ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/68h7wd02(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_andnot_ps(__m128 a, __m128 b) ++{ ++ return vreinterpretq_m128_s32( ++ vbicq_s32(vreinterpretq_s32_m128(b), ++ vreinterpretq_s32_m128(a))); // *NOTE* argument swap ++} ++ ++// Computes the bitwise AND of the 128-bit value in b and the bitwise NOT of the ++// 128-bit value in a. ++// ++// r := (~a) & b ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/1beaceh8(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_andnot_si128(__m128i a, __m128i b) ++{ ++ return vreinterpretq_m128i_s32( ++ vbicq_s32(vreinterpretq_s32_m128i(b), ++ vreinterpretq_s32_m128i(a))); // *NOTE* argument swap ++} ++ ++// Computes the bitwise AND of the 128-bit value in a and the 128-bit value in ++// b. ++// ++// r := a & b ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/6d1txsa8(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_and_si128(__m128i a, __m128i b) ++{ ++ return vreinterpretq_m128i_s32( ++ vandq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); ++} ++ ++// Computes the bitwise AND of the four single-precision, floating-point values ++// of a and b. ++// ++// r0 := a0 & b0 ++// r1 := a1 & b1 ++// r2 := a2 & b2 ++// r3 := a3 & b3 ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/73ck1xc5(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_and_ps(__m128 a, __m128 b) ++{ ++ return vreinterpretq_m128_s32( ++ vandq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b))); ++} ++ ++// Computes the bitwise OR of the four single-precision, floating-point values ++// of a and b. 
++// https://msdn.microsoft.com/en-us/library/vstudio/7ctdsyy0(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_or_ps(__m128 a, __m128 b) ++{ ++ return vreinterpretq_m128_s32( ++ vorrq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b))); ++} ++ ++// Computes bitwise EXOR (exclusive-or) of the four single-precision, ++// floating-point values of a and b. ++// https://msdn.microsoft.com/en-us/library/ss6k3wk8(v=vs.100).aspx ++FORCE_INLINE __m128 _mm_xor_ps(__m128 a, __m128 b) ++{ ++ return vreinterpretq_m128_s32( ++ veorq_s32(vreinterpretq_s32_m128(a), vreinterpretq_s32_m128(b))); ++} ++ ++// Computes the bitwise OR of the 128-bit value in a and the 128-bit value in b. ++// ++// r := a | b ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/ew8ty0db(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_or_si128(__m128i a, __m128i b) ++{ ++ return vreinterpretq_m128i_s32( ++ vorrq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); ++} ++ ++// Computes the bitwise XOR of the 128-bit value in a and the 128-bit value in ++// b. https://msdn.microsoft.com/en-us/library/fzt08www(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_xor_si128(__m128i a, __m128i b) ++{ ++ return vreinterpretq_m128i_s32( ++ veorq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b))); ++} ++ ++// Moves the upper two values of B into the lower two values of A. ++// ++// r3 := a3 ++// r2 := a2 ++// r1 := b3 ++// r0 := b2 ++FORCE_INLINE __m128 _mm_movehl_ps(__m128 __A, __m128 __B) ++{ ++ float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(__A)); ++ float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(__B)); ++ return vreinterpretq_m128_f32(vcombine_f32(b32, a32)); ++} ++ ++// Moves the lower two values of B into the upper two values of A. 
++// ++// r3 := b1 ++// r2 := b0 ++// r1 := a1 ++// r0 := a0 ++FORCE_INLINE __m128 _mm_movelh_ps(__m128 __A, __m128 __B) ++{ ++ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(__A)); ++ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(__B)); ++ return vreinterpretq_m128_f32(vcombine_f32(a10, b10)); ++} ++ ++// NEON does not provide this method ++// Creates a 4-bit mask from the most significant bits of the four ++// single-precision, floating-point values. ++// https://msdn.microsoft.com/en-us/library/vstudio/4490ys29(v=vs.100).aspx ++FORCE_INLINE int _mm_movemask_ps(__m128 a) ++{ ++#if 0 /* C version */ ++ uint32x4_t &ia = *(uint32x4_t *) &a; ++ return (ia[0] >> 31) | ((ia[1] >> 30) & 2) | ((ia[2] >> 29) & 4) | ++ ((ia[3] >> 28) & 8); ++#endif ++ static const uint32x4_t movemask = {1, 2, 4, 8}; ++ static const uint32x4_t highbit = {0x80000000, 0x80000000, 0x80000000, ++ 0x80000000}; ++ uint32x4_t t0 = vreinterpretq_u32_m128(a); ++ uint32x4_t t1 = vtstq_u32(t0, highbit); ++ uint32x4_t t2 = vandq_u32(t1, movemask); ++ uint32x2_t t3 = vorr_u32(vget_low_u32(t2), vget_high_u32(t2)); ++ return vget_lane_u32(t3, 0) | vget_lane_u32(t3, 1); ++} ++ ++FORCE_INLINE __m128i _mm_abs_epi32(__m128i a) ++{ ++ return vqabsq_s32(a); ++} ++ ++FORCE_INLINE __m128i _mm_abs_epi16(__m128i a) ++{ ++ return vreinterpretq_s32_s16(vqabsq_s16(vreinterpretq_s16_s32(a))); ++} ++ ++// Takes the upper 64 bits of a and places it in the low end of the result ++// Takes the lower 64 bits of b and places it into the high end of the result. ++FORCE_INLINE __m128 _mm_shuffle_ps_1032(__m128 a, __m128 b) ++{ ++ float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a)); ++ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); ++ return vreinterpretq_m128_f32(vcombine_f32(a32, b10)); ++} ++ ++// takes the lower two 32-bit values from a and swaps them and places in high ++// end of result takes the higher two 32 bit values from b and swaps them and ++// places in low end of result. 
++FORCE_INLINE __m128 _mm_shuffle_ps_2301(__m128 a, __m128 b) ++{ ++ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); ++ float32x2_t b23 = vrev64_f32(vget_high_f32(vreinterpretq_f32_m128(b))); ++ return vreinterpretq_m128_f32(vcombine_f32(a01, b23)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_0321(__m128 a, __m128 b) ++{ ++ float32x2_t a21 = vget_high_f32( ++ vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3)); ++ float32x2_t b03 = vget_low_f32( ++ vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3)); ++ return vreinterpretq_m128_f32(vcombine_f32(a21, b03)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_2103(__m128 a, __m128 b) ++{ ++ float32x2_t a03 = vget_low_f32( ++ vextq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a), 3)); ++ float32x2_t b21 = vget_high_f32( ++ vextq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b), 3)); ++ return vreinterpretq_m128_f32(vcombine_f32(a03, b21)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_1010(__m128 a, __m128 b) ++{ ++ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); ++ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); ++ return vreinterpretq_m128_f32(vcombine_f32(a10, b10)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_1001(__m128 a, __m128 b) ++{ ++ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); ++ float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b)); ++ return vreinterpretq_m128_f32(vcombine_f32(a01, b10)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_0101(__m128 a, __m128 b) ++{ ++ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); ++ float32x2_t b01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(b))); ++ return vreinterpretq_m128_f32(vcombine_f32(a01, b01)); ++} ++ ++// keeps the low 64 bits of b in the low and puts the high 64 bits of a in the ++// high ++FORCE_INLINE __m128 _mm_shuffle_ps_3210(__m128 a, __m128 b) ++{ ++ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); ++ 
float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b)); ++ return vreinterpretq_m128_f32(vcombine_f32(a10, b32)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_0011(__m128 a, __m128 b) ++{ ++ float32x2_t a11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 1); ++ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); ++ return vreinterpretq_m128_f32(vcombine_f32(a11, b00)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_0022(__m128 a, __m128 b) ++{ ++ float32x2_t a22 = ++ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0); ++ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); ++ return vreinterpretq_m128_f32(vcombine_f32(a22, b00)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_2200(__m128 a, __m128 b) ++{ ++ float32x2_t a00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(a)), 0); ++ float32x2_t b22 = ++ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(b)), 0); ++ return vreinterpretq_m128_f32(vcombine_f32(a00, b22)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_3202(__m128 a, __m128 b) ++{ ++ float32_t a0 = vgetq_lane_f32(vreinterpretq_f32_m128(a), 0); ++ float32x2_t a22 = ++ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 0); ++ float32x2_t a02 = vset_lane_f32(a0, a22, 1); /* TODO: use vzip ?*/ ++ float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b)); ++ return vreinterpretq_m128_f32(vcombine_f32(a02, b32)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_1133(__m128 a, __m128 b) ++{ ++ float32x2_t a33 = ++ vdup_lane_f32(vget_high_f32(vreinterpretq_f32_m128(a)), 1); ++ float32x2_t b11 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 1); ++ return vreinterpretq_m128_f32(vcombine_f32(a33, b11)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_2010(__m128 a, __m128 b) ++{ ++ float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a)); ++ float32_t b2 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 2); ++ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); ++ float32x2_t 
b20 = vset_lane_f32(b2, b00, 1); ++ return vreinterpretq_m128_f32(vcombine_f32(a10, b20)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_2001(__m128 a, __m128 b) ++{ ++ float32x2_t a01 = vrev64_f32(vget_low_f32(vreinterpretq_f32_m128(a))); ++ float32_t b2 = vgetq_lane_f32(b, 2); ++ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); ++ float32x2_t b20 = vset_lane_f32(b2, b00, 1); ++ return vreinterpretq_m128_f32(vcombine_f32(a01, b20)); ++} ++ ++FORCE_INLINE __m128 _mm_shuffle_ps_2032(__m128 a, __m128 b) ++{ ++ float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a)); ++ float32_t b2 = vgetq_lane_f32(b, 2); ++ float32x2_t b00 = vdup_lane_f32(vget_low_f32(vreinterpretq_f32_m128(b)), 0); ++ float32x2_t b20 = vset_lane_f32(b2, b00, 1); ++ return vreinterpretq_m128_f32(vcombine_f32(a32, b20)); ++} ++ ++// NEON does not support a general purpose permute intrinsic ++// Selects four specific single-precision, floating-point values from a and b, ++// based on the mask i. 
++// https://msdn.microsoft.com/en-us/library/vstudio/5f0858x0(v=vs.100).aspx ++#if 0 /* C version */ ++FORCE_INLINE __m128 _mm_shuffle_ps_default(__m128 a, ++ __m128 b, ++ __constrange(0, 255) int imm) ++{ ++ __m128 ret; ++ ret[0] = a[imm & 0x3]; ++ ret[1] = a[(imm >> 2) & 0x3]; ++ ret[2] = b[(imm >> 4) & 0x03]; ++ ret[3] = b[(imm >> 6) & 0x03]; ++ return ret; ++} ++#endif ++#define _mm_shuffle_ps_default(a, b, imm) \ ++ ({ \ ++ float32x4_t ret; \ ++ ret = vmovq_n_f32( \ ++ vgetq_lane_f32(vreinterpretq_f32_m128(a), (imm) &0x3)); \ ++ ret = vsetq_lane_f32( \ ++ vgetq_lane_f32(vreinterpretq_f32_m128(a), ((imm) >> 2) & 0x3), \ ++ ret, 1); \ ++ ret = vsetq_lane_f32( \ ++ vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 4) & 0x3), \ ++ ret, 2); \ ++ ret = vsetq_lane_f32( \ ++ vgetq_lane_f32(vreinterpretq_f32_m128(b), ((imm) >> 6) & 0x3), \ ++ ret, 3); \ ++ vreinterpretq_m128_f32(ret); \ ++ }) ++ ++// FORCE_INLINE __m128 _mm_shuffle_ps(__m128 a, __m128 b, __constrange(0,255) ++// int imm) ++#define _mm_shuffle_ps(a, b, imm) \ ++ ({ \ ++ __m128 ret; \ ++ switch (imm) { \ ++ case _MM_SHUFFLE(1, 0, 3, 2): \ ++ ret = _mm_shuffle_ps_1032((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 3, 0, 1): \ ++ ret = _mm_shuffle_ps_2301((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 3, 2, 1): \ ++ ret = _mm_shuffle_ps_0321((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 1, 0, 3): \ ++ ret = _mm_shuffle_ps_2103((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(1, 0, 1, 0): \ ++ ret = _mm_movelh_ps((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(1, 0, 0, 1): \ ++ ret = _mm_shuffle_ps_1001((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 1, 0, 1): \ ++ ret = _mm_shuffle_ps_0101((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(3, 2, 1, 0): \ ++ ret = _mm_shuffle_ps_3210((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 0, 1, 1): \ ++ ret = _mm_shuffle_ps_0011((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 0, 2, 2): \ ++ ret = _mm_shuffle_ps_0022((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 2, 0, 
0): \ ++ ret = _mm_shuffle_ps_2200((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(3, 2, 0, 2): \ ++ ret = _mm_shuffle_ps_3202((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(3, 2, 3, 2): \ ++ ret = _mm_movehl_ps((b), (a)); \ ++ break; \ ++ case _MM_SHUFFLE(1, 1, 3, 3): \ ++ ret = _mm_shuffle_ps_1133((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 0, 1, 0): \ ++ ret = _mm_shuffle_ps_2010((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 0, 0, 1): \ ++ ret = _mm_shuffle_ps_2001((a), (b)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 0, 3, 2): \ ++ ret = _mm_shuffle_ps_2032((a), (b)); \ ++ break; \ ++ default: \ ++ ret = _mm_shuffle_ps_default((a), (b), (imm)); \ ++ break; \ ++ } \ ++ ret; \ ++ }) ++ ++// Takes the upper 64 bits of a and places it in the low end of the result ++// Takes the lower 64 bits of a and places it into the high end of the result. ++FORCE_INLINE __m128i _mm_shuffle_epi_1032(__m128i a) ++{ ++ int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a)); ++ int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a)); ++ return vreinterpretq_m128i_s32(vcombine_s32(a32, a10)); ++} ++ ++// takes the lower two 32-bit values from a and swaps them and places in low end ++// of result takes the higher two 32 bit values from a and swaps them and places ++// in high end of result. 
++FORCE_INLINE __m128i _mm_shuffle_epi_2301(__m128i a) ++{ ++ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); ++ int32x2_t a23 = vrev64_s32(vget_high_s32(vreinterpretq_s32_m128i(a))); ++ return vreinterpretq_m128i_s32(vcombine_s32(a01, a23)); ++} ++ ++// rotates the least significant 32 bits into the most signficant 32 bits, and ++// shifts the rest down ++FORCE_INLINE __m128i _mm_shuffle_epi_0321(__m128i a) ++{ ++ return vreinterpretq_m128i_s32( ++ vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 1)); ++} ++ ++// rotates the most significant 32 bits into the least signficant 32 bits, and ++// shifts the rest up ++FORCE_INLINE __m128i _mm_shuffle_epi_2103(__m128i a) ++{ ++ return vreinterpretq_m128i_s32( ++ vextq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(a), 3)); ++} ++ ++// gets the lower 64 bits of a, and places it in the upper 64 bits ++// gets the lower 64 bits of a and places it in the lower 64 bits ++FORCE_INLINE __m128i _mm_shuffle_epi_1010(__m128i a) ++{ ++ int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a)); ++ return vreinterpretq_m128i_s32(vcombine_s32(a10, a10)); ++} ++ ++// gets the lower 64 bits of a, swaps the 0 and 1 elements, and places it in the ++// lower 64 bits gets the lower 64 bits of a, and places it in the upper 64 bits ++FORCE_INLINE __m128i _mm_shuffle_epi_1001(__m128i a) ++{ ++ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); ++ int32x2_t a10 = vget_low_s32(vreinterpretq_s32_m128i(a)); ++ return vreinterpretq_m128i_s32(vcombine_s32(a01, a10)); ++} ++ ++// gets the lower 64 bits of a, swaps the 0 and 1 elements and places it in the ++// upper 64 bits gets the lower 64 bits of a, swaps the 0 and 1 elements, and ++// places it in the lower 64 bits ++FORCE_INLINE __m128i _mm_shuffle_epi_0101(__m128i a) ++{ ++ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); ++ return vreinterpretq_m128i_s32(vcombine_s32(a01, a01)); ++} ++ ++FORCE_INLINE 
__m128i _mm_shuffle_epi_2211(__m128i a) ++{ ++ int32x2_t a11 = vdup_lane_s32(vget_low_s32(vreinterpretq_s32_m128i(a)), 1); ++ int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0); ++ return vreinterpretq_m128i_s32(vcombine_s32(a11, a22)); ++} ++ ++FORCE_INLINE __m128i _mm_shuffle_epi_0122(__m128i a) ++{ ++ int32x2_t a22 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 0); ++ int32x2_t a01 = vrev64_s32(vget_low_s32(vreinterpretq_s32_m128i(a))); ++ return vreinterpretq_m128i_s32(vcombine_s32(a22, a01)); ++} ++ ++FORCE_INLINE __m128i _mm_shuffle_epi_3332(__m128i a) ++{ ++ int32x2_t a32 = vget_high_s32(vreinterpretq_s32_m128i(a)); ++ int32x2_t a33 = vdup_lane_s32(vget_high_s32(vreinterpretq_s32_m128i(a)), 1); ++ return vreinterpretq_m128i_s32(vcombine_s32(a32, a33)); ++} ++ ++// Shuffle packed 8-bit integers in a according to shuffle control mask in the ++// corresponding 8-bit element of b, and store the results in dst. ++// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_shuffle_epi8&expand=5146 ++FORCE_INLINE __m128i _mm_shuffle_epi8(__m128i a, __m128i b) ++{ ++#if __aarch64__ ++ int8x16_t tbl = vreinterpretq_s8_m128i(a); // input a ++ uint8x16_t idx = vreinterpretq_u8_m128i(b); // input b ++ uint8_t __attribute__((aligned(16))) ++ mask[16] = {0x8F, 0x8F, 0x8F, 0x8F, 0x8F, 0x8F, 0x8F, 0x8F, ++ 0x8F, 0x8F, 0x8F, 0x8F, 0x8F, 0x8F, 0x8F, 0x8F}; ++ uint8x16_t idx_masked = ++ vandq_u8(idx, vld1q_u8(mask)); // avoid using meaningless bits ++ ++ return vreinterpretq_m128i_s8(vqtbl1q_s8(tbl, idx_masked)); ++#else ++ uint8_t *tbl = (uint8_t *) &a; // input a ++ uint8_t *idx = (uint8_t *) &b; // input b ++ int32_t r[4]; ++ ++ r[0] = ((idx[3] & 0x80) ? 0 : tbl[idx[3] % 16]) << 24; ++ r[0] |= ((idx[2] & 0x80) ? 0 : tbl[idx[2] % 16]) << 16; ++ r[0] |= ((idx[1] & 0x80) ? 0 : tbl[idx[1] % 16]) << 8; ++ r[0] |= ((idx[0] & 0x80) ? 0 : tbl[idx[0] % 16]); ++ ++ r[1] = ((idx[7] & 0x80) ? 
0 : tbl[idx[7] % 16]) << 24; ++ r[1] |= ((idx[6] & 0x80) ? 0 : tbl[idx[6] % 16]) << 16; ++ r[1] |= ((idx[5] & 0x80) ? 0 : tbl[idx[5] % 16]) << 8; ++ r[1] |= ((idx[4] & 0x80) ? 0 : tbl[idx[4] % 16]); ++ ++ r[2] = ((idx[11] & 0x80) ? 0 : tbl[idx[11] % 16]) << 24; ++ r[2] |= ((idx[10] & 0x80) ? 0 : tbl[idx[10] % 16]) << 16; ++ r[2] |= ((idx[9] & 0x80) ? 0 : tbl[idx[9] % 16]) << 8; ++ r[2] |= ((idx[8] & 0x80) ? 0 : tbl[idx[8] % 16]); ++ ++ r[3] = ((idx[15] & 0x80) ? 0 : tbl[idx[15] % 16]) << 24; ++ r[3] |= ((idx[14] & 0x80) ? 0 : tbl[idx[14] % 16]) << 16; ++ r[3] |= ((idx[13] & 0x80) ? 0 : tbl[idx[13] % 16]) << 8; ++ r[3] |= ((idx[12] & 0x80) ? 0 : tbl[idx[12] % 16]); ++ ++ return vld1q_s32(r); ++#endif ++} ++ ++ ++#if 0 /* C version */ ++FORCE_INLINE __m128i _mm_shuffle_epi32_default(__m128i a, ++ __constrange(0, 255) int imm) ++{ ++ __m128i ret; ++ ret[0] = a[imm & 0x3]; ++ ret[1] = a[(imm >> 2) & 0x3]; ++ ret[2] = a[(imm >> 4) & 0x03]; ++ ret[3] = a[(imm >> 6) & 0x03]; ++ return ret; ++} ++#endif ++#define _mm_shuffle_epi32_default(a, imm) \ ++ ({ \ ++ int32x4_t ret; \ ++ ret = vmovq_n_s32( \ ++ vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm) &0x3)); \ ++ ret = vsetq_lane_s32( \ ++ vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 2) & 0x3), \ ++ ret, 1); \ ++ ret = vsetq_lane_s32( \ ++ vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 4) & 0x3), \ ++ ret, 2); \ ++ ret = vsetq_lane_s32( \ ++ vgetq_lane_s32(vreinterpretq_s32_m128i(a), ((imm) >> 6) & 0x3), \ ++ ret, 3); \ ++ vreinterpretq_m128i_s32(ret); \ ++ }) ++ ++// FORCE_INLINE __m128i _mm_shuffle_epi32_splat(__m128i a, __constrange(0,255) ++// int imm) ++#if defined(__aarch64__) ++#define _mm_shuffle_epi32_splat(a, imm) \ ++ ({ \ ++ vreinterpretq_m128i_s32( \ ++ vdupq_laneq_s32(vreinterpretq_s32_m128i(a), (imm))); \ ++ }) ++#else ++#define _mm_shuffle_epi32_splat(a, imm) \ ++ ({ \ ++ vreinterpretq_m128i_s32( \ ++ vdupq_n_s32(vgetq_lane_s32(vreinterpretq_s32_m128i(a), (imm)))); \ ++ }) ++#endif ++ ++// 
Shuffles the 4 signed or unsigned 32-bit integers in a as specified by imm. ++// https://msdn.microsoft.com/en-us/library/56f67xbk%28v=vs.90%29.aspx ++// FORCE_INLINE __m128i _mm_shuffle_epi32(__m128i a, __constrange(0,255) int ++// imm) ++#define _mm_shuffle_epi32(a, imm) \ ++ ({ \ ++ __m128i ret; \ ++ switch (imm) { \ ++ case _MM_SHUFFLE(1, 0, 3, 2): \ ++ ret = _mm_shuffle_epi_1032((a)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 3, 0, 1): \ ++ ret = _mm_shuffle_epi_2301((a)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 3, 2, 1): \ ++ ret = _mm_shuffle_epi_0321((a)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 1, 0, 3): \ ++ ret = _mm_shuffle_epi_2103((a)); \ ++ break; \ ++ case _MM_SHUFFLE(1, 0, 1, 0): \ ++ ret = _mm_shuffle_epi_1010((a)); \ ++ break; \ ++ case _MM_SHUFFLE(1, 0, 0, 1): \ ++ ret = _mm_shuffle_epi_1001((a)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 1, 0, 1): \ ++ ret = _mm_shuffle_epi_0101((a)); \ ++ break; \ ++ case _MM_SHUFFLE(2, 2, 1, 1): \ ++ ret = _mm_shuffle_epi_2211((a)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 1, 2, 2): \ ++ ret = _mm_shuffle_epi_0122((a)); \ ++ break; \ ++ case _MM_SHUFFLE(3, 3, 3, 2): \ ++ ret = _mm_shuffle_epi_3332((a)); \ ++ break; \ ++ case _MM_SHUFFLE(0, 0, 0, 0): \ ++ ret = _mm_shuffle_epi32_splat((a), 0); \ ++ break; \ ++ case _MM_SHUFFLE(1, 1, 1, 1): \ ++ ret = _mm_shuffle_epi32_splat((a), 1); \ ++ break; \ ++ case _MM_SHUFFLE(2, 2, 2, 2): \ ++ ret = _mm_shuffle_epi32_splat((a), 2); \ ++ break; \ ++ case _MM_SHUFFLE(3, 3, 3, 3): \ ++ ret = _mm_shuffle_epi32_splat((a), 3); \ ++ break; \ ++ default: \ ++ ret = _mm_shuffle_epi32_default((a), (imm)); \ ++ break; \ ++ } \ ++ ret; \ ++ }) ++ ++// Shuffles the upper 4 signed or unsigned 16 - bit integers in a as specified ++// by imm. 
https://msdn.microsoft.com/en-us/library/13ywktbs(v=vs.100).aspx ++// FORCE_INLINE __m128i _mm_shufflehi_epi16_function(__m128i a, ++// __constrange(0,255) int imm) ++#define _mm_shufflelo_epi16_function(a, imm) \ ++ ({ \ ++ int16x8_t ret = vreinterpretq_s16_s32(a); \ ++ int16x4_t lowBits = vget_low_s16(ret); \ ++ ret = vsetq_lane_s16(vget_lane_s16(lowBits, (imm) &0x3), ret, 4); \ ++ ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 2) & 0x3), ret, \ ++ 5); \ ++ ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 4) & 0x3), ret, \ ++ 6); \ ++ ret = vsetq_lane_s16(vget_lane_s16(lowBits, ((imm) >> 6) & 0x3), ret, \ ++ 7); \ ++ vreinterpretq_s32_s16(ret); \ ++ }) ++ ++// FORCE_INLINE __m128i _mm_shufflehi_epi16(__m128i a, __constrange(0,255) int ++// imm) ++#define _mm_shufflelo_epi16(a, imm) _mm_shufflehi_epi16_function((a), (imm)) ++ ++// Shuffles the upper 4 signed or unsigned 16 - bit integers in a as specified ++// by imm. https://msdn.microsoft.com/en-us/library/13ywktbs(v=vs.100).aspx ++// FORCE_INLINE __m128i _mm_shufflehi_epi16_function(__m128i a, ++// __constrange(0,255) int imm) ++#define _mm_shufflehi_epi16_function(a, imm) \ ++ ({ \ ++ int16x8_t ret = vreinterpretq_s16_s32(a); \ ++ int16x4_t highBits = vget_high_s16(ret); \ ++ ret = vsetq_lane_s16(vget_lane_s16(highBits, (imm) &0x3), ret, 4); \ ++ ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 2) & 0x3), ret, \ ++ 5); \ ++ ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 4) & 0x3), ret, \ ++ 6); \ ++ ret = vsetq_lane_s16(vget_lane_s16(highBits, ((imm) >> 6) & 0x3), ret, \ ++ 7); \ ++ vreinterpretq_s32_s16(ret); \ ++ }) ++ ++// FORCE_INLINE __m128i _mm_shufflehi_epi16(__m128i a, __constrange(0,255) int ++// imm) ++#define _mm_shufflehi_epi16(a, imm) _mm_shufflehi_epi16_function((a), (imm)) ++ ++// Shifts the 4 signed 32-bit integers in a right by count bits while shifting ++// in the sign bit. 
++// ++// r0 := a0 >> count ++// r1 := a1 >> count ++// r2 := a2 >> count ++// r3 := a3 >> count immediate ++FORCE_INLINE __m128i _mm_srai_epi32(__m128i a, int count) ++{ ++ return vshlq_s32(a, vdupq_n_s32(-count)); ++} ++ ++// Shifts the 8 signed 16-bit integers in a right by count bits while shifting ++// in the sign bit. ++// ++// r0 := a0 >> count ++// r1 := a1 >> count ++// ... ++// r7 := a7 >> count ++FORCE_INLINE __m128i _mm_srai_epi16(__m128i a, int count) ++{ ++ return (__m128i) vshlq_s16((int16x8_t) a, vdupq_n_s16(-count)); ++} ++ ++// Shifts the 8 signed or unsigned 16-bit integers in a left by count bits while ++// shifting in zeros. ++// ++// r0 := a0 << count ++// r1 := a1 << count ++// ... ++// r7 := a7 << count ++// ++// https://msdn.microsoft.com/en-us/library/es73bcsy(v=vs.90).aspx ++#define _mm_slli_epi16(a, imm) \ ++ ({ \ ++ __m128i ret; \ ++ if ((imm) <= 0) { \ ++ ret = a; \ ++ } else if ((imm) > 31) { \ ++ ret = _mm_setzero_si128(); \ ++ } else { \ ++ ret = vreinterpretq_m128i_s16( \ ++ vshlq_n_s16(vreinterpretq_s16_m128i(a), (imm))); \ ++ } \ ++ ret; \ ++ }) ++ ++// Shifts the 4 signed or unsigned 32-bit integers in a left by count bits while ++// shifting in zeros. : ++// https://msdn.microsoft.com/en-us/library/z2k3bbtb%28v=vs.90%29.aspx ++// FORCE_INLINE __m128i _mm_slli_epi32(__m128i a, __constrange(0,255) int imm) ++#define _mm_slli_epi32(a, imm) \ ++ ({ \ ++ __m128i ret; \ ++ if ((imm) <= 0) { \ ++ ret = a; \ ++ } else if ((imm) > 31) { \ ++ ret = _mm_setzero_si128(); \ ++ } else { \ ++ ret = vreinterpretq_m128i_s32( \ ++ vshlq_n_s32(vreinterpretq_s32_m128i(a), (imm))); \ ++ } \ ++ ret; \ ++ }) ++ ++// Shift packed 64-bit integers in a left by imm8 while shifting in zeros, and ++// store the results in dst. 
++#define _mm_slli_epi64(a, imm) \ ++ ({ \ ++ __m128i ret; \ ++ if ((imm) <= 0) { \ ++ ret = a; \ ++ } else if ((imm) > 63) { \ ++ ret = _mm_setzero_si128(); \ ++ } else { \ ++ ret = vreinterpretq_m128i_s64( \ ++ vshlq_n_s64(vreinterpretq_s64_m128i(a), (imm))); \ ++ } \ ++ ret; \ ++ }) ++ ++// Shifts the 8 signed or unsigned 16-bit integers in a right by count bits ++// while shifting in zeros. ++// ++// r0 := srl(a0, count) ++// r1 := srl(a1, count) ++// ... ++// r7 := srl(a7, count) ++// ++// https://msdn.microsoft.com/en-us/library/6tcwd38t(v=vs.90).aspx ++#define _mm_srli_epi16(a, imm) \ ++ ({ \ ++ __m128i ret; \ ++ if ((imm) <= 0) { \ ++ ret = a; \ ++ } else if ((imm) > 31) { \ ++ ret = _mm_setzero_si128(); \ ++ } else { \ ++ ret = vreinterpretq_m128i_u16( \ ++ vshrq_n_u16(vreinterpretq_u16_m128i(a), (imm))); \ ++ } \ ++ ret; \ ++ }) ++ ++// Shifts the 4 signed or unsigned 32-bit integers in a right by count bits ++// while shifting in zeros. ++// https://msdn.microsoft.com/en-us/library/w486zcfa(v=vs.100).aspx FORCE_INLINE ++// __m128i _mm_srli_epi32(__m128i a, __constrange(0,255) int imm) ++#define _mm_srli_epi32(a, imm) \ ++ ({ \ ++ __m128i ret; \ ++ if ((imm) <= 0) { \ ++ ret = a; \ ++ } else if ((imm) > 31) { \ ++ ret = _mm_setzero_si128(); \ ++ } else { \ ++ ret = vreinterpretq_m128i_u32( \ ++ vshrq_n_u32(vreinterpretq_u32_m128i(a), (imm))); \ ++ } \ ++ ret; \ ++ }) ++ ++// Shift packed 64-bit integers in a right by imm8 while shifting in zeros, and ++// store the results in dst. ++#define _mm_srli_epi64(a, imm) \ ++ ({ \ ++ __m128i ret; \ ++ if ((imm) <= 0) { \ ++ ret = a; \ ++ } else if ((imm) > 63) { \ ++ ret = _mm_setzero_si128(); \ ++ } else { \ ++ ret = vreinterpretq_m128i_u64( \ ++ vshrq_n_u64(vreinterpretq_u64_m128i(a), (imm))); \ ++ } \ ++ ret; \ ++ }) ++ ++// Shifts the 4 signed 32 - bit integers in a right by count bits while shifting ++// in the sign bit. 
// https://msdn.microsoft.com/en-us/library/z1939387(v=vs.100).aspx
// FORCE_INLINE __m128i _mm_srai_epi32(__m128i a, __constrange(0,255) int imm)
// NOTE(review): this macro shadows the _mm_srai_epi32 *function* defined
// earlier in this file; after this point the macro wins at every use site —
// confirm the duplicate definition is intentional.
// For imm > 31, x86 clamps the count to 31 (every lane becomes 0 or -1);
// that case is emulated below by two arithmetic shifts of 16, which together
// replicate the sign bit across the whole lane.
#define _mm_srai_epi32(a, imm) \
    ({ \
        __m128i ret; \
        if ((imm) <= 0) { \
            ret = a; \
        } else if ((imm) > 31) { \
            ret = vreinterpretq_m128i_s32( \
                vshrq_n_s32(vreinterpretq_s32_m128i(a), 16)); \
            ret = vreinterpretq_m128i_s32( \
                vshrq_n_s32(vreinterpretq_s32_m128i(ret), 16)); \
        } else { \
            ret = vreinterpretq_m128i_s32( \
                vshrq_n_s32(vreinterpretq_s32_m128i(a), (imm))); \
        } \
        ret; \
    })

// Shifts the 128-bit value in a right by imm bytes while shifting in
// zeros. imm must be an immediate.
//
// r := srl(a, imm*8)
//
// https://msdn.microsoft.com/en-us/library/305w28yz(v=vs.100).aspx
// FORCE_INLINE _mm_srli_si128(__m128i a, __constrange(0,255) int imm)
// Implemented with vextq_s8: extracting bytes imm..imm+15 from the pair
// (a, zero-vector) is exactly a byte-wise right shift with zero fill.
#define _mm_srli_si128(a, imm) \
    ({ \
        __m128i ret; \
        if ((imm) <= 0) { \
            ret = a; \
        } else if ((imm) > 15) { \
            ret = _mm_setzero_si128(); \
        } else { \
            ret = vreinterpretq_m128i_s8( \
                vextq_s8(vreinterpretq_s8_m128i(a), vdupq_n_s8(0), (imm))); \
        } \
        ret; \
    })

// Shifts the 128-bit value in a left by imm bytes while shifting in zeros. imm
// must be an immediate.
//
// r := a << (imm * 8)
//
// https://msdn.microsoft.com/en-us/library/34d3k2kt(v=vs.100).aspx
// FORCE_INLINE __m128i _mm_slli_si128(__m128i a, __constrange(0,255) int imm)
// Mirror of _mm_srli_si128: extract from the pair (zero-vector, a) starting
// at byte 16-imm, which is a byte-wise left shift with zero fill.
#define _mm_slli_si128(a, imm) \
    ({ \
        __m128i ret; \
        if ((imm) <= 0) { \
            ret = a; \
        } else if ((imm) > 15) { \
            ret = _mm_setzero_si128(); \
        } else { \
            ret = vreinterpretq_m128i_s8(vextq_s8( \
                vdupq_n_s8(0), vreinterpretq_s8_m128i(a), 16 - (imm))); \
        } \
        ret; \
    })

// NEON does not provide a version of this function, here is an article about
// some ways to repro the results.
// http://stackoverflow.com/questions/11870910/sse-mm-movemask-epi8-equivalent-method-for-arm-neon
// Creates a 16-bit mask from the most significant bits of the 16 signed or
// unsigned 8-bit integers in a and zero extends the upper bits.
// https://msdn.microsoft.com/en-us/library/vstudio/s090c8fk(v=vs.100).aspx
FORCE_INLINE int _mm_movemask_epi8(__m128i _a)
{
    uint8x16_t input = vreinterpretq_u8_m128i(_a);
    // Per-lane shift amounts: lane i is shifted by -(7-i), i.e. right-shifted
    // so that each lane's MSB lands in a distinct bit position 0..7.
    static const int8_t __attribute__((aligned(16)))
        xr[8] = {-7, -6, -5, -4, -3, -2, -1, 0};
    uint8x8_t mask_and = vdup_n_u8(0x80);  // keep only each byte's MSB
    int8x8_t mask_shift = vld1_s8(xr);     // negative vshl => right shift

    uint8x8_t lo = vget_low_u8(input);
    uint8x8_t hi = vget_high_u8(input);

    lo = vand_u8(lo, mask_and);
    lo = vshl_u8(lo, mask_shift);

    hi = vand_u8(hi, mask_and);
    hi = vshl_u8(hi, mask_shift);

    // Three pairwise adds fold all 8 lanes into lane 0; because each lane
    // holds a distinct single bit, the sums act as a bitwise OR.
    lo = vpadd_u8(lo, lo);
    lo = vpadd_u8(lo, lo);
    lo = vpadd_u8(lo, lo);

    hi = vpadd_u8(hi, hi);
    hi = vpadd_u8(hi, hi);
    hi = vpadd_u8(hi, hi);

    // High 8 bytes form bits 8..15 of the mask, low 8 bytes bits 0..7.
    return ((hi[0] << 8) | (lo[0] & 0xFF));
}

// Compute the bitwise AND of 128 bits (representing integer data) in a and
// mask, and return 1 if the result is zero, otherwise return 0.
// https://software.intel.com/sites/landingpage/IntrinsicsGuide/#text=_mm_test_all_zeros&expand=5871
FORCE_INLINE int _mm_test_all_zeros(__m128i a, __m128i mask)
{
    // OR the two 64-bit halves of (a & mask): non-zero iff any bit survives.
    int64x2_t a_and_mask =
        vandq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(mask));
    return (vgetq_lane_s64(a_and_mask, 0) | vgetq_lane_s64(a_and_mask, 1)) ? 0
                                                                           : 1;
}

// ******************************************
// Math operations
// ******************************************

// Subtracts the four single-precision, floating-point values of a and b.
//
// r0 := a0 - b0
// r1 := a1 - b1
// r2 := a2 - b2
// r3 := a3 - b3
//
// https://msdn.microsoft.com/en-us/library/vstudio/1zad2k61(v=vs.100).aspx
FORCE_INLINE __m128 _mm_sub_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_f32(
        vsubq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// Subtract 2 packed 64-bit integers in b from 2 packed 64-bit integers in a,
// and store the results in dst.
// r0 := a0 - b0
// r1 := a1 - b1
FORCE_INLINE __m128i _mm_sub_epi64(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s64(
        vsubq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
}

// Subtracts the 4 signed or unsigned 32-bit integers of b from the 4 signed or
// unsigned 32-bit integers of a.
//
// r0 := a0 - b0
// r1 := a1 - b1
// r2 := a2 - b2
// r3 := a3 - b3
//
// https://msdn.microsoft.com/en-us/library/vstudio/fhh866h0(v=vs.100).aspx
FORCE_INLINE __m128i _mm_sub_epi32(__m128i a, __m128i b)
{
    // Fixed: this integer subtraction previously used the float reinterpret
    // helpers (vreinterpretq_m128_f32 / vreinterpretq_f32_m128); use the
    // s32/m128i helpers like the sibling _mm_sub_epi16/_mm_sub_epi8.
    return vreinterpretq_m128i_s32(
        vsubq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}

// Subtracts the 8 signed or unsigned 16-bit integers of b from the 8 signed or
// unsigned 16-bit integers of a (wrapping on overflow).
FORCE_INLINE __m128i _mm_sub_epi16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s16(
        vsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}

// Subtracts the 16 signed or unsigned 8-bit integers of b from the 16 signed
// or unsigned 8-bit integers of a (wrapping on overflow).
FORCE_INLINE __m128i _mm_sub_epi8(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s8(
        vsubq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
}

// Subtracts the 8 unsigned 16-bit integers of b from the 8 unsigned 16-bit
// integers of a and saturates.
// https://technet.microsoft.com/en-us/subscriptions/index/f44y0s19(v=vs.90).aspx
FORCE_INLINE __m128i _mm_subs_epu16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u16(
        vqsubq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
}

// Subtracts the 16 unsigned 8-bit integers of b from the 16 unsigned 8-bit
// integers of a and saturates.
//
// r0 := UnsignedSaturate(a0 - b0)
// r1 := UnsignedSaturate(a1 - b1)
// ...
// r15 := UnsignedSaturate(a15 - b15)
//
// https://technet.microsoft.com/en-us/subscriptions/yadkxc18(v=vs.90)
FORCE_INLINE __m128i _mm_subs_epu8(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u8(
        vqsubq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
}

// Subtracts the 8 signed 16-bit integers of b from the 8 signed 16-bit integers
// of a and saturates.
//
// r0 := SignedSaturate(a0 - b0)
// r1 := SignedSaturate(a1 - b1)
// ...
// r7 := SignedSaturate(a7 - b7)
FORCE_INLINE __m128i _mm_subs_epi16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s16(
        vqsubq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}

// Adds the 8 unsigned 16-bit integers in a to the 8 unsigned 16-bit integers
// in b and saturates.
FORCE_INLINE __m128i _mm_adds_epu16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u16(
        vqaddq_u16(vreinterpretq_u16_m128i(a), vreinterpretq_u16_m128i(b)));
}

// Negate, pass through, or zero each 32-bit lane of a according to the sign
// of the corresponding lane of b (SSSE3 _mm_sign_epi32 semantics):
//   b < 0 -> -a,   b > 0 -> a,   b == 0 -> 0 (both masks are zero).
// NOTE(review): operates on __m128i with raw s32 ops — relies on __m128i
// being typedef'd to a 32-bit NEON vector; confirm against the header's
// typedefs.
FORCE_INLINE __m128i _mm_sign_epi32(__m128i a, __m128i b)
{
    __m128i zer0 = vdupq_n_s32(0);
    __m128i ltMask = vreinterpretq_s32_u32(vcltq_s32(b, zer0));
    __m128i gtMask = vreinterpretq_s32_u32(vcgtq_s32(b, zer0));
    __m128i neg = vnegq_s32(a);
    __m128i tmp = vandq_s32(a, gtMask);
    return vorrq_s32(tmp, vandq_s32(neg, ltMask));
}

// 16-bit-lane variant of _mm_sign_epi32 (SSSE3 _mm_sign_epi16 semantics):
//   b < 0 -> -a,   b > 0 -> a,   b == 0 -> 0.
FORCE_INLINE __m128i _mm_sign_epi16(__m128i a, __m128i b)
{
    int16x8_t zer0 = vdupq_n_s16(0);
    int16x8_t ltMask =
        vreinterpretq_s16_u16(vcltq_s16(vreinterpretq_s16_s32(b), zer0));
    int16x8_t gtMask =
        vreinterpretq_s16_u16(vcgtq_s16(vreinterpretq_s16_s32(b), zer0));
    int16x8_t neg = vnegq_s16(vreinterpretq_s16_s32(a));
    int16x8_t tmp = vandq_s16(vreinterpretq_s16_s32(a), gtMask);
    return vreinterpretq_s32_s16(vorrq_s16(tmp, vandq_s16(neg, ltMask)));
}

// Adds the four single-precision, floating-point values of a and b.
//
// r0 := a0 + b0
// r1 := a1 + b1
// r2 := a2 + b2
// r3 := a3 + b3
//
// https://msdn.microsoft.com/en-us/library/vstudio/c9848chc(v=vs.100).aspx
FORCE_INLINE __m128 _mm_add_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_f32(
        vaddq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// adds the scalar single-precision floating point values of a and b.
// https://msdn.microsoft.com/en-us/library/be94x2y6(v=vs.100).aspx
FORCE_INLINE __m128 _mm_add_ss(__m128 a, __m128 b)
{
    float32_t b0 = vgetq_lane_f32(vreinterpretq_f32_m128(b), 0);
    float32x4_t value = vsetq_lane_f32(b0, vdupq_n_f32(0), 0);
    // the upper lanes of the result keep the upper lanes of a (we add zero
    // into them).  Use the file's reinterpret helper for consistency.
    return vreinterpretq_m128_f32(
        vaddq_f32(vreinterpretq_f32_m128(a), value));
}

// Adds the 2 signed or unsigned 64-bit integers in a to the 2 signed or
// unsigned 64-bit integers in b.  (Comment previously claimed 4 x 32-bit.)
// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx
FORCE_INLINE __m128i _mm_add_epi64(__m128i a, __m128i b)
{
    // Use the _m128i reinterpret helpers like _mm_sub_epi64 does, instead of
    // the raw s32<->s64 casts; behavior is identical.
    return vreinterpretq_m128i_s64(
        vaddq_s64(vreinterpretq_s64_m128i(a), vreinterpretq_s64_m128i(b)));
}

// Adds the 4 signed or unsigned 32-bit integers in a to the 4 signed or
// unsigned 32-bit integers in b.
//
// r0 := a0 + b0
// r1 := a1 + b1
// r2 := a2 + b2
// r3 := a3 + b3
//
// https://msdn.microsoft.com/en-us/library/vstudio/09xs4fkk(v=vs.100).aspx
FORCE_INLINE __m128i _mm_add_epi32(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s32(
        vaddq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}

// Adds the 8 signed or unsigned 16-bit integers in a to the 8 signed or
// unsigned 16-bit integers in b.
// https://msdn.microsoft.com/en-us/library/fceha5k4(v=vs.100).aspx
// Lane-wise 16-bit addition, wrapping on overflow.
FORCE_INLINE __m128i _mm_add_epi16(__m128i a, __m128i b)
{
    int16x8_t lhs = vreinterpretq_s16_m128i(a);
    int16x8_t rhs = vreinterpretq_s16_m128i(b);
    return vreinterpretq_m128i_s16(vaddq_s16(lhs, rhs));
}

// Adds the 16 signed or unsigned 8-bit integers in a to the 16 signed or
// unsigned 8-bit integers in b (wrapping on overflow).
// https://technet.microsoft.com/en-us/subscriptions/yc7tcyzs(v=vs.90)
FORCE_INLINE __m128i _mm_add_epi8(__m128i a, __m128i b)
{
    int8x16_t lhs = vreinterpretq_s8_m128i(a);
    int8x16_t rhs = vreinterpretq_s8_m128i(b);
    return vreinterpretq_m128i_s8(vaddq_s8(lhs, rhs));
}

// Adds the 8 signed 16-bit integers in a to the 8 signed 16-bit integers in b
// and saturates.
//
// r0 := SignedSaturate(a0 + b0)
// r1 := SignedSaturate(a1 + b1)
// ...
// r7 := SignedSaturate(a7 + b7)
//
// https://msdn.microsoft.com/en-us/library/1a306ef8(v=vs.100).aspx
FORCE_INLINE __m128i _mm_adds_epi16(__m128i a, __m128i b)
{
    int16x8_t lhs = vreinterpretq_s16_m128i(a);
    int16x8_t rhs = vreinterpretq_s16_m128i(b);
    return vreinterpretq_m128i_s16(vqaddq_s16(lhs, rhs));
}

// Adds the 16 unsigned 8-bit integers in a to the 16 unsigned 8-bit integers
// in b and saturates.
// https://msdn.microsoft.com/en-us/library/9hahyddy(v=vs.100).aspx
FORCE_INLINE __m128i _mm_adds_epu8(__m128i a, __m128i b)
{
    uint8x16_t lhs = vreinterpretq_u8_m128i(a);
    uint8x16_t rhs = vreinterpretq_u8_m128i(b);
    return vreinterpretq_m128i_u8(vqaddq_u8(lhs, rhs));
}

// Multiplies the 8 signed or unsigned 16-bit integers from a by the 8 signed or
// unsigned 16-bit integers from b.
//
// r0 := (a0 * b0)[15:0]
// r1 := (a1 * b1)[15:0]
// ...
// r7 := (a7 * b7)[15:0]
//
// https://msdn.microsoft.com/en-us/library/vstudio/9ks1472s(v=vs.100).aspx
FORCE_INLINE __m128i _mm_mullo_epi16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s16(
        vmulq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}

// Multiplies the 4 signed or unsigned 32-bit integers from a by the 4 signed or
// unsigned 32-bit integers from b, keeping the low 32 bits of each product.
// https://msdn.microsoft.com/en-us/library/vstudio/bb531409(v=vs.100).aspx
FORCE_INLINE __m128i _mm_mullo_epi32(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s32(
        vmulq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}

// Multiplies the four single-precision, floating-point values of a and b.
//
// r0 := a0 * b0
// r1 := a1 * b1
// r2 := a2 * b2
// r3 := a3 * b3
//
// https://msdn.microsoft.com/en-us/library/vstudio/22kbk6t9(v=vs.100).aspx
FORCE_INLINE __m128 _mm_mul_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_f32(
        vmulq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// Multiply the low unsigned 32-bit integers from each packed 64-bit element in
// a and b, and store the unsigned 64-bit results in dst.
//
// r0 := (uint32_t*)a0 * (uint32_t*)b0
// r1 := (uint32_t*)a2 * (uint32_t*)b2
#if 1 /* C version */
// Scalar fallback: widen lanes 0 and 2 of each input to 64 bits and multiply.
FORCE_INLINE __m128i _mm_mul_epu32(__m128i a, __m128i b)
{
    __m128i d;
    vreinterpretq_nth_u64_m128i(d, 0) =
        (uint64_t)(vreinterpretq_nth_u32_m128i(a, 0)) *
        (uint64_t)(vreinterpretq_nth_u32_m128i(b, 0));
    vreinterpretq_nth_u64_m128i(d, 1) =
        (uint64_t)(vreinterpretq_nth_u32_m128i(a, 2)) *
        (uint64_t)(vreinterpretq_nth_u32_m128i(b, 2));
    return d;
}
#else /* Neon version */
// Default to c version until casting can be sorted out on neon version.
// (Otherwise requires compiling with -fpermissive) Also unclear whether neon
// version actually performs better.
FORCE_INLINE __m128i _mm_mul_epu32(__m128i a, __m128i b)
{
    // shuffle: 0, 1, 2, 3 -> 0, 2, 1, 3 */
    __m128i const a_shuf =
        *(__m128i *) (&vzip_u32(vget_low_u32(vreinterpretq_u32_m128i(a)),
                                vget_high_u32(vreinterpretq_u32_m128i(a))));
    __m128i const b_shuf =
        *(__m128i *) (&vzip_u32(vget_low_u32(vreinterpretq_u32_m128i(b)),
                                vget_high_u32(vreinterpretq_u32_m128i(b))));
    // Multiply low word (32 bit) against low word (32 bit) and high word (32
    // bit) against high word (32 bit). Pack both results (64 bit) into 128 bit
    // register and return result.
    return vreinterpretq_m128i_u64(
        vmull_u32(vget_low_u32(vreinterpretq_u32_m128i(a_shuf)),
                  vget_low_u32(vreinterpretq_u32_m128i(b_shuf))));
}
#endif

// Multiplies the 8 signed 16-bit integers from a by the 8 signed 16-bit
// integers from b, then horizontally adds adjacent 32-bit products
// (SSE2 pmaddwd).
//
// r0 := (a0 * b0) + (a1 * b1)
// r1 := (a2 * b2) + (a3 * b3)
// r2 := (a4 * b4) + (a5 * b5)
// r3 := (a6 * b6) + (a7 * b7)
// https://msdn.microsoft.com/en-us/library/yht36sa6(v=vs.90).aspx
FORCE_INLINE __m128i _mm_madd_epi16(__m128i a, __m128i b)
{
    // Widening multiplies of the low and high halves ...
    int32x4_t low = vmull_s16(vget_low_s16(vreinterpretq_s16_m128i(a)),
                              vget_low_s16(vreinterpretq_s16_m128i(b)));
    int32x4_t high = vmull_s16(vget_high_s16(vreinterpretq_s16_m128i(a)),
                               vget_high_s16(vreinterpretq_s16_m128i(b)));

    // ... then pairwise-add adjacent 32-bit products.
    int32x2_t low_sum = vpadd_s32(vget_low_s32(low), vget_high_s32(low));
    int32x2_t high_sum = vpadd_s32(vget_low_s32(high), vget_high_s32(high));

    // NOTE(review): helper name looks reversed (vreinterpretq_m128i_s32 is
    // the usual direction for producing a __m128i); compiles here only if
    // __m128i and int32x4_t are the same typedef — confirm.
    return vreinterpretq_s32_m128i(vcombine_s32(low_sum, high_sum));
}

// Computes the absolute difference of the 16 unsigned 8-bit integers from a
// and the 16 unsigned 8-bit integers from b.
//
// Return Value
// Sums the upper 8 differences and lower 8 differences and packs the
// resulting 2 unsigned 16-bit integers into the upper and lower 64-bit
// elements.
//
// r0 := abs(a0 - b0) + abs(a1 - b1) +...+ abs(a7 - b7)
// r1 := 0x0
// r2 := 0x0
// r3 := 0x0
// r4 := abs(a8 - b8) + abs(a9 - b9) +...+ abs(a15 - b15)
// r5 := 0x0
// r6 := 0x0
// r7 := 0x0
FORCE_INLINE __m128i _mm_sad_epu8(__m128i a, __m128i b)
{
    // vabdq_u8 = per-byte absolute difference; vpaddlq_u8 widens and adds
    // adjacent byte pairs to eight 16-bit partial sums.
    uint16x8_t t = vpaddlq_u8(vabdq_u8((uint8x16_t) a, (uint8x16_t) b));
    // Lane indexing (t[i]) is a GCC/Clang vector extension.
    uint16_t r0 = t[0] + t[1] + t[2] + t[3];
    uint16_t r4 = t[4] + t[5] + t[6] + t[7];
    uint16x8_t r = vsetq_lane_u16(r0, vdupq_n_u16(0), 0);
    return (__m128i) vsetq_lane_u16(r4, r, 4);
}

// Divides the four single-precision, floating-point values of a and b.
//
// r0 := a0 / b0
// r1 := a1 / b1
// r2 := a2 / b2
// r3 := a3 / b3
//
// https://msdn.microsoft.com/en-us/library/edaw8147(v=vs.100).aspx
// NOTE: approximate — implemented as a * (1/b) using the reciprocal estimate
// plus one Newton-Raphson refinement step, not an IEEE-exact division.
FORCE_INLINE __m128 _mm_div_ps(__m128 a, __m128 b)
{
    float32x4_t recip0 = vrecpeq_f32(vreinterpretq_f32_m128(b));
    float32x4_t recip1 =
        vmulq_f32(recip0, vrecpsq_f32(recip0, vreinterpretq_f32_m128(b)));
    return vreinterpretq_m128_f32(vmulq_f32(vreinterpretq_f32_m128(a), recip1));
}

// Divides the scalar single-precision floating point value of a by b.
// https://msdn.microsoft.com/en-us/library/4y73xa49(v=vs.100).aspx
// Lane 0 gets a0/b0 (approximate, see _mm_div_ps); upper lanes keep a.
FORCE_INLINE __m128 _mm_div_ss(__m128 a, __m128 b)
{
    float32_t value =
        vgetq_lane_f32(vreinterpretq_f32_m128(_mm_div_ps(a, b)), 0);
    return vreinterpretq_m128_f32(
        vsetq_lane_f32(value, vreinterpretq_f32_m128(a), 0));
}

// This version does additional iterations to improve accuracy. Between 1 and 4
// recommended. Computes the approximations of reciprocals of the four
// single-precision, floating-point values of a.
// https://msdn.microsoft.com/en-us/library/vstudio/796k1tty(v=vs.100).aspx
// Reciprocal estimate refined by n Newton-Raphson steps (1..4 recommended).
FORCE_INLINE __m128 recipq_newton(__m128 in, int n)
{
    float32x4_t x = vreinterpretq_f32_m128(in);
    float32x4_t est = vrecpeq_f32(x);
    int step;
    for (step = 0; step < n; ++step)
        est = vmulq_f32(est, vrecpsq_f32(est, x));
    return vreinterpretq_m128_f32(est);
}

// Computes the approximations of reciprocals of the four single-precision,
// floating-point values of a (estimate plus one refinement step).
// https://msdn.microsoft.com/en-us/library/vstudio/796k1tty(v=vs.100).aspx
FORCE_INLINE __m128 _mm_rcp_ps(__m128 in)
{
    float32x4_t x = vreinterpretq_f32_m128(in);
    float32x4_t est = vrecpeq_f32(x);
    est = vmulq_f32(est, vrecpsq_f32(est, x));
    return vreinterpretq_m128_f32(est);
}

// Computes the approximations of square roots of the four single-precision,
// floating-point values of a, as 1 / rsqrt(a) using one estimate each.
//
// r0 := sqrt(a0)
// r1 := sqrt(a1)
// r2 := sqrt(a2)
// r3 := sqrt(a3)
//
// https://msdn.microsoft.com/en-us/library/vstudio/8z67bwwk(v=vs.100).aspx
FORCE_INLINE __m128 _mm_sqrt_ps(__m128 in)
{
    // ??? use step versions of both sqrt and recip for better accuracy?
    float32x4_t inv_root = vrsqrteq_f32(vreinterpretq_f32_m128(in));
    return vreinterpretq_m128_f32(vrecpeq_f32(inv_root));
}

// Computes the approximation of the square root of the scalar single-precision
// floating point value of in; upper lanes keep in.
// https://msdn.microsoft.com/en-us/library/ahfsc22d(v=vs.100).aspx
FORCE_INLINE __m128 _mm_sqrt_ss(__m128 in)
{
    float32x4_t roots = vreinterpretq_f32_m128(_mm_sqrt_ps(in));
    float32_t lane0 = vgetq_lane_f32(roots, 0);
    return vreinterpretq_m128_f32(
        vsetq_lane_f32(lane0, vreinterpretq_f32_m128(in), 0));
}

// Computes the approximations of the reciprocal square roots of the four
// single-precision floating point values of in.
// https://msdn.microsoft.com/en-us/library/22hfsh53(v=vs.100).aspx
FORCE_INLINE __m128 _mm_rsqrt_ps(__m128 in)
{
    float32x4_t x = vreinterpretq_f32_m128(in);
    return vreinterpretq_m128_f32(vrsqrteq_f32(x));
}

// Computes the maximums of the four single-precision, floating-point values of
// a and b.
// https://msdn.microsoft.com/en-us/library/vstudio/ff5d607a(v=vs.100).aspx
FORCE_INLINE __m128 _mm_max_ps(__m128 a, __m128 b)
{
    float32x4_t lhs = vreinterpretq_f32_m128(a);
    float32x4_t rhs = vreinterpretq_f32_m128(b);
    return vreinterpretq_m128_f32(vmaxq_f32(lhs, rhs));
}

// Computes the minima of the four single-precision, floating-point values of a
// and b.
// https://msdn.microsoft.com/en-us/library/vstudio/wh13kadz(v=vs.100).aspx
FORCE_INLINE __m128 _mm_min_ps(__m128 a, __m128 b)
{
    float32x4_t lhs = vreinterpretq_f32_m128(a);
    float32x4_t rhs = vreinterpretq_f32_m128(b);
    return vreinterpretq_m128_f32(vminq_f32(lhs, rhs));
}

// Computes the maximum of the two lower scalar single-precision floating point
// values of a and b; upper lanes keep a.
// https://msdn.microsoft.com/en-us/library/s6db5esz(v=vs.100).aspx
FORCE_INLINE __m128 _mm_max_ss(__m128 a, __m128 b)
{
    float32x4_t maxed =
        vmaxq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
    float32_t lane0 = vgetq_lane_f32(maxed, 0);
    return vreinterpretq_m128_f32(
        vsetq_lane_f32(lane0, vreinterpretq_f32_m128(a), 0));
}

// Computes the minimum of the two lower scalar single-precision floating point
// values of a and b; upper lanes keep a.
// https://msdn.microsoft.com/en-us/library/0a9y7xaa(v=vs.100).aspx
FORCE_INLINE __m128 _mm_min_ss(__m128 a, __m128 b)
{
    float32x4_t minned =
        vminq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
    float32_t lane0 = vgetq_lane_f32(minned, 0);
    return vreinterpretq_m128_f32(
        vsetq_lane_f32(lane0, vreinterpretq_f32_m128(a), 0));
}

// Computes the pairwise maxima of the 16 unsigned 8-bit integers from a and the
// 16 unsigned 8-bit integers from b.
// https://msdn.microsoft.com/en-us/library/st6634za(v=vs.100).aspx
FORCE_INLINE __m128i _mm_max_epu8(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u8(
        vmaxq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
}

// Computes the pairwise minima of the 16 unsigned 8-bit integers from a and the
// 16 unsigned 8-bit integers from b.
// https://msdn.microsoft.com/ko-kr/library/17k8cf58(v=vs.100).aspx
FORCE_INLINE __m128i _mm_min_epu8(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u8(
        vminq_u8(vreinterpretq_u8_m128i(a), vreinterpretq_u8_m128i(b)));
}

// Computes the pairwise minima of the 8 signed 16-bit integers from a and the 8
// signed 16-bit integers from b.
// https://msdn.microsoft.com/en-us/library/vstudio/6te997ew(v=vs.100).aspx
FORCE_INLINE __m128i _mm_min_epi16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s16(
        vminq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}

// Computes the pairwise maxima of the 8 signed 16-bit integers from a and the 8
// signed 16-bit integers from b.
// https://msdn.microsoft.com/en-us/LIBRary/3x060h7c(v=vs.100).aspx
FORCE_INLINE __m128i _mm_max_epi16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s16(
        vmaxq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}

// epi versions of min/max
// Computes the pairwise maximums of the four signed 32-bit integer values of a
// and b.
//
// A 128-bit parameter that can be defined with the following equations:
// r0 := (a0 > b0) ? a0 : b0
// r1 := (a1 > b1) ? a1 : b1
// r2 := (a2 > b2) ? a2 : b2
// r3 := (a3 > b3) ? a3 : b3
//
// https://msdn.microsoft.com/en-us/library/vstudio/bb514055(v=vs.100).aspx
FORCE_INLINE __m128i _mm_max_epi32(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s32(
        vmaxq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}

// Computes the pairwise minima of the four signed 32-bit integer values of a
// and b.
//
// A 128-bit parameter that can be defined with the following equations:
// r0 := (a0 < b0) ? a0 : b0
// r1 := (a1 < b1) ? a1 : b1
// r2 := (a2 < b2) ? a2 : b2
// r3 := (a3 < b3) ? a3 : b3
//
// https://msdn.microsoft.com/en-us/library/vstudio/bb531476(v=vs.100).aspx
FORCE_INLINE __m128i _mm_min_epi32(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_s32(
        vminq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}

// Multiplies the 8 signed 16-bit integers from a by the 8 signed 16-bit
// integers from b, keeping the high 16 bits of each 32-bit product.
//
// r0 := (a0 * b0)[31:16]
// r1 := (a1 * b1)[31:16]
// ...
// r7 := (a7 * b7)[31:16]
//
// https://msdn.microsoft.com/en-us/library/vstudio/59hddw1d(v=vs.100).aspx
FORCE_INLINE __m128i _mm_mulhi_epi16(__m128i a, __m128i b)
{
    /* FIXME: issue with large values because of result saturation */
    // int16x8_t ret = vqdmulhq_s16(vreinterpretq_s16_m128i(a),
    // vreinterpretq_s16_m128i(b)); /* =2*a*b */ return
    // vreinterpretq_m128i_s16(vshrq_n_s16(ret, 1));
    // Widening multiplies of the low and high halves produce full 32-bit
    // products; vuzpq_u16 then de-interleaves their 16-bit halves, so
    // r.val[1] collects exactly the high half of every product.
    int16x4_t a3210 = vget_low_s16(vreinterpretq_s16_m128i(a));
    int16x4_t b3210 = vget_low_s16(vreinterpretq_s16_m128i(b));
    int32x4_t ab3210 = vmull_s16(a3210, b3210); /* 3333222211110000 */
    int16x4_t a7654 = vget_high_s16(vreinterpretq_s16_m128i(a));
    int16x4_t b7654 = vget_high_s16(vreinterpretq_s16_m128i(b));
    int32x4_t ab7654 = vmull_s16(a7654, b7654); /* 7777666655554444 */
    uint16x8x2_t r =
        vuzpq_u16(vreinterpretq_u16_s32(ab3210), vreinterpretq_u16_s32(ab7654));
    return vreinterpretq_m128i_u16(r.val[1]);
}

// Computes pairwise add of each argument as single-precision, floating-point
// values a and b.
// https://msdn.microsoft.com/en-us/library/yd9wecaa.aspx
FORCE_INLINE __m128 _mm_hadd_ps(__m128 a, __m128 b)
{
#if defined(__aarch64__)
    return vreinterpretq_m128_f32(vpaddq_f32(
        vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));  // AArch64
#else
    // ARMv7 has no 128-bit pairwise add; do it on 64-bit halves and combine.
    float32x2_t a10 = vget_low_f32(vreinterpretq_f32_m128(a));
    float32x2_t a32 = vget_high_f32(vreinterpretq_f32_m128(a));
    float32x2_t b10 = vget_low_f32(vreinterpretq_f32_m128(b));
    float32x2_t b32 = vget_high_f32(vreinterpretq_f32_m128(b));
    return vreinterpretq_m128_f32(
        vcombine_f32(vpadd_f32(a10, a32), vpadd_f32(b10, b32)));
#endif
}

// ******************************************
// Compare operations
// ******************************************
// All float compares below return a per-lane mask: all-ones (0xffffffff)
// where the predicate holds, all-zeros otherwise.

// Compares for less than
// https://msdn.microsoft.com/en-us/library/vstudio/f330yhc8(v=vs.100).aspx
FORCE_INLINE __m128 _mm_cmplt_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_u32(
        vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// Compares for greater than.
//
// r0 := (a0 > b0) ? 0xffffffff : 0x0
// r1 := (a1 > b1) ? 0xffffffff : 0x0
// r2 := (a2 > b2) ? 0xffffffff : 0x0
// r3 := (a3 > b3) ? 0xffffffff : 0x0
//
// https://msdn.microsoft.com/en-us/library/vstudio/11dy102s(v=vs.100).aspx
FORCE_INLINE __m128 _mm_cmpgt_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_u32(
        vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// Compares for greater than or equal.
// https://msdn.microsoft.com/en-us/library/vstudio/fs813y2t(v=vs.100).aspx
FORCE_INLINE __m128 _mm_cmpge_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_u32(
        vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// Compares for less than or equal.
//
// r0 := (a0 <= b0) ? 0xffffffff : 0x0
// r1 := (a1 <= b1) ? 0xffffffff : 0x0
// r2 := (a2 <= b2) ? 0xffffffff : 0x0
// r3 := (a3 <= b3) ? 0xffffffff : 0x0
//
// https://msdn.microsoft.com/en-us/library/vstudio/1s75w83z(v=vs.100).aspx
FORCE_INLINE __m128 _mm_cmple_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_u32(
        vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// Compares for equality.
// https://msdn.microsoft.com/en-us/library/vstudio/36aectz5(v=vs.100).aspx
FORCE_INLINE __m128 _mm_cmpeq_ps(__m128 a, __m128 b)
{
    return vreinterpretq_m128_u32(
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
}

// Compares the 16 signed or unsigned 8-bit integers in a and the 16 signed or
// unsigned 8-bit integers in b for equality.
// https://msdn.microsoft.com/en-us/library/windows/desktop/bz5xk21a(v=vs.90).aspx
FORCE_INLINE __m128i _mm_cmpeq_epi8(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u8(
        vceqq_s8(vreinterpretq_s8_m128i(a), vreinterpretq_s8_m128i(b)));
}

// Compares the 8 signed or unsigned 16-bit integers in a and the 8 signed or
// unsigned 16-bit integers in b for equality.
// https://msdn.microsoft.com/en-us/library/2ay060te(v=vs.100).aspx
FORCE_INLINE __m128i _mm_cmpeq_epi16(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u16(
        vceqq_s16(vreinterpretq_s16_m128i(a), vreinterpretq_s16_m128i(b)));
}

// Compare packed 32-bit integers in a and b for equality, and store the results
// in dst
FORCE_INLINE __m128i _mm_cmpeq_epi32(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u32(
        vceqq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}

// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers
// in b for lesser than.
// https://msdn.microsoft.com/en-us/library/windows/desktop/9s46csht(v=vs.90).aspx
FORCE_INLINE __m128i _mm_cmplt_epi8(__m128i a, __m128i b)
{
    int8x16_t lhs = vreinterpretq_s8_m128i(a);
    int8x16_t rhs = vreinterpretq_s8_m128i(b);
    return vreinterpretq_m128i_u8(vcltq_s8(lhs, rhs));
}

// Compares the 16 signed 8-bit integers in a and the 16 signed 8-bit integers
// in b for greater than.
//
// r0 := (a0 > b0) ? 0xff : 0x0
// r1 := (a1 > b1) ? 0xff : 0x0
// ...
// r15 := (a15 > b15) ? 0xff : 0x0
//
// https://msdn.microsoft.com/zh-tw/library/wf45zt2b(v=vs.100).aspx
FORCE_INLINE __m128i _mm_cmpgt_epi8(__m128i a, __m128i b)
{
    int8x16_t lhs = vreinterpretq_s8_m128i(a);
    int8x16_t rhs = vreinterpretq_s8_m128i(b);
    return vreinterpretq_m128i_u8(vcgtq_s8(lhs, rhs));
}

// Compares the 8 signed 16-bit integers in a and the 8 signed 16-bit integers
// in b for greater than.
//
// r0 := (a0 > b0) ? 0xffff : 0x0
// r1 := (a1 > b1) ? 0xffff : 0x0
// ...
// r7 := (a7 > b7) ? 0xffff : 0x0
//
// https://technet.microsoft.com/en-us/library/xd43yfsa(v=vs.100).aspx
FORCE_INLINE __m128i _mm_cmpgt_epi16(__m128i a, __m128i b)
{
    int16x8_t lhs = vreinterpretq_s16_m128i(a);
    int16x8_t rhs = vreinterpretq_s16_m128i(b);
    return vreinterpretq_m128i_u16(vcgtq_s16(lhs, rhs));
}


// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers
// in b for less than.
// https://msdn.microsoft.com/en-us/library/vstudio/4ak0bf5d(v=vs.100).aspx
FORCE_INLINE __m128i _mm_cmplt_epi32(__m128i a, __m128i b)
{
    int32x4_t lhs = vreinterpretq_s32_m128i(a);
    int32x4_t rhs = vreinterpretq_s32_m128i(b);
    return vreinterpretq_m128i_u32(vcltq_s32(lhs, rhs));
}

// Compares the 4 signed 32-bit integers in a and the 4 signed 32-bit integers
// in b for greater than.
// https://msdn.microsoft.com/en-us/library/vstudio/1s9f2z0y(v=vs.100).aspx
FORCE_INLINE __m128i _mm_cmpgt_epi32(__m128i a, __m128i b)
{
    return vreinterpretq_m128i_u32(
        vcgtq_s32(vreinterpretq_s32_m128i(a), vreinterpretq_s32_m128i(b)));
}

// Compares the four 32-bit floats in a and b to check if any values are NaN.
// Ordered compare between each value returns true for "orderable" and false for
// "not orderable" (NaN).
// https://msdn.microsoft.com/en-us/library/vstudio/0h9w00fx(v=vs.100).aspx see
// also:
// http://stackoverflow.com/questions/8627331/what-does-ordered-unordered-comparison-mean
// http://stackoverflow.com/questions/29349621/neon-isnanval-intrinsics
FORCE_INLINE __m128 _mm_cmpord_ps(__m128 a, __m128 b)
{
    // Note: NEON does not have ordered compare builtin
    // Need to compare a eq a and b eq b to check for NaN
    // (x == x is false only when x is NaN)
    // Do AND of results to get final
    uint32x4_t ceqaa =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
    uint32x4_t ceqbb =
        vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
    return vreinterpretq_m128_u32(vandq_u32(ceqaa, ceqbb));
}

// Compares the lower single-precision floating point scalar values of a and b
// using a less than operation. :
// https://msdn.microsoft.com/en-us/library/2kwe606b(v=vs.90).aspx Important
// note!! The documentation on MSDN is incorrect! If either of the values is a
// NAN the docs say you will get a one, but in fact, it will return a zero!!
FORCE_INLINE int _mm_comilt_ss(__m128 a, __m128 b)
{
    // The not-NaN masks force a 0 result whenever either lane 0 is NaN.
    uint32x4_t a_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
    uint32x4_t b_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
    uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
    uint32x4_t a_lt_b =
        vcltq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
    return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_lt_b), 0) != 0) ? 1 : 0;
}

// Compares the lower single-precision floating point scalar values of a and b
// using a greater than operation. :
// https://msdn.microsoft.com/en-us/library/b0738e0t(v=vs.100).aspx
FORCE_INLINE int _mm_comigt_ss(__m128 a, __m128 b)
{
    // return vgetq_lane_u32(vcgtq_f32(vreinterpretq_f32_m128(a),
    // vreinterpretq_f32_m128(b)), 0);
    uint32x4_t a_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
    uint32x4_t b_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
    uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
    uint32x4_t a_gt_b =
        vcgtq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
    return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_gt_b), 0) != 0) ? 1 : 0;
}

// Compares the lower single-precision floating point scalar values of a and b
// using a less than or equal operation. :
// https://msdn.microsoft.com/en-us/library/1w4t7c57(v=vs.90).aspx
FORCE_INLINE int _mm_comile_ss(__m128 a, __m128 b)
{
    // return vgetq_lane_u32(vcleq_f32(vreinterpretq_f32_m128(a),
    // vreinterpretq_f32_m128(b)), 0);
    uint32x4_t a_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
    uint32x4_t b_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
    uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
    uint32x4_t a_le_b =
        vcleq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
    return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_le_b), 0) != 0) ? 1 : 0;
}

// Compares the lower single-precision floating point scalar values of a and b
// using a greater than or equal operation. :
// https://msdn.microsoft.com/en-us/library/8t80des6(v=vs.100).aspx
FORCE_INLINE int _mm_comige_ss(__m128 a, __m128 b)
{
    // return vgetq_lane_u32(vcgeq_f32(vreinterpretq_f32_m128(a),
    // vreinterpretq_f32_m128(b)), 0);
    uint32x4_t a_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
    uint32x4_t b_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
    uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
    uint32x4_t a_ge_b =
        vcgeq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
    return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_ge_b), 0) != 0) ? 1 : 0;
}

// Compares the lower single-precision floating point scalar values of a and b
// using an equality operation. :
// https://msdn.microsoft.com/en-us/library/93yx2h2b(v=vs.100).aspx
FORCE_INLINE int _mm_comieq_ss(__m128 a, __m128 b)
{
    // return vgetq_lane_u32(vceqq_f32(vreinterpretq_f32_m128(a),
    // vreinterpretq_f32_m128(b)), 0);
    uint32x4_t a_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
    uint32x4_t b_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
    uint32x4_t a_and_b_not_nan = vandq_u32(a_not_nan, b_not_nan);
    uint32x4_t a_eq_b =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b));
    return (vgetq_lane_u32(vandq_u32(a_and_b_not_nan, a_eq_b), 0) != 0) ? 1 : 0;
}

// Compares the lower single-precision floating point scalar values of a and b
// using an inequality operation. :
// https://msdn.microsoft.com/en-us/library/bafh5e0a(v=vs.90).aspx
// Unlike the other comi* helpers, this one returns 1 when either input is
// NaN (NaN lanes are "unordered", hence unequal).
FORCE_INLINE int _mm_comineq_ss(__m128 a, __m128 b)
{
    // return !vgetq_lane_u32(vceqq_f32(vreinterpretq_f32_m128(a),
    // vreinterpretq_f32_m128(b)), 0);
    uint32x4_t a_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(a));
    uint32x4_t b_not_nan =
        vceqq_f32(vreinterpretq_f32_m128(b), vreinterpretq_f32_m128(b));
    uint32x4_t a_or_b_nan = vmvnq_u32(vandq_u32(a_not_nan, b_not_nan));
    uint32x4_t a_neq_b = vmvnq_u32(
        vceqq_f32(vreinterpretq_f32_m128(a), vreinterpretq_f32_m128(b)));
    return (vgetq_lane_u32(vorrq_u32(a_or_b_nan, a_neq_b), 0) != 0) ? 1 : 0;
}

// according to the documentation, these intrinsics behave the same as the
// non-'u' versions. We'll just alias them here.
#define _mm_ucomilt_ss _mm_comilt_ss
#define _mm_ucomile_ss _mm_comile_ss
#define _mm_ucomigt_ss _mm_comigt_ss
#define _mm_ucomige_ss _mm_comige_ss
#define _mm_ucomieq_ss _mm_comieq_ss
#define _mm_ucomineq_ss _mm_comineq_ss

// ******************************************
// Conversions
// ******************************************

// Converts the four single-precision, floating-point values of a to signed
// 32-bit integer values using truncate.
// https://msdn.microsoft.com/en-us/library/vstudio/1h005y6x(v=vs.100).aspx
// NOTE(review): NEON vcvtq saturates NaN/out-of-range inputs, whereas x86
// returns the indefinite value 0x80000000 — confirm callers tolerate this.
FORCE_INLINE __m128i _mm_cvttps_epi32(__m128 a)
{
    return vreinterpretq_m128i_s32(vcvtq_s32_f32(vreinterpretq_f32_m128(a)));
}

// Converts the four signed 32-bit integer values of a to single-precision,
// floating-point values
// https://msdn.microsoft.com/en-us/library/vstudio/36bwxcx5(v=vs.100).aspx
FORCE_INLINE __m128 _mm_cvtepi32_ps(__m128i a)
{
    return vreinterpretq_m128_f32(vcvtq_f32_s32(vreinterpretq_s32_m128i(a)));
}

// Converts the four unsigned 8-bit integers in the lower 32 bits to four
// unsigned 32-bit integers.
++// https://msdn.microsoft.com/en-us/library/bb531467%28v=vs.100%29.aspx ++FORCE_INLINE __m128i _mm_cvtepu8_epi32(__m128i a) ++{ ++ uint8x16_t u8x16 = vreinterpretq_u8_s32(a); /* xxxx xxxx xxxx DCBA */ ++ uint16x8_t u16x8 = vmovl_u8(vget_low_u8(u8x16)); /* 0x0x 0x0x 0D0C 0B0A */ ++ uint32x4_t u32x4 = vmovl_u16(vget_low_u16(u16x8)); /* 000D 000C 000B 000A */ ++ return vreinterpretq_s32_u32(u32x4); ++} ++ ++// Converts the four signed 16-bit integers in the lower 64 bits to four signed ++// 32-bit integers. ++// https://msdn.microsoft.com/en-us/library/bb514079%28v=vs.100%29.aspx ++FORCE_INLINE __m128i _mm_cvtepi16_epi32(__m128i a) ++{ ++ return vreinterpretq_m128i_s32( ++ vmovl_s16(vget_low_s16(vreinterpretq_s16_m128i(a)))); ++} ++ ++// Converts the four single-precision, floating-point values of a to signed ++// 32-bit integer values. ++// ++// r0 := (int) a0 ++// r1 := (int) a1 ++// r2 := (int) a2 ++// r3 := (int) a3 ++// ++// https://msdn.microsoft.com/en-us/library/vstudio/xdc42k5e(v=vs.100).aspx ++// *NOTE*. The default rounding mode on SSE is 'round to even', which ArmV7-A ++// does not support! It is supported on ARMv8-A however. 
++FORCE_INLINE __m128i _mm_cvtps_epi32(__m128 a) ++{ ++#if defined(__aarch64__) ++ return vcvtnq_s32_f32(a); ++#else ++ uint32x4_t signmask = vdupq_n_u32(0x80000000); ++ float32x4_t half = vbslq_f32(signmask, vreinterpretq_f32_m128(a), ++ vdupq_n_f32(0.5f)); /* +/- 0.5 */ ++ int32x4_t r_normal = vcvtq_s32_f32(vaddq_f32( ++ vreinterpretq_f32_m128(a), half)); /* round to integer: [a + 0.5]*/ ++ int32x4_t r_trunc = ++ vcvtq_s32_f32(vreinterpretq_f32_m128(a)); /* truncate to integer: [a] */ ++ int32x4_t plusone = vreinterpretq_s32_u32(vshrq_n_u32( ++ vreinterpretq_u32_s32(vnegq_s32(r_trunc)), 31)); /* 1 or 0 */ ++ int32x4_t r_even = vbicq_s32(vaddq_s32(r_trunc, plusone), ++ vdupq_n_s32(1)); /* ([a] + {0,1}) & ~1 */ ++ float32x4_t delta = vsubq_f32( ++ vreinterpretq_f32_m128(a), ++ vcvtq_f32_s32(r_trunc)); /* compute delta: delta = (a - [a]) */ ++ uint32x4_t is_delta_half = vceqq_f32(delta, half); /* delta == +/- 0.5 */ ++ return vreinterpretq_m128i_s32(vbslq_s32(is_delta_half, r_even, r_normal)); ++#endif ++} ++ ++// Moves the least significant 32 bits of a to a 32-bit integer. ++// https://msdn.microsoft.com/en-us/library/5z7a9642%28v=vs.90%29.aspx ++FORCE_INLINE int _mm_cvtsi128_si32(__m128i a) ++{ ++ return vgetq_lane_s32(vreinterpretq_s32_m128i(a), 0); ++} ++ ++// Extracts the low order 64-bit integer from the parameter. ++// https://msdn.microsoft.com/en-us/library/bb531384(v=vs.120).aspx ++FORCE_INLINE uint64_t _mm_cvtsi128_si64(__m128i a) ++{ ++ return vgetq_lane_s64(vreinterpretq_s64_m128i(a), 0); ++} ++ ++// Moves 32-bit integer a to the least significant 32 bits of an __m128 object, ++// zero extending the upper bits. 
++// ++// r0 := a ++// r1 := 0x0 ++// r2 := 0x0 ++// r3 := 0x0 ++// ++// https://msdn.microsoft.com/en-us/library/ct3539ha%28v=vs.90%29.aspx ++FORCE_INLINE __m128i _mm_cvtsi32_si128(int a) ++{ ++ return vreinterpretq_m128i_s32(vsetq_lane_s32(a, vdupq_n_s32(0), 0)); ++} ++ ++// Applies a type cast to reinterpret four 32-bit floating point values passed ++// in as a 128-bit parameter as packed 32-bit integers. ++// https://msdn.microsoft.com/en-us/library/bb514099.aspx ++FORCE_INLINE __m128i _mm_castps_si128(__m128 a) ++{ ++ return vreinterpretq_m128i_s32(vreinterpretq_s32_m128(a)); ++} ++ ++// Applies a type cast to reinterpret four 32-bit integers passed in as a ++// 128-bit parameter as packed 32-bit floating point values. ++// https://msdn.microsoft.com/en-us/library/bb514029.aspx ++FORCE_INLINE __m128 _mm_castsi128_ps(__m128i a) ++{ ++ return vreinterpretq_m128_s32(vreinterpretq_s32_m128i(a)); ++} ++ ++// Loads 128-bit value. : ++// https://msdn.microsoft.com/en-us/library/atzzad1h(v=vs.80).aspx ++FORCE_INLINE __m128i _mm_load_si128(const __m128i *p) ++{ ++ return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p)); ++} ++ ++// Loads 128-bit value. : ++// https://msdn.microsoft.com/zh-cn/library/f4k12ae8(v=vs.90).aspx ++FORCE_INLINE __m128i _mm_loadu_si128(const __m128i *p) ++{ ++ return vreinterpretq_m128i_s32(vld1q_s32((const int32_t *) p)); ++} ++ ++// ****************************************** ++// Miscellaneous Operations ++// ****************************************** ++ ++// Packs the 16 signed 16-bit integers from a and b into 8-bit integers and ++// saturates. ++// https://msdn.microsoft.com/en-us/library/k4y4f7w5%28v=vs.90%29.aspx ++FORCE_INLINE __m128i _mm_packs_epi16(__m128i a, __m128i b) ++{ ++ return vreinterpretq_m128i_s8( ++ vcombine_s8(vqmovn_s16(vreinterpretq_s16_m128i(a)), ++ vqmovn_s16(vreinterpretq_s16_m128i(b)))); ++} ++ ++// Packs the 16 signed 16 - bit integers from a and b into 8 - bit unsigned ++// integers and saturates. 
++// ++// r0 := UnsignedSaturate(a0) ++// r1 := UnsignedSaturate(a1) ++// ... ++// r7 := UnsignedSaturate(a7) ++// r8 := UnsignedSaturate(b0) ++// r9 := UnsignedSaturate(b1) ++// ... ++// r15 := UnsignedSaturate(b7) ++// ++// https://msdn.microsoft.com/en-us/library/07ad1wx4(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_packus_epi16(const __m128i a, const __m128i b) ++{ ++ return vreinterpretq_m128i_u8( ++ vcombine_u8(vqmovun_s16(vreinterpretq_s16_m128i(a)), ++ vqmovun_s16(vreinterpretq_s16_m128i(b)))); ++} ++ ++// Packs the 8 signed 32-bit integers from a and b into signed 16-bit integers ++// and saturates. ++// ++// r0 := SignedSaturate(a0) ++// r1 := SignedSaturate(a1) ++// r2 := SignedSaturate(a2) ++// r3 := SignedSaturate(a3) ++// r4 := SignedSaturate(b0) ++// r5 := SignedSaturate(b1) ++// r6 := SignedSaturate(b2) ++// r7 := SignedSaturate(b3) ++// ++// https://msdn.microsoft.com/en-us/library/393t56f9%28v=vs.90%29.aspx ++FORCE_INLINE __m128i _mm_packs_epi32(__m128i a, __m128i b) ++{ ++ return vreinterpretq_m128i_s16( ++ vcombine_s16(vqmovn_s32(vreinterpretq_s32_m128i(a)), ++ vqmovn_s32(vreinterpretq_s32_m128i(b)))); ++} ++ ++// Interleaves the lower 8 signed or unsigned 8-bit integers in a with the lower ++// 8 signed or unsigned 8-bit integers in b. ++// ++// r0 := a0 ++// r1 := b0 ++// r2 := a1 ++// r3 := b1 ++// ... ++// r14 := a7 ++// r15 := b7 ++// ++// https://msdn.microsoft.com/en-us/library/xf7k860c%28v=vs.90%29.aspx ++FORCE_INLINE __m128i _mm_unpacklo_epi8(__m128i a, __m128i b) ++{ ++ int8x8_t a1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(a))); ++ int8x8_t b1 = vreinterpret_s8_s16(vget_low_s16(vreinterpretq_s16_m128i(b))); ++ int8x8x2_t result = vzip_s8(a1, b1); ++ return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1])); ++} ++ ++// Interleaves the lower 4 signed or unsigned 16-bit integers in a with the ++// lower 4 signed or unsigned 16-bit integers in b. 
++// ++// r0 := a0 ++// r1 := b0 ++// r2 := a1 ++// r3 := b1 ++// r4 := a2 ++// r5 := b2 ++// r6 := a3 ++// r7 := b3 ++// ++// https://msdn.microsoft.com/en-us/library/btxb17bw%28v=vs.90%29.aspx ++FORCE_INLINE __m128i _mm_unpacklo_epi16(__m128i a, __m128i b) ++{ ++ int16x4_t a1 = vget_low_s16(vreinterpretq_s16_m128i(a)); ++ int16x4_t b1 = vget_low_s16(vreinterpretq_s16_m128i(b)); ++ int16x4x2_t result = vzip_s16(a1, b1); ++ return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1])); ++} ++ ++// Interleaves the lower 2 signed or unsigned 32 - bit integers in a with the ++// lower 2 signed or unsigned 32 - bit integers in b. ++// ++// r0 := a0 ++// r1 := b0 ++// r2 := a1 ++// r3 := b1 ++// ++// https://msdn.microsoft.com/en-us/library/x8atst9d(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_unpacklo_epi32(__m128i a, __m128i b) ++{ ++ int32x2_t a1 = vget_low_s32(vreinterpretq_s32_m128i(a)); ++ int32x2_t b1 = vget_low_s32(vreinterpretq_s32_m128i(b)); ++ int32x2x2_t result = vzip_s32(a1, b1); ++ return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1])); ++} ++ ++FORCE_INLINE __m128i _mm_unpacklo_epi64(__m128i a, __m128i b) ++{ ++ int64x1_t a_l = vget_low_s64(vreinterpretq_s64_m128i(a)); ++ int64x1_t b_l = vget_low_s64(vreinterpretq_s64_m128i(b)); ++ return vreinterpretq_m128i_s64(vcombine_s64(a_l, b_l)); ++} ++ ++// Selects and interleaves the lower two single-precision, floating-point values ++// from a and b. 
++// ++// r0 := a0 ++// r1 := b0 ++// r2 := a1 ++// r3 := b1 ++// ++// https://msdn.microsoft.com/en-us/library/25st103b%28v=vs.90%29.aspx ++FORCE_INLINE __m128 _mm_unpacklo_ps(__m128 a, __m128 b) ++{ ++ float32x2_t a1 = vget_low_f32(vreinterpretq_f32_m128(a)); ++ float32x2_t b1 = vget_low_f32(vreinterpretq_f32_m128(b)); ++ float32x2x2_t result = vzip_f32(a1, b1); ++ return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1])); ++} ++ ++// Selects and interleaves the upper two single-precision, floating-point values ++// from a and b. ++// ++// r0 := a2 ++// r1 := b2 ++// r2 := a3 ++// r3 := b3 ++// ++// https://msdn.microsoft.com/en-us/library/skccxx7d%28v=vs.90%29.aspx ++FORCE_INLINE __m128 _mm_unpackhi_ps(__m128 a, __m128 b) ++{ ++ float32x2_t a1 = vget_high_f32(vreinterpretq_f32_m128(a)); ++ float32x2_t b1 = vget_high_f32(vreinterpretq_f32_m128(b)); ++ float32x2x2_t result = vzip_f32(a1, b1); ++ return vreinterpretq_m128_f32(vcombine_f32(result.val[0], result.val[1])); ++} ++ ++// Interleaves the upper 8 signed or unsigned 8-bit integers in a with the upper ++// 8 signed or unsigned 8-bit integers in b. ++// ++// r0 := a8 ++// r1 := b8 ++// r2 := a9 ++// r3 := b9 ++// ... ++// r14 := a15 ++// r15 := b15 ++// ++// https://msdn.microsoft.com/en-us/library/t5h7783k(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_unpackhi_epi8(__m128i a, __m128i b) ++{ ++ int8x8_t a1 = ++ vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(a))); ++ int8x8_t b1 = ++ vreinterpret_s8_s16(vget_high_s16(vreinterpretq_s16_m128i(b))); ++ int8x8x2_t result = vzip_s8(a1, b1); ++ return vreinterpretq_m128i_s8(vcombine_s8(result.val[0], result.val[1])); ++} ++ ++// Interleaves the upper 4 signed or unsigned 16-bit integers in a with the ++// upper 4 signed or unsigned 16-bit integers in b. 
++// ++// r0 := a4 ++// r1 := b4 ++// r2 := a5 ++// r3 := b5 ++// r4 := a6 ++// r5 := b6 ++// r6 := a7 ++// r7 := b7 ++// ++// https://msdn.microsoft.com/en-us/library/03196cz7(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_unpackhi_epi16(__m128i a, __m128i b) ++{ ++ int16x4_t a1 = vget_high_s16(vreinterpretq_s16_m128i(a)); ++ int16x4_t b1 = vget_high_s16(vreinterpretq_s16_m128i(b)); ++ int16x4x2_t result = vzip_s16(a1, b1); ++ return vreinterpretq_m128i_s16(vcombine_s16(result.val[0], result.val[1])); ++} ++ ++// Interleaves the upper 2 signed or unsigned 32-bit integers in a with the ++// upper 2 signed or unsigned 32-bit integers in b. ++// https://msdn.microsoft.com/en-us/library/65sa7cbs(v=vs.100).aspx ++FORCE_INLINE __m128i _mm_unpackhi_epi32(__m128i a, __m128i b) ++{ ++ int32x2_t a1 = vget_high_s32(vreinterpretq_s32_m128i(a)); ++ int32x2_t b1 = vget_high_s32(vreinterpretq_s32_m128i(b)); ++ int32x2x2_t result = vzip_s32(a1, b1); ++ return vreinterpretq_m128i_s32(vcombine_s32(result.val[0], result.val[1])); ++} ++ ++// Interleaves the upper signed or unsigned 64-bit integer in a with the ++// upper signed or unsigned 64-bit integer in b. ++// ++// r0 := a1 ++// r1 := b1 ++FORCE_INLINE __m128i _mm_unpackhi_epi64(__m128i a, __m128i b) ++{ ++ int64x1_t a_h = vget_high_s64(vreinterpretq_s64_m128i(a)); ++ int64x1_t b_h = vget_high_s64(vreinterpretq_s64_m128i(b)); ++ return vreinterpretq_m128i_s64(vcombine_s64(a_h, b_h)); ++} ++ ++// shift to right ++// https://msdn.microsoft.com/en-us/library/bb514041(v=vs.120).aspx ++// http://blog.csdn.net/hemmingway/article/details/44828303 ++FORCE_INLINE __m128i _mm_alignr_epi8(__m128i a, __m128i b, const int c) ++{ ++ return (__m128i) vextq_s8((int8x16_t) a, (int8x16_t) b, c); ++} ++ ++// Extracts the selected signed or unsigned 16-bit integer from a and zero ++// extends. 
https://msdn.microsoft.com/en-us/library/6dceta0c(v=vs.100).aspx ++// FORCE_INLINE int _mm_extract_epi16(__m128i a, __constrange(0,8) int imm) ++#define _mm_extract_epi16(a, imm) \ ++ ({ (vgetq_lane_s16(vreinterpretq_s16_m128i(a), (imm)) & 0x0000ffffUL); }) ++ ++// Inserts the least significant 16 bits of b into the selected 16-bit integer ++// of a. https://msdn.microsoft.com/en-us/library/kaze8hz1%28v=vs.100%29.aspx ++// FORCE_INLINE __m128i _mm_insert_epi16(__m128i a, const int b, ++// __constrange(0,8) int imm) ++#define _mm_insert_epi16(a, b, imm) \ ++ ({ \ ++ vreinterpretq_m128i_s16( \ ++ vsetq_lane_s16((b), vreinterpretq_s16_m128i(a), (imm))); \ ++ }) ++ ++// ****************************************** ++// Crypto Extensions ++// ****************************************** ++#if !defined(__ARM_FEATURE_CRYPTO) && defined(__aarch64__) ++// In the absence of crypto extensions, implement aesenc using regular neon ++// intrinsics instead. See: ++// http://www.workofard.com/2017/01/accelerated-aes-for-the-arm64-linux-kernel/ ++// http://www.workofard.com/2017/07/ghash-for-low-end-cores/ and ++// https://github.com/ColinIanKing/linux-next-mirror/blob/b5f466091e130caaf0735976648f72bd5e09aa84/crypto/aegis128-neon-inner.c#L52 ++// for more information Reproduced with permission of the author. 
++FORCE_INLINE __m128i _mm_aesenc_si128(__m128i EncBlock, __m128i RoundKey) ++{ ++ static const uint8_t crypto_aes_sbox[256] = { ++ 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, ++ 0xfe, 0xd7, 0xab, 0x76, 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, ++ 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0, 0xb7, 0xfd, 0x93, 0x26, ++ 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15, ++ 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, ++ 0xeb, 0x27, 0xb2, 0x75, 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, ++ 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84, 0x53, 0xd1, 0x00, 0xed, ++ 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf, ++ 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, ++ 0x50, 0x3c, 0x9f, 0xa8, 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, ++ 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2, 0xcd, 0x0c, 0x13, 0xec, ++ 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73, ++ 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, ++ 0xde, 0x5e, 0x0b, 0xdb, 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, ++ 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79, 0xe7, 0xc8, 0x37, 0x6d, ++ 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08, ++ 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, ++ 0x4b, 0xbd, 0x8b, 0x8a, 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, ++ 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e, 0xe1, 0xf8, 0x98, 0x11, ++ 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf, ++ 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, ++ 0xb0, 0x54, 0xbb, 0x16}; ++ static const uint8_t shift_rows[] = {0x0, 0x5, 0xa, 0xf, 0x4, 0x9, ++ 0xe, 0x3, 0x8, 0xd, 0x2, 0x7, ++ 0xc, 0x1, 0x6, 0xb}; ++ static const uint8_t ror32by8[] = {0x1, 0x2, 0x3, 0x0, 0x5, 0x6, 0x7, 0x4, ++ 0x9, 0xa, 0xb, 0x8, 0xd, 0xe, 0xf, 0xc}; ++ ++ uint8x16_t 
v; ++ uint8x16_t w = vreinterpretq_u8_m128i(EncBlock); ++ ++ // shift rows ++ w = vqtbl1q_u8(w, vld1q_u8(shift_rows)); ++ ++ // sub bytes ++ v = vqtbl4q_u8(vld1q_u8_x4(crypto_aes_sbox), w); ++ v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0x40), w - 0x40); ++ v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0x80), w - 0x80); ++ v = vqtbx4q_u8(v, vld1q_u8_x4(crypto_aes_sbox + 0xc0), w - 0xc0); ++ ++ // mix columns ++ w = (v << 1) ^ (uint8x16_t)(((int8x16_t) v >> 7) & 0x1b); ++ w ^= (uint8x16_t) vrev32q_u16((uint16x8_t) v); ++ w ^= vqtbl1q_u8(v ^ w, vld1q_u8(ror32by8)); ++ ++ // add round key ++ return vreinterpretq_m128i_u8(w) ^ RoundKey; ++} ++#elif defined(__ARM_FEATURE_CRYPTO) ++// Implements equivalent of 'aesenc' by combining AESE (with an empty key) and ++// AESMC and then manually applying the real key as an xor operation This ++// unfortunately means an additional xor op; the compiler should be able to ++// optimise this away for repeated calls however See ++// https://blog.michaelbrase.com/2018/05/08/emulating-x86-aes-intrinsics-on-armv8-a ++// for more details. ++inline __m128i _mm_aesenc_si128(__m128i a, __m128i b) ++{ ++ return vreinterpretq_s32_u8( ++ vaesmcq_u8(vaeseq_u8(vreinterpretq_u8_s32(a), uint8x16_t{})) ^ ++ vreinterpretq_u8_s32(b)); ++} ++#endif ++ ++// ****************************************** ++// Streaming Extensions ++// ****************************************** ++ ++// Guarantees that every preceding store is globally visible before any ++// subsequent store. ++// https://msdn.microsoft.com/en-us/library/5h2w73d1%28v=vs.90%29.aspx ++FORCE_INLINE void _mm_sfence(void) ++{ ++ __sync_synchronize(); ++} ++ ++// Stores the data in a to the address p without polluting the caches. If the ++// cache line containing address p is already in the cache, the cache will be ++// updated.Address p must be 16 - byte aligned. 
++// https://msdn.microsoft.com/en-us/library/ba08y07y%28v=vs.90%29.aspx ++FORCE_INLINE void _mm_stream_si128(__m128i *p, __m128i a) ++{ ++ vst1q_s32((int32_t *) p, a); ++} ++ ++// Cache line containing p is flushed and invalidated from all caches in the ++// coherency domain. : ++// https://msdn.microsoft.com/en-us/library/ba08y07y(v=vs.100).aspx ++FORCE_INLINE void _mm_clflush(void const *p) ++{ ++ // no corollary for Neon? ++} ++ ++#if defined(__GNUC__) || defined(__clang__) ++#pragma pop_macro("ALIGN_STRUCT") ++#pragma pop_macro("FORCE_INLINE") ++#endif ++ ++#endif +diff --git a/vendor/spoa/src/simd_alignment_engine.cpp b/vendor/spoa/src/simd_alignment_engine.cpp +index 3e5815a..2fd10d7 100644 +--- a/vendor/spoa/src/simd_alignment_engine.cpp ++++ b/vendor/spoa/src/simd_alignment_engine.cpp +@@ -7,9 +7,12 @@ + #include + #include + ++#include "arch/aarch64/sse2neon.h" ++/* + extern "C" { + #include // AVX2 and lower + } ++*/ + + #include "spoa/graph.hpp" + #include "simd_alignment_engine.hpp" diff --git a/var/spack/repos/builtin/packages/racon/package.py b/var/spack/repos/builtin/packages/racon/package.py index 35f5eabf31c..1e661b4bd7f 100644 --- a/var/spack/repos/builtin/packages/racon/package.py +++ b/var/spack/repos/builtin/packages/racon/package.py @@ -29,10 +29,8 @@ class Racon(CMakePackage): conflicts('%gcc@:4.7') conflicts('%clang@:3.1') + patch('aarch64.patch', when='target=aarch64:') + def cmake_args(self): args = ['-Dracon_build_wrapper=ON'] return args - - def install(self, spec, prefix): - install_tree('spack-build/bin', prefix.bin) - install_tree('spack-build/lib', prefix.lib) From e7106563109276f0dbb472ae88fc14575a001db2 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Tue, 28 Jan 2020 15:13:51 -0600 Subject: [PATCH 023/178] Set netcdf-fortran to build serially with Intel compiler (#14461) * Set netcdf-fortran to build serially with Intel compiler This PR turns off parallel builds when the Intel compiler is used. 
Builds with the Intel compiler will fail otherwise. * Change how parallel build is handled Use patch from netcdf-fortran project to turn off parallel buildi for version 4.5.2. --- .../packages/netcdf-fortran/no_parallel_build.patch | 12 ++++++++++++ .../repos/builtin/packages/netcdf-fortran/package.py | 4 ++++ 2 files changed, 16 insertions(+) create mode 100644 var/spack/repos/builtin/packages/netcdf-fortran/no_parallel_build.patch diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/no_parallel_build.patch b/var/spack/repos/builtin/packages/netcdf-fortran/no_parallel_build.patch new file mode 100644 index 00000000000..c232a62bc51 --- /dev/null +++ b/var/spack/repos/builtin/packages/netcdf-fortran/no_parallel_build.patch @@ -0,0 +1,12 @@ +--- a/fortran/Makefile.in 2019-09-18 12:29:45.000000000 -0500 ++++ b/fortran/Makefile.in 2020-01-24 10:56:03.660035265 -0600 +@@ -1095,6 +1095,9 @@ + @USE_LOGGING_TRUE@ echo ' integer nf_set_log_level' >> netcdf.inc + @USE_LOGGING_TRUE@ echo ' external nf_set_log_level' >> netcdf.inc + ++# Turn off parallel builds in this directory. ++.NOTPARALLEL: ++ + # Tell versions [3.59,3.63) of GNU make to not export all variables. + # Otherwise a system limit (for SysV at least) may be exceeded. + .NOEXPORT: diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py index 891ad5b81a3..e3703fd6097 100644 --- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py +++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py @@ -36,6 +36,10 @@ class NetcdfFortran(AutotoolsPackage): # https://github.com/Unidata/netcdf-fortran/issues/94 patch('nag.patch', when='@:4.4.4%nag') + # Parallel builds do not work in the fortran directory. 
This patch is + # derived from https://github.com/Unidata/netcdf-fortran/pull/211 + patch('no_parallel_build.patch', when='@4.5.2') + def flag_handler(self, name, flags): if name in ['cflags', 'fflags'] and '+pic' in self.spec: flags.append(self.compiler.pic_flag) From 69feea280d5504b0ff72a0e309ed7cfcb7fb43a4 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Tue, 28 Jan 2020 17:26:26 -0800 Subject: [PATCH 024/178] env: synchronize updates to environments (#14621) Updates to environments were not multi-process safe, which prevented them from taking advantage of parallel builds as implemented in #13100. This is a minimal set of changes to enable `spack install` in an environment to be parallelized: - [x] add an internal lock, stored in the `.spack-env` directory, to synchronize updates to `spack.yaml` and `spack.lock` - [x] add `Environment.write_transaction` interface for this lock - [x] makes use of `Environment.write_transaction` in `install`, `add`, and `remove` commands - `uninstall` is not synchronized yet; that is left for a future PR. 
--- lib/spack/spack/cmd/add.py | 15 +-- lib/spack/spack/cmd/concretize.py | 7 +- lib/spack/spack/cmd/install.py | 25 ++-- lib/spack/spack/cmd/remove.py | 15 +-- lib/spack/spack/environment.py | 182 +++++++++++++++++++----------- 5 files changed, 152 insertions(+), 92 deletions(-) diff --git a/lib/spack/spack/cmd/add.py b/lib/spack/spack/cmd/add.py index e08c2c5aacc..94c8620dbb6 100644 --- a/lib/spack/spack/cmd/add.py +++ b/lib/spack/spack/cmd/add.py @@ -25,10 +25,11 @@ def setup_parser(subparser): def add(parser, args): env = ev.get_env(args, 'add', required=True) - for spec in spack.cmd.parse_specs(args.specs): - if not env.add(spec, args.list_name): - tty.msg("Package {0} was already added to {1}" - .format(spec.name, env.name)) - else: - tty.msg('Adding %s to environment %s' % (spec, env.name)) - env.write() + with env.write_transaction(): + for spec in spack.cmd.parse_specs(args.specs): + if not env.add(spec, args.list_name): + tty.msg("Package {0} was already added to {1}" + .format(spec.name, env.name)) + else: + tty.msg('Adding %s to environment %s' % (spec, env.name)) + env.write() diff --git a/lib/spack/spack/cmd/concretize.py b/lib/spack/spack/cmd/concretize.py index a7080424210..d28f7b4a5d9 100644 --- a/lib/spack/spack/cmd/concretize.py +++ b/lib/spack/spack/cmd/concretize.py @@ -18,6 +18,7 @@ def setup_parser(subparser): def concretize(parser, args): env = ev.get_env(args, 'concretize', required=True) - concretized_specs = env.concretize(force=args.force) - ev.display_specs(concretized_specs) - env.write() + with env.write_transaction(): + concretized_specs = env.concretize(force=args.force) + ev.display_specs(concretized_specs) + env.write() diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 18dad6108bf..8f1eab0eb32 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -223,8 +223,13 @@ def install_spec(cli_args, kwargs, abstract_spec, spec): # handle active environment, if any env = 
ev.get_env(cli_args, 'install') if env: - env.install(abstract_spec, spec, **kwargs) - env.write() + with env.write_transaction(): + concrete = env.concretize_and_add( + abstract_spec, spec) + env.write(regenerate_views=False) + env._install(concrete, **kwargs) + with env.write_transaction(): + env.regenerate_views() else: spec.package.do_install(**kwargs) @@ -259,16 +264,20 @@ def install(parser, args, **kwargs): env = ev.get_env(args, 'install') if env: if not args.only_concrete: - concretized_specs = env.concretize() - ev.display_specs(concretized_specs) + with env.write_transaction(): + concretized_specs = env.concretize() + ev.display_specs(concretized_specs) - # save view regeneration for later, so that we only do it - # once, as it can be slow. - env.write(regenerate_views=False) + # save view regeneration for later, so that we only do it + # once, as it can be slow. + env.write(regenerate_views=False) tty.msg("Installing environment %s" % env.name) env.install_all(args) - env.regenerate_views() + with env.write_transaction(): + # It is not strictly required to synchronize view regeneration + # but doing so can prevent redundant work in the filesystem. 
+ env.regenerate_views() return else: tty.die("install requires a package argument or a spack.yaml file") diff --git a/lib/spack/spack/cmd/remove.py b/lib/spack/spack/cmd/remove.py index 049041ce835..ef01052c29f 100644 --- a/lib/spack/spack/cmd/remove.py +++ b/lib/spack/spack/cmd/remove.py @@ -31,10 +31,11 @@ def setup_parser(subparser): def remove(parser, args): env = ev.get_env(args, 'remove', required=True) - if args.all: - env.clear() - else: - for spec in spack.cmd.parse_specs(args.specs): - tty.msg('Removing %s from environment %s' % (spec, env.name)) - env.remove(spec, args.list_name, force=args.force) - env.write() + with env.write_transaction(): + if args.all: + env.clear() + else: + for spec in spack.cmd.parse_specs(args.specs): + tty.msg('Removing %s from environment %s' % (spec, env.name)) + env.remove(spec, args.list_name, force=args.force) + env.write() diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 351120b127a..87276eacbc8 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -36,6 +36,7 @@ from spack.spec import Spec from spack.spec_list import SpecList, InvalidSpecConstraintError from spack.variant import UnknownVariantError +import spack.util.lock as lk #: environment variable used to indicate the active environment spack_env_var = 'SPACK_ENV' @@ -557,6 +558,9 @@ def __init__(self, path, init_file=None, with_view=None): path to the view. 
""" self.path = os.path.abspath(path) + + self.txlock = lk.Lock(self._transaction_lock_path) + # This attribute will be set properly from configuration # during concretization self.concretization = None @@ -571,26 +575,7 @@ def __init__(self, path, init_file=None, with_view=None): else: self._read_manifest(f, raw_yaml=default_manifest_yaml) else: - default_manifest = not os.path.exists(self.manifest_path) - if default_manifest: - # No manifest, use default yaml - self._read_manifest(default_manifest_yaml) - else: - with open(self.manifest_path) as f: - self._read_manifest(f) - - if os.path.exists(self.lock_path): - with open(self.lock_path) as f: - read_lock_version = self._read_lockfile(f) - if default_manifest: - # No manifest, set user specs from lockfile - self._set_user_specs_from_lockfile() - - if read_lock_version == 1: - tty.debug( - "Storing backup of old lockfile {0} at {1}".format( - self.lock_path, self._lock_backup_v1_path)) - shutil.copy(self.lock_path, self._lock_backup_v1_path) + self._read() if with_view is False: self.views = {} @@ -602,6 +587,42 @@ def __init__(self, path, init_file=None, with_view=None): # If with_view is None, then defer to the view settings determined by # the manifest file + def _re_read(self): + """Reinitialize the environment object if it has been written (this + may not be true if the environment was just created in this running + instance of Spack).""" + if not os.path.exists(self.manifest_path): + return + + self.clear() + self._read() + + def _read(self): + default_manifest = not os.path.exists(self.manifest_path) + if default_manifest: + # No manifest, use default yaml + self._read_manifest(default_manifest_yaml) + else: + with open(self.manifest_path) as f: + self._read_manifest(f) + + if os.path.exists(self.lock_path): + with open(self.lock_path) as f: + read_lock_version = self._read_lockfile(f) + if default_manifest: + # No manifest, set user specs from lockfile + self._set_user_specs_from_lockfile() + + if 
read_lock_version == 1: + tty.debug( + "Storing backup of old lockfile {0} at {1}".format( + self.lock_path, self._lock_backup_v1_path)) + shutil.copy(self.lock_path, self._lock_backup_v1_path) + + def write_transaction(self): + """Get a write lock context manager for use in a `with` block.""" + return lk.WriteTransaction(self.txlock, acquire=self._re_read) + def _read_manifest(self, f, raw_yaml=None): """Read manifest file and set up user specs.""" if raw_yaml: @@ -694,6 +715,13 @@ def manifest_path(self): """Path to spack.yaml file in this environment.""" return os.path.join(self.path, manifest_name) + @property + def _transaction_lock_path(self): + """The location of the lock file used to synchronize multiple + processes updating the same environment. + """ + return os.path.join(self.path, 'transaction_lock') + @property def lock_path(self): """Path to spack.lock file in this environment.""" @@ -986,11 +1014,18 @@ def _concretize_separately(self): concretized_specs.append((uspec, concrete)) return concretized_specs - def install(self, user_spec, concrete_spec=None, **install_args): - """Install a single spec into an environment. + def concretize_and_add(self, user_spec, concrete_spec=None): + """Concretize and add a single spec to the environment. - This will automatically concretize the single spec, but it won't - affect other as-yet unconcretized specs. + Concretize the provided ``user_spec`` and add it along with the + concretized result to the environment. If the given ``user_spec`` was + already present in the environment, this does not add a duplicate. + The concretized spec will be added unless the ``user_spec`` was + already present and an associated concrete spec was already present. 
+ + Args: + concrete_spec: if provided, then it is assumed that it is the + result of concretizing the provided ``user_spec`` """ if self.concretization == 'together': msg = 'cannot install a single spec in an environment that is ' \ @@ -1001,37 +1036,21 @@ def install(self, user_spec, concrete_spec=None, **install_args): spec = Spec(user_spec) - with spack.store.db.read_transaction(): - if self.add(spec): - concrete = concrete_spec or spec.concretized() + if self.add(spec): + concrete = concrete_spec or spec.concretized() + self._add_concrete_spec(spec, concrete) + else: + # spec might be in the user_specs, but not installed. + # TODO: Redo name-based comparison for old style envs + spec = next( + s for s in self.user_specs if s.satisfies(user_spec) + ) + concrete = self.specs_by_hash.get(spec.build_hash()) + if not concrete: + concrete = spec.concretized() self._add_concrete_spec(spec, concrete) - else: - # spec might be in the user_specs, but not installed. - # TODO: Redo name-based comparison for old style envs - spec = next( - s for s in self.user_specs if s.satisfies(user_spec) - ) - concrete = self.specs_by_hash.get(spec.build_hash()) - if not concrete: - concrete = spec.concretized() - self._add_concrete_spec(spec, concrete) - self._install(concrete, **install_args) - - def _install(self, spec, **install_args): - spec.package.do_install(**install_args) - - # Make sure log directory exists - log_path = self.log_path - fs.mkdirp(log_path) - - with fs.working_dir(self.path): - # Link the resulting log file into logs dir - build_log_link = os.path.join( - log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7))) - if os.path.lexists(build_log_link): - os.remove(build_log_link) - os.symlink(spec.package.build_log_path, build_log_link) + return concrete @property def default_view(self): @@ -1131,6 +1150,33 @@ def _add_concrete_spec(self, spec, concrete, new=True): self.concretized_order.append(h) self.specs_by_hash[h] = concrete + def install(self, user_spec, 
concrete_spec=None, **install_args): + """Install a single spec into an environment. + + This will automatically concretize the single spec, but it won't + affect other as-yet unconcretized specs. + """ + concrete = self.concretize_and_add(user_spec, concrete_spec) + + self._install(concrete, **install_args) + + def _install(self, spec, **install_args): + # "spec" must be concrete + spec.package.do_install(**install_args) + + if not spec.external: + # Make sure log directory exists + log_path = self.log_path + fs.mkdirp(log_path) + + with fs.working_dir(self.path): + # Link the resulting log file into logs dir + build_log_link = os.path.join( + log_path, '%s-%s.log' % (spec.name, spec.dag_hash(7))) + if os.path.lexists(build_log_link): + os.remove(build_log_link) + os.symlink(spec.package.build_log_path, build_log_link) + def install_all(self, args=None): """Install all concretized specs in an environment. @@ -1138,25 +1184,27 @@ def install_all(self, args=None): that needs to be done separately with a call to write(). """ + + # If "spack install" is invoked repeatedly for a large environment + # where all specs are already installed, the operation can take + # a large amount of time due to repeatedly acquiring and releasing + # locks, this does an initial check across all specs within a single + # DB read transaction to reduce time spent in this case. 
+ uninstalled_specs = [] with spack.store.db.read_transaction(): for concretized_hash in self.concretized_order: spec = self.specs_by_hash[concretized_hash] + if not spec.package.installed: + uninstalled_specs.append(spec) - # Parse cli arguments and construct a dictionary - # that will be passed to Package.do_install API - kwargs = dict() - if args: - spack.cmd.install.update_kwargs_from_args(args, kwargs) + for spec in uninstalled_specs: + # Parse cli arguments and construct a dictionary + # that will be passed to Package.do_install API + kwargs = dict() + if args: + spack.cmd.install.update_kwargs_from_args(args, kwargs) - self._install(spec, **kwargs) - - if not spec.external: - # Link the resulting log file into logs dir - log_name = '%s-%s' % (spec.name, spec.dag_hash(7)) - build_log_link = os.path.join(self.log_path, log_name) - if os.path.lexists(build_log_link): - os.remove(build_log_link) - os.symlink(spec.package.build_log_path, build_log_link) + self._install(spec, **kwargs) def all_specs_by_hash(self): """Map of hashes to spec for all specs in this environment.""" From 9d7e482497e3ee3b4aeb6c495a27cd88d8b2a64c Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Tue, 28 Jan 2020 22:01:25 -0500 Subject: [PATCH 025/178] git: add version 2.25.0 and fixup pcre dependency (#14649) * git: add version 2.25.0 and fixup pcre dependency pcre2 became optional in 2.14 and the default in 2.18. I noticed this as git was compiling against the system pcre2 (spack was specifying pcre as the dependency). 
* missed a chunk from my internal repo --- var/spack/repos/builtin/packages/git/package.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py index a7f97a4601f..c77c1fa916d 100644 --- a/var/spack/repos/builtin/packages/git/package.py +++ b/var/spack/repos/builtin/packages/git/package.py @@ -24,6 +24,11 @@ class Git(AutotoolsPackage): # You can find the source here: https://mirrors.edge.kernel.org/pub/software/scm/git/sha256sums.asc releases = [ + { + 'version': '2.25.0', + 'sha256': 'a98c9b96d91544b130f13bf846ff080dda2867e77fe08700b793ab14ba5346f6', + 'sha256_manpages': '22b2380842ef75e9006c0358de250ead449e1376d7e5138070b9a3073ef61d44' + }, { 'version': '2.21.0', 'sha256': '85eca51c7404da75e353eba587f87fea9481ba41e162206a6f70ad8118147bee', @@ -175,7 +180,7 @@ class Git(AutotoolsPackage): depends_on('libiconv') depends_on('openssl') depends_on('pcre', when='@:2.13') - depends_on('pcre+jit', when='@2.14:') + depends_on('pcre2', when='@2.14:') depends_on('perl') depends_on('zlib') @@ -216,12 +221,17 @@ def configure_args(self): '--with-curl={0}'.format(spec['curl'].prefix), '--with-expat={0}'.format(spec['expat'].prefix), '--with-iconv={0}'.format(spec['libiconv'].prefix), - '--with-libpcre={0}'.format(spec['pcre'].prefix), '--with-openssl={0}'.format(spec['openssl'].prefix), '--with-perl={0}'.format(spec['perl'].command.path), '--with-zlib={0}'.format(spec['zlib'].prefix), ] + if '^pcre' in self.spec: + configure_args.append('--with-libpcre={0}'.format( + spec['pcre'].prefix)) + if '^pcre2' in self.spec: + configure_args.append('--with-libpcre2={0}'.format( + spec['pcre2'].prefix)) if '+tcltk' in self.spec: configure_args.append('--with-tcltk={0}'.format( self.spec['tk'].prefix.bin.wish)) From f7ec09d30bd835b8318b48365edece35d08b4114 Mon Sep 17 00:00:00 2001 From: t-karatsu <49965247+t-karatsu@users.noreply.github.com> Date: Wed, 29 Jan 
2020 12:02:40 +0900 Subject: [PATCH 026/178] Fujitsu compiler: Defining option that is always added. (#14657) --- lib/spack/spack/compilers/fj.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/spack/spack/compilers/fj.py b/lib/spack/spack/compilers/fj.py index 1f7d52f0261..083fe35b0d0 100644 --- a/lib/spack/spack/compilers/fj.py +++ b/lib/spack/spack/compilers/fj.py @@ -61,3 +61,7 @@ def c11_flag(self): @property def pic_flag(self): return "-KPIC" + + def setup_custom_environment(self, pkg, env): + env.append_flags('fcc_ENV', '-Nclang') + env.append_flags('FCC_ENV', '-Nclang') From 60ed6d2012b6a540f55cc5b20ab0dd7ef043a971 Mon Sep 17 00:00:00 2001 From: Tamara Dahlgren <35777542+tldahlgren@users.noreply.github.com> Date: Tue, 28 Jan 2020 22:57:26 -0800 Subject: [PATCH 027/178] bugfix: correct exception message matching in tests (#14655) This commit makes two fundamental corrections to tests: 1) Changes 'matches' to the correct 'match' argument for 'pytest.raises' (for all affected tests except those checking for 'SystemExit'); 2) Replaces the 'match' argument for tests expecting 'SystemExit' (since the exit code is retained instead) with 'capsys' error message capture. Both changes are needed to ensure the associated exception message is actually checked. 
--- lib/spack/spack/test/cmd/create.py | 16 +++++++++++----- lib/spack/spack/test/git_fetch.py | 2 +- lib/spack/spack/test/install.py | 10 +++++----- lib/spack/spack/test/llnl/util/cpu.py | 4 ++-- lib/spack/spack/test/llnl/util/filesystem.py | 4 ++-- lib/spack/spack/test/stage.py | 7 +++++-- 6 files changed, 26 insertions(+), 17 deletions(-) diff --git a/lib/spack/spack/test/cmd/create.py b/lib/spack/spack/test/cmd/create.py index a973e4ee5b3..42627443177 100644 --- a/lib/spack/spack/test/cmd/create.py +++ b/lib/spack/spack/test/cmd/create.py @@ -95,12 +95,16 @@ def test_create_template(parser, mock_test_repo, args, name, expected): (' ', 'name must be provided'), ('bad#name', 'name can only contain'), ]) -def test_create_template_bad_name(parser, mock_test_repo, name, expected): +def test_create_template_bad_name( + parser, mock_test_repo, name, expected, capsys): """Test template creation with bad name options.""" constr_args = parser.parse_args(['--skip-editor', '-n', name]) - with pytest.raises(SystemExit, matches=expected): + with pytest.raises(SystemExit): spack.cmd.create.create(parser, constr_args) + captured = capsys.readouterr() + assert expected in str(captured) + def test_build_system_guesser_no_stage(parser): """Test build system guesser when stage not provided.""" @@ -108,7 +112,7 @@ def test_build_system_guesser_no_stage(parser): # Ensure get the expected build system with pytest.raises(AttributeError, - matches="'NoneType' object has no attribute"): + match="'NoneType' object has no attribute"): guesser(None, '/the/url/does/not/matter') @@ -142,7 +146,7 @@ def test_get_name_urls(parser, url, expected): assert name == expected -def test_get_name_error(parser, monkeypatch): +def test_get_name_error(parser, monkeypatch, capsys): """Test get_name UndetectableNameError exception path.""" def _parse_name_offset(path, v): raise UndetectableNameError(path) @@ -152,5 +156,7 @@ def _parse_name_offset(path, v): url = 'downloads.sourceforge.net/noapp/' args = 
parser.parse_args([url]) - with pytest.raises(SystemExit, matches="Couldn't guess a name"): + with pytest.raises(SystemExit): spack.cmd.create.get_name(args) + captured = capsys.readouterr() + assert "Couldn't guess a name" in str(captured) diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index abdeadcdb2f..57474e56b7c 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -169,7 +169,7 @@ def test_git_extra_fetch(tmpdir): def test_needs_stage(): """Trigger a NoStageError when attempt a fetch without a stage.""" with pytest.raises(spack.fetch_strategy.NoStageError, - matches=_mock_transport_error): + match=r"set_stage.*before calling fetch"): fetcher = GitFetchStrategy(git='file:///not-a-real-git-repo') fetcher.fetch() diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py index 8877e785d3e..914ae527a07 100644 --- a/lib/spack/spack/test/install.py +++ b/lib/spack/spack/test/install.py @@ -316,14 +316,14 @@ def test_uninstall_by_spec_errors(mutable_database): """Test exceptional cases with the uninstall command.""" # Try to uninstall a spec that has not been installed - rec = mutable_database.get_record('zmpi') - with pytest.raises(InstallError, matches="not installed"): - PackageBase.uninstall_by_spec(rec.spec) + spec = Spec('dependent-install') + spec.concretize() + with pytest.raises(InstallError, match="is not installed"): + PackageBase.uninstall_by_spec(spec) # Try an unforced uninstall of a spec with dependencies rec = mutable_database.get_record('mpich') - - with pytest.raises(PackageStillNeededError, matches="cannot uninstall"): + with pytest.raises(PackageStillNeededError, match="Cannot uninstall"): PackageBase.uninstall_by_spec(rec.spec) diff --git a/lib/spack/spack/test/llnl/util/cpu.py b/lib/spack/spack/test/llnl/util/cpu.py index 300ee3c284d..319d9e684e8 100644 --- a/lib/spack/spack/test/llnl/util/cpu.py +++ b/lib/spack/spack/test/llnl/util/cpu.py @@ -245,7 
+245,7 @@ def test_unsupported_optimization_flags(target_name, compiler, version): target = llnl.util.cpu.targets[target_name] with pytest.raises( llnl.util.cpu.UnsupportedMicroarchitecture, - matches='cannot produce optimized binary' + match='cannot produce optimized binary' ): target.optimization_flags(compiler, version) @@ -287,5 +287,5 @@ def test_invalid_family(): vendor='Imagination', features=[], compilers={}, generation=0 ) with pytest.raises(AssertionError, - matches='a target is expected to belong'): + match='a target is expected to belong'): multi_parents.family diff --git a/lib/spack/spack/test/llnl/util/filesystem.py b/lib/spack/spack/test/llnl/util/filesystem.py index d0aca223d6f..b48abb4fc60 100644 --- a/lib/spack/spack/test/llnl/util/filesystem.py +++ b/lib/spack/spack/test/llnl/util/filesystem.py @@ -116,13 +116,13 @@ def test_parent_dir(self, stage): # Make sure we get the right error if we try to copy a parent into # a descendent directory. - with pytest.raises(ValueError, matches="Cannot copy"): + with pytest.raises(ValueError, match="Cannot copy"): with fs.working_dir(str(stage)): fs.copy_tree('source', 'source/sub/directory') # Only point with this check is to make sure we don't try to perform # the copy. 
- with pytest.raises(IOError, matches="No such file or directory"): + with pytest.raises(IOError, match="No such file or directory"): with fs.working_dir(str(stage)): fs.copy_tree('foo/ba', 'foo/bar') diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py index 1e77652957e..9cf50b8da2a 100644 --- a/lib/spack/spack/test/stage.py +++ b/lib/spack/spack/test/stage.py @@ -927,8 +927,11 @@ def test_stage_create_replace_path(tmp_build_stage_dir): assert os.path.isdir(stage.path) -def test_cannot_access(): +def test_cannot_access(capsys): """Ensure can_access dies with the expected error.""" - with pytest.raises(SystemExit, matches='Insufficient permissions'): + with pytest.raises(SystemExit): # It's far more portable to use a non-existent filename. spack.stage.ensure_access('/no/such/file') + + captured = capsys.readouterr() + assert 'Insufficient permissions' in str(captured) From 53d891a501bf7f3d536d9dc96f7a0fe46d5e7de4 Mon Sep 17 00:00:00 2001 From: Matthias Wolf Date: Wed, 29 Jan 2020 15:56:54 +0100 Subject: [PATCH 028/178] py-black: update url, add 19.3b0 (#14664) --- var/spack/repos/builtin/packages/py-black/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-black/package.py b/var/spack/repos/builtin/packages/py-black/package.py index e670ac7411e..913b2592c8b 100644 --- a/var/spack/repos/builtin/packages/py-black/package.py +++ b/var/spack/repos/builtin/packages/py-black/package.py @@ -12,9 +12,10 @@ class PyBlack(PythonPackage): speed, determinism, and freedom from pycodestyle nagging about formatting. 
""" - homepage = "https://github.com/ambv/black" + homepage = "https://github.com/psf/black" url = "https://pypi.io/packages/source/b/black/black-18.9b0.tar.gz" + version('19.3b0', sha256='68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c') version('18.9b0', sha256='e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5') depends_on('python@3.6.0:') From 1af36f0cdc3580c49ea044c5240ef3e618c4d68a Mon Sep 17 00:00:00 2001 From: Matthias Wolf Date: Wed, 29 Jan 2020 15:58:41 +0100 Subject: [PATCH 029/178] flatbuffers: add 1.11.0 (#14663) --- var/spack/repos/builtin/packages/flatbuffers/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/flatbuffers/package.py b/var/spack/repos/builtin/packages/flatbuffers/package.py index 525c3c53ce3..538922a7def 100644 --- a/var/spack/repos/builtin/packages/flatbuffers/package.py +++ b/var/spack/repos/builtin/packages/flatbuffers/package.py @@ -13,6 +13,7 @@ class Flatbuffers(CMakePackage): homepage = "http://google.github.io/flatbuffers/" url = "https://github.com/google/flatbuffers/archive/v1.9.0.tar.gz" + version('1.11.0', sha256='3f4a286642094f45b1b77228656fbd7ea123964f19502f9ecfd29933fd23a50b') version('1.10.0', sha256='3714e3db8c51e43028e10ad7adffb9a36fc4aa5b1a363c2d0c4303dd1be59a7c') version('1.9.0', sha256='5ca5491e4260cacae30f1a5786d109230db3f3a6e5a0eb45d0d0608293d247e3') version('1.8.0', sha256='c45029c0a0f1a88d416af143e34de96b3091642722aa2d8c090916c6d1498c2e') From 4e3617fe1df1e6e19ac5e7a6fbd19a1a600d5916 Mon Sep 17 00:00:00 2001 From: Matthias Wolf Date: Wed, 29 Jan 2020 16:00:03 +0100 Subject: [PATCH 030/178] py-pyperf: add 1.6.{0,1} (#14662) --- var/spack/repos/builtin/packages/py-pyperf/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/py-pyperf/package.py b/var/spack/repos/builtin/packages/py-pyperf/package.py index f641eabc26f..435e1d8255c 100644 --- a/var/spack/repos/builtin/packages/py-pyperf/package.py 
+++ b/var/spack/repos/builtin/packages/py-pyperf/package.py @@ -14,6 +14,8 @@ class PyPyperf(PythonPackage): homepage = "https://pypi.python.org/pypi/pyperf" url = "https://github.com/vstinner/pyperf/archive/1.5.1.tar.gz" + version('1.6.1', sha256='fbe793f6f2e036ab4dcca105b5c5aa34fd331dd881e7a3e158e5e218c63cfc32') + version('1.6.0', sha256='7af7b9cfd9d26548ab7127f8e51791357ecd78cda46aad5b2d9664a70fc58878') version('1.5.1', sha256='9c271862bc2911be8eb01031a4a86cbc3f5bb615971514383802d3dcf46f18ed') depends_on('py-setuptools', type='build') From 52a7f94d9e74620913b57b28680ddbc7215c1bac Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Wed, 29 Jan 2020 18:40:39 +0100 Subject: [PATCH 031/178] pythia8: add new package (#14665) --- .../repos/builtin/packages/pythia8/package.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 var/spack/repos/builtin/packages/pythia8/package.py diff --git a/var/spack/repos/builtin/packages/pythia8/package.py b/var/spack/repos/builtin/packages/pythia8/package.py new file mode 100644 index 00000000000..0afbb8dc910 --- /dev/null +++ b/var/spack/repos/builtin/packages/pythia8/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Pythia8(AutotoolsPackage): + """The Pythia program is a standard tool for the generation of events in + high-energy collisions, comprising a coherent set of physics models for + the evolution from a few-body hard process to a complex multiparticle + final state.""" + + homepage = "http://home.thep.lu.se/Pythia/" + url = "http://home.thep.lu.se/~torbjorn/pythia8/pythia8244.tgz" + + version('8301', sha256='51382768eb9aafb97870dca1909516422297b64ef6a6b94659259b3e4afa7f06') + version('8244', sha256='e34880f999daf19cdd893a187123927ba77d1bf851e30f6ea9ec89591f4c92ca', preferred=True) + + variant('shared', default=True, description='Build shared library') + + def configure_args(self): + args = [] + + if '+shared' in self.spec: + args.append('--enable-shared') + + return args From 549aae7f72c5f81b14947bdffcd8158f76722f1f Mon Sep 17 00:00:00 2001 From: Jennifer Herting Date: Wed, 29 Jan 2020 14:23:22 -0500 Subject: [PATCH 032/178] [py-griddataformats] added new versions (#14671) --- .../repos/builtin/packages/py-griddataformats/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-griddataformats/package.py b/var/spack/repos/builtin/packages/py-griddataformats/package.py index 347d03bd92f..f7c8584ac0a 100644 --- a/var/spack/repos/builtin/packages/py-griddataformats/package.py +++ b/var/spack/repos/builtin/packages/py-griddataformats/package.py @@ -13,8 +13,10 @@ class PyGriddataformats(PythonPackage): data again.""" homepage = "http://www.mdanalysis.org/GridDataFormats" - url = "https://pypi.io/packages/source/G/GridDataFormats/GridDataFormats-0.3.3.tar.gz" + url = "https://pypi.io/packages/source/G/GridDataFormats/GridDataFormats-0.5.0.tar.gz" + version('0.5.0', sha256='f317ed60708de22d1b2a76ce89a00f722d903291b1055ff1018d441870c39d69') + version('0.4.1', sha256='b362662c2dc475e2a3895fe044eaaa9a707bd660fd109a63dac84a47236690a3') 
version('0.3.3', sha256='938f0efcb3bc2f58ec85048b933942da8a52c134170acc97cb095f09d3698fbd') depends_on('python@2.7:') From 30c8e1d8a055f37269c02dc75246cae651775504 Mon Sep 17 00:00:00 2001 From: Robert Pavel Date: Wed, 29 Jan 2020 12:24:03 -0700 Subject: [PATCH 033/178] Added xsbench@19 to Version List (#14668) --- var/spack/repos/builtin/packages/xsbench/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/xsbench/package.py b/var/spack/repos/builtin/packages/xsbench/package.py index 6488891e123..3648edc298a 100644 --- a/var/spack/repos/builtin/packages/xsbench/package.py +++ b/var/spack/repos/builtin/packages/xsbench/package.py @@ -18,6 +18,7 @@ class Xsbench(MakefilePackage): tags = ['proxy-app', 'ecp-proxy-app'] + version('19', sha256='57cc44ae3b0a50d33fab6dd48da13368720d2aa1b91cde47d51da78bf656b97e') version('18', sha256='a9a544eeacd1be8d687080d2df4eeb701c04eda31d3806e7c3ea1ff36c26f4b0') version('14', sha256='595afbcba8c1079067d5d17eedcb4ab0c1d115f83fd6f8c3de01d74b23015e2d') version('13', sha256='b503ea468d3720a0369304924477b758b3d128c8074776233fa5d567b7ffcaa2') From b142914b24684b806a07ec97e1c8318a6569b4ea Mon Sep 17 00:00:00 2001 From: Robert Pavel Date: Wed, 29 Jan 2020 12:24:22 -0700 Subject: [PATCH 034/178] Added MiniAMR@1.4.4 (#14667) --- var/spack/repos/builtin/packages/miniamr/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/miniamr/package.py b/var/spack/repos/builtin/packages/miniamr/package.py index 83dcad304af..b2166618c73 100644 --- a/var/spack/repos/builtin/packages/miniamr/package.py +++ b/var/spack/repos/builtin/packages/miniamr/package.py @@ -19,6 +19,7 @@ class Miniamr(MakefilePackage): tags = ['proxy-app', 'ecp-proxy-app'] version('develop', branch='master') + version('1.4.4', sha256='b83f438ff351481b4310c46ddf63b9fffc7f29f916a5717377e72919a5b788b6') version('1.4.3', sha256='4c3fbc1662ae3e139669fb3844134486a7488a0b6e085c3b24bebcc8d12d3ac6') version('1.4.2', 
sha256='d2347e0e22a8e79aa0dc3316b67dd7c40dded39d82f6e068e6fb8c9f0766566b') version('1.4.1', sha256='dd8e8d9fd0768cb4f2c5d7fe6989dfa6bb95a8461f04deaccdbb50b0dd51e97a') From ed4d544e8fac7b5faa5918d4ed0d9ee9adc7d95e Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Wed, 29 Jan 2020 13:24:44 -0600 Subject: [PATCH 035/178] Fix py-pillow tests (#14670) --- var/spack/repos/builtin/packages/py-pillow/package.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index 06736003c0f..9f537d3138f 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -103,6 +103,8 @@ def variant_to_flag(variant): return args # Tests need to be re-added since `phases` was overridden - run_after('build_ext')(PythonPackage.test) - run_after('install')(PythonPackage.import_module_test) + run_after('build_ext')( + PythonPackage._run_default_build_time_test_callbacks) + run_after('install')( + PythonPackage._run_default_install_time_test_callbacks) run_after('install')(PythonPackage.sanity_check_prefix) From da189b8d1d0b9bb920b4af2de4016fe4db043425 Mon Sep 17 00:00:00 2001 From: noguchi-k <55966120+noguchi-k@users.noreply.github.com> Date: Thu, 30 Jan 2020 04:45:37 +0900 Subject: [PATCH 036/178] Fastx-toolkit package: patch for Fujitsu compiler (#14218) Specify the scope of pragma pack --- .../fastx-toolkit/fix_pragma_pack.patch | 19 +++++++++++++++++++ .../builtin/packages/fastx-toolkit/package.py | 2 ++ 2 files changed, 21 insertions(+) create mode 100644 var/spack/repos/builtin/packages/fastx-toolkit/fix_pragma_pack.patch diff --git a/var/spack/repos/builtin/packages/fastx-toolkit/fix_pragma_pack.patch b/var/spack/repos/builtin/packages/fastx-toolkit/fix_pragma_pack.patch new file mode 100644 index 00000000000..5f4d178d032 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/fastx-toolkit/fix_pragma_pack.patch @@ -0,0 +1,19 @@ +--- spack-src/src/libfastx/fastx.h.org 2019-12-19 12:05:37.497936486 +0900 ++++ spack-src/src/libfastx/fastx.h 2019-12-19 13:44:55.481837853 +0900 +@@ -58,7 +58,7 @@ + OUTPUT_SAME_AS_INPUT=3 + } OUTPUT_FILE_TYPE; + +-#pragma pack(1) ++#pragma pack(push, 1) + typedef struct + { + /* Record data - common for FASTA/FASTQ */ +@@ -115,6 +115,7 @@ + FILE* input; + FILE* output; + } FASTX ; ++#pragma pack(pop) + + + void fastx_init_reader(FASTX *pFASTX, const char* filename, diff --git a/var/spack/repos/builtin/packages/fastx-toolkit/package.py b/var/spack/repos/builtin/packages/fastx-toolkit/package.py index 48633acbc67..8ad93ddad58 100644 --- a/var/spack/repos/builtin/packages/fastx-toolkit/package.py +++ b/var/spack/repos/builtin/packages/fastx-toolkit/package.py @@ -19,3 +19,5 @@ class FastxToolkit(AutotoolsPackage): # patch implicit fallthrough patch("pr-22.patch") + # fix error [-Werror,-Wpragma-pack] + patch('fix_pragma_pack.patch', when='%fj') From bd60e0f13772b4769cbf6831bcd3bc6497fb121b Mon Sep 17 00:00:00 2001 From: Greg Becker Date: Wed, 29 Jan 2020 12:39:55 -0800 Subject: [PATCH 037/178] fix cycle dependency in libxml+python (#13847) * fix cycle dependency in libxml+python * comment why we need these dependencies --- var/spack/repos/builtin/packages/libxml2/package.py | 3 ++- var/spack/repos/builtin/packages/python/package.py | 7 ++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py index 6c7d08ab3e6..8c91852ae32 100644 --- a/var/spack/repos/builtin/packages/libxml2/package.py +++ b/var/spack/repos/builtin/packages/libxml2/package.py @@ -27,7 +27,8 @@ class Libxml2(AutotoolsPackage): depends_on('zlib') depends_on('xz') - depends_on('python+shared', when='+python') + # avoid cycle dependency for concretizer + depends_on('python+shared~libxml2', 
when='+python') extends('python', when='+python', ignore=r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|' '(lib/xml2.*$)|(lib/cmake.*$)') diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index bdc4a64019f..55dfcc90217 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -70,6 +70,10 @@ class Python(AutotoolsPackage): extendable = True + # Variants to avoid cyclical dependencies for concretizer + variant('libxml2', default=False, + description='Use a gettext library build with libxml2') + variant( 'debug', default=False, description="debug build with extra checks (this is high overhead)" @@ -116,7 +120,8 @@ class Python(AutotoolsPackage): variant('tix', default=False, description='Build Tix module') depends_on('pkgconfig@0.9.0:', type='build') - depends_on('gettext') + depends_on('gettext +libxml2', when='+libxml2') + depends_on('gettext ~libxml2', when='~libxml2') # Optional dependencies # See detect_modules() in setup.py for details From 488e25ea346c8006ada6f0409b4db800129add9c Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 29 Jan 2020 19:29:07 -0500 Subject: [PATCH 038/178] pumi: sim version check, meshes via submodule, ctest (#14597) * pumi: sim version check, meshes via submodule, ctest * Apply suggestions from code review Co-Authored-By: Adam J. Stewart * pumi: update comment on master version string * pumi: description of simmodsuite_version_check variant * pumi: add white space to variant description Co-authored-by: Adam J. 
Stewart --- .../repos/builtin/packages/pumi/package.py | 22 ++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/var/spack/repos/builtin/packages/pumi/package.py b/var/spack/repos/builtin/packages/pumi/package.py index 191d0a31b47..f26c80670e9 100644 --- a/var/spack/repos/builtin/packages/pumi/package.py +++ b/var/spack/repos/builtin/packages/pumi/package.py @@ -24,10 +24,8 @@ class Pumi(CMakePackage): # We will use the scorec/core master branch as the 'nightly' version # of pumi in spack. The master branch is more stable than the # scorec/core develop branch and we perfer not to expose spack users - # to the added instability. The spack version string is 'develop' since - # it compares greater than a numbered version (e.g., 2.1.0). The spack - # version string 'master' compares less than a numbered version. - version('develop', branch='master') + # to the added instability. + version('master', submodules=True, branch='master') version('2.2.1', commit='cd826205db21b8439026db1f6af61a8ed4a18564') # tag 2.2.1 version('2.2.0', commit='8c7e6f13943893b2bc1ece15003e4869a0e9634f') # tag 2.2.0 version('2.1.0', commit='840fbf6ec49a63aeaa3945f11ddb224f6055ac9f') @@ -41,6 +39,9 @@ class Pumi(CMakePackage): description="Enable Simmetrix SimModSuite Support: 'base' enables " "the minimum set of functionality, 'kernels' adds CAD kernel " "support to 'base', and 'full' enables all functionality.") + variant('simmodsuite_version_check', default=True, + description="Enable check of Simmetrix SimModSuite version. 
" + "Disable the check for testing new versions.") depends_on('mpi') depends_on('cmake@3:', type='build') @@ -69,7 +70,10 @@ def cmake_args(self): '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc, '-DPUMI_FORTRAN_INTERFACE=%s' % ('ON' if '+fortran' in spec else 'OFF'), - '-DMDS_ID_TYPE=%s' % ('long' if '+int64' in spec else 'int') + '-DMDS_ID_TYPE=%s' % ('long' if '+int64' in spec else 'int'), + '-DSKIP_SIMMETRIX_VERSION_CHECK=%s' % + ('ON' if '~simmodsuite_version_check' in spec else 'OFF'), + '-DMESHES=%s' % join_path(self.stage.source_path, 'pumi_meshes') ] if self.spec.satisfies('simmodsuite=base'): args.append('-DENABLE_SIMMETRIX=ON') @@ -82,3 +86,11 @@ def cmake_args(self): mpi_id = spec['mpi'].name + spec['mpi'].version.string args.append('-DSIM_MPI=' + mpi_id) return args + + @run_after('build') + @on_package_attributes(run_tests=True) + def check(self): + """Run ctest after building project.""" + + with working_dir(self.build_directory): + ctest(parallel=False) From 85ef1be780c82392227961529d67fd2b498e1beb Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Wed, 29 Jan 2020 17:22:44 -0800 Subject: [PATCH 039/178] environments: synchronize read and uninstall (#14676) * `Environment.__init__` is now synchronized with all writing operations * `spack uninstall` now synchronizes its updates to any associated environment * A side effect of this is that the environment is no longer updated piecemeal as specs are uninstalled - all specs are removed from the environment before they are uninstalled --- lib/spack/spack/cmd/uninstall.py | 18 ++++++++---------- lib/spack/spack/environment.py | 20 ++++++++++++-------- 2 files changed, 20 insertions(+), 18 deletions(-) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 0ad42f4dfb6..2757d5d232e 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -6,6 +6,7 @@ from __future__ import print_function import sys +import itertools import spack.cmd import 
spack.environment as ev @@ -205,9 +206,6 @@ def do_uninstall(env, specs, force): # want to uninstall. spack.package.Package.uninstall_by_spec(item, force=True) - if env: - _remove_from_env(item, env) - # A package is ready to be uninstalled when nothing else references it, # unless we are requested to force uninstall it. is_ready = lambda x: not spack.store.db.query_by_spec_hash(x)[1].ref_count @@ -226,10 +224,6 @@ def do_uninstall(env, specs, force): for item in ready: item.do_uninstall(force=force) - # write any changes made to the active environment - if env: - env.write() - def get_uninstall_list(args, specs, env): # Gets the list of installed specs that match the ones give via cli @@ -317,9 +311,13 @@ def uninstall_specs(args, specs): if not args.yes_to_all: confirm_removal(anything_to_do) - # just force-remove things in the remove list - for spec in remove_list: - _remove_from_env(spec, env) + if env: + # Remove all the specs that are supposed to be uninstalled or just + # removed. + with env.write_transaction(): + for spec in itertools.chain(remove_list, uninstall_list): + _remove_from_env(spec, env) + env.write() # Uninstall everything on the list do_uninstall(env, uninstall_list, args.force) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index 87276eacbc8..dd2d9328493 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -567,6 +567,9 @@ def __init__(self, path, init_file=None, with_view=None): self.clear() if init_file: + # If we are creating the environment from an init file, we don't + # need to lock, because there are no Spack operations that alter + # the init file. 
with fs.open_if_filename(init_file) as f: if hasattr(f, 'name') and f.name.endswith('.lock'): self._read_manifest(default_manifest_yaml) @@ -575,7 +578,8 @@ def __init__(self, path, init_file=None, with_view=None): else: self._read_manifest(f, raw_yaml=default_manifest_yaml) else: - self._read() + with lk.ReadTransaction(self.txlock): + self._read() if with_view is False: self.views = {} @@ -1472,13 +1476,13 @@ def write(self, regenerate_views=True): # Remove yaml sections that are shadowing defaults # construct garbage path to ensure we don't find a manifest by accident - bare_env = Environment(os.path.join(self.manifest_path, 'garbage'), - with_view=self.view_path_default) - keys_present = list(yaml_dict.keys()) - for key in keys_present: - if yaml_dict[key] == config_dict(bare_env.yaml).get(key, None): - if key not in raw_yaml_dict: - del yaml_dict[key] + with fs.temp_cwd() as env_dir: + bare_env = Environment(env_dir, with_view=self.view_path_default) + keys_present = list(yaml_dict.keys()) + for key in keys_present: + if yaml_dict[key] == config_dict(bare_env.yaml).get(key, None): + if key not in raw_yaml_dict: + del yaml_dict[key] # if all that worked, write out the manifest file at the top level # Only actually write if it has changed or was never written From a2f8a2321d94a89ad1cb13e0dccca20e5f5aa325 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 28 Jan 2020 21:36:47 -0800 Subject: [PATCH 040/178] repo: avoid unnecessary spec parsing in `filename_for_package_name()` `filename_for_package_name()` and `dirname_for_package_name()` automatically construct a Spec from their arguments, which adds a fair amount of overhead to importing lots of packages. Removing this removes about 11% of the runtime of importing all packages in Spack (9s -> 8s). - [x] `filename_for_package_name()` and `dirname_for_package_name()` now take a string `pkg_name` arguments instead of specs. 
--- lib/spack/spack/repo.py | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py index c95e889772f..8e3dae5d474 100644 --- a/lib/spack/spack/repo.py +++ b/lib/spack/spack/repo.py @@ -932,7 +932,7 @@ def dump_provenance(self, spec, path): tty.warn("Patch file did not exist: %s" % patch.path) # Install the package.py file itself. - install(self.filename_for_package_name(spec), path) + install(self.filename_for_package_name(spec.name), path) def purge(self): """Clear entire package instance cache.""" @@ -974,20 +974,12 @@ def providers_for(self, vpkg_spec): def extensions_for(self, extendee_spec): return [p for p in self.all_packages() if p.extends(extendee_spec)] - def _check_namespace(self, spec): - """Check that the spec's namespace is the same as this repository's.""" - if spec.namespace and spec.namespace != self.namespace: - raise UnknownNamespaceError(spec.namespace) - - @autospec - def dirname_for_package_name(self, spec): + def dirname_for_package_name(self, pkg_name): """Get the directory name for a particular package. This is the directory that contains its package.py file.""" - self._check_namespace(spec) - return os.path.join(self.packages_path, spec.name) + return os.path.join(self.packages_path, pkg_name) - @autospec - def filename_for_package_name(self, spec): + def filename_for_package_name(self, pkg_name): """Get the filename for the module we should load for a particular package. Packages for a Repo live in ``$root//package.py`` @@ -996,8 +988,7 @@ def filename_for_package_name(self, spec): package doesn't exist yet, so callers will need to ensure the package exists before importing. 
""" - self._check_namespace(spec) - pkg_dir = self.dirname_for_package_name(spec.name) + pkg_dir = self.dirname_for_package_name(pkg_name) return os.path.join(pkg_dir, package_file_name) @property From 3519a176245d9cc6b5f6802aad3b1bb1c2d6ec47 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 28 Jan 2020 21:41:35 -0800 Subject: [PATCH 041/178] specs: avoid traversing specs when parsing The Spec parser currently calls `spec.traverse()` after every parse, in order to set the platform if it's not set. We don't need to do a full traverse -- we can just check the platforrm as new specs are parsed. This takes about a second off the time required to import all packages in Spack (from 8s to 7s). - [x] simplify platform-setting logic in `SpecParser`. --- lib/spack/spack/spec.py | 21 +++++++++++++-------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index 3bee81c0d4a..4b6bd67b6df 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1117,6 +1117,18 @@ def _add_dependency(self, spec, deptypes): self._dependencies[spec.name] = dspec spec._dependents[self.name] = dspec + def _add_default_platform(self): + """If a spec has an os or a target and no platform, give it + the default platform. + + This is private because it is used by the parser -- it's not + expected to be used outside of ``spec.py``. 
+ + """ + arch = self.architecture + if arch and not arch.platform and (arch.os or arch.target): + self._set_architecture(platform=spack.architecture.platform().name) + # # Public interface # @@ -4053,14 +4065,6 @@ def do_parse(self): except spack.parse.ParseError as e: raise SpecParseError(e) - # If the spec has an os or a target and no platform, give it - # the default platform - platform_default = spack.architecture.platform().name - for spec in specs: - for s in spec.traverse(): - if s.architecture and not s.architecture.platform and \ - (s.architecture.os or s.architecture.target): - s._set_architecture(platform=platform_default) return specs def spec_from_file(self): @@ -4192,6 +4196,7 @@ def spec(self, name): else: break + spec._add_default_platform() return spec def variant(self, name=None): From 39035e4517a38f716446d0dcf5aa9996543654e1 Mon Sep 17 00:00:00 2001 From: Jennifer Herting Date: Thu, 30 Jan 2020 05:26:25 -0500 Subject: [PATCH 042/178] [r-rgl] added version 0.100.19 (#14675) --- var/spack/repos/builtin/packages/r-rgl/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/r-rgl/package.py b/var/spack/repos/builtin/packages/r-rgl/package.py index 357304f833b..c890df70d97 100644 --- a/var/spack/repos/builtin/packages/r-rgl/package.py +++ b/var/spack/repos/builtin/packages/r-rgl/package.py @@ -20,6 +20,7 @@ class RRgl(RPackage): version('0.100.26', sha256='e1889c2723ad458b39fdf9366fdaf590d7657d3762748f8534a8491ef754e740') version('0.100.24', sha256='1233a7bdc5a2b908fc64d5f56e92a0e123e8f7c0b9bac93dfd005608b78fa35a') + version('0.100.19', sha256='50630702554e422e0603f27d499aad3b6f822de5a73da7fdf70404ac50df7025') version('0.99.16', sha256='692a545ed2ff0f5e15289338736f0e3c092667574c43ac358d8004901d7a1a61') version('0.98.1', sha256='5f49bed9e092e672f73c8a1a5365cdffcda06db0315ac087e95ab9c9c71a6986') From 23a759cda0bec5e99518b042f123c8f150306a87 Mon Sep 17 00:00:00 2001 From: Jennifer Herting Date: Thu, 30 Jan 2020 
05:26:48 -0500 Subject: [PATCH 043/178] [r-manipulatewidget] added versions (#14674) --- .../repos/builtin/packages/r-manipulatewidget/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/r-manipulatewidget/package.py b/var/spack/repos/builtin/packages/r-manipulatewidget/package.py index 2c05227d228..225688abae9 100644 --- a/var/spack/repos/builtin/packages/r-manipulatewidget/package.py +++ b/var/spack/repos/builtin/packages/r-manipulatewidget/package.py @@ -17,6 +17,13 @@ class RManipulatewidget(RPackage): list_url = "https://cloud.r-project.org/src/contrib/Archive/manipulateWidget/" version('0.10.0', sha256='3d61a3d0cedf5c8a850a3e62ed6af38c600dc3f25b44c4ff07a5093bf9ca4ffd') + version('0.9.0', sha256='5bf4bdb702263b0e156f40f3354922a06db7db544e497addcd6c98d9860bf3a3') + version('0.8.0', sha256='e7e6351b1fb8f39b9895e2536fa7c149cbc5d63d7022f67c1b25232cf0706ca7') + version('0.7.0', sha256='160ce5c68658301e00051c60ac5693701c5bc97b7344bacde0f56be4955231f6') + version('0.6.0', sha256='90aa1b30647d7034166b8d6c6185503b6855c70253e36a41742a84faa77ce0db') + version('0.5.1', sha256='5a672c2bd8ba16ec8212cd9fb620072b243e6d18c02dd3ec70bd8c2a1ff1c9c4') + version('0.5.0', sha256='2599e25f78bb0d748705160e1dfe62a673f5bb388ac5f415f3d649d2511737c8') + version('0.4.0', sha256='65cc7d28c2b2efc81fda35da019ac6e6058580cf0fdf5e31458cc96386c0c599') depends_on('r-base64enc', type=('build', 'run')) depends_on('r-codetools', type=('build', 'run')) From a5b2347cfe500c37c2af55b89950ce10f036e416 Mon Sep 17 00:00:00 2001 From: Jennifer Herting Date: Thu, 30 Jan 2020 05:27:04 -0500 Subject: [PATCH 044/178] [py-joblib] added version 0.11 (#14672) --- var/spack/repos/builtin/packages/py-joblib/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/py-joblib/package.py b/var/spack/repos/builtin/packages/py-joblib/package.py index e1b884d1928..43ea04ed998 100644 --- 
a/var/spack/repos/builtin/packages/py-joblib/package.py +++ b/var/spack/repos/builtin/packages/py-joblib/package.py @@ -19,6 +19,7 @@ class PyJoblib(PythonPackage): version('0.14.0', sha256='6fcc57aacb4e89451fd449e9412687c51817c3f48662c3d8f38ba3f8a0a193ff') version('0.13.2', sha256='315d6b19643ec4afd4c41c671f9f2d65ea9d787da093487a81ead7b0bac94524') + version('0.11', sha256='7b8fd56df36d9731a83729395ccb85a3b401f62a96255deb1a77220c00ed4085') version('0.10.3', sha256='29b2965a9efbc90a5fe66a389ae35ac5b5b0c1feabfc7cab7fd5d19f429a071d') version('0.10.2', sha256='3123553bdad83b143428033537c9e1939caf4a4d8813dade6a2246948c94494b') version('0.10.0', sha256='49b3a0ba956eaa2f077e1ebd230b3c8d7b98afc67520207ada20a4d8b8efd071') From 23a7feb91741f8be42b17fe1d02fbbddaa751810 Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Thu, 30 Jan 2020 10:56:10 -0600 Subject: [PATCH 045/178] Limit the number of spec files downloaded to find matches for buildcaches (#14659) * Limit the number of spec flies downloaded to find matches --- lib/spack/spack/binary_distribution.py | 34 ++++++++++++-------------- lib/spack/spack/cmd/buildcache.py | 14 ++++++++--- lib/spack/spack/package.py | 2 +- lib/spack/spack/test/cmd/buildcache.py | 2 +- share/spack/spack-completion.bash | 2 +- 5 files changed, 29 insertions(+), 25 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index fd89a80fa81..bb5f3d86d09 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -664,7 +664,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, _cached_specs = None -def get_specs(force=False, use_arch=False): +def get_specs(force=False, use_arch=False, names=[]): """ Get spec.yaml's for build caches available on mirror """ @@ -672,6 +672,15 @@ def get_specs(force=False, use_arch=False): arch = architecture.Arch(architecture.platform(), 'default_os', 'default_target') + arch_pattern = 
('([^-]*-[^-]*-[^-]*)') + if use_arch: + arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os) + + names_or_hashes = [name.replace('/', '') for name in names] + names_pattern = '|'.join(names_or_hashes) + regex_pattern = '%s(.*)(%s)(.*)(spec.yaml$)' % (arch_pattern, + names_pattern) + name_re = re.compile(regex_pattern) if _cached_specs: tty.debug("Using previously-retrieved specs") @@ -692,30 +701,19 @@ def get_specs(force=False, use_arch=False): if os.path.exists(mirror_dir): files = os.listdir(mirror_dir) for file in files: - if re.search('spec.yaml', file): + m = name_re.search(file) + if m: link = url_util.join(fetch_url_build_cache, file) - if use_arch and re.search('%s-%s' % - (arch.platform, - arch.os), - file): - urls.add(link) - else: - urls.add(link) + urls.add(link) else: tty.msg("Finding buildcaches at %s" % url_util.format(fetch_url_build_cache)) p, links = web_util.spider( url_util.join(fetch_url_build_cache, 'index.html')) for link in links: - if re.search("spec.yaml", link): - if use_arch and re.search('%s-%s' % - (arch.platform, - arch.os), - link): - urls.add(link) - else: - urls.add(link) - + m = name_re.search(link) + if m: + urls.add(link) _cached_specs = [] for link in urls: with Stage(link, name="build_cache", keep=True) as stage: diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index cbcbc2c0cb4..d3481bb6e66 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -87,6 +87,8 @@ def setup_parser(subparser): help='show variants in output (can be long)') listcache.add_argument('-f', '--force', action='store_true', help="force new download of specs") + listcache.add_argument('-a', '--arch', action='store_true', + help="only list spec for the default architecture") arguments.add_common_arguments(listcache, ['specs']) listcache.set_defaults(func=listspecs) @@ -263,10 +265,10 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False): # List of specs that 
match expressions given via command line specs_from_cli = [] has_errors = False - specs = bindist.get_specs(force) for pkg in pkgs: matches = [] tty.msg("buildcache spec(s) matching %s \n" % pkg) + specs = bindist.get_specs(names=[pkg]) for spec in sorted(specs): if pkg.startswith('/'): pkghash = pkg.replace('/', '') @@ -415,10 +417,14 @@ def install_tarball(spec, args): def listspecs(args): """list binary packages available from mirrors""" - specs = bindist.get_specs(args.force) + specs = list() if args.specs: - constraints = set(args.specs) - specs = [s for s in specs if any(s.satisfies(c) for c in constraints)] + for s in bindist.get_specs(args.force, args.arch, + args.specs): + if s not in set(specs): + specs.append(s) + else: + specs = bindist.get_specs(force=args.force, use_arch=args.arch) display_specs(specs, args, all_headers=True) diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 6856e3a3973..6ad1ebe1f08 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1510,7 +1510,7 @@ def _update_explicit_entry_in_db(self, rec, explicit): def try_install_from_binary_cache(self, explicit): tty.msg('Searching for binary cache of %s' % self.name) - specs = binary_distribution.get_specs(use_arch=True) + specs = binary_distribution.get_specs(use_arch=True, names=[self.name]) binary_spec = spack.spec.Spec.from_dict(self.spec.to_dict()) binary_spec._mark_concrete() if binary_spec not in specs: diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py index f7b7611c33d..fdd76bff207 100644 --- a/lib/spack/spack/test/cmd/buildcache.py +++ b/lib/spack/spack/test/cmd/buildcache.py @@ -17,7 +17,7 @@ def mock_get_specs(database, monkeypatch): specs = database.query_local() monkeypatch.setattr( - spack.binary_distribution, 'get_specs', lambda x: specs + spack.binary_distribution, 'get_specs', lambda x, y, z: specs ) diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash 
index 20a5d936ea0..b408d0b2344 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -400,7 +400,7 @@ _spack_buildcache_install() { _spack_buildcache_list() { if $list_options then - SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -f --force" + SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -f --force -a --arch" else _all_packages fi From ee35d949f9a38fa99529ad5789bb9d3a7b75e8db Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 30 Jan 2020 12:08:47 -0600 Subject: [PATCH 046/178] Add GDAL 3.0.4 (#14688) --- var/spack/repos/builtin/packages/gdal/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py index 58033a153c9..21828112335 100644 --- a/var/spack/repos/builtin/packages/gdal/package.py +++ b/var/spack/repos/builtin/packages/gdal/package.py @@ -18,7 +18,7 @@ class Gdal(AutotoolsPackage): """ homepage = "https://www.gdal.org/" - url = "https://download.osgeo.org/gdal/3.0.3/gdal-3.0.3.tar.xz" + url = "https://download.osgeo.org/gdal/3.0.4/gdal-3.0.4.tar.xz" list_url = "https://download.osgeo.org/gdal/" list_depth = 1 @@ -29,6 +29,7 @@ class Gdal(AutotoolsPackage): 'osgeo.gdal_array', 'osgeo.gdalconst' ] + version('3.0.4', sha256='5569a4daa1abcbba47a9d535172fc335194d9214fdb96cd0f139bb57329ae277') version('3.0.3', sha256='e20add5802265159366f197a8bb354899e1693eab8dbba2208de14a457566109') version('3.0.2', sha256='c3765371ce391715c8f28bd6defbc70b57aa43341f6e94605f04fe3c92468983') version('3.0.1', sha256='45b4ae25dbd87282d589eca76481c426f72132d7a599556470d5c38263b09266') From 1e0408d05ac7bf06f2634bd298d059852fc2f87d Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 30 Jan 2020 19:33:16 +0100 Subject: [PATCH 047/178] Updated docstring and version of lmod to v8.3 (#14687) --- var/spack/repos/builtin/packages/lmod/package.py | 16 +++++++++------- 1 file 
changed, 9 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py index 30ffbd15530..146ef9e3b4e 100644 --- a/var/spack/repos/builtin/packages/lmod/package.py +++ b/var/spack/repos/builtin/packages/lmod/package.py @@ -2,23 +2,25 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * from glob import glob class Lmod(AutotoolsPackage): """Lmod is a Lua based module system that easily handles the MODULEPATH - Hierarchical problem. Environment Modules provide a convenient way to - dynamically change the users' environment through modulefiles. This - includes easily adding or removing directories to the PATH environment - variable. Modulefiles for Library packages provide environment variables - that specify where the library and header files can be found. + Hierarchical problem. + + Environment Modules provide a convenient way to dynamically change the + users' environment through modulefiles. This includes easily adding or + removing directories to the PATH environment variable. Modulefiles for + Library packages provide environment variables that specify where the + library and header files can be found. 
""" homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod' url = 'https://github.com/TACC/Lmod/archive/7.4.11.tar.gz' + version('8.3', sha256='c2c2e9e6b387b011ee617cb009a2199caac8bf200330cb8a065ceedee09e664a') + version('8.2.10', sha256='15676d82235faf5c755a747f0e318badb1a5c3ff1552fa8022c67ff083ee9e2f') version('8.1.5', sha256='3e5846d3d8e593cbcdfa0aed1474569bf5b5cfd19fd288de22051823d449d344') version('8.0.9', sha256='9813c22ae4dd21eb3dc480f6ce307156512092b4bca954bf8aacc15944f23673') version('7.8.15', sha256='00a257f5073d656adc73045997c28f323b7a4f6d901f1c57b7db2b0cd6bee6e6') From b2adcdb389ed684fd4dbb15e44da163c816548a7 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 30 Jan 2020 11:13:36 -0800 Subject: [PATCH 048/178] Bugfix: put environment lock in the right place (#14692) Locate the environment lock in the hidden environment directory rather than the root of the environment. --- lib/spack/spack/environment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py index dd2d9328493..fff1485e3c3 100644 --- a/lib/spack/spack/environment.py +++ b/lib/spack/spack/environment.py @@ -724,7 +724,7 @@ def _transaction_lock_path(self): """The location of the lock file used to synchronize multiple processes updating the same environment. 
""" - return os.path.join(self.path, 'transaction_lock') + return os.path.join(self.env_subdir_path, 'transaction_lock') @property def lock_path(self): From 7b2895109cac96174949b51ba19c5fc9aa011ad7 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Thu, 30 Jan 2020 12:34:54 -0800 Subject: [PATCH 049/178] Document how to add conditional dependencies (#14694) * add short docs section on conditional dependencies * add reference to spec syntax * add note that conditional dependencies can save time --- lib/spack/docs/packaging_guide.rst | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 223c509ac14..e566cb45450 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -1989,6 +1989,28 @@ inject the dependency's ``prefix/lib`` directory, but the package needs to be in ``PATH`` and ``PYTHONPATH`` during the build process and later when a user wants to run the package. +^^^^^^^^^^^^^^^^^^^^^^^^ +Conditional dependencies +^^^^^^^^^^^^^^^^^^^^^^^^ + +You may have a package that only requires a dependency under certain +conditions. For example, you may have a package that has optional MPI support, +- MPI is only a dependency when you want to enable MPI support for the +package. In that case, you could say something like: + +.. code-block:: python + + variant('mpi', default=False) + depends_on('mpi', when='+mpi') + +``when`` can include constraints on the variant, version, compiler, etc. and +the :mod:`syntax` is the same as for Specs written on the command +line. + +If a dependency/feature of a package isn't typically used, you can save time +by making it conditional (since Spack will not build the dependency unless it +is required for the Spec). + .. 
_dependency_dependency_patching: ^^^^^^^^^^^^^^^^^^^ From b072caadeced9ed1eda97825d9242849294f7362 Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Thu, 30 Jan 2020 15:42:48 -0500 Subject: [PATCH 050/178] fix: py-pillow build_ext vs. install (#14666) Previously, the install stage would compile in things that were disabled during the build_ext phase. This would also result in the build pulling in locally installed versions of libraries that were disabled. The install process doesn't honor the same command-line flags that build_ext does, but does call build_ext again. Avoid the whole issue by just writing the options to setup.cfg Also, add the Imagemagick dependency for tests. --- .../builtin/packages/py-pillow/package.py | 29 +++++++++++-------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index 9f537d3138f..36d99f2c017 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -61,6 +61,7 @@ class PyPillow(PythonPackage): depends_on('libwebp', when='+webp') depends_on('libwebp+libwebpmux+libwebpdemux', when='+webpmux') depends_on('openjpeg', when='+jpeg2000') + depends_on('imagemagick', type='test') # Spack does not (yet) support these modes of building # depends_on('libimagequant', when='+imagequant') @@ -86,21 +87,25 @@ def patch(self): setup.filter('include_dirs = []', 'include_dirs = {0}'.format(include_dirs), string=True) - def build_ext_args(self, spec, prefix): - def variant_to_flag(variant): - able = 'enable' if '+' + variant in spec else 'disable' - return '--{0}-{1}'.format(able, variant) + def variant_to_cfg(setup): + able = 'enable' if '+' + variant in self.spec else 'disable' + return '{0}-{1}=1\n'.format(able, variant) - args = ['--enable-zlib', '--enable-jpeg'] + with open('setup.cfg', 'a') as setup: + # Default backend + setup.write('[build_ext]\n') + 
setup.write('enable-zlib=1\n') + setup.write('enable-jpeg=1\n') + variants = ['tiff', 'freetype', 'lcms', 'webp', + 'webpmux', 'jpeg2000'] + for variant in variants: + setup.write(variant_to_cfg(setup)) - variants = ['tiff', 'freetype', 'lcms', 'webp', 'webpmux', 'jpeg2000'] - args.extend(list(map(variant_to_flag, variants))) + # Spack does not (yet) support these modes of building + setup.write('disable-imagequant=1\n') - # Spack does not (yet) support these modes of building - args.append('--disable-imagequant') - - args.append('--rpath={0}'.format(':'.join(self.rpath))) - return args + setup.write('rpath={0}\n'.format(':'.join(self.rpath))) + setup.write('[install]\n') # Tests need to be re-added since `phases` was overridden run_after('build_ext')( From 12a99f4a2df3f4bbb3dc85414cd3b7aaa2849a6c Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Thu, 30 Jan 2020 15:17:55 -0600 Subject: [PATCH 051/178] Use non-mutable default for names in binary_distribution::get_specs call (#14696) * Use non-mutable default for names * Make suggested change --- lib/spack/spack/binary_distribution.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index bb5f3d86d09..b898f27f49d 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -664,7 +664,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, _cached_specs = None -def get_specs(force=False, use_arch=False, names=[]): +def get_specs(force=False, use_arch=False, names=None): """ Get spec.yaml's for build caches available on mirror """ @@ -676,6 +676,8 @@ def get_specs(force=False, use_arch=False, names=[]): if use_arch: arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os) + if names is None: + names = [''] names_or_hashes = [name.replace('/', '') for name in names] names_pattern = '|'.join(names_or_hashes) regex_pattern = '%s(.*)(%s)(.*)(spec.yaml$)' % 
(arch_pattern, From ed501eaab275f50dc3d689ee9e8d6c7fa707fd94 Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Thu, 30 Jan 2020 16:06:50 -0600 Subject: [PATCH 052/178] Bypass build_cache/index.html read when trying to download spec.yaml for concretized spec. (#14698) * Add binary_distribution::get_spec which takes concretized spec Add binary_distribution::try_download_specs for downloading of spec.yaml files to cache get_spec is used by package::try_install_from_binary_cache to download only the spec.yaml for the concretized spec if it exists. --- lib/spack/spack/binary_distribution.py | 86 ++++++++++++++++++++------ lib/spack/spack/package.py | 3 +- 2 files changed, 68 insertions(+), 21 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index b898f27f49d..3e5dc89313e 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -660,16 +660,79 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, shutil.rmtree(tmpdir) -#: Internal cache for get_specs +# Internal cache for downloaded specs _cached_specs = None +def try_download_specs(urls=None, force=False): + ''' + Try to download the urls and cache them + ''' + global _cached_specs + _cached_specs = [] + if urls is None: + return {} + for link in urls: + with Stage(link, name="build_cache", keep=True) as stage: + if force and os.path.exists(stage.save_filename): + os.remove(stage.save_filename) + if not os.path.exists(stage.save_filename): + try: + stage.fetch() + except fs.FetchError: + continue + with open(stage.save_filename, 'r') as f: + # read the spec from the build cache file. All specs + # in build caches are concrete (as they are built) so + # we need to mark this spec concrete on read-in. 
+ spec = Spec.from_yaml(f) + spec._mark_concrete() + _cached_specs.append(spec) + + return _cached_specs + + +def get_spec(spec=None, force=False): + """ + Check if spec.yaml exists on mirrors and return it if it does + """ + global _cached_specs + urls = set() + if spec is None: + return {} + specfile_name = tarball_name(spec, '.spec.yaml') + if _cached_specs: + tty.debug("Using previously-retrieved specs") + return _cached_specs + + if not spack.mirror.MirrorCollection(): + tty.debug("No Spack mirrors are currently configured") + return {} + + for mirror in spack.mirror.MirrorCollection().values(): + fetch_url_build_cache = url_util.join( + mirror.fetch_url, _build_cache_relative_path) + + mirror_dir = url_util.local_file_path(fetch_url_build_cache) + if mirror_dir: + tty.msg("Finding buildcaches in %s" % mirror_dir) + link = url_util.join(fetch_url_build_cache, specfile_name) + urls.add(link) + + else: + tty.msg("Finding buildcaches at %s" % + url_util.format(fetch_url_build_cache)) + link = url_util.join(fetch_url_build_cache, specfile_name) + urls.add(link) + + return try_download_specs(urls=urls, force=force) + + def get_specs(force=False, use_arch=False, names=None): """ Get spec.yaml's for build caches available on mirror """ global _cached_specs - arch = architecture.Arch(architecture.platform(), 'default_os', 'default_target') arch_pattern = ('([^-]*-[^-]*-[^-]*)') @@ -716,25 +779,8 @@ def get_specs(force=False, use_arch=False, names=None): m = name_re.search(link) if m: urls.add(link) - _cached_specs = [] - for link in urls: - with Stage(link, name="build_cache", keep=True) as stage: - if force and os.path.exists(stage.save_filename): - os.remove(stage.save_filename) - if not os.path.exists(stage.save_filename): - try: - stage.fetch() - except fs.FetchError: - continue - with open(stage.save_filename, 'r') as f: - # read the spec from the build cache file. 
All specs - # in build caches are concrete (as they are built) so - # we need to mark this spec concrete on read-in. - spec = Spec.from_yaml(f) - spec._mark_concrete() - _cached_specs.append(spec) - return _cached_specs + return try_download_specs(urls=urls, force=force) def get_keys(install=False, trust=False, force=False): diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 6ad1ebe1f08..d146be9af95 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1510,7 +1510,8 @@ def _update_explicit_entry_in_db(self, rec, explicit): def try_install_from_binary_cache(self, explicit): tty.msg('Searching for binary cache of %s' % self.name) - specs = binary_distribution.get_specs(use_arch=True, names=[self.name]) + specs = binary_distribution.get_spec(spec=self.spec, + force=False) binary_spec = spack.spec.Spec.from_dict(self.spec.to_dict()) binary_spec._mark_concrete() if binary_spec not in specs: From 9635ff3d20c17a92a89cf82db8d3f877dd04e1c7 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Fri, 31 Jan 2020 02:19:55 +0100 Subject: [PATCH 053/178] `spack containerize` generates containers from envs (#14202) This PR adds a new command to Spack: ```console $ spack containerize -h usage: spack containerize [-h] [--config CONFIG] creates recipes to build images for different container runtimes optional arguments: -h, --help show this help message and exit --config CONFIG configuration for the container recipe that will be generated ``` which takes an environment with an additional `container` section: ```yaml spack: specs: - gromacs build_type=Release - mpich - fftw precision=float packages: all: target: [broadwell] container: # Select the format of the recipe e.g. 
docker, # singularity or anything else that is currently supported format: docker # Select from a valid list of images base: image: "ubuntu:18.04" spack: prerelease # Additional system packages that are needed at runtime os_packages: - libgomp1 ``` and turns it into a `Dockerfile` or a Singularity definition file, for instance: ```Dockerfile # Build stage with Spack pre-installed and ready to be used FROM spack/ubuntu-bionic:prerelease as builder # What we want to install and how we want to install it # is specified in a manifest file (spack.yaml) RUN mkdir /opt/spack-environment \ && (echo "spack:" \ && echo " specs:" \ && echo " - gromacs build_type=Release" \ && echo " - mpich" \ && echo " - fftw precision=float" \ && echo " packages:" \ && echo " all:" \ && echo " target:" \ && echo " - broadwell" \ && echo " config:" \ && echo " install_tree: /opt/software" \ && echo " concretization: together" \ && echo " view: /opt/view") > /opt/spack-environment/spack.yaml # Install the software, remove unecessary deps and strip executables RUN cd /opt/spack-environment && spack install && spack autoremove -y RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \ xargs file -i | \ grep 'charset=binary' | \ grep 'x-executable\|x-archive\|x-sharedlib' | \ awk -F: '{print $1}' | xargs strip -s # Modifications to the environment that are necessary to run RUN cd /opt/spack-environment && \ spack env activate --sh -d . 
>> /etc/profile.d/z10_spack_environment.sh # Bare OS image to run the installed executables FROM ubuntu:18.04 COPY --from=builder /opt/spack-environment /opt/spack-environment COPY --from=builder /opt/software /opt/software COPY --from=builder /opt/view /opt/view COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh RUN apt-get -yqq update && apt-get -yqq upgrade \ && apt-get -yqq install libgomp1 \ && rm -rf /var/lib/apt/lists/* ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l"] ``` --- lib/spack/docs/containers.rst | 307 ++++++++++++++++++ lib/spack/docs/dockerhub_spack.png | Bin 0 -> 90235 bytes lib/spack/docs/environments.rst | 2 + lib/spack/docs/index.rst | 1 + lib/spack/spack/cmd/containerize.py | 25 ++ lib/spack/spack/container/__init__.py | 81 +++++ lib/spack/spack/container/images.json | 50 +++ lib/spack/spack/container/images.py | 72 ++++ lib/spack/spack/container/writers/__init__.py | 154 +++++++++ lib/spack/spack/container/writers/docker.py | 30 ++ .../spack/container/writers/singularity.py | 33 ++ lib/spack/spack/schema/container.py | 82 +++++ lib/spack/spack/schema/merged.py | 2 + lib/spack/spack/test/cmd/gc.py | 4 +- lib/spack/spack/test/cmd/test.py | 2 +- lib/spack/spack/test/container/cli.py | 16 + lib/spack/spack/test/container/conftest.py | 43 +++ lib/spack/spack/test/container/docker.py | 74 +++++ lib/spack/spack/test/container/images.py | 58 ++++ lib/spack/spack/test/container/schema.py | 16 + lib/spack/spack/test/container/singularity.py | 42 +++ share/spack/spack-completion.bash | 6 +- share/spack/templates/container/Dockerfile | 51 +++ .../spack/templates/container/singularity.def | 90 +++++ 24 files changed, 1238 insertions(+), 3 deletions(-) create mode 100644 lib/spack/docs/containers.rst create mode 100644 lib/spack/docs/dockerhub_spack.png create mode 100644 lib/spack/spack/cmd/containerize.py create mode 100644 lib/spack/spack/container/__init__.py create mode 100644 
lib/spack/spack/container/images.json create mode 100644 lib/spack/spack/container/images.py create mode 100644 lib/spack/spack/container/writers/__init__.py create mode 100644 lib/spack/spack/container/writers/docker.py create mode 100644 lib/spack/spack/container/writers/singularity.py create mode 100644 lib/spack/spack/schema/container.py create mode 100644 lib/spack/spack/test/container/cli.py create mode 100644 lib/spack/spack/test/container/conftest.py create mode 100644 lib/spack/spack/test/container/docker.py create mode 100644 lib/spack/spack/test/container/images.py create mode 100644 lib/spack/spack/test/container/schema.py create mode 100644 lib/spack/spack/test/container/singularity.py create mode 100644 share/spack/templates/container/Dockerfile create mode 100644 share/spack/templates/container/singularity.def diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst new file mode 100644 index 00000000000..bbb21a2e005 --- /dev/null +++ b/lib/spack/docs/containers.rst @@ -0,0 +1,307 @@ +.. Copyright 2013-2020 Lawrence Livermore National Security, LLC and other + Spack Project Developers. See the top-level COPYRIGHT file for details. + + SPDX-License-Identifier: (Apache-2.0 OR MIT) + +.. _containers: + +================ +Container Images +================ + +Spack can be an ideal tool to setup images for containers since all the +features discussed in :ref:`environments` can greatly help to manage +the installation of software during the image build process. Nonetheless, +building a production image from scratch still requires a lot of +boilerplate to: + +- Get Spack working within the image, possibly running as root +- Minimize the physical size of the software installed +- Properly update the system software in the base image + +To facilitate users with these tedious tasks, Spack provides a command +to automatically generate recipes for container images based on +Environments: + +.. 
code-block:: console + + $ ls + spack.yaml + + $ spack containerize + # Build stage with Spack pre-installed and ready to be used + FROM spack/centos7:latest as builder + + # What we want to install and how we want to install it + # is specified in a manifest file (spack.yaml) + RUN mkdir /opt/spack-environment \ + && (echo "spack:" \ + && echo " specs:" \ + && echo " - gromacs+mpi" \ + && echo " - mpich" \ + && echo " concretization: together" \ + && echo " config:" \ + && echo " install_tree: /opt/software" \ + && echo " view: /opt/view") > /opt/spack-environment/spack.yaml + + # Install the software, remove unecessary deps + RUN cd /opt/spack-environment && spack install && spack gc -y + + # Strip all the binaries + RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \ + xargs file -i | \ + grep 'charset=binary' | \ + grep 'x-executable\|x-archive\|x-sharedlib' | \ + awk -F: '{print $1}' | xargs strip -s + + # Modifications to the environment that are necessary to run + RUN cd /opt/spack-environment && \ + spack env activate --sh -d . >> /etc/profile.d/z10_spack_environment.sh + + + # Bare OS image to run the installed executables + FROM centos:7 + + COPY --from=builder /opt/spack-environment /opt/spack-environment + COPY --from=builder /opt/software /opt/software + COPY --from=builder /opt/view /opt/view + COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh + + RUN yum update -y && yum install -y epel-release && yum update -y \ + && yum install -y libgomp \ + && rm -rf /var/cache/yum && yum clean all + + RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc + + + LABEL "app"="gromacs" + LABEL "mpi"="mpich" + + ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l"] + + +The bits that make this automation possible are discussed in details +below. 
All the images generated in this way will be based on +multi-stage builds with: + +- A fat ``build`` stage containing common build tools and Spack itself +- A minimal ``final`` stage containing only the software requested by the user + +----------------- +Spack Base Images +----------------- + +Docker images with Spack preinstalled and ready to be used are +built on `Docker Hub `_ +at every push to ``develop`` or to a release branch. The OS that +are currently supported are summarized in the table below: + +.. _containers-supported-os: + +.. list-table:: Supported operating systems + :header-rows: 1 + + * - Operating System + - Base Image + - Spack Image + * - Ubuntu 16.04 + - ``ubuntu:16.04`` + - ``spack/ubuntu-xenial`` + * - Ubuntu 18.04 + - ``ubuntu:16.04`` + - ``spack/ubuntu-bionic`` + * - CentOS 6 + - ``centos:6`` + - ``spack/centos6`` + * - CentOS 7 + - ``centos:7`` + - ``spack/centos7`` + +All the images are tagged with the corresponding release of Spack: + +.. image:: dockerhub_spack.png + +with the exception of the ``latest`` tag that points to the HEAD +of the ``develop`` branch. These images are available for anyone +to use and take care of all the repetitive tasks that are necessary +to setup Spack within a container. All the container recipes generated +automatically by Spack use them as base images for their ``build`` stage. + + +------------------------- +Environment Configuration +------------------------- + +Any Spack Environment can be used for the automatic generation of container +recipes. Sensible defaults are provided for things like the base image or the +version of Spack used in the image. If a finer tuning is needed it can be +obtained by adding the relevant metadata under the ``container`` attribute +of environments: + +.. code-block:: yaml + + spack: + specs: + - gromacs+mpi + - mpich + + container: + # Select the format of the recipe e.g. 
docker, + # singularity or anything else that is currently supported + format: docker + + # Select from a valid list of images + base: + image: "centos:7" + spack: develop + + # Whether or not to strip binaries + strip: true + + # Additional system packages that are needed at runtime + os_packages: + - libgomp + + # Extra instructions + extra_instructions: + final: | + RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc + + # Labels for the image + labels: + app: "gromacs" + mpi: "mpich" + +The tables below describe the configuration options that are currently supported: + +.. list-table:: General configuration options for the ``container`` section of ``spack.yaml`` + :header-rows: 1 + + * - Option Name + - Description + - Allowed Values + - Required + * - ``format`` + - The format of the recipe + - ``docker`` or ``singularity`` + - Yes + * - ``base:image`` + - Base image for ``final`` stage + - See :ref:`containers-supported-os` + - Yes + * - ``base:spack`` + - Version of Spack + - Valid tags for ``base:image`` + - Yes + * - ``strip`` + - Whether to strip binaries + - ``true`` (default) or ``false`` + - No + * - ``os_packages`` + - System packages to be installed + - Valid packages for the ``final`` OS + - No + * - ``extra_instructions:build`` + - Extra instructions (e.g. `RUN`, `COPY`, etc.) at the end of the ``build`` stage + - Anything understood by the current ``format`` + - No + * - ``extra_instructions:final`` + - Extra instructions (e.g. `RUN`, `COPY`, etc.) at the end of the ``final`` stage + - Anything understood by the current ``format`` + - No + * - ``labels`` + - Labels to tag the image + - Pairs of key-value strings + - No + +.. 
list-table:: Configuration options specific to Singularity + :header-rows: 1 + + * - Option Name + - Description + - Allowed Values + - Required + * - ``singularity:runscript`` + - Content of ``%runscript`` + - Any valid script + - No + * - ``singularity:startscript`` + - Content of ``%startscript`` + - Any valid script + - No + * - ``singularity:test`` + - Content of ``%test`` + - Any valid script + - No + * - ``singularity:help`` + - Description of the image + - Description string + - No + +Once the Environment is properly configured a recipe for a container +image can be printed to standard output by issuing the following +command from the directory where the ``spack.yaml`` resides: + +.. code-block:: console + + $ spack containerize + +The example ``spack.yaml`` above would produce for instance the +following ``Dockerfile``: + +.. code-block:: docker + + # Build stage with Spack pre-installed and ready to be used + FROM spack/centos7:latest as builder + + # What we want to install and how we want to install it + # is specified in a manifest file (spack.yaml) + RUN mkdir /opt/spack-environment \ + && (echo "spack:" \ + && echo " specs:" \ + && echo " - gromacs+mpi" \ + && echo " - mpich" \ + && echo " concretization: together" \ + && echo " config:" \ + && echo " install_tree: /opt/software" \ + && echo " view: /opt/view") > /opt/spack-environment/spack.yaml + + # Install the software, remove unecessary deps + RUN cd /opt/spack-environment && spack install && spack gc -y + + # Strip all the binaries + RUN find -L /opt/view/* -type f -exec readlink -f '{}' \; | \ + xargs file -i | \ + grep 'charset=binary' | \ + grep 'x-executable\|x-archive\|x-sharedlib' | \ + awk -F: '{print $1}' | xargs strip -s + + # Modifications to the environment that are necessary to run + RUN cd /opt/spack-environment && \ + spack env activate --sh -d . 
>> /etc/profile.d/z10_spack_environment.sh + + + # Bare OS image to run the installed executables + FROM centos:7 + + COPY --from=builder /opt/spack-environment /opt/spack-environment + COPY --from=builder /opt/software /opt/software + COPY --from=builder /opt/view /opt/view + COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh + + RUN yum update -y && yum install -y epel-release && yum update -y \ + && yum install -y libgomp \ + && rm -rf /var/cache/yum && yum clean all + + RUN echo 'export PS1="\[$(tput bold)\]\[$(tput setaf 1)\][gromacs]\[$(tput setaf 2)\]\u\[$(tput sgr0)\]:\w $ \[$(tput sgr0)\]"' >> ~/.bashrc + + + LABEL "app"="gromacs" + LABEL "mpi"="mpich" + + ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l"] + +.. note:: + Spack can also produce Singularity definition files to build the image. The + minimum version of Singularity required to build a SIF (Singularity Image Format) + from them is ``3.5.3``. \ No newline at end of file diff --git a/lib/spack/docs/dockerhub_spack.png b/lib/spack/docs/dockerhub_spack.png new file mode 100644 index 0000000000000000000000000000000000000000..44ff0ed7ed87652681e4d0bddd0c568d57fcc8c3 GIT binary patch literal 90235 zcmeFYRX|l=-!7`6AW{;N3y@Z%yOEF%>F$#5T!hk%h;&LycPzRWol=YLUc{nvPrvW| zzu$Ya_r*E;?wol6i&kB+luZUkgdGZ89T1s5`$&=@oPo5x&zC;Fo z^Q!TEIPmScv#7M{OW^W;X&MUrOyu%O(?!MJ+{N9<$?S=RoxQCYqqB*VnVFsQ7kif@ zq&A@^Pu@O}78g2D3CO-9!;jkyg z*Mw>nfMvzSO{wFbVB#S)erQzvulIA5*BrO0ZWyQBB#gV6_b0CuB&mQDXk$Gn;Hkm; zEprSN#Wyw?&8L=%n?6iUw!HtZ*%eH!AxCf?a+PsHKHGmBVC5+oHvd%JGZ|83bA+Za zTlan;Zc?HnBeZWYbE)_f@OB&V42Ogz>6?E-7BJs!R~`7RTHbI8$b3JdR6n4Mbun^!|}RG;@p z-G{vsZ!f)|i^!7yG3?t25=N$s)jxhr=VqP{shM;qvwq**^wJt4tmifB0&uSaKfeA% z82rt4ZsXMRKr_vQQo*3Iy(V2arxc7kpQtZwwo6aR9Slso`fcwhpHH2I!41n|91i@` zQL6sqQ3$^PQ(O7AX^F%?JH8QU6-FfH-{NGDu>EjaFHN@85jl-tWE4ME`;rv5tCDp9 zJKAhKNe1WYDHpPSp{M~93I?*_m`>j}%L)%@m*#^}hA`q>ybP_qhaKcKuqs=&qO$q# z^Z2rs46HHix-U!)JE3gJy!XK;IA!>f(NQ&5U1=N9nk712{F#%nXmZ^J#20CBYH8E= 
zhyF4f)DrZ}$Hym^n9alJBUffuSWi;wB9z2!Y4mZV^*OU1Nj`W=HxcX(W-dg@K;Wxd93B<9(%qP~m zPtZPzQbs047!m%Ip>cwMf-Tpman9bL;e9ZRl*u5$6#BNS_7x^hVr{`|Vhj7NAhWpy zo!;JmZ291~mVfc)AA{m0!spgL*K!<$2yyw{Tqf;rCdGcCG$=b>TMcLsXRx-WQ;f$* zznyb$KtoC5_Q7W_;R$jH&uE8op&M5cZWc+_>Soi;NV3KCUJ68Scv)K6nX5C5l*4_~ z9@E{ijTj`H7fc;mK_5rer8PM{WC7jnpi-Q-s`CAZxwIKCM|#s zMF`MuvPJLXIP1N-1~u_rjK?3Xy9$n)Y~Or^9WT5YjvMS#0B5`H4^S6tlae(yCVznM zug<~uc9*CpcF#f@`+FJk(1>Xg zfg)Q0cSx+l1fK;(vK-pUHKHGM&^~&h^s0zjK5-5~P*GhFVdjiq)q;4UkEi_mrz0ql zCHx)UD-Kh(Zev#Ce(IJU&}}1@F*REn#944Oy}GWIaa5jiJISTN{jv^cpReU+wCS|t zI~jPNPrz_Z@N%h|CrodwAyMK9k6*(BI6mM-&i)bA|#tpfHF$cdR+ed-)T#AJ36d7aF36E$o zPV5}lapNIT)OkdhsKWw}Gt5FctZ8GwOaLuBo%Ca%Wy;S%vw=LeWYr(yYziO6M~cUY z#M3fo2hrl$j2ILKSf%)T11&~K*%z9;gPrAsTLvu}#Ju38`^Uj&vjHYxIHQz~n6Gm7q zkO$csWkq5DK^dC{~n{m54J#+USb|HY8Dnqjc zjZR_7O!8&GG5CsN5DZl^tpec+RZ#X5)c<^^PAAOR9NMSK&1iRefWdgTi1tb7-rvqV z!zNL|{pwbO^SDg1s(H+LtsJ6wacU6O>g!1rGDy+(h**%;dSLtzx6tr*B>vKMT`?Oa zAx(2;G~xuJVbO5X!63}PkqIA;HDGg|lx&(l^twR((NF!&fM%DE2RjM5AzwQ0b8ih| z)-k{-FZ@kRT-Bs%8hz_F?i|;yI5O3%N`_PyS~2<=$x2H8XF*lXiiXyph}&m#3*W0QNbL1(79P9HYI)-f;D>tSh&2jAV$ z=|#jPSq=3Hgw37>0efh3h^D!$)|0Wu9P~z>s|3phh3MDyk$6F**3-c?Fx~!uWQ|)>*Zy}Pv|3CQk)PUbv>dVC7De2buSh11W`6i zC!+W)A>!a*w{u5OZi?G6Rl6_=f15!^`ls|7H3n#~_ig88xF&0e$Lm-*TIpOB!hC6a z4Q@hYh;Fngy$B_K!Dsqa7$(hRR^FI&St&kTUQr>|!CtRGaT?9iMN)wR-;_bfO+Or5 z7r%4v+rK1j&?~B-?BxNPuHoCbcaW;!p1r(OzdF`>KV1W3hCD72$#Yq2llpqM=QQE` zsKGF&lb2E1QE8?zKQ#=IeEsQpTq^IanS}^!?c>RrVAu;wv;bW2fjrVyKErg3V>GQz zKtw*IO;znTopL2{<*%lJyh+kl;oeWVp?$l@IrmH7E7rrPiUx>>V=lmVcNL=xKYMiW zyRS}R$V_eZAyk`_e(PHhyG(B3q|!&5)Z@hMg6rb#a60M*lGeM#>Lk?V)~Z-%Z5uNF z9fUA=fCEC?#>L&s&1AnJCOmku3|4sDKFJh{Cw~;dbs0=Xm3b=GS!93k4iRNgWnJyg zD1BVW`To^ZDEYzptjfzi9^~!1Sr_>=SDuLnoaDVu8#?oy*Gj7nt*PNNazo7P=WnC~ zF*nCLE8nhb^fgF(HDXOBX?=|wdL8uin?XBbAW%u_^@~8I!0i`=37;5}DPI<;(%1UM z6#V#EWR?$M{3@O8eRe;TEN(WFw7Mj9f9AFsAbq(oACDG&P5Lp;x}vpsU^5{28q*z- z<3oiEmbW{MvTzqN)bPFI&aAx``^xR20mXQwQ9H}g*c(X3Zz$r2x-B)&Hr<%28}P!i zSP0M4a8Zky&)mZaR#zAR 
zCZ!~OwW@3~C&%LqE9YG&m!Vnp@EUSzpxM}Vkj+796_vYJXF`ohMH|nJqYz7PoBW^1 z!2UYjcK8E=eOnU5fT0K1lIPl1be7U|sK7h}e~A&LmHj029U&0gtgTmTfg$>gx5+lms7?Vy+I}Ki6fQ zG;#e8D%ot5NQ_@u&+Wj>c3Z@Gd0zxDs!&+bEQ6d+2Xp}e?V9$ZhLOP@X2=1SiW&H zg+4tVCVeAxVdMMf=^JO2+SHZuWVqoIVcs1C!6@FW&hM6)Wk=r>Ps@nW=FcnTOnrbm zt-2Goo?`ub5b8CklkmCcUm_!fK5a&r&`}yso2N2wZ0t&j?nTxR(LP0 zcYnL*svv@W;qD8$pOZp(4+g$Y)lILJ--AVt( zUw0sz+PGcx-pWQNP;mAb4A9!jA2JEX*JoQ?9o5Lwn*QRL1q@(I^Kjk+dq|FDx!y?O z0FH-)-0Tv;@ysZJqBKc*Y9b9!LC;nucj8rdwWJQNkDEy>N}1hO9N@iHA6{)5o;lmu z4XizuN;?{*isgXMBKFwz)RvCd^_jj@L(I&6IH!;r;|t8|ZRRKv%uZ`GNi?qCaSRgM zw$>MP9;L_AuV;)z zl+idQaA~SO^Y}fvo};+ke~ayIFx|dyKGmA4FI*lK)$+9bahVKQjp3$*?&SL@D#^>? ziRVWK8HR2X8y{IWmh~ANXQpASB!n`Lt5C!pOgB+O(S7%vvbC;7m0VkRn%ARhSXRe- ztr-veih^5cOjgqJ>`2*J89!4uZtce|laGFP%!S5PcW`IY?Y>9kxi%p@8ZlX7<$49X zU~6k*ldiTt-Vuqx63!s6|A8n)TL)JEEGg!CdgPVg@l`+39wT(Swp!PYmz{UZgly;d z>VW#@6swH4*zE`jExXT9k$3c$4LO2cT6C1|7;w37 zP?bdvt;-cv*35gEF7z!~gwY(=x1-fw(X8 z7re}!`WoR&+G|*3qolYsi!8#u7ED7GZ=GvE&UztGF!^va(Ob+Cq^#@W5V~4;6SM8g z&9&ph$_c1GEm^>PF@TOxi(_Ybsh)w=lw`!JuAyOZ^~3zY*+s}R#01xeR7Tj^R_N-I z)-dKEF=Hb=CTUO3?HmWXkc)YEFGa}2Ml)CH@Z8s<1NzKUeO#TJqGprv9fPfznkj|N zN};4)=LWp0qA3dVso)JvZf09YeVtP6cl$wUi#E=S3xo!9`jnwrIGs0edeWX(b(Ri` z&4k-EJOY!#-PGnp&35XC+hUvFML8)gNp!iHN7h3GeJi-%ns=-$*m>skC}Q3SEND^N zO(l5RMGQ|$2J3%y*Pv{ztsX)dl{!7ti@*6kdTkjYo6o;cU*6fF+*gv@Q$7?Oix_md zI4X{9FT9I>Le*f?*p)Y_ z*IHlS89+)#1U0k}B6QV+Yu?&Xj?QyojMzF}U5v2l6ISw=Ad0K?S{w4Z^KIgDO;`l& z(20oomzy7<^zOddhTIG4+5M+1pYFb%w)ZzmxDt^XCc}JaE7q%+ZK^r2Es* zox;>zsGSY4B4aN3({wiyb;2PzPz#)28MQF*I7IR6B>h9`RiBnL z+@lQN8+fd@Ro8gM#uklZYio7q(H#d=ty*`fO`0*^fqsrV?{QRlZD;#R54ZN zxluRDh}7xSAPMBO)voB?{@Q|bZoyTHN|EehpIf#pm0%s$TKNvKh4VL?9rWk9)Sd3z z_npH7X-(X4b*rZ;c6H@RWT%a{o?&a| zw()GK?%jySG?&w+kKn=3i)OI2|0Qz_0!ZF3@T@X-1*sPP93ezoYifrIZp2z{FE#eK z)1W#ie?)uj^~c4Ru&#Tf*}!N|omIrzkcJhsyghBHV%)rdV(Z0-Z6(CkH9U>{$@!+G1mF~JES~Pk;_?pDty<-j|*@9keq~KwZ9ba`u2qlg}5U0bQPQJ^1l z6Xaiq7~oxZCg2?Drmb{Y(0HDPj#8yR1devZtp;DS+Eh>TW#!-*6SI8qJZmyv!rDF0 
zz5z$s5_@l)fPmsEXFFetm$vP)x)t?^`FBez0Xp%;%kMS8>NHW8+JyaD^7!`Juh$hb zhFFW_}Zj4P>D9Y6(uZ?8vaAsrB8onmuXe0n`hbMZ!^ zH6!C-eSVF$t(9kFjuA5|y5pO1yGC82=URp00^8N}n<(hr@N)C#jd2C6nUbJYAT{_= zW&J|w(uVP5!SM{_ll6FSR`b=qs)OvK^W6@(y3G%z@*2$nBSQC!Lsosvi+<`i%T%wj z@+bmQrIgE5-?_O?j*)dSN4?c zdCI7xm%JW3Z%IS2ScT(^CST5TWMFr-RMPJ%S3 zjmzcsOq~*_ss4fx4>!{kzZSo$F><-Ox~Pe!=mnt7>uH-H+iMZ7FW(&5B;AX>8deI%kG?p{#~$D5ToUbLm-Lnq z=>`g;Plrq&h#8kyxO?5*GGSHwuB2ptolc#a<->9RFZ-%N5{9M9-|D0;0&l!Yz{NG) z@=Gl|BU`Q7G^TE2ydL@y!|NgUH$8uVTmz`~hNS*bY$=qo6c0TrWe2K}I@Whxa%Lj+~7@=*Ov0GGC{&{|D@%k&qqR`*+ zkmYyc!to*T5T}t>=LUb1_2ZHX>MldDoMMvLvz*mILVLd;?B*oFn=ARbuLjA2CzlzO z7mMPVmvNHb%@5)Z;Ie^zj6{hU(7D30$0+IwLCcg^dj`VVhh%(Q`kP; zNJDHWM1H+zu2s&T+o# zl$3O%r2szDc)CLn#eK&!$gU@(2r2^-KeMGa1`+EaQy|*O?^lU=-H;WE-(}J{2P!>c zEG07?>4L~v7r=`QiDpsw z*XCX5`8_vr{rQXY*Tr+3y?W5EjC1ryel}-WtK5k_VU$1_ziTt#6 z7+m)`AG?K@iW#WbYbC8LB*0vuCMZ6g@75f6i-Zeey^kZQ!DtjmzBMuL{F~|$JXZG1 z?evmCIB-F)v2FW|@)zh$rxY_Iu3D3O$5I8ttIo%!nhORTr*eKBAFYQrPPjpg#}I4l z(xR&(Ev7*d#-)qT37Ka{F+e_oRU&n~3Z-jp%TkZMoGmBS5$Cw_acG|DltLbfV!Yp; z8K6zAXNo+bixyetp%RclyUN$D&`{Y=zBK~@!%3g_{j@lYCEDk zXf%$GZ?_J9?{!YKO7qb;!wccH3z4|!R%gewYs+fioQL~C+-6_5$lHr4yJ%g`$;}JI z#2tLp$ps`yl0W{eH4*0kgP@DrZZ4_rn+2cCHdVM;j6O--RyCm>nc)hIOCE!fgLaJ& z`KmPpTY@lHWII;u?1h@lCR>nktR`Q;As;|ffGB?y;_b>Ik*1@3k;{AkW&kAMog|Lv z>v3E6oRj+F~fCw-|_|^1pyDXP}6{)9X%ampG~`v5*ma{iwl%N3caCbS{F zvoT#rM9h)2YAN$L?4DMdZJ9uNYu4o-w>r<6w3taR+12|3qwPC!Yvjp=#z5Kdhyc5# z(Rn#EL%a$4-!AMJm8fWZHg9xYumHvu0#C9cT-C+}8%xi(a}X-0!*JL&VTM?!P3VTX z5NcYl651losNg$W&cHO`38DQqoth2+9U@5WKae9DUfeD( zacx{W{}6kGo05P6;X^^qCVB?i>foaP2v zx#h~Zb?WuFZ;ao9>e`$N&voAyu`Udgjs57=?FG1K-?c55^!!`%Y3=Ve#HutmMQ$O0 zfmTqfSS$Xl!2scckdU#I3GcWC^#e_&C3qsaP8RpnLzeYpSDd>pX66ZcJm))ks#Wl% zNr-QB8svI`H+)>&#+c()B)#z5UgJ3t3GtiE8Ye`WIY*j_f`T#0u&N(tF%B9_2u@oi zZ&@$`n44);{1&Q$VR6@6FV(czanxHd+Qt7u%j6;QEN2~W!ftMfA|JI0hoePo?YJ(~ zS)&Mn^i;Ad(%hq+n={}&cDVIFT7VW9gzL&%XWZ0zih%%^aQ6U6n`ECZoGgq#8D+zG z*eRiQR>5!EDV+|u1bn3|2?J28h)Y&ZLr+|*O-lBUue=|>BQil}*K0s>?I;rIm(;yc 
zNJT_;OsZI+aZIIrUp|`U$(3hCG~*JcTT_f2v3H2T1ra%h*)lPfG3NW2~yCEk5hWeR^ zhShKSZY{M@mkja_0Ft@TScSjhW8?FX?oysClbK3Bv(jrFCJTCT<>MxjNlQ_zsNIj?Vau!Blh;wq^qTfQd zZ1{eDMILSK@7fyG$voGc-5O&n85ki?s6syzBN;v!*<@AD=a_&6+s_#GSc$vStSuyH zbL#3WRuePqLel#u~MZ1PYvmqw& zfoy8Ty7qdc?A7%{oPe>@w_E~+GL)duzQT&y@_~D*vsXddU%&5;?wQlZ_#lz^ikXyX z9W7!L+mPhwDxjC?vPQxl$qvu1{kqB$y%bK+H;_bjLPllS80{+aj&>a8!|Y6K#!wzs zNCI;>-;&?WM=SRM3=|^&+7V#2I<^hcR~PWXQU;JpIa5-k-eNMg--{sc!3t?EWH`aY zM%HN0^8S{fEN-*CrT>xM=3j%6IFJQr9|ExGj5gX zeC}Ff!sD}8ZAHP~wF1|BFbVyg!c&x(%gQ#wQ%Q>gpn9HVoo7pyh8xoYaV5Ca=V_+z z>JDp*uYRPWLDJSfztx*W!T9g?`kn`JGbXGU63rC0k;jxE>(rP_kN{Z|LaiOZT_d{E zDfC|oFC3j4h>>GlB4eN8q->l)cOyRsA%X%{@ z$sA?t#`9dkMxQZ|1eUv4-o{F;+zPSJTYN6LISZskss%4m>zh*rgGb%23pf@Ay;fO$ zK*1!0nQeAIdn?`T8L?0=kvr>MsW?#J=ad$+fJEUhFG3_y;h6EF3EbMI3X}DF<%jPOwWMa# zHDrgC+$?zRS~)!cz4kPkeg|V4Lg%F|AmWC3EGgj`5sD=WPdlevIY~Y~XO(c-9p~>)5Mjs zB?&`w!f;$AY&IDydUq7UZ$P4c7wpNZ&M_z8n#*<90*Hh{l*CFWJad;4k#yr!mTpzV zH#`;`MN&dxQS0*>1r3%;-hD$m5hM&abs~ktMnn#F)m?BUJ8$9T<$|3%u7V|;_2!5D zQO2CJ4?!@Cv1yxjfHh(CnVP&2;(HwkXAWMtyrC7U;~SRD=##f18MUk1H`=?lt=(?p zC$E6i%W#&cefRd%Ijpojfd#rKV(#haVptpOJvwV71F%OQKpRM+tNTANHwG|Q(lthh zd9ac=%%O+K@YKMZE6AAFMxs8!=d{-f?7yt`2A!zj!0*5K1x4h~-7O_cKUjU9QUEV( z=*i-^Xk%u~GJ^nTQwZd>x zCrrHseSSruYnu!*W=gw`vj!#mQl#3{{SiWfUq-6dlGTkpy)4nbA$;Xi<_4^}pwC#R zQ1)@>WAWQnp%`J! 
z43$=UBYMc=ZN2>`4cil_8!x8byKRWV7c^HKcsJ4`aGTwJx>?8h2(%w==Sj`cKGqcW zm3bfDJ+2Umg><$0dHUCwocd)B%v48=XNTbmK5TtM(ihy|$DUwxetA0I9{yY1=d?_A zB$RzHa-7)yZ3XEC3R%2N`&qALQX!AT5G7?d7E+_^3rq`e`zye02Kuu?DFI;yLqM5m zY;Z3v!yq+Tj0!#edlaDrvvr6h-5zJX1qHC z68Otokb?vfj)&4?$oxl358ts=FR!0uFOnai3R@#{{VXhh>?^*#8b~BQP9OggcP@6* z{4x?&5uGd@0ll$c1s4r&!+7od;!Q)XiH78pZQvL?2rA{ej2bCvsg2ZhisR?rlhh zw%VHJufeaC7l{NC0NwV9D0Lg`4O_=A)(+NxVb=29^L+#`Dj`UxhiAH#AA7WEG)D7H zdC;c*Yo(}OUxkh@Ch8KJaQ_-<4ajt}$8j0Mi(0hq8WZJRzo){>DrT;!T>!%~>N<1r z7#S!XLjWMXe_3@h?uK%|wV29iwV^-~0$PuXuACgr)HD;v-jd7Dyx3e8#m$}~{t|Q> zkc^x`y$M6d23@dE9w;Ev_SpClJcK5w#*_gcQ+l;i_NQ|Q^4|FmDreRO8Rx_XGbLrg zLXbSbOVtCD{&{c z+fP7;k|rc<3^r)z$A_|J3E}SAmrIQuq%@DI^l(|@jvsWI*fZH261aatGQ^5*uc)*O6qrYHsOFE|Bc#^(I{y!VfWefPz#!XM4ITzgg)21UGxQ76L z3dAn}bmQ3vn6p+NGQIe(*So)NA0Cj%NWe%BHflfelRDtK?FT>m4|3IxIC*35IhFnf zcAQVO59bE6%ErEpTsS*^7hdb?Ll0~KbGM#?yu7Gqu3~=(vLelC8#X0h-k6TQ2J{Yl z6jogo#+hg^P+8jUX*qL3u$&WwpZv9Wtx?-!)AGJSh6uRRc+#1ZwI2gD_*7;ZS&y9ehAPK&&?Hy9G*aqzw5sBAITvx}@0$gY`k@^ou^pP(`>1+ji{>D|vDBh%b4u32s zC4V?#Fj>XK)?oOvtVz+Vp@2V}KQMz&pJKUQ>5gqVO)(%b2O{-+1xz6NCshW|0l+iC zhE({SDX?6w7;Fif*60nQt{#?NCCa%&TpA+?B<}9M`*n7520Y8m zuDsV;Y#Od8x&X-U;rN7SAQwinPGk}mDOaX%;N3t ztHU?aK*Bjp&o!%g3l|qSI`nxeOY&@~np&Y;BlVYc^EL-|4B1(OQ~qF(EfbK86msVT zYx}j9$@E2TjL%rV550oJ*#P{ys_{|5RD*Zp+e0df;~*g$$CnZlv((xjx8K|^K2gan zI5$`6i!YU{slj1j7eDSM+C+JGYqj?x-T6yaEhi@c0hJG!7h-KzqI*QQ4o7O6;3Y(ASvEL~}q9#cW@$mw30U;`vvi$j>AN04ud_ z?7)QQV$rVJwrNqRt`di7TC03E_LHn$+WWm5gpSH0nk;B5F|MvDGUulWBo6xdX0a~R zuljJ2+gb!Vmi3eJ@x*O*%A6X!Z(3h2Ln`}fo!9S%Ul5fwmxDX!uVzNyc;0670Lf3C zDG~r{c?;^^@34D%-G`D8bX$fIH78+sjOO^9&$;nO)XKzPGAn+@q|D%3bDn3XS|)@e zs2=W;pON!^=%<^>y`CYE1-6G9&PTfkkjaNw=cfhRxeY$9;i0f@6~8&*R3{CZ4emrb zVB9lR%GS!9Aq#z;>nXh_n@^VR44DB=bRz}Asy_w){K_gP>%mba7E0?7-~gz!<~Xlr zJtgP&#Ie11wjr=fASD7sksY5VLB0*k+X2GEj-zH6CJc@?ZEOWxTOa`XO0nW+F)AA)}fyk9bEQ{-i zDNCUW0g+Cw9fsiZOSYuhhm_wZ`e;<|LyjTCGyr^=3%tY0)0h|`PR@|&R#l|9$!5Dc zSt;AKj?JhX11ZIe6JFt5yg&50fLtRPpBrKhAD@vMV7*#o#|0Gzy5I$P(OkERw*PYK 
zi^1|}_N|lsMO@kT`m}M53Hdl(ufXlc$jQNQNQq_n{bjQnLDji%JDaUHb5=2POyXcw zb`Ry&`FLuTpf#oIjhRNd$T zqtHsy@E*!m4T?8`99&$=nvP4~ZByO_FuO%xN&9YpBC^xsJ+0}a8xYiyg`SO)47PI# z&Tk@6Vz&2wt*rDj;`{Ei061;psDsB&3cl9|ih7|8fF`Wvh7DTqEVx=yp|PpA80Z6H zsQpq~jz(G_2ApebO%+)Knq_l9Wd@{Cn(nUb00K$)=v&b$|2IT~6KFl^2Fpjk@=rI~ z1MY2Ap^fY{CIccI+yuqo1>{n?!ot09Z;v4c#V7VDGFBQCqn@M13z{#EjVZV8=+X&=XgG!a(d?Rr=BH10bbj)yFfQ8DYdS_OCd5L31GW4 zGh;73N_a<_oJ_7uH&I(#a|BF{pLtpWMMjBD6~x{k$e6P;i>v#*H^{NIrj%pt!18pW z#VQ5JCD!~dZR_3vgbu^iIKyhV$nb9I>3pv zR|6Llthh;i{U>LZzqhbGY5t0EFcbVuZshv=$}$Pooy$j#w@|9@?#2pJHAQKUw(?LP zjv&%Y9)5K~5D}mfKqhr|zq^h2k``xfTi6F}&474<;0Tz{ch{GA=? z-3G)-n|~082sHTXhm2u2E_H!?!4HMc@4=wGJz6%=t&z+ggpNUXyM=i0m z*@UBjS?dZ-8mSet8w)pGVy4)->sc^9xf}XK1aB>gZDJly{l>-jmBoJ%D<#oSIm zWo0Wbj3S2A`nSDponAyO`{K>(Mb~tpBOi`IAfu0NfQP9sVGv;4KAJo(@@y@)0YU_T z1^~KOJfLf_A=;7w63zA_$Nh54_2MhOWTOw zoZY3tN`5^_o${MLKCa-fa?IijvX@&~ ze($dH*JOR~MzOA;3v!-_EE45_N4JKz{>-)?PV&}&?+K{YSs;z*&y!GZvhZteL2*Z{#Js|sjL#*EB)>kLj`+HR%P~8E9LP=Bpbs*;Gu$Pf7 z1eY+Je}b#@#*Q0MaXWV15pG{x?%W(Ln!!Ov9!KdU5d!m@&bNCU*0-{y+%7XkTfta~ zK6On4MMrLqz^}Q7V#aq;Y<4cL;cywj=L_*zUx+DonpVHTe2E2`58%4ZgSP*A^biUc%u58!$t}N-JBR+&7o0(Lc;v2J*naLy*Np*_XMiP+M2 zY0Zr@{&c97R?aY>M`;|S5awGM_etwG0!ly2c^(?}rvq0U3>ts?a`aJw&KO;R9Ca!6 zfy>H!av?dL>(|jF1r&>cw#ll^I6X44$dfd-y`gz3a@N+XX*7dLi+{w0W}u4PLW(ga z*s|PX6ALnhs&5nbO)lZ-L#g~B1~xD`f#hy3`Ktb1j@Elc$GBQIXW{)bIz0%tT?0xj zDiLrdHm2kIr9-?)8hrrO))u^!Cdi8|5NKzMEfo3hcB^J(-KO?Dh#&H*a#>7^-@QKU z2NOHY;=5FLJ4>kllgaXlmF_RLHrbWGzF9_L$k|&_FDGsn4yX3%Ak)m7YTu$4Wk@Qz zWAVG&Su8cr9W45Nt=h)6x9RfUrVGon>d;rD*+6NcHCSQ!<&p=$=RQ8)0Hht_oj-_y zFa~s>41JnnUZ;W!!o?%ii)tB%7g{r`Q{a_*a4&A#_>jNP^JulNbc zv&GYrDH_fpC!2=L35b{ohzwPs2GINBDHVBaIB7*@g%e4r-i0n@Few+JCzxS>!ZyN- zwd@<4iH9~{ba7mufPG(|96uyZ$GC{bgDL3(=z-3^d}Ou%Y6k54AT2wzlJcVRW>mlO zl(r$?zjvTkxL);CZ0_3x05tR!>(=%6At^eBL+NGO-4xK3@0V^&o6>7c{wB zOdZj5A$LB&Sy;Un@_f89;SB;i!2m=Wx4PFaEi^PuDE^85j?#a(X_ngn&3_}A=Iffh zj*L(&AoIrnG8Px0&j_k zpX`wDXjSXe?s6Fs-n$L?pW3gSxxPQ z1Sq=fC3F6>d6W(6_fHd9XlOI&|84u||J^QCs_^U`7yH0}Pf#x7?@|8O^?zt${l7bl 
z|81}A|Nof(2MxymA6T@F$7=T@N2O`cN2r0bp*hdc6H59})K%l`gb++C6ggatNzB}- z;MChe+auUQ6Q{NHq>hIDVhM+}MzP*xOKFX6Jd8t@Ar3Pu&sh5NyKf>{oiB$Rk`U*& zCOlzTT_PQK0_0fXtBqwx=PhNBENJtscbfr4;OjNYyu&@rm%t~{|N5dUr+-ry?B{DB z<`&dc+(hYXz9zp%sNomBnt&ub4nU$Kx<$pim?^6f@I^LV%9E2h0n-+S`6zQ+H3JVl( z4JMr7W@-&m@2gZa8O>_4aU0l@xZ%iaWsq+T)^---zslg>|250dkp#Wf1VhC%jgwz6 zE{yBejSPwUXEe2Bm`Uk)Q$b!wGhT#WCRjLBWfkvf(%r-DphKxxiWVonD6f0Z)E>=u zU2n?Ze`wl=w&v>D?F~qKdE)tVg=O1;k|CuM>!4H`#5ZSESfhjmPDzj+u_mej( zC+{{?8>D#6E$oxgE@=cmYq@;rDYO}M@o0;YGaTs4e5UsN`9E3!l?n?wFw{%EwK3z5 ze;ItRXZ!OAwt5?+wz%n^_0Epa!-)rTG6ki{a2q7@SI*G5BbSDM@UIoWDdtt)q*z|- zE`4X3LJBH&Vl4?FPeidiZpvL#9ACuqZH^dflv(&)85a4#d*3^KV0L)QX;Zx?1!9To z-YJ8BR^fZel8sf73D&4b0^i#`72Mgp^_L*6dPv8*x@pQ;;B;j0>Fi<}k`DUUqPp-4 z7yMp9CtJytU+tnKscZbKh;cS%oiwWv(m&etL&D{IaHgEq1<3vLroU5Dh@#8^i0g|~ zvK)X zkRJ4pSKaTab-y|s0ZY>Q0DrfFah-9u7>V#x{%2_xCq}sz_$?75#TuIHA}1aJEe-*} z*haqMC`FbTQ|B0CETwJ2o@#cODe0=bf=P#Vbq`;*Pv2s6iE=*Q&y7^n{kU9ks;|Fz zInM2ADO=U;jSA8|(*`G$d)##_2Qdz<{CN3#U;z>8|Ng_4lP z;W;XIwAtP)MIg7d*sEoa&=bGGXPr{Mo5BpuFojh*d&F9sdnI?%SLOQXV|_<-^0>Z| zWop(t1~dZA*3p#~#fjNlSEe~hw0E_971mLKl4llIuBdWNbYYeEBV;|Z5m*(TgKlnz zw+Z$U;gx^BE)hz2wDAPTfQJ9f)3R{Ec5Vd=`wYj$5VPO6(CzwL(Eiy;H(xGb zC%yIHlm!!?;;ZdmmJRTiFRA^~j`VFsaeAu#W}WJeg}>(DgX^hE6k3zc>Le~^)tMEr z%}$=*w*>V1M*~_6`JKtNB=-8vNvukK;%mNV?4vQ+p0{}z<`eSB&HKT@J_muA<}$Cl z+iy(pAd&+nIvJF?&0-tbqYBgop-4!hawb&<;IH6^@v{a#yAOFUv~uxHV;c{Beq?%v z1(|5S3Ov}b)#*L3aF3lS!ETz8Q*uN8G)H8*AIx$=LEa?@W-1q`Y@nE(gwj^tB_{+3 z>S{ID7qs8D&_;*Vi@O_>4y=T0KMi-nsyJLc6u28g3_%*tt*gle zhdRVwG+%WSvwcXhbL3cBk73t58Borbm^RQrjv&+Pt>eOia#nBKBRuY+O3bHxF&TOw zrg+;wD%VN57OKB;`hnyoyr&M(+Eb;JxO!bySgZG5)?I1O z?k>4igQJqdkXK7iDX44y(cp&gIV61UB9Ts0%b;}n!?zdA0==^dMe9#6^L}V+`T>(;)A&jt00^v1+Q~PHOicfJF_4K@0y@T? 
zN^(6Q%h0It8Ay0ZKM~awm-Xg{c@&-ErcuON{u`GiJB10)=bH5<5D_TMRvR&8p}ERK z@!cP$=w$_s!!wjhH$!z%8AVt(RGK+mhC^0zziu2C6!i<1$@Mt$zD!DVE*$m>Ojn=0 z=EB+Fn)SCRoXx z3SmS$K6()XA^Dq7ra^kMYL6R(!x`9by)(pfV`D}H-DwBd47}BKFK33QzjEJ05#~~m zPXZt4&dvj#p%92*|NI~z`pJ#e#q#N8OuA;Plxf}hA%{sk3sa7R9 zP8d+VbNF<1*U(hp&3?c?ygkvnU@pEfo>7hX>jLq^*ldsy^PHo%F!xChFJV@AM5+=O}$8>r=#!}gxiY| z{x|T@_(|H|46v4<%mwJX_bOm0{XR=Z#knw6Gj2;eLIV=g*n>v28c(oBE*H9g#72^2 ztKS~zts%Fm_r+(c=e(OWRBEIwvfFZ?_E6c%Y3%Qr`~Vqi)W;gBW9MR<#bOJWk3yAK zqq9Aqxl?ct(lTknivbpRr_eaKuc>e}qZwVRGOc@hOD)rbPf{#ZW08cO2XR;$a|6

*|zelEEH`gE@3+Dt^ z|K7l>Uegud5KM7=r=USC5%x>}=#A{-h69Vq4;JQE?hx9eb+$@uo9|K0Ho!Bc`aG8v z=tF0E>Q`f=Dk*v;;-OiJMAGL;L8=xr&fZcmKghP}lLMc^O_mtmSib@`-EzEyjNq&m<9 zej!NUoJN_iE}R`bb!9!$7FJ=PHjXMFI*FsCD&B&I|GU?UOf$HKmD{FOOp?n8asn;$nIf&0b)9G$1 zek8;5E+i{b&OoGAGa8#U&-NqA)5msN3)8}8tpReT=6uujoIr*~y}_z@i4)u!;>Z5@ zk@c5|326&`{Si#NWs?Fa!sQRe>LBAkTP5yOY7+)9)Wxamg++8WED`C`+wQX;A*fb$ zJpuT_(3uJB;2yrJR^UUeLPbLP^1U-NJrjV~`L7&fXk+YfbsX_OA#GU{4nE2+&ft6o zVAW8Q{azo1me^@(%EZOc@XJWhlYMDNP+Sj|)ys5bdFd0JcfwPD;MEll;F!O9pSak_ zDZ$S}Cef)(uvysZI4lPbT@8~?dbUvxDqcd^+AoE~an1p|S-T=gyDvTQXsD)nsKQ3> z=vLR!&E-IpdrH78Fwo@4wmr80T!KFZ;hebJ9Wc_3wrJl_bMK?5jhoOJZtBcv`TU0& zi^oJxh^3s?sFWB-G|nioA!kJCryQfr^0T88(#(eO$T$kf=pe>*WVDL>TesMA=lnRr z77Re3ao-#4rSkc*M;r%V{D3#=F4Vb7PiF09L5Rsx{U>!#0(Sx5X6nu8hO5;Wd?z<7 z*dmgMdb`T_Ra!}k*7+OxOwoS|>8Uv_|b(i$a^8%$MZOy9yQ)3GU0;1FhHH)=3Q2_hWGSiAy3T5i7?w|8wqr%S8ifr9utxz== z6h`vexgH$_Dph1keSOGL6>@HSlWPxIseX`b92>bur(096a?ow>HjwHAUU-0S&hO+< zx6=Hk1zS`(-$*JluRq+d5oOIf)?;}?5jM})<3>beC$5C%7Yx;>XwLJxvF{Lj`lvV= z{!!Dl;$1W!|6Nc!BvG-&yVCTl6OyE51k@AT!U%McOtpE-1B#Y5G9x1h&iYHZ$ffXUA7W%I!; z`E@kAkqe4;s3+W_kuumYJr+|(RZU~fkhsBb2^UR&JHmpHSg3lu@3`P8tqwIH947v` z{s+lX0a4s9WIb^X-N~zEOY)do8=isgT_C|Ee=w>+1do+njlK-rjm#s5nOyl-MdaCw z^V%Dik^oFB)Tk)7sx33Z&q}ekYN!LL1PM2un58`M2~o^dY|>J0^#L11PHyZe@7*S@*FK zYsCGX-Y-_H_coD_UPN^^3((jfJyA!Gtv4HNy>QIVENCjw{WM!MFM9WZ2kl-5^u8zc z@$N)$kiD>N?{-fAo))cOt}-Kss$`nDpe{w3u5I`J)=jLm)p>4iP{qsHL$ThkMq)qT zI)Vxt+p^(^f!xZD(Jr~tlr9plUc44M|5H4Xz&AI_7K|Df`yDqni;*wD*Qi9!urX@l z-v<=v)%yX-DAu&ZW-4`w@w_9Mx@vBoLrKy1L*N)5TQbr|Vclzl%9?{F+|j4rfli>- z=x0{^arN7CCaegxfzVV9^c@;^dGXQ~nF7WTG6y@Sc~y8z!&^^uCWxX0;L_Jf50ehc z#=wrg6y`<%Pg72idb38G6(8M_Gi(vc^PV?%qf4K&miq%^&0+$NvmR31+yDvT*1z(|snL`H$7D5{ zKArvkdaJq^lJIVl^CGics!zhPp5x1bsebuHC|y3I&7)C#on<(*lI?a1*;BfmUCR~- zm~HQt7^o&SOmPs8tTn_Ry1HVt%RRo_epgh4;AoKF!(16%kN^xSii#g?M_`>YYt1LY zq!kr7oZw<7rmrjGRvJ*?IhA`@JXKrItcGcNdBIniJ(`Cuk#Qw;iE8tqOA|FdKK~|i zZzvI$(eQ`%KvYcV(L0sz&hOk%NpqfYD}wR_?mW$xS#zju$~=nR$L#>-=T|PRac6r4bRo84j@n10~vTvR4A!>Coe5n;)u2 
zk5KQR_~S#hU(G6xjzlYU21uD|v`EhU)1AWzTzHU*`4PB?`6*^!!ME zECfv4#ST=8Ts(Q1wF3BSjJPZ7`*ZB|v2v%Vb2LHo z4B)zG4+i7+w6Eetz-6-|%5nw;^|!YE;9|Z%l#&L5W;-5cR~@b)Qc=CE<{Q$L1@W(D ze($ZAg3|I^OX3MVvk*!3#udnjwEJ7rjXFmmH4}9$p5+VbIYd*F(}@X-kSoKW_hxv`E&a zch?Av$j-*kum22Pn0iW8sicfiRbkrn#eX*KcT&!Xz;(hSx-^vtI!o1-IxqZZCYh#U zS)Q_W*O<9N9r$l-Ma1pK?Qnk#|_#UnOPr`vrmLb??W!1Zv(@ z!4nayOFk~lg)bT!*p>M-slYRhADZLrkRNCs6g^d1y=S)3nIJ#IyB$3S8X2@l5IO<+ z32I``rx@5P=4?aKb2Ly#%5A7TJXX7mDe(N-p!iV@o(>zEqAGg2GTuEGPLAueS8S*M zEb@lX3&r20rzO)9_hq#_ z$#M&M`yT;71Jl^yx||8npS5M1!5Grrx)=orkM<4NUM+(!py;Wp94D#m>6-X2E`=2x z1T~sQV_t4XA6_)ruUPc`iNRP+W}7CQjOPK#{2X`eb|6#V?M2ajp9Sm1(S5`+L(JgG zr`apu8^c}m{MP~}klaG%V(=q28WTF<)m~9W{>8**5#(!iO;la8xhd$Q#7QQi0ZSs7 zOQYq?g-ENZwQ28%;GOR2E?IwlZPK9Y<(;!AbmMU-G%EkoWsPSU-~gtiq|IRZ#IIxL zcOl)c!oSH*mfrOq7s?(RTS__vgTC%F0Yup8&1{#FGtKl~5iy4r;A6eq(Wim)Q`;%{ z<(R;<8dS_3k(4C-LtUp~@#lrqWJmdtR{U_XBk0c--Xu(a{2Br>K=EEi*e7`paNQbJXcFcLXc^?uN;&v9*Yo_ zdw;FKO!I31lIaGc$mSk(dRDrAX`3C2MJQeL7!_&i@tgkX?F@-9b)>A%cQ&j#nRZ); zKZXwq1AWM88-$N=Kb!^$l=dHi{#ZL+WfEI@7~2|jkO{V%6l|;+M!z@5j4wYkpV)5W zr7brzGdidb?zGJ51);#teZ*8q zJPZimAEGGypad~FKAP=Y({Strck>n#lyNKFM_}Nr9d8Unh8%xj@dOY1_WSY!*2=I) zW8BTpsP>^}XOOx%H3aDy$eXv6w2z&b%+wdhlsXeEgYehgXXbKowDxY{91QxDK8q{Urx++8g8i6YX%(Zodbk;7v-9R+B;nAV@eU#>4>i@cyqOTLp1s4mm< zYLYI9_);?Tex{APo?Tg!4Fh2ODffhWH&nspnVvGusTia$uQGbDclu}yIsEO+iaWhZ zIgy(TX3J+od18%d3Nt;^5NXnie7(njilV)#NONyJ{j8$J=$-5={nID#4?WPTPr)T^$ineULSpSGZSy`ngX#^GJXJ4ZW zpx;z;^sUi1e`&7lwZvDA)9UdSRag1DZt32YRy@sE1xbEZ0_uQf+;^(SB=wS4xsMom z@TuvT2qkK%N`ir7LH23!#6z}Ti_#p6<)EGRk%_l(7cI5}ZM}$~G$ab67T+9ib<~gW zs4B4LM`s&XeH30rcCqTGUvg)CS_mJ~M13x(wRjvmha=h?!lP!V7CK(!3@S52hvc}B zN>(AFV37^gY$MMpo$VyMtW>MGq>Nyqad~==+~|5lsq0<@HiK|Kql+#er#t*mQyId@ zSF^Wx&8ZUhSf@`u`og*qCNZmmI?q}O2nRMZWFHLZmMBtDiqF^s_}8aYBSdyh$oqjL zH^eZURChG&tv!|YG@r}7cuJ;PKO`#-H$uZvuf{gTp?1{>W90NDxSR^ z%z7!DTws`=U=!3L7`PvqrxhN|s83CwJ?KDT-EmF*2rWcjwFx#|Rd6oP&ur}^JUR)9 z2rEeJm(a7K6P7A@$T?5yNh?hi%1yuG*|#(n6wD&%@8z~KbLH#3$>Xc#b~`N$l(k;W 
z(t)y3(SKLUo>zHuLyr&Q*LFHCySe#?iD`~Wu}wTVyzxs9ehp++8G^fHt~8AEdxl}# z2kd|OI@%d?C9?ijiF(tWS%VsY8COHBn=k*Pu5S#X z=;5{Og43*g2Xyqd(z7tlm@+xtiwO8I&kB4Jf(iJlT7yYe=9BV1?+I#KtfRe@=?F7{+DgWg8Ew<|gl-c@U?udzc5 zCe>_uO%r*g^KdWP>l_>IZ*4-H+m!Wljq*7 zk8%E zUR*7k;LMYu~x zYo2aq4P@GyvGyRq-@N|NsgNJ@3~6fuF3CKhpycNrUHQQ8N3b(o5Lm9cdE(SmSi72w z34H)cp_;koW@fm?-qJPz=NT_5)(fqfA8Vf300|9y1hjQCLc-C04#Is}Fw*F>6N$)J z=bSt?MXSPc@yNz&nojDNnwSN9)jwz-(Xh&uS~mFu*xEUcvZj~1Fd5Z1FTY!i0{3=y z&_21HK!pFP^`G2z693}<>DbLU^lj~Xx4Uz{6Vx?QR@mA%$w_(y*PjghYEbE*e7?iw zO$8hNNluqs7pG3ZLojhjaua3d@PfrCAZPUawW}7Cp-$yM)_Mznq1}*?xbYI#wt=m1 z?EP=Wn>%1*gFoON%S-ZB5HL{S$k_V4tXzLKkzds91Cg^8Pv3tk9NPtBc zwRB79Q{k7INQYY|WSJt9zICK)0D-0kFLn4Q&Z}@O)~nOCo~rNb2gCc&4jq%azv2?` z`^vGCGg&^nrm4^wvLL@bP*s^P)a=dBT#qWhe^1KuHD#*-W$*UwdG_w1pXH+NeB^7( zVt9JvhXMjXL5fQ>0!%^W|Cf{E(j&yW`a0VJXu>Kf2pr{ z-!B;crjY>ds~ou3my+MnNBHZY5yDm|=k~e&aZzzDLSa+$^e5n^*qK0 zb+5ZFih zW+I$vBmHtGX4hFqJnt=4KM1trFSSKogORF`J$=4EgF)1NXI6o_umvCr68-D?_^>UX ztG8MyJ3YaA@D$(>xmB^2nv!SRmQvR2zLl0}1MJ?~3ZCnUQljGtb*^aAO;6#74LtmD z5n6S&C;w$(+h{<>2`$~s6n6)CAion`nSG?X1b0Sq!2dlVqrjQu77G<-^ea`t?Od$F z(Z5vC`Liwmv7X4U_%METnnZadn+Gf;ng3{V5VSe`BaI4PMIxYcj@H@h)?)vu42Obr zwF8k;2?^Rl=ZY_*Nb4Z_n~m0>e4D)u<>e6~0&c#vNPzi+V>qBikRt*KKFB15)r>>e zNlt#JTw=7?CdSy&Ua5$9N`UE7>pyt*uNmeb-*^BWspQIGW~=8&`5nn*cHQ-8`xo9% z2SVgr>17@d#qBR=d49(?m8v4fwr-#KxVLs8W|y6>4&f)cq?LpL9#ow=x!Zx-UU9Q# zgAg#NX|ip`1f@beOv)I>)6)Yw5wlnSL~F?7G@H3TpHu$G7u3^HGdL`e7x6M%Kvw18$L?P!U$T$hZI1gT3F1*0!|c6-WlQw}r|P3N@~jQ%#jLKbDa&{qU=R zDgDjCd-))a&?0gAtm>(!+{sH^CXG&AQ~D4fKC;`+6$mzYlV^qmOgH@5&x$Ra`nEfu z?{SJP4}<#!9daXI6rNy&{#l@qL9?}$rwpAgh~X$iKq}FK$f5mO&PAE(!iL@{)PgTu zpBb7yLwrJG`233y+^p-!wi3(tHSd0IEbZ_aL_;eqccLVqaked)_k(PDiaTX;jSSIg z+fd99AO6|z{|vqnsyl}rhG}bQ#@{xspq)@lr2>bVt?sI?Dsp1-gf^%HqdHOqNW5T8 zl^n8Eqlim+EvPD^4Q#h;ba(`9JoCR_{!WfPMZ#7Ic(^Jxfg$laYti#j$24$z)T?*D ze~gQzO7xvM+bU(EAmPGP34HuG-9<9>cVYW;D0l=1p4wqwWv4fb6n{Xi9?|Lzx^EBm zlZki7LbR{O>b2`>adVZait1FwZN_L+_JA(5Egp ze*iN5crO`OSF}xFvShpiAx1q|$1iiGwv?yT_SXVc>Dwui%(Ldwa40DA*CS$tq&>^T 
z*U)Lz?i?<|!sR3G)`g&0n}2c#$Yj|*7%qI2tCI-u{YeaP4DxNeptVx=RFP`hna1@x zL591pnC^3qr^|WA!b3#-L&mRJ%kplr6mhns@}+g2^uI<${|~g6wl>%?XcucZFxOwE z`c;gu_s}Tk%<0)Hi#tV@5fOGs?|y|(KHTyZ$vPHI%l{9;XcBm&+M{1|YU+H=t(B*^ z_`LYNMzUxD_j6X>d{3%t-6`+UZU_8fQAyQRn2kZ{KvACE|LU`t%^QEKb|c{lL*=Vi z8EF(kRTsfDQx|l8a{s1>yGNM^0RHquQ^%_3e_I)x$-l9`YA%SIv<4?C!%11>z}Or` zTqS}^7v=k_X~D@XK6LR5Yoh5X!RXLl@r1o?6EDgMzT5ncBC2D8-NOHB^7wLnmI(fE zm!?&~1i*C)nt|4(FTil|^GZ+2K)LCRRt}mY$(SWeU=69xSVv$~`#|#8rl(klifrWxI~q zGDCKbNpfB2)f-n=l3!%SdB{Yy%Eabz050|!C$q}>*qtQ0gM`N4W!;(3>OyEhalON!2QOSO228ZK$iaUEf77iK-z z7(z}iJmXwbt1BHWCz{u@v2nm7zfWyhH*}3|PHjh54cl!zZTG6Lx3C8}a_Q5HjIe90 zO|EG)?O|-$M@jEIG8F!{w9)^bho*s?O0=H2-Md1+2%R+j-hPbxw-eZp8q!s_g;?-W z+=OL|PVeUW?dFwhZzI))JdAm%H^fMR1#1msCo{sDZ;NjAZCp{@1sh|H8rH_s25*Kk zYBTAhhOLjVvE<>TbX!j6_cw0s<-DGIDXo_Dm%7($n&~z$f)t~hU30i`?>yeHnBUxa zmna%;T6)DbUq&bvJ-Xe)_a-$)(_Og*MI&~O^F-BZYjE80$6fJia3xh?tMv5z!DVku z8F$ln=LW4!_ayxhrQ8G0pQ}^XCophZuMZF!`L^H#$ydh-Z#KTxl5qpY?lN|QKWtoqluLbC5L!w_XEn@~EXLEIxr$t*+eQHA_n z)#-3j=T(1)eyatUkEY=pThPyQM1;5T8adUSt`}z7E#!q#T&zQ~tXl*k?Uqzu;1qkU1~=41_wxy^`Z?u?QGj_cBkR-LW#eLFVa&sx zdO|q5k+K*vyYq&b(VHVG@Y#~6<}KcRAH-H&*fEySnQXEAZ0+=BOeh0-gXiaTST`fn(iSGz4WXE!0Z?;o=Sk!k49NoP;%dC0hpJ0{* z^P#^sUA5omZGV}dTZ=quKUk`|`)1UqwglQ{0*Op)`9<*?zkyU#?@kcIgH0Z6JjDPa zYYqO+4Hx^DfP@%T5LX!6+})KB(#=~+U(q+~%BL6f#kTK0mtNv!GM*Vnyx9&1`a~XM zzq)QY1|dpexxD zjj&zp1Bt!twSTs`l^}y6(*t<@3avAH=XS_#9lKFAUsUn@_G$OFRDbaMQnDY3uO-jP zmdAm`@q~LmCF4w&5k~!N!k9G2`M4)D{e^U701x+I!7%d&40Q#%p}Wy zO9Tyy3LX`4tM0@%3G+l|I^KsYHLLe@m&3a*VQ%2_V=s>DTmKg#+)uqR{<`PpDU2nU zP5`y8vagsmRw#Pf7YP8%Q_T55PB_W2bOvYb&hIf7k%H)1s0OEH?zkQt@mc{;eW%N| zNsGIpNsN=h+SX)F+oaE4yGKe6TRop}@QDMpkqZ_P$@&wo1iem3M zEF2?txRKXXFxQ;r(qvA!rZCpHz65+LbKAmP_jE)Aw2lv_+7tiY+p3u@?)!E%n{Jy_ z`(q-S?2Re1^^|noc<%?22Gytg9b$}NR$o4{$l@q|vF@pcC7ZJPi>u|Eyg6Q6QHeVh)GKB#l3?e9hl&$*IuivRS;Jsfaq1z**4TpbR zbZzZm;3`G;Wq2or@~{_W+& zOvIv^k#oLtl^O=u*rN5Ppm$)>W2@AEbyYaOS}UgN5>>BRaifSJ^{Af#81Mbmy(QR} zkirkkdnSkT6+bIqt;n;JOpzxHn5Zuy-!4e$sjmvu#|bf5UjK1gZzS*j8f@m){Kte2 
zostBI6U-r^T@`)bI7rTo6B2rkh}x7Be`N+5BJ={k9sxG118Bt#TYEJPK^iG1` zGcJ2YdP6_O0xoeb$et}1mDEdB^#zU*w18|w(3UEE@te{3a1aZf@hyWvhG`bbH z)25*SbOPjX|0;_E@;#j2Q+%}TERx^^-)@5sdE^x8%eD5v?3^sR=^Y zv(dNa-&Sh~B=q+B6`25K*MX6keTzX0mJWV|(tr$>tOclwPj*PD8-fr6v>zC0(7wwi zIqDysfV?9Pe z_GL_^g;k;B)%LjH4kUob);DETmUl+$X}sxL#@4=zI=~%2(e3fo&0&rM&Iu_Qiw*{w zrou`XS|j6>n%V=ap5m0sOxrXft(zGk8NH(wM4q_zFh~5%z3~V?wMH`%1=7?rm85QB zB1w|tp>Gx`I1xmH?q|^FgQe!Ki((>WzxKv@Fp`*O#q`Xm+}$Jem0^FFca{8&iBo*- z^hHwv((x@W)Dz>76kj?YzYKN-lGh45WCcP~A#);t?BAB7TD~mafOb5_qZh64I_0oc zO?MhCJ#gGX1Y{RAD4`WZlHF>d6+eHzB&U850nx(4bEfAJm=nxHZucB41{~SNcp;&P zpfail_)%yo7Ck4iGI>wwsXyX;rb^kTCP%%Z8st3c6d4Ibhj^95MTig`YPON7Tez#Q z1%P6YoOVN$W}urCO*=1^Pjf(V*5n5qVI?UL<&jl=P1Ko;3ybm~ z7VMzv$*AP+a34qZ`QHUajZ3aF$ar_Xps2j>)FUm2BKCY~H>;<+m~kJ7K2F(HI-|Yl z=Q8dcTwBh7v-aO_V1dY-yun;|T!f&RijfsjNHJZ~mwvJ;n>lqXw8fESLO5gXE-(TI zjuXbvPVPxXm$aU3tow)LUf(wae4_NO#ys5Tt=#t+>TIztv{?}bvf=Tkt1}zHt*e@+ zFF7IcC4bLJe6ZZz@h#EPLEN{Y+)dH4cGb{8A%-0_5hM5 zsCspx4W*&!Q#^Sjvn88R;gFo1sABPu)c;F@6Al-T5U{PjN;PxZRjIj+HLGV!Kc6(* zY=1xa)-_zNqgcgN8v0iY766`mW>`srQRU7R{e?rQH+@Mu4UUj4KN|V zjL-BmWTtIZ{asMe5H_I)*Dwy6weR#{0I!~PY^(b#B5?TY*&j?zr&GG%4%(298^D9B z1*XB$N&`x2@~7CdQ_9P;w6VQ2kFc?$rq9ZYRT^PKi?vC=pB|Za#+}eKh?0B5*G*gR z_0`urDRoz_S@VQ9hHeGcwct_03mi5w0_V(Wm#k|*X<L$4;-$pOlz-Y-IQ8yVi0 zmCP8%o)_=CWr)aV3(tu>YEx8F>Mw@%Z?F(%GBgHSPPDh~zM(VK>3LY=G_0lnmEKDy{LiaM8SzT}3J$Ab@=;Jt8dGQ_qFZ zM?TnLHc`~X+%;NwwOHr8#>iJ9q&zt$pi)|MpO3dr;`MWl?Po@ZA}ko>_!N_QQ2dk< zZX2G_X{mRjru?U~{sBG(*MP6~rvR+P?;u^OiK$W!p%MlvR-|f|RScI#QOT4-KZP z@&ZnWCKm>m1*H3bYq;hCBT-@0GXX$Hkx!Xkh_6+Fi3el48B0EVWGNrn*GEzcV_WC6 z?%>IzSDn%A3;6||;xiV_FAN`aVh&Xrnl^ZmRkiEGjg2?PTh%W>8*d#`G$X(juLX;J z0_}d{^G4_v088$9a?f;&#G>p5#3TNCpUSd7mfj1$CYGTI{>1$F?Fy`Pc{&`wdIRWF zW%z{29vXBYb#;_BcJy8EX_!J))1<$G^%FOX4}!K!DDKo*qa)NZ2a$Fata91@D`nXC zKC$N{ve(y9AYRFOcj3660ElT{x`Xgtcv6nF{`^a>n#1j>lnT+>__lE-$;7dzjEIz1 zD4asi_^R#UuY`5pOxGxB{e155dwk<+8|kn1{G}Tt!P;$-s-VlWveGxX!4EN++QZ$c zJq&BlpMfgnMpx6gWi++97wmDxZ!ZKgYr8uhK7W%^Aq=X%AVZqmSwtih2GjTKX>E%a 
z`-FR^oL*b|@koqjI2Z-rR12@`P^uJQO_cmg(!cM2dc6{#ch!2SZ_@J=9P+S}C|#); zPFGEXLNaix`^{I2OmF|N#-n=mUWZ29F?c+pJ zmzS}PbfWtZt8t0rZnahd5SjU_IoXz5>zwCGg6$jr$FfJTg&Kq4KihYq!s|9WAa~qZ zT~_z;evcVDIe>MC)FF-39pXGlVu>RdIT!;*+O$^V$*|*nMY&^5&|)TQl+si?T{Sk| zCKjnPe`O_UCRD-Vp7Bd+ETCJ2nZyfeV~V^?`9!zsw)D3{=rG%9iv++)J>{%K=!%N& zi;sR0Wp?cQHw+}1BKh0ZP?&9&zb)3U5a}Pc8kKVS?DON0PZ~S#PK}t} ztWj4SASVz+a(J(pvm(qT6V~9N)yUZ=0vW|NIXirOq=RqFCXq@&RK^G&QJftoe3RW(yl&n<_RHvR$GI7Ah%05F)I2^$pqxMV2@n( zgtT9iP2?NOpEx)>T5}7*Jp(}JJguSE=Wq8Xy$rl9tSAkJd!TzW{azl>1n+Yli((I6_RaUMt>R$3Glhca6eS zy9DV_QU5_b2zEaf^ZD#dxU8sXm9%ifs^Q-6@uz?F-;2?w^XO&vv|;K@2d-t_5j~N4 zyV^T}k4_cMhW=I}d9X84ZcLn-hBxy_K8pSHPw^8-xp(D8;}8+k+C?8c9i~8fqq_Wm zUgT7bobTonMy4yGw_IWRV9|{9>X`BE-c0K-F9w{4M7TP<0ZJf!_qNa9cc^Bq=fvOp z$_pn#>!;xP_isENM$-q24Kj`8pe{egS~bU{8qJ@b*F0*n+N)P&bNWQvBfU9c9uO3S zNJ#iW@1!KIhW>p4L}i(+9CPhpKTffPxT#Vw)HvUPOQ3_qX+OqF@3SfWW$y+ygR?Lk zemCqH7kIldR>bD@qkFW)>9-T?>)u@Y%S1rAt`chP$)X>r3|`kE>4^Bd5%P^;=L~JB ztB=@Mr^24li^yQOtaM=;^@-Rp|W?J+Yh&iMAcPFv@*E_;T;otaT9k00*al2Ek* zXWQ~qnMRd21rgqae?UZAmhTWa>cB@$vdC;9%m5)SR=wrV5IS!s401}*S4}Wh!v??K zZ^wL;hjF+uG>|Sg6<9HL!45|-R>|{kUvnSlvSOB1-1|q(SB&t1CIEoWUSsX;tQXMM z=8L;$J)t{av5>D}DQpgwU_ zOIRvhh6IpUXKaveWf7bLxbD2jIOqdIZXm&wS6&agY3B~zT5;17Ko`;RVjUnKKAMBi ziWzEH?Oa2qRs~IwA8IKAp0a$=mFImH{Gt=1_D|6moV!&k(v*jC-d0dN@nD_nsFNHf z3`Xx+1R5~f$IuZH*`DkKn5n_KBW9JFbxA{fM>b|-Yu`}N62aL*?Db{qGsaWb<%Zow z1~|<5M$VaH%)JMX!{>ZaYln5IoVRHk)n8D#_i9o-@kP_T(CufqPhf@rU2?@Kay#dz z)zcYS!Dv#tr^UK^*!@ZP(rFD?#c4Y_=L^lz*`XKP)p2eVQ_IbJFTlkb5BIrQ1^1rp zW4-nM2zPInw(LOj@0;WnwC~3~e_%rESLdmXH(vujlx^l87%_n}6 zPBJ)rsWaf%E*oiljbc`t`SL~W6}(}$<+_ozkp-N(NV2xs?qg@x)1FHZb-N0>eF6ff z+`}?4ny%{zJ+Ogm{fxQf{dyl1+g*6EUyJnzoYe2=V!fDB4BCl0-mKFm0(J^&JO)6^ zKWTn<2N>J$QEPZbt^kiNlK#CS2NShyOTT)#EjgPiDXLyvWom=8LN!=Fxp-nJQM+8p zD5xoVeHY}q@P7#~y$lt@`cbWf>-hxE*T_ZQXDx_SxoD0vXt)1n{Cdb}L;-JeREN#b zs%5Z(#b`KdGJbJZW3g3-1j>gr?DV z0PyPA{H~Qh8dAy*iI=`9dML=5qRACA?5*z4a&-(XT09p>q<~0S~?1Qd|H8jv{8`2N{xL5Dq&K;SS+sp 
z@cWkkm5l4OX78YJE#*G)KlQf63^T3v!rLx+tXfY5+PlMdroM5zo^t0}mc4jFx?EVMpB+6St3hs7E*Ts{rVG$>h(EXz=n zM0+zqtNeih(6uE+Goypmz{ z+@$u(!XBkFJV`9G9hqxSA{kV%c9hws(&KB~Z+ml5Sd*0#h}ITld$l;iX}<9p%6=eX zw|@6~EM4Q4d`;x_0v1@cbwG>{+xTq@j&0l-g}#N&3a{WrH+g=^lQ{e{ z49E&>0(b-r=l0u~^<1wxpKm2mQ#D>aq;gQfRmMGuM(nn;Eil|w#2!Iaf?Vl%c3k24 zK~M((hKQ$(qtPtp=BjOMZ2L88OvQ1Wq3oUP9?FKB>Af@W&mPZs>=2&vV81-Cppx6K z-IdeX!?`m-z|;-Xx&vL19=ETOulV$7!sw6Eg#DJ9J&v0x?hJ8q-!`g}tOpb+uqp87 z={H|#e5(hzjfdw<3(G@OxTK7|(BneHii^))DtF=>8;~}>LTxVQcs%CH-yAlAZfQfu ziky7uM|^$y%6=UriJx&IL7oO+9g*QoUrx;2Kbuohgiq%@22^bI&osxrb2&4Lba}OT zwYmP}b$v46>ORBZ_4Le&0%`!Q73-hdW4N~L5=b9qgo3UP&R1uxNg!w8gISeWCx0_w z>i{^79iu8HQkF299?lDs+Z*3WG*8}4&fG86wI$n$lWtVyD$sbHOIx{nVA-pb<&iI? z3y`)?OtkZJJX$pTzWdexvaOxrW=|O(aAi)9_bce0{_0xgd@Jxtd-E*H{QDVicvw`> z>^%7UzoT5WhNoqxkHBFzw^6-wWEERb!9nT!-%6->vwdbupOxH%6s^^t#>1$T(h80(TVq z6{$mOQFOXWQv#ZAGsqnEQr~JQUIWc_Jbf~G*HT4aTST2^cA<^0TjUVW+m9HH-prYt z$4*DaG6z!J1AobWmNEGTq+JjO6MXQ8mbiC<#?U(XzQTN0uxG@F+Jc{Imnf4u?t4A+ zus$WFBBRu9hi~u-OM2nq>#Y5P3?|U&0s@H!YNe8C&61xMg%XGX)bzX$unKS2lG5ca ze@=+Bkcy!-UoRt2sE@`Uz#Y9B3UA$nqJ^7nN56FVlbB^p4Zn>_>l}=d)cwi?(Xsw8 zozRY>&m0;-=Ie1YG1uAI!Vcp+NrTpar*A`;%zPLf{xzb;8EbmdGP(7VJQ~U^Y>qXm zXLTTmGt2lE@C~Kq)vF)v7t_6atc&@vWUGTKxkZ`Y;|~4IT?_c;k^i;p#Mvu-SrpLS z*nr2$-ofk%4rrP&vYZM`t9!Lx;r1tSXToSKF4Jb~UJ0f7*sI|6Ogy9MH%j68%BDn~%JpiLwXXaCwTn%d zEJph&30pM6{LL5TfK&V4sG znladOWbb-;MUnK_O?H0R@JHwQ7;YXa@8I$zv-QXE3ixLJUrmX%+Crc}%WAC+xxkTK zc7!_60`LPn9?}0Fdv6uhb`$jr(^A}8ptu$*PNBFJC~n1y2lobdZHqgF;_mM5?h@RB zTX2WqoIKL!eeb`uzH@VOH7kFc+4Gy(I(q|r)*KTABJOuM?Me@HWBiwVu7D%_r}nh4 zYb!~%l-o8BejgM|!qW5vU~GXe^gQ$!a%I8#;kIaH#Qk!6SKwdaqdFMW((_jM8RKF_ zyQbH{jLZku=-FipFB0To^G5yQfm_aVCwGT@;DX*tSEiXO{FMin2zJ3>dP;NIPbV;^ zbbXLD5@~nt9H8Gs7beOckOS{&h#581Y_9S@)}HzJ&i3_}>GGzluj*i@4lmAlsvs@_f$c?@}JMflQawqV87kMm7xL3fPFL5)%W4uf4))ZVZ(IU}ePT)>g zeSUGd5Ye;%bKBcLb(-XSpY+amuy1F;W71g|h@(ld-wsbXKTAzfoq^|puK8(mHB;|o|#5^ttZSV>*EFy1jTUDeQ@4_awA?03nZR-DKL!7^u?r42NyE{(pr=zB~Oj=mIz@$o{wP*S5BH8k- 
zk;?Y1==8|VEJo|?*~a8f#_fjARw+c;gwx`)C3leE($gL7jf~CRZ#eRn4z+rLkMLB_Kv)##=%t8r*?MX(@WFa7%Hfy=Yt|?dcwZ`Bj=mwbSiMr`2wpc%%63p^auiBN85wCqXsQfYW2}jpM zDYnd@H>B3_#8F%wFqP*at8Hi|Uu39L@KI>#6` zJ1Y*3th7`i9n4a=6x`6(;mjdqt~m1G$T`6_WzrmdP;>h7MGCS4&vTX&L%7h6{NCim zI+vO|cCuLZr50(AiF&H^U<`-rk+0AB{>H1`BM-KR-*3W%GE0J?b@)`z_vWTc=j@@T z0=<4Bt&KH`-L(%yb1jRMG3`~_0u_`~m_!nt0Mh{9Ln4C&?Ux@MDf2eQ7kz^r-{rHnUpNC`{9bs zOT;BWS6u=WiKOJ-Er(aK9A6)QZ93iw#$U*(^H?9pCB5@X4ZZhDy->VoUTwKiAS1ki zgVYD7TWeI27+Y>zCfpX2+b)O1xSmEN@2-+bDVmK`;%q|d-xwmOF#(nnIU)gl-7dnQ zL6_8}9v2EBtuL~z1zbYqKSv&oyH7gm+W5m?d4e09Nh8{R3X^(ca_D&`p+I#RPjA$@2X#26y*USLKCdE_+o|I%^4`lYkj!`KwwkXA@V zBrv~e+$Zd5pAEK=zsTF}JJ{c*t&ylYXlWdY6uVu4usW1h+Z7M09&JkyBp-H!>y|^- zM1BijqT{UaB_?A6jh6-6i3OX#o*nYb8t6ko-WdUzqfcUlTK4dY@|uJ6W?ThK+V@0? ztuMs-$;MqZHsbAU+s9PfltgGguE=OzutW;fMv1JpniT(dD|l~*jKzM?NVIcJFf@Pt zDY=`lCsoeF0^XsStZQ4l5VOSE+1K_W#~qq&v(Cs=4z~!`$&SZbP6>`r2K0f+Ki#gU zQnrR^o|Kr3*Mow83Vv5()Qmxaf3&_lRPO!l&$vF`R|JhlzdK=UFkO?glz8+besYAv zXa8h~;LLCPz$%Vg<8#yk{om-!4EtT|0-*`na2X_Z$y{=SsQi=&3GD`KFAW^20 z&!eDNUKK0%jdA<}RHrXO>U?@8-tIBov`XI%!Rl+d@fklyVqFI6ZWsM3$xxD{&1Ri? 
z9Bg9?fhn%+M~!8*77-+-k)=4YJ}I-^<8YprhBhv7mSnnjbe|w)R54?2Y>EzLCJ&lpchB zF2VP&f@YTwnnNIINs71YzF8FGC%xmojIPIp_0psQ8NpY`vhPncx;7T7H30CO=`@?g znghciIkTO6M$KxjrZUi@={uDcF@hvP^S3JAP>QLzxli&49OO+g`9%Ttvr*Wm{3~LQgYVj=7h~UtwoAd?Kj?G8ng5gup8lN84o?v8MkVUH|k^Tc>J5^yy)^##E5UOB0%PKK- zl8GJgXpRljVnhQstv3{bWSevCK2c~s_0gQ1TQa>YhDv`?;2&`m)j+# z08H~ctc^7ry@7G7W0RY4k6qivBRNuV;~>=C(&_L`D;nJe>D3Y$%B)1)4gZ} z1LeNf6{77*p)^^e;j4J2^2{2Dew)kvTXZX&)ceBK_Me*EE8VA=!gMpaG42XJ@X@3&-0-6@6nHw{@H25kuW)_wM-*eWlS z(FM)klSj5EP_1Z6%OAk=Tp_h@C6(3)!1F&XTiCV=J>2t@JAy^PIXc{U6{aRInId-E+ISNIUY~>7&kXQuR?|yc%z3BS zE3qoPn1}k@9yt*=thb@=#G!4q9h3F zW24JPRAjjk*D{z)EMkMYa@efVFvKL+y1orS=ib)pJnhha1!8I`kgm4GyFdv)hH?fd z(D*M2ZM;@En&T`Rb8zhoom+CyFw)Q^RZfj4ds3I(?8QH*JsOJtI&-1pdcI0<*P|PD zSi`SqOoJm4zI9~N+9TL+rN;Y@YvC8D$j$2bO|2If6GEt#C`V~#z&LNjLG;>5KmJ8m z2+^@udmL7gmRwK0Er?HfB1&9}mVakUsG` z!1T_Nge*~x*xEEo!dhMkpD)Fi0{#8f`k^&S#Om;X|nNY1Q#!2p$;4gs$zQLrvK zybg$9du2v}o-xj$Q`-*G)WuL_!Mf>=j@ti3spo)8Eigtpp6Yrk=6~cIKA(byZ!T67lDIa5E2)gJqQp*Z88xACH0F>(P>>P;W_KLlkh% z-?c#^0XX5rBO0GrMy%-MM49PZju)`hkZ!cl7iC9I@T684MX#6Mx@bG|;eM(V$cTyq za3mFz4C-8vbE4LOad1j>F#)S;D{>+ns6sVy;PxM{%>c_pi7ja7Jgek=!!UW(`gwqL zPbazEX&@E4m`+Q1Ic|sIHCjR?9^hgAd=0A``+6>^WHH_ZLV;{;V-6q?p%vCX-+^QOliztu00UUiq?z$*zvNSAAP>q92dE%B_6t|_yS zWDOoUZAhlTCQl1+;QNNwijIxz?lbEAYtP1wf~*{n3*z-w5E$cD0iyFr zQR|%{Q}>gVVSX5B6xLgTR4eqy9XFsb2XlC3$y5$4M&+Pb#MUTHXjoea!Rbf=JgoC9 zBfk-9vJ`|Y%4Wk2kC*eT<&hC5Hm*QU4q`l`pA6N{WAUhY@?6X?PbPj`mn!q1XFB+e zMHP0Ui3MKBwT86n%tLGwGgzCw(=z)+oJy&VItT)*1ph-$ovFV^FUoM9qZCZ0i`XZU z{DFkf7_lVboAc0#!O~6=&K$`1+dU#1{FS4^6$j*{+>=s-JF||Te?3AV`mlpN!I>&35p8iC~%+g{9KRm-61h|W8(SWDtAmkJDArBbj~BYb1WTNBq#7QuR z1NbC?yFsRS+9hW16$rFwm{-cYv;RQ0N|w_@tIP(YR8VZFE5K0-h-`uXMa}HAG`Hg? 
z4ZfWCx|$RzW%yO4c7#cU+Wrm zTwwU8Nm@)svFTsr?fi#ve3fb>OJi#08U+nYCe>UFCy`Hl=!G);B|mb)i$~?W_3;B; z&$!aekS8&%I;)i!3(YWKnQT&NsHq*-_=f@X{e;&zaq8N6*4F*|jvEdQ==^$`#kk$q z2Xb!Uueh`i?k7_mYl*vUoNFFjZFqQA^0*`ZmFGQaHQsq@EA+w~#je$3pPzqbT{E3mldW%!S2Eb< z{Uvky>?e6Xc>+-cse0%*)iEH_98>>$Ka?3gr4IJftm=&|O#-NDHg<=eZwq7Vx8w^L1UG+D!8~th? zZDt4lUTqNZAL+ql^>)o=wcq_MA;>>pllxvVrG3D?je7@*I%n?l|6~f+e~dMadkECt zCE={K_^;YRl^Bsc9=nQGGTFGLtzD3DSZDVHpG9GvQ1avpP7BHJ*|cnHUGHnyCy1m{{b)&x`Mfp)0W`;1#2x{@ zjwrSADQ|gDW0R!?YXNc6dN-t8l`7&dTi(O`rkH0GiKkUR($axb;okg41t{5Yoa>s{ zX_R}~S~TeX09@3?mf)s7t&++Ki1^B;+f;CNjx)&8=)Z+PS}nG14YM2!oBC+L8|+HC zVS)&_2qPSQ(yebLTaso~ZC07U~ABYzDn(H=Dge^L? zHoU_?N{Dh?V{5~&U-_9{r$Jo0ZWfHF;A$a7EU9aLhKwvH;5El5ilmnrH};gg@OkS` zECAnKE;n0iQ<&2a#l@o(+hLA?y3%O$y@g-pH7q^#fV#;s*c=>YMX?$%bWQ6bn=D8!5hCd{{Y9=*bN0mETn#iN4dP<=b&vT`jrCf z?j(vvG5kU=yAY9O@Kmilqq5yS(DY>)WP5h%qT1>}vm}_|LK%gXUw~?T+uzgfbx>ta zc&FiMP!SxOXJnQir3J>I>tm~Vl6B2$fhDVTmD6jC{%~QOcx^PL!LC%A3WK6hkcKgGL88;sL~39f`7b?==peD1P|lmHf1!l^lUu9)iB@ ztG?dS_7_}1vpdy&YY0lU>9tBU+I+~Bv_-~fqJeglf-$v+N9Cpf`41OXp|{j>ANh~j zKP6{TBRN`#wYS1d(b<+QOXFz7_3KG3U%?|3;XLz~t*)u94Oy&djt3q}?+LJVXZ;3i zkMQo|Ub3ps;A4!0)9qrn3KOd<5U~TB+|)eEL}tQO?PhV_Gf`%S zTYFIEFz0l?UM1Qup#+LeX6CY__HmP0m`NeQTxol*Rb?llm=|1=<@z{imfB~1JG0Z) z*=pSDgow^39WPR~ReoP?4)GEP0uezwv@7mhsE62XGzP6aJ)tifjF;R~PFug_reC#~ zFWQpqy4m>~8FWd&OMkm;AXk5OJLB^pQ(`QuMKmJ8OSb9-ZdDJ-k1hKaL-)UOJeEJ} zy9GX>liN{~q6rkum79|G^4G2Iza4p+LyRz&Xqan~^oelH<#!hN!)}+2`N8(`&W2?U zO1Y#MFR2fh5?&i+M~(Tr3FdGu=8bD$XJ(G^aphyoIKAeRL|)S@L6g=BeerYLAiO(Z z;xz`yZQfBe7XmiNE;sX<6bZs762QUsBLnOrcSTWrd^P*a);6L2msfOhqPA{VyH)5s z!J_*~`TMdbT-BGujA7(1NV=u?5Pa72Cm zENE=usVq&FOsgLye^mFyx0*41&%**O)^^LtyMx1CUru`*`%Qfttg$ONA>A9+75QtE ztv7l!D*5W;c{sWo;Jpy>mWRG}%{E@Ra?V!FTh@p@CG=_b6PxmisuCPd^78DavIwGb z4zb96#mF8@K>9UnoX<;;JV3;<+?Utv3%-z-4=rnZbDTU(PFe`=jwF(6=Q`cn;FZI` z-1I@L@t<+Hc@!9a@;|t>6-&zC%enFnv@=YvyTKUiXWj2wmn7?Z%I(bnTP(M&*HQU*E^h4?GA$av2%mPIwZi7O zuu598so^6S7?^%OhjDb1QHC70UTo5vGM~>OE-hTd7bf-*6gmq(@(S3-X=ha$&IpyW 
zmT%=bl%-|DlkB>Xy1_GoWs?ig(MYI>%lbSWa`5J8j_CAJr@{>A2wtn0?KUkh>`hqW zTs6|Ru4mBSF1m6&eBvYmc3^zHSgaAhd3GYfKhl+IIu}bzKIf|jlYu(8J~jNJIRIY> z)1ha)eZjbz(lvb1Jhj=*XV;SfU`ixoJ0wP${IH^1eBDamJgGl~`ts{jJG}t(Smw^F zRF*36G2!xblc&BFDOqSM(d@a`Lojd>q&Ivpg}+WtlnTlZ&AXTuev*%0zn>r~xNW~3 z0Yllwc)42XWf6BA(fDIE}6iirVJDQz*$n8!Y`!?C`p9Y)NgIU?Dk2-9+$igVOQE}kcfBi4l3R%9?>ot2=30-@$kFNns!MVZ43#7cCPnZ z?U>|k(mveWy7EZio)UWEsDa%8xT9{feVt^~0-}XCvm@6s|n60<}v+5eR2kgAJgttK)OsTh z+|#HC8j}BX#4aSYHI{#+-aK_yn){INE`0Zbx|!`%AK{ORUS$P@IXD(jTg@L^ch7V} z%ghA3*!*~?htrSrnKa@x6`elf=P5f0zuI$D-zO3KgRAJWy3g2G z&zt>pE&o$jx8!&1y&g1;E+<)mFa@23nCg&v6LmVbH6Fab~y0t3%UiZC-b5O$E z)cKh34mmi$rXn?T!q>^?ye}s4Ou-3S8bs!oz+gq=77^2v^ z(iZJF5d5U*Ae1H0B4J_u9>%a9MmdLR)d9^iAW(Wse#RE=3T$myWWl-AWCf73T6c$Q zxd{Fxk`hMS-9QM56>m8g9!+10<0ul3l~l)T^0U_W0dxctT~@AyVgx-?TS+1#NM&zb z7f`v5p)<^gdN<A^qupG!G2Enm z$=99{^%V%+9EpK~tHNDXrpo^%#G6|IpgQ#%1Q)l`be3nk0rIWa7;Zs%&8FQ917(!> zrGog1nO7V$wsFQ?RNMEsZgE+D;dukJg3;c%KWPL)A#T)OvXi1i)zmO08~XlKP`NFa zZBHfYl>YSbrcTnJi*B0`72CT8n6)06et*__c%o{$iW1pn{Ie^E9HTQRQZ4FK`i@J> z22?%Iv&$&wihDEBr+)tB!3iaL21C;Rwd9T+*k1g+FSpRICW&9p;jZRQDb@+?IBC zvU2oqyNi}qPG&3&y0`Y-=bIcf)S~&W4}Zc0kRQEH`_^xLS@0OOAj=gMaXr*`mAnc5 zqggihn-wh$mvu^-zWlaB7@wSlMqq@}MbSp8)*}wjoprr9?x$ge^0NSet0rLFpMY4s z+@CC%je5OrxG8PAUyLB`M;ov~Y%phuumFWaQw8A0J*R z4eGZ-O*Wv$;oU^6WZ&M@k1v{Mm2yU%3O*FnV6I+d%>^iJ>vu)g$-9vSyTS$5HmsLuCT)p2m)6$#F4|YM_$Od5J}6Wy!Z2s zCn~D!f=#=%qefdQWWN?NkPUl!r2XQwL~Alr^5FQ9_bAAkUno}MUtMI2gMF-#)N-rj z`Fu&sbHbr(Md%Jl<_}zvrT@6nd-G{ zbX(fUL_Y0i)!zMdwUyvQ&-wz|o&}?Ixvq}Q@XsCcvo$h^83n;Nj{Mff@%M21L3(Dq zY(5YWcp}#I`QSto31v0%W$Vq=EwDZ<9Le~#+L1{SDTi<)gW$$E>$40b&=q5D1>DNO zZN0LbKW_NDj$vszpW@%CW}b%t9Dffd%av__jEkNr!sZUE5yQ;bGJ8$CcyvfCHN(QY z+U4Hrl?DemOe0_(kc_$VAaU9H&Ax{7)<@R)NY9OWm6b8hixcSKhQmOi{sv#Mj?-=c zqDdU+ktjlpSJK(94^vYw1Z9UfTt8bKnaqw{Y}&FBMHy5HE5&An`nJmT0`ksT()^OS zV;i%%Yj!$zxt5u2;j%WGu5V>tUlQBn3eYOvYokm2E0BX{5jqW ztXv}`7e<$EW<$Ny3F3$w_jjFz-{5>4*fO4Q+loUUQR^av=dw<^5{xO$sJ8Pct5qrn zGLXRnsuP`cVH@q|DQjl8qxvhcR-M|6Q3O|v@+q>ai2oswn-DA=mq9Vh7RL7hv|1wT 
zbd2S&ZjJ!9u&?ReJqIUliA8O49xSg6q@+|y<1sN%@Jn0O@rhD`XE)F`9FWBV(Sq~0 z7e@wrV*RhcFpD}W0VR>&n?UEZN>^I$v)M%$hOFNQm}N8c$W@rh`(A6a9i`9ahlStffGd74t0X>XM|d_k})*B^RJE-X(lr?WA^OsC5KFp>3wUX2$sM67;~^ zTq=_{s1jq6^S$3HdSE|T^Jf_mN=vqtJ$4~^8DQYK`swx7>{KWKn6=-==AH7s!;0sC zsv5+j7zW=|@HnfnkxA4z4$_JtjWP%yM08C=6&9N|B}Vw2$PpZ1zeehpKUkl-63rOVr)7~;N(Ql0}A}h z-qOa#Iq{>$#JND0=lL^A`A7c7b^JsLgL7JNU#$Dt>xcp^kZH1qJWKgVn~(>35PaJM z9Eg7Wzu7&ill1ezFk#pmKx<)V+%ie0!ao;>UNBx}IXzUp^>#Zr*bel(Jy|jy+ShsH zT99J|cH2g}h6`7{NH270U%AxX@RGDK9v=GRU3dd?r$u5wiRun!`DJ6!cI*$%#T7q8 z>vhsu>oNQcko$TZtS+3B1%pCo`>4_^iG_*O4onm%%dciXDrc(u+;&K4+rIs~O7mZo z9*CHy>J}WZnemsS0kqyP;DOz@UHl9H{(5b419O!cWR|LfTTuhvMxnB4nWw$KsPd6TY|5jHKObMkz^csyY+ShdW8 zeaf1nK716xnHC2{;_c5y;CDfG+CQ4fFV-T#I(t^VC);qQDUD|9$8naV25lc8Or{2s zS9|}^?Y_D=pWO{j`&#=yE7_j;-O}*frSWnAWkw;C5gcKaR5m|6fCXXiWZRe znf|{z*xUAc9ncM__mp)G1=i)A>9b5l$Uc55U#C68_|GQ%?%d7FZXE!_0rz?F<-Se5 zQU3=Fyy;@$TP_b=1ouBuNtJBvu5EcHb61;$we<-Y$S?rjaQj!MeFGD7(cT_&=AG5g zrF<%TI>6od4{T|D)*gS*@#f6}j@LTl|I{09EBIe!mM2C3SEXc&%>Pvfx8@7#|2(7h z{(rjuj`=sNGWoy1UwHqk4z3j1pFkp*kdWiA{7<3izXtRFKJ)*G_J0d$o|=E1GMp0x zRi(CoG0^gDkLD{=Y&$V@E^kD+wuBq)Xw{!icxcPPgGi6;N<5`h`GQ8HE@Ot{ZWT zjn}pE&F3Zm2))MK7cR@0T$SdFXmYMs=_7iYgNUemOQqcXX|~u@Sm4mDGX7rz1_|w4t&*$Z$SP#vM&mgzGJBrwa@wr z=)=y?#3Se1bTi-#-%i^db^Yr7`y+4Or1epmjw|TDj-@)_oS(;Vo;|eK_8P;8r(N>h z{b@&ey6oz8?E~mh@odAui09ll@OE}H_u=8XdiS807(v!`EeT1(=}vx0pdsk5IlU)0 zWAhNF8N3$cIlgot6yzy-NPgZOl{0sFL3NM@ImXAUuw1pVy>7x!^f{DYvQsV7GVQj) zvfa@ez8zJ>GC8(ltuELlh&aMdRgbxUUXvapt`C;&4>#1uUZbLY+%=P9;EjCbAzR&U`C6_>(GWO%`EWaNc=rtMP`fBf)Ou6F z@SV(rzDywNH`)->+Rd)8n&1A zi~RT922vp4r?JRSYLrE%z0N2W!QFPKk&YuYTkhD0)66B93tVB#;36m#yD-d9$h`mg znM(4sd+L6Y<-#WKnhNkZr6QM9>t`|}bi_wGVltOD9EBLD4c+<7;^xQ!(dAECPyN0| z1@OA0r)_ETQ*-ja+!ywfl_XVM#n#`HD{J+prz+d;Jw%Cdv(uzB)8p||V{IP^E0Vdn z)d0v{>0c^JP640LY3E`&s(g%I=#hrz3HC07+Iq7TGaJA^!bq-|B^+fxRsqrCp>DKj zAq4D($gEFC2u~L%<9QJan>K4doa0ZMtB0ytH7_CBjrLcA0{5-H--no=gl4MkP!u1I zrPC6ce_|lOj=@u9XVip#kpvUs$v>Z@jHd}K2(U+#`u}XE;FC4hZ1}jT5u6=2k 
zEWy4LM?!PMjs1)jpX{O;q0+D=MV3~b_@NxQbvh+8@3TqEl2Dfj<-_N2(g^uEL(F3& zfdRa$+4<#*!<5(_fwqE0=xvKqUlX8qVClwc(SFkjzzMRlm1d7NIuGrSetTr@a;} z9kM7~w%Nnj_qmhIX~fG>0B4qTG}5Ly2oQ`4NaabhB4^l!#$)A?e$lRtLZVe=JN27* zuufN&(TZ1o&?{}cx(7|&E*{)IA1-^jg043D&miCG42%+!-!!5n)&wl=(2C;9C=HHE zJ5BNyPDw)wQgUD}+(HGqWd_pUAqq6xTeOY5jr^#w_kPj$AHef%)e9$)7}>#98^afN zx~s)r!rUj&dk2oWTJ@7;Z#Da2z%|ZXOr0${))QTrx6czlaM+~Mw2$Z5>yDHGy*aHf z|4M9SIN(m#MJEx+3Iv?YrRBw*zKpU|hJab%4U8>yq*2v z)7!Rmp=(1fkDE*@Nz@adVDD4N%SE0p7hpsbsZkmLSxDxR7Q_FGcf(F?|3!Zu?|3HXN4 z=3oHQOXy#BcuO~3TT727WPN!TQKw7v#h_Pt=Y-eR6xFWue!N|YhT%7h^fbsDq>}J7 zXgPK?nOYYcBIYV>S!X3C8^-&Xiv=4xr~h=9o_@;SijL}gmByDaVZ)t zsekda-@b!$k_mW8Nu8#ilFXHJz7$4H8;=6XyTN`qTRP?N_!aT(-0;TBv)+!b64LV~ z*5`R4mWR#=)5`hNW`{)Gmta{Cua+h;;ng8gTq-!wF8CJDdA{=3$#t30*s{y1;nQZQ zDit$^JtLE%;nAipdR9hLJ0v-LZ^X^yH4re^p8Y;t1^OaiGe~<6n`{I1pz2R7Kyxw8 z+wUDK)N!mgK}wPBsJ=E?N5R*}%axo^-t<%(ZC4V`;SNK%kg4lMs|4Wr<+O%&BE-%QR&ZPq_}^n7q|ByQdqUubJ6-->mWo zkj%{#(Q`ptvl^wS=eY&JEFx+B8yK0CNZDW~FieXKM0)0Sa^Rg|E&zS576zj2Yt{uW z@op*HT(L;!3k!~P_~9_qA>%SD>`l@*JdZN!Mx(}-(OL9nhdqBo1M-ubbpCo2E)O9W zggiVOWgDs-`+WR?Aq(NKJ5OG^OSfEdJCUyuWQM@U{zX!bIU9DSdKp83x3kO}j~K#a z?(x@Go;!zS5}_9tU;WTa#lZpMny0sxuv2)L!SYh*NbiSuV`ZvulWi7mISyEtGhR4x zdk-F-#GMWX4Arz2WUNT`D?Kf677o`lr8S0`Y;RhSz^C9S8eUuOc-Dn+npj1raF=d7QVJMt9Xb;SzsJ4^lrS*(SgI23 zqxW{0@vVMu6Z+26$oQsXrNa|0?MabDC?XB-X#}koRM6rU>&}2`RXi?VhA>0UZDW3* ziabJD-=FsK#9I)mtC_M_-rD^WUd(z}ZAkDWSh(YRmyCEn>fXaY#*d+OfpbIDUY z92B-Z(bSl`n_D;xx$T`zBzE7r>2Z$M!Wi z?X~dvaAVmF`+VkNjD}V-(!{vTmXqNPK8?J`dHbuwYH}P@b=FCnaV-pTR*@V>u9Ouab2&yqsKO zYySLFFbUt}eBMs(iPpa+to3lIAxTk4+r>4?Rn)Mt#!H}Zs_zWsPrT4*#>!uBW868C zTf$rRnQ?%`HI49IN_M4dBeMU4ovww}H0X#>r&^0dT94I46VQ=#J&N1MDlObrm3?4u zv5ZUGF6eJEcuzqU+Ez_bh28H*d;)9vDs{Whlc9RST3u>%2~~)U&AqNYbXF@Wjnd-JOZ=An9FYp0WX$ ze)y~IfiGfW&WC~^0p9YFnlLe})^*2!n;Lew_r}xYmbCg|gSxN3(qRz5r)!%rDlXlU zL6lgn9-{%-?e+%|m`Q$kCzK3UKiSSJrKJAKGFHqi+GyYmm}~Z;ROM&<9d<>CzVz#) zrJ@zRz2)O_NauO1hntZbCN4^*LZcJ_sQi275m)2)L7o9X|!tWK~WBSJD9C(dRo$zBMNh4$sTv zZ!67_ryHS%jpQe2Ugqct`*=5TSMWkfd7ZyYzm 
zr~K=_=p5g}#{5;)zOq*IwDx$Hdb4U4w0GU1U+B0}i1%aHj%7?o|GJG@S~ze$?e9i3 zZqAq=rxeXUk55Ja&)?|Ke{RU3FlnIs`!Y2GeYfC0EqW=!<9}MxB15){|GX4cIl$we zmU0yP-&_&)hM&Lxe}83RMa(iBsrEoYDP1$l5M93BZOrnke|p=I_NRyAsG99|4r7DX ze*X?Lot0Wpu=~HUh++=^#f1LvjNtzQoedCxOGz~c_11y7ABXzWoT`#3{u({si-mh= zOxhcYsK&^W!^4^Oh2)ZnukB=31m-xP1=<)uo?uH+{(5n95baw2M0wKR9 ze4{W}fKAs-&nNa411;NJ)tWl~4f2;PjY=E_5br;joFt@zb;t168@gsf?A~`&1IUN` z>-PTw7Iu-59}FuLO@oVp#`v3;9OM5JknhxHy#XzToWkFh6)0e-m1X{<=D%XEWvsJICejlvC6oY2(Mg2H(`dfhS>t2 zd^F|0_&4+4??Htnor(#ZYL--Iv0VMNBWS%Hxf%E7M%*N~^0L|_2iG1=ah}fC+~xUq zx{Le|wUN~3J-;G(tsBDP>$ER#_UAiCdIq9cG={q)do{|MXF)<+xYM~ zN79gGi)y%mm3DG_dt?j@gtbXij6#jF5tLZgzq+ja1$p>CQJfSD^#H5JRR(BeS&*Ggryuq5^IFb_+aJV(GU0S~l7UgvKBN%g@aBXH|y+-#RWhznY8Ndp9+^u^Km6D}4^wH|_FUPnd?mxDM0 zcU}1bIxayBiSBSi={>}Kgl^*dtG8?^$T|p>y-!a-?7nOV)NC7Ot)cH`jelY1Ix+Wgb@ z@~^gT2c}5%CT1sQcT3`jGSb_Rd)0+s>d?b+HWpHo`cgpc#-J6)tZbR_{VX(kC8lVg zbhAdKGrST_NaaJa)5j)?w8fb*-m*K6m-h)*?Y)Nz(p3Cr!e^XCzia^*a9>OBBVt#r zDH!>aQwQmPzsDQQ9c6h}&%#Lo} z5VwHUJ^qXPOKV2cHH#_Av++nWXYhP8EPtfHZ%L5HF9QtzNy8puxz(lE0|vN4w1l=Q zmiUzmD?|^tO4TD5oKN4r8aLY=t6S>t+4jyT{OE?A zg+a%HsG+5P>z&StT0*ZY^_Gtx;$9f7WE3p2)of3a#qdMH9U)~J46LQr=iLmQQ-x%Na}XN{DxrTFSQ9mB^OWPI+1@6k%V zjpvhb9~FY==;#9X9PsVd-5~bB{JQA^onO4Sc+eZ`i?5=hqPsJ>-F8SGaBtQPbX->& zFL$Q{7+Haxld)C<$i~rR$Q7`ihQ($%tQcIM=e`~Cu~;wCzS8V$d*7x|t5RT@zb;d< zpKH2E>vpaIAuvk>5HWB&UsNsEOTKr|YL^BkRS?j_4eh~ROD8&hHklthMTTn1d z=i-U?+Tr;&jMe?z=zZto-1;{GQ`rp<1b24}?yiLg_rl%X-Jyz8YpuQZJ+HUj*6x2E&SO>0R&&lVKI{FHfge{Br2ks2n{*nzK>5z|Kazrb)oc@S3n?mdR3>i#u=-jU z%b~f(HTqg#320mW+IYd=-HOoBRr#8z^~V}@hV6wkNe03=aUExWUu4r1*)w z$AUO12sAjiw1P-?7mC?xR1Q4Q>{ivD2l(kZiuO15Ze+E3gzy&x*Z1vGo>ARK;Isuilyu& zHYO&frvsN6N9r&o&bjj>&OcjEm+vEf?DGi8cSW)RgZ>v#i6T={6l1!*W_Uk>X!l@< z{gF!5aIeB1%WfwUZ>tynIFtU}27t3nZSM%RqD1e|T1fYzkGWj;uXV|fx4tGwvi&yl z9M|8(zJFm;kfstHlDk{rqWN?l>eUK8n`Ekq`pTvUhl6kFdyA5i*vU#)6MVFFV7W;f zw*@{eOv#A*^)@4irO%Ja5jtmU+&?Gk>-)>SI2=#2>M1BOE&PcGSJ+O$lT_aQ`LDJ@ z&D;WXzqXjhpKpc!jm@NxgohvJT`CDK?${Qau`P>D~)%pqz~)r^fUxG 
z5|3;JLu;McEu5too@%s(FK)2ujg;s(1k&!4ES9Pjvf!2m%??*V{mPf1&*EVp^*AC5 zLvLwFJ?B83p?nrT3oWZ=RDVTZMQLNuae7%OD_2$g=3Wt>PUc4i^0UiW9D~*cR{TX_ z^|z6>EHao`_=y=bS~5Z|>@mj6zCvvYzMm|W4_mN&MH*!55$t^QA0Ad&jsPjE&A-Sn z_~P!ol<3;LM=(WOiU!ArB!q#4i5^z`9iUS6@F z*C$Mmzio~)l~k%wh7dFJ&ufp%h0_7lyZq@%sY9R}-!hOt5g6Y2nV&yBCg!6^I2IXe z2M+c~BJZM{GFkQh->@2ZxD~E)^=cLW^TD&IGg=7$Rcsc;Y?3$hqqNQ9N36{1YiU>t`p&_r|7~ zC#FQ)|7EPQp$ZP=g9gU_OY!_~bQNM$D(pWp>nyQcP1v#SeX`9Yv_Ee*QqueH^+ol( z>wmDfMWT-vR{h7z16}`R$eg90&)vviycVuU5znFqXj%95ehh?kKFEb;hip_4^dqp}t>jS_q^3P@$bDQXU2HtOF=mW*6)$M~k{QwaJLUEDY zP0aQ7=g%eNfR{+EnS;rM;XeTNzjq0dM89pUo=rDZap4b1weyK+M3wKGuUZ;bLCmYY z7cVA4D-oUI4G@z*dIt5Ow-KZewgL%)Gvf1)tDZ%;L;QC)sQ2=r20-DshAb(Cbx^}9 z-}kDazM0l?+@?03=u?>VLI>aV;FOqzp|*k6@5i~}7VIy_E7xz1Q!gGe>n-Mkv~b<> z$7~c>4QxJDO{m~>3MN>!y5{S}b8)nl<9Y{s4PKSQgul8_&kMeC`q`XMH68ZNSUOT; z#wkoS$oyreeN=~>NQ>O++%X7digdFFG+U_I*2m!mJVz)wq(i95a`4-OK|2%K!P+UD zCmvdJ74j8Jm-R$9toV`%?LOU+^(R-zGVG{C>yb$xE-5(7CC+^r2I=WjSq1r7y^cEZ zCJOmf306nCdxFNP*bDb5k%@em+gn}oWS&jyTQ4b-3U^PlE)g(1I=qW@HIeXEiV2OP zJl`9>Ly}zi*81+&BH^P#9+&TrZnPcLZ5vne9@383GibkR(6pBGw=_$ z%jtF1adVNAk!1y#u0A|Rv|XdsyJ*;Ix7J%oB)3x43L(uYG>7{csk7M}P}W=d8tG|Y z4J(9D=h%8LtzKcspy@0Gg(9C#G2FLcq)mE1{2~=ppAyt*FOYU~b@F`xM@^ugtLTJRh~+%5;-P6{uWZCr>7fvaN30L8B|=jb*KM7x`f%OV6F zken*hT`ibopvsR4SsF~n5X*0=`sS+_n7e|AhPJDOdaIYV0ksCYmCdC>?7^VilLpZT ztMDl?wemOlY`2sAKwo)=RXbvlvJ|;d0`|D)^TGU+fUos>{AsP(SrTHw!dK?uVaC>w zu*cS1T0$IHxzf&dp3c!QONV`Aga(Da2 zyfe^o9+rarc5V6LcR!LW@nT9!eWJi}5jEUe#^}QF_5$A>+Dc6|wh>O7l!(f^p`nkd z%rGrjB9nTx87`TJ~`p9VGL>wjr1g64G!)K zXy^TW``LJx{N&8-3p9_ZgO229`NdsAoI2VaT(yash$4cS^x|2Dq-h5A9K19b(^4rFpdg! 
zzYs+l)dnFsLhesBjvUsR{e;;RN_qQm0**HHOK#0sl4s@$i#ByB^)xUg7Wu}(m`ar- z6~nR2BKVV@-P~q0q_+D*dJ(D*@fQtKaC69niBfC?+aWhKDv+z@+BH>M!+F48&5p+lQ}cfEWmvV3g>hCRF%ka zw3ryONLp9V;$f9oDxFxa*>=75rqT54&Y}ZRM2SfH1kh=AC7T?SCwX5wn#Co2@A-&i zkublOA|Y4Y?rC*qh_BxPnn|$&wkzw|<6_1$6OH`1^XF!}%aFuhd~NezW;azZc3q{n zaNE$scD7?TWwVg5@z@uP9$eBr)G@)CMhytckvs5G)zj`Nb^KcKjq62mf~9Tc3QY*f z(0rzoxuM9X>jarboihq0UV`1l=3PLaV#fgK&@2L5SSed;L!}DGih0zxS#4#`vt17>_pjT5kT`mz(c{mbWcm_5^8&c>yUy@I7rpd#3o+a~?iT%lH}4i| z0(aOR&n<*x^tVDy6pxa~-*n!aI($tg1LVvqo!KnmG*XOh4awi)$2QScQ^zIY1KKI0 z&FKxk%tgPOA))J(?YJ`93i+_yoy1|j?n^{2BiM66c+AdIN$>wE6u{JQ-Qe0k62Uy3 zEQL#S8blITWBHjJa*2b~`0f*ff5ix!x%x(|@TJeo%ylfHJz|h&i7c@X+Py;P@9VBk zwtNTG;X#<^y$$AxD7t^l`~$ecCg6-kMLi8w7wxDtq3u z%>0_wt!1zN_qvCLhmunPkhFi6!WhT2QQPUO5IJbJF2o@x46P>An9#Qxl`3Z84U1D= zcM{`YxmMchoxpM$thU+J{TlaG(~Yy|yKA_{o7`o}GQTE;s0m|C6|z9C zsczi2zUa6lcoJeu>R8izjJmVyn|o|PcEI<|-y?a3`-qf$u5cQS`_OR{wSBv9Me#hr z`kI_~TpnodK}*W7(q}EW+IX+SUQ7?Gc>^^u{n8RETnQ*}RP39jJU;|Mu1A1`BCzuL zfPw&N2~5#i^-B4e*QF}POml{?&h*cR;hKaod9YG7E^LgiQbK715BD1`Tlu{2r&_(F zFDeC&H#v25+{gVfX8l)?iiY>)Y2yf1u-#6+?RCDTR=@x9db9_+6Jbf(>t-h_Z3Re4 z(R-OdKzsXVZ6~=R)d+7^I|Kjv#0W1FDqQD|2)r%x#Y+Lx&IgRLDHyhIYaYLG^(a_b z9cDVfv>wzbCL0U4UyzCUs9B!-%upFuV%oB9`JK*Bt-D+ zU5R!(G`o-5OsN&XKNp3mS4K2Ir40b;6$h!D?3Z_CYaZ$F2z*1c=U+xD?AQ%0*9>tjw7&^eKS-(|vzoc>o=49bOl1ev zvY^BjwS$r)TPLo5{BVQr(_+B5joRgvuxVTd?w*OY254>@FO5ie(O!3bvve1&7O0^ zhZAzc*78+~^Qe3sz39=MK)dTdSE7O+d}j*t+DRong$xQpLbX6g4e+<}GpQ#tNcY`_ zB`T~$T(U+p{B(O~O}fRSH>sqY6crf8r=YioZl^{CrQR$^a*IjdwfMD(h}W z5Dul1{?a*lVWni&hr{U)y}~v8^pLw*Kjv#^Z%0s|t zcJ_0+lv}6|^Rn(>wVAKMvUhs-kTSTcl)#k)E~_IxV$;`jREuW)89vLrJ^;3&RJlyT z1%nTqDz#2*#@|yqRXK_CuBl2vM1(NNY;|bxDt{Q7Kn9})hE)tf#bmELB zD1s!RF|MX*wn%N&oq*H4hhE8dH!BJtxFqfJ+hix)*$zuVq)fQ3bmaP_-J4?OY($V) z-CkiEdhKyPrgM(uX&XsEYz4kRE%Q*%k|OjWEIC2i-rU(KchOg3d9=u05293^O$kZr z4MOFwMt|(^Z8(pd=pDBCV<(CUK|UXLTRXcj8TXw$ z%iJ+Amh4}H=8`+sBLn&L&QSJH}g!8B?0N@#+6YT0U{qn9X-WS7zM$1jFVZ`_?zhaa*yQapa|om zCm3|>V~4Sy+BDn{RH_+3?CXjW()((!710&&FvFawgmT^h=F5K6HCc6N3A5n9went# 
zmo^nTD|V=z7qKYGiNwp28xWi`Il3MrCauO}V#TqMmVUlfDMPo%Wq8aGwDyd3&YkUU?% z+rH`}{u%8zyffl)ef586ANb5y5~CDPX&C7&*zn>dr@xC+uYtxz0-MLr*)-}=f9OKf#rR?7rQo#UTzVpxiaEi@Mc{eP*3)kHc&_*(y2=C`n(nY zmx_p^0JHu3+y7SXas?$N9Bb3HTA$@zMWHv!-%#f`c~Bco$lq3_pa=-L*w8H8nlV4TdA4|v zvJ&zvCq}2%ntK9_=F?V`;fS;%_Eb$|b?rP`@EGGjr;LbLIm~P4HCKE*{)@QF|Dnk6 zIm3|pB{-h~zDk*VgVvby3~NJpt%!3oe#1rmvlb1TIc;|WW`YzZOQrc>$NQypNl{7oEpvOAb)n$ zqmfkou04Bxo>Xt?%J*(-sRla9T0=O^>N(1ku}hu2`3gktZZIf#(_SI0zGhD?TC~sa zdMpsMI_JNq>p4-guDCX&?|QW_0N(cj+5p=Sy!Br)uy~|B<5C{C1fAR3=*L>`w(e^5 z;~fQDcH&#eQRuQ1T4mCvT1=F+ngp_k`;dgVK08y_Wt}KotKll2*ZQo0$Dc$6A#uX{ zfu7I1XI9hx9SWeW=QgsoYm#&63c{;1l&;(&^R;|Z=5mBL)6MOw!g?wEkvZP2->Z(5 z8OEheE*GAGSG8O-EzKxnt?NBVGH)cgf)n7Gon+QYIG|^i9yJ{IF~0ISgO~R^Mj8LZ zRJ73CE1xq@ec;w!2Tn1@#~s7!7ei_HxhlV1&^h+>d_J%O#!1dyRc8HHJ#kqBE*PV4XM?`Q(B`Cy=`f4%V zznB$FmZgWq<4XK^A}>!_ZAg)srXyRSCfIy;X@MXLB!{1qH$hZq_v&RAm|%&N0tpQu z48cgrx|_3re3b*C<$>W5!1M2HR4BXmRpmk*>51)G>cJ7l+kxJI4Fz>Yqvc>^&oiXq zwTI8I0I)d8;dB-fJ-FGR&7zYW62tWVhYf}6b++3@8D$*cPTW22i#koarn zU_7y5Xt5j8x$sg{oPOlaU2x&o=dTg?kMEmQwfm=iN&54N;TxeN(>CyFmeA0AvF zSl8%ooj085kyXEE3pHhVW9AFT&4#b84<|nJtq(l|Y&}rNDw&3_Ji8woeevMR0*Ds+ zb~4G@ma1s7Uvg*^l9oVn?02LB?3dk&rZ2P^b_hWZ%Kbhhda;Cv@(`AA-)!uD-20$9 zzz?OYvUp#SaUvYhxZy>X;#zb0d)5KB{eM_Z)W3H6z2!`q_#(@v^3QVP(I%dbbRT63 z!uV4k*Eo`>;U=+Z3B4n7qa-8M>Z@XLzo}YF-apKaYGM5B!X4Go3Ad^&N#v*gAq(fB zt93l_^3wo2v4a(m2ebLm?_j%qas)Cy4cDD^Z0e zh%E!B&8KOFz3LUx)AAJ|)K!8~$B$GM$e}1!=;%1mHt(|T!ZB+&9EI$$rLg4dqIB(8 zjVjvqcXv5bPXaWb&>@6((+1nwaia>w-A&{2>~Bcg;81>zX-LPK)*-}lCi0quA7 zd?)Q6a2S%Yu->EOXqtSJ zhr2u7U$qlyE-_gHh>L~{-GAX`9@#3KC2R9=p_$Eq)A-xbfZr6|Wq6j9rCto17goxX zvtzM!DEzX5$vu2&dL=g;H9tG%j|-9T#&aCN?#=$ug{vfx78l%sP7Gf%WD%1kEp5(V zl7~vH%|=BK=je-Cp51p{2k9o>7 z6A33e9M_UPR2SQe>CqS=X$fnp*A2o7vJ8o+ZzVT3fd&R#F&VvgUj2U{#O;wn%#ZB0J6DB;s+ zS0Y=>7!LTE#V7Ul-`EfL<0R}A*7EbN|Jd6hwx-7sr~SEfde|jU=p&d-TXr-JbbxhZ z$bG-Mgl%$`9p=crxWWRSm{``ec#M)6GEisb2`Ac<0~tO*pe6}OLwbv3#q7+%)#%StoDAbGWh7bsHF-=%37}vvrYuGwvr&-o&_F`klMVCD!N7R}3MQykW=^%8q3F 
z#6JQmQuTiy!jp{}S_aq$xAoC{?=`s8|$qV4)asYzyOxO>-tyi{mUo*I*m3< zL0a@0vEZKAP14lVx3!;t$~IPkGd@&#_g?u6zKKua1&~lywInBJ1AbAz+QUO(s9Pv8 z6Q$50lzo|E!12B1twJO1X}c(Nw$M^+#{B`wkJd={#7uo0w>DpicwqTisxwLUE0r0X z-hiStc_SGfvr}#Bgo+O%p5qhvaWJ`64)Qr28}<`_u-pC_b)FVFhN!`{9#0t-P}ENO z{<_~!w$2>AvNEnGnzGLFz1%Q%bm8~yrx}nN!e@+hMcZMp zvzHe;t;%n37s7$wyH82uZm+2(XplfIyRTxhaxL6e(nHRlPP{x;b>dqQZhy;(aU>Gz zNbb)Spalg9TPo~-6^OgRy4RhLjZFQB?j*#G;{D5qRaj}j;B$!lYg!ShZ?b_WJFOefzCvRPtI*~ z&Mj5Y?eU+bd>Hi5dFb5debNJWH)+obR{n#jV@}LA>mkRlJX5y!Su~ z0-6Bx18+p+^q7&-_83Lsv}6$TaKEQp=-#fB0RcpRlCwukq6YCN&C_kD$?OcYGB22D zBe|XKLiQR1MJ;-><{I(fPEBaFGP=Meki4z;9|N(O=mMw{r;u|VpJ^Kg^@mg6T_*OL|TU1^im-dDw4cg;#4`9{0fLi0sC znzuRDT$S3xQ)3ec^*h@O(p*SX@&@W4pLMc@zxn_vr06;$cvP6`JC2~2-Q$TedB?79 zhN+Y-2cUXOu={*j-TwZ1<~CsexQ+D5g#``FwA(!McPb#xITy(AH}wMj4@sRQ`hVao z7#Z(he2V_xPKz+b|3c4z|8`n@{(m|xa{f(C=fD4N^M*Lhe^cQi|83s*zkewerdUJW z0{4$}O@-Dzji4|*?Kn+!j>CM~P2kH1=W@~(QDkmp5flEqeqe_7M~DFQij7}Pr@-Rn zMHpl2*C6io*Pm)*1Wxj;bJWej(8~Po_KRdEfY+LuAsto~2C7P)kH2YtL#cw-Wloil zM|?EK`2O&oY8dLyso^YjcCCP5yj=(5Ct<3tAV-DY zD(lKvP!y+3%Hxnk<1W@mO!d?jx#g+6D@#a+J^drh<6}UcQCMZ9RIcYMQ-$R2MKL;+fV;g^qOB6wvSs{A7B4BM+xs3z^yl9?!LXkFq((eh^2a;C^>ppm2U z-HGm4S$uLGBbBM|DQi*v=96vYM)me|ty7{f;hq* z>YLeb@8Y6|gp-4h8DTiAQe=l|UEHs3o4)ZD$OE(gsQdIE+%8?FU&n{S|I#GXtTq2o zHOBa3132{TTXTxCY-!G zQSM1vDO>){{al_40Zv6;cDpGO_UiGbom+GL!0?GHbTcKr-r;co;nBE*sC$=OSy*IPj$$LafAv)^r(x0J0wKC-4 zB^x|qc5_fGdw_E>v)KrLvCJS+&25L?j*;Xwuyf3K@&qjvNEb`5jb-xr`8w7f^*DLl zs6W?b=3EcUu}{r%un)ZKU^!G{_Q!RP#Ma9rMTZjSJ=~1dz*yg-yaj?|nwP6L7~TmN zl3oJbpAzPTZ2Xe6%G3CL*}+is?P3HC@OW*Rp+q>yJL*Jluo(sd{+984wfR(X?FzXNPNl>%4}`O`!lET;C`I5e-AO^C7q6tnM%Wk_k_VNRXk20 z2!7{RuzNzOQt&Pklm5*|Z6IF}l^S0qIGMttvn`TAD|GyHdon{^EOyxk&Qp=~nI)Ad_*|oqDqYW4WD%$f0V_)4JK5;=Y?Ze-pD#tU zB!;s^E(AQ~7jPC`wSY8lPyN!ij_3Vvm>0F|Hx}kx(nBirqlbf~oDCI9$MB2P4DQcX zO)|>G{1dcP)>_k7;e)f*&cAI>Z^xDOe)N@Q_sGs1T~ko271BAnxwjy%k6j#bb<%p? 
zLs07^>I)jf&(HY2U%h`k!;^}V&K^}!i&?A_s;wBghEp+dKtn6;Yn+ z*FI;?d@KBVmOKhEW^a{#z4{jC+;+M&K8M&~q1ZlidEibkQ!zLzL_d=6@E4hwL+mL>+7*fFV8I&a_t zu3yJ~938dq;EI6d!Oc7Mx!jH;alThg1$}{ZGkKt&Q5?mJ>9)A_+uJ7D7yZIXij2I8 z^>dLKHJ_xO90IQ1tCq{^;QJRfH{*%4I{6WBoOvmICD ze-5}_NLN1`%mxIuYH}E{$)NHjDe#ndF^fYvXF6`gD1kl1-92+>(&{MD1~W_RwfMdu)KF*2`cvs1I4t?4+G;0tj2O{> z5^nk#pYFO*t3R{4?!&V$v*ra5y;PW-v33zsZwwKBdAKnL4j1+b2st=7xFlLEwsTuF zpk8Zd1js<1ot*PHJVZ1Rpfo+ASvBd2l7GS#g`oUTvQ z|0xMR^mVXzO@9kiyGb$dri_@tGwEuzYJaO@zOkan5&Sr*ysj{(2Xf*c(?_uOi z$22oroz&Kt)EFTlC9g~yVMqj+zUETU(y^3g#NOR^AF%BQy&e^5vYVT?n9B^yCN@T+ zIC#jDE39zYPgmgsphMjc{k-O?*5$_%8lRI7&$N=qlQQ|;Py0BZkHhqvGRH$X!+b(Y zWtWjWrgKH$nCve`>mJ_Gs(9|pa0OX)wa8j>nt%Ptj8^IV`cQSkMA>jSlsHya z?J|U12yYaYgow+3;(dt9FSJ_?ls4hn61+!M`etzPuL0hDhf%6VmSc&n<_q*I((?^Z8Y4)4&UDOyTEEAW z)egVKb4DZWcAR=fa+fH(sf_;CWOkQLzQ!}{=gef$3k{!GSG)bP@|CSsRma5^TTJaT zl{_`xtR5Pvupc$I7TH>UAR(R6QevR^fJp0u;igE^07ss8JKo@O-(H31dz|$dp5|Mb zjuk?Mins4=WzDotci`4VY+vJ4m2SJW;1 z56$vFLT<=SeQGk>pO==~!`pp^pd_+$22{FK%VVnHO~001`Z&?4)DQ8H#y( z;_Ta~-(&kx%IYp(lj<5=MUP%%UOvaj2KQHc9lrY|N#_=DdFU{tis-mlvx!;g)0`Tw zUEpYWXcgTMSw7qLD*n^;G%mX1&yj(Sy(>O0>Z?6UvVQLZ;3cqeg=}fk9nT(B6R5A%N9Sr713!5#a%Y;MrLrT9i z95wLfi7mD6zCwo9?^ZczO4%4a?+TsPB!tplDVH4)KT)MwhSECAS&bW*R!Ya+_)})3 zN@OqWj%Kn)YOdpa351clO?MaqL`p@x`_=kr{Rg9CAyLvtg~LJB%3h*Kf1=b8HM{V< z(5KG(UTkQ;jF}5P*~_*>TS_HYv&WJ_t0{D&WLCOCFOXep&TObnLDN@Dl7OP2;FUM~ z8Zrg{;{7$o>rQ}knK5VzR=lPC#OTs*ZC9?vF`6bXpwXFoF=x|q)&mxn#PO7*IZ3!#SRW4b! 
zmEZe%Kj=^*S6jeFcQ(8GdU9&tZw=#!H_0y7?qmxz3Z!}ccJ#7IbK%K`=APE%88f^S zxWa67?~^m{2?(5_d;?VP{OB{)iBbhhNbfF}4eiZe2P7##Y0 zI{l(hyGM2Y513N085q}9sZ`lZXcA56F#-PbbzD^7zpVmT|D!mS&ak_^9L0c%I9aB^ z)5(H|H@h5jHAfVy_GDEjj%xqaStJBI4|bN5iHjPmb$~k%@E8g` z>sMgr2+y(LzFO(WTt$b*T8Ew5yomRRnk>Sj%%npFPlGyik>f_v;~h@$0?+cWniWQi zytQv+m?Nlib_!Ar#tAcJitdq6E7^*Es;>>@P5X=I{J7n}((F9fDgJ3zv=__Amn`i& zTrf>iVVUPJl+sxPmdX9JvM z-n9HCl;d;GyT-m}v4up5hS{WZXN|PF84b>yOkr1_B*yCaC^2g}ABr~}OI)p7O_`ut zB$)Vi#*gb(Zin9ZgRC=N>F!rJV#Y0W&1jBCc~NBif3Q zL)th>Fr*j>Y87LZ%HQWId^QSodLk?uA~L?yW%IIXJ2APOch3q4x-HaQH#5T z<8t5cV2cE9O#=eUm*9yBCDA{HeJQ|5-os)!A%%Y%_o>J@5da_ILpFNy#46w_%281! z`BiTda%U=w#?Hr|mTVVQ;L;~z3L4oDO1=i{bJt{$!2J;nd`sASVe)Ez$B7%VyPww+B=%BXSeJ+d|CxFbKOD0LCr1>h4Dcd?s^5rYM)eF*cRK9(s;4fHIfJ~OQMI&nATB_8htnf)XNAO?EgpT6dul0$x$wv+b<=vKVMOk8HKk;+}QqHf+V^Rpv&{ z&&7$#e&FJ{_-O?S46H(u!T2EXp^zRTXQ5uf6t!Emw}4`Qnh;Rc-x!8^1!l|fr%OqEWhE&n=|a5 zTda$Pr(78O5t8C8JaPY0r5Rcu3Kcs17;H9AH4>XMP{Guo4qtl~Xllj(xtb2Ml2pS_ z2_-yR`NQ!%RxJl_P>q>H{6}2z%pJU}Q~bV@KzL)KQ!hPOwDyz%;$s8IGk_v;d^af3 z{zIXF0(#oz ztG~iJxBh^^jZWzvwhJGz^GY0@ zcUn44x@}g&p{IU}_Dk`n+;!opWeR7M9s71B|CO&4mIce4nRHhpK|5T(HN;~`owBEp zgj#7f7B+u5!%>6gi9|7j;?-JjW*0{Rb{`@uTQVUfp}m<2-z?9UAFdw>-5*aIOt_ml zGb>Q+wr@LizO{?WQ%!9*i2~17sAj6rC3R+7oZIpd#+(|f53BF3%Dx3(FIAuEPix`| zHQTWlF;m{Qp(b!5(0|00T#<56DN{jJJl=9Zukf?=yfsKs>b-EBr{OUdwX{zT#HUeSJP8L(mCJZ^9j%ij3FgTO79>1f87;; zx?QuNU3PMW-#=+bkSwNIla3W=vnc23k_=Fyz`We%SaP?oQ0o+wyN|b`)ODZN zKJm~2IQf`HtHOFFn-<_^hFcA3>ZuwV#E~^GKi*WbPrZ%1%I+JH4Kaj#cdWLwBM`2b zx)zr_sitGRx&k;ONxeaIT#w#{&Dv{&rDdeMCZNNfELUS^Onog|+{7CnUnS=8qq*?9 zsTTXWQzu?<*5t}eaYXH|B2@cpMHJgLk&U#)<3Gar_2Nl5M;Jnef7NnVFC>C)rxAfq zQ+RwNwIr?ISa3ax`W(H%G@ruw~o*Veh0Sb=H}aj`NV*UFFDja2Z_$+f_;zep3ZAbP8sC>A}+zVh+Q% z+fnj%X0j>7Q7qErKzXA?Aw~**G2o0cflMHlxA7*K`ND!aQU*d$g3+WXrKr^!w4@LM zHT_a}248LAvY(RI`{W#|90q!MY+K6o9ca{zw?_}X*0w}Ek{(Q*?yqPgUa|>+4`F^c zEry>&^<}74uC(V<;!9M^dO;2jF)8j7?NVEYKUZ=~0NUIIn3fX(dz5Aw^9nhWkgG*w zvw_LYrDgmSE$omy(;~8!6(6K?2tP5~{?#+9{)F3(BHvSB0(6WIu;#R#6*)7l_d@uF 
z-!Vl^B=2ipb5kF#0Lz2n_>oXZjl1l)1GUc|=aX8qJVSjuo-ma4J)&@ZJl~B- ztPc-9N-n?8_oBfiw4_k^89;}CVB}H?^GC$Bn~eQ>&R44|!45I?%9y0*j+o)+CxcR7 zqP~%)fsY}9N7Te#x>Hgy4R)*6UdZ>z5t~`w)YnLOcj`Rt{RVvan0<${28&GysEMdo zQJf3(7~;pSHEru>U9shT0OT?fIm^+S4B-ewrI}EKtvqu@{;>Q^z>T&}SMiQf>N=g& z(@>v^fJbWFl@pra-6^;A5~{<=kLjq7GwaE<1?=Ee#(b0``yU_c91xpUgaSR>BDZd{ zXnSFgvw%_I^NHmwQDFr_Tw4!O;0W5(8`Xj>DeO=Mn$(p(CH5M+*1G%ZOR#!sQb$rM zTq~>X`k_4&S!G@yp`g3-ll?d?E;$ZVr?R7MUP*d5qs^RFdwlaj=t*59pDUTXg9Jsg z!G3ZfspNY3dX85)O*%qqs+c93*p_pZGI=u;&`3KfS@?l*|Ax1lyGM>W-*@#mRlp3v z;NDM zz6Bh2!j_7~sz^M(voX>HJpHMg#A(AdwNAu?Owy+1BgUu__S@0~P$8UgH~An?WUAeP z_iH-_&7^t4E{B4F!n%57Wv(LrZ$G^wPzlCg_M1k<((H_f-P<@=TZc8j!Qs5i9N>3>zXx)u6-# zV$?=DO~)b~-4iCPD_MjXAKl}RJ)0$Gl%dvSLyP+VFl?q1wog1fs@iWhf+1$TFMcefC-)A#${z4pOA+Xri}>-x`G$S-8_ z%se{x+)w_kd-Y8;!e`^YkjIe)brFf;*o_K!-8AC8-KL*D%RFyG3)j=WGeAC%)M~vY z1I6CbU*m(n4x)2L7sl!&8kNwWPYcAUWqY8vPLI?n{A%;6XE zlhsO$Xw-kX1$}$Sh02VUA|>(w4TmZd&WG)Gbhbg~r5R#PmZx-v`o6MNB(W`r2M4 zp;)i+=pGvU_c(ohu@9dDU%&>{I|Mx!A!D(r`k@-4ZdCCu*A^zM)9J&=)@|0gEWyAG ztq(a_P?ol*7~`coqhdNslmxxB;hGzw6O15aYelm$_H8$FOUb7O{wIjGMK*wpXs)#b zfrQdtBZk5@8VfB5*_(hou`R&)I(yHLn>)e7Ws(0lnR|D;<>CP^n+Umk4Q1Mt_#0KY z?0B@vGyXatWj@f9^?5sm(geQkrTEBC@oPMr2!yYE0 zS{`Q7@1cjKu?Vk!DJT5YCH^3Gyp`sT&ewb8;F5|37&rRKl(|NlZeadBa>3=q+v{#t zAlTZsLwce_15Mc`l=lF$Dldzfv8vG*E8(q11Hoq>T?kXSi8#CKa8iEghvno9094jT z_>P)S7+bPlM!F7o@~J|Xx9#A$Q5NsIgW(8r$NMyoD3uSbA{!wT2)5S=exI%!M||HX z$U|t!nmUOfVK-jhhL@5fb+i}&yl)_0x$ML|*q64(e@R()SVy>@wBEvy5(sFY$Jjf% zWxvNQnLFg7M)QmW6SrlLxrP+a6;mLblPEw37JTJkwG=b*tV8B;!)aAKZl?`DtwOj= zXMPgF$@2pPwL5{crk|ATpmM3vC&q&b*AJC?i!RWqxo@8{Ru_T&T)cGD(I=ePP1}$4 zRzpKn6&B>M=F$7E+aZL@N}GWdH>Wo&sMnb7NVjC7s7*!l?!ApRrBe0H@=`5NUxM{M z;s?ya3R)XvEsQ6lDDmooFXA_n|EXO)X)ISe;wI4(EuUdj%)|WQ-#`Bk?jrvGYauV3 z@pvQK)^Xw5dQ5Q@_aCWJN(X%dDs7~OOTYj>sDPcbe`pk76cAWV|Dt66e~p5nYbg6q zJv0Vd0{$-}3FPE-0T?lq1mFK-F700;(*FVQ{2!=6hKf!||7ibHyMrijC+J$yiJKCF z8x|c-USY`a(U5|I%W>}?Dj-@z7$XVa+S|9ND7(S1r=rvTt$zOy{bH~gcGktp{x1p8 
z|I?C?Z;$!@-$thjmFtYp1ISeL^59LY_%FEYM}_yrN9_lh5!R$t&gcF5$>@Lo^-w(~ zC>&n@NkzDF<4oOc{2$idsqv*{u}==&%j@??47#m<(7X4D#)%>*&Fr8s`pt=`F}0P! zCU^d5hp(YYwku+dOgab89q|Ftm7eG3XLxMBu!L+PUzV8axqGK`73+&mZw73U@a}7**0vaW63U#i3Xf_C> z5M=8qT-e>7X^aJ0DoMBR-M%;R&mVX5CGcp>mCg(1FkcJ%VzeLYJkZ;(c?McX((6o9 z?70|Li+<(-KjNK1S&qbA?_toa8Dy-gemfy4Xe#4Y1_9 z25vt4qaW5vThBr5zgPfnNA|#9nuCs;j3bbK&$63M$!4`@YI42)(FOrn-?NENP3x6t36$FF4}07_uA5&hh@i1{6DB z)XRPx^}6Mf@aFZEmi756V~0WUv^o;NxPKkORq)mag410Aq1PDIivMOIG+oh#azn(Q zVWxzw4;@Id{2MBex{ysum;g}vR~}V`K3mq089lo-kiFs<1Qz+oRN&l ztD&3T=mk&2ji(CS*0K#(W^e?9_h=lh^@8JZ)Q*g;-p&iMh5>{GGM9avsSp61iNer7 zjDhV2t8;90U>Lc`U&D;WX!(MGc9bYH++cm+Gm22QK=9e#0V@WBH^qff*?h6)W~3Nx zFB%wMF-kC%Z%(HZ0zHn!*E~$9EU;XcRL5FTYbVjsDx9jZra^=_f4A4sXmwMu?HhaM zZ*;Pha%gGM@S2!fgze8G?ck`kj4zO<$wo<8cp4I{8H%qfd>t<}p?QkYaXJ`j5rl4~H74M5u)L8n69om|c(V}= z7_JGcP22|4OQFD_?As0dzAlv4a5@+!&XLRcu8a?m&QhX*uRDPfJQY%ZfT-so=wu)b zjxel77%|>oSKEf2Z4zJl<;`EN{~{b&SswZ0%ufm=vA*kF53~NMf{9c1m>8A$1)`+J z(R}sA^u9!9G`tqip~(U?YU*mkv^k=L{V6jyWP_L?~*I+EV74uZ*2W~ z)$}NLk}h(QfP^VA`6+O#{1FV|Xwk%n!%2W3>) z?)_RhRU;Ey#(Q}QDV?2P35}E2m&xr7qz4=UgB_S-LaDt;XQ~0(m`n&ZW?r6>1 z%DuyCdOSe`su(Uh3P0pZ{ki6KR@Wg2Cj&Bi`9{ zbnl+|0ybzx@D`q#A{5fbcd%&RX^-YXDz-4q#`Y)IUcndD?awQs73$={Y1G>0^@kQ6 ztb=tmgK=4p)D*GrLG0?wEf~foSaNVU=blW5t!~%W3`bx>Y%mUzRwu7qde(@q!B=6D zv-+&Zkl*)4XV<4FW$xi$)K*8A&xIA&>3Y58GskymY2PXAPx`ds3!jts-I4TX)vg;V zRRXT%^zP;X`%~F++BbCa2pSB;&M0Cpu_LXL%o_M+GvAzb`aENY~n?K>hfvS(c~Iasc6Q{VWfKJ!|>31 zt&(_9_fG*5M6_vqa}H0uJeTbEU7ic+Z+?_|=(}56saU8Yxtx%;lo0^>R0~Yu?E)85 z*r8rnX3n5cT`>u^+N~e~CupTKqu}s4QsO^Q=&!#N*GzOzhk?14-%sB2II^TT(_u*Z zqo#4R(1O~lc@j&eBFSI@dO``5Jc3~@t0BFD`#CsS1rDB)G7(vY>p4p+xYBeY(Eg_Q zRcvpIvf9K;a3=>koQfvvFIRyGvr49RQYozG9lNb)=`8Vu-rrl?emQp5in>CQS zy#QB#s3IA!Kniyu6FEDYpYjnQa;_oYM3JJ*BnV*?Z$dc8y%q-tqTTZGG#s2UQGp=u zYJ#&0I7{G8OS9t@^Tb7RMM0tJswTaDx!0u5elnw8S+Ig$ZnQS0&OaMApL>=Mc~}QH zrgF8=286H2h)hQuBnYHvfJM7gF>ZkQO_kNOnTwPi!Ii_!2s+;(;C0mZKV7%=)5p9r z+`kkjDTSa<>+0_{llD-5YjG*Lr)_%AEhfLa9`mY;0`qD@XMTdu 
zl1~h2eMt7Ozq0jj$UzHlViKG5Ed&9T0$VDS`DH&F&81hBrd|9ScCm8CLz#|oeCJ%* zYl~T0%VZry+ZYIHgv%SL1%Nf8R-@U)*=p9$`3m&YrFw&j9y$Nctqa-)jCw_Bp1KNe zLj=knw3Dh{@JTKvL^-3_@cS$lS2^Z2Zj|0j*~5(6xJ1R=T`Y7!)g;slwt zD%8msEZ(fdJulP@(d0aua9vTtVP^6vQy<)7-6zx%?$$6q{XD5^il}jk(rci8xbTyv z#f3<#9{f6fd^1_ntjcY#ZN>wv`N9IbyKP6EM#beh@W7W480ZGC2hGZDWXM0v(KWbA zq?PkIQ(ZbA9>Q>!m;K3+8diH=v{>4BxiPiHb=NwaPep_`;jP!?o;u$kLv%!0$ZB}! zH*eRwPw!r58y0(($`-&kBlB4Wpo)mU$v0D?G~+W&e{QnXS|E3l&{?cSyxGBsz@B} z`yJ)7MTB>h^p5Kct#BwP&OiBX3=mK82CKpjMc@U&b;fH6F@y_EWb|1#r-XT=wbbn$ z>7-xT5pvwJ^yg83;-h9Ng=j)G#+J5h%tm2qyiT=(T3BnUbJ<(3HOa;+GS)5}ca6(- z__(zX^2Qi%jaQi0CDK=<)~KG9RzQRMGS_o9?c6uc5Pg4V4686xYbCvY zuK)}cUIp+?`*6qi!Z>5*GQ6hRnRV{RoB2m92@XVZY%JZLDjf&tbfXDZViopS8RQj0 z74^H9yk&o3A1s2XBC@@5n_d`jgdgDvnr{nq;A-Z|3V$kw!wp(zV37oFv~#Jpcl6w3 zKQ~Jb7BfWvjckohS{X?F&b&3*&eP_lFyS>E-ENZ5JZ@|>{;b3qR~gzrz3fZPIZ!L; ze5p+ym6^5Ahj|8iBmxVOW7(&;r@hz2O<;qXaYf_pmda|<%r5zqx;Of(?A(*bC5$&f zzT60^i*-Y`fhCbV3IB29ZIJA?sg>6 zkGc(x-K7wByY5po3n;S2m*QzCuW7Wo^RKf7(-pQe)u;QGEL?v1&Gavd7y5hFEAex& zk%He!TUZ1F!^_s$3*vA6y#Q4BReFKgk2SL)+(B%ld(oGsOyC z-FHsH*Zd5lE4Ari+_$Dv^Re%czZD(p2rQJkacO%gHX~-|16nK>o4G|2zQnNSO2Xld zWVKc7jNO$)O37`GSR%6gIt>492{Vp{UHZZ3#}Ww{{@)5dx+L2u(z;U z{V=mxY4xuFd2IcXryugM9?$KEj8|`tElXMY5b+vF{S-Aguhf$9A(HmWXC_aR~N6!>tcm_;Z|ofiL&$N7otn6L-wUaU$T1?2XBaSESp&970lh z!}o2?27b*N_aYX5C>~8q5g1vwk$N>HJA)-43WWL;^@*h@qry_vp@M(PV3i|lnWSN} z+rxCwyhR>ZUgzmVhsqrsTDt5n1Oi5Qdd@NOao;LbJRIXZ3EVJrkJGV2csy~$Jfe(E zawH!PDt?p3mQe{@?xV=*HYCey0Kaj@Nia?|Jq44YS$+RFV7k|a05ozZAokeQ^{}h^ zj|IN6xL?b}UJWAog2Svk8>MKYtyx!lrbzY7Y-9eyw<-+CZr?Sei~Z367LGI=37I_k zZjMPRHuKNRr2%y_`QiZN2O>nP;>}Msc^0nHS`eBblygJmi~f_V428MZ-cW6eZhA*XtC6dQz3p zbT)idyawB!S2H1rveHcNyJ9-rX7}UqS~F&>fubX5sO=XACeh@0zby}e(2b36K^ITI zn!h4V>1|wjZjLjf6;2jy10UQEG@KDo8%=91T?xx8+af&2nVB+xzN}FGX!DF*O+&}0 z!sG+fx%Yjm_juEDr(_?3r=M1O^SB#K<|A26Zp@GzvkAKqrsrz-8|ccJ3C41evcA;k z)qn3gb4FEHgEqFhJ-%lG-Dd=hF?vG2;$(9r;!}X_d*sfS>sxP%zz@er8Du4lYK`Mm z(OXq4nPWS`F`K|gf`!D7zcno|gJTF4a%86Qy$-wWhK+Tb7_fY!ux)o*_p#j%$~R{` 
z9nmQCbUTIwzpmIAyk>_<)s z|90PQZ1Wyz8S9&sMf|lu&b+g|&J|>h0I9XDpmwTv3jio zdoe=#`!z@xI*oy|j~x8jwa5Fc``;8{NqK12`539`GU48NZNo|kDo9AN%J>f1FoR)b z*yP@i+)y;aZTn^NMuxbrie`V7Y_NEWye2)SRg4z?Y3h-mdxn$I&RKQigDK>*5Rd}Nl3P71 ztHfXL?z!t1;1yaOnUSFxH&a8HwOZ2O?{+c0gU%)R9}+X19C?tMai--gr8#%@4CTJP z`%f=(@tqdS-OcQv)U&LgwumDJlznd;8uHPQDFsfnT*_8uR8ckvB*3OP_TFZ!gJ{tu z==oiUtYmke&5_FVSI47A6e3C1OZMbCd|y*zp3K+@{o;*WIP>GoL2}MBbK(seeQoir zFWOo%4CKkcit|gJFz>0+HUyh9an#h_eD5s_$q~jbZ>Z94yx>t7t4}T zrvrSr&FP9Hv+udbA2!0?);=J0&EsV|NAtpz&W0kY} ze*Mo1?W|8UH|{1Anat`X$9dEdW}whrhZ_j^GE)HVB^icOE=Lln)NR^g_I(8X7tZl7 zG?pIbtYXR^(bgNjsVZT7G)YW{H(zhaEz|R5{xJW%oSxD58)UN~QjPB0RY?k5Bo3#g zqop_v%89$OIHfB6I7~E$18&mfLh6b14D3gecx5jj;DwGY%r6x!7`D)S-q4nLy*nYq zQ70sThJj{<{62!s4Khph?7c2M#^)Q(cJKrI=LWy?9kghnnIgw{YQQ36M68WbAHU~Z zkKSfQaxXQ3##$c1$x8t*og_iif~Tx23boJGCTsx9|FL#h<}?kArlkAC&0A#<-N1?1ib$R^Z2enEXOlf8M77D z{#v#1DUZbShM7+LIvk=8-&o{)5eBiwP6wD8n;D&R)}}4UrGlPn8SkQ^c#lvdD8aRX zUtI$ZFZYV@BEh0K5S}8?gY!DdLG}8oUxc6HRXG@t8~UG7XISpBtNXO2(YlK_JX{It zt9es-x#HUqoMHVN?|H+q47{OIctt#oLt+8!>?_S18v0z%?PVdT*0cCDKFp(H)y@5j z(G6^9|3O)G@>@e)tIe!BlkKSXK#35$dcy{1GNW5M%J&ok5z;TOzM#3^Av9E$Ko%ab zGKy&GCi+#_klqs`7VJkHs~u@rbFpYtH6cpvH9Ry1)dr#O%E%5`Laj~ue`}14hi_e) z94|CFvooXHp-eEKM3Uh3ybuwnw{3s+aYZ99;gYpn-`3UDy~E@eO-p}Yp0S&X4ukr2KP>isb5 zZ95dftFkE#?%11JVH-wbs7*plWPheKh6CbJ}f@yplw`(vh6k@I=i_6vNzz8A%B_bshXOd@6G_NWVGc ztzG|F0!{Ik)3oSbnLLg6az<-2ZO(;|bPoqz$^w7n6wz=h==5EhC)dRh82{GQ`eS`t z@`u>l!Hj3;@d7fV=L^3;vvQN&PEtfn>0*sbSL&>frPBk?Y$-~zr0v;oQ5bLP7OR#R zeWPl5@yni$C29zrxg!>8|P zUuiou%7clFwO&bbTDtI-L~^~i_4kjM7|Tg7Z!}R*oij-%mgxJcTD##$Lp3wQ@15J+ z-sWN*DMw^Yt{)SSg8Hc<;=f9xU9MV7HqmU&wJMyZ15Ts41nG8O!ARTIb38-aOTEoW z;s$2;6L>I0@R5?C!S0?zRq71t?m-QQJ|1JMM0T$`R{Ts-If9~z}%QSF=Bm- zCwjTeE2d6sFO>7uglwitUn-%v4MukSJJDAKfoyJ1wQ|Vt$i_PfigphGAF&}%eUyWN zKq2V|6}UYfYG@6G;#gD0hX`gYlzWz|yW=CVZt87(-T+{4<;93#H;)N|tO%j98E63V zO6_2qKNRnt0l<{rj~ZcGGqflB?2FIvU{XZAy9KypU{@G5nuI>5&*n-C2-q`h0+;>{S%mJ)w-c}AJ^F~{?^_7*tocSeS5jy>1#?Y2u+?3(c0I#h5r)ONi 
z3kg9j&iHnFWBEfb<>dp38A}b29U`}y=Hw}KeFn+JNe!a zuvWWxjW#V@X*j3mOY^#p{b~x2;v2dIo)3bYo*@Mo{P)%6xZQdqdgNSbRc72A^%3F0!elXeIDwx!~5!AaUnT{9&jc#=% zHXcdNW^g|Rg<`@n?On|!+}D0~VghH&KYe+EBL$!<6OZn8y5^7`{>eMFNPZ~~AMqh( zXd0Ywu*dq(h&#`HSaJ7i&EW3=m8iBds%hr1UDaU3HUE57ii(}eaTJ1b2NxdRUcTsW zRqtOP*_sJC84pR#AO}mGe8>?RgYcrd>#sP-oMt}+Ig{J(XydF9-38R?HJcD);}(dWe9-k_I6^X4{x{yz zoNmzm?>w>MRK=Ga{_E$(#a}SLHPQT6bmUuwAI|>${?I1cAkTl_Ah(g(%>M7hLPIl@ z{^ukAmCXMZiCxzd`~JHa1N9C&57KvD_Ngnj3JC9l-b1uqLr5e713RS|E~I@ zu=#3#PCR(jJoxT-^dRvCE|cjg*_i|&5yM0Tgv>oy3Unpchue+&2t<9&!7;olUZpzO zl@xcvjrSDa(DOVz&N^PeXNmP}sV@h9bIv|t#&96LM_bo*+G26H*8xjC-GS16R;FK- zI{slHu;6EXie)k8@eUyT07(0CgzKJJZf98Y@NSORuTx0!Vfjpy5Do*$C9kBgf;RCB z_S59-cE7j0xcCkLo8VCcX(263vhxj1Y8!IYiKs(m^lL2(nIAaBWSkIW_x)14)V3z3 zI@Ev!{`WKsuS4MANRp(*zx>d4^$o5Kt#eL$KqC}q2BphBw#?VwZH>ywhxL@US2~_( zHwFos6sGH4Q8O>C;|=@v;Q3nuj+Y}G7t$U-N{%HOI)AEucoP>p6QkQ%aPs__=n1{Z zR=@k;lt#$RUg0z76W%l*HOAkSx7oOIWWxTDwXNUxip?S7bG_dnzwf~qcY>EAvQYgk zXbl)W+waw6ZPoo;DZvZovf7xw!iWTcRc$!tMxbm%0xbAh`d4H;e+X#X$Wb`Bha@A3 zAKrYO;HLG9_UV8TE43WKnaLME&B=4$7+c7oX}kC=c~!90NFvYd@y~FZQP%Chqjs{? 
z62^!aL{?w{znLu1!hBlWSsx*+4HAl$yu9d;I_aNMPuI=ZotdlUZ#`hWVN!Z#g4WND zX2lQ+OP_HUx!Z01Y-PZ40MWwzk`0miZPS#^y#@^}CaAEzrGL;IClq{E_2FcB&;Cs# zUa~J6ZB5pZ?J;`biR8rN&hcb1UZOz}R|%2EKCcqx&k!v`Sx?bsC4ne*FlV5jvrKdq zZ>>L5fP{nXFPb~l{ejAxE*h3JNj`WUo+fJ|8pAWgA9W~LSaIj24-p)5B=<#%S9a8| z-Nkl!YuN8e(5@K~9%tpPS+#}-p>uvY$Ae8~iVnZ!UmG1!yWDwU zWZiA?i81xlQ5>FKfiVL}+Ct_LoszzuEM`kssJ=_Ey$jjuE?aUS%pq=Md%Sz+6^SvH z!`O9Hx$gdVA%#=v$ACk;O%zdWg-mD!?gR!x)F)K#Co4%~be6)@P|9MiGFYd05h}^( zoFX+$XmEq9g2TvWzcXUW3z`q*%oy|pcf86Byq|B19=3RYbve2%?b%E2 z7~@`Z7lE@Cp8O!gfqpVbud1@@5Q3^lq)(!Qm#|Vmy?}7>Q}zP=i+n&wA03mvHGz1) zh$7sG&}kFv1Q>}36dEek_DA5(SOseys@jY+`_g`CAvvEf?(u`jXq*;_hn`38P*U7@ zIbAxofYBiLg{kzC(92tZB&S&Fs8H~(HDL$InT)qJo}z!4c^#zoPKW%t>6^=|syIj` zg*yK|tg?hGWZLnwOPDg>=gmKfjh7zd2=Fb)*j5@Zz~)#qRZ+gs03(!?7f2w)8#36w zFcA+B=h-1Hnpm_eXW9R-7i#CllyH!`9Qe-FNJ~amHUoGl@AQL32v>&InX66bT;_V3 zgi?)l6Cj2gd>^yZg{zTS#oY`!zcCm!@-(I8zCD|(0vEiYxmLu6naPwkfcJ!R=7_Mj zbCix)Iok?C2Ss4?{PKYC^aCail3vbJ@(&6haw<>*>Sgx47N%O=dAKIUdI z|DK=-JaCDsAb^BnT<2em@UkFHa<(Q$j>p6TCMWCA!T8ZT;K|*-W|XKXBbqULBRM<+ z*3AV+MTZx*)n(@FO%WTa+}tl44CIv)8PP5@vb%E-o zKc8TYr8^OY*3g9q9-%8eOBA?vKg7gysw06VdjuM>)oH?~m5e)BV*@P-p?An+CI{sw z!_WS+B&Hj<__7XOk*W-#p#y;6`nRP;n8~KpiZ~G{mL=%Ozm3-US_j4%6g)^6zW>j0<;>ApIZ_ru&SmUGnvIyzU=Ij)7Z{M)JT#_N7DU@+B zH{YE0UtnbGS8j$2fO{RpOU;esf8Al-r!o3*U|9p*2+p}FJia1B zGF=%L5_-to<0REmz$~xX0RbVrnSw(XcerOOJA6;v+JGe9+S$EC~bADhwAD<*v`-2S z7bd#&c}POpTw_`UIxv2dD9FgGhW(IO0lJ;|`CA7)t*n8n<3Y$ChYp`O*YernZ`(sc z?`;#rb&t9&yYbGTw*)xCl@jxUmzRO4N$A2S2EzDb6vFm)Ckf7ke{4Q4o}_ugS;HCz zyS{7tGnUZiN@FQ(?=U_Q{lLvgOHO_TQ5UFMpsVIHuhCtzaMrJ$Y1c5Zk-Zs}7VlsD zEYO^Ct2I>}b0(?Aj6|dGVN!@L*C*rbF;n~P*vIR&E9T?nt8v=jw))>jX%I=?2V=lo z9?WA|Q>=e;6he`}^b)XAw-zBt-}lhfD^EwV@H$r$h3(WGXfV#6HjX0Z2`*jLG*4o+ zLA{6mWwX<68o=p;4?#?V8K!?G`)f(p{)p`qE=^&mrH@puV2GV09B*vU`gB#x$wd*8p<#Jaon_b$PaW_XJscpeo>Mqge&(C}@ zsOycGn1JTHBRRWE{Hyw}YOuH$8{Qweg0DNwG0HY`zsH=QGYST&7eqOm3TWc`y7LiX zPT3f@A zXMiO(c$>aC#h)&-@e-_M1)3_EK^0aH z)IEIbTO2=^ErWGbzU_Tzn;!*MF^=nDEJq 
zd9k-4-E!`laJel5p^)hYgZs;@`xWYNNtxhGQ_TfWKl1}NAzYF>FcUq@$_=;XfM+q- zuK9J-%_g=&e}frM;`&Xi&^3BBW!<3Be#)*dx6bzODv}lHqiS-dnOilFY*dc~*}uC9 zX{sxezKOjN5mpYbHQJdM66n@V9v@T0G>SS@tz|oVpY3!q5#0(P{>74yfVEBox(KJN zrXXAZ#1GLYwRAn-APuTya)m4UbS{lw11lsMxVb7k>%9{RH@A&+S3E3@hcVULB6A_w zKuZyT3-p+!vaKzAeoeRc3_xTG+dV5jmd~KYqnOfLam}K;8n;~`JO9OfzTf=R_KJ<~ z(zVjUs3JSX5*BLDS&rvVD<8)VE;=f8knNewZ99dzuZx<+0?>q3*kBQF>D2=8n8%Dh zQ=e@M7q8cPaJ6YGpI$W)p4L!NhCo6`$0Y`=aZ{8yQ01UgpeYM4HL^+mW|_UCT=PW4L2QgA+&)pA94S z>7My>xr5==TheC7_eM4t7F@-b;h~g<=e%BqqV2A+*ei_56ZJ@o%6JMjilacMQnk!7 z+xZW32_@bF%iU;IVF&Gb_}DIKmDc)l31zfL!QXs@3U4bXS_s9y zel>C4j=-g(L$NBtKKMKJe}<1$@BY~K_UcJ|Bemw{|4L`EfS=yp|ND^mO5*=Yd9b(( z>=gffr$?T41^eG*$X@w>(?{ZWTYe#+#GO2QjZy*XoJ?T8&WHl~QPF zSKBnW^xz^h>af4`Rk6Ugp!;(xy!6Kh6IJJk+gDY58czlviC%Kh&w*TsuYbRF_@S95X0KwM$*4bVII*6@aBacI>FEA* zX*#{psQ%pn>;-qi?c9>3`#BqHHA$9_5_r-U&->gxG3{K>K(0LUQ4@0`L`g&6Y>Mf? zL#Law(#G_y;jgLsjYf>Vc4he~^kAH0G0qIG#Wd%&fgy8K52evA%+p9&&MK$^g)W>K zQKZ%xX)`D-$_P!|Q>;Li$F+hm-iE!Fx;j^wd3*`*gV)_BC}4z%+C`pY>dcT+c77Rn zO@eqA)7pGjDezKS$qdUO;MZX-aQsqzNq~CCK3rZ~g9EtRK0DDm{dS4-Bw4-DXP6f& z8ROU_hOaEycZV=oZO2pvjJkbp$iiWZE;W4B&?ZYaC;;w5Ykt#R!dS^-Jx91y=@vC; z;M$eU=x8d8ZJ<}Yh10`QiSdv`$nCZ#X&Ry(av%8ZWA?)2%bI|GZMW@FHdhAjl>wkTn2*41bW2yZEkJw8y?_mNG_L?#UHv5l#-fkR44G z1|$(0FXqy3Q}0Pe1VOe#LCCh7OH1SgpKTn=me+Pr8=E=CBsLJZ7ztd z>a|B@-5DwjkiyA7EpKURg`DC!X2eM_Q;vxd;l0qc^`Nh4FnD=adwVFMrNw%i#wIv& z1g6vlS$@7Q5~djJTem?0@^k6c<%?&&FR)VDtW;g&E0&Gj$#*+h?QoN16-F%Y^h??R zOM~e$6zs}!;+_P^$Q)NYc-WI6gjRo=VDjgADJ}mie~e|amzVkSlS=PvHpdrZYNnMT zt#QGDDiqfQwi}zX^!nhki2B{@H50>{;g)c?JB~uC_3>k%uVBtcO!V!FAjxAI`MEM~ zpyfnfnVrUawI>Zaet$$(x`v98Po@ePa_dX3!Xc179k%1OZ;Z3TuyXYv@j} zk_Ci{Vd>aUHbpv92e@IJ*bPom#rH@zl=~zE1Jy0M%oNVuK88q?Q|)P*x;UuGTsY7EU7;jG zL6=!W4@Wl;XaL+uN_=GAOmlUptZ3$q92^($ zg+946JWaj5%WJ*ul6VF@awHsgSF=qixnc_$zhkXeV|W!u`EuhvXT31i8MDXM^pm%C z-Sx26m|j-+cm&(&GO>Nf|*PlafC=Yf)Gji*zPLEiHbgE*tN%#bo{ z98bsLr@#Iwf42IgT?0exMKfuoJmI|}DCGgJW3tW-ycX8ub4eC~G1J-<0QTF`0&Hy5-%iwDK|`!v5lAuZ&fZtv?*} 
zN#&1OO>K>yC)DO2H+A=fkiZmj3@1jEOZ^?UWD@a91immOub>uM98nZQ+b|$)Zw#yE zao-o`{3d^g!O!fTs=icIz+8}i(uU#kHPwx>XLl*-NbQH?81@oSm;F=ITB+$*VyanV z6ZrLi|vLTo;64VRxP7dW79+A77&0&#L;a?yWgM)l?TY-7HJ?h!e zAm&m%^}Z)iTi4rlS^?oPllMxAeMf(B0UHjC41RImzkjYHB8M8ySm$OA2`<{L6ZQ>+ z1%$F7JUDnlZPW!k*$lLc(tBN!70`g3k(tbvN9y-hCXilZVo#p1P%eEG*^OL1bn?~; zL9n#LRoTf}>X(N~U9et3I*)H?Iy$|Ht!u`%sx&M6yTFjAMc`ygp0MprR^(s@st zINZF@ihhQ!tL>!r9g)SaK`ilTi<I%g;laiZDm zy=Al1vszm*sc%{5?ys|sxcd>4BoJaEITE&P;S&7vU#O{hv%FH(YWc?KC!~~$N?S5n3EgwF45)iF9RngCggwxbr+W;Xr{eX|#*jRCv!^}K4 z@hro`#;-zdkGcg(4Y#HRjNM~s)*CJ+rj0DcfS=q;2zXY!UyErsh$j-Ocb}{a4Cvg; znJx$>q}}Nq!fA~L{<-+UGXdW4cogjbvWi1Si zhUAH0alP}K9$>lx=V;x4bOv!>yTw`6vH5+){i~4;lfZAxDPZcK`v@8-0=?Z|ef@ss zCHP0oPK=3&AhFYvH~|5D3M55-{0L~@JPPhg7wam6E-|0D`(Q^r!b}Nf!zS9i>Fyy{ zTaB4?@Tk3+bPb~7Iz!3B!HjPxp$ydq?-Lq>p+1J_n~K8YV6op-aJ&FuPK$#!LEXmU zL97Alx_u_~eEvA@I9K)qQjr!@%6G-u$HUhQ9Rr&I(pk>J(>T4= zZFfslqVaCeG13CM_dv+I+WyD)#Iqw@x@uXHEM!)6r7j;52&?^W(Tu+k+Wi4`!RCTE zwT4R?8yc$c0BFkyJ1@F)%fT8&@-LSvO{H^155#^mo7hoMdM}6AjMz2RYVq^^89p9= zU%)n3%EzB$7C04l-yA~H(pZ`3UzKJutSFCOyfXa_lJ!=k(aFM?iXbgGL#nSzhbSOS z2cYS5y|Z+wff#cd9ru})@AbHcx_Ik{l-ic^`uQdL?h#vjm*4ZDS&X9&_M8?4tFihp z@4J%Zz{*p^)TsJ-Qxe_XG;gnXcYSScgUa%mv-_V8Xw-f5_@?(54FNS@e+0U&Mp8H;2+Q}R_&EFtmg7lkGr;*hz4#?%FxKFHOc*h zhwGm<^*%Lsle%+nA}osiD|^6veK?m3vrl3pyxGNG50AfAuhl=RakOKd(GOr`+J2ee zW835xL@v+FTp>4<=mQ42`BC1Ti60gwB*sr{VKEl8zyd{iL!0~Lr?*Bs7YYnZb$%+H zE#|^EvwJxep1k$^Dl40m2ddSK-0eki*))U#HS>dq;TAICSys@(Pb@67lOlsQNYkL= zHrKJiJ8ulO(vIPI5vO0(o7;Q{rvjuM<05soVs5Gq3!O5Na>pL#zvc|RBjQTztDI`H zZmws4WV>En;m}a&L~?Fsgv3dDbPbKVtkD)1)^rUP{y-vtM9DR;!Qo~yKvupyrCG?7 z&1N#$u^dy-P}xiLJGc4D$)r8RK8?%3j2Fpn&e-gtGig*K+ysp%B@F=D<+AZsLrY&)Uahb6{6 zXGhtiI;}sSv;NEt zk`!f5YwJ?X@6xbN_`^lc$dTFMg*ZLswe*y+-?Jg1(xG{R`m48ZyQxP^y}w(3EKO44 z*Jq@*T+VpDKc5bR%zCe^74EJrVG@4XN*Q+Bto_!9*?!f{N>{F0hFh*a96tDXBd@ET zqG$!(%PCu4s*Aqw5*sPY{JB49?b2)$D5Hqy6OxulfcARD$~14a^Csq09DtlkCbTrr zrB%^jPlM{Jg1x*f7vJwnl--91&-iFsA1X93r7b&-Uk6 z7>uDhO1Zb-!EfW;M$pRf#cAsGRE}>mq_xNL0FCVt>D;SUGy-Q 
zVGkzg8uux#8r4`77Z+Py83&<$OnTig-7f!cX(NtA>k@eV;O1Gc9jjWy2&;7dceL~W z&vE|W5bf{y-;QP&8P#C`segIw4JYyZC++)ZT1x0bYr1MGGvB^{*MN`^NB`Yk|BPp( jpIR0FzW6@{e(@rF%CWFvY_+cp2OH8~6~rq<_5J@Zvm)yf literal 0 HcmV?d00001 diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst index 336d574bd7f..5ec1ec9032a 100644 --- a/lib/spack/docs/environments.rst +++ b/lib/spack/docs/environments.rst @@ -49,6 +49,8 @@ Spack uses a "manifest and lock" model similar to `Bundler gemfiles managers. The user input file is named ``spack.yaml`` and the lock file is named ``spack.lock`` +.. _environments-using: + ------------------ Using Environments ------------------ diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 489c15645a6..8170f152a4e 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -66,6 +66,7 @@ or refer to the full manual below. config_yaml build_settings environments + containers mirrors module_file_support repositories diff --git a/lib/spack/spack/cmd/containerize.py b/lib/spack/spack/cmd/containerize.py new file mode 100644 index 00000000000..cc2c0015603 --- /dev/null +++ b/lib/spack/spack/cmd/containerize.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
description = ("creates recipes to build images for different"
               " container runtimes")
section = "container"
level = "long"


def containerize(parser, args):
    """Entry point for the ``spack containerize`` command.

    Looks for a ``spack.yaml`` manifest in ``args.env_dir`` (or the
    current working directory when that attribute is falsy), validates
    it for container use and prints the generated recipe to stdout.

    Args:
        parser: argument parser (unused, required by the command API)
        args: parsed command-line arguments; only ``env_dir`` is read

    Raises:
        ValueError: if no ``spack.yaml`` exists in the chosen directory
    """
    where = args.env_dir or os.getcwd()
    manifest = os.path.abspath(os.path.join(where, 'spack.yaml'))
    if not os.path.exists(manifest):
        raise ValueError('file not found: {0}'.format(manifest))

    # Sanitize the environment configuration, then render the recipe
    sanitized = spack.container.validate(manifest)
    print(spack.container.recipe(sanitized))
def validate(configuration_file):
    """Validate a Spack environment YAML file that is being used to generate a
    recipe for a container.

    A few attributes of the configuration must have specific values for the
    container recipe, so this function returns a sanitized copy of the
    configuration read from the input file. A warning is issued for every
    modification that was needed.

    Args:
        configuration_file (str): path to the Spack environment YAML file

    Returns:
        A sanitized copy of the configuration stored in the input file
    """
    import jsonschema

    with open(configuration_file) as f:
        config = syaml.load(f)

    env_dict = spack.environment.config_dict(config)

    # Ensure we have a "container" attribute with sensible defaults set
    env_dict.setdefault('container', {
        'format': 'docker',
        'base': {'image': 'ubuntu:18.04', 'spack': 'develop'}
    })
    env_dict['container'].setdefault('format', 'docker')
    env_dict['container'].setdefault(
        'base', {'image': 'ubuntu:18.04', 'spack': 'develop'}
    )

    # Drop attributes that are not needed / allowed in the container recipe
    for subsection in ('cdash', 'gitlab_ci', 'modules'):
        if subsection not in env_dict:
            continue
        msg = ('the subsection "{0}" in "{1}" is not used when generating'
               ' container recipes and will be discarded')
        warnings.warn(msg.format(subsection, configuration_file))
        del env_dict[subsection]

    # Default the concretization strategy to "together"; warn if the user
    # explicitly set anything else
    env_dict.setdefault('concretization', 'together')
    concretization = env_dict['concretization']
    if concretization != 'together':
        msg = ('the "concretization" attribute of the environment is set '
               'to "{0}" [the advised value is instead "together"]')
        warnings.warn(msg.format(concretization))

    # An explicit install tree will be overridden inside the image
    if env_dict.get('config', {}).get('install_tree', None):
        warnings.warn('the "config:install_tree" attribute has been set '
                      'explicitly and will be overridden in the container '
                      'image')

    # Likewise for the view
    if env_dict.get('view', None):
        warnings.warn('the "view" attribute has been set explicitly '
                      'and will be overridden in the container image')

    jsonschema.validate(config, schema=env.schema)
    return config
#: Global variable used to cache in memory the content of images.json
_data = None


def data():
    """Returns a dictionary with the static data on the images.

    The dictionary is read from a JSON file lazily the first time
    this function is called.
    """
    global _data
    # Compare against None explicitly: a legitimately empty dictionary is
    # falsy and a truthiness test would re-read the file on every call.
    if _data is None:
        json_dir = os.path.abspath(os.path.dirname(__file__))
        json_file = os.path.join(json_dir, 'images.json')
        with open(json_file) as f:
            _data = json.load(f)
    return _data


def build_info(image, spack_version):
    """Returns the name of the build image and its tag.

    Args:
        image (str): image to be used at run-time. Should be of the form
            <image_name>:<image_tag> e.g. "ubuntu:18.04"
        spack_version (str): version of Spack that we want to use to build

    Returns:
        A tuple with (image_name, image_tag) for the build image

    Raises:
        ValueError: if no build tag exists for the requested Spack version
    """
    # Don't handle the KeyError on a wrong image here, as it should have
    # been caught by the JSON schema validating the configuration
    image_data = data()[image]
    build_image = image_data['build']

    # Try to check if we have a tag for this Spack version
    try:
        build_tag = image_data['build_tags'][spack_version]
    except KeyError:
        msg = ('the image "{0}" has no tag for Spack version "{1}" '
               '[valid versions are {2}]')
        msg = msg.format(build_image, spack_version,
                         ', '.join(image_data['build_tags'].keys()))
        raise ValueError(msg)

    return build_image, build_tag


def package_info(image):
    """Returns the commands used to update system repositories, install
    system packages and clean afterwards.

    Args:
        image (str): image to be used at run-time. Should be of the form
            <image_name>:<image_tag> e.g. "ubuntu:18.04"

    Returns:
        A tuple of (update, install, clean) commands.
    """
    image_data = data()[image]
    return image_data['update'], image_data['install'], image_data['clean']
+ """ + image_data = data()[image] + update = image_data['update'] + install = image_data['install'] + clean = image_data['clean'] + return update, install, clean diff --git a/lib/spack/spack/container/writers/__init__.py b/lib/spack/spack/container/writers/__init__.py new file mode 100644 index 00000000000..a1d2fa31020 --- /dev/null +++ b/lib/spack/spack/container/writers/__init__.py @@ -0,0 +1,154 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Writers for different kind of recipes and related +convenience functions. +""" +import collections +import copy + +import spack.environment +import spack.schema.env +import spack.tengine as tengine +import spack.util.spack_yaml as syaml + +from spack.container.images import build_info, package_info + +#: Caches all the writers that are currently supported +_writer_factory = {} + + +def writer(name): + """Decorator to register a factory for a recipe writer. + + Each factory should take a configuration dictionary and return a + properly configured writer that, when called, prints the + corresponding recipe. + """ + def _decorator(factory): + _writer_factory[name] = factory + return factory + return _decorator + + +def create(configuration): + """Returns a writer that conforms to the configuration passed as input. + + Args: + configuration: how to generate the current recipe + """ + name = spack.environment.config_dict(configuration)['container']['format'] + return _writer_factory[name](configuration) + + +def recipe(configuration): + """Returns a recipe that conforms to the configuration passed as input. 
class PathContext(tengine.Context):
    """Generic context used to instantiate templates of recipes that
    install software in a common location and make it available
    directly via PATH.

    Subclasses are expected to provide a ``template_name`` class
    attribute pointing at the template to render, and are typically
    registered with the ``@writer`` decorator so that ``create`` can
    dispatch on the ``container:format`` attribute.
    """
    def __init__(self, config):
        # Full environment configuration (the "spack" mapping) and the
        # "container" subsection that drives recipe generation
        self.config = spack.environment.config_dict(config)
        self.container_config = self.config['container']

    @tengine.context_property
    def run(self):
        """Information related to the run image."""
        image = self.container_config['base']['image']
        Run = collections.namedtuple('Run', ['image'])
        return Run(image=image)

    @tengine.context_property
    def build(self):
        """Information related to the build image."""

        # Map the final image to the correct build image
        run_image = self.container_config['base']['image']
        spack_version = self.container_config['base']['spack']
        image, tag = build_info(run_image, spack_version)

        Build = collections.namedtuple('Build', ['image', 'tag'])
        return Build(image=image, tag=tag)

    @tengine.context_property
    def strip(self):
        """Whether or not to strip binaries in the image"""
        # Defaults to True when not set in the configuration
        return self.container_config.get('strip', True)

    @tengine.context_property
    def paths(self):
        """Important paths in the image"""
        # Fixed locations inside the image: the environment directory,
        # the install tree ("store") and the view exposed on PATH
        Paths = collections.namedtuple('Paths', [
            'environment', 'store', 'view'
        ])
        return Paths(
            environment='/opt/spack-environment',
            store='/opt/software',
            view='/opt/view'
        )

    @tengine.context_property
    def manifest(self):
        """The spack.yaml file that should be used in the image"""
        import jsonschema
        # Copy in the part of spack.yaml prescribed in the configuration file
        manifest = copy.deepcopy(self.config)
        # The "container" subsection only drives recipe generation and must
        # not appear in the manifest embedded in the image
        manifest.pop('container')

        # Ensure that a few paths are where they need to be
        manifest.setdefault('config', syaml.syaml_dict())
        manifest['config']['install_tree'] = self.paths.store
        manifest['view'] = self.paths.view
        manifest = {'spack': manifest}

        # Validate the manifest file
        jsonschema.validate(manifest, schema=spack.schema.env.schema)

        return syaml.dump(manifest, default_flow_style=False).strip()

    @tengine.context_property
    def os_packages(self):
        """Additional system packages that are needed at run-time."""
        package_list = self.container_config.get('os_packages', None)
        # Pass through None / empty list unchanged so templates can
        # simply test for truthiness
        if not package_list:
            return package_list

        image = self.container_config['base']['image']
        update, install, clean = package_info(image)
        Packages = collections.namedtuple(
            'Packages', ['update', 'install', 'list', 'clean']
        )
        return Packages(update=update, install=install,
                        list=package_list, clean=clean)

    @tengine.context_property
    def extra_instructions(self):
        """User-supplied snippets appended to the build and final stages."""
        Extras = collections.namedtuple('Extra', ['build', 'final'])
        extras = self.container_config.get('extra_instructions', {})
        build, final = extras.get('build', None), extras.get('final', None)
        return Extras(build=build, final=final)

    @tengine.context_property
    def labels(self):
        """Labels to attach to the generated image (may be empty)."""
        return self.container_config.get('labels', {})

    def __call__(self):
        """Returns the recipe as a string"""
        env = tengine.make_environment()
        t = env.get_template(self.template_name)
        return t.render(**self.to_dict())


# Import after function definition all the modules in this package,
# so that registration of writers will happen automatically
import spack.container.writers.singularity  # noqa
import spack.container.writers.docker  # noqa
@writer('docker')
class DockerContext(PathContext):
    """Context used to instantiate a Dockerfile"""
    #: Name of the template used for Dockerfiles
    template_name = 'container/Dockerfile'

    @tengine.context_property
    def manifest(self):
        """Manifest rewritten as a chain of ``echo`` commands.

        Docker doesn't support HEREDOC, so the spack.yaml content is
        emitted one line at a time inside a parenthesized sub-shell.
        """
        manifest_str = super(DockerContext, self).manifest
        lines = manifest_str.split('\n')
        # The first line opens the sub-shell, every following line is a
        # plain echo; all of them end with a continuation backslash.
        echoed = ['&& (echo "' + lines[0] + '" \\']
        echoed.extend('&& echo "' + line + '" \\' for line in lines[1:])
        # Close the parenthesis on the last line instead of continuing
        echoed[-1] = echoed[-1].replace(' \\', ')')
        return '\n'.join(echoed)
import writer, PathContext + + +@writer('singularity') +class SingularityContext(PathContext): + """Context used to instantiate a Singularity definition file""" + #: Name of the template used for Singularity definition files + template_name = 'container/singularity.def' + + @property + def singularity_config(self): + return self.container_config.get('singularity', {}) + + @tengine.context_property + def runscript(self): + return self.singularity_config.get('runscript', '') + + @tengine.context_property + def startscript(self): + return self.singularity_config.get('startscript', '') + + @tengine.context_property + def test(self): + return self.singularity_config.get('test', '') + + @tengine.context_property + def help(self): + return self.singularity_config.get('help', '') diff --git a/lib/spack/spack/schema/container.py b/lib/spack/spack/schema/container.py new file mode 100644 index 00000000000..cb1ed8d63ac --- /dev/null +++ b/lib/spack/spack/schema/container.py @@ -0,0 +1,82 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +"""Schema for the 'container' subsection of Spack environments.""" + +#: Schema for the container attribute included in Spack environments +container_schema = { + 'type': 'object', + 'additionalProperties': False, + 'properties': { + # The recipe formats that are currently supported by the command + 'format': { + 'type': 'string', + 'enum': ['docker', 'singularity'] + }, + # Describes the base image to start from and the version + # of Spack to be used + 'base': { + 'type': 'object', + 'additionalProperties': False, + 'properties': { + 'image': { + 'type': 'string', + 'enum': ['ubuntu:18.04', + 'ubuntu:16.04', + 'centos:7', + 'centos:6'] + }, + 'spack': { + 'type': 'string', + 'enum': ['develop', '0.14', '0.14.0'] + } + }, + 'required': ['image', 'spack'] + }, + # Whether or not to strip installed binaries + 'strip': { + 'type': 'boolean', + 'default': True + }, + # Additional system packages that are needed at runtime + 'os_packages': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + # Add labels to the image + 'labels': { + 'type': 'object', + }, + # Add a custom extra section at the bottom of a stage + 'extra_instructions': { + 'type': 'object', + 'additionalProperties': False, + 'properties': { + 'build': {'type': 'string'}, + 'final': {'type': 'string'} + } + }, + # Reserved for properties that are specific to each format + 'singularity': { + 'type': 'object', + 'additionalProperties': False, + 'default': {}, + 'properties': { + 'runscript': {'type': 'string'}, + 'startscript': {'type': 'string'}, + 'test': {'type': 'string'}, + 'help': {'type': 'string'} + } + }, + 'docker': { + 'type': 'object', + 'additionalProperties': False, + 'default': {}, + } + } +} + +properties = {'container': container_schema} diff --git a/lib/spack/spack/schema/merged.py b/lib/spack/spack/schema/merged.py index d56228c1160..e118acf2860 100644 --- a/lib/spack/spack/schema/merged.py +++ 
b/lib/spack/spack/schema/merged.py @@ -13,6 +13,7 @@ import spack.schema.cdash import spack.schema.compilers import spack.schema.config +import spack.schema.container import spack.schema.gitlab_ci import spack.schema.mirrors import spack.schema.modules @@ -26,6 +27,7 @@ spack.schema.cdash.properties, spack.schema.compilers.properties, spack.schema.config.properties, + spack.schema.container.properties, spack.schema.gitlab_ci.properties, spack.schema.mirrors.properties, spack.schema.modules.properties, diff --git a/lib/spack/spack/test/cmd/gc.py b/lib/spack/spack/test/cmd/gc.py index 76eb608cf29..22c85a1d780 100644 --- a/lib/spack/spack/test/cmd/gc.py +++ b/lib/spack/spack/test/cmd/gc.py @@ -30,7 +30,9 @@ def test_packages_are_removed(config, mutable_database, capsys): @pytest.mark.db -def test_gc_with_environment(config, mutable_database, capsys): +def test_gc_with_environment( + config, mutable_database, mutable_mock_env_path, capsys +): s = spack.spec.Spec('simple-inheritance') s.concretize() s.package.do_install(fake=True, explicit=True) diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py index 3595f91953d..9a64209cfa6 100644 --- a/lib/spack/spack/test/cmd/test.py +++ b/lib/spack/spack/test/cmd/test.py @@ -1,4 +1,4 @@ -# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) diff --git a/lib/spack/spack/test/container/cli.py b/lib/spack/spack/test/container/cli.py new file mode 100644 index 00000000000..8e5403f072f --- /dev/null +++ b/lib/spack/spack/test/container/cli.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import llnl.util.filesystem as fs +import spack.main + + +containerize = spack.main.SpackCommand('containerize') + + +def test_command(configuration_dir, capsys): + with capsys.disabled(): + with fs.working_dir(configuration_dir): + output = containerize() + assert 'FROM spack/ubuntu-bionic' in output diff --git a/lib/spack/spack/test/container/conftest.py b/lib/spack/spack/test/container/conftest.py new file mode 100644 index 00000000000..802b34c5f89 --- /dev/null +++ b/lib/spack/spack/test/container/conftest.py @@ -0,0 +1,43 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pytest + +import spack.util.spack_yaml as syaml + + +@pytest.fixture() +def minimal_configuration(): + return { + 'spack': { + 'specs': [ + 'gromacs', + 'mpich', + 'fftw precision=float' + ], + 'container': { + 'format': 'docker', + 'base': { + 'image': 'ubuntu:18.04', + 'spack': 'develop' + } + } + } + } + + +@pytest.fixture() +def config_dumper(tmpdir): + """Function that dumps an environment config in a temporary folder.""" + def dumper(configuration): + content = syaml.dump(configuration, default_flow_style=False) + config_file = tmpdir / 'spack.yaml' + config_file.write(content) + return str(tmpdir) + return dumper + + +@pytest.fixture() +def configuration_dir(minimal_configuration, config_dumper): + return config_dumper(minimal_configuration) diff --git a/lib/spack/spack/test/container/docker.py b/lib/spack/spack/test/container/docker.py new file mode 100644 index 00000000000..fbdc085828e --- /dev/null +++ b/lib/spack/spack/test/container/docker.py @@ -0,0 +1,74 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import spack.container.writers as writers + + +def test_manifest(minimal_configuration): + writer = writers.create(minimal_configuration) + manifest_str = writer.manifest + for line in manifest_str.split('\n'): + assert 'echo' in line + + +def test_build_and_run_images(minimal_configuration): + writer = writers.create(minimal_configuration) + + # Test the output of run property + run = writer.run + assert run.image == 'ubuntu:18.04' + + # Test the output of the build property + build = writer.build + assert build.image == 'spack/ubuntu-bionic' + assert build.tag == 'latest' + + +def test_packages(minimal_configuration): + # In this minimal configuration we don't have packages + writer = writers.create(minimal_configuration) + assert writer.os_packages is None + + # If we add them a list should be returned + pkgs = ['libgomp1'] + minimal_configuration['spack']['container']['os_packages'] = pkgs + writer = writers.create(minimal_configuration) + p = writer.os_packages + assert p.update + assert p.install + assert p.clean + assert p.list == pkgs + + +def test_ensure_render_works(minimal_configuration): + # Here we just want to ensure that nothing is raised + writer = writers.create(minimal_configuration) + writer() + + +def test_strip_is_set_from_config(minimal_configuration): + writer = writers.create(minimal_configuration) + assert writer.strip is True + + minimal_configuration['spack']['container']['strip'] = False + writer = writers.create(minimal_configuration) + assert writer.strip is False + + +def test_extra_instructions_is_set_from_config(minimal_configuration): + writer = writers.create(minimal_configuration) + assert writer.extra_instructions == (None, None) + + test_line = 'RUN echo Hello world!' 
+ e = minimal_configuration['spack']['container'] + e['extra_instructions'] = {} + e['extra_instructions']['build'] = test_line + writer = writers.create(minimal_configuration) + assert writer.extra_instructions == (test_line, None) + + e['extra_instructions']['final'] = test_line + del e['extra_instructions']['build'] + writer = writers.create(minimal_configuration) + assert writer.extra_instructions == (None, test_line) diff --git a/lib/spack/spack/test/container/images.py b/lib/spack/spack/test/container/images.py new file mode 100644 index 00000000000..808676c39a9 --- /dev/null +++ b/lib/spack/spack/test/container/images.py @@ -0,0 +1,58 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os.path + +import pytest + +import spack.container + + +@pytest.mark.parametrize('image,spack_version,expected', [ + ('ubuntu:18.04', 'develop', ('spack/ubuntu-bionic', 'latest')), + ('ubuntu:18.04', '0.14.0', ('spack/ubuntu-bionic', '0.14.0')), +]) +def test_build_info(image, spack_version, expected): + output = spack.container.images.build_info(image, spack_version) + assert output == expected + + +@pytest.mark.parametrize('image,spack_version', [ + ('ubuntu:18.04', 'doesnotexist') +]) +def test_build_info_error(image, spack_version): + with pytest.raises(ValueError, match=r"has no tag for"): + spack.container.images.build_info(image, spack_version) + + +@pytest.mark.parametrize('image', [ + 'ubuntu:18.04' +]) +def test_package_info(image): + update, install, clean = spack.container.images.package_info(image) + assert update + assert install + assert clean + + +@pytest.mark.parametrize('extra_config,expected_msg', [ + ({'modules': {'enable': ['tcl']}}, 'the subsection "modules" in'), + ({'concretization': 'separately'}, 'the "concretization" attribute'), + ({'config': {'install_tree': '/some/dir'}}, + 'the 
"config:install_tree" attribute has been set'), + ({'view': '/some/dir'}, 'the "view" attribute has been set') +]) +def test_validate( + extra_config, expected_msg, minimal_configuration, config_dumper +): + minimal_configuration['spack'].update(extra_config) + spack_yaml_dir = config_dumper(minimal_configuration) + spack_yaml = os.path.join(spack_yaml_dir, 'spack.yaml') + + with pytest.warns(UserWarning) as w: + spack.container.validate(spack_yaml) + + # Tests are designed to raise only one warning + assert len(w) == 1 + assert expected_msg in str(w.pop().message) diff --git a/lib/spack/spack/test/container/schema.py b/lib/spack/spack/test/container/schema.py new file mode 100644 index 00000000000..3f33a3f9f7d --- /dev/null +++ b/lib/spack/spack/test/container/schema.py @@ -0,0 +1,16 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import spack.container +import spack.schema.container + + +def test_images_in_schema(): + properties = spack.schema.container.container_schema['properties'] + allowed_images = set( + properties['base']['properties']['image']['enum'] + ) + images_in_json = set(x for x in spack.container.images.data()) + assert images_in_json == allowed_images diff --git a/lib/spack/spack/test/container/singularity.py b/lib/spack/spack/test/container/singularity.py new file mode 100644 index 00000000000..445a119f6cb --- /dev/null +++ b/lib/spack/spack/test/container/singularity.py @@ -0,0 +1,42 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +import pytest + +import spack.container.writers as writers + + +@pytest.fixture +def singularity_configuration(minimal_configuration): + minimal_configuration['spack']['container']['format'] = 'singularity' + return minimal_configuration + + +def test_ensure_render_works(singularity_configuration): + container_config = singularity_configuration['spack']['container'] + assert container_config['format'] == 'singularity' + # Here we just want to ensure that nothing is raised + writer = writers.create(singularity_configuration) + writer() + + +@pytest.mark.parametrize('properties,expected', [ + ({'runscript': '/opt/view/bin/h5ls'}, + {'runscript': '/opt/view/bin/h5ls', + 'startscript': '', + 'test': '', + 'help': ''}) +]) +def test_singularity_specific_properties( + properties, expected, singularity_configuration +): + # Set the property in the configuration + container_config = singularity_configuration['spack']['container'] + for name, value in properties.items(): + container_config.setdefault('singularity', {})[name] = value + + # Assert the properties return the expected values + writer = writers.create(singularity_configuration) + for name, value in expected.items(): + assert getattr(writer, name) == value diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index b408d0b2344..623e9fba738 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -313,7 +313,7 @@ _spack() { then SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars" else - SPACK_COMPREPLY="activate add arch blame bootstrap build build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config configure create 
deactivate debug dependencies dependents deprecate dev-build diy docs edit env extensions fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test uninstall unload upload-s3 url verify versions view" + SPACK_COMPREPLY="activate add arch blame bootstrap build build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config configure containerize create deactivate debug dependencies dependents deprecate dev-build diy docs edit env extensions fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test uninstall unload upload-s3 url verify versions view" fi } @@ -628,6 +628,10 @@ _spack_configure() { fi } +_spack_containerize() { + SPACK_COMPREPLY="-h --help" +} + _spack_create() { if $list_options then diff --git a/share/spack/templates/container/Dockerfile b/share/spack/templates/container/Dockerfile new file mode 100644 index 00000000000..740f46e9ee9 --- /dev/null +++ b/share/spack/templates/container/Dockerfile @@ -0,0 +1,51 @@ +# Build stage with Spack pre-installed and ready to be used +FROM {{ build.image }}:{{ build.tag }} as builder + +# What we want to install and how we want to install it +# is specified in a manifest file (spack.yaml) +RUN mkdir {{ paths.environment }} \ +{{ manifest }} > {{ paths.environment }}/spack.yaml + +# Install the software, remove unecessary deps +RUN cd {{ paths.environment }} && spack install && spack gc -y +{% if strip %} + +# Strip all the binaries +RUN find -L {{ paths.view }}/* -type f -exec readlink -f '{}' \; | \ + xargs file -i | \ + grep 'charset=binary' | \ + grep 'x-executable\|x-archive\|x-sharedlib' | \ + awk -F: '{print $1}' | xargs strip -s +{% endif %} + +# Modifications to the environment that 
are necessary to run +RUN cd {{ paths.environment }} && \ + spack env activate --sh -d . >> /etc/profile.d/z10_spack_environment.sh + +{% if extra_instructions.build %} +{{ extra_instructions.build }} +{% endif %} + +# Bare OS image to run the installed executables +FROM {{ run.image }} + +COPY --from=builder {{ paths.environment }} {{ paths.environment }} +COPY --from=builder {{ paths.store }} {{ paths.store }} +COPY --from=builder {{ paths.view }} {{ paths.view }} +COPY --from=builder /etc/profile.d/z10_spack_environment.sh /etc/profile.d/z10_spack_environment.sh + +{% if os_packages %} +RUN {{ os_packages.update }} \ + && {{ os_packages.install }}{% for pkg in os_packages.list %} {{ pkg }}{% endfor %} \ + && {{ os_packages.clean }} +{% endif %} + +{% if extra_instructions.final %} +{{ extra_instructions.final }} +{% endif %} + +{% for label, value in labels.items() %} +LABEL "{{ label }}"="{{ value }}" +{% endfor %} + +ENTRYPOINT ["/bin/bash", "--rcfile", "/etc/profile", "-l"] diff --git a/share/spack/templates/container/singularity.def b/share/spack/templates/container/singularity.def new file mode 100644 index 00000000000..616e677f966 --- /dev/null +++ b/share/spack/templates/container/singularity.def @@ -0,0 +1,90 @@ +Bootstrap: docker +From: {{ build.image }}:{{ build.tag }} +Stage: build + +%post + # Create the manifest file for the installation in /opt/spack-environment + mkdir {{ paths.environment }} && cd {{ paths.environment }} + cat << EOF > spack.yaml +{{ manifest }} +EOF + + # Install all the required software + . /opt/spack/share/spack/setup-env.sh + spack install + spack gc -y + spack env activate --sh -d . 
>> {{ paths.environment }}/environment_modifications.sh +{% if strip %} + + # Strip the binaries to reduce the size of the image + find -L {{ paths.view }}/* -type f -exec readlink -f '{}' \; | \ + xargs file -i | \ + grep 'charset=binary' | \ + grep 'x-executable\|x-archive\|x-sharedlib' | \ + awk -F: '{print $1}' | xargs strip -s +{% endif %} +{% if extra_instructions.build %} +{{ extra_instructions.build }} +{% endif %} + + +{% if apps %} +{% for application, help_text in apps.items() %} + +%apprun {{ application }} + exec /opt/view/bin/{{ application }} "$@" + +%apphelp {{ application }} + {{help_text }} +{% endfor %} +{% endif %} + +Bootstrap: docker +From: {{ run.image }} +Stage: final + +%files from build + {{ paths.environment }} /opt + {{ paths.store }} /opt + {{ paths.view }} /opt + {{ paths.environment }}/environment_modifications.sh {{ paths.environment }}/environment_modifications.sh + +%post +{% if os_packages.list %} + # Update, install and cleanup of system packages + {{ os_packages.update }} + {{ os_packages.install }} {{ os_packages.list | join | replace('\n', ' ') }} + {{ os_packages.clean }} +{% endif %} + # Modify the environment without relying on sourcing shell specific files at startup + cat {{ paths.environment }}/environment_modifications.sh >> $SINGULARITY_ENVIRONMENT +{% if extra_instructions.final %} +{{ extra_instructions.final }} +{% endif %} + +{% if runscript %} +%runscript +{{ runscript }} +{% endif %} + +{% if startscript %} +%startscript +{{ startscript }} +{% endif %} + +{% if test %} +%test +{{ test }} +{% endif %} + +{% if help %} +%help +{{ help }} +{% endif %} + +{% if labels %} +%labels +{% for label, value in labels.items() %} + {{ label }} {{ value }} +{% endfor %} +{% endif %} \ No newline at end of file From 09e318fc84de2abbc39c6b40aa325d34964bf9eb Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 31 Jan 2020 12:31:14 -0600 Subject: [PATCH 054/178] Document how to use Spack to replace Homebrew/Conda (#13083) * Document how to use Spack to replace Homebrew/Conda * Initial draft; can iterate more as features become available --- lib/spack/docs/workflows.rst | 242 +++++++++++++++++++++++++++++++++++ 1 file changed, 242 insertions(+) diff --git a/lib/spack/docs/workflows.rst b/lib/spack/docs/workflows.rst index 914f84041ba..b329a0205c8 100644 --- a/lib/spack/docs/workflows.rst +++ b/lib/spack/docs/workflows.rst @@ -1095,6 +1095,248 @@ or filesystem views. However, it has some drawbacks: integrate Spack explicitly in their workflow. Not all users are willing to do this. +------------------------------------- +Using Spack to Replace Homebrew/Conda +------------------------------------- + +Spack is an incredibly powerful package manager, designed for supercomputers +where users have diverse installation needs. But Spack can also be used to +handle simple single-user installations on your laptop. Most macOS users are +already familiar with package managers like Homebrew and Conda, where all +installed packages are symlinked to a single central location like ``/usr/local``. +In this section, we will show you how to emulate the behavior of Homebrew/Conda +using :ref:`environments`! + +^^^^^ +Setup +^^^^^ + +First, let's create a new environment. We'll assume that Spack is already set up +correctly, and that you've already sourced the setup script for your shell. +To create a new environment, simply run: + +.. code-block:: console + + $ spack env create myenv + ==> Updating view at /Users/me/spack/var/spack/environments/myenv/.spack-env/view + ==> Created environment 'myenv' in /Users/me/spack/var/spack/environments/myenv + $ spack env activate myenv + +Here, *myenv* can be anything you want to name your environment. Next, we can add +a list of packages we would like to install into our environment. 
Let's say we +want a newer version of Bash than the one that comes with macOS, and we want a +few Python libraries. We can run: + +.. code-block:: console + + $ spack add bash + ==> Adding bash to environment myenv + ==> Updating view at /Users/me/spack/var/spack/environments/myenv/.spack-env/view + $ spack add python@3: + ==> Adding python@3: to environment myenv + ==> Updating view at /Users/me/spack/var/spack/environments/myenv/.spack-env/view + $ spack add py-numpy py-scipy py-matplotlib + ==> Adding py-numpy to environment myenv + ==> Adding py-scipy to environment myenv + ==> Adding py-matplotlib to environment myenv + ==> Updating view at /Users/me/spack/var/spack/environments/myenv/.spack-env/view + +Each package can be listed on a separate line, or combined into a single line. +Notice that we're explicitly asking for Python 3 here. You can use any spec +you would normally use on the command line with other Spack commands. + +Next, we want to manually configure a couple of things. In the ``myenv`` +directory, we can find the ``spack.yaml`` that actually defines our environment. + +.. code-block:: console + + $ vim ~/spack/var/spack/environments/myenv/spack.yaml + +.. code-block:: yaml + + # This is a Spack Environment file. + # + # It describes a set of packages to be installed, along with + # configuration settings. + spack: + # add package specs to the `specs` list + specs: [bash, 'python@3:', py-numpy, py-scipy, py-matplotlib] + view: + default: + root: /Users/me/spack/var/spack/environments/myenv/.spack-env/view + projections: {} + config: {} + mirrors: {} + modules: + enable: [] + packages: {} + repos: [] + upstreams: {} + definitions: [] + concretization: separately + +You can see the packages we added earlier in the ``specs:`` section. If you +ever want to add more packages, you can either use ``spack add`` or manually +edit this file. + +We also need to change the ``concretization:`` option. 
By default, Spack +concretizes each spec *separately*, allowing multiple versions of the same +package to coexist. Since we want a single consistent environment, we want to +concretize all of the specs *together*. + +Here is what your ``spack.yaml`` looks like with these new settings, and with +some of the sections we don't plan on using removed: + +.. code-block:: diff + + spack: + - specs: [bash, 'python@3:', py-numpy, py-scipy, py-matplotlib] + + specs: + + - bash + + - 'python@3:' + + - py-numpy + + - py-scipy + + - py-matplotlib + - view: + - default: + - root: /Users/me/spack/var/spack/environments/myenv/.spack-env/view + - projections: {} + + view: /Users/me/spack/var/spack/environments/myenv/.spack-env/view + - config: {} + - mirrors: {} + - modules: + - enable: [] + - packages: {} + - repos: [] + - upstreams: {} + - definitions: [] + + concretization: together + - concretization: separately + +"""""""""""""""" +Symlink location +"""""""""""""""" + +In the ``spack.yaml`` file above, you'll notice that by default, Spack symlinks +all installations to ``/Users/me/spack/var/spack/environments/myenv/.spack-env/view``. +You can actually change this to any directory you want. For example, Homebrew +uses ``/usr/local``, while Conda uses ``/Users/me/anaconda``. In order to access +files in these locations, you need to update ``PATH`` and other environment variables +to point to them. Activating the Spack environment does this automatically, but +you can also manually set them in your ``.bashrc``. + +.. warning:: + + There are several reasons why you shouldn't use ``/usr/local``: + + 1. If you are on macOS 10.11+ (El Capitan and newer), Apple makes it hard + for you. You may notice permissions issues on ``/usr/local`` due to their + `System Integrity Protection `_. + By default, users don't have permissions to install anything in ``/usr/local``, + and you can't even change this using ``sudo chown`` or ``sudo chmod``. + 2. 
Other package managers like Homebrew will try to install things to the + same directory. If you plan on using Homebrew in conjunction with Spack, + don't symlink things to ``/usr/local``. + 3. If you are on a shared workstation, or don't have sudo priveleges, you + can't do this. + + If you still want to do this anyway, there are several ways around SIP. + You could disable SIP by booting into recovery mode and running + ``csrutil disable``, but this is not recommended, as it can open up your OS + to security vulnerabilities. Another technique is to run ``spack concretize`` + and ``spack install`` using ``sudo``. This is also not recommended. + + The safest way I've found is to create your installation directories using + sudo, then change ownership back to the user like so: + + .. code-block:: bash + + for directory in .spack bin contrib include lib man share + do + sudo mkdir -p /usr/local/$directory + sudo chown $(id -un):$(id -gn) /usr/local/$directory + done + + Depending on the packages you install in your environment, the exact list of + directories you need to create may vary. You may also find some packages + like Java libraries that install a single file to the installation prefix + instead of in a subdirectory. In this case, the action is the same, just replace + ``mkdir -p`` with ``touch`` in the for-loop above. + + But again, it's safer just to use the default symlink location. + + +^^^^^^^^^^^^ +Installation +^^^^^^^^^^^^ + +To actually concretize the environment, run: + +.. code-block:: console + + $ spack concretize + +This will tell you which if any packages are already installed, and alert you +to any conflicting specs. + +To actually install these packages and symlink them to your ``view:`` +directory, simply run: + +.. code-block:: console + + $ spack install + +Now, when you type ``which python3``, it should find the one you just installed. 
+ +In order to change the default shell to our newer Bash installation, we first +need to add it to this list of acceptable shells. Run: + +.. code-block:: console + + $ sudo vim /etc/shells + +and add the absolute path to your bash executable. Then run: + +.. code-block:: console + + $ chsh -s /path/to/bash + +Now, when you log out and log back in, ``echo $SHELL`` should point to the +newer version of Bash. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Updating Installed Packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Let's say you upgraded to a new version of macOS, or a new version of Python +was released, and you want to rebuild your entire software stack. To do this, +simply run the following commands: + +.. code-block:: console + + $ spack env activate myenv + $ spack concretize --force + $ spack install + +The ``--force`` flag tells Spack to overwrite its previous concretization +decisions, allowing you to choose a new version of Python. If any of the new +packages like Bash are already installed, ``spack install`` won't re-install +them, it will keep the symlinks in place. + +^^^^^^^^^^^^^^ +Uninstallation +^^^^^^^^^^^^^^ + +If you decide that Spack isn't right for you, uninstallation is simple. +Just run: + +.. code-block:: console + + $ spack env activate myenv + $ spack uninstall --all + +This will uninstall all packages in your environment and remove the symlinks. 
+ ------------------------ Using Spack on Travis-CI ------------------------ From 47ee690076508edc48740bac16b529a06d0be1a0 Mon Sep 17 00:00:00 2001 From: Christoph Junghans Date: Fri, 31 Jan 2020 12:35:20 -0700 Subject: [PATCH 055/178] portage: fix compile on icc (#14707) --- .../packages/portage/p_intel_ice.patch | 24 +++++++++++++++++++ .../repos/builtin/packages/portage/package.py | 3 +++ 2 files changed, 27 insertions(+) create mode 100644 var/spack/repos/builtin/packages/portage/p_intel_ice.patch diff --git a/var/spack/repos/builtin/packages/portage/p_intel_ice.patch b/var/spack/repos/builtin/packages/portage/p_intel_ice.patch new file mode 100644 index 00000000000..98832ea8b77 --- /dev/null +++ b/var/spack/repos/builtin/packages/portage/p_intel_ice.patch @@ -0,0 +1,24 @@ +--- portage.orig/portage/search/pairs.cc 2018-07-16 13:42:12.000000000 -0600 ++++ portage/portage/search/pairs.cc 2020-01-09 15:32:43.289085000 -0700 +@@ -323,8 +323,8 @@ + // check for completely outside source boxes + bool outside = false; + for (size_t m=0;m=cminmax[1][m]) outside=true; ++ if (y[m][j]<=cminmax[0][m]) { outside=true; break; } ++ if (y[m][j]>=cminmax[1][m]) { outside=true; break; } + } + if (outside) return pairlist; + +@@ -347,8 +347,8 @@ + // check that y is contained + bool inside = true; + for(size_t m=0; m= xur[m]) inside = false; ++ if (y[m][j] <= xll[m]) { inside = false; break; } ++ if (y[m][j] >= xur[m]) { inside = false; break; } + } + + // add pair: put x's in this y-cell onto neighbor list, if inside diff --git a/var/spack/repos/builtin/packages/portage/package.py b/var/spack/repos/builtin/packages/portage/package.py index 075c3b07423..097ec9aab1c 100644 --- a/var/spack/repos/builtin/packages/portage/package.py +++ b/var/spack/repos/builtin/packages/portage/package.py @@ -31,6 +31,9 @@ class Portage(CMakePackage): # fixed in version above 1.2.2 patch('rel-with-deb-info.patch', when='@1.2.2') + # intel/19.0.4 got an ICE (internal compiler error) compiling 
pairs.cc + patch('p_intel_ice.patch', when='@1.2.2') + variant('mpi', default=True, description='Support MPI') depends_on("cmake@3.1:", type='build') From d83309493ff8d596e6c6d6776111a815dc0da257 Mon Sep 17 00:00:00 2001 From: Christoph Junghans Date: Fri, 31 Jan 2020 12:35:46 -0700 Subject: [PATCH 056/178] superlu-dist: enforce OpenMP=OFF (#14708) --- var/spack/repos/builtin/packages/superlu-dist/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py index c9f384a9156..989158356d8 100644 --- a/var/spack/repos/builtin/packages/superlu-dist/package.py +++ b/var/spack/repos/builtin/packages/superlu-dist/package.py @@ -72,6 +72,7 @@ def cmake_args(self): args.append('-Denable_openmp=ON') else: args.append('-Denable_openmp=OFF') + args.append('-DCMAKE_DISABLE_FIND_PACKAGE_OpenMP=ON') if '+shared' in spec: args.append('-DBUILD_SHARED_LIBS:BOOL=ON') From c029c8ff89b3567ef2fde8f238add57bf1314423 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Thu, 30 Jan 2020 19:07:24 -0800 Subject: [PATCH 057/178] `spack -V` is now more descriptive for dev branches `spack -V` previously always returned the version of spack from `spack.spack_version`. This gives us a general idea of what version users are on, but if they're on `develop` or on some branch, we have to ask more questions. This PR makes `spack -V` check whether this instance of Spack is a git repository, and if it is, it appends useful information from `git describe --tags` to the version. Specifically, it adds: - number of commits since the last release tag - abbreviated (but unique) commit hash So, if you're on `develop` you might get something like this: $ spack -V 0.13.3-912-3519a1762 This means you're on commit 3519a1762, which is 912 commits ahead of the 0.13.3 release. 
If you are on a release branch, or if you are using a tarball of Spack, you'll get the usual `spack.spack_version`: $ spack -V 0.13.3 This should help when asking users what version they are on, since a lot of people use the `develop` branch. --- lib/spack/spack/main.py | 32 +++++++++++++++++- lib/spack/spack/test/main.py | 63 ++++++++++++++++++++++++++++++++++++ 2 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 lib/spack/spack/test/main.py diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 4ce4ae331e0..4386f504353 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -13,6 +13,7 @@ import sys import re import os +import os.path import inspect import pstats import argparse @@ -35,6 +36,7 @@ import spack.store import spack.util.debug import spack.util.path +import spack.util.executable as exe from spack.error import SpackError @@ -107,6 +109,34 @@ def add_all_commands(parser): parser.add_command(cmd) +def get_version(): + """Get a descriptive version of this instance of Spack. + + If this is a git repository, and if it is not on a release tag, + return a string like: + + release_version-commits_since_release-commit + + If we *are* at a release tag, or if this is not a git repo, return + the real spack release number (e.g., 0.13.3). + + """ + git_path = os.path.join(spack.paths.prefix, ".git") + if os.path.exists(git_path): + git = exe.which("git") + if git: + desc = git("-C", spack.paths.prefix, "describe", "--tags", + output=str, fail_on_error=False) + + if git.returncode == 0: + match = re.match(r"v([^-]+)-([^-]+)-g([a-f\d]+)", desc) + if match: + v, n, commit = match.groups() + return "%s-%s-%s" % (v, n, commit) + + return spack.spack_version + + def index_commands(): """create an index of commands by section for this help level""" index = {} @@ -679,7 +709,7 @@ def main(argv=None): # -h, -H, and -V are special as they do not require a command, but # all the other options do nothing without a command. 
if args.version: - print(spack.spack_version) + print(get_version()) return 0 elif args.help: sys.stdout.write(parser.format_help(level=args.help)) diff --git a/lib/spack/spack/test/main.py b/lib/spack/spack/test/main.py new file mode 100644 index 00000000000..c35a6e195b9 --- /dev/null +++ b/lib/spack/spack/test/main.py @@ -0,0 +1,63 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +import os + +import llnl.util.filesystem as fs + +import spack.paths +from spack.main import get_version, main + + +def test_get_version_no_match_git(tmpdir, working_env): + git = str(tmpdir.join("git")) + with open(git, "w") as f: + f.write("""#!/bin/sh +echo v0.13.3 +""") + fs.set_executable(git) + + os.environ["PATH"] = str(tmpdir) + assert spack.spack_version == get_version() + + +def test_get_version_match_git(tmpdir, working_env): + git = str(tmpdir.join("git")) + with open(git, "w") as f: + f.write("""#!/bin/sh +echo v0.13.3-912-g3519a1762 +""") + fs.set_executable(git) + + os.environ["PATH"] = str(tmpdir) + assert "0.13.3-912-3519a1762" == get_version() + + +def test_get_version_no_repo(tmpdir, monkeypatch): + monkeypatch.setattr(spack.paths, "prefix", str(tmpdir)) + assert spack.spack_version == get_version() + + +def test_get_version_no_git(tmpdir, working_env): + os.environ["PATH"] = str(tmpdir) + assert spack.spack_version == get_version() + + +def test_main_calls_get_version(tmpdir, capsys, working_env): + os.environ["PATH"] = str(tmpdir) + main(["-V"]) + assert spack.spack_version == capsys.readouterr()[0].strip() + + +def test_get_version_bad_git(tmpdir, working_env): + bad_git = str(tmpdir.join("git")) + with open(bad_git, "w") as f: + f.write("""#!/bin/sh +exit 1 +""") + fs.set_executable(bad_git) + + os.environ["PATH"] = str(tmpdir) + assert spack.spack_version == get_version() From 
b7ee2d02e4c6f8d71deb3a4073a67f31cd7bef3c Mon Sep 17 00:00:00 2001 From: Matthieu Dorier Date: Fri, 31 Jan 2020 20:01:17 +0000 Subject: [PATCH 058/178] Fixed DIY package missing MPI dependency (#14704) --- var/spack/repos/builtin/packages/diy/package.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/diy/package.py b/var/spack/repos/builtin/packages/diy/package.py index 72ea5fcfed6..efd581f1bac 100644 --- a/var/spack/repos/builtin/packages/diy/package.py +++ b/var/spack/repos/builtin/packages/diy/package.py @@ -16,6 +16,10 @@ class Diy(CMakePackage): version('3.5.0', sha256='b3b5490441d521b6e9b33471c782948194bf95c7c3df3eb97bc5cf4530b91576') version('master', branch='master') + depends_on('mpi') + def cmake_args(self): - args = ['-Dbuild_examples=off', '-Dbuild_tests=off'] + args = ['-Dbuild_examples=off', + '-Dbuild_tests=off', + '-DCMAKE_CXX_COMPILER=%s' % self.spec['mpi'].mpicxx] return args From 6f4c90378ac7f0984bc63fa8cdf467c433528f9c Mon Sep 17 00:00:00 2001 From: "Dr. Christian Tacke" <58549698+ChristianTackeGSI@users.noreply.github.com> Date: Fri, 31 Jan 2020 21:01:58 +0100 Subject: [PATCH 059/178] [libfabric@1.9.0] Fix C++ header usage (#14703) Fix from libfabric's git for 1.9.0. Compiling C++ software against libfabric@1.9.0 doesn't work without this patch. 
See: https://github.com/ofiwg/libfabric/commit/2e95b0efd85fa8a3d814128e34ec57ffd357460e --- var/spack/repos/builtin/packages/libfabric/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/libfabric/package.py b/var/spack/repos/builtin/packages/libfabric/package.py index 5e4255b7cfb..da5edc48f48 100644 --- a/var/spack/repos/builtin/packages/libfabric/package.py +++ b/var/spack/repos/builtin/packages/libfabric/package.py @@ -59,6 +59,12 @@ class Libfabric(AutotoolsPackage): variant('kdreg', default=False, description='Enable kdreg on supported Cray platforms') + # For version 1.9.0: + # headers: fix forward-declaration of enum fi_collective_op with C++ + patch('https://github.com/ofiwg/libfabric/commit/2e95b0efd85fa8a3d814128e34ec57ffd357460e.patch', + sha256='71f06e8bf0adeccd425b194ac524e4d596469e9dab9e7a4f8bb209e6b9a454f4', + when='@1.9.0') + depends_on('rdma-core', when='fabrics=verbs') depends_on('opa-psm2', when='fabrics=psm2') depends_on('psm', when='fabrics=psm') From 04e6fd60f3529809a81d6ca0a5ea5ff5f50d61ff Mon Sep 17 00:00:00 2001 From: Themos Tsikas Date: Fri, 31 Jan 2020 20:03:31 +0000 Subject: [PATCH 060/178] NAG Fortran Compiler, 6.0 dropped (unavailable), 7.0 added (#14691) * NAG Fortran Compiler, 6.0 dropped (unavailable), 7.0 added * Update package.py * Update package.py --- var/spack/repos/builtin/packages/nag/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/nag/package.py b/var/spack/repos/builtin/packages/nag/package.py index 7df19fda8a1..8c4309198cb 100644 --- a/var/spack/repos/builtin/packages/nag/package.py +++ b/var/spack/repos/builtin/packages/nag/package.py @@ -10,10 +10,11 @@ class Nag(Package): """The NAG Fortran Compiler.""" homepage = "http://www.nag.com/nagware/np.asp" + maintainers = ['ThemosTsikas'] + version('7.0', sha256='ea83075cde9e625083b85be04426b0536b2da32db3cfd0c3eb3f2cf8253a2030') version('6.2', 
sha256='e22f70f52949c4eb5526229c13920c924af5254d07a584cf54fefecd130fd29c') version('6.1', sha256='32580e0004e6798abf1fa52f0070281b28abeb0da2387530a4cc41218e813c7c') - version('6.0', sha256='d5a326777a20303626b121da58522a122fcb1e3b4f2fcd657d9848e7b39fe7f8') # Licensing license_required = True From 0c9ac8d8d1dfed110af4812b0fb3885e31a1959f Mon Sep 17 00:00:00 2001 From: "Kelly (KT) Thompson" Date: Fri, 31 Jan 2020 13:03:54 -0700 Subject: [PATCH 061/178] draco: update versions (#14690) * draco: update versions + Added versions 7.3.0 and 7.4.0. + Change several variants to be default TRUE since most consumers need these variants enabled (eospac, lapack, parmetis, superlu-dist). Change variant name for `+superlu_dist` to use hyphen instead of underscore. This makes the variant name consistent with the spackage name for `superlu-dist`. + Clean up `depends_on` instructions and avoid specifying `type` when possible. + Provide patch files that are necessary for some machines (mostly Cray machines). * Remove trailing whitespace. * Revert variant name to use underscore. * add maintainer information. 
--- .../builtin/packages/draco/d710-python2.patch | 62 +++++++++++++++++ .../repos/builtin/packages/draco/d710.patch | 62 +++++++++++++++++ .../repos/builtin/packages/draco/d730.patch | 34 ++++++++++ .../repos/builtin/packages/draco/d740.patch | 21 ++++++ .../repos/builtin/packages/draco/package.py | 68 ++++++++++--------- 5 files changed, 214 insertions(+), 33 deletions(-) create mode 100644 var/spack/repos/builtin/packages/draco/d710-python2.patch create mode 100644 var/spack/repos/builtin/packages/draco/d710.patch create mode 100644 var/spack/repos/builtin/packages/draco/d730.patch create mode 100644 var/spack/repos/builtin/packages/draco/d740.patch diff --git a/var/spack/repos/builtin/packages/draco/d710-python2.patch b/var/spack/repos/builtin/packages/draco/d710-python2.patch new file mode 100644 index 00000000000..7f50f5d782c --- /dev/null +++ b/var/spack/repos/builtin/packages/draco/d710-python2.patch @@ -0,0 +1,62 @@ +diff --git a/config/ApplicationUnitTest.cmake b/config/ApplicationUnitTest.cmake +index a0a79858..0c47b72a 100644 +--- a/config/ApplicationUnitTest.cmake ++++ b/config/ApplicationUnitTest.cmake +@@ -249,7 +249,7 @@ macro( aut_register_test ) + endif(VERBOSE_DEBUG) + + # Look for python, which is used to drive application unit tests +- if( NOT PYTHONINTERP_FOUND ) ++ if( NOT Python_Interpreter_FOUND ) + # python should have been found when vendor_libraries.cmake was run. + message( FATAL_ERROR "Draco requires python. 
Python not found in PATH.") + endif() +@@ -289,7 +289,7 @@ macro( aut_register_test ) + if (${PYTHON_TEST}) + add_test( + NAME ${ctestname_base}${argname} +- COMMAND "${PYTHON_EXECUTABLE}" ++ COMMAND "${Python_EXECUTABLE}" + ${aut_DRIVER} + ${SHARED_ARGUMENTS} + ) +diff --git a/config/draco-config-install.cmake.in b/config/draco-config-install.cmake.in +index c5bf1c75..a16f72f4 100644 +--- a/config/draco-config-install.cmake.in ++++ b/config/draco-config-install.cmake.in +@@ -107,8 +107,9 @@ set( WITH_CUDA "@WITH_CUDA@" ) + #endif() + + # Python +-set( PYTHONINTERP_FOUND "@PYTHONINTERP_FOUND@" ) +-set( PYTHON_EXECUTABLE "@PYTHON_EXECUTABLE@" ) ++set( Python_FOUND "@Python_FOUND@" ) ++set( Python_Interpreter_FOUND "@Python_Interpreter_FOUND@" ) ++set( Python_EXECUTABLE "@Python_EXECUTABLE@" ) + + ## --------------------------------------------------------------------------- + ## Set useful general variables +diff --git a/config/vendor_libraries.cmake b/config/vendor_libraries.cmake +index c3e079bc..6b393eb4 100644 +--- a/config/vendor_libraries.cmake ++++ b/config/vendor_libraries.cmake +@@ -16,7 +16,7 @@ include( setupMPI ) # defines the macros setupMPILibrariesUnix|Windows + macro( setupPython ) + + message( STATUS "Looking for Python...." ) +- find_package(PythonInterp 2.7 QUIET REQUIRED) ++ find_package(Python 2.7 QUIET REQUIRED COMPONENTS Interpreter) + # PYTHONINTERP_FOUND - Was the Python executable found + # PYTHON_EXECUTABLE - path to the Python interpreter + set_package_properties( PythonInterp PROPERTIES +@@ -25,8 +25,8 @@ macro( setupPython ) + TYPE REQUIRED + PURPOSE "Required for running tests and accessing features that rely on matplotlib." 
+ ) +- if( PYTHONINTERP_FOUND ) +- message( STATUS "Looking for Python....found ${PYTHON_EXECUTABLE}" ) ++ if( Python_Interpreter_FOUND ) ++ message( STATUS "Looking for Python....found ${Python_EXECUTABLE}" ) + else() + message( STATUS "Looking for Python....not found" ) + endif() diff --git a/var/spack/repos/builtin/packages/draco/d710.patch b/var/spack/repos/builtin/packages/draco/d710.patch new file mode 100644 index 00000000000..1f4e8d56565 --- /dev/null +++ b/var/spack/repos/builtin/packages/draco/d710.patch @@ -0,0 +1,62 @@ +diff --git a/config/ApplicationUnitTest.cmake b/config/ApplicationUnitTest.cmake +index a0a79858..0c47b72a 100644 +--- a/config/ApplicationUnitTest.cmake ++++ b/config/ApplicationUnitTest.cmake +@@ -249,7 +249,7 @@ macro( aut_register_test ) + endif(VERBOSE_DEBUG) + + # Look for python, which is used to drive application unit tests +- if( NOT PYTHONINTERP_FOUND ) ++ if( NOT Python_Interpreter_FOUND ) + # python should have been found when vendor_libraries.cmake was run. + message( FATAL_ERROR "Draco requires python. 
Python not found in PATH.") + endif() +@@ -289,7 +289,7 @@ macro( aut_register_test ) + if (${PYTHON_TEST}) + add_test( + NAME ${ctestname_base}${argname} +- COMMAND "${PYTHON_EXECUTABLE}" ++ COMMAND "${Python_EXECUTABLE}" + ${aut_DRIVER} + ${SHARED_ARGUMENTS} + ) +diff --git a/config/draco-config-install.cmake.in b/config/draco-config-install.cmake.in +index c5bf1c75..a16f72f4 100644 +--- a/config/draco-config-install.cmake.in ++++ b/config/draco-config-install.cmake.in +@@ -107,8 +107,9 @@ set( WITH_CUDA "@WITH_CUDA@" ) + #endif() + + # Python +-set( PYTHONINTERP_FOUND "@PYTHONINTERP_FOUND@" ) +-set( PYTHON_EXECUTABLE "@PYTHON_EXECUTABLE@" ) ++set( Python_FOUND "@Python_FOUND@" ) ++set( Python_Interpreter_FOUND "@Python_Interpreter_FOUND@" ) ++set( Python_EXECUTABLE "@Python_EXECUTABLE@" ) + + ## --------------------------------------------------------------------------- + ## Set useful general variables +diff --git a/config/vendor_libraries.cmake b/config/vendor_libraries.cmake +index c3e079bc..6b393eb4 100644 +--- a/config/vendor_libraries.cmake ++++ b/config/vendor_libraries.cmake +@@ -16,7 +16,7 @@ include( setupMPI ) # defines the macros setupMPILibrariesUnix|Windows + macro( setupPython ) + + message( STATUS "Looking for Python...." ) +- find_package(PythonInterp 2.7 QUIET REQUIRED) ++ find_package(Python 3.5 QUIET REQUIRED COMPONENTS Interpreter) + # PYTHONINTERP_FOUND - Was the Python executable found + # PYTHON_EXECUTABLE - path to the Python interpreter + set_package_properties( PythonInterp PROPERTIES +@@ -25,8 +25,8 @@ macro( setupPython ) + TYPE REQUIRED + PURPOSE "Required for running tests and accessing features that rely on matplotlib." 
+ ) +- if( PYTHONINTERP_FOUND ) +- message( STATUS "Looking for Python....found ${PYTHON_EXECUTABLE}" ) ++ if( Python_Interpreter_FOUND ) ++ message( STATUS "Looking for Python....found ${Python_EXECUTABLE}" ) + else() + message( STATUS "Looking for Python....not found" ) + endif() diff --git a/var/spack/repos/builtin/packages/draco/d730.patch b/var/spack/repos/builtin/packages/draco/d730.patch new file mode 100644 index 00000000000..926b205e640 --- /dev/null +++ b/var/spack/repos/builtin/packages/draco/d730.patch @@ -0,0 +1,34 @@ +diff --git a/config/platform_checks.cmake b/config/platform_checks.cmake +index c9841b0d..84bf07f5 100644 +--- a/config/platform_checks.cmake ++++ b/config/platform_checks.cmake +@@ -85,6 +85,7 @@ macro( query_craype ) + set( CRAY_PE ON CACHE BOOL + "Are we building in a Cray Programming Environment?") + ++ if (FALSE) + # We expect developers to use the Cray compiler wrappers (especially in + # setupMPI.cmake). See also + # https://cmake.org/cmake/help/latest/module/FindMPI.html +@@ -111,6 +112,7 @@ macro( query_craype ) + "Otherwise please email this error message and other related information to" + " draco@lanl.gov.\n" ) + endif() ++ endif() + message( STATUS + "Looking to see if we are building in a Cray Environment..." 
+ "found version $ENV{CRAYPE_VERSION}.") + +diff --git a/config/setupMPI.cmake b/config/setupMPI.cmake +index da522499..5b5e27c5 100644 +--- a/config/setupMPI.cmake ++++ b/config/setupMPI.cmake +@@ -51,7 +51,7 @@ function( setMPIflavorVer ) + if( DEFINED ENV{CRAY_MPICH2_VER} ) + set( MPI_VERSION $ENV{CRAY_MPICH2_VER} ) + endif() +- elseif( ${MPI_FLAVOR} STREQUAL "spectrum" ) ++ elseif( "${MPI_FLAVOR}" STREQUAL "spectrum" ) + if( DEFINED ENV{LMOD_MPI_VERSION} ) + set( LMOD_MPI_VERSION $ENV{LMOD_MPI_VERSION} ) + endif() diff --git a/var/spack/repos/builtin/packages/draco/d740.patch b/var/spack/repos/builtin/packages/draco/d740.patch new file mode 100644 index 00000000000..684158474a9 --- /dev/null +++ b/var/spack/repos/builtin/packages/draco/d740.patch @@ -0,0 +1,21 @@ +diff --git a/config/platform_checks.cmake b/config/platform_checks.cmake +index c9841b0d..aeecc767 100644 +--- a/config/platform_checks.cmake ++++ b/config/platform_checks.cmake +@@ -88,6 +88,8 @@ macro( query_craype ) + # We expect developers to use the Cray compiler wrappers (especially in + # setupMPI.cmake). See also + # https://cmake.org/cmake/help/latest/module/FindMPI.html ++ if( NOT "$ENV{CXX}" MATCHES "/lib/spack/env/" ) ++ # skip this check if building from within spack. + if( NOT "$ENV{CXX}" MATCHES "CC$" OR + NOT "$ENV{CC}" MATCHES "cc$" OR + NOT "$ENV{FC}" MATCHES "ftn$" OR +@@ -110,6 +112,7 @@ macro( query_craype ) + " export CRAYPE_LINK_TYPE=dynamic\n" + "Otherwise please email this error message and other related information to" + " draco@lanl.gov.\n" ) ++ endif() + endif() + message( STATUS + "Looking to see if we are building in a Cray Environment..." 
diff --git a/var/spack/repos/builtin/packages/draco/package.py b/var/spack/repos/builtin/packages/draco/package.py index 163d5e2385d..de1e3696b5c 100644 --- a/var/spack/repos/builtin/packages/draco/package.py +++ b/var/spack/repos/builtin/packages/draco/package.py @@ -15,42 +15,52 @@ class Draco(CMakePackage): homepage = "https://github.com/lanl/draco" url = "https://github.com/lanl/Draco/archive/draco-7_1_0.zip" git = "https://github.com/lanl/Draco.git" + maintainers = ['KineticTheory'] version('develop', branch='develop') + version('7.4.0', sha256='61da2c3feace0e92c5410c9e9e613708fdf8954b1367cdc62c415329b0ddab6e') + version('7.3.0', sha256='dc47ef6c1e04769ea177a10fc6ddf506f3e1e8d36eb5d49f4bc38cc509e24f10') version('7.2.0', sha256='ac4eac03703d4b7344fa2390a54140533c5e1f6ea0d59ef1f1d525c434ebe639') - version('7_1_0', sha256='eca6bb86eb930837fb5e09b76c85c200b2c1522267cc66f81f2ec11a8262b5c9') - version('6_25_0', sha256='e27eba44f397e7d111ff9a45b518b186940f75facfc6f318d76bd0e72f987440') - version('6_23_0', sha256='edf20308746c06647087cb4e6ae7656fd057a89091a22bcba8f17a52e28b7849') - version('6_22_0', sha256='4d1ed54944450c4ec7d00d7ba371469506c6985922f48f780bae2580c9335b86') - version('6_21_0', sha256='f1ac88041606cdb1dfddd3bc74db0f1e15d8fc9d0a1eed939c8aa0fa63a85b55') - version('6_20_1', sha256='b1c51000c9557e0818014713fce70d681869c50ed9c4548dcfb2e9219c354ebe') - version('6_20_0', sha256='a6e3142c1c90b09c4ff8057bfee974369b815122b01d1f7b57888dcb9b1128f6') + version('7.1.0', sha256='eca6bb86eb930837fb5e09b76c85c200b2c1522267cc66f81f2ec11a8262b5c9') + version('6.25.0', sha256='e27eba44f397e7d111ff9a45b518b186940f75facfc6f318d76bd0e72f987440') + version('6.23.0', sha256='edf20308746c06647087cb4e6ae7656fd057a89091a22bcba8f17a52e28b7849') + version('6.22.0', sha256='4d1ed54944450c4ec7d00d7ba371469506c6985922f48f780bae2580c9335b86') + version('6.21.0', sha256='f1ac88041606cdb1dfddd3bc74db0f1e15d8fc9d0a1eed939c8aa0fa63a85b55') + version('6.20.1', 
sha256='b1c51000c9557e0818014713fce70d681869c50ed9c4548dcfb2e9219c354ebe') + version('6.20.0', sha256='a6e3142c1c90b09c4ff8057bfee974369b815122b01d1f7b57888dcb9b1128f6') variant('build_type', default='Release', description='CMake build type', values=('Debug', 'Release', 'RelWithDebInfo', 'MinSizeRel')) - variant('eospac', default=False, description='Enable EOSPAC Support') - variant('lapack', default=False, description='Enable LAPACK Wrapper') - variant('parmetis', default=False, description='Enable Parmetis Support') - variant('qt', default=False, description='Enable Qt Support') - variant('superlu_dist', default=False, description='Enable SuperLU-DIST Support') + variant('eospac', default=True, description='Enable EOSPAC support') + variant('lapack', default=True, description='Enable LAPACK wrapper') + variant('libquo', default=True, description='Enable Quo wrapper') + variant('parmetis', default=True, description='Enable Parmetis support') + variant('qt', default=False, description='Enable Qt support') + variant('superlu_dist', default=True, description='Enable SuperLU-DIST support') - depends_on('gsl', type=('build', 'link')) - depends_on('mpi@3:', type=('build', 'link', 'run')) - depends_on('numdiff', type='build') + depends_on('gsl') + depends_on('mpi@3:', type=('build', 'link', 'run')) + depends_on('numdiff', type='build') depends_on('python@2.7:', type=('build', 'run')) - depends_on('random123', type='build') + depends_on('random123', type='build') - depends_on('cmake@3.9:', when='@:6.99', type='build') - depends_on('cmake@3.11:', when='@7.0.0:7.1.9', type='build') - depends_on('cmake@3.14:', when='@7.2:', type='build') - depends_on('eospac@6.3:', when='+eospac', type=('build', 'link')) - depends_on('lapack', when='+lapack', type=('build', 'link')) - depends_on('metis', when='+parmetis', type=('build', 'link')) - depends_on('parmetis', when='+parmetis', type=('build', 'link')) + depends_on('cmake@3.9:', when='@:6.99', type='build') + 
depends_on('cmake@3.11:', when='@7.0.0:7.1.99', type='build') + depends_on('cmake@3.14:', when='@7.2:', type='build') + depends_on('eospac@6.3:', when='+eospac') + depends_on('lapack', when='+lapack') + depends_on('libquo', when='@7.4.0:+libquo') + depends_on('metis', when='+parmetis') + depends_on('parmetis', when='+parmetis') depends_on('qt', when='+qt', type=('build', 'link', 'run')) - depends_on('superlu-dist@:5.99', when='+superlu-dist', - type=('build', 'link')) + depends_on('superlu-dist@:5.99', when='+superlu_dist') + + # Fix python discovery. + patch('d710.patch', when='@7.1.0^python@3:') + patch('d710-python2.patch', when='@7.1.0^python@2.7:2.99') + patch('d730.patch', when='@7.3.0:7.3.99') + patch('d740.patch', when='@7.4.0:7.4.99') def url_for_version(self, version): url = "https://github.com/lanl/Draco/archive/draco-{0}.zip" @@ -63,11 +73,3 @@ def cmake_args(self): '-DBUILD_TESTING={0}'.format('ON' if self.run_tests else 'OFF') ]) return options - - @run_after('build') - @on_package_attributes(run_tests=True) - def check(self): - """Run ctest after building project.""" - - with working_dir(self.build_directory): - ctest() From 412c3361137c883391d33cb80e04db6ebba63042 Mon Sep 17 00:00:00 2001 From: Tim Haines Date: Fri, 31 Jan 2020 17:56:40 -0600 Subject: [PATCH 062/178] boost: Add versions 1.71.0 and 1.72.0, use Clang toolset on Darwin (#14678) * boost: Add versions 1.71.0 and 1.72.0 * Remove 'darwin' target * Add hainest as maintainer --- var/spack/repos/builtin/packages/boost/package.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index 8f5c33556bc..884aaa40b6a 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -22,8 +22,11 @@ class Boost(Package): git = "https://github.com/boostorg/boost.git" list_url = 
"http://sourceforge.net/projects/boost/files/boost/" list_depth = 1 + maintainers = ['hainest'] version('develop', branch='develop', submodules=True) + version('1.72.0', sha256='59c9b274bc451cf91a9ba1dd2c7fdcaf5d60b1b3aa83f2c9fa143417cc660722') + version('1.71.0', sha256='d73a8da01e8bf8c7eda40b4c84915071a8c8a0df4a6734537ddde4a8580524ee') version('1.70.0', sha256='430ae8354789de4fd19ee52f3b1f739e1fba576f0aded0897c3c2bc00fb38778') version('1.69.0', sha256='8f32d4617390d1c2d16f26a27ab60d97807b35440d45891fa340fc2648b04406') version('1.68.0', sha256='7f6130bc3cf65f56a618888ce9d5ea704fa10b462be126ad053e80e553d6d8b7') @@ -205,7 +208,7 @@ def libs(self): # Add option to C/C++ compile commands in clang-linux.jam patch('clang-linux_add_option.patch', when='@1.56.0:1.63.0') - patch('clang-linux_add_option2.patch', when='@:1.55.0') + patch('clang-linux_add_option2.patch', when='@1.47.0:1.55.0') def url_for_version(self, version): if version >= Version('1.63.0'): @@ -216,9 +219,6 @@ def url_for_version(self, version): return url.format(version.dotted, version.underscored) def determine_toolset(self, spec): - if spec.satisfies("platform=darwin"): - return 'darwin' - toolsets = {'g++': 'gcc', 'icpc': 'intel', 'clang++': 'clang', From ab36008635e2c22e40ce608f150475f38a19b772 Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Fri, 31 Jan 2020 20:08:47 -0600 Subject: [PATCH 063/178] binary_distribution: Initialize _cached_specs at the module level and only search the mirrors in get_spec if spec is not in _cached_specs. (#14714) * Initialize _cached_specs at the file level and check for spec in it before searching mirrors in try_download_spec. * Make _cached_specs a set to avoid duplicates * Fix packaging test * Ignore build_cache in stage when spec.yaml files are downloaded. 
--- lib/spack/spack/binary_distribution.py | 16 +++++----------- lib/spack/spack/test/conftest.py | 2 +- lib/spack/spack/test/packaging.py | 2 +- 3 files changed, 7 insertions(+), 13 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index 3e5dc89313e..f1834889b85 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -661,7 +661,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False, # Internal cache for downloaded specs -_cached_specs = None +_cached_specs = set() def try_download_specs(urls=None, force=False): @@ -669,7 +669,6 @@ def try_download_specs(urls=None, force=False): Try to download the urls and cache them ''' global _cached_specs - _cached_specs = [] if urls is None: return {} for link in urls: @@ -687,7 +686,7 @@ def try_download_specs(urls=None, force=False): # we need to mark this spec concrete on read-in. spec = Spec.from_yaml(f) spec._mark_concrete() - _cached_specs.append(spec) + _cached_specs.add(spec) return _cached_specs @@ -701,14 +700,14 @@ def get_spec(spec=None, force=False): if spec is None: return {} specfile_name = tarball_name(spec, '.spec.yaml') - if _cached_specs: - tty.debug("Using previously-retrieved specs") - return _cached_specs if not spack.mirror.MirrorCollection(): tty.debug("No Spack mirrors are currently configured") return {} + if spec in _cached_specs: + return _cached_specs + for mirror in spack.mirror.MirrorCollection().values(): fetch_url_build_cache = url_util.join( mirror.fetch_url, _build_cache_relative_path) @@ -732,7 +731,6 @@ def get_specs(force=False, use_arch=False, names=None): """ Get spec.yaml's for build caches available on mirror """ - global _cached_specs arch = architecture.Arch(architecture.platform(), 'default_os', 'default_target') arch_pattern = ('([^-]*-[^-]*-[^-]*)') @@ -747,10 +745,6 @@ def get_specs(force=False, use_arch=False, names=None): names_pattern) name_re = 
re.compile(regex_pattern) - if _cached_specs: - tty.debug("Using previously-retrieved specs") - return _cached_specs - if not spack.mirror.MirrorCollection(): tty.debug("No Spack mirrors are currently configured") return {} diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index 8b8d128d2c6..d4c11e16938 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -170,7 +170,7 @@ def ignore_stage_files(): Used to track which leftover files in the stage have been seen. """ # to start with, ignore the .lock file at the stage root. - return set(['.lock', spack.stage._source_path_subdir]) + return set(['.lock', spack.stage._source_path_subdir, 'build_cache']) def remove_whatever_it_is(path): diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py index fa601196c38..edad8e29fa4 100644 --- a/lib/spack/spack/test/packaging.py +++ b/lib/spack/spack/test/packaging.py @@ -214,7 +214,7 @@ def test_buildcache(mock_archive, tmpdir): stage.destroy() # Remove cached binary specs since we deleted the mirror - bindist._cached_specs = None + bindist._cached_specs = set() def test_relocate_text(tmpdir): From 0c0aba579a67e9e1a94f1a57f5bc7a3464a7658f Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Sat, 1 Feb 2020 19:46:35 -0600 Subject: [PATCH 064/178] octave: add maintainer (#14716) --- var/spack/repos/builtin/packages/octave/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py index 9381ea5658f..b3b9567db46 100644 --- a/var/spack/repos/builtin/packages/octave/package.py +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -16,6 +16,7 @@ class Octave(AutotoolsPackage, GNUMirrorPackage): homepage = "https://www.gnu.org/software/octave/" gnu_mirror_path = "octave/octave-4.0.0.tar.gz" + maintainers = ['mtmiller'] extendable = True From 44f60f9fec54876da4aee022512c06424923702b Mon Sep 17 00:00:00 2001 From: Jean-Paul Pelteret Date: Sun, 2 Feb 2020 16:25:51 +0100 Subject: [PATCH 065/178] Update package: SymEngine 0.5.0 (#14722) --- var/spack/repos/builtin/packages/symengine/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/symengine/package.py b/var/spack/repos/builtin/packages/symengine/package.py index 1beb80d3d94..ceb1b9a5ed4 100644 --- a/var/spack/repos/builtin/packages/symengine/package.py +++ b/var/spack/repos/builtin/packages/symengine/package.py @@ -15,6 +15,7 @@ class Symengine(CMakePackage): git = "https://github.com/symengine/symengine.git" version('develop', branch='master') + version('0.5.0', sha256='5d02002f00d16a0928d1056e6ecb8f34fd59f3bfd8ed0009a55700334dbae29b') version('0.4.0', sha256='dd755901a9e2a49e53ba3bbe3f565f94265af05299e57a7b592186dd35916a1b') version('0.3.0', sha256='591463cb9e741d59f6dfd39a7943e3865d3afe9eac47d1a9cbf5ca74b9c49476') version('0.2.0', sha256='64d050b0b9decd12bf4ea3b7d18d3904dd7cb8baaae9fbac1b8068e3c59709be') From 3d9787a8bfdbfa1866724cbcfc47e4f5d5326c72 Mon Sep 17 00:00:00 2001 From: "Mark W. 
Krentel" Date: Sun, 2 Feb 2020 09:26:21 -0600 Subject: [PATCH 066/178] hpcviewer: add version 2020.01 (#14718) --- var/spack/repos/builtin/packages/hpcviewer/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/hpcviewer/package.py b/var/spack/repos/builtin/packages/hpcviewer/package.py index c0baf6dab24..21cfa67daaf 100644 --- a/var/spack/repos/builtin/packages/hpcviewer/package.py +++ b/var/spack/repos/builtin/packages/hpcviewer/package.py @@ -37,6 +37,9 @@ class Hpcviewer(Package): maintainers = ['mwkrentel'] viewer_sha = { + ('2020.01', 'x86_64'): '3cd5a2a382cec1d64c8bd0abaf2b1461dcd4092a4b4074ddbdc1b96d2a0b4220', + ('2020.01', 'ppc64'): '814394a5f410033cc1019526c268ef98b5b381e311fcd39ae8b2bde6c6ff017c', + ('2020.01', 'ppc64le'): 'e830e956b8088c415fb25ef44a8aca16ebcb27bcd34536866612343217e3f9e4', ('2019.12', 'x86_64'): '6ba149c8d23d9913291655602894f7a91f9c838e69ae5682fd7b605467255c2d', ('2019.12', 'ppc64'): '787257272381fac26401e1013952bea94635172503e7abf8063081fe03f08384', ('2019.12', 'ppc64le'): 'fd20891fdae6dd5c2313cdd98e53c52023a0cf146a1121d0c889ebedc08a8bb9', @@ -58,6 +61,9 @@ class Hpcviewer(Package): } trace_sha = { + ('2020.01', 'x86_64'): '9459177a2445e85d648384e2ccee20524592e91a74d615262f32d0876831cd7c', + ('2020.01', 'ppc64'): '02366a2ba30b9b2450d50cf44933288f04fae5bf9868eef7bb2ae1b49d4f454e', + ('2020.01', 'ppc64le'): '39970e84e397ed96bc994e7b8db3b7b3aab4e3155fa7ca8e68b9274bb58115f0', ('2019.12', 'x86_64'): '6339b36e655e2c2b07af4cb40946f325acc46da3ec590d36069661e69b046a92', ('2019.12', 'ppc64'): 'fe4ee5af22a983fa0ddbfbd97fa6676f07492400536e900188455f21e489c59b', ('2019.12', 'ppc64le'): '2688ea834c546b9e2c6e9d69d271a62dd00f6bc7ff4cb874563ba8d0ae5824e3', From dcaa50c9d0839b2ceb4606525e521c917adbba06 Mon Sep 17 00:00:00 2001 From: Jean-Paul Pelteret Date: Mon, 3 Feb 2020 13:04:12 +0100 Subject: [PATCH 067/178] deal.II: Add patch for TBB version check (#14724) --- 
var/spack/repos/builtin/packages/dealii/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 32546eb508f..797931052e0 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -180,6 +180,12 @@ class Dealii(CMakePackage, CudaPackage): sha256='61f217744b70f352965be265d2f06e8c1276685e2944ca0a88b7297dd55755da', when='@9.0.1 ^boost@1.70.0:') + # Fix TBB version check + # https://github.com/dealii/dealii/pull/9208 + patch('https://github.com/dealii/dealii/commit/80b13fe5a2eaefc77fa8c9266566fa8a2de91edf.patch', + sha256='6f876dc8eadafe2c4ec2a6673864fb451c6627ca80511b6e16f3c401946fdf33', + when='@9.0.0:9.1.1') + # check that the combination of variants makes sense # 64-bit BLAS: for p in ['openblas', 'intel-mkl', 'intel-parallel-studio+mkl']: From 30dd7ae176a3a1e7066b89a77de97ab2a9ce2c68 Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Mon, 3 Feb 2020 08:58:34 -0500 Subject: [PATCH 068/178] clingo: new versions (#14728) 5.3.0 and 5.4.0 --- var/spack/repos/builtin/packages/clingo/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/clingo/package.py b/var/spack/repos/builtin/packages/clingo/package.py index bb1b2f6af26..8f1b64af60b 100644 --- a/var/spack/repos/builtin/packages/clingo/package.py +++ b/var/spack/repos/builtin/packages/clingo/package.py @@ -19,6 +19,8 @@ class Clingo(CMakePackage): homepage = "https://potassco.org/clingo/" url = "https://github.com/potassco/clingo/archive/v5.2.2.tar.gz" + version('5.4.0', sha256='e2de331ee0a6d254193aab5995338a621372517adcf91568092be8ac511c18f3') + version('5.3.0', sha256='b0d406d2809352caef7fccf69e8864d55e81ee84f4888b0744894977f703f976') version('5.2.2', sha256='da1ef8142e75c5a6f23c9403b90d4f40b9f862969ba71e2aaee9a257d058bfcf') depends_on('doxygen', type=('build')) From 
94971d519c495fc134e60078f7904c424ff77aa8 Mon Sep 17 00:00:00 2001 From: albestro <9337627+albestro@users.noreply.github.com> Date: Mon, 3 Feb 2020 17:26:38 +0100 Subject: [PATCH 069/178] Fix CMake and update libarchive (#14684) * update libarchive and fix version of libarchive cmake dependency * (at least) libarchive 3.3.3 dependency from cmake 3.15.0 * cmake depends on libarchive 3.1.0 if not specified differently currently it is applied to cmake <3.15.0 Co-Authored-By: Adam J. Stewart Co-authored-by: Adam J. Stewart --- var/spack/repos/builtin/packages/cmake/package.py | 3 ++- var/spack/repos/builtin/packages/libarchive/package.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py index 5e4b644e5b0..4f6e30874b4 100644 --- a/var/spack/repos/builtin/packages/cmake/package.py +++ b/var/spack/repos/builtin/packages/cmake/package.py @@ -101,7 +101,8 @@ class Cmake(Package): depends_on('zlib', when='~ownlibs') depends_on('bzip2', when='~ownlibs') depends_on('xz', when='~ownlibs') - depends_on('libarchive', when='~ownlibs') + depends_on('libarchive@3.1.0:', when='~ownlibs') + depends_on('libarchive@3.3.3:', when='@3.15.0:~ownlibs') depends_on('libuv@1.0.0:1.10.99', when='@3.7.0:3.10.3~ownlibs') depends_on('libuv@1.10.0:1.10.99', when='@3.11.0:3.11.99~ownlibs') depends_on('libuv@1.10.0:', when='@3.12.0:~ownlibs') diff --git a/var/spack/repos/builtin/packages/libarchive/package.py b/var/spack/repos/builtin/packages/libarchive/package.py index 9449a481408..1e0a09c9598 100644 --- a/var/spack/repos/builtin/packages/libarchive/package.py +++ b/var/spack/repos/builtin/packages/libarchive/package.py @@ -13,6 +13,7 @@ class Libarchive(AutotoolsPackage): homepage = "http://www.libarchive.org" url = "http://www.libarchive.org/downloads/libarchive-3.1.2.tar.gz" + version('3.4.1', sha256='fcf87f3ad8db2e4f74f32526dee62dd1fb9894782b0a503a89c9d7a70a235191') 
version('3.3.2', sha256='ed2dbd6954792b2c054ccf8ec4b330a54b85904a80cef477a1c74643ddafa0ce') version('3.2.1', sha256='72ee1a4e3fd534525f13a0ba1aa7b05b203d186e0c6072a8a4738649d0b3cfd2') version('3.1.2', sha256='eb87eacd8fe49e8d90c8fdc189813023ccc319c5e752b01fb6ad0cc7b2c53d5e') From 5ad44477b2ef02f581d89b08862a8a34c53195c8 Mon Sep 17 00:00:00 2001 From: Patrick Gartung Date: Mon, 3 Feb 2020 13:40:14 -0600 Subject: [PATCH 070/178] buildcache list: restore original behavior of allowing constraints like @version. (#14732) --- lib/spack/spack/binary_distribution.py | 17 ++++++----------- lib/spack/spack/cmd/buildcache.py | 18 ++++++++---------- lib/spack/spack/test/cmd/buildcache.py | 2 +- share/spack/spack-completion.bash | 2 +- 4 files changed, 16 insertions(+), 23 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index f1834889b85..515a6166d21 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -727,23 +727,18 @@ def get_spec(spec=None, force=False): return try_download_specs(urls=urls, force=force) -def get_specs(force=False, use_arch=False, names=None): +def get_specs(force=False, allarch=False): """ Get spec.yaml's for build caches available on mirror """ arch = architecture.Arch(architecture.platform(), 'default_os', 'default_target') arch_pattern = ('([^-]*-[^-]*-[^-]*)') - if use_arch: + if not allarch: arch_pattern = '(%s-%s-[^-]*)' % (arch.platform, arch.os) - if names is None: - names = [''] - names_or_hashes = [name.replace('/', '') for name in names] - names_pattern = '|'.join(names_or_hashes) - regex_pattern = '%s(.*)(%s)(.*)(spec.yaml$)' % (arch_pattern, - names_pattern) - name_re = re.compile(regex_pattern) + regex_pattern = '%s(.*)(spec.yaml$)' % (arch_pattern) + arch_re = re.compile(regex_pattern) if not spack.mirror.MirrorCollection(): tty.debug("No Spack mirrors are currently configured") @@ -760,7 +755,7 @@ def get_specs(force=False, 
use_arch=False, names=None): if os.path.exists(mirror_dir): files = os.listdir(mirror_dir) for file in files: - m = name_re.search(file) + m = arch_re.search(file) if m: link = url_util.join(fetch_url_build_cache, file) urls.add(link) @@ -770,7 +765,7 @@ def get_specs(force=False, use_arch=False, names=None): p, links = web_util.spider( url_util.join(fetch_url_build_cache, 'index.html')) for link in links: - m = name_re.search(link) + m = arch_re.search(link) if m: urls.add(link) diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py index d3481bb6e66..392984f852d 100644 --- a/lib/spack/spack/cmd/buildcache.py +++ b/lib/spack/spack/cmd/buildcache.py @@ -87,8 +87,9 @@ def setup_parser(subparser): help='show variants in output (can be long)') listcache.add_argument('-f', '--force', action='store_true', help="force new download of specs") - listcache.add_argument('-a', '--arch', action='store_true', - help="only list spec for the default architecture") + listcache.add_argument('-a', '--allarch', action='store_true', + help="list specs for all available architectures" + + " instead of default platform and OS") arguments.add_common_arguments(listcache, ['specs']) listcache.set_defaults(func=listspecs) @@ -265,10 +266,11 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False): # List of specs that match expressions given via command line specs_from_cli = [] has_errors = False + allarch = False + specs = bindist.get_specs(force, allarch) for pkg in pkgs: matches = [] tty.msg("buildcache spec(s) matching %s \n" % pkg) - specs = bindist.get_specs(names=[pkg]) for spec in sorted(specs): if pkg.startswith('/'): pkghash = pkg.replace('/', '') @@ -417,14 +419,10 @@ def install_tarball(spec, args): def listspecs(args): """list binary packages available from mirrors""" - specs = list() + specs = bindist.get_specs(args.force, args.allarch) if args.specs: - for s in bindist.get_specs(args.force, args.arch, - args.specs): - if s not 
in set(specs): - specs.append(s) - else: - specs = bindist.get_specs(force=args.force, use_arch=args.arch) + constraints = set(args.specs) + specs = [s for s in specs if any(s.satisfies(c) for c in constraints)] display_specs(specs, args, all_headers=True) diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py index fdd76bff207..064daeb0636 100644 --- a/lib/spack/spack/test/cmd/buildcache.py +++ b/lib/spack/spack/test/cmd/buildcache.py @@ -17,7 +17,7 @@ def mock_get_specs(database, monkeypatch): specs = database.query_local() monkeypatch.setattr( - spack.binary_distribution, 'get_specs', lambda x, y, z: specs + spack.binary_distribution, 'get_specs', lambda x, y: specs ) diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 623e9fba738..8a22e342e20 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -400,7 +400,7 @@ _spack_buildcache_install() { _spack_buildcache_list() { if $list_options then - SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -f --force -a --arch" + SPACK_COMPREPLY="-h --help -l --long -L --very-long -v --variants -f --force -a --allarch" else _all_packages fi From 74c8f258269e6ee63fad3a6c376afa9217892325 Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 3 Feb 2020 15:07:12 -0600 Subject: [PATCH 071/178] Add new versions of matplotlib (#14731) --- var/spack/repos/builtin/packages/py-matplotlib/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py index eab28c70eda..6ef54f981d3 100644 --- a/var/spack/repos/builtin/packages/py-matplotlib/package.py +++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py @@ -14,7 +14,7 @@ class PyMatplotlib(PythonPackage): environments across platforms.""" homepage = "https://pypi.python.org/pypi/matplotlib" - url = "https://pypi.io/packages/source/m/matplotlib/matplotlib-3.1.2.tar.gz" + url = "https://pypi.io/packages/source/m/matplotlib/matplotlib-3.1.3.tar.gz" maintainers = ['adamjstewart'] @@ -28,10 +28,12 @@ class PyMatplotlib(PythonPackage): 'matplotlib.testing.jpl_units' ] + version('3.1.3', sha256='db3121f12fb9b99f105d1413aebaeb3d943f269f3d262b45586d12765866f0c6') version('3.1.2', sha256='8e8e2c2fe3d873108735c6ee9884e6f36f467df4a143136209cff303b183bada') version('3.1.1', sha256='1febd22afe1489b13c6749ea059d392c03261b2950d1d45c17e3aed812080c93') version('3.0.2', sha256='c94b792af431f6adb6859eb218137acd9a35f4f7442cea57e4a59c54751c36af') version('3.0.0', sha256='b4e2333c98a7c2c1ff6eb930cd2b57d4b818de5437c5048802096b32f66e65f9') + version('2.2.5', sha256='a3037a840cd9dfdc2df9fee8af8f76ca82bfab173c0f9468193ca7a89a2b60ea') version('2.2.3', sha256='7355bf757ecacd5f0ac9dd9523c8e1a1103faadf8d33c22664178e17533f8ce5') version('2.2.2', sha256='4dc7ef528aad21f22be85e95725234c5178c0f938e2228ca76640e5e84d8cde8') version('2.0.2', sha256='0ffbc44faa34a8b1704bc108c451ecf87988f900ef7ce757b8e2e84383121ff1') From 42633b0869971aa5a0864f7ab45ce5354ee7be56 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 3 Feb 2020 22:08:44 +0100 Subject: [PATCH 072/178] octave: mkoctfile doesn't use compiler wrappers (#14726) * Octave: 
moved the short description in its own paragraph * Octave: patch mkoctfile.in.cc to avoid using compiler wrappers * Added a check to ensure mkoctfile works correctly --- .../builtin/packages/octave/helloworld.cc | 11 +++ .../repos/builtin/packages/octave/package.py | 69 +++++++++++++++++-- 2 files changed, 74 insertions(+), 6 deletions(-) create mode 100644 var/spack/repos/builtin/packages/octave/helloworld.cc diff --git a/var/spack/repos/builtin/packages/octave/helloworld.cc b/var/spack/repos/builtin/packages/octave/helloworld.cc new file mode 100644 index 00000000000..f2cb6d9c1ef --- /dev/null +++ b/var/spack/repos/builtin/packages/octave/helloworld.cc @@ -0,0 +1,11 @@ +#include + +DEFUN_DLD (helloworld, args, nargout, + "Hello World Help String") +{ + octave_stdout << "Hello World has " + << args.length () << " input arguments and " + << nargout << " output arguments.\n"; + + return octave_value_list (); +} diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py index b3b9567db46..f70895bfda8 100644 --- a/var/spack/repos/builtin/packages/octave/package.py +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -2,17 +2,23 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - -from spack import * +import os.path +import shutil import sys +import tempfile + +import spack.util.environment class Octave(AutotoolsPackage, GNUMirrorPackage): """GNU Octave is a high-level language, primarily intended for numerical - computations. It provides a convenient command line interface for solving - linear and nonlinear problems numerically, and for performing other - numerical experiments using a language that is mostly compatible with - Matlab. It may also be used as a batch-oriented language.""" + computations. 
+ + It provides a convenient command line interface for solving linear and + nonlinear problems numerically, and for performing other numerical + experiments using a language that is mostly compatible with Matlab. + It may also be used as a batch-oriented language. + """ homepage = "https://www.gnu.org/software/octave/" gnu_mirror_path = "octave/octave-4.0.0.tar.gz" @@ -89,6 +95,57 @@ class Octave(AutotoolsPackage, GNUMirrorPackage): depends_on('suite-sparse', when='+suitesparse') depends_on('zlib', when='+zlib') + def patch(self): + # Filter mkoctfile.in.cc to use underlying compilers and not + # Spack compiler wrappers. We are patching the template file + # and not mkoctfile.cc since the latter is generated as part + # of the build. + mkoctfile_in = os.path.join( + self.stage.source_path, 'src', 'mkoctfile.in.cc' + ) + quote = lambda s: '"' + s + '"' + entries_to_patch = { + r'%OCTAVE_CONF_MKOCTFILE_CC%': quote(self.compiler.cc), + r'%OCTAVE_CONF_MKOCTFILE_CXX%': quote(self.compiler.cxx), + r'%OCTAVE_CONF_MKOCTFILE_F77%': quote(self.compiler.f77), + r'%OCTAVE_CONF_MKOCTFILE_DL_LD%': quote(self.compiler.cxx), + r'%OCTAVE_CONF_MKOCTFILE_LD_CXX%': quote(self.compiler.cxx) + } + + for pattern, subst in entries_to_patch.items(): + filter_file(pattern, subst, mkoctfile_in) + + @run_after('install') + @on_package_attributes(run_tests=True) + def check_mkoctfile_works_outside_of_build_env(self): + # Check that mkoctfile is properly configured and can compile + # Octave extensions outside of the build env + mkoctfile = Executable(os.path.join(self.prefix, 'bin', 'mkoctfile')) + helloworld_cc = os.path.join( + os.path.dirname(__file__), 'helloworld.cc' + ) + tmp_dir = tempfile.mkdtemp() + shutil.copy(helloworld_cc, tmp_dir) + + # We need to unset these variables since we are still within + # Spack's build environment when running tests + vars_to_unset = ['CC', 'CXX', 'F77', 'FC'] + + with spack.util.environment.preserve_environment(*vars_to_unset): + # Delete temporarily 
the environment variables that point + # to Spack compiler wrappers + for v in vars_to_unset: + del os.environ[v] + # Check that mkoctfile outputs the expected value for CC + cc = mkoctfile('-p', 'CC', output=str) + msg = "mkoctfile didn't output the expected CC compiler" + assert self.compiler.cc in cc, msg + + # Try to compile an Octave extension + shutil.copy(helloworld_cc, tmp_dir) + with working_dir(tmp_dir): + mkoctfile('helloworld.cc') + def configure_args(self): # See # https://github.com/macports/macports-ports/blob/master/math/octave/ From 7d444f08e7fd3a03118f8a1eddfbcbc81142c914 Mon Sep 17 00:00:00 2001 From: Rob Latham Date: Mon, 3 Feb 2020 19:15:52 -0600 Subject: [PATCH 073/178] update pmdk to 1.8 (#14733) new upstream release - notable in that it has experimental powerpc-le support (e.g. summit) - required a new patch to disable documentation --- .../repos/builtin/packages/pmdk/package.py | 8 +++++--- .../packages/pmdk/pmem-1.8-disable-docs.patch | 19 +++++++++++++++++++ 2 files changed, 24 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/pmdk/pmem-1.8-disable-docs.patch diff --git a/var/spack/repos/builtin/packages/pmdk/package.py b/var/spack/repos/builtin/packages/pmdk/package.py index 7858344aea1..c8c836e4ebd 100644 --- a/var/spack/repos/builtin/packages/pmdk/package.py +++ b/var/spack/repos/builtin/packages/pmdk/package.py @@ -17,13 +17,15 @@ class Pmdk(Package): git = "https://github.com/pmem/pmdk.git" version('develop', branch='master') + version('1.8', sha256='a241ea76ef76d233cb92826b6823ed48091a2fb6963282a4fea848dbce68aa21') version('1.7', sha256='865ce1b422bc83109cb4a63dcff8fd1077eea3617e668faf6a043208d8be03ca') version('1.6', sha256='3b99e6c30709326a94d2e73a9247a8dfb58d0a394c5b7714e5c3d8a3ad2e2e9f') version('1.5', sha256='6b069d7207febeb62440e89245e8b18fcdf40b6170d2ec2ef33c252ed16db2d4') depends_on('ncurses', when='@1.6:') # documentation requires doxygen and a bunch of other dependencies - 
patch('0001-make-doc-building-explicit.patch') + patch('0001-make-doc-building-explicit.patch', when="@:1.7") + patch('pmem-1.8-disable-docs.patch', when='@1.8') def install(self, spec, prefix): make_args = [ @@ -33,8 +35,8 @@ def install(self, spec, prefix): 'BUILD_RPMEM=n', ] - # pmdk is particular about the ARCH specification, must be - # exactly "x86_64" for build to work + # pmdk prior to 1.8 was particular about the ARCH specification, must + # be exactly "x86_64" for build to work if spec.target.family == 'x86_64': make_args += ['ARCH=x86_64'] diff --git a/var/spack/repos/builtin/packages/pmdk/pmem-1.8-disable-docs.patch b/var/spack/repos/builtin/packages/pmdk/pmem-1.8-disable-docs.patch new file mode 100644 index 00000000000..f850e70e0f1 --- /dev/null +++ b/var/spack/repos/builtin/packages/pmdk/pmem-1.8-disable-docs.patch @@ -0,0 +1,19 @@ +--- pmdk-1.8/Makefile 2020-01-31 06:42:27.000000000 -0500 ++++ pmdk-1.8.nodocs/Makefile 2020-02-03 13:51:24.844691000 -0500 +@@ -79,7 +79,7 @@ rpm : override DESTDIR="$(CURDIR)/$(RPM_ + dpkg: override DESTDIR="$(CURDIR)/$(DPKG_BUILDDIR)" + rpm dpkg: override prefix=/usr + +-all: doc ++all: + $(MAKE) -C src $@ + + doc: +@@ -151,7 +151,6 @@ install: all + + install uninstall: + $(MAKE) -C src $@ +- $(MAKE) -C doc $@ + + .PHONY: all clean clobber test check cstyle check-license install uninstall\ + source rpm dpkg pkg-clean pcheck check-remote format doc require-rpmem\ From 4accc78409da79dc5ed260505e942621dae691db Mon Sep 17 00:00:00 2001 From: Andrew W Elble Date: Mon, 3 Feb 2020 22:02:45 -0500 Subject: [PATCH 074/178] Git fetching: add option to remove submodules (#14370) Add an optional 'submodules_delete' field to Git versions in Spack packages that allows them to remove specific submodules. For example: the nervanagpu submodule has become unavailable for the PyTorch project (see issue 19457 at https://github.com/pytorch/pytorch/issues/). Removing this submodule allows 0.4.1 to build. 
--- lib/spack/docs/packaging_guide.rst | 3 + lib/spack/spack/fetch_strategy.py | 12 +++- lib/spack/spack/test/conftest.py | 27 ++++++++- lib/spack/spack/test/git_fetch.py | 57 ++++++++++++++++++- .../builtin/packages/py-torch/package.py | 3 +- 5 files changed, 97 insertions(+), 5 deletions(-) diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index e566cb45450..6e1271b12c6 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -929,6 +929,9 @@ Git fetching supports the following parameters to ``version``: * ``tag``: Name of a tag to fetch. * ``commit``: SHA hash (or prefix) of a commit to fetch. * ``submodules``: Also fetch submodules recursively when checking out this repository. +* ``submodules_delete``: A list of submodules to forcibly delete from the repository + after fetching. Useful if a version in the repository has submodules that + have disappeared/are no longer accessible. * ``get_full_repo``: Ensure the full git history is checked out with all remote branch information. Normally (``get_full_repo=False``, the default), the git option ``--depth 1`` will be used if the version of git and the specified diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index cf8ec0b095a..5ae01286c4b 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -714,7 +714,8 @@ class GitFetchStrategy(VCSFetchStrategy): Repositories are cloned into the standard stage source path directory. 
""" url_attr = 'git' - optional_attrs = ['tag', 'branch', 'commit', 'submodules', 'get_full_repo'] + optional_attrs = ['tag', 'branch', 'commit', 'submodules', + 'get_full_repo', 'submodules_delete'] def __init__(self, **kwargs): # Discards the keywords in kwargs that may conflict with the next call @@ -725,6 +726,7 @@ def __init__(self, **kwargs): self._git = None self.submodules = kwargs.get('submodules', False) + self.submodules_delete = kwargs.get('submodules_delete', False) self.get_full_repo = kwargs.get('get_full_repo', False) @property @@ -858,6 +860,14 @@ def fetch(self): git(*pull_args, ignore_errors=1) git(*co_args) + if self.submodules_delete: + with working_dir(self.stage.source_path): + for submodule_to_delete in self.submodules_delete: + args = ['rm', submodule_to_delete] + if not spack.config.get('config:debug'): + args.insert(1, '--quiet') + git(*args) + # Init submodules if the user asked for them. if self.submodules: with working_dir(self.stage.source_path): diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py index d4c11e16938..97bbb69b528 100644 --- a/lib/spack/spack/test/conftest.py +++ b/lib/spack/spack/test/conftest.py @@ -744,11 +744,31 @@ def mock_archive(request, tmpdir_factory): @pytest.fixture(scope='session') def mock_git_repository(tmpdir_factory): - """Creates a very simple git repository with two branches and - two commits. + """Creates a simple git repository with two branches, + two commits and two submodules. Each submodule has one commit. 
""" git = spack.util.executable.which('git', required=True) + suburls = [] + for submodule_count in range(2): + tmpdir = tmpdir_factory.mktemp('mock-git-repo-submodule-dir-{0}' + .format(submodule_count)) + tmpdir.ensure(spack.stage._source_path_subdir, dir=True) + repodir = tmpdir.join(spack.stage._source_path_subdir) + suburls.append((submodule_count, 'file://' + str(repodir))) + + # Initialize the repository + with repodir.as_cwd(): + git('init') + git('config', 'user.name', 'Spack') + git('config', 'user.email', 'spack@spack.io') + + # r0 is just the first commit + submodule_file = 'r0_file_{0}'.format(submodule_count) + repodir.ensure(submodule_file) + git('add', submodule_file) + git('commit', '-m', 'mock-git-repo r0 {0}'.format(submodule_count)) + tmpdir = tmpdir_factory.mktemp('mock-git-repo-dir') tmpdir.ensure(spack.stage._source_path_subdir, dir=True) repodir = tmpdir.join(spack.stage._source_path_subdir) @@ -759,6 +779,9 @@ def mock_git_repository(tmpdir_factory): git('config', 'user.name', 'Spack') git('config', 'user.email', 'spack@spack.io') url = 'file://' + str(repodir) + for number, suburl in suburls: + git('submodule', 'add', suburl, + 'third_party/submodule{0}'.format(number)) # r0 is just the first commit r0_file = 'r0_file' diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py index 57474e56b7c..8dc57da793b 100644 --- a/lib/spack/spack/test/git_fetch.py +++ b/lib/spack/spack/test/git_fetch.py @@ -19,7 +19,6 @@ from spack.fetch_strategy import GitFetchStrategy from spack.util.executable import which - pytestmark = pytest.mark.skipif( not which('git'), reason='requires git to be installed') @@ -217,3 +216,59 @@ def test_get_full_repo(get_full_repo, git_version, mock_git_repository, else: assert(nbranches == 2) assert(ncommits == 1) + + +@pytest.mark.disable_clean_stage_check +@pytest.mark.parametrize("submodules", [True, False]) +def test_gitsubmodule(submodules, mock_git_repository, config, + mutable_mock_repo): + 
""" + Test GitFetchStrategy behavior with submodules + """ + type_of_test = 'tag-branch' + t = mock_git_repository.checks[type_of_test] + + # Construct the package under test + spec = Spec('git-test') + spec.concretize() + pkg = spack.repo.get(spec) + args = copy.copy(t.args) + args['submodules'] = submodules + pkg.versions[ver('git')] = args + pkg.do_stage() + with working_dir(pkg.stage.source_path): + for submodule_count in range(2): + file_path = os.path.join(pkg.stage.source_path, + 'third_party/submodule{0}/r0_file_{0}' + .format(submodule_count)) + if submodules: + assert os.path.isfile(file_path) + else: + assert not os.path.isfile(file_path) + + +@pytest.mark.disable_clean_stage_check +def test_gitsubmodules_delete(mock_git_repository, config, mutable_mock_repo): + """ + Test GitFetchStrategy behavior with submodules_delete + """ + type_of_test = 'tag-branch' + t = mock_git_repository.checks[type_of_test] + + # Construct the package under test + spec = Spec('git-test') + spec.concretize() + pkg = spack.repo.get(spec) + args = copy.copy(t.args) + args['submodules'] = True + args['submodules_delete'] = ['third_party/submodule0', + 'third_party/submodule1'] + pkg.versions[ver('git')] = args + pkg.do_stage() + with working_dir(pkg.stage.source_path): + file_path = os.path.join(pkg.stage.source_path, + 'third_party/submodule0') + assert not os.path.isdir(file_path) + file_path = os.path.join(pkg.stage.source_path, + 'third_party/submodule1') + assert not os.path.isdir(file_path) diff --git a/var/spack/repos/builtin/packages/py-torch/package.py b/var/spack/repos/builtin/packages/py-torch/package.py index 2a20235bef9..e40bc840feb 100644 --- a/var/spack/repos/builtin/packages/py-torch/package.py +++ b/var/spack/repos/builtin/packages/py-torch/package.py @@ -58,7 +58,8 @@ class PyTorch(PythonPackage, CudaPackage): version('1.1.0', tag='v1.1.0', submodules=True) version('1.0.1', tag='v1.0.1', submodules=True) version('1.0.0', tag='v1.0.0', submodules=True) - 
version('0.4.1', tag='v0.4.1', submodules=True) + version('0.4.1', tag='v0.4.1', submodules=True, + submodules_delete=['third_party/nervanagpu']) version('0.4.0', tag='v0.4.0', submodules=True) version('0.3.1', tag='v0.3.1', submodules=True) From 19ff3c0f67f89e842716a0c3e16996db8f471c8a Mon Sep 17 00:00:00 2001 From: Nicolas Richart Date: Tue, 4 Feb 2020 11:04:59 +0100 Subject: [PATCH 075/178] akantu: new package at v3.0.0 (#14685) --- .../repos/builtin/packages/akantu/package.py | 92 +++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 var/spack/repos/builtin/packages/akantu/package.py diff --git a/var/spack/repos/builtin/packages/akantu/package.py b/var/spack/repos/builtin/packages/akantu/package.py new file mode 100644 index 00000000000..c58225261f1 --- /dev/null +++ b/var/spack/repos/builtin/packages/akantu/package.py @@ -0,0 +1,92 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Akantu(CMakePackage): + """ + Akantu means a little element in Kinyarwanda, a Bantu language. From now + on it is also an opensource object-oriented Finite Element library which + has the ambition to be generic and efficient. 
+ + """ + homepage = "https://akantu.ch" + url = "https://gitlab.com/akantu/akantu/-/archive/v3.0.0/akantu-v3.0.0.tar.gz" + git = "https://gitlab.com/akantu/akantu.git" + + maintainers = ['nrichart'] + + version('master', branch='master') + version('3.0.0', sha256='7e8f64e25956eba44def1b2d891f6db8ba824e4a82ff0d51d6b585b60ab465db') + + variant('external_solvers', values=any_combination_of('mumps', 'petsc'), + description="Activates the implicit solver") + variant('mpi', default=True, + description="Activates parallel capabilities") + variant('python', default=False, + description="Activates python bindings") + + depends_on('boost@:1.66', when='@:3.0.99') + depends_on('boost') + depends_on('lapack') + depends_on('cmake@3.5.1:', type='build') + depends_on('python', when='+python', type=('build', 'run')) + depends_on('py-numpy', when='+python', type=('build', 'run')) + depends_on('py-scipy', when='+python', type=('build', 'run')) + depends_on('py-pybind11', when='@3.1:+python', type=('build', 'run')) + + depends_on('mumps', when='~mpi external_solvers=mumps') + depends_on('mumps+mpi', when='+mpi external_solvers=mumps') + depends_on('netlib-scalapack', when='+mpi external_solvers=mumps') + depends_on('petsc+double', when='~mpi external_solvers=petsc') + depends_on('petsc+double+mpi', when='+mpi external_solvers=petsc') + + depends_on('mpi', when='+mpi') + depends_on('scotch', when='+mpi') + + extends('python', when='+python') + + conflicts('gcc@:5.3.99') + conflicts('@:3.0.99 external_solvers=petsc') + conflicts('@:3.0.99 +python') + + def cmake_args(self): + spec = self.spec + + args = [ + '-DAKANTU_COHESIVE_ELEMENT:BOOL=ON', + '-DAKANTU_DAMAGE_NON_LOCAL:BOOL=ON', + '-DAKANTU_HEAT_TRANSFER:BOOL=ON', + '-DAKANTU_SOLID_MECHANICS:BOOL=ON', + '-DAKANTU_STRUCTURAL_MECHANICS:BOOL=OFF', + '-DAKANTU_PARALLEL:BOOL={0}'.format( + 'ON' if spec.satisfies('+mpi') else 'OFF'), + '-DAKANTU_PYTHON_INTERFACE:BOOL={0}'.format( + 'ON' if spec.satisfies('+python') else 'OFF'), + ] + + if 
spec.satisfies('@:3.0.99'): + args.extend(['-DCMAKE_CXX_FLAGS=-Wno-class-memaccess', + '-DAKANTU_TRACTION_AT_SPLIT_NODE_CONTACT:BOOL=OFF']) + else: + args.append('-DAKANTU_TRACTION_AT_SPLIT_NODE_CONTACT:BOOL=ON') + + solvers = [] + if spec.satisfies('external_solvers=mumps'): + solvers.append('Mumps') + args.append('-DMUMPS_DIR:PATH=${0}'.format(spec['mumps'].prefix)) + if spec.satisfies('external_solvers=petsc'): + solvers.append('PETSc') + + if len(solvers) > 0: + args.extend([ + '-DAKANTU_IMPLICIT_SOLVER:STRING={0}'.format( + '+'.join(solvers)), + '-DAKANTU_IMPLICIT:BOOL=ON']) + else: + args.append('-DAKANTU_IMPLICIT:BOOL=OFF') + + return args From 0ec93778526c499efb91d823dd6728411c603673 Mon Sep 17 00:00:00 2001 From: "Dr. Christian Tacke" <58549698+ChristianTackeGSI@users.noreply.github.com> Date: Tue, 4 Feb 2020 15:27:57 +0100 Subject: [PATCH 076/178] pythia8: added old versions, build depend on rsync (#14740) --- var/spack/repos/builtin/packages/pythia8/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/pythia8/package.py b/var/spack/repos/builtin/packages/pythia8/package.py index 0afbb8dc910..8f2ec41b545 100644 --- a/var/spack/repos/builtin/packages/pythia8/package.py +++ b/var/spack/repos/builtin/packages/pythia8/package.py @@ -17,9 +17,15 @@ class Pythia8(AutotoolsPackage): version('8301', sha256='51382768eb9aafb97870dca1909516422297b64ef6a6b94659259b3e4afa7f06') version('8244', sha256='e34880f999daf19cdd893a187123927ba77d1bf851e30f6ea9ec89591f4c92ca', preferred=True) + version('8240', sha256='d27495d8ca7707d846f8c026ab695123c7c78c7860f04e2c002e483080418d8d') + version('8235', sha256='e82f0d6165a8250a92e6aa62fb53201044d8d853add2fdad6d3719b28f7e8e9d') + version('8230', sha256='332fad0ed4f12e6e0cb5755df0ae175329bc16bfaa2ae472d00994ecc99cd78d') + version('8212', sha256='f8fb4341c7e8a8be3347eb26b00329a388ccf925313cfbdba655a08d7fd5a70e') variant('shared', default=True, description='Build shared library') + 
depends_on('rsync', type='build') + def configure_args(self): args = [] From 1bbe0a1f743b513344afae20c09eec000492d45e Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Tue, 4 Feb 2020 15:29:21 +0100 Subject: [PATCH 077/178] abinit: fix dependency on fftw (#14739) fixes #14578 Abinit's recipe requires double precision FFTW libraries --- var/spack/repos/builtin/packages/abinit/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py index 870e69fb3d2..6f07ffee811 100644 --- a/var/spack/repos/builtin/packages/abinit/package.py +++ b/var/spack/repos/builtin/packages/abinit/package.py @@ -71,7 +71,7 @@ class Abinit(AutotoolsPackage): # depends_on('elpa~openmp', when='+elpa+mpi~openmp') # depends_on('elpa+openmp', when='+elpa+mpi+openmp') - depends_on('fftw precision=float') + depends_on('fftw precision=float,double') depends_on('fftw~openmp', when='~openmp') depends_on('fftw+openmp', when='+openmp') From 603e2794dbc43f83c7c84815ac3270e5a85b62b0 Mon Sep 17 00:00:00 2001 From: Christoph Junghans Date: Tue, 4 Feb 2020 10:18:20 -0700 Subject: [PATCH 078/178] cray-libsci: initial commit (#14709) * cray-libsci: initial commit --- .../builtin/packages/cray-libsci/package.py | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100755 var/spack/repos/builtin/packages/cray-libsci/package.py diff --git a/var/spack/repos/builtin/packages/cray-libsci/package.py b/var/spack/repos/builtin/packages/cray-libsci/package.py new file mode 100755 index 00000000000..c0313a1e39e --- /dev/null +++ b/var/spack/repos/builtin/packages/cray-libsci/package.py @@ -0,0 +1,43 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from llnl.util.filesystem import LibraryList +from spack import * +import os + + +class CrayLibsci(Package): + """The Cray Scientific Libraries package, LibSci, is a collection of + numerical routines optimized for best performance on Cray systems.""" + + homepage = "http://www.nersc.gov/users/software/programming-libraries/math-libraries/libsci/" + url = "http://www.nersc.gov/users/software/programming-libraries/math-libraries/libsci/" + + version("18.11.1.2") + version("16.11.1") + version("16.09.1") + version('16.07.1') + version("16.06.1") + version("16.03.1") + + provides("blas") + provides("lapack") + provides("scalapack") + + # NOTE: Cray compiler wrappers already include linking for the following + @property + def blas_libs(self): + return LibraryList(os.path.join(self.prefix.lib, 'libsci.so')) + + @property + def lapack_libs(self): + return self.blas_libs + + @property + def scalapack_libs(self): + return self.blas_libs + + def install(self, spec, prefix): + raise NoBuildError(spec) From b34e7ad28f1dba60cdcd69185f8b229b38ebf99e Mon Sep 17 00:00:00 2001 From: Rao Garimella Date: Tue, 4 Feb 2020 12:15:45 -0700 Subject: [PATCH 079/178] MSTK package: add version 3.3.2 (#14689) --- var/spack/repos/builtin/packages/mstk/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/mstk/package.py b/var/spack/repos/builtin/packages/mstk/package.py index b67e0f7493e..07e21b9ba9f 100644 --- a/var/spack/repos/builtin/packages/mstk/package.py +++ b/var/spack/repos/builtin/packages/mstk/package.py @@ -28,6 +28,7 @@ class Mstk(CMakePackage): maintainers = ['raovgarimella', 'julienloiseau'] version('master', branch='master') + version('3.3.2', sha256='fb4ffa97af4d0a0d4771c8585d0b27538b013d8b1cfaff992e5e054fef62af0b') version('3.3.1', sha256='9fdb0c33c1b68714d708b355d963547cf41332812658d4560d4db43904fc78de') version('3.3.0', 
sha256='205c48fb5619937b5dd83788da739b7c2060155b7c41793e29ce05422b8f7dfb') version('3.2.1', sha256='09bc6684abb576e34c324322db6f71f8987c6ec436a54256b85ef4db40185470') @@ -52,7 +53,7 @@ class Mstk(CMakePackage): conflicts('partitioner=all', when='-parallel') conflicts('partitioner=zoltan', when='-parallel') - # MSTK turns on METIS only for parallel buildsu + # MSTK turns on METIS only for parallel builds conflicts('partitioner=metis', when='-parallel') # dependencies From 997a0f420753e1d1b4eb769fb7afb8685f173b5a Mon Sep 17 00:00:00 2001 From: Jennifer Herting Date: Tue, 4 Feb 2020 16:01:26 -0500 Subject: [PATCH 080/178] New version for mariadb and disable dtrace (#14734) * [mariadb] added version 10.4.8 * [mariadb] disabled dtrace --- var/spack/repos/builtin/packages/mariadb/package.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/var/spack/repos/builtin/packages/mariadb/package.py b/var/spack/repos/builtin/packages/mariadb/package.py index 0d2d34d3b80..c89c4c44e6f 100644 --- a/var/spack/repos/builtin/packages/mariadb/package.py +++ b/var/spack/repos/builtin/packages/mariadb/package.py @@ -20,6 +20,7 @@ class Mariadb(CMakePackage): homepage = "https://mariadb.org/about/" url = "http://ftp.hosteurope.de/mirror/archive.mariadb.org/mariadb-10.2.8/source/mariadb-10.2.8.tar.gz" + version('10.4.8', sha256='10cc2c3bdb76733c9c6fd1e3c6c860d8b4282c85926da7d472d2a0e00fffca9b') version('10.4.7', sha256='c8e6a6d0bb4f22c416ed675d24682a3ecfa383c5283efee70c8edf131374d817') version('10.2.8', sha256='8dd250fe79f085e26f52ac448fbdb7af2a161f735fae3aed210680b9f2492393') version('10.1.23', sha256='54d8114e24bfa5e3ebdc7d69e071ad1471912847ea481b227d204f9d644300bf') @@ -51,3 +52,10 @@ class Mariadb(CMakePackage): depends_on('openssl@:1.0', when='@:10.1') conflicts('%gcc@9.1.0:', when='@:5.5') + + def cmake_args(self): + args = [] + + args.append('-DENABLE_DTRACE:BOOL=OFF') + + return args From d4d82281d1d5d0a43732e79cc7031354959f86d0 Mon Sep 17 00:00:00 2001 From: Jennifer 
Herting Date: Tue, 4 Feb 2020 16:11:36 -0500 Subject: [PATCH 081/178] [rust] added version 1.41.0 (#14742) --- var/spack/repos/builtin/packages/rust/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py index d3de328d8d0..2ae30b5c035 100644 --- a/var/spack/repos/builtin/packages/rust/package.py +++ b/var/spack/repos/builtin/packages/rust/package.py @@ -13,6 +13,7 @@ class Rust(Package): git = "https://github.com/rust-lang/rust.git" version('develop', branch='master') + version('1.41.0', tag='1.41.0') version('1.34.0', tag='1.34.0') version('1.32.0', tag='1.32.0') version('1.31.1', tag='1.31.1') From 1974ad4e7f54aecfbce4bfca55ecf03ebf8192f6 Mon Sep 17 00:00:00 2001 From: Christoph Junghans Date: Tue, 4 Feb 2020 15:06:03 -0700 Subject: [PATCH 082/178] heffte: initial commit (#14744) --- .../repos/builtin/packages/heffte/package.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 var/spack/repos/builtin/packages/heffte/package.py diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py new file mode 100644 index 00000000000..e3bb7e49610 --- /dev/null +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -0,0 +1,31 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Heffte(CMakePackage): + """Highly Efficient FFT for Exascale""" + + homepage = "https://bitbucket.org/icl/heffte" + url = "https://bitbucket.org/icl/heffte/get/v0.1.tar.gz" + git = "https://bitbucket.org/icl/heffte.git" + + version('master', branch='master') + version('0.1', sha256='d279a03298d2dc76574b1ae1031acb4ea964348cf359273d1afa4668b5bfe748') + + variant('cuda', default=False, description='Builds with support for GPUs via CUDA') + + depends_on('fftw') + depends_on('mpi') + depends_on('cuda', when="+cuda") + + def cmake_args(self): + args = ['-DBUILD_SHARED=ON'] + if '+cuda' in self.spec: + args.append('-DBUILD_GPU=ON') + else: + args.append('-DBUILD_GPU=OFF') + return args From 52d1f5b83996c0086adb62b509bdf01b03424733 Mon Sep 17 00:00:00 2001 From: "Seth R. Johnson" Date: Tue, 4 Feb 2020 19:32:48 -0600 Subject: [PATCH 083/178] Remove python patch for inapplicable versions (#14746) This patch was merged in to upstream python 3.7 and is fixed in 3.7.6. --- var/spack/repos/builtin/packages/python/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index 55dfcc90217..f666d4dd4df 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -160,7 +160,7 @@ class Python(AutotoolsPackage): # Fixes build with the Intel compilers # https://github.com/python/cpython/pull/16717 - patch('intel-3.6.7.patch', when='@3.6.7:3.6.8,3.7.1: %intel') + patch('intel-3.6.7.patch', when='@3.6.7:3.6.8,3.7.1:3.7.5 %intel') # For more information refer to this bug report: # https://bugs.python.org/issue29712 From 731148e0e189a2d584cae8f084804fb6c2698dcb Mon Sep 17 00:00:00 2001 From: "Seth R. 
Johnson" Date: Tue, 4 Feb 2020 19:33:33 -0600 Subject: [PATCH 084/178] Use CMake for libmng package (#14747) * Convert libmng to use CMake rather than autoconf The autoconf script failed to recognize the intel compiler; it was harwired to use gcc. * Simplify cmake logic and remove unused variant --- .../repos/builtin/packages/libmng/package.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/libmng/package.py b/var/spack/repos/builtin/packages/libmng/package.py index 0d8cb9e7231..29d70379bcc 100644 --- a/var/spack/repos/builtin/packages/libmng/package.py +++ b/var/spack/repos/builtin/packages/libmng/package.py @@ -6,16 +6,17 @@ from spack import * -class Libmng(AutotoolsPackage): - """libmng -THE reference library for reading, displaying, writing +class Libmng(CMakePackage): + """THE reference library for reading, displaying, writing and examining Multiple-Image Network Graphics. MNG is the animation - extension to the popular PNG image-format.""" + extension to the popular PNG image format.""" homepage = "http://sourceforge.net/projects/libmng/" url = "http://downloads.sourceforge.net/project/libmng/libmng-devel/2.0.3/libmng-2.0.3.tar.gz" version('2.0.3', sha256='cf112a1fb02f5b1c0fce5cab11ea8243852c139e669c44014125874b14b7dfaa') version('2.0.2', sha256='4908797bb3541fb5cd8fffbe0b1513ed163509f2a4d57a78b26a96f8d1dd05a2') + depends_on("gzip") depends_on("jpeg") depends_on("zlib") depends_on("lcms") @@ -25,11 +26,6 @@ def patch(self): filter_file(r'^(\#include \)', '#include\n\\1', 'libmng_types.h') - @run_before('configure') - def clean_configure_directory(self): - """Without this, configure crashes with: - - configure: error: source directory already configured; - run "make distclean" there first - """ - make('distclean') + def cmake_args(self): + return ['-DWITH_LCMS2:BOOL=ON', + '-DWITH_LCMS1:BOOL=OFF'] From 29a01f488c53c7d14427505ca9336c25fe2fa993 Mon Sep 17 00:00:00 2001 From: "Seth R. 
Johnson" Date: Tue, 4 Feb 2020 21:03:47 -0600 Subject: [PATCH 085/178] Fix QT on Intel (#14745) - More robustly handle compiler version switching between QT4 and 5, and mac/linux, and gcc/intel/clang - Remove assumption about intel linker being in path --- .../repos/builtin/packages/qt/package.py | 98 ++++++++++--------- 1 file changed, 51 insertions(+), 47 deletions(-) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 1a463c50195..48cdcc32600 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -175,8 +175,9 @@ class Qt(Package): use_xcode = True - # Mapping for compilers in the QT 'mkspecs' + # Mapping for compilers/systems in the QT 'mkspecs' compiler_mapping = {'intel': 'icc', 'clang': 'clang-libc++', 'gcc': 'g++'} + platform_mapping = {'darwin': 'macx'} def url_for_version(self, version): # URL keeps getting more complicated with every release @@ -227,6 +228,25 @@ def setup_dependent_build_environment(self, env, dependent_spec): def setup_dependent_package(self, module, dependent_spec): module.qmake = Executable(join_path(self.spec.prefix.bin, 'qmake')) + def get_mkspec(self): + """Determine the mkspecs root directory and QT platform. 
+ """ + spec = self.spec + cname = spec.compiler.name + cname = self.compiler_mapping.get(cname, cname) + pname = spec.architecture.platform + pname = self.platform_mapping.get(pname, pname) + + qtplat = None + mkspec_dir = 'qtbase/mkspecs' if spec.satisfies('@5:') else 'mkspecs' + for subdir in ('', 'unsupported'): + platdirname = "".join([subdir, pname, "-", cname]) + if os.path.exists(os.path.join(mkspec_dir, platdirname)): + qtplat = platdirname + break + + return (mkspec_dir, qtplat) + @when('@4 platform=darwin') def patch(self): ogl = self.spec['opengl'] if '+opengl' in self.spec else None @@ -284,32 +304,40 @@ def repl(match): ] filter_file(r'(\+=.*)debug_and_release', r'\1', *files_to_filter) - @when('@4') # *NOT* darwin/mac + @when('@4: %gcc') # *NOT* darwin/mac gcc def patch(self): + (mkspec_dir, platform) = self.get_mkspec() + + def conf(name): + return os.path.join(mkspec_dir, 'common', name + '.conf') + # Fix qmake compilers in the default mkspec - filter_file('^QMAKE_CC .*', 'QMAKE_CC = cc', - 'mkspecs/common/g++-base.conf') - filter_file('^QMAKE_CXX .*', 'QMAKE_CXX = c++', - 'mkspecs/common/g++-base.conf') - - # Necessary to build with GCC 6 and other modern compilers - # http://stackoverflow.com/questions/10354371/ - filter_file('(^QMAKE_CXXFLAGS .*)', r'\1 -std=gnu++98', - 'mkspecs/common/gcc-base.conf') + filter_file('^QMAKE_CC .*', 'QMAKE_CC = cc', conf('g++-base')) + filter_file('^QMAKE_CXX .*', 'QMAKE_CXX = c++', conf('g++-base')) + # Don't error out on undefined symbols filter_file('^QMAKE_LFLAGS_NOUNDEF .*', 'QMAKE_LFLAGS_NOUNDEF = ', - 'mkspecs/common/g++-unix.conf') + conf('g++-unix')) - @when('@5') + if self.spec.satisfies('@4'): + # Necessary to build with GCC 6 and other modern compilers + # http://stackoverflow.com/questions/10354371/ + with open(conf_file, 'a') as f: + f.write("QMAKE_CXXFLAGS += -std=gnu++98\n") + + @when('@4: %intel') def patch(self): - # Fix qmake compilers in the default mkspec - filter_file('^QMAKE_CC .*', 
'QMAKE_CC = cc', - 'qtbase/mkspecs/common/g++-base.conf') - filter_file('^QMAKE_CXX .*', 'QMAKE_CXX = c++', - 'qtbase/mkspecs/common/g++-base.conf') + (mkspec_dir, platform) = self.get_mkspec() + conf_file = os.path.join(mkspec_dir, platform, "qmake.conf") - filter_file('^QMAKE_LFLAGS_NOUNDEF .*', 'QMAKE_LFLAGS_NOUNDEF = ', - 'qtbase/mkspecs/common/g++-unix.conf') + # Intel's `ar` equivalent might not be in the path: replace it with + # explicit + xiar = os.path.join(os.path.dirname(self.compiler.cc), 'xiar') + filter_file(r'\bxiar\b', xiar, conf_file) + + if self.spec.satisfies('@4'): + with open(conf_file, 'a') as f: + f.write("QMAKE_CXXFLAGS += -std=gnu++98\n") @property def common_config_args(self): @@ -437,36 +465,12 @@ def common_config_args(self): config_args.append('-{0}framework'.format( '' if '+framework' in self.spec else 'no-')) - # Note: QT defaults to the following compilers - # QT4 mac: gcc - # QT5 mac: clang - # linux: gcc - # In QT4, unsupported compilers lived under an 'unsupported' - # subdirectory but are now in the main platform directory. - spec = self.spec - cname = spec.compiler.name - cname = self.compiler_mapping.get(cname, cname) - is_new_qt = spec.satisfies('@5:') - platform = None - if MACOS_VERSION: - if is_new_qt and cname != "clang-libc++": - platform = 'macx-' + cname - elif not is_new_qt and cname != "g++": - platform = 'unsupported/macx-' + cname - elif cname != 'g++': - if is_new_qt: - platform = 'linux-' + cname - else: - platform = 'unsupported/linux-' + cname - - if platform is not None: - config_args.extend(['-platform', platform]) + (_, qtplat) = self.get_mkspec() + if qtplat is not None: + config_args.extend(['-platform', qtplat]) return config_args - # Don't disable all the database drivers, but should - # really get them into spack at some point. - @when('@3') def configure(self, spec, prefix): # A user reported that this was necessary to link Qt3 on ubuntu. 
From ffdde40f56d48c18ca9c45b0599221ef1dab40a2 Mon Sep 17 00:00:00 2001 From: Eisuke Kawashima Date: Wed, 5 Feb 2020 12:05:19 +0900 Subject: [PATCH 086/178] Bump Open Babel to 3 (#14738) --- .../builtin/packages/openbabel/package.py | 23 ++++++++++++------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/var/spack/repos/builtin/packages/openbabel/package.py b/var/spack/repos/builtin/packages/openbabel/package.py index fff2ee2818a..c847c9c1eeb 100644 --- a/var/spack/repos/builtin/packages/openbabel/package.py +++ b/var/spack/repos/builtin/packages/openbabel/package.py @@ -12,18 +12,24 @@ class Openbabel(CMakePackage): search, convert, analyze, or store data from molecular modeling, chemistry, solid-state materials, biochemistry, or related areas.""" - homepage = "http://openbabel.org/wiki/Main_Page" - url = "https://sourceforge.net/projects/openbabel/files/openbabel/2.4.1/openbabel-2.4.1.tar.gz" + homepage = 'https://openbabel.org/wiki/Main_Page' + url = 'https://github.com/openbabel/openbabel/archive/openbabel-3-0-0.tar.gz' + git = 'https://github.com/openbabel/openbabel.git' - version('2.4.1', sha256='204136582cdfe51d792000b20202de8950218d617fd9c6e18cee36706a376dfc') + version('master', branch='master') + version('3.0.0', tag='openbabel-3-0-0') + version('2.4.1', tag='openbabel-2-4-1') + version('2.4.0', tag='openbabel-2-4-0') variant('python', default=True, description='Build Python bindings') extends('python', when='+python') depends_on('python', type=('build', 'run'), when='+python') - depends_on('cmake@2.4.8:', type='build') - depends_on('pkgconfig', type='build') + depends_on('cmake@3.1:', type='build') + depends_on('pkgconfig', type='build') + depends_on('swig@2.0:', type='build', when='+python') + depends_on('boost') depends_on('cairo') # required to support PNG depiction depends_on('eigen@3.0:') # required if using the language bindings depends_on('libxml2') # required to read/write CML files, XML formats @@ -43,6 +49,7 @@ def 
cmake_args(self): args.extend([ '-DPYTHON_BINDINGS=ON', '-DPYTHON_EXECUTABLE={0}'.format(spec['python'].command.path), + '-DRUN_SWIG=ON', ]) else: args.append('-DPYTHON_BINDINGS=OFF') @@ -56,6 +63,6 @@ def check_install(self): obabel('-:C1=CC=CC=C1Br', '-omol') if '+python' in self.spec: - # Attempt to import the Python modules - for module in ['openbabel', 'pybel']: - python('-c', 'import {0}'.format(module)) + python('-c', 'import openbabel') + if self.spec.version < Version('3.0.0'): + python('-c', 'import pybel') From 57cc7831bf23a66f454f7a92d032b533f9100c67 Mon Sep 17 00:00:00 2001 From: "Seth R. Johnson" Date: Wed, 5 Feb 2020 03:23:58 -0600 Subject: [PATCH 087/178] qt: fix on Intel (#14748) Follow up from #14745 --- var/spack/repos/builtin/packages/qt/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 48cdcc32600..9115f41a783 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -322,7 +322,7 @@ def conf(name): if self.spec.satisfies('@4'): # Necessary to build with GCC 6 and other modern compilers # http://stackoverflow.com/questions/10354371/ - with open(conf_file, 'a') as f: + with open(conf('gcc-base'), 'a') as f: f.write("QMAKE_CXXFLAGS += -std=gnu++98\n") @when('@4: %intel') From ba25bb3050d828fc408f2e28c324546604410857 Mon Sep 17 00:00:00 2001 From: Themos Tsikas Date: Wed, 5 Feb 2020 14:02:46 +0000 Subject: [PATCH 088/178] NAG Compiler 6.2 (Build 6252) download checksum (#14750) --- var/spack/repos/builtin/packages/nag/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/nag/package.py b/var/spack/repos/builtin/packages/nag/package.py index 8c4309198cb..d8818d0d580 100644 --- a/var/spack/repos/builtin/packages/nag/package.py +++ b/var/spack/repos/builtin/packages/nag/package.py @@ -13,7 +13,7 @@ class 
Nag(Package): maintainers = ['ThemosTsikas'] version('7.0', sha256='ea83075cde9e625083b85be04426b0536b2da32db3cfd0c3eb3f2cf8253a2030') - version('6.2', sha256='e22f70f52949c4eb5526229c13920c924af5254d07a584cf54fefecd130fd29c') + version('6.2', sha256='9b60f6ffa4f4be631079676963e74eea25e8824512e5c864eb06758b2a3cdd2d') version('6.1', sha256='32580e0004e6798abf1fa52f0070281b28abeb0da2387530a4cc41218e813c7c') # Licensing From bce4bec059da2de069e9c6aa5856ded2e4c7292e Mon Sep 17 00:00:00 2001 From: Cameron Smith Date: Wed, 5 Feb 2020 20:32:36 -0500 Subject: [PATCH 089/178] PUMI package: add version 2.2.2 (#14751) --- var/spack/repos/builtin/packages/pumi/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/pumi/package.py b/var/spack/repos/builtin/packages/pumi/package.py index f26c80670e9..62b771aa99c 100644 --- a/var/spack/repos/builtin/packages/pumi/package.py +++ b/var/spack/repos/builtin/packages/pumi/package.py @@ -26,6 +26,7 @@ class Pumi(CMakePackage): # scorec/core develop branch and we perfer not to expose spack users # to the added instability. version('master', submodules=True, branch='master') + version('2.2.2', commit='bc34e3f7cfd8ab314968510c71486b140223a68f') # tag 2.2.2 version('2.2.1', commit='cd826205db21b8439026db1f6af61a8ed4a18564') # tag 2.2.1 version('2.2.0', commit='8c7e6f13943893b2bc1ece15003e4869a0e9634f') # tag 2.2.0 version('2.1.0', commit='840fbf6ec49a63aeaa3945f11ddb224f6055ac9f') From 1a0c31703a0f98a7fa5d57c2de484b49a51c5f6c Mon Sep 17 00:00:00 2001 From: Michael Kuhn Date: Thu, 6 Feb 2020 02:45:17 +0100 Subject: [PATCH 090/178] acts-core package: build root with cxxstd=17 for 0.8.1 and newer (#14761) The build fails if root uses an older C++ standard. 
--- var/spack/repos/builtin/packages/acts-core/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/acts-core/package.py b/var/spack/repos/builtin/packages/acts-core/package.py index 16ecd155bc6..644397102ab 100644 --- a/var/spack/repos/builtin/packages/acts-core/package.py +++ b/var/spack/repos/builtin/packages/acts-core/package.py @@ -76,7 +76,7 @@ class ActsCore(CMakePackage): depends_on('eigen @3.2.9:', type='build') depends_on('nlohmann-json @3.2.0:', when='@0.14.0: +json') depends_on('root @6.10: cxxstd=14', when='+tgeo @:0.8.0') - depends_on('root @6.10:', when='+tgeo @0.8.1:') + depends_on('root @6.10: cxxstd=17', when='+tgeo @0.8.1:') depends_on('dd4hep @1.2:', when='+dd4hep') def cmake_args(self): From 1f6f81269637e691bc1c96433a8c04ba8dec5674 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Wed, 5 Feb 2020 20:37:08 -0600 Subject: [PATCH 091/178] Update llvm, adjust dependency specs and conflicts (#14561) This PR adds a new version of llvm and fixes the dependency specs. 
- This package depends on libtinfo in all cases so change the ncurses dependency to reflect that - if +lldb is in the spec but +python is not then do not build the lldb python support - build lldb python support only if +python is in the spec with +lldb - install the llvm python bindings if +python is in the spec - install the clang python bindings if +clang and +python are in the spec - Fixes for conflicts with ~clang - Fix typo in conflict of compiler-rt and flang --- .../repos/builtin/packages/llvm/package.py | 24 +++++++++++++------ 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py index f2356971d5e..05e9404592c 100644 --- a/var/spack/repos/builtin/packages/llvm/package.py +++ b/var/spack/repos/builtin/packages/llvm/package.py @@ -22,6 +22,7 @@ class Llvm(CMakePackage): family = 'compiler' # Used by lmod version('master', branch='master') + version('9.0.1', sha256='be7b034641a5fda51ffca7f5d840b1a768737779f75f7c4fd18fe2d37820289a') version('9.0.0', sha256='7807fac25330e24e9955ca46cd855dd34bbc9cc4fdba8322366206654d1036f2') version('8.0.0', sha256='d81238b4a69e93e29f74ce56f8107cbfcf0c7d7b40510b7879e98cc031e25167') version('7.1.0', sha256='71c93979f20e01f1a1cc839a247945f556fa5e63abf2084e8468b238080fd839') @@ -102,8 +103,10 @@ class Llvm(CMakePackage): # openmp dependencies depends_on('perl-data-dumper', type=('build')) + # ncurses dependency + depends_on('ncurses+termlib') + # lldb dependencies - depends_on('ncurses', when='+lldb') depends_on('swig', when='+lldb') depends_on('libedit', when='+lldb') depends_on('py-six', when='@5.0.0: +lldb +python') @@ -115,8 +118,11 @@ class Llvm(CMakePackage): depends_on('gmp', when='@:3.6.999 +polly') depends_on('isl', when='@:3.6.999 +polly') - conflicts('+clang_extra', when='~clang') - conflicts('+lldb', when='~clang') + conflicts('+clang_extra', when='~clang') + conflicts('+lldb', when='~clang') + 
conflicts('+libcxx', when='~clang') + conflicts('+internal_unwind', when='~clang') + conflicts('+compiler-rt', when='~clang') # LLVM 4 and 5 does not build with GCC 8 conflicts('%gcc@8:', when='@:5') @@ -194,6 +200,9 @@ def cmake_args(self): if '+python' in spec and '+lldb' in spec and spec.satisfies('@5.0.0:'): cmake_args.append('-DLLDB_USE_SYSTEM_SIX:Bool=TRUE') + if '~python' in spec and '+lldb' in spec: + cmake_args.append('-DLLDB_DISABLE_PYTHON:Bool=TRUE') + if '+gold' in spec: cmake_args.append('-DLLVM_BINUTILS_INCDIR=' + spec['binutils'].prefix.include) @@ -282,10 +291,11 @@ def pre_install(self): @run_after('install') def post_install(self): - if '+clang' in self.spec and '+python' in self.spec: - install_tree( - 'tools/clang/bindings/python/clang', - join_path(site_packages_dir, 'clang')) + if '+python' in self.spec: + install_tree('llvm/bindings/python', site_packages_dir) + + if '+clang' in self.spec: + install_tree('clang/bindings/python', site_packages_dir) with working_dir(self.build_directory): install_tree('bin', join_path(self.prefix, 'libexec', 'llvm')) From 58a905ec769a7521738e644130016966683a7d45 Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 15:46:13 +0800 Subject: [PATCH 092/178] ant: added versions up to v1.10.7 (#14764) --- var/spack/repos/builtin/packages/ant/package.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/var/spack/repos/builtin/packages/ant/package.py b/var/spack/repos/builtin/packages/ant/package.py index a0f2166185a..057a806b76f 100644 --- a/var/spack/repos/builtin/packages/ant/package.py +++ b/var/spack/repos/builtin/packages/ant/package.py @@ -15,6 +15,13 @@ class Ant(Package): homepage = "http://ant.apache.org/" url = "https://archive.apache.org/dist/ant/source/apache-ant-1.9.7-src.tar.gz" + version('1.10.7', sha256='2f9c4ef094581663b41a7412324f65b854f17622e5b2da9fcb9541ca8737bd52') + version('1.10.6', sha256='c641721ae844196b28780e7999d2ae886085b89433438ab797d531413a924311') + version('1.10.5', 
sha256='5937cf11d74d75d6e8927402950b012e037e362f9f728262ce432ad289b9f6ca') + version('1.10.4', sha256='b0718c6c1b2b8d3bc77cd1e30ea183cd7741cfb52222a97c754e02b8e38d1948') + version('1.10.3', sha256='988b0cac947559f7347f314b9a3dae1af0dfdcc254de56d1469de005bf281c5a') + version('1.10.2', sha256='f3cf217b9befae2fef7198b51911e33a8809d98887cc971c8957596f459c6285') + version('1.10.1', sha256='68f7ced0aa15d1f9f672f23d67c86deaf728e9576936313cfbff4f7a0e6ce382') version('1.10.0', sha256='1f78036c38753880e16fb755516c8070187a78fe4b2e99b59eda5b81b58eccaf') version('1.9.9', sha256='d6a0c93777ab27db36212d77c5733ac80d17fe24e83f947df23a8e0ad4ac48cc') version('1.9.8', sha256='5f4daf56e66fc7a71de772920ca27c15eac80cf1fcf41f3b4f2d535724942681') From 2dab92742dac4fc37742db12c20fe89f5afd717f Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 15:51:34 +0800 Subject: [PATCH 093/178] atop: added versions up to v2.5.0 (#14765) --- var/spack/repos/builtin/packages/atop/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/atop/package.py b/var/spack/repos/builtin/packages/atop/package.py index 608fbd28b8d..c06d27cbd7b 100644 --- a/var/spack/repos/builtin/packages/atop/package.py +++ b/var/spack/repos/builtin/packages/atop/package.py @@ -11,6 +11,10 @@ class Atop(Package): homepage = "http://www.atoptool.nl/index.php" url = "http://www.atoptool.nl/download/atop-2.2-3.tar.gz" + version('2.5.0', sha256='4b911057ce50463b6e8b3016c5963d48535c0cddeebc6eda817e292b22f93f33') + version('2.4.0', sha256='be1c010a77086b7d98376fce96514afcd73c3f20a8d1fe01520899ff69a73d69') + version('2.3.0', sha256='73e4725de0bafac8c63b032e8479e2305e3962afbe977ec1abd45f9e104eb264') + version('2.2.6', sha256='d0386840ee4df36e5d0ad55f144661b434d9ad35d94deadc0405b514485db615') version('2.2-3', sha256='c785b8a2355be28b3de6b58a8ea4c4fcab8fadeaa57a99afeb03c66fac8e055d') depends_on('zlib') From 0a92b547014439dfc77b71090742e9ae862b4249 Mon Sep 17 00:00:00 2001 From: darmac Date: 
Thu, 6 Feb 2020 16:56:01 +0800 Subject: [PATCH 094/178] coreutils: added v8.30 and v8.31 (#14770) --- var/spack/repos/builtin/packages/coreutils/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/coreutils/package.py b/var/spack/repos/builtin/packages/coreutils/package.py index 9dfcf3ce0f4..4aa938a4343 100644 --- a/var/spack/repos/builtin/packages/coreutils/package.py +++ b/var/spack/repos/builtin/packages/coreutils/package.py @@ -15,6 +15,8 @@ class Coreutils(AutotoolsPackage, GNUMirrorPackage): homepage = "http://www.gnu.org/software/coreutils/" gnu_mirror_path = "coreutils/coreutils-8.26.tar.xz" + version('8.31', sha256='ff7a9c918edce6b4f4b2725e3f9b37b0c4d193531cac49a48b56c4d0d3a9e9fd') + version('8.30', sha256='e831b3a86091496cdba720411f9748de81507798f6130adeaef872d206e1b057') version('8.29', sha256='92d0fa1c311cacefa89853bdb53c62f4110cdfda3820346b59cbd098f40f955e') version('8.26', sha256='155e94d748f8e2bc327c66e0cbebdb8d6ab265d2f37c3c928f7bf6c3beba9a8e') version('8.23', sha256='ec43ca5bcfc62242accb46b7f121f6b684ee21ecd7d075059bf650ff9e37b82d') From 29a906d20ce0f9ed69871a8106c866920f895bd2 Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 16:57:42 +0800 Subject: [PATCH 095/178] ghostscript: added v9.27 and v9.50 (#14772) --- var/spack/repos/builtin/packages/ghostscript/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/ghostscript/package.py b/var/spack/repos/builtin/packages/ghostscript/package.py index 498403f63ce..72f3e6f3e01 100644 --- a/var/spack/repos/builtin/packages/ghostscript/package.py +++ b/var/spack/repos/builtin/packages/ghostscript/package.py @@ -13,6 +13,8 @@ class Ghostscript(AutotoolsPackage): homepage = "http://ghostscript.com/" url = "https://github.com/ArtifexSoftware/ghostpdl-downloads/releases/download/gs926/ghostscript-9.26.tar.gz" + version('9.50', sha256='0f53e89fd647815828fc5171613e860e8535b68f7afbc91bf89aee886769ce89') + 
version('9.27', sha256='9760e8bdd07a08dbd445188a6557cb70e60ccb6a5601f7dbfba0d225e28ce285') version('9.26', sha256='831fc019bd477f7cc2d481dc5395ebfa4a593a95eb2fe1eb231a97e450d7540d') version('9.21', sha256='02bceadbc4dddeb6f2eec9c8b1623d945d355ca11b8b4df035332b217d58ce85') version('9.18', sha256='5fc93079749a250be5404c465943850e3ed5ffbc0d5c07e10c7c5ee8afbbdb1b') From 3b38a45a768724f93c143450dfe9a8be49f3d5c4 Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 16:58:46 +0800 Subject: [PATCH 096/178] gl2ps: added v1.4.0 (#14773) --- var/spack/repos/builtin/packages/gl2ps/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gl2ps/package.py b/var/spack/repos/builtin/packages/gl2ps/package.py index d2211f09baf..eed6ef31194 100644 --- a/var/spack/repos/builtin/packages/gl2ps/package.py +++ b/var/spack/repos/builtin/packages/gl2ps/package.py @@ -13,6 +13,7 @@ class Gl2ps(CMakePackage): homepage = "http://www.geuz.org/gl2ps/" url = "http://geuz.org/gl2ps/src/gl2ps-1.3.9.tgz" + version('1.4.0', sha256='03cb5e6dfcd87183f3b9ba3b22f04cd155096af81e52988cc37d8d8efe6cf1e2') version('1.3.9', sha256='8a680bff120df8bcd78afac276cdc38041fed617f2721bade01213362bcc3640') variant('png', default=True, description='Enable PNG support') From c2460dbcd27909f564242d5d064368ad68857144 Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 16:59:51 +0800 Subject: [PATCH 097/178] cln: added v1.3.5 and v1.3.6 (#14768) --- var/spack/repos/builtin/packages/cln/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/cln/package.py b/var/spack/repos/builtin/packages/cln/package.py index ba95fa78a92..e6374ecaf51 100644 --- a/var/spack/repos/builtin/packages/cln/package.py +++ b/var/spack/repos/builtin/packages/cln/package.py @@ -16,6 +16,8 @@ class Cln(AutotoolsPackage): homepage = "https://www.ginac.de/CLN/" git = "git://www.ginac.de/cln.git" + version('1.3.6', 
commit='d4ba1cc869be2c647c4ab48ac571b1fc9c2021a9') + version('1.3.5', commit='b221c033c082b462455502b7e63702a9c466aede') version('1.3.4', commit='9b86a7fc69feb1b288469982001af565f73057eb') version('1.3.3', commit='1c9bd61ff0b89b0bf8030e44cb398e8f75112222') version('1.3.2', commit='00817f7b60a961b860f6d305ac82dd51b70d6ba6') From 588c87c665d0d43824b9d80602edabe56eb46a51 Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 17:03:08 +0800 Subject: [PATCH 098/178] blis: added v0.6.1 (#14766) --- var/spack/repos/builtin/packages/blis/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/blis/package.py b/var/spack/repos/builtin/packages/blis/package.py index e901c5f755b..6d9d53a2a45 100644 --- a/var/spack/repos/builtin/packages/blis/package.py +++ b/var/spack/repos/builtin/packages/blis/package.py @@ -27,6 +27,7 @@ class Blis(Package): git = "https://github.com/flame/blis.git" version('master', branch='master') + version('0.6.1', sha256='76b22f29b7789cf117c0873d2a6b2a6d61f903869168148f2e7306353c105c37') version('0.6.0', sha256='ad5765cc3f492d0c663f494850dafc4d72f901c332eb442f404814ff2995e5a9') version('0.5.0', sha256='1a004d69c139e8a0448c6a6007863af3a8c3551b8d9b8b73fe08e8009f165fa8') version('0.4.0', sha256='9c7efd75365a833614c01b5adfba93210f869d92e7649e0b5d9edc93fc20ea76') From bb0b88f38a6cb90368449076a0faaeea70ce719d Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 17:33:16 +0800 Subject: [PATCH 099/178] enchant: added v2.2.6 and v2.2.7 (#14775) --- var/spack/repos/builtin/packages/enchant/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/enchant/package.py b/var/spack/repos/builtin/packages/enchant/package.py index c5903abc4bb..4592a2a6714 100644 --- a/var/spack/repos/builtin/packages/enchant/package.py +++ b/var/spack/repos/builtin/packages/enchant/package.py @@ -15,6 +15,8 @@ class Enchant(AutotoolsPackage): homepage = "https://abiword.github.io/enchant/" url = 
"https://github.com/AbiWord/enchant/releases/download/v2.2.5/enchant-2.2.5.tar.gz" + version('2.2.7', sha256='1b22976135812b35cb5b8d21a53ad11d5e7c1426c93f51e7a314a2a42cab3a09') + version('2.2.6', sha256='8048c5bd26190b21279745cfecd05808c635bc14912e630340cd44a49b87d46d') version('2.2.5', sha256='ffce4ea00dbda1478d91c3e1538cadfe5761d9d6c0ceb27bc3dba51882fe1c47') version('2.2.4', sha256='f5d6b689d23c0d488671f34b02d07b84e408544b2f9f6e74fb7221982b1ecadc') version('2.2.3', sha256='abd8e915675cff54c0d4da5029d95c528362266557c61c7149d53fa069b8076d') From f745b790f36bccd919b70f7e886a1180924378e5 Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 17:34:04 +0800 Subject: [PATCH 100/178] gnuplot: added v5.2.8 (#14776) --- var/spack/repos/builtin/packages/gnuplot/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/gnuplot/package.py b/var/spack/repos/builtin/packages/gnuplot/package.py index d5e35f3d65e..17ee973eee9 100644 --- a/var/spack/repos/builtin/packages/gnuplot/package.py +++ b/var/spack/repos/builtin/packages/gnuplot/package.py @@ -26,6 +26,7 @@ class Gnuplot(AutotoolsPackage): # dependency of readline. 
Fix it with a small patch patch('term_include.patch') + version('5.2.8', sha256='60a6764ccf404a1668c140f11cc1f699290ab70daa1151bb58fed6139a28ac37') version('5.2.7', sha256='97fe503ff3b2e356fe2ae32203fc7fd2cf9cef1f46b60fe46dc501a228b9f4ed') version('5.2.5', sha256='039db2cce62ddcfd31a6696fe576f4224b3bc3f919e66191dfe2cdb058475caa') version('5.2.2', sha256='a416d22f02bdf3873ef82c5eb7f8e94146795811ef808e12b035ada88ef7b1a1') From f66f56287dc0221abe5423b49d28cf968c8ed745 Mon Sep 17 00:00:00 2001 From: iarspider Date: Thu, 6 Feb 2020 10:40:56 +0100 Subject: [PATCH 101/178] geant4: add missing dependency on geant4-data (#14767) --- var/spack/repos/builtin/packages/geant4/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py index 31aaba70d09..6fb23184797 100644 --- a/var/spack/repos/builtin/packages/geant4/package.py +++ b/var/spack/repos/builtin/packages/geant4/package.py @@ -71,6 +71,7 @@ class Geant4(CMakePackage): depends_on('geant4-data@10.03.p03', when='@10.03.p03 ~data') depends_on('geant4-data@10.04', when='@10.04 ~data') + depends_on('geant4-data@10.05.p01', when='@10.05.p01 ~data') # As released, 10.03.03 has issues with respect to using external # CLHEP. 
From 1a846abbe8a6b709d714d3902f301c1ba7d9463a Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 17:54:10 +0800 Subject: [PATCH 102/178] go: added v1.13.7 (#14777) --- var/spack/repos/builtin/packages/go/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py index f60189b8b1c..f2d0e94f877 100644 --- a/var/spack/repos/builtin/packages/go/package.py +++ b/var/spack/repos/builtin/packages/go/package.py @@ -35,6 +35,7 @@ class Go(Package): extendable = True + version('1.13.7', sha256='e4ad42cc5f5c19521fbbbde3680995f2546110b5c6aa2b48c3754ff7af9b41f4') version('1.13.6', sha256='aae5be954bdc40bcf8006eb77e8d8a5dde412722bc8effcdaf9772620d06420c') version('1.13.5', sha256='27d356e2a0b30d9983b60a788cf225da5f914066b37a6b4f69d457ba55a626ff') version('1.13.4', sha256='95dbeab442ee2746b9acf0934c8e2fc26414a0565c008631b04addb8c02e7624') From 88289cd2c7cab72a700087b0dcc6d1222190f947 Mon Sep 17 00:00:00 2001 From: darmac Date: Thu, 6 Feb 2020 18:12:10 +0800 Subject: [PATCH 103/178] graphicsmagick: added versions up to v1.3.34 (#14778) --- var/spack/repos/builtin/packages/graphicsmagick/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/graphicsmagick/package.py b/var/spack/repos/builtin/packages/graphicsmagick/package.py index c98efcfce7f..5e32b05e142 100644 --- a/var/spack/repos/builtin/packages/graphicsmagick/package.py +++ b/var/spack/repos/builtin/packages/graphicsmagick/package.py @@ -18,6 +18,11 @@ class Graphicsmagick(AutotoolsPackage): homepage = "http://www.graphicsmagick.org/" url = "https://sourceforge.net/projects/graphicsmagick/files/graphicsmagick/1.3.29/GraphicsMagick-1.3.29.tar.xz/download" + version('1.3.34', sha256='df009d5173ed0d6a0c6457234256c5a8aeaace782afa1cbab015d5a12bd4f7a4') + version('1.3.33', sha256='130cb330a633580b5124eba5c125bbcbc484298423a97b9bed37ccd50d6dc778') + version('1.3.32', 
sha256='b842a5a0d6c84fd6c5f161b5cd8e02bbd210b0c0b6728dd762b7c53062ba94e1') + version('1.3.31', sha256='096bbb59d6f3abd32b562fc3b34ea90d88741dc5dd888731d61d17e100394278') + version('1.3.30', sha256='d965e5c6559f55eec76c20231c095d4ae682ea0cbdd8453249ae8771405659f1') version('1.3.29', sha256='e18df46a6934c8c12bfe274d09f28b822f291877f9c81bd9a506f879a7610cd4') depends_on('bzip2') From e62c82de7f7c1e9fdd2711d1c9c7b049ad6f5026 Mon Sep 17 00:00:00 2001 From: Figroc Chen Date: Thu, 6 Feb 2020 20:20:37 +0800 Subject: [PATCH 104/178] grpc: added v1.25.0, v1.26.0 and v1.27.0 (#14781) --- var/spack/repos/builtin/packages/grpc/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/grpc/package.py b/var/spack/repos/builtin/packages/grpc/package.py index 839cd3b2ef5..8cbfe773289 100644 --- a/var/spack/repos/builtin/packages/grpc/package.py +++ b/var/spack/repos/builtin/packages/grpc/package.py @@ -9,8 +9,11 @@ class Grpc(CMakePackage): """A high performance, open-source universal RPC framework.""" homepage = "https://grpc.io" - url = "https://github.com/grpc/grpc/archive/v1.24.3.tar.gz" + url = "https://github.com/grpc/grpc/archive/v1.27.0.tar.gz" + version('1.27.0', sha256='3ccc4e5ae8c1ce844456e39cc11f1c991a7da74396faabe83d779836ef449bce') + version('1.26.0', sha256='2fcb7f1ab160d6fd3aaade64520be3e5446fc4c6fa7ba6581afdc4e26094bd81') + version('1.25.0', sha256='ffbe61269160ea745e487f79b0fd06b6edd3d50c6d9123f053b5634737cf2f69') version('1.24.3', sha256='c84b3fa140fcd6cce79b3f9de6357c5733a0071e04ca4e65ba5f8d306f10f033') version('1.23.1', sha256='dd7da002b15641e4841f20a1f3eb1e359edb69d5ccf8ac64c362823b05f523d9') From 759f6b6d13c909d46f0f6fa56b0e87cb498fcd20 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Thu, 6 Feb 2020 14:58:51 +0100 Subject: [PATCH 105/178] Added optimized version of Blis for AMD (#14780) The Blis package has been refactored to be able to reuse the build logic for the fork of the project optimized 
for AMD. --- .../repos/builtin/packages/amdblis/package.py | 22 ++++++ .../repos/builtin/packages/blis/package.py | 68 ++++++++++--------- 2 files changed, 59 insertions(+), 31 deletions(-) create mode 100644 var/spack/repos/builtin/packages/amdblis/package.py diff --git a/var/spack/repos/builtin/packages/amdblis/package.py b/var/spack/repos/builtin/packages/amdblis/package.py new file mode 100644 index 00000000000..5b216bebd7d --- /dev/null +++ b/var/spack/repos/builtin/packages/amdblis/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) +from spack.pkg.builtin.blis import BlisBase + + +class Amdblis(BlisBase): + """AMD Optimized BLIS. + + BLIS is a portable software framework for instantiating high-performance + BLAS-like dense linear algebra libraries. The framework was designed to + isolate essential kernels of computation that, when optimized, immediately + enable optimized implementations of most of its commonly used and + computationally intensive operations. + """ + + homepage = "https://developer.amd.com/amd-aocl/blas-library/" + url = "https://github.com/amd/blis/archive/2.1.tar.gz" + git = "https://github.com/amd/blis.git" + + version('2.1', sha256='3b1d611d46f0f13b3c0917e27012e0f789b23dbefdddcf877b20327552d72fb3') diff --git a/var/spack/repos/builtin/packages/blis/package.py b/var/spack/repos/builtin/packages/blis/package.py index 6d9d53a2a45..ea7ca8ff428 100644 --- a/var/spack/repos/builtin/packages/blis/package.py +++ b/var/spack/repos/builtin/packages/blis/package.py @@ -3,39 +3,16 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * - # Although this looks like an Autotools package, it's not one. 
Refer to: # https://github.com/flame/blis/issues/17 # https://github.com/flame/blis/issues/195 # https://github.com/flame/blis/issues/197 -class Blis(Package): - """BLIS is a portable software framework for instantiating high-performance - BLAS-like dense linear algebra libraries. The framework was designed to - isolate essential kernels of computation that, when optimized, immediately - enable optimized implementations of most of its commonly used and - computationally intensive operations. BLIS is written in ISO C99 and - available under a new/modified/3-clause BSD license. While BLIS exports a - new BLAS-like API, it also includes a BLAS compatibility layer which gives - application developers access to BLIS implementations via traditional BLAS - routine calls. An object-based API unique to BLIS is also available.""" - - homepage = "https://github.com/flame/blis" - url = "https://github.com/flame/blis/archive/0.4.0.tar.gz" - git = "https://github.com/flame/blis.git" - - version('master', branch='master') - version('0.6.1', sha256='76b22f29b7789cf117c0873d2a6b2a6d61f903869168148f2e7306353c105c37') - version('0.6.0', sha256='ad5765cc3f492d0c663f494850dafc4d72f901c332eb442f404814ff2995e5a9') - version('0.5.0', sha256='1a004d69c139e8a0448c6a6007863af3a8c3551b8d9b8b73fe08e8009f165fa8') - version('0.4.0', sha256='9c7efd75365a833614c01b5adfba93210f869d92e7649e0b5d9edc93fc20ea76') - version('0.3.2', sha256='b87e42c73a06107d647a890cbf12855925777dc7124b0c7698b90c5effa7f58f') - version('0.3.1', sha256='957f28d47c5cf71ffc62ce8cc1277e17e44d305b1c2fa8506b0b55617a9f28e4') - version('0.3.0', sha256='d34d17df7bdc2be8771fe0b7f867109fd10437ac91e2a29000a4a23164c7f0da') - version('0.2.2', sha256='4a7ecb56034fb20e9d1d8b16e2ef587abbc3d30cb728e70629ca7e795a7998e8') - +class BlisBase(Package): + """Base class for building BLIS, shared with the AMD optimized version + of the library in the 'amdblis' package. 
+ """ depends_on('python@2.7:2.8,3.4:', type=('build', 'run')) variant( @@ -74,10 +51,6 @@ class Blis(Package): provides('blas', when="+blas") provides('blas', when="+cblas") - # Problems with permissions on installed libraries: - # https://github.com/flame/blis/issues/343 - patch('Makefile_0.6.0.patch', when='@0.4.0:0.6.0') - phases = ['configure', 'build', 'install'] def configure(self, spec, prefix): @@ -128,3 +101,36 @@ def darwin_fix(self): # The shared library is not installed correctly on Darwin; fix this if self.spec.satisfies('platform=darwin'): fix_darwin_install_name(self.prefix.lib) + + +class Blis(BlisBase): + """BLIS is a portable software framework for instantiating high-performance + BLAS-like dense linear algebra libraries. + + The framework was designed to isolate essential kernels of computation + that, when optimized, immediately enable optimized implementations of + most of its commonly used and computationally intensive operations. BLIS + is written in ISO C99 and available under a new/modified/3-clause BSD + license. While BLIS exports a new BLAS-like API, it also includes a + BLAS compatibility layer which gives application developers access to + BLIS implementations via traditional BLAS routine calls. + An object-based API unique to BLIS is also available. 
+ """ + + homepage = "https://github.com/flame/blis" + url = "https://github.com/flame/blis/archive/0.4.0.tar.gz" + git = "https://github.com/flame/blis.git" + + version('master', branch='master') + version('0.6.1', sha256='76b22f29b7789cf117c0873d2a6b2a6d61f903869168148f2e7306353c105c37') + version('0.6.0', sha256='ad5765cc3f492d0c663f494850dafc4d72f901c332eb442f404814ff2995e5a9') + version('0.5.0', sha256='1a004d69c139e8a0448c6a6007863af3a8c3551b8d9b8b73fe08e8009f165fa8') + version('0.4.0', sha256='9c7efd75365a833614c01b5adfba93210f869d92e7649e0b5d9edc93fc20ea76') + version('0.3.2', sha256='b87e42c73a06107d647a890cbf12855925777dc7124b0c7698b90c5effa7f58f') + version('0.3.1', sha256='957f28d47c5cf71ffc62ce8cc1277e17e44d305b1c2fa8506b0b55617a9f28e4') + version('0.3.0', sha256='d34d17df7bdc2be8771fe0b7f867109fd10437ac91e2a29000a4a23164c7f0da') + version('0.2.2', sha256='4a7ecb56034fb20e9d1d8b16e2ef587abbc3d30cb728e70629ca7e795a7998e8') + + # Problems with permissions on installed libraries: + # https://github.com/flame/blis/issues/343 + patch('Makefile_0.6.0.patch', when='@0.4.0:0.6.0') From f7e2e84647218935e5a20cac7992787d68322024 Mon Sep 17 00:00:00 2001 From: Benoist LAURENT Date: Thu, 6 Feb 2020 16:18:16 +0100 Subject: [PATCH 106/178] Update package.py (#14784) Fix download link --- var/spack/repos/builtin/packages/subread/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/subread/package.py b/var/spack/repos/builtin/packages/subread/package.py index 1a588458d1a..32d8e9abf12 100644 --- a/var/spack/repos/builtin/packages/subread/package.py +++ b/var/spack/repos/builtin/packages/subread/package.py @@ -12,7 +12,7 @@ class Subread(MakefilePackage): sequencing data.""" homepage = "http://subread.sourceforge.net/" - url = "https://downloads.sourceforge.net/project/subread/subread-1.5.2/subread-1.5.2-source.tar.gz" + url = 
"https://iweb.dl.sourceforge.net/project/subread/subread-1.5.2/subread-1.5.2-source.tar.gz" version('2.0.0', sha256='bd7b45f7d8872b0f5db5d23a385059f21d18b49e432bcb6e3e4a879fe51b41a8') version('1.6.4', sha256='b7bd0ee3b0942d791aecce6454d2f3271c95a010beeeff2daf1ff71162e43969') From 6b1958219867f6fd140557caf753280e6d97a3a5 Mon Sep 17 00:00:00 2001 From: Robert Mijakovic Date: Thu, 6 Feb 2020 16:20:01 +0100 Subject: [PATCH 107/178] =?UTF-8?q?update=20version:=20intel=20packages=20?= =?UTF-8?q?daal,=20ipp,=20mkl-dnn,=20mkl,=20mpi,=20parallel=E2=80=A6=20(#1?= =?UTF-8?q?4783)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * update version: intel packages daal, ipp, mkl-dnn, mkl, mpi, parallel-studio, pin, tbb and makes url parameter consistent and always use single quote. * Fixes a typo with one of the sha256 checksum.. --- .../builtin/packages/intel-daal/package.py | 37 ++++++++++-------- .../builtin/packages/intel-ipp/package.py | 16 +++++--- .../builtin/packages/intel-mkl-dnn/package.py | 20 ++++++++++ .../builtin/packages/intel-mkl/package.py | 38 ++++++++++--------- .../builtin/packages/intel-mpi/package.py | 8 +++- .../packages/intel-parallel-studio/package.py | 6 +++ .../builtin/packages/intel-pin/package.py | 3 +- .../builtin/packages/intel-tbb/package.py | 5 ++- 8 files changed, 91 insertions(+), 42 deletions(-) diff --git a/var/spack/repos/builtin/packages/intel-daal/package.py b/var/spack/repos/builtin/packages/intel-daal/package.py index fd7f88102be..9d5ab0f2eff 100644 --- a/var/spack/repos/builtin/packages/intel-daal/package.py +++ b/var/spack/repos/builtin/packages/intel-daal/package.py @@ -12,35 +12,40 @@ class IntelDaal(IntelPackage): homepage = "https://software.intel.com/en-us/daal" version('2020.0.166', sha256='695166c9ab32ac5d3006d6d35162db3c98734210507144e315ed7c3b7dbca9c1', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16234/l_daal_2020.0.166.tgz") + 
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16234/l_daal_2020.0.166.tgz') + version('2019.5.281', sha256='e92aaedbe35c9daf1c9483260cb2363da8a85fa1aa5566eb38cf4b1f410bc368', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15818/l_daal_2019.5.281.tgz') + version('2019.4.243', sha256='c74486a555ca5636c2ac1b060d5424726c022468f3ee0898bb46e333cda6f7b8', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15552/l_daal_2019.4.243.tgz') version('2019.3.199', sha256='1f7d9cdecc1091b03f1ee6303fc7566179d1e3f1813a98ef7a6239f7d456b8ef', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15277/l_daal_2019.3.199.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15277/l_daal_2019.3.199.tgz') + version('2019.2.187', sha256='2982886347e9376e892a5c4e22fa1d4b7b843e1ae988a107dd2d0a639f257765', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15097/l_daal_2019.2.187.tgz') version('2019.1.144', sha256='1672afac568c93e185283cf7e044d511381092ebc95d7204c4dccb83cc493197', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14869/l_daal_2019.1.144.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14869/l_daal_2019.1.144.tgz') version('2019.0.117', sha256='85ac8e983bc9b9cc635e87cb4ec775ffd3695e44275d20fdaf53c19ed280d69f', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13577/l_daal_2019.0.117.tgz") - # Doesn't appear to be a 2018.4 update yet + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13577/l_daal_2019.0.117.tgz') version('2018.3.222', sha256='378fec529a36508dd97529037e1164ff98e0e062a9a47ede99ccf9e91493d1e2', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13007/l_daal_2018.3.222.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13007/l_daal_2018.3.222.tgz') version('2018.2.199', 
sha256='cee30299b3ffaea515f5a9609f4df0f644579c8a1ba2b61747b390f6caf85b14', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12727/l_daal_2018.2.199.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12727/l_daal_2018.2.199.tgz') version('2018.1.163', sha256='ac96b5a6c137cda18817d9b3505975863f8f53347225ebb6ccdaaf4bdb8dc349', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12414/l_daal_2018.1.163.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12414/l_daal_2018.1.163.tgz') version('2018.0.128', sha256='d13a7cd1b6779971f2ba46797447de9409c98a4d2f0eb0dc9622d9d63ac8990f', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12072/l_daal_2018.0.128.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12072/l_daal_2018.0.128.tgz') version('2017.4.239', sha256='cc4b608f59f3b2fafee16389102a763d27c46f6d136a6cfa89847418a8ea7460', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12148/l_daal_2017.4.239.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12148/l_daal_2017.4.239.tgz') version('2017.3.196', sha256='cfa863f342dd1c5fe8f1c7b6fd69589140370fc92742a19d82c8594e4e1e46ce', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11546/l_daal_2017.3.196.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11546/l_daal_2017.3.196.tgz') version('2017.2.174', sha256='5ee838b08d4cda7fc3e006e1deeed41671cbd7cfd11b64ec3b762c94dfc2b660', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11308/l_daal_2017.2.174.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11308/l_daal_2017.2.174.tgz') version('2017.1.132', sha256='6281105d3947fc2860e67401ea0218198cc4753fd2d4b513528a89143248e4f3', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/10983/l_daal_2017.1.132.tgz") + 
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/10983/l_daal_2017.1.132.tgz') version('2017.0.098', sha256='a7064425653b4f5f0fe51e25358d267d8ae023179eece61e08da891b67d79fe5', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9664/l_daal_2017.0.098.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9664/l_daal_2017.0.098.tgz') version('2016.3.210', sha256='367eaef21ea0143c11ae3fd56cd2a05315768c059e14caa15894bcf96853687c', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9099/l_daal_2016.3.210.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9099/l_daal_2016.3.210.tgz') version('2016.2.181', sha256='afdb65768957784d28ac537b4933a86eb4193c68a636157caed17b29ccdbfacb', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8687/l_daal_2016.2.181.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8687/l_daal_2016.2.181.tgz') provides('daal') diff --git a/var/spack/repos/builtin/packages/intel-ipp/package.py b/var/spack/repos/builtin/packages/intel-ipp/package.py index c29c703fd73..1727e488be9 100644 --- a/var/spack/repos/builtin/packages/intel-ipp/package.py +++ b/var/spack/repos/builtin/packages/intel-ipp/package.py @@ -13,8 +13,14 @@ class IntelIpp(IntelPackage): version('2020.0.166', sha256='6844007892ba524e828f245355cee44e8149f4c233abbbea16f7bb55a7d6ecff', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16233/l_ipp_2020.0.166.tgz') + version('2019.5.281', sha256='61d1e1da1a4a50f1cf02a3ed44e87eed05e94d58b64ef1e67a3bdec363bee713', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15817/l_ipp_2019.5.281.tgz') + version('2019.4.243', sha256='d4f4232323e66b010d8440c75189aeb6a3249966e05035242b21982238a7a7f2', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15541/l_ipp_2019.4.243.tgz') version('2019.3.199', 
sha256='02545383206c1ae4dd66bfa6a38e2e14480ba11932eeed632df8ab798aa15ccd', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15276/l_ipp_2019.3.199.tgz') + version('2019.2.187', sha256='280e9081278a0db3892fe82474c1201ec780a6f7c8d1f896494867f4b3bd8421', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15096/l_ipp_2019.2.187.tgz') version('2019.1.144', sha256='1eb7cd0fba74615aeafa4e314c645414497eb73f1705200c524fe78f00620db3', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14887/l_ipp_2019.1.144.tgz') version('2019.0.117', sha256='d552ba49fba58f0e94da2048176f21c5dfd490dca7c5ce666dfc2d18db7fd551', @@ -28,15 +34,15 @@ class IntelIpp(IntelPackage): version('2018.0.128', sha256='da568ceec1b7acbcc8f666b73d4092788b037b1b03c0436974b82155056ed166', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12071/l_ipp_2018.0.128.tgz') version('2017.3.196', sha256='50d49a1000a88a8a58bd610466e90ae28d07a70993a78cbbf85d44d27c4232b6', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11545/l_ipp_2017.3.196.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11545/l_ipp_2017.3.196.tgz') version('2017.2.174', sha256='92f866c9dce8503d7e04223ec35f281cfeb0b81cf94208c3becb11aacfda7b99', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11307/l_ipp_2017.2.174.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11307/l_ipp_2017.2.174.tgz') version('2017.1.132', sha256='2908bdeab3057d4ebcaa0b8ff5b00eb47425d35961a96f14780be68554d95376', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11031/l_ipp_2017.1.132.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11031/l_ipp_2017.1.132.tgz') version('2017.0.098', sha256='7633d16e2578be64533892336c8a15c905139147b0f74eaf9f281358ad7cdcba', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9663/l_ipp_2017.0.098.tgz") + 
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9663/l_ipp_2017.0.098.tgz') # built from parallel_studio_xe_2016.3.067 version('9.0.3.210', sha256='8ce7bf17f4a0bbf8c441063de26be7f6e0f6179789e23f24eaa8b712632b3cdd', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9067/l_ipp_9.0.3.210.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9067/l_ipp_9.0.3.210.tgz') provides('ipp') diff --git a/var/spack/repos/builtin/packages/intel-mkl-dnn/package.py b/var/spack/repos/builtin/packages/intel-mkl-dnn/package.py index 0a06327bfa6..ab3a7cde461 100644 --- a/var/spack/repos/builtin/packages/intel-mkl-dnn/package.py +++ b/var/spack/repos/builtin/packages/intel-mkl-dnn/package.py @@ -15,7 +15,27 @@ class IntelMklDnn(CMakePackage): maintainers = ['adamjstewart'] + version('1.2', sha256='30979a09753e8e35d942446c3778c9f0eba543acf2fb0282af8b9c89355d0ddf') + version('1.1.3', sha256='0e9bcbc86cc215a84a5455a395ce540c68e255eaa586e37222fff622b9b17df7') + version('1.1.2', sha256='284b20e0cab67025bb7d21317f805d6217ad77fb3a47ad84b3bacf37bde62da9') version('1.1.1', sha256='a31b08a89473bfe3bd6ed542503336d21b4177ebe4ccb9a97810808f634db6b6') + version('1.1', sha256='c5aac67e5ed4d95fe9943f835df49bbe6d608507780787c64aa620bdbd2171ba') + version('1.0.4', sha256='2a3ca90a8b690e65ddd0ccc95a09818e6da439cc854d014367645fcfd58a9690') + version('1.0.3', sha256='e0de341bd0bbebde7637e69383899ba415ce67682ff2f0f3d5a0d268e1bea69b') + version('1.0.2', sha256='3164eb2914e2160ac6ffd345781cf7554ce410830398cc6b2761e8668faf5ca8') + version('1.0.1', sha256='91fb84601c18f8a5a87eccd7b63d61f03495f36c5c533bd7f59443e4f8bb2595') + version('1.0', sha256='27fd9da9720c452852f1226581e7914efcf74e1ff898468fdcbe1813528831ba') + version('0.21.3', sha256='31e78581e59d7e60d4becaba3834fc6a5bf2dccdae3e16b7f70d89ceab38423f') + version('0.21.2', sha256='ed56652dd237deb86ee9bf102c18de5f2625c059e5ab1d7512c8dc01e316b694') + version('0.21.1', 
sha256='766ecfa5ac68be8cf9eacd4c712935c0ed945e5e6fe51640f05ee735cff62a38') + version('0.21', sha256='eb0aff133134898cf173d582a90e39b90ea9ea59544de7914208c2392b51a15f') + version('0.20.6', sha256='74675e93eef339ff3d9a9be95c15d0c7ad8736a5356c23428ab2e33dcdb8e3e1') + version('0.20.5', sha256='081d9f853c00fe0b597c8f00f2f3ff8d79c2a9cb95f292ff2c90557709763021') + version('0.20.4', sha256='b6422a000a6754334bdae673c25f84efd95e6d3cd016b752145b9391dc13e729') + version('0.20.3', sha256='a198a9bd3c584607e6a467f780beca92c8411cd656fcc8ec6fa5abe73d4af823') + version('0.20.2', sha256='1ae0e8a1a3df58deadc08ca0a01f8d3720600b26ca9e53685493e8e8250243b2') + version('0.20.1', sha256='26f720ed912843ba293e8a1e0822fe5318e93c529d80c87af1cf555d68e642d0') + version('0.20', sha256='52e111fefbf5a38e36f7bae7646860f7cbc985eba0725768f3fee8cdb31a9977') version('0.19', sha256='ba39da6adb263df05c4ca2a120295641fc97be75b588922e4274cb628dbe1dcd') version('0.18.1', sha256='fc7506701dfece9b03c0dc83d0cda9a44a5de17cdb54bc7e09168003f02dbb70') version('0.11', sha256='4cb4a85b05fe42aa527fd70a048caddcba9361f6d3d7bea9f33d74524e206d7d') diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py index 13831971a63..b6d8c38dcb2 100644 --- a/var/spack/repos/builtin/packages/intel-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-mkl/package.py @@ -14,41 +14,45 @@ class IntelMkl(IntelPackage): homepage = "https://software.intel.com/en-us/intel-mkl" version('2020.0.166', sha256='f6d92deb3ff10b11ba3df26b2c62bb4f0f7ae43e21905a91d553e58f0f5a8ae0', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16232/l_mkl_2020.0.166.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16232/l_mkl_2020.0.166.tgz') version('2019.5.281', sha256='9995ea4469b05360d509c9705e9309dc983c0a10edc2ae3a5384bc837326737e', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15816/l_mkl_2019.5.281.tgz") + 
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15816/l_mkl_2019.5.281.tgz') + version('2019.4.243', sha256='fcac7b0369665d93f0c4dd98afe2816aeba5410e2b760655fe55fc477f8f33d0', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15540/l_mkl_2019.4.243.tgz') version('2019.3.199', sha256='06de2b54f4812e7c39a118536259c942029fe1d6d8918ad9df558a83c4162b8f', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15275/l_mkl_2019.3.199.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15275/l_mkl_2019.3.199.tgz') + version('2019.2.187', sha256='2bf004e6b5adb4f956993d6c20ea6ce289bb630314dd501db7f2dd5b9978ed1d', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15095/l_mkl_2019.2.187.tgz') version('2019.1.144', sha256='5205a460a9c685f7a442868367389b2d0c25e1455346bc6a37c5b8ff90a20fbb', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14895/l_mkl_2019.1.144.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14895/l_mkl_2019.1.144.tgz') version('2019.0.117', sha256='4e1fe2c705cfc47050064c0d6c4dee1a8c6740ac1c4f64dde9c7511c4989c7ad', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13575/l_mkl_2019.0.117.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13575/l_mkl_2019.0.117.tgz') version('2018.4.274', sha256='18eb3cde3e6a61a88f25afff25df762a560013f650aaf363f7d3d516a0d04881', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13725/l_mkl_2018.4.274.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13725/l_mkl_2018.4.274.tgz') version('2018.3.222', sha256='108d59c0927e58ce8c314db6c2b48ee331c3798f7102725f425d6884eb6ed241', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13005/l_mkl_2018.3.222.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13005/l_mkl_2018.3.222.tgz') version('2018.2.199', 
sha256='e28d12173bef9e615b0ded2f95f59a42b3e9ad0afa713a79f8801da2bfb31936', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12725/l_mkl_2018.2.199.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12725/l_mkl_2018.2.199.tgz') version('2018.1.163', sha256='f6dc263fc6f3c350979740a13de1b1e8745d9ba0d0f067ece503483b9189c2ca', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12414/l_mkl_2018.1.163.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12414/l_mkl_2018.1.163.tgz') version('2018.0.128', sha256='c368baa40ca88057292512534d7fad59fa24aef06da038ea0248e7cd1e280cec', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12070/l_mkl_2018.0.128.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12070/l_mkl_2018.0.128.tgz') version('2017.4.239', sha256='dcac591ed1e95bd72357fd778edba215a7eab9c6993236373231cc16c200c92a', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12147/l_mkl_2017.4.239.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/12147/l_mkl_2017.4.239.tgz') version('2017.3.196', sha256='fd7295870fa164d6138c9818304f25f2bb263c814a6c6539c9fe4e104055f1ca', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11544/l_mkl_2017.3.196.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11544/l_mkl_2017.3.196.tgz') version('2017.2.174', sha256='0b8a3fd6bc254c3c3d9d51acf047468c7f32bf0baff22aa1e064d16d9fea389f', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11306/l_mkl_2017.2.174.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11306/l_mkl_2017.2.174.tgz') version('2017.1.132', sha256='8c6bbeac99326d59ef3afdc2a95308c317067efdaae50240d2f4a61f37622e69', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11024/l_mkl_2017.1.132.tgz") + 
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11024/l_mkl_2017.1.132.tgz') version('2017.0.098', sha256='f2233e8e011f461d9c15a853edf7ed0ae8849aa665a1ec765c1ff196fd70c4d9', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9662/l_mkl_2017.0.098.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9662/l_mkl_2017.0.098.tgz') # built from parallel_studio_xe_2016.3.x version('11.3.3.210', sha256='ff858f0951fd698e9fb30147ea25a8a810c57f0126c8457b3b0cdf625ea43372', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9068/l_mkl_11.3.3.210.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9068/l_mkl_11.3.3.210.tgz') # built from parallel_studio_xe_2016.2.062 version('11.3.2.181', sha256='bac04a07a1fe2ae4996a67d1439ee90c54f31305e8663d1ccfce043bed84fc27', - url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8711/l_mkl_11.3.2.181.tgz") + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8711/l_mkl_11.3.2.181.tgz') variant('shared', default=True, description='Builds shared library') variant('ilp64', default=False, description='64 bit integers') diff --git a/var/spack/repos/builtin/packages/intel-mpi/package.py b/var/spack/repos/builtin/packages/intel-mpi/package.py index dd49ec21f16..27885ed1f21 100644 --- a/var/spack/repos/builtin/packages/intel-mpi/package.py +++ b/var/spack/repos/builtin/packages/intel-mpi/package.py @@ -11,12 +11,16 @@ class IntelMpi(IntelPackage): homepage = "https://software.intel.com/en-us/intel-mpi-library" - version('2019.6.154', sha256='f5bad7bcc50ea3a09a6a0615ae8fce42ba8a652bc61e32e12ff7b54952e8f8c9', - url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16120/l_mpi_2019.6.154.tgz') + version('2019.6.166', sha256='119be69f1117c93a9e5e9b8b4643918e55d2a55a78ad9567f77d16cdaf18cd6e', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16120/l_mpi_2019.6.166.tgz') + 
version('2019.5.281', sha256='9c59da051f1325b221e5bc4d8b689152e85d019f143069fa39e17989306811f4', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15838/l_mpi_2019.5.281.tgz') version('2019.4.243', sha256='233a8660b92ecffd89fedd09f408da6ee140f97338c293146c9c080a154c5fcd', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15553/l_mpi_2019.4.243.tgz') version('2019.3.199', sha256='5304346c863f64de797250eeb14f51c5cfc8212ff20813b124f20e7666286990', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15260/l_mpi_2019.3.199.tgz') + version('2019.2.187', sha256='6a3305933b5ef9e3f7de969e394c91620f3fa4bb815a4f439577739d04778b20', + url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15040/l_mpi_2019.2.187.tgz') version('2019.1.144', sha256='dac86a5db6b86503313742b17535856a432955604f7103cb4549a9bfc256c3cd', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14879/l_mpi_2019.1.144.tgz') version('2019.0.117', sha256='dfb403f49c1af61b337aa952b71289c7548c3a79c32c57865eab0ea0f0e1bc08', diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py index 29442a8c594..f1b453c257c 100644 --- a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py +++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py @@ -21,6 +21,7 @@ class IntelParallelStudio(IntelPackage): # Cluster Edition (top tier; all components included) version('cluster.2020.0', sha256='573b1d20707d68ce85b70934cfad15b5ad9cc14124a261c17ddd7717ba842c64', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16225/parallel_studio_xe_2020_cluster_edition.tgz') + # version('cluster.2019.5', sha256='c03421de616bd4e640ed25ce4103ec9c5c85768a940a5cb5bd1e97b45be33904', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15809/parallel_studio_xe_2019_update5_cluster_edition.tgz') version('cluster.2019.4', 
sha256='32aee12de3b5ca14caf7578313c06b205795c67620f4a9606ea45696ee3b3d9e', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15533/parallel_studio_xe_2019_update4_cluster_edition.tgz') version('cluster.2019.3', sha256='b5b022366d6d1a98dbb63b60221c62bc951c9819653ad6f5142192e89f78cf63', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15268/parallel_studio_xe_2019_update3_cluster_edition.tgz') @@ -58,8 +59,11 @@ class IntelParallelStudio(IntelPackage): # Cluster; differences manifest only in the tokens present in the license # file delivered as part of the purchase. version('professional.2020.0', sha256='e88cad18d28da50ed9cb87b12adccf13efd91bf94731dc33290481306c6f15ac', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16226/parallel_studio_xe_2020_professional_edition.tgz') + # + version('professional.2019.5', sha256='0ec638330214539361f8632e20759f385a5a78013dcc980ee93743d86d354452', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15810/parallel_studio_xe_2019_update5_professional_edition.tgz') version('professional.2019.4', sha256='9b2818ea5739ade100841e99ce79ef7f4049a2513beb2ce20fc94706f1ba0231', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15534/parallel_studio_xe_2019_update4_professional_edition.tgz') version('professional.2019.3', sha256='92a8879106d0bdf1ecf4670cd97fbcdc67d78b13bdf484f2c516a533aa7a27f9', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15269/parallel_studio_xe_2019_update3_professional_edition.tgz') + version('professional.2019.2', sha256='cdb629d74612d135ca197f1f64e6a081e31df68cda92346a29e1223bb06e64ea', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15089/parallel_studio_xe_2019_update2_professional_edition.tgz') version('professional.2019.1', sha256='bc83ef5a728903359ae11a2b90ad7dae4ae61194afb28bb5bb419f6a6aea225d', 
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14825/parallel_studio_xe_2019_update1_professional_edition.tgz') version('professional.2019.0', sha256='94b9714e353e5c4f58d38cb236e2f8911cbef31c4b42a148d60c988e926411e2', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13578/parallel_studio_xe_2019_professional_edition.tgz') # @@ -89,9 +93,11 @@ class IntelParallelStudio(IntelPackage): # Composer Edition (basic tier; excluded: MPI/..., Advisor/Inspector/Vtune) version('composer.2020.0', sha256='9168045466139b8e280f50f0606b9930ffc720bbc60bc76f5576829ac15757ae', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/16229/parallel_studio_xe_2020_composer_edition.tgz') + # version('composer.2019.5', sha256='e8c8e4b9b46826a02c49325c370c79f896858611bf33ddb7fb204614838ad56c', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15813/parallel_studio_xe_2019_update5_composer_edition.tgz') version('composer.2019.4', sha256='1915993445323e1e78d6de73702a88fa3df2036109cde03d74ee38fef9f1abf2', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15537/parallel_studio_xe_2019_update4_composer_edition.tgz') version('composer.2019.3', sha256='15373ac6df2a84e6dd9fa0eac8b5f07ab00cdbb67f494161fd0d4df7a71aff8e', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15272/parallel_studio_xe_2019_update3_composer_edition.tgz') + version('composer.2019.2', sha256='1e0f400be1f458592a8c2e7d55c1b2a4506f68f22bacbf1175af947809a4cd87', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/15092/parallel_studio_xe_2019_update2_composer_edition.tgz') version('composer.2019.1', sha256='db000cb2ebf411f6e91719db68a0c68b8d3f7d38ad7f2049ea5b2f1b5f006c25', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/14832/parallel_studio_xe_2019_update1_composer_edition.tgz') version('composer.2019.0', sha256='e1a29463038b063e01f694e2817c0fcf1a8e824e24f15a26ce85f20afa3f963a', 
url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/13581/parallel_studio_xe_2019_composer_edition.tgz') # diff --git a/var/spack/repos/builtin/packages/intel-pin/package.py b/var/spack/repos/builtin/packages/intel-pin/package.py index 3e74caa03da..ae0473734c3 100644 --- a/var/spack/repos/builtin/packages/intel-pin/package.py +++ b/var/spack/repos/builtin/packages/intel-pin/package.py @@ -16,7 +16,8 @@ class IntelPin(Package): maintainers = ['matthiasdiener'] version('3.11', sha256='aa5abca475a6e106a75e6ed4ba518fb75a57549a59f00681e6bd6e3f221bd23a', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.11-97998-g7ecce2dac-gcc-linux.tar.gz') - version('3.7', sha256='4730328795be61f1addb0e505a3792a4b4ca80b1b9405acf217beec6b5b90fb8', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.7-97619-g0d0c92f4f-gcc-linux.tar.gz') + version('3.10', sha256='7c8f14c3a0654bab662b58aba460403138fa44517bd40052501e8e0075b2702a', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.10-97971-gc5e41af74-gcc-linux.tar.gz') + version('3.7', sha256='4730328795be61f1addb0e505a3792a4b4ca80b1b9405acf217beec6b5b90fb8', url='https://software.intel.com/sites/landingpage/pintool/downloads/pin-3.7-97619-g0d0c92f4f-gcc-linux.tar.gz') def install(self, spec, prefix): install_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index 2cbda4f5a42..0ae85edabb9 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -22,7 +22,10 @@ class IntelTbb(Package): # patches and filters below as needed. # See url_for_version() below. 
- version('2020', sha256='db80f4f7abb95c2d08fe64abdc0a9250903e4c725f1c667ac517450de426023a') + + version('2020.1', sha256='48d51c63b16787af54e1ee4aaf30042087f20564b4eecf9a032d5568bc2f0bf8') + version('2020.0', sha256='8eed2377ac62e6ac10af5a8303ce861e4525ffe491a061b48e8fe094fc741ce9') + version('2019.9', sha256='15652f5328cf00c576f065e5cd3eaf3317422fe82afb67a9bcec0dc065bd2abe') version('2019.8', sha256='7b1fd8caea14be72ae4175896510bf99c809cd7031306a1917565e6de7382fba') version('2019.7', sha256='4204a93f4c0fd989fb6f79acae74feb02ee39725c93968773d9b6efeb75c7a6a') version('2019.6', sha256='2ba197b3964fce8a84429dd15b75eba7434cb89afc54f86d5ee6f726fdbe97fd') From b43f658c397f2cb1ebd7ed3bca1c9fb1d90a04b4 Mon Sep 17 00:00:00 2001 From: Matt Belhorn Date: Thu, 6 Feb 2020 09:42:05 -0600 Subject: [PATCH 108/178] Adds fma and vsx features to entire power arch family. (#14759) VSX alitvec extensions are supported by PowerISA from v2.06 (Power7+), but might not be listed in features. FMA has been supported by PowerISA since Power1, but might not be listed in features. This commit adds these features to all the power ISA family sets. 
--- lib/spack/llnl/util/cpu/microarchitectures.json | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/lib/spack/llnl/util/cpu/microarchitectures.json b/lib/spack/llnl/util/cpu/microarchitectures.json index 56f1f19df8c..51411d4a24c 100644 --- a/lib/spack/llnl/util/cpu/microarchitectures.json +++ b/lib/spack/llnl/util/cpu/microarchitectures.json @@ -1298,6 +1298,20 @@ "ppc64" ] }, + "vsx": { + "reason": "VSX alitvec extensions are supported by PowerISA from v2.06 (Power7+), but might not be listed in features", + "families": [ + "ppc64le", + "ppc64" + ] + }, + "fma": { + "reason": "FMA has been supported by PowerISA since Power1, but might not be listed in features", + "families": [ + "ppc64le", + "ppc64" + ] + }, "sse4.1": { "reason": "permits to refer to sse4_1 also as sse4.1", "any_of": [ From 458c9a22bf16e7e3f9d4475074bcf5ac64c6907e Mon Sep 17 00:00:00 2001 From: Figroc Chen Date: Fri, 7 Feb 2020 02:20:55 +0800 Subject: [PATCH 109/178] tensorflow-serving-client: add new version 2.1.0 (#14786) --- .../builtin/packages/tensorflow-serving-client/package.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/tensorflow-serving-client/package.py b/var/spack/repos/builtin/packages/tensorflow-serving-client/package.py index 732733e593e..6b2b74649f8 100644 --- a/var/spack/repos/builtin/packages/tensorflow-serving-client/package.py +++ b/var/spack/repos/builtin/packages/tensorflow-serving-client/package.py @@ -11,8 +11,9 @@ class TensorflowServingClient(CMakePackage): proto files""" homepage = "https://github.com/figroc/tensorflow-serving-client" - url = "https://github.com/figroc/tensorflow-serving-client/archive/v2.0.0.tar.gz" + url = "https://github.com/figroc/tensorflow-serving-client/archive/v2.1.0.tar.gz" + version('2.1.0', sha256='7a31d8cfa1d861f73953d4728665dd6d74e205d1fa01062a6c2b1aeee4674f73') version('2.0.0', sha256='55310ad484f257173ad5194df7f7116b2049260c3d29049ef8d789d1d8bd9948') 
depends_on('protobuf') From 22c9f5cbd8b807c6ff101a2af30c5fc4cab2d13b Mon Sep 17 00:00:00 2001 From: Oliver Breitwieser Date: Fri, 7 Feb 2020 03:59:16 +0100 Subject: [PATCH 110/178] Allow installing unsigned binary packages (#11107) This commit introduces a `--no-check-signature` option for `spack install` so that unsigned packages can be installed. It is off by default (signatures required). --- lib/spack/spack/cmd/install.py | 7 ++++++- lib/spack/spack/package.py | 7 ++++--- share/spack/spack-completion.bash | 2 +- 3 files changed, 11 insertions(+), 5 deletions(-) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 8f1eab0eb32..9e136439205 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -42,7 +42,8 @@ def update_kwargs_from_args(args, kwargs): 'use_cache': args.use_cache, 'cache_only': args.cache_only, 'explicit': True, # Always true for install command - 'stop_at': args.until + 'stop_at': args.until, + 'unsigned': args.unsigned, }) kwargs.update({ @@ -98,6 +99,10 @@ def setup_parser(subparser): '--cache-only', action='store_true', dest='cache_only', default=False, help="only install package from binary mirrors") + subparser.add_argument( + '--no-check-signature', action='store_true', + dest='unsigned', default=False, + help="do not check signatures of binary packages") subparser.add_argument( '--show-log-on-error', action='store_true', help="print full build log to stderr if build fails") diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index d146be9af95..d67c017a704 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -1508,7 +1508,7 @@ def _update_explicit_entry_in_db(self, rec, explicit): message = '{s.name}@{s.version} : marking the package explicit' tty.msg(message.format(s=self)) - def try_install_from_binary_cache(self, explicit): + def try_install_from_binary_cache(self, explicit, unsigned=False): tty.msg('Searching for binary cache of %s' % 
self.name) specs = binary_distribution.get_spec(spec=self.spec, force=False) @@ -1525,7 +1525,7 @@ def try_install_from_binary_cache(self, explicit): tty.msg('Installing %s from binary cache' % self.name) binary_distribution.extract_tarball( binary_spec, tarball, allow_root=False, - unsigned=False, force=False) + unsigned=unsigned, force=False) self.installed_from_binary_cache = True spack.store.db.add( self.spec, spack.store.layout, explicit=explicit) @@ -1666,7 +1666,8 @@ def do_install(self, **kwargs): tty.msg(colorize('@*{Installing} @*g{%s}' % self.name)) if kwargs.get('use_cache', True): - if self.try_install_from_binary_cache(explicit): + if self.try_install_from_binary_cache( + explicit, unsigned=kwargs.get('unsigned', False)): tty.msg('Successfully installed %s from binary cache' % self.name) print_pkg(self.prefix) diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 8a22e342e20..e6b75294525 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -945,7 +945,7 @@ _spack_info() { _spack_install() { if $list_options then - SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all" + SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --no-check-signature --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete -f --file --clean --dirty --test --run-tests --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all" else _all_packages fi From 
7d9a0fa18083729315728646d4a3a09bd9d99bf0 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:13:22 +0800 Subject: [PATCH 111/178] hunspell : fix 1.7.0 sha256sum (#14832) --- var/spack/repos/builtin/packages/hunspell/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/hunspell/package.py b/var/spack/repos/builtin/packages/hunspell/package.py index c24ee10607d..8eedf43539e 100644 --- a/var/spack/repos/builtin/packages/hunspell/package.py +++ b/var/spack/repos/builtin/packages/hunspell/package.py @@ -12,7 +12,7 @@ class Hunspell(AutotoolsPackage): homepage = "http://hunspell.github.io/" url = "https://github.com/hunspell/hunspell/archive/v1.6.0.tar.gz" - version('1.7.0', sha256='57be4e03ae9dd62c3471f667a0d81a14513e314d4d92081292b90435944ff951') + version('1.7.0', sha256='bb27b86eb910a8285407cf3ca33b62643a02798cf2eef468c0a74f6c3ee6bc8a') version('1.6.0', sha256='512e7d2ee69dad0b35ca011076405e56e0f10963a02d4859dbcc4faf53ca68e2') depends_on('autoconf', type='build') From f9889526f2e6e4b9873b06309f35402866f94670 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:14:16 +0800 Subject: [PATCH 112/178] thrift: added v0.12.0 and v0.13.0 (#14831) --- var/spack/repos/builtin/packages/thrift/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py index 0f8ccc00cbf..92f77e03312 100644 --- a/var/spack/repos/builtin/packages/thrift/package.py +++ b/var/spack/repos/builtin/packages/thrift/package.py @@ -19,6 +19,8 @@ class Thrift(Package): homepage = "http://thrift.apache.org" url = "http://apache.mirrors.ionfish.org/thrift/0.11.0/thrift-0.11.0.tar.gz" + version('0.13.0', sha256='7ad348b88033af46ce49148097afe354d513c1fca7c607b59c33ebb6064b5179') + version('0.12.0', sha256='c336099532b765a6815173f62df0ed897528a9d551837d627c1f87fadad90428') version('0.11.0', 
sha256='c4ad38b6cb4a3498310d405a91fef37b9a8e79a50cd0968148ee2524d2fa60c2') version('0.10.0', sha256='2289d02de6e8db04cbbabb921aeb62bfe3098c4c83f36eec6c31194301efa10b') version('0.9.3', sha256='b0740a070ac09adde04d43e852ce4c320564a292f26521c46b78e0641564969e') From 710fabd68af1b9e451e385a6589bbd56af2b19f3 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:18:29 +0800 Subject: [PATCH 113/178] wireshark: added new versions up to v3.2.1 (#14828) --- var/spack/repos/builtin/packages/wireshark/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/wireshark/package.py b/var/spack/repos/builtin/packages/wireshark/package.py index 62ac8a0e91a..8939a2adc96 100644 --- a/var/spack/repos/builtin/packages/wireshark/package.py +++ b/var/spack/repos/builtin/packages/wireshark/package.py @@ -13,6 +13,10 @@ class Wireshark(CMakePackage): homepage = "https://www.wireshark.org" url = "https://www.wireshark.org/download/src/all-versions/wireshark-2.6.0.tar.xz" + version('3.2.1', sha256='589f640058d6408ebbd695a80ebbd6e7bd99d8db64ecda253d27100dfd27e85b') + version('3.2.0', sha256='4cfd33a19a454ff4002243e9d04d6afd64280a109a21ae652a192f2be2b1b66c') + version('3.1.0', sha256='a7b54f9e35fc69291dcac5104ecbef8260534a75dec5b8105605b6c423fd3de3') + version('3.0.8', sha256='b4bd8189934d82330a053c5b10398f2b625b1e1c8818831ab61739b2d7aa7561') version('2.6.0', sha256='711c7f01d27a8817d58277a5487cef3e3c7bab1c8caaf8f4c92aa21015b9117f') variant('libssh', default=False, description='Build with libssh') From 7de3ea4e19203113def69feced1e86264bae4ee5 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:19:09 +0800 Subject: [PATCH 114/178] squashfs: added v4.4 (#14825) --- var/spack/repos/builtin/packages/squashfs/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/squashfs/package.py b/var/spack/repos/builtin/packages/squashfs/package.py index 2d1043e37b4..62cd3c690a8 100644 --- 
a/var/spack/repos/builtin/packages/squashfs/package.py +++ b/var/spack/repos/builtin/packages/squashfs/package.py @@ -13,6 +13,7 @@ class Squashfs(MakefilePackage): url = 'https://downloads.sourceforge.net/project/squashfs/squashfs/squashfs4.3/squashfs4.3.tar.gz' # version sha1 + version('4.4', sha256='a981b3f3f2054b5a2e658851a3c06a2460ad04a9a8a645e0afe063a63fdbb07e') version('4.3', sha256='0d605512437b1eb800b4736791559295ee5f60177e102e4d4ccd0ee241a5f3f6') version('4.2', sha256='d9e0195aa922dbb665ed322b9aaa96e04a476ee650f39bbeadb0d00b24022e96') version('4.1', sha256='3a870d065a25b3f5467bc6d9ed34340befab51a3f9e4b7e3792ea0ff4e06046a') From 4d3bd1116c2ed4a7564f7c87eca2a138f3eb5e3a Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:21:15 +0800 Subject: [PATCH 115/178] scala: added v2.12.10 and v2.13.1 (#14823) --- var/spack/repos/builtin/packages/scala/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/scala/package.py b/var/spack/repos/builtin/packages/scala/package.py index ba46d656e5d..a5226af1cf1 100644 --- a/var/spack/repos/builtin/packages/scala/package.py +++ b/var/spack/repos/builtin/packages/scala/package.py @@ -17,6 +17,8 @@ class Scala(Package): homepage = "https://www.scala-lang.org/" url = "https://downloads.lightbend.com/scala/2.12.1/scala-2.12.1.tgz" + version('2.13.1', sha256='6918ccc494e34810a7254ad2c4e6f0e1183784c22e7b4801b7dbc8d1994a04db') + version('2.12.10', sha256='3b12bda3300fedd91f64fc7f9165fd45c58328b1b760af24ca6ffe92e3b0656a') version('2.12.5', sha256='b261ffe9a495b12e9dda2ed37331e579547e4d1b8b5810161b6c3b39ac806aa1') version('2.12.1', sha256='4db068884532a3e27010df17befaca0f06ea50f69433d58e06a5e63c7a3cc359') version('2.11.11', sha256='12037ca64c68468e717e950f47fc77d5ceae5e74e3bdca56f6d02fd5bfd6900b') From 291c11070067dbd00430a72aace9e9a157e43e85 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:23:00 +0800 Subject: [PATCH 116/178] rclone: added new versions up to v1.51.0 
(#14822) --- var/spack/repos/builtin/packages/rclone/package.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/var/spack/repos/builtin/packages/rclone/package.py b/var/spack/repos/builtin/packages/rclone/package.py index 26d4d18d3ae..8e043319f82 100644 --- a/var/spack/repos/builtin/packages/rclone/package.py +++ b/var/spack/repos/builtin/packages/rclone/package.py @@ -13,6 +13,12 @@ class Rclone(Package): homepage = "http://rclone.org" url = "https://github.com/ncw/rclone/releases/download/v1.43/rclone-v1.43.tar.gz" + version('1.51.0', sha256='3eb5b7ffce17e56fadb29bf854666723a14c93fedc02046c7f34c792dbd227ee') + version('1.50.2', sha256='6dd8998a72514d3820d241ae46dc609c0305b742aee3db6aaf6017b46c996091') + version('1.50.1', sha256='48d6c80883427469682b4d97099d7631cf3b67aa85e652c254423bd1422ce216') + version('1.50.0', sha256='f901fd1752aae6116d94fd08d010a70d94535257c2d23caa505e631cce1e802a') + version('1.49.5', sha256='abd2c83d71c63a4b0a30b1980b942868e707d05e14ae76ad39abf5cc5a5fde63') + version('1.49.4', sha256='070afc85e4e9921151d7cb67247db8f0ff2f06fcf2652c43a42fa6e1e35847af') version('1.43', sha256='d30527b00cecb4e5e7188dddb78e5cec62d67cf2422dab82190db58512b5a4e3') depends_on("go", type='build') From e4cac224626de01fd6623b7cb29a69965741fefb Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:26:15 +0800 Subject: [PATCH 117/178] qt: added v5.14.0 and v5.14.1 (#14821) --- var/spack/repos/builtin/packages/qt/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 9115f41a783..1bc0f1df938 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -21,6 +21,8 @@ class Qt(Package): phases = ['configure', 'build', 'install'] + version('5.14.1', sha256='6f17f488f512b39c2feb57d83a5e0a13dcef32999bea2e2a8f832f54a29badb8') + version('5.14.0', 
sha256='be9a77cd4e1f9d70b58621d0753be19ea498e6b0da0398753e5038426f76a8ba') version('5.13.1', sha256='adf00266dc38352a166a9739f1a24a1e36f1be9c04bf72e16e142a256436974e') version('5.12.5', sha256='a2299e21db7767caf98242767bffb18a2a88a42fee2d6a393bedd234f8c91298') version('5.12.2', sha256='59b8cb4e728450b21224dcaaa40eb25bafc5196b6988f2225c394c6b7f881ff5') From ef9cb9737695f73798eee5b8b5b2adabb7c856ed Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:30:12 +0800 Subject: [PATCH 118/178] plplot: added v5.14.0 and v5.15.0 (#14817) --- var/spack/repos/builtin/packages/plplot/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/plplot/package.py b/var/spack/repos/builtin/packages/plplot/package.py index 74385bddfb6..d0806069651 100644 --- a/var/spack/repos/builtin/packages/plplot/package.py +++ b/var/spack/repos/builtin/packages/plplot/package.py @@ -12,6 +12,8 @@ class Plplot(CMakePackage): homepage = "http://plplot.sourceforge.net/" url = "https://sourceforge.net/projects/plplot/files/plplot/5.13.0%20Source/plplot-5.13.0.tar.gz/download" + version('5.15.0', sha256='b92de4d8f626a9b20c84fc94f4f6a9976edd76e33fb1eae44f6804bdcc628c7b') + version('5.14.0', sha256='331009037c9cad9fcefacd7dbe9c7cfae25e766f5590f9efd739a294c649df97') version('5.13.0', sha256='ec36bbee8b03d9d1c98f8fd88f7dc3415560e559b53eb1aa991c2dcf61b25d2b') version('5.12.0', sha256='8dc5da5ef80e4e19993d4c3ef2a84a24cc0e44a5dade83201fca7160a6d352ce') version('5.11.0', sha256='bfa8434e6e1e7139a5651203ec1256c8581e2fac3122f907f7d8d25ed3bd5f7e') From 8c1aee1b78ba523ba3e3acd8f82454cb66f5142b Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:30:45 +0800 Subject: [PATCH 119/178] skopeo: added v0.1.40 (#14824) --- var/spack/repos/builtin/packages/skopeo/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/skopeo/package.py b/var/spack/repos/builtin/packages/skopeo/package.py index c526edd3213..4f0372639ce 100644 --- 
a/var/spack/repos/builtin/packages/skopeo/package.py +++ b/var/spack/repos/builtin/packages/skopeo/package.py @@ -14,6 +14,7 @@ class Skopeo(MakefilePackage): homepage = "https://github.com/containers/skopeo" url = "https://github.com/containers/skopeo/archive/v0.1.39.tar.gz" + version('0.1.40', sha256='ee1e33245938fcb622f5864fac860e2d8bfa2fa907af4b5ffc3704ed0db46bbf') version('0.1.39', sha256='e9d70f7f7b891675a816f06a22df0490285ad20eefbd91f5da69ca12f56c29f2') version('0.1.38', sha256='104ceb9c582dc5c3a49dd1752c4c326bba03f2f801596f089372e831f48ed705') version('0.1.37', sha256='49c0c1b2c2f32422d3230f827ae405fc554fb34af41a54e59b2121ac1500505d') From 8c1845581d7d9de641fd3e1acdb7b03796974395 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:41:30 +0800 Subject: [PATCH 120/178] qwt: added v6.1.3 (#14820) --- var/spack/repos/builtin/packages/qwt/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/qwt/package.py b/var/spack/repos/builtin/packages/qwt/package.py index bba2a26e007..be7aba63728 100644 --- a/var/spack/repos/builtin/packages/qwt/package.py +++ b/var/spack/repos/builtin/packages/qwt/package.py @@ -16,6 +16,7 @@ class Qwt(QMakePackage): homepage = "http://qwt.sourceforge.net/" url = "https://sourceforge.net/projects/qwt/files/qwt/6.1.3/qwt-6.1.3.tar.bz2" + version('6.1.4', sha256='1529215329e51fc562e0009505a838f427919a18b362afff441f035b2d9b5bd9') version('6.1.3', sha256='f3ecd34e72a9a2b08422fb6c8e909ca76f4ce5fa77acad7a2883b701f4309733') version('5.2.2', sha256='36bf2ee51ca9c74fde1322510ffd39baac0db60d5d410bb157968a78d9c1464b') From fecb26763d9c7f3cf5ffdb24dbc655796fb696d9 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:43:43 +0800 Subject: [PATCH 121/178] pangomm: added versions up to v2.41.3 (#14816) --- .../repos/builtin/packages/pangomm/package.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/var/spack/repos/builtin/packages/pangomm/package.py 
b/var/spack/repos/builtin/packages/pangomm/package.py index be3fb690388..e77aaf711bc 100644 --- a/var/spack/repos/builtin/packages/pangomm/package.py +++ b/var/spack/repos/builtin/packages/pangomm/package.py @@ -12,6 +12,21 @@ class Pangomm(AutotoolsPackage): homepage = "http://www.pango.org/" url = "https://ftp.gnome.org/pub/GNOME/sources/pangomm/2.14/pangomm-2.14.1.tar.gz" + version('2.43.1', sha256='00483967b4ed0869da09dc0617de45625b9ab846c7b07aa25dfc940a4fc540a4') + version('2.42.0', sha256='ca6da067ff93a6445780c0b4b226eb84f484ab104b8391fb744a45cbc7edbf56') + version('2.41.5', sha256='5131830d5b37b181ca4fa8f641ad86faa985c0bb7dcc833c98672d294367b304') + version('2.40.2', sha256='0a97aa72513db9088ca3034af923484108746dba146e98ed76842cf858322d05') + version('2.39.1', sha256='10c06bbf12a03963ffe9c697887b57c72f1dac1671d09dba45cecd25db5dc6ed') + version('2.38.1', sha256='effb18505b36d81fc32989a39ead8b7858940d0533107336a30bc3eef096bc8b') + version('2.37.2', sha256='bb83d769f4d4256e0b108e84a4f0441065da8483c7cc51518b0634668ed094f5') + version('2.36.0', sha256='a8d96952c708d7726bed260d693cece554f8f00e48b97cccfbf4f5690b6821f0') + version('2.35.1', sha256='3eb4d11014d09627b2b7c532c65b54fa182905b4c9688901ae11cdfb506dbc55') + version('2.34.0', sha256='0e82bbff62f626692a00f3772d8b17169a1842b8cc54d5f2ddb1fec2cede9e41') + version('2.28.4', sha256='778dcb66a793cbfd52a9f92ee5a71b888c3603a913fc1ddc48a30204de6d6c82') + version('2.27.1', sha256='0d707b4a9e632223f7f27215f83fff679166cc89b9b7f209e7fe049af7b4562e') + version('2.26.3', sha256='4f68e4d2d4b6c4ae82327ebd9e69f2cbc4379e502d12856c36943399b87d71a2') + version('2.25.1', sha256='25684058138050a35ebb4f4e13899aea12045dfb00cc351dfe78f01cb1a1f21c') + version('2.24.0', sha256='24c7b8782b8986fa8f6224ac1e5f1a02412b7d8bc21b53d14d6df9c7d9b59a3f') version('2.14.1', sha256='2ea6cee273cca1aae2ee5a5dac0c416b4dc354e46debb51f20c6eeba828f5ed5') version('2.14.0', sha256='baa3b231c9498fb1140254e3feb4eb93c638f07e6e26ae0e36c3699ec14d80fd') From 
1203134253feeb7eca5a44a4a98021dca88bd0b8 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:46:56 +0800 Subject: [PATCH 122/178] mysql: added v8.0.17, v8.0.18 and v8.0.19 (#14813) --- var/spack/repos/builtin/packages/mysql/package.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/var/spack/repos/builtin/packages/mysql/package.py b/var/spack/repos/builtin/packages/mysql/package.py index 39b509f4a18..7501d57cbd0 100644 --- a/var/spack/repos/builtin/packages/mysql/package.py +++ b/var/spack/repos/builtin/packages/mysql/package.py @@ -13,6 +13,9 @@ class Mysql(CMakePackage): homepage = "https://www.mysql.com/" url = "https://dev.mysql.com/get/Downloads/MySQL-8.0/mysql-8.0.15.tar.gz" + version('8.0.19', sha256='a62786d67b5e267eef928003967b4ccfe362d604b80f4523578e0688f5b9f834') + version('8.0.18', sha256='4cb39a315298eb243c25c53c184b3682b49c2a907a1d8432ba0620534806ade8') + version('8.0.17', sha256='c6e3f38199a77bfd8a4925ca00b252d3b6159b90e4980c7232f1c58d6ca759d6') version('8.0.16', sha256='8d9fe89920dc8bbbde2857b7b877ad2fa5ec2f231c68e941d484f3b72735eaea') version('8.0.15', sha256='bb1bca2dc2f23ee9dd395cc4db93b64561d4ac20b53be5d1dae563f7be64825e') version('8.0.14', sha256='bc53f4c914fb39650289700d144529121d71f38399d2d24a0f5c76e5a8abd204') From 5bfb0eb44750fa0f4cae4d324bad04ea224f0446 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 20:47:38 +0800 Subject: [PATCH 123/178] maven: added versions up to v3.6.3 (#14811) --- var/spack/repos/builtin/packages/maven/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/maven/package.py b/var/spack/repos/builtin/packages/maven/package.py index 381ac4f93ed..832104fd783 100644 --- a/var/spack/repos/builtin/packages/maven/package.py +++ b/var/spack/repos/builtin/packages/maven/package.py @@ -12,6 +12,10 @@ class Maven(Package): homepage = "https://maven.apache.org/index.html" url = 
"https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.3.9/apache-maven-3.3.9-bin.tar.gz" + version('3.6.3', sha256='26ad91d751b3a9a53087aefa743f4e16a17741d3915b219cf74112bf87a438c5') + version('3.6.2', sha256='3fbc92d1961482d6fbd57fbf3dd6d27a4de70778528ee3fb44aa7d27eb32dfdc') + version('3.6.1', sha256='2528c35a99c30f8940cc599ba15d34359d58bec57af58c1075519b8cd33b69e7') + version('3.6.0', sha256='6a1b346af36a1f1a491c1c1a141667c5de69b42e6611d3687df26868bc0f4637') version('3.5.0', sha256='beb91419245395bd69a4a6edad5ca3ec1a8b64e41457672dc687c173a495f034') version('3.3.9', sha256='6e3e9c949ab4695a204f74038717aa7b2689b1be94875899ac1b3fe42800ff82') From 4cb82948004655bea1846e0f1d725aa5147b52dc Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 21:04:21 +0800 Subject: [PATCH 124/178] lighttpd: added versions up to v1.4.55 (#14810) --- var/spack/repos/builtin/packages/lighttpd/package.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/var/spack/repos/builtin/packages/lighttpd/package.py b/var/spack/repos/builtin/packages/lighttpd/package.py index ee360936af8..70e3a7a8b8e 100644 --- a/var/spack/repos/builtin/packages/lighttpd/package.py +++ b/var/spack/repos/builtin/packages/lighttpd/package.py @@ -13,6 +13,11 @@ class Lighttpd(CMakePackage): homepage = "https://www.lighttpd.net" url = "https://download.lighttpd.net/lighttpd/releases-1.4.x/lighttpd-1.4.50.tar.gz" + version('1.4.55', sha256='065259fb618774df516add13df22a52cac76a8f59e4561f143fe3ec810f4a03a') + version('1.4.54', sha256='5151d38cb7c4c40effa13710e77ebdbef899f945b062cf32befc02d128ac424c') + version('1.4.53', sha256='423b3951f212e3a30511eb86f4662a1848c6e857074289ff23fc310eef520266') + version('1.4.52', sha256='0f9de0227681c078f6b8c6154b581ced5fe7bcb5ff428ccf292581764b771145') + version('1.4.51', sha256='4301fe64136c7030d63cccc96996c6603dcbe82cca9a72e0aca29ce88284c978') version('1.4.50', sha256='c9a9f175aca6db22ebebbc47de52c54a99bbd1dce8d61bb75103609a3d798235') version('1.4.49', 
sha256='8b744baf9f29c386fff1a6d2e435491e726cb8d29cfdb1fe20ab782ee2fc2ac7') From fd9b1fb6de3d7e5f50ee69e9aabaa79daac210c8 Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 21:04:40 +0800 Subject: [PATCH 125/178] libconfig: added v1.7.2 (#14808) --- var/spack/repos/builtin/packages/libconfig/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libconfig/package.py b/var/spack/repos/builtin/packages/libconfig/package.py index 6fadb4e4821..abed0394009 100644 --- a/var/spack/repos/builtin/packages/libconfig/package.py +++ b/var/spack/repos/builtin/packages/libconfig/package.py @@ -14,6 +14,7 @@ class Libconfig(AutotoolsPackage): force_autoreconf = True + version('1.7.2', sha256='f67ac44099916ae260a6c9e290a90809e7d782d96cdd462cac656ebc5b685726') version('1.7.1', sha256='d288e6ae817f4ef78df43cdb2647f768dc97899ee82fcc41f857e8eb9fd7fbdb') version('1.5', sha256='cae5c02361d8a9b2bb26946c64f089d2e5e599972f386203fbc48975c0d885c8') From 2c3e2669f226e9f7afc5e84af29b868ef546a6fd Mon Sep 17 00:00:00 2001 From: darmac Date: Fri, 7 Feb 2020 21:05:50 +0800 Subject: [PATCH 126/178] imlib2: added v1.6.0 and v1.6.1 (#14807) --- var/spack/repos/builtin/packages/imlib2/package.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/imlib2/package.py b/var/spack/repos/builtin/packages/imlib2/package.py index 32c49c0b005..8cb3dc59b09 100644 --- a/var/spack/repos/builtin/packages/imlib2/package.py +++ b/var/spack/repos/builtin/packages/imlib2/package.py @@ -16,7 +16,9 @@ class Imlib2(AutotoolsPackage): maintainers = ['TheQueasle'] - version('1.5.1', 'fa4e57452b8843f4a70f70fd435c746ae2ace813250f8c65f977db5d7914baae') + version('1.6.1', sha256='4d393a77e13da883c8ee2da3b029da3570210fe37d000c9ac33d9fce751b166d') + version('1.6.0', sha256='cfc440ddfaed5fc85ba2572ad8d87a87cd77a5bffb33ebca882c42cefcd8691d') + version('1.5.1', sha256='fa4e57452b8843f4a70f70fd435c746ae2ace813250f8c65f977db5d7914baae') 
depends_on('libtiff') depends_on('giflib') From f685d538d80b20bd66bb833bbb44f001507f8192 Mon Sep 17 00:00:00 2001 From: Themos Tsikas Date: Fri, 7 Feb 2020 15:33:37 +0000 Subject: [PATCH 127/178] NAG Compiler 7.0 (Build 7009) download checksum (#14840) --- var/spack/repos/builtin/packages/nag/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/nag/package.py b/var/spack/repos/builtin/packages/nag/package.py index d8818d0d580..4e235599137 100644 --- a/var/spack/repos/builtin/packages/nag/package.py +++ b/var/spack/repos/builtin/packages/nag/package.py @@ -12,7 +12,7 @@ class Nag(Package): homepage = "http://www.nag.com/nagware/np.asp" maintainers = ['ThemosTsikas'] - version('7.0', sha256='ea83075cde9e625083b85be04426b0536b2da32db3cfd0c3eb3f2cf8253a2030') + version('7.0', sha256='fafd97ebb58753ab5b9f13822d2e3d24c2f488ea25928c4c3a13e4e2e350ab3e') version('6.2', sha256='9b60f6ffa4f4be631079676963e74eea25e8824512e5c864eb06758b2a3cdd2d') version('6.1', sha256='32580e0004e6798abf1fa52f0070281b28abeb0da2387530a4cc41218e813c7c') From d1d5f5f9e750595372bfe44bf15b71d6f8d523bc Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Fri, 7 Feb 2020 11:20:19 -0600 Subject: [PATCH 128/178] patch aws-parallelcluster so that it doesn't require enum34 (#14796) * aws-parallelcluster always depends on enum34 * Build aws-parallelcluster without enum34 * Update homepage * Add unit tests --- .../packages/aws-parallelcluster/enum34.patch | 17 ++++++++++++ .../packages/aws-parallelcluster/package.py | 26 ++++++++++++++++--- .../builtin/packages/py-enum34/package.py | 9 +++++-- 3 files changed, 47 insertions(+), 5 deletions(-) create mode 100644 var/spack/repos/builtin/packages/aws-parallelcluster/enum34.patch diff --git a/var/spack/repos/builtin/packages/aws-parallelcluster/enum34.patch b/var/spack/repos/builtin/packages/aws-parallelcluster/enum34.patch new file mode 100644 index 00000000000..c96a5c2d413 --- /dev/null +++ b/var/spack/repos/builtin/packages/aws-parallelcluster/enum34.patch @@ -0,0 +1,17 @@ +diff -Naur a/setup.py b/setup.py +--- a/setup.py 2020-02-06 15:40:26.000000000 -0600 ++++ b/setup.py 2020-02-06 15:41:17.000000000 -0600 +@@ -27,10 +27,12 @@ + "future>=0.16.0,<=0.18.2", + "tabulate>=0.8.2,<=0.8.3", + "ipaddress>=1.0.22", +- "enum34>=1.1.6", + "PyYAML>=5.1.2", + ] + ++if sys.version_info < (3, 4): ++ REQUIRES.append("enum34>=1.1.6") ++ + if sys.version_info[0] == 2: + REQUIRES.append("configparser>=3.5.0,<=3.8.1") + diff --git a/var/spack/repos/builtin/packages/aws-parallelcluster/package.py b/var/spack/repos/builtin/packages/aws-parallelcluster/package.py index 6fd95ac2ee2..9a0bb4a9cc8 100644 --- a/var/spack/repos/builtin/packages/aws-parallelcluster/package.py +++ b/var/spack/repos/builtin/packages/aws-parallelcluster/package.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * +import os class AwsParallelcluster(PythonPackage): @@ -12,14 +13,21 @@ class AwsParallelcluster(PythonPackage): homepage = "https://github.com/aws/aws-parallelcluster" url = "https://pypi.io/packages/source/a/aws-parallelcluster/aws-parallelcluster-2.5.1.tar.gz" 
- maintainers = ['sean-smith', 'demartinofra', 'enrico-usai', - 'lukeseawalker', 'rexcsn', 'ddeidda', 'tilne'] + + maintainers = [ + 'sean-smith', 'demartinofra', 'enrico-usai', 'lukeseawalker', 'rexcsn', + 'ddeidda', 'tilne' + ] + import_modules = [ + 'pcluster', 'awsbatch', 'pcluster.dcv', 'pcluster.configure', + 'pcluster.config', 'pcluster.networking' + ] version('2.5.1', sha256='4fd6e14583f8cf81f9e4aa1d6188e3708d3d14e6ae252de0a94caaf58be76303') version('2.5.0', sha256='3b0209342ea0d9d8cc95505456103ad87c2d4e35771aa838765918194efd0ad3') depends_on('python@2.7:', type=('build', 'run')) - depends_on('py-setuptools', type='build') + depends_on('py-setuptools', type=('build', 'run')) depends_on('py-boto3@1.10.15:', type=('build', 'run')) depends_on('py-future@0.16.0:0.18.2', type=('build', 'run')) depends_on('py-tabulate@0.8.2:0.8.3', type=('build', 'run')) @@ -27,3 +35,15 @@ class AwsParallelcluster(PythonPackage): depends_on('py-enum34@1.1.6:', when='^python@:3.3', type=('build', 'run')) depends_on('py-pyyaml@5.1.2:', type=('build', 'run')) depends_on('py-configparser@3.5.0:3.8.1', when='^python@:2', type=('build', 'run')) + + # https://github.com/aws/aws-parallelcluster/pull/1633 + patch('enum34.patch', when='@:2.5.1') + + @run_after('install') + @on_package_attributes(run_tests=True) + def install_test(self): + # Make sure executables work + for exe in ['awsbhosts', 'awsbkill', 'awsbout', 'awsbqueues', + 'awsbstat', 'awsbsub', 'pcluster']: + exe = Executable(os.path.join(self.prefix.bin, exe)) + exe('--help') diff --git a/var/spack/repos/builtin/packages/py-enum34/package.py b/var/spack/repos/builtin/packages/py-enum34/package.py index 0de579afb79..7bcdf462b3d 100644 --- a/var/spack/repos/builtin/packages/py-enum34/package.py +++ b/var/spack/repos/builtin/packages/py-enum34/package.py @@ -9,11 +9,16 @@ class PyEnum34(PythonPackage): """Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4.""" - homepage = "https://pypi.python.org/pypi/enum34" + 
homepage = "https://bitbucket.org/stoneleaf/enum34/src" url = "https://pypi.io/packages/source/e/enum34/enum34-1.1.6.tar.gz" version('1.1.6', sha256='8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1') - depends_on('python') + # enum34 is a backport of the enum library from Python 3.4. It is not + # intended to be used with Python 3.4+. In fact, it won't build at all + # for Python 3.6+, as new constructs were added to the builtin enum + # library that aren't present in enum34. See: + # https://bitbucket.org/stoneleaf/enum34/issues/19 + depends_on('python@:3.5', type=('build', 'run')) depends_on('py-ordereddict', when='^python@:2.6', type=('build', 'run')) depends_on('py-setuptools', type='build') From b442b21751634ff771d7dab990683ee3556d5c86 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 21 Jan 2020 23:36:10 -0800 Subject: [PATCH 129/178] bugfix: hashes should use ordered dictionaries (#14390) Despite trying very hard to keep dicts out of our hash algorithm, we seem to still accidentally add them in ways that the tests can't catch. This can cause errors when hashes are not computed deterministically. This fixes an error we saw with Python 3.5, where dictionary iteration order is random. In this instance, we saw a bug when reading Spack environment lockfiles -- The load would fail like this: ``` ... File "/sw/spack/lib/spack/spack/environment.py", line 1249, in concretized_specs yield (s, self.specs_by_hash[h]) KeyError: 'qcttqplkwgxzjlycbs4rfxxladnt423p' ``` This was because the hashes differed depending on whether we wrote `path` or `module` first when recomputing the build hash as part of reading a Spack lockfile. We can fix it by ensuring a determistic iteration order. - [x] Fix two places (one that caused an issue, and one that did not... yet) where our to_node_dict-like methods were using regular python dicts. - [x] Also add a check that statically analyzes our to_node_dict functions and flags any that use Python dicts. 
The test found the two errors fixed here, specifically: ``` E AssertionError: assert [] == ['Use syaml_dict instead of ...pack/spack/spec.py:1495:28'] E Right contains more items, first extra item: 'Use syaml_dict instead of dict at /Users/gamblin2/src/spack/lib/spack/spack/spec.py:1495:28' E Full diff: E - [] E + ['Use syaml_dict instead of dict at ' E + '/Users/gamblin2/src/spack/lib/spack/spack/spec.py:1495:28'] ``` and ``` E AssertionError: assert [] == ['Use syaml_dict instead of ...ack/architecture.py:359:15'] E Right contains more items, first extra item: 'Use syaml_dict instead of dict at /Users/gamblin2/src/spack/lib/spack/spack/architecture.py:359:15' E Full diff: E - [] E + ['Use syaml_dict instead of dict at ' E + '/Users/gamblin2/src/spack/lib/spack/spack/architecture.py:359:15'] ``` --- lib/spack/spack/architecture.py | 8 ++-- lib/spack/spack/spec.py | 8 ++-- lib/spack/spack/test/spec_yaml.py | 73 +++++++++++++++++++++++++++++++ 3 files changed, 81 insertions(+), 8 deletions(-) diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py index 7552795cd23..378fb5d5d9c 100644 --- a/lib/spack/spack/architecture.py +++ b/lib/spack/spack/architecture.py @@ -351,10 +351,10 @@ def _cmp_key(self): return (self.name, self.version) def to_dict(self): - return { - 'name': self.name, - 'version': self.version - } + return syaml_dict([ + ('name', self.name), + ('version', self.version) + ]) @key_ordering diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index c553da796dc..f983e5c5590 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -1506,10 +1506,10 @@ def to_node_dict(self, hash=ht.dag_hash): d['parameters'] = params if self.external: - d['external'] = { - 'path': self.external_path, - 'module': self.external_module - } + d['external'] = syaml.syaml_dict([ + ('path', self.external_path), + ('module', self.external_module), + ]) if not self._concrete: d['concrete'] = False diff --git 
a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py index 7fd2a36469a..96bed17b78b 100644 --- a/lib/spack/spack/test/spec_yaml.py +++ b/lib/spack/spack/test/spec_yaml.py @@ -8,13 +8,20 @@ YAML format preserves DAG information in the spec. """ +import ast +import inspect import os from collections import Iterable, Mapping +import pytest + +import spack.architecture import spack.hash_types as ht +import spack.spec import spack.util.spack_json as sjson import spack.util.spack_yaml as syaml +import spack.version from spack import repo from spack.spec import Spec, save_dependency_spec_yamls @@ -204,6 +211,72 @@ def test_ordered_read_not_required_for_consistent_dag_hash( assert spec.full_hash() == round_trip_reversed_json_spec.full_hash() +@pytest.mark.parametrize("module", [ + spack.spec, + spack.architecture, + spack.version, +]) +def test_hashes_use_no_python_dicts(module): + """Coarse check to make sure we don't use dicts in Spec.to_node_dict(). + + Python dicts are not guaranteed to iterate in a deterministic order + (at least not in all python versions) so we need to use lists and + syaml_dicts. syaml_dicts are ordered and ensure that hashes in Spack + are deterministic. + + This test is intended to handle cases that are not covered by the + consistency checks above, or that would be missed by a dynamic check. + This test traverses the ASTs of functions that are used in our hash + algorithms, finds instances of dictionaries being constructed, and + prints out the line numbers where they occur. 
+ + """ + class FindFunctions(ast.NodeVisitor): + """Find a function definition called to_node_dict.""" + def __init__(self): + self.nodes = [] + + def visit_FunctionDef(self, node): # noqa + if node.name in ("to_node_dict", "to_dict", "to_dict_or_value"): + self.nodes.append(node) + + class FindDicts(ast.NodeVisitor): + """Find source locations of dicts in an AST.""" + def __init__(self, filename): + self.nodes = [] + self.filename = filename + + def add_error(self, node): + self.nodes.append( + "Use syaml_dict instead of dict at %s:%s:%s" + % (self.filename, node.lineno, node.col_offset) + ) + + def visit_Dict(self, node): # noqa + self.add_error(node) + + def visit_Call(self, node): # noqa + name = None + if isinstance(node.func, ast.Name): + name = node.func.id + elif isinstance(node.func, ast.Attribute): + name = node.func.attr + + if name == 'dict': + self.add_error(node) + + find_functions = FindFunctions() + module_ast = ast.parse(inspect.getsource(module)) + find_functions.visit(module_ast) + + find_dicts = FindDicts(module.__file__) + for node in find_functions.nodes: + find_dicts.visit(node) + + # fail with offending lines if we found some dicts. + assert [] == find_dicts.nodes + + def reverse_all_dicts(data): """Descend into data and reverse all the dictionaries""" if isinstance(data, dict): From 5397d500c831a78d39caa3dd8aff931e1ea8ec4d Mon Sep 17 00:00:00 2001 From: Jeffrey Salmond Date: Wed, 8 Jan 2020 23:52:39 +0000 Subject: [PATCH 130/178] Remove extensions from view in the correct order (#12961) When removing packages from a view, extensions were being deactivated in an arbitrary order. Extensions must be deactivated in preorder traversal (dependents before dependencies), so when this order was violated the view update would fail. This commit ensures that views deactivate extensions based on a preorder traversal and adds a test for it. 
--- lib/spack/spack/filesystem_view.py | 38 ++++++++++++++++++------------ lib/spack/spack/test/views.py | 14 +++++++++++ 2 files changed, 37 insertions(+), 15 deletions(-) diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py index 5455ccb1077..3d17d7e4cec 100644 --- a/lib/spack/spack/filesystem_view.py +++ b/lib/spack/spack/filesystem_view.py @@ -399,23 +399,31 @@ def remove_specs(self, *specs, **kwargs): "The following packages will be unusable: %s" % ", ".join((s.name for s in dependents))) - extensions = set(filter(lambda s: s.package.is_extension, - to_deactivate)) - standalones = to_deactivate - extensions + # Determine the order that packages should be removed from the view; + # dependents come before their dependencies. + to_deactivate_sorted = list() + depmap = dict() + for spec in to_deactivate: + depmap[spec] = set(d for d in spec.traverse(root=False) + if d in to_deactivate) - # Please note that a traversal of the DAG in post-order and then - # forcibly removing each package should remove the need to specify - # with_dependents for deactivating extensions/allow removal without - # additional checks (force=True). If removal performance becomes - # unbearable for whatever reason, this should be the first point of - # attack. 
- # - # see: https://github.com/spack/spack/pull/3227#discussion_r117147475 - remove_extension = ft.partial(self.remove_extension, - with_dependents=with_dependents) + while depmap: + for spec in [s for s, d in depmap.items() if not d]: + to_deactivate_sorted.append(spec) + for s in depmap.keys(): + depmap[s].discard(spec) + depmap.pop(spec) + to_deactivate_sorted.reverse() - set(map(remove_extension, extensions)) - set(map(self.remove_standalone, standalones)) + # Ensure that the sorted list contains all the packages + assert set(to_deactivate_sorted) == to_deactivate + + # Remove the packages from the view + for spec in to_deactivate_sorted: + if spec.package.is_extension: + self.remove_extension(spec, with_dependents=with_dependents) + else: + self.remove_standalone(spec) self._purge_empty_directories() diff --git a/lib/spack/spack/test/views.py b/lib/spack/spack/test/views.py index a94fa42c21a..52ecb91e734 100644 --- a/lib/spack/spack/test/views.py +++ b/lib/spack/spack/test/views.py @@ -6,6 +6,8 @@ import os from spack.spec import Spec +from spack.directory_layout import YamlDirectoryLayout +from spack.filesystem_view import YamlFilesystemView def test_global_activation(install_mockery, mock_fetch): @@ -27,3 +29,15 @@ def test_global_activation(install_mockery, mock_fetch): extendee_spec.prefix, '.spack', 'extensions.yaml') assert (view.extensions_layout.extension_file_path(extendee_spec) == expected_path) + + +def test_remove_extensions_ordered(install_mockery, mock_fetch, tmpdir): + view_dir = str(tmpdir.join('view')) + layout = YamlDirectoryLayout(view_dir) + view = YamlFilesystemView(view_dir, layout) + e2 = Spec('extension2').concretized() + e2.package.do_install() + view.add_specs(e2) + + e1 = e2['extension1'] + view.remove_specs(e1, e2) From 69e5683ba49d9e3c7e5f21d8cfade2d0539097cf Mon Sep 17 00:00:00 2001 From: "Adam J. 
Stewart" Date: Mon, 6 Jan 2020 23:18:14 -0600 Subject: [PATCH 131/178] Fix outdated bash tab completion (#14392) --- share/spack/spack-completion.bash | 619 ++++++++++++++++++------------ 1 file changed, 370 insertions(+), 249 deletions(-) diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash index 5e8936125f8..8597cd3d7ef 100755 --- a/share/spack/spack-completion.bash +++ b/share/spack/spack-completion.bash @@ -34,6 +34,7 @@ function _bash_completion_spack { # For example, `spack -d install []` will call _spack_install # and `spack compiler add []` will call _spack_compiler_add local subfunction=$(IFS='_'; echo "_${COMP_WORDS_NO_FLAGS[*]}") + # Translate dashes to underscores, as dashes are not permitted in # compatibility mode. See https://github.com/spack/spack/pull/4079 subfunction=${subfunction//-/_} @@ -96,11 +97,11 @@ function _spack { if $list_options then compgen -W "-h --help -H --all-help --color -C --config-scope - -d --debug --pdb -e --env -D --env-dir -E --no-env - --use-env-repo -k --insecure -l --enable-locks - -L --disable-locks -m --mock -p --profile - --sorted-profile --lines -v --verbose --stacktrace - -V --version --print-shell-vars" -- "$cur" + -d --debug --timestamp --pdb -e --env -D --env-dir + -E --no-env --use-env-repo -k --insecure + -l --enable-locks -L --disable-locks -m --mock + -p --profile --sorted-profile --lines -v --verbose + --stacktrace -V --version --print-shell-vars" -- "$cur" else compgen -W "$(_subcommands)" -- "$cur" fi @@ -118,15 +119,16 @@ function _spack_activate { function _spack_add { if $list_options then - compgen -W "-h --help" -- "$cur" + compgen -W "-h --help -l --list-name" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi } function _spack_arch { - compgen -W "-h --help -p --platform -o --operating-system - -t --target --known-targets" -- "$cur" + compgen -W "-h --help --known-targets -p --platform + -o --operating-system -t --target -f --frontend + -b --backend" -- "$cur" } 
function _spack_blame { @@ -141,7 +143,7 @@ function _spack_blame { function _spack_bootstrap { compgen -W "-h --help -j --jobs --keep-prefix --keep-stage -n --no-checksum -v --verbose --use-cache --no-cache - --clean --dirty" -- "$cur" + --cache-only --clean --dirty" -- "$cur" } function _spack_build { @@ -156,7 +158,7 @@ function _spack_build { function _spack_build_env { if $list_options then - compgen -W "-h --help --clean --dirty" -- "$cur" + compgen -W "-h --help --clean --dirty --dump --pickle" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -167,15 +169,17 @@ function _spack_buildcache { then compgen -W "-h --help" -- "$cur" else - compgen -W "create install keys list" -- "$cur" + compgen -W "create install list keys preview check download + get-buildcache-name save-yaml copy update-index" -- "$cur" fi } function _spack_buildcache_create { if $list_options then - compgen -W "-h --help -r --rel -f --force -u --unsigned -a --allow-root - -k --key -d --directory" -- "$cur" + compgen -W "-h --help -r --rel -f --force -u --unsigned + -a --allow-root -k --key -d --directory + --no-rebuild-index -y --spec-yaml --no-deps" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -184,8 +188,18 @@ function _spack_buildcache_create { function _spack_buildcache_install { if $list_options then - compgen -W "-h --help -f --force -m --multiple -a --allow-root -u - --unsigned" -- "$cur" + compgen -W "-h --help -f --force -m --multiple -a --allow-root + -u --unsigned" -- "$cur" + else + compgen -W "$(_all_packages)" -- "$cur" + fi +} + +function _spack_buildcache_list { + if $list_options + then + compgen -W "-h --help -l --long -L --very-long -v --variants + -f --force" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -195,15 +209,42 @@ function _spack_buildcache_keys { compgen -W "-h --help -i --install -t --trust -f --force" -- "$cur" } -function _spack_buildcache_list { +function _spack_buildcache_preview { if $list_options then - compgen -W 
"-h --help -f --force" -- "$cur" + compgen -W "-h --help" -- "$cur" else - compgen -W "$(_all_packages)" -- "$cur" + compgen -W "$(_installed_packages)" -- "$cur" fi } +function _spack_buildcache_check { + compgen -W "-h --help -m --mirror-url -o --output-file --scope + -s --spec -y --spec-yaml --rebuild-on-error" -- "$cur" +} + +function _spack_buildcache_download { + compgen -W "-h --help -s --spec -y --spec-yaml -p --path + -c --require-cdashid" -- "$cur" +} + +function _spack_buildcache_get_buildcache_name { + compgen -W "-h --help -s --spec -y --spec-yaml" -- "$cur" +} + +function _spack_buildcache_save_yaml { + compgen -W "-h --help --root-spec --root-spec-yaml -s --specs + -y --yaml-dir" -- "$cur" +} + +function _spack_buildcache_copy { + compgen -W "-h --help --base-dir --spec-yaml --destination-url" -- "$cur" +} + +function _spack_buildcache_update_index { + compgen -W "-h --help -d --mirror-url" -- "$cur" +} + function _spack_cd { if $list_options then @@ -257,29 +298,16 @@ function _spack_compiler { fi } -function _spack_compiler_add { - if $list_options - then - compgen -W "-h --help --scope" -- "$cur" - fi -} - function _spack_compiler_find { - # Alias to `spack compiler add` - _spack_compiler_add -} - -function _spack_compiler_info { if $list_options then compgen -W "-h --help --scope" -- "$cur" - else - compgen -W "$(_installed_compilers)" -- "$cur" fi } -function _spack_compiler_list { - compgen -W "-h --help --scope" -- "$cur" +function _spack_compiler_add { + # Alias to `spack compiler find` + _spack_compiler_find } function _spack_compiler_remove { @@ -296,10 +324,24 @@ function _spack_compiler_rm { _spack_compiler_remove } -function _spack_compilers { +function _spack_compiler_list { compgen -W "-h --help --scope" -- "$cur" } +function _spack_compiler_info { + if $list_options + then + compgen -W "-h --help --scope" -- "$cur" + else + compgen -W "$(_installed_compilers)" -- "$cur" + fi +} + +function _spack_compilers { + # Alias to `spack 
compiler list` + _spack_compiler_list +} + function _spack_concretize { compgen -W "-h --help -f --force" -- "$cur" } @@ -309,25 +351,7 @@ function _spack_config { then compgen -W "-h --help --scope" -- "$cur" else - compgen -W "blame edit get" -- "$cur" - fi -} - -function _spack_config_blame { - if $list_options - then - compgen -W "-h --help" -- "$cur" - else - compgen -W "mirrors repos modules packages config compilers" -- "$cur" - fi -} - -function _spack_config_edit { - if $list_options - then - compgen -W "-h --help --print-file" -- "$cur" - else - compgen -W "mirrors repos modules packages config compilers" -- "$cur" + compgen -W "get blame edit" -- "$cur" fi } @@ -336,7 +360,28 @@ function _spack_config_get { then compgen -W "-h --help" -- "$cur" else - compgen -W "mirrors repos modules packages config compilers" -- "$cur" + compgen -W "compilers mirrors repos packages modules config + upstreams" -- "$cur" + fi +} + +function _spack_config_blame { + if $list_options + then + compgen -W "-h --help" -- "$cur" + else + compgen -W "compilers mirrors repos packages modules config + upstreams" -- "$cur" + fi +} + +function _spack_config_edit { + if $list_options + then + compgen -W "-h --help --print-file" -- "$cur" + else + compgen -W "compilers mirrors repos packages modules config + upstreams" -- "$cur" fi } @@ -382,8 +427,8 @@ function _spack_debug_create_db_tarball { function _spack_dependencies { if $list_options then - compgen -W "-h --help -i --installed -t --transitive -V - --no-expand-virtuals" -- "$cur" + compgen -W "-h --help -i --installed -t --transitive + --deptype -V --no-expand-virtuals" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -398,13 +443,36 @@ function _spack_dependents { fi } +function _spack_deprecate { + if $list_options + then + compgen -W "-h --help -y --yes-to-all -d --dependencies + -D --no-dependencies -i --install-deprecator + -I --no-install-deprecator -l --link-type" -- "$cur" + else + compgen -W 
"$(_all_packages)" -- "$cur" + fi +} + +function _spack_dev_build { + if $list_options + then + compgen -W "-h --help -j --jobs -d --source-path + -i --ignore-dependencies -n --no-checksum + --keep-prefix --skip-patch -q --quiet -u --until + --clean --dirty" -- "$cur" + else + compgen -W "$(_all_packages)" -- "$cur" + fi +} + function _spack_diy { if $list_options then compgen -W "-h --help -j --jobs -d --source-path -i --ignore-dependencies -n --no-checksum - --keep-prefix --skip-patch -q --quiet --clean - --dirty -u --until" -- "$cur" + --keep-prefix --skip-patch -q --quiet -u --until + --clean --dirty" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -417,8 +485,8 @@ function _spack_docs { function _spack_edit { if $list_options then - compgen -W "-h --help -b --build-system -c --command -d --docs -t - --test -m --module -r --repo -N --namespace" -- "$cur" + compgen -W "-h --help -b --build-system -c --command -d --docs + -t --test -m --module -r --repo -N --namespace" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -429,47 +497,29 @@ function _spack_env { then compgen -W "-h --help" -- "$cur" else - compgen -W "activate create deactivate list ls loads - remove rm status st" -- "$cur" + compgen -W "activate deactivate create remove rm list ls + status st loads view" -- "$cur" fi } function _spack_env_activate { if $list_options then - compgen -W "-h --help --sh --csh -d --dir -p --prompt" -- "$cur" + compgen -W "-h --help --sh --csh -v --with-view + -V --without-view -d --dir -p --prompt" -- "$cur" else compgen -W "$(_environments)" -- "$cur" fi } -function _spack_env_create { - if $list_options - then - compgen -W "-h --help -d --dir" -- "$cur" - fi -} - function _spack_env_deactivate { compgen -W "-h --help --sh --csh" -- "$cur" } -function _spack_env_list { - compgen -W "-h --help" -- "$cur" -} - -function _spack_env_ls { - # Alias to `spack env list` - _spack_env_list -} - -function _spack_env_loads { +function _spack_env_create { if 
$list_options then - compgen -W "-h --help -m --module-type --input-only -p --prefix - -x --exclude -r --dependencies" -- "$cur" - else - compgen -W "$(_environments)" -- "$cur" + compgen -W "-h --help -d --dir --without-view --with-view" -- "$cur" fi } @@ -487,6 +537,15 @@ function _spack_env_rm { _spack_env_remove } +function _spack_env_list { + compgen -W "-h --help" -- "$cur" +} + +function _spack_env_ls { + # Alias to `spack env list` + _spack_env_list +} + function _spack_env_status { compgen -W "-h --help" -- "$cur" } @@ -496,15 +555,34 @@ function _spack_env_st { _spack_env_status } +function _spack_env_loads { + if $list_options + then + compgen -W "-h --help -m --module-type --input-only -p --prefix + -x --exclude -r --dependencies" -- "$cur" + else + compgen -W "$(_environments)" -- "$cur" + fi +} + +function _spack_env_view { + if $list_options + then + compgen -W "-h --help" -- "$cur" + else + compgen -W "regenerate enable disable" -- "$cur" + fi +} + function _spack_extensions { if $list_options then - compgen -W "-h --help -l --long -p --paths -d --deps - -s --show -v --view" -- "$cur" + compgen -W "-h --help -l --long -L --very-long -d --deps + -p --paths -s --show -v --view" -- "$cur" else - compgen -W "aspell go-bootstrap go icedtea java jdk lua - matlab mofem-cephas octave perl python r ruby - rust tcl yorick" -- "$cur" + compgen -W "aspell go-bootstrap go icedtea jdk kim-api lua + matlab mofem-cephas octave openjdk perl python r + ruby rust tcl yorick" -- "$cur" fi } @@ -521,13 +599,13 @@ function _spack_fetch { function _spack_find { if $list_options then - compgen -W "-h --help -s --short -d --deps -p --paths - --format --json --groups --no-groups -l --long - -L --very-long -t --tags -c --show-concretized - -f --show-flags --show-full-compiler -x --explicit - -X --implicit -u --unknown -m --missing -v --variants - -M --only-missing -N --namespace --start-date - --end-date" -- "$cur" + compgen -W "-h --help --format --json -d --deps -p --paths 
+ --groups --no-groups -l --long -L --very-long + -t --tags -c --show-concretized -f --show-flags + --show-full-compiler -x --explicit -X --implicit + -u --unknown -m --missing -v --variants + -M --only-missing --deprecated --only-deprecated + -N --namespace --start-date --end-date" -- "$cur" else compgen -W "$(_installed_packages)" -- "$cur" fi @@ -546,36 +624,15 @@ function _spack_gpg { then compgen -W "-h --help" -- "$cur" else - compgen -W "create export init list sign trust untrust verify" -- "$cur" + compgen -W "verify trust untrust sign create list init + export" -- "$cur" fi } -function _spack_gpg_create { - if $list_options - then - compgen -W "-h --help --comment --expires --export" -- "$cur" - fi -} - -function _spack_gpg_export { +function _spack_gpg_verify { if $list_options then compgen -W "-h --help" -- "$cur" - fi -} - -function _spack_gpg_init { - compgen -W "-h --help" -- "$cur" -} - -function _spack_gpg_list { - compgen -W "-h --help --trusted --signing" -- "$cur" -} - -function _spack_gpg_sign { - if $list_options - then - compgen -W "-h --help --output --key --clearsign" -- "$cur" else compgen -W "$(installed_packages)" -- "$cur" fi @@ -592,23 +649,49 @@ function _spack_gpg_untrust { if $list_options then compgen -W "-h --help --signing" -- "$cur" + else + compgen -W "$(_keys)" -- "$cur" fi } -function _spack_gpg_verify { +function _spack_gpg_sign { + if $list_options + then + compgen -W "-h --help --output --key --clearsign" -- "$cur" + else + compgen -W "$(installed_packages)" -- "$cur" + fi +} + +function _spack_gpg_create { + if $list_options + then + compgen -W "-h --help --comment --expires --export" -- "$cur" + fi +} + +function _spack_gpg_list { + compgen -W "-h --help --trusted --signing" -- "$cur" +} + +function _spack_gpg_init { + compgen -W "-h --help" -- "$cur" +} + +function _spack_gpg_export { if $list_options then compgen -W "-h --help" -- "$cur" else - compgen -W "$(installed_packages)" -- "$cur" + compgen -W "$(_keys)" -- 
"$cur" fi } function _spack_graph { if $list_options then - compgen -W "-h --help -a --ascii -d --dot -n --normalize -s --static - -i --installed -t --deptype" -- "$cur" + compgen -W "-h --help -a --ascii -d --dot -s --static + -i --installed --deptype" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -635,12 +718,14 @@ function _spack_info { function _spack_install { if $list_options then - compgen -W "-h --help --only -j --jobs -I --install-status - --overwrite --keep-prefix --keep-stage --dont-restage - --use-cache --no-cache --show-log-on-error --source + compgen -W "-h --help --only -u --until -j --jobs --overwrite + --keep-prefix --keep-stage --dont-restage --use-cache + --no-cache --cache-only --show-log-on-error --source -n --no-checksum -v --verbose --fake --only-concrete - -f --file --clean --dirty --test --log-format --log-file - --cdash-upload-url -y --yes-to-all" -- "$cur" + -f --file --clean --dirty --test --run-tests + --log-format --log-file -y --yes-to-all + --cdash-upload-url --cdash-build --cdash-site + --cdash-track --cdash-buildstamp" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -704,7 +789,7 @@ function _spack_log_parse { function _spack_maintainers { if $list_options then - compgen -W "-h --help -a --all --maintained --unmaintained + compgen -W "-h --help --maintained --unmaintained -a --all --by-user" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" @@ -716,7 +801,17 @@ function _spack_mirror { then compgen -W "-h --help -n --no-checksum" -- "$cur" else - compgen -W "add create list remove rm" -- "$cur" + compgen -W "create add remove rm set-url list" -- "$cur" + fi +} + +function _spack_mirror_create { + if $list_options + then + compgen -W "-h --help -d --directory -a --all -f --file + -D --dependencies -n --versions-per-spec" -- "$cur" + else + compgen -W "$(_all_packages)" -- "$cur" fi } @@ -727,20 +822,6 @@ function _spack_mirror_add { fi } -function _spack_mirror_create { - if $list_options - then - 
compgen -W "-h --help -d --directory -f --file - -D --dependencies -n --versions-per-spec" -- "$cur" - else - compgen -W "$(_all_packages)" -- "$cur" - fi -} - -function _spack_mirror_list { - compgen -W "-h --help --scope" -- "$cur" -} - function _spack_mirror_remove { if $list_options then @@ -755,6 +836,19 @@ function _spack_mirror_rm { _spack_mirror_remove } +function _spack_mirror_set_url { + if $list_options + then + compgen -W "-h --help --push --scope" -- "$cur" + else + compgen -W "$(_mirrors)" -- "$cur" + fi +} + +function _spack_mirror_list { + compgen -W "-h --help --scope" -- "$cur" +} + function _spack_module { if $list_options then @@ -764,55 +858,6 @@ function _spack_module { fi } -function _spack_module_tcl { - if $list_options - then - compgen -W "-h --help" -- "$cur" - else - compgen -W "refresh find rm loads" -- "$cur" - fi -} - - -function _spack_module_tcl_find { - if $list_options - then - compgen -W "-h --help --full-path -r --dependencies" -- "$cur" - else - compgen -W "$(_installed_packages)" -- "$cur" - fi -} - -function _spack_module_tcl_loads { - if $list_options - then - compgen -W "-h --help --input-only -p --prefix -x --exclude - -r --dependencies" -- "$cur" - else - compgen -W "$(_installed_packages)" -- "$cur" - fi - -} - -function _spack_module_tcl_refresh { - if $list_options - then - compgen -W "-h --help --delete-tree -y --yes-to-all" -- "$cur" - else - compgen -W "$(_installed_packages)" -- "$cur" - fi -} - -function _spack_module_tcl_rm { - if $list_options - then - compgen -W "-h --help -y --yes-to-all" -- "$cur" - else - compgen -W "$(_installed_packages)" -- "$cur" - fi -} - - function _spack_module_lmod { if $list_options then @@ -822,6 +867,15 @@ function _spack_module_lmod { fi } +function _spack_module_lmod_refresh { + if $list_options + then + compgen -W "-h --help --delete-tree --upstream-modules + -y --yes-to-all" -- "$cur" + else + compgen -W "$(_installed_packages)" -- "$cur" + fi +} function 
_spack_module_lmod_find { if $list_options @@ -832,6 +886,15 @@ function _spack_module_lmod_find { fi } +function _spack_module_lmod_rm { + if $list_options + then + compgen -W "-h --help -y --yes-to-all" -- "$cur" + else + compgen -W "$(_installed_packages)" -- "$cur" + fi +} + function _spack_module_lmod_loads { if $list_options then @@ -843,16 +906,44 @@ function _spack_module_lmod_loads { } -function _spack_module_lmod_refresh { +function _spack_module_lmod_setdefault { if $list_options then - compgen -W "-h --help --delete-tree -y --yes-to-all" -- "$cur" + compgen -W "-h --help" -- "$cur" else compgen -W "$(_installed_packages)" -- "$cur" fi } -function _spack_module_lmod_rm { +function _spack_module_tcl { + if $list_options + then + compgen -W "-h --help" -- "$cur" + else + compgen -W "refresh find rm loads" -- "$cur" + fi +} + +function _spack_module_tcl_refresh { + if $list_options + then + compgen -W "-h --help --delete-tree --upstream-modules + -y --yes-to-all" -- "$cur" + else + compgen -W "$(_installed_packages)" -- "$cur" + fi +} + +function _spack_module_tcl_find { + if $list_options + then + compgen -W "-h --help --full-path -r --dependencies" -- "$cur" + else + compgen -W "$(_installed_packages)" -- "$cur" + fi +} + +function _spack_module_tcl_rm { if $list_options then compgen -W "-h --help -y --yes-to-all" -- "$cur" @@ -861,10 +952,11 @@ function _spack_module_lmod_rm { fi } -function _spack_module_lmod_setdefault { +function _spack_module_tcl_loads { if $list_options then - compgen -W "-h --help" -- "$cur" + compgen -W "-h --help --input-only -p --prefix -x --exclude + -r --dependencies" -- "$cur" else compgen -W "$(_installed_packages)" -- "$cur" fi @@ -884,7 +976,7 @@ function _spack_pkg { then compgen -W "-h --help" -- "$cur" else - compgen -W "add added diff list removed" -- "$cur" + compgen -W "add list diff added changed removed" -- "$cur" fi } @@ -897,7 +989,7 @@ function _spack_pkg_add { fi } -function _spack_pkg_added { +function 
_spack_pkg_list { # FIXME: How to list git revisions? if $list_options then @@ -913,7 +1005,7 @@ function _spack_pkg_diff { fi } -function _spack_pkg_list { +function _spack_pkg_added { # FIXME: How to list git revisions? if $list_options then @@ -921,6 +1013,14 @@ function _spack_pkg_list { fi } +function _spack_pkg_changed { + # FIXME: How to list git revisions? + if $list_options + then + compgen -W "-h --help -t --type" -- "$cur" + fi +} + function _spack_pkg_removed { # FIXME: How to list git revisions? if $list_options @@ -956,10 +1056,15 @@ function _spack_reindex { compgen -W "-h --help" -- "$cur" } +function _spack_release_jobs { + compgen -W "-h --help -o --output-file -p --print-summary + --cdash-credentials" -- "$cur" +} + function _spack_remove { if $list_options then - compgen -W "-h --help -a --all -f --force" -- "$cur" + compgen -W "-h --help -a --all -l --list-name -f --force" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" fi @@ -970,14 +1075,7 @@ function _spack_repo { then compgen -W "-h --help" -- "$cur" else - compgen -W "add create list remove rm" -- "$cur" - fi -} - -function _spack_repo_add { - if $list_options - then - compgen -W "-h --help --scope" -- "$cur" + compgen -W "create list add remove rm" -- "$cur" fi } @@ -992,6 +1090,13 @@ function _spack_repo_list { compgen -W "-h --help --scope" -- "$cur" } +function _spack_repo_add { + if $list_options + then + compgen -W "-h --help --scope" -- "$cur" + fi +} + function _spack_repo_remove { if $list_options then @@ -1051,7 +1156,7 @@ function _spack_spec { if $list_options then compgen -W "-h --help -l --long -L --very-long -I --install-status - -j --json -y --yaml -c --cover -N --namespaces + -y --yaml -j --json -c --cover -N --namespaces -t --types" -- "$cur" else compgen -W "$(_all_packages)" -- "$cur" @@ -1092,34 +1197,37 @@ function _spack_unload { then compgen -W "-h --help" -- "$cur" else - compgen -W "$(_installed_packages)" + compgen -W "$(_installed_packages)" -- "$cur" fi 
} -function _spack_unuse { +function _spack_upload_s3 { if $list_options then compgen -W "-h --help" -- "$cur" else - compgen -W "$(_installed_packages)" + compgen -W "spec index" -- "$cur" fi } +function _spack_upload_s3_spec { + compgen -W "-h --help -s --spec -y --spec-yaml -b --base-dir + -e --endpoint-url" -- "$cur" +} + +function _spack_upload_s3_index { + compgen -W "-h --help -e --endpoint-url" -- "$cur" +} + function _spack_url { if $list_options then compgen -W "-h --help" -- "$cur" else - compgen -W "list parse stats summary" -- "$cur" + compgen -W "parse list summary stats" -- "$cur" fi } -function _spack_url_list { - compgen -W "-h --help -c --color -e --extrapolation - -n --incorrect-name -N --correct-name - -v --incorrect-version -V --correct-version" -- "$cur" -} - function _spack_url_parse { if $list_options then @@ -1127,20 +1235,27 @@ function _spack_url_parse { fi } -function _spack_url_stats { - compgen -W "-h --help" -- "$cur" +function _spack_url_list { + compgen -W "-h --help -c --color -e --extrapolation + -n --incorrect-name -N --correct-name + -v --incorrect-version -V --correct-version" -- "$cur" } function _spack_url_summary { compgen -W "-h --help" -- "$cur" } -function _spack_use { +function _spack_url_stats { + compgen -W "-h --help" -- "$cur" +} + +function _spack_verify { if $list_options then - compgen -W "-h --help -r --dependencies" -- "$cur" + compgen -W "-h --help -l --local -j --json -a --all -s --specs + -f --files" -- "$cur" else - compgen -W "$(_installed_packages)" -- "$cur" + compgen -W "$(_all_packages)" -- "$cur" fi } @@ -1159,8 +1274,16 @@ function _spack_view { compgen -W "-h --help -v --verbose -e --exclude -d --dependencies" -- "$cur" else - compgen -W "add check hard hardlink remove rm soft - statlink status symlink" -- "$cur" + compgen -W "symlink add soft hardlink hard remove rm statlink + status check" -- "$cur" + fi +} + +function _spack_view_symlink { + if $list_options + then + compgen -W "-h --help 
--projection-file + -i --ignore-conflicts" -- "$cur" fi } @@ -1169,23 +1292,24 @@ function _spack_view_add { _spack_view_symlink } -function _spack_view_check { - # Alias for `spack view statlink` - _spack_view_statlink -} - -function _spack_view_hard { - # Alias for `spack view hardlink` - _spack_view_hardlink +function _spack_view_soft { + # Alias for `spack view symlink` + _spack_view_symlink } function _spack_view_hardlink { if $list_options then - compgen -W "-h --help -i --ignore-conflicts" -- "$cur" + compgen -W "-h --help --projection-file + -i --ignore-conflicts" -- "$cur" fi } +function _spack_view_hard { + # Alias for `spack view hardlink` + _spack_view_hardlink +} + function _spack_view_remove { if $list_options then @@ -1198,11 +1322,6 @@ function _spack_view_rm { _spack_view_remove } -function _spack_view_soft { - # Alias for `spack view symlink` - _spack_view_symlink -} - function _spack_view_statlink { if $list_options then @@ -1215,11 +1334,9 @@ function _spack_view_status { _spack_view_statlink } -function _spack_view_symlink { - if $list_options - then - compgen -W "-h --help -i --ignore-conflicts" -- "$cur" - fi +function _spack_view_check { + # Alias for `spack view statlink` + _spack_view_statlink } # Helper functions for subcommands @@ -1264,6 +1381,10 @@ function _environments { spack env list } +function _keys { + spack gpg list +} + # Testing functions function _test_vars { From 4da8f7fceff7c9f829ed99823c288abf6b81db48 Mon Sep 17 00:00:00 2001 From: Sajid Ali <30510036+s-sajid-ali@users.noreply.github.com> Date: Thu, 2 Jan 2020 15:30:11 -0600 Subject: [PATCH 132/178] RHEL8 bugfix for module_cmd (#14349) --- lib/spack/spack/util/module_cmd.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py index 4ff6b0de431..d2036707691 100644 --- a/lib/spack/spack/util/module_cmd.py +++ b/lib/spack/spack/util/module_cmd.py @@ -9,6 +9,7 @@ """ import subprocess 
import os +import sys import json import re @@ -31,7 +32,7 @@ def module(*args): if args[0] in module_change_commands: # Do the module manipulation, then output the environment in JSON # and read the JSON back in the parent process to update os.environ - module_cmd += ' >/dev/null; python -c %s' % py_cmd + module_cmd += ' >/dev/null;' + sys.executable + ' -c %s' % py_cmd module_p = subprocess.Popen(module_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, From f9f28e8fbaaf6bbc3fa72050bfce34d6fad25173 Mon Sep 17 00:00:00 2001 From: "Adam J. Stewart" Date: Thu, 16 Jan 2020 15:46:18 -0600 Subject: [PATCH 133/178] Fix use of sys.executable for module/env commands (#14496) * Fix use of sys.executable for module/env commands * Fix unit tests * More consistent quotation, less duplication * Fix import syntax --- lib/spack/spack/util/environment.py | 5 +++-- lib/spack/spack/util/module_cmd.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index f7dc728e7cb..83b350d1c79 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -17,6 +17,7 @@ import llnl.util.tty as tty import spack.util.executable as executable +from spack.util.module_cmd import py_cmd from llnl.util.lang import dedupe @@ -918,8 +919,8 @@ def _source_single_file(file_and_args, environment): source_file.extend(x for x in file_and_args) source_file = ' '.join(source_file) - dump_cmd = 'import os, json; print(json.dumps(dict(os.environ)))' - dump_environment = sys.executable + ' -c "{0}"'.format(dump_cmd) + dump_environment = 'PYTHONHOME="{0}" "{1}" -c "{2}"'.format( + sys.prefix, sys.executable, py_cmd) # Try to source the file source_file_arguments = ' '.join([ diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py index d2036707691..1781e050321 100644 --- a/lib/spack/spack/util/module_cmd.py +++ b/lib/spack/spack/util/module_cmd.py @@ -18,7 
+18,7 @@ # This list is not exhaustive. Currently we only use load and unload # If we need another option that changes the environment, add it here. module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse'] -py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'" +py_cmd = 'import os; import json; print(json.dumps(dict(os.environ)))' # This is just to enable testing. I hate it but we can't find a better way _test_mode = False @@ -32,7 +32,8 @@ def module(*args): if args[0] in module_change_commands: # Do the module manipulation, then output the environment in JSON # and read the JSON back in the parent process to update os.environ - module_cmd += ' >/dev/null;' + sys.executable + ' -c %s' % py_cmd + module_cmd += ' > /dev/null; PYTHONHOME="{0}" "{1}" -c "{2}"'.format( + sys.prefix, sys.executable, py_cmd) module_p = subprocess.Popen(module_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, From 010f9451c9d27fb1e8b73802876123573c872d17 Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Sat, 25 Jan 2020 01:49:45 +0100 Subject: [PATCH 134/178] bugfix: make `_source_single_file` work in venvs (#14569) Using `sys.executable` to run Python in a sub-shell doesn't always work in a virtual environment as the `sys.executable` Python is not necessarily compatible with any loaded spack/other virtual environment. 
- revert use of sys.executable to print out subshell environment (#14496) - try instead to use an available python, then if there *is not* one, use `sys.executable` - this addresses RHEL8 (where there is no `python` and `PYTHONHOME` issue in a simpler way --- lib/spack/spack/util/environment.py | 11 ++++++++--- lib/spack/spack/util/module_cmd.py | 5 ++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py index 83b350d1c79..bfd6300ec8f 100644 --- a/lib/spack/spack/util/environment.py +++ b/lib/spack/spack/util/environment.py @@ -17,7 +17,6 @@ import llnl.util.tty as tty import spack.util.executable as executable -from spack.util.module_cmd import py_cmd from llnl.util.lang import dedupe @@ -919,8 +918,14 @@ def _source_single_file(file_and_args, environment): source_file.extend(x for x in file_and_args) source_file = ' '.join(source_file) - dump_environment = 'PYTHONHOME="{0}" "{1}" -c "{2}"'.format( - sys.prefix, sys.executable, py_cmd) + # If the environment contains 'python' use it, if not + # go with sys.executable. Below we just need a working + # Python interpreter, not necessarily sys.executable. + python_cmd = executable.which('python3', 'python', 'python2') + python_cmd = python_cmd.name if python_cmd else sys.executable + + dump_cmd = 'import os, json; print(json.dumps(dict(os.environ)))' + dump_environment = python_cmd + ' -c "{0}"'.format(dump_cmd) # Try to source the file source_file_arguments = ' '.join([ diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py index 1781e050321..d2036707691 100644 --- a/lib/spack/spack/util/module_cmd.py +++ b/lib/spack/spack/util/module_cmd.py @@ -18,7 +18,7 @@ # This list is not exhaustive. Currently we only use load and unload # If we need another option that changes the environment, add it here. 
module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse'] -py_cmd = 'import os; import json; print(json.dumps(dict(os.environ)))' +py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'" # This is just to enable testing. I hate it but we can't find a better way _test_mode = False @@ -32,8 +32,7 @@ def module(*args): if args[0] in module_change_commands: # Do the module manipulation, then output the environment in JSON # and read the JSON back in the parent process to update os.environ - module_cmd += ' > /dev/null; PYTHONHOME="{0}" "{1}" -c "{2}"'.format( - sys.prefix, sys.executable, py_cmd) + module_cmd += ' >/dev/null;' + sys.executable + ' -c %s' % py_cmd module_p = subprocess.Popen(module_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, From 3a3a58a10690dcf1fce94d8671a665a514e0e5ab Mon Sep 17 00:00:00 2001 From: codeandkey Date: Fri, 7 Feb 2020 14:28:47 -0600 Subject: [PATCH 135/178] r-stargazer: new package at 5.2.2 --- .../builtin/packages/r-stargazer/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-stargazer/package.py diff --git a/var/spack/repos/builtin/packages/r-stargazer/package.py b/var/spack/repos/builtin/packages/r-stargazer/package.py new file mode 100644 index 00000000000..fff3c6ece24 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-stargazer/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RStargazer(RPackage): + """stargazer: Well-Formatted Regression and Summary Statistics Tables""" + + homepage = "https://cloud.r-project.org/package=stargazer" + url = "https://cloud.r-project.org/src/contrib/stargazer_5.2.2.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/stargazer" + + version('5.2.2', sha256='70eb4a13a6ac1bfb35af07cb8a63d501ad38dfd9817fc3fba6724260b23932de') From 3e5427078d5a0d317be889058078fa4389592e8d Mon Sep 17 00:00:00 2001 From: codeandkey Date: Fri, 7 Feb 2020 14:23:55 -0600 Subject: [PATCH 136/178] r-evd: new package at 2.3-3 --- .../repos/builtin/packages/r-evd/package.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-evd/package.py diff --git a/var/spack/repos/builtin/packages/r-evd/package.py b/var/spack/repos/builtin/packages/r-evd/package.py new file mode 100644 index 00000000000..c30bac399c0 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-evd/package.py @@ -0,0 +1,16 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class REvd(RPackage): + """evd: Functions for Extreme Value Distributions""" + + homepage = "https://cloud.r-project.org/package=evd" + url = "https://cloud.r-project.org/src/contrib/evd_2.3-3.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/evd" + + version('2.3-3', sha256='2fc5ef2e0c3a2a9392425ddd45914445497433d90fb80b8c363877baee4559b4') From a8d5c6ccf22cba113e60efbd6f0d2142f61bce14 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 7 Feb 2020 16:51:44 -0600 Subject: [PATCH 137/178] version bump: 0.13.4 --- lib/spack/spack/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py index 00744096613..953ec89f9f4 100644 --- a/lib/spack/spack/__init__.py +++ b/lib/spack/spack/__init__.py @@ -5,7 +5,7 @@ #: major, minor, patch version for Spack, in a tuple -spack_version_info = (0, 13, 3) +spack_version_info = (0, 13, 4) #: String containing Spack version joined with .'s spack_version = '.'.join(str(v) for v in spack_version_info) From 0311b63e0bf9c88cbfc9ddf470fcd1f674f24073 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Fri, 7 Feb 2020 16:52:07 -0600 Subject: [PATCH 138/178] update CHANGELOG.md for 0.13.4 --- CHANGELOG.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2b2a1cf9f01..fef5553c8fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,12 @@ +# v0.13.4 (2020-02-07) + +This release contains several bugfixes: + +* bugfixes for invoking python in various environments (#14349, #14496, #14569) +* brought tab completion up to date (#14392) +* bugfix for removing extensions from views in order (#12961) +* bugfix for nondeterministic hashing for specs with externals (#14390) + # v0.13.3 (2019-12-23) This release contains more major performance improvements for Spack From e22ac814b02b2440d9f656f8091bf28942af3273 Mon Sep 17 00:00:00 
2001 From: Dan Lipsa Date: Fri, 7 Feb 2020 19:55:04 -0500 Subject: [PATCH 139/178] Align default libxml2 settings for gettext and Python (#14795) Python depends on gettext. Packages that depend on gettext and Python together will encounter a concretizer bug which incorrectly detects a constraint conflict. This sets the default value of +libxml2 in Python to be the same as gettext so that packages which depend on both (like mesa) can successfully concretize without adding manual constraints. --- var/spack/repos/builtin/packages/python/package.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py index f666d4dd4df..37d65c54c00 100644 --- a/var/spack/repos/builtin/packages/python/package.py +++ b/var/spack/repos/builtin/packages/python/package.py @@ -71,7 +71,7 @@ class Python(AutotoolsPackage): extendable = True # Variants to avoid cyclical dependencies for concretizer - variant('libxml2', default=False, + variant('libxml2', default=True, description='Use a gettext library build with libxml2') variant( From 7a10478708ea23b2a0df8c7d8f5fd57ff34372e6 Mon Sep 17 00:00:00 2001 From: "Mark W. Krentel" Date: Sun, 9 Feb 2020 11:10:44 -0600 Subject: [PATCH 140/178] intel-tbb: fix sha256 sums for 2020 versions Fixes #14850. Commit 6b1958219 added versions 2020 and 2020.1 for intel-tbb as part of updating several intel packages but added the wrong sha256 sums for the github/01org repository. Also, version 2020 is 2020, not 2020.0. Add patch makefile-debug to restore the debug targets. 
--- .../packages/intel-tbb/makefile-debug.patch | 42 +++++++++++++++++++ .../builtin/packages/intel-tbb/package.py | 7 +++- 2 files changed, 47 insertions(+), 2 deletions(-) create mode 100644 var/spack/repos/builtin/packages/intel-tbb/makefile-debug.patch diff --git a/var/spack/repos/builtin/packages/intel-tbb/makefile-debug.patch b/var/spack/repos/builtin/packages/intel-tbb/makefile-debug.patch new file mode 100644 index 00000000000..8db5e2b985e --- /dev/null +++ b/var/spack/repos/builtin/packages/intel-tbb/makefile-debug.patch @@ -0,0 +1,42 @@ +The debug targets were removed starting with rev 2020. +This patch restores them. + + +--- tbb-2020/Makefile.orig 2019-12-18 02:42:41.000000000 -0600 ++++ tbb-2020/Makefile 2020-02-09 00:27:17.058452442 -0600 +@@ -26,15 +26,19 @@ + all: tbb tbbmalloc tbbproxy test examples + + tbb: mkdir ++ $(MAKE) -C "$(work_dir)_debug" -r -f $(tbb_root)/build/Makefile.tbb cfg=debug + $(MAKE) -C "$(work_dir)_release" -r -f $(tbb_root)/build/Makefile.tbb cfg=release + + tbbmalloc: mkdir ++ $(MAKE) -C "$(work_dir)_debug" -r -f $(tbb_root)/build/Makefile.tbbmalloc cfg=debug malloc + $(MAKE) -C "$(work_dir)_release" -r -f $(tbb_root)/build/Makefile.tbbmalloc cfg=release malloc + + tbbproxy: mkdir ++ $(MAKE) -C "$(work_dir)_debug" -r -f $(tbb_root)/build/Makefile.tbbproxy cfg=debug tbbproxy + $(MAKE) -C "$(work_dir)_release" -r -f $(tbb_root)/build/Makefile.tbbproxy cfg=release tbbproxy + + tbbbind: mkdir ++ $(MAKE) -C "$(work_dir)_debug" -r -f $(tbb_root)/build/Makefile.tbbbind cfg=debug tbbbind + $(MAKE) -C "$(work_dir)_release" -r -f $(tbb_root)/build/Makefile.tbbbind cfg=release tbbbind + + test: tbb tbbmalloc $(if $(use_proxy),tbbproxy) +@@ -42,6 +46,7 @@ + -$(MAKE) -C "$(work_dir)_release" -r -f $(tbb_root)/build/Makefile.test cfg=release + + rml: mkdir ++ $(MAKE) -C "$(work_dir)_debug" -r -f $(tbb_root)/build/Makefile.rml cfg=debug + $(MAKE) -C "$(work_dir)_release" -r -f $(tbb_root)/build/Makefile.rml cfg=release + + examples: tbb 
tbbmalloc +@@ -64,6 +69,7 @@ + $(shell $(MAKE) -s -i -r -C examples -f Makefile tbb_root=.. clean >$(NUL) 2>$(NUL)) + + mkdir: ++ $(shell $(MD) "$(work_dir)_debug" >$(NUL) 2>$(NUL)) + $(shell $(MD) "$(work_dir)_release" >$(NUL) 2>$(NUL)) + @echo Created the $(work_dir)_release directory + diff --git a/var/spack/repos/builtin/packages/intel-tbb/package.py b/var/spack/repos/builtin/packages/intel-tbb/package.py index 0ae85edabb9..4b68ae5b4e8 100644 --- a/var/spack/repos/builtin/packages/intel-tbb/package.py +++ b/var/spack/repos/builtin/packages/intel-tbb/package.py @@ -23,8 +23,8 @@ class IntelTbb(Package): # See url_for_version() below. - version('2020.1', sha256='48d51c63b16787af54e1ee4aaf30042087f20564b4eecf9a032d5568bc2f0bf8') - version('2020.0', sha256='8eed2377ac62e6ac10af5a8303ce861e4525ffe491a061b48e8fe094fc741ce9') + version('2020.1', sha256='72cffaeac3b50b117c4e2279f9162308d35873b3e744aff5a088beff6f65c9af') + version('2020', sha256='db80f4f7abb95c2d08fe64abdc0a9250903e4c725f1c667ac517450de426023a') version('2019.9', sha256='15652f5328cf00c576f065e5cd3eaf3317422fe82afb67a9bcec0dc065bd2abe') version('2019.8', sha256='7b1fd8caea14be72ae4175896510bf99c809cd7031306a1917565e6de7382fba') version('2019.7', sha256='4204a93f4c0fd989fb6f79acae74feb02ee39725c93968773d9b6efeb75c7a6a') @@ -102,6 +102,9 @@ class IntelTbb(Package): patch("tbb_cmakeConfig-2019.5.patch", level=0, when='@2019.5:') patch("tbb_cmakeConfig.patch", level=0, when='@2017.7:2019.4') + # Restore the debug targets. + patch("makefile-debug.patch", when="@2020:") + # Some very old systems don't support transactional memory. patch("disable-tm.patch", when='~tm') From 30b30e11dcdd8ea0894b61d427fcbe2dfe30d525 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Sun, 9 Feb 2020 19:26:31 -0600 Subject: [PATCH 141/178] New package - r-rmariadb (#14762) * New package - r-rmariadb This PR creates the r-rmariadb package. It also includes an update to the r-dbi package as a newer version of that is needed. 
* Update var/spack/repos/builtin/packages/r-rmariadb/package.py Argh, copy/paste. I wish the mirror would list itself as the archive site as well, but it just mirrors that data field from CRAN site. Thanks for catching that, I will make sure to look for that in the future. Co-Authored-By: Adam J. Stewart * Use mariadb-client Use mariadb-client so people can set a preferred provider. Co-authored-by: Adam J. Stewart --- .../repos/builtin/packages/r-dbi/package.py | 1 + .../builtin/packages/r-rmariadb/package.py | 29 +++++++++++++++++++ 2 files changed, 30 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-rmariadb/package.py diff --git a/var/spack/repos/builtin/packages/r-dbi/package.py b/var/spack/repos/builtin/packages/r-dbi/package.py index 49cfa2a22e1..cca20df5ecb 100644 --- a/var/spack/repos/builtin/packages/r-dbi/package.py +++ b/var/spack/repos/builtin/packages/r-dbi/package.py @@ -15,6 +15,7 @@ class RDbi(RPackage): url = "https://cloud.r-project.org/src/contrib/DBI_0.7.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/DBI" + version('1.1.0', sha256='a96db7fa39a58f1ed34c6e78d8f5f7e4cf0882afb301323b5c6975d6729203e4') version('1.0.0', sha256='ff16f118eb3f759183441835e932b87358dd80ab9800ce576a8f3df1b6f01cf5') version('0.4-1', sha256='eff14a9af4975f23f8e1f4347d82c33c32c0b4f4f3e11370c582a89aeb8ac68e') version('0.7', sha256='2557d5d59a45620ec9de340c2c25eec4cc478d3fc3f8b87979cf337c5bcfde11') diff --git a/var/spack/repos/builtin/packages/r-rmariadb/package.py b/var/spack/repos/builtin/packages/r-rmariadb/package.py new file mode 100644 index 00000000000..ac7ea12323a --- /dev/null +++ b/var/spack/repos/builtin/packages/r-rmariadb/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RRmariadb(RPackage): + """Implements a 'DBI'-compliant interface to 'MariaDB' + () and 'MySQL' () + databases.""" + + homepage = "https://rmariadb.r-dbi.org/" + url = "https://cloud.r-project.org/src/contrib/RMariaDB_1.0.8.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/RMariaDB" + + version('1.0.8', sha256='3c8aedc519dc063ceb068535a3700bc5caf26f867078cc5a228aa8961e2d99f5') + + depends_on('r@2.8.0:', type=('build', 'run')) + depends_on('r-bit64', type=('build', 'run')) + depends_on('r-dbi@1.1.0:', type=('build', 'run')) + depends_on('r-hms@0.5.0:', type=('build', 'run')) + depends_on('r-rcpp@0.12.4:', type=('build', 'run')) + depends_on('r-bh', type=('build', 'run')) + depends_on('r-plogr', type=('build', 'run')) + + # non-R dependencies + depends_on('mariadb-client') From 3ba5df376335fbb7bda4cd78e3c185e7bc6b09fb Mon Sep 17 00:00:00 2001 From: Kai Torben Ohlhus Date: Mon, 10 Feb 2020 12:04:09 +0900 Subject: [PATCH 142/178] octave: add 5.2.0 (#14868) Add version Octave 5.2.0 including sha256. 
--- var/spack/repos/builtin/packages/octave/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py index f70895bfda8..29ea7ea8983 100644 --- a/var/spack/repos/builtin/packages/octave/package.py +++ b/var/spack/repos/builtin/packages/octave/package.py @@ -26,6 +26,7 @@ class Octave(AutotoolsPackage, GNUMirrorPackage): extendable = True + version('5.2.0', sha256='2fea62b3c78d6f38e9451da8a4d26023840725977dffee5250d3d180f56595e1') version('5.1.0', sha256='e36b1124cac27c7caa51cc57de408c31676d5f0096349b4d50b57bfe1bcd7495') version('4.4.1', sha256='09fbd0f212f4ef21e53f1d9c41cf30ce3d7f9450fb44911601e21ed64c67ae97') version('4.4.0', sha256='72f846379fcec7e813d46adcbacd069d72c4f4d8f6003bcd92c3513aafcd6e96') From 1c5838be5c4e44b3a37bbf6a859b3eb24af7faf9 Mon Sep 17 00:00:00 2001 From: darmac Date: Mon, 10 Feb 2020 16:39:45 +0800 Subject: [PATCH 143/178] pngquant : add depency on libpng (#14836) --- var/spack/repos/builtin/packages/pngquant/package.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/pngquant/package.py b/var/spack/repos/builtin/packages/pngquant/package.py index 73d3946b487..a6bcd69de7b 100644 --- a/var/spack/repos/builtin/packages/pngquant/package.py +++ b/var/spack/repos/builtin/packages/pngquant/package.py @@ -16,3 +16,5 @@ class Pngquant(AutotoolsPackage): url = "http://pngquant.org/pngquant-2.12.5-src.tar.gz" version('2.12.5', sha256='3638936cf6270eeeaabcee42e10768d78e4dc07cac9310307835c1f58b140808') + + depends_on('libpng') From 85b6e3e6d40fc1f9eb585363ac15d914038f9a0c Mon Sep 17 00:00:00 2001 From: darmac Date: Mon, 10 Feb 2020 16:40:46 +0800 Subject: [PATCH 144/178] openldap: added v2.4.49 (#14815) --- var/spack/repos/builtin/packages/openldap/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/openldap/package.py b/var/spack/repos/builtin/packages/openldap/package.py 
index 0ef3a63f015..89d190ae273 100644 --- a/var/spack/repos/builtin/packages/openldap/package.py +++ b/var/spack/repos/builtin/packages/openldap/package.py @@ -19,6 +19,7 @@ class Openldap(AutotoolsPackage): homepage = "https://www.openldap.org/" url = "ftp://ftp.openldap.org/pub/OpenLDAP/openldap-release/openldap-2.4.48.tgz" + version('2.4.49', sha256='e3b117944b4180f23befe87d0dcf47f29de775befbc469dcf4ac3dab3311e56e') version('2.4.48', sha256='d9523ffcab5cd14b709fcf3cb4d04e8bc76bb8970113255f372bc74954c6074d') variant('client_only', default=True, description='Client only installation') From f274d89c3397f07f7a163e6b6740280dcea86cbd Mon Sep 17 00:00:00 2001 From: darmac Date: Mon, 10 Feb 2020 16:44:50 +0800 Subject: [PATCH 145/178] xterm: added versions up to v353 (#14829) --- var/spack/repos/builtin/packages/xterm/package.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/var/spack/repos/builtin/packages/xterm/package.py b/var/spack/repos/builtin/packages/xterm/package.py index 92bf2e5fbd2..e72da49f0cb 100644 --- a/var/spack/repos/builtin/packages/xterm/package.py +++ b/var/spack/repos/builtin/packages/xterm/package.py @@ -14,6 +14,10 @@ class Xterm(AutotoolsPackage): homepage = "http://invisible-island.net/xterm/" url = "ftp://ftp.invisible-island.net/xterm/xterm-327.tgz" + version('353', sha256='e521d3ee9def61f5d5c911afc74dd5c3a56ce147c7071c74023ea24cac9bb768') + version('350', sha256='aefb59eefd310268080d1a90a447368fb97a9a6737bfecfc3800bf6cc304104d') + version('340', sha256='b5c7f77b7afade798461e2a2f86d5af64f9c9c9f408b1af0f545add978df722a') + version('330', sha256='7aeef9f29f6b95e09f481173c8c3053357bf5ffe162585647f690fd1707556df') version('327', sha256='66fb2f6c35b342148f549c276b12a3aa3fb408e27ab6360ddec513e14376150b') depends_on('libxft') From 4d173579cb29ef1a96ad175a5f4b3d8593c4725c Mon Sep 17 00:00:00 2001 From: Kai Torben Ohlhus Date: Mon, 10 Feb 2020 18:35:29 +0900 Subject: [PATCH 146/178] openblas: added v3.8.0 (#14875) --- 
var/spack/repos/builtin/packages/openblas/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py index 695dcec80c3..b3ce744bf8b 100644 --- a/var/spack/repos/builtin/packages/openblas/package.py +++ b/var/spack/repos/builtin/packages/openblas/package.py @@ -18,6 +18,7 @@ class Openblas(MakefilePackage): git = 'https://github.com/xianyi/OpenBLAS.git' version('develop', branch='develop') + version('0.3.8', sha256='8f86ade36f0dbed9ac90eb62575137388359d97d8f93093b38abe166ad7ef3a8') version('0.3.7', sha256='bde136122cef3dd6efe2de1c6f65c10955bbb0cc01a520c2342f5287c28f9379') version('0.3.6', sha256='e64c8fe083832ffbc1459ab6c72f71d53afd3b36e8497c922a15a06b72e9002f') version('0.3.5', sha256='0950c14bd77c90a6427e26210d6dab422271bc86f9fc69126725833ecdaa0e85') From 42c829adb4583dd87bee21ddb82e0e3436e83887 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:38:44 -0600 Subject: [PATCH 147/178] New package: r-gsalib (#14873) --- .../repos/builtin/packages/r-gsalib/package.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-gsalib/package.py diff --git a/var/spack/repos/builtin/packages/r-gsalib/package.py b/var/spack/repos/builtin/packages/r-gsalib/package.py new file mode 100644 index 00000000000..fe105aa14dd --- /dev/null +++ b/var/spack/repos/builtin/packages/r-gsalib/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RGsalib(RPackage): + """This package contains utility functions used by the Genome Analysis + Toolkit (GATK) to load tables and plot data. 
The GATK is a toolkit for + variant discovery in high-throughput sequencing data.""" + + homepage = "https://cloud.r-project.org/package=gsalib" + url = "https://cloud.r-project.org/src/contrib/gsalib_2.1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/gsalib" + + version('2.1', sha256='e1b23b986c18b89a94c58d9db45e552d1bce484300461803740dacdf7c937fcc') From 716978e00c3989760adf3e9b3c15ab30c5750705 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:40:47 -0600 Subject: [PATCH 148/178] New package: r-copula (#14874) This PR adds the r-copula package and dependencies. - new package: r-adgoftest - new package: r-gsl - new package: r-pspline - new package: r-stabledist --- .../builtin/packages/r-adgoftest/package.py | 17 ++++++++ .../builtin/packages/r-copula/package.py | 39 +++++++++++++++++++ .../repos/builtin/packages/r-gsl/package.py | 21 ++++++++++ .../builtin/packages/r-pspline/package.py | 18 +++++++++ .../builtin/packages/r-stabledist/package.py | 20 ++++++++++ 5 files changed, 115 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-adgoftest/package.py create mode 100644 var/spack/repos/builtin/packages/r-copula/package.py create mode 100644 var/spack/repos/builtin/packages/r-gsl/package.py create mode 100644 var/spack/repos/builtin/packages/r-pspline/package.py create mode 100644 var/spack/repos/builtin/packages/r-stabledist/package.py diff --git a/var/spack/repos/builtin/packages/r-adgoftest/package.py b/var/spack/repos/builtin/packages/r-adgoftest/package.py new file mode 100644 index 00000000000..1117c5c986f --- /dev/null +++ b/var/spack/repos/builtin/packages/r-adgoftest/package.py @@ -0,0 +1,17 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RAdgoftest(RPackage): + """Anderson-Darling GoF test with p-value calculation based on Marsaglia's + 2004 paper 'Evaluating the Anderson-Darling Distribution'""" + + homepage = "https://cloud.r-project.org/package=ADGofTest" + url = "https://cloud.r-project.org/src/contrib/ADGofTest_0.3.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/ADGofTest" + + version('0.3', sha256='9cd9313954f6ecd82480d373f6c5371ca84ab33e3f5c39d972d35cfcf1096846') diff --git a/var/spack/repos/builtin/packages/r-copula/package.py b/var/spack/repos/builtin/packages/r-copula/package.py new file mode 100644 index 00000000000..0090a65a790 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-copula/package.py @@ -0,0 +1,39 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RCopula(RPackage): + """Classes (S4) of commonly used elliptical, Archimedean, extreme-value and + other copula families, as well as their rotations, mixtures and + asymmetrizations. Nested Archimedean copulas, related tools and special + functions. Methods for density, distribution, random number generation, + bivariate dependence measures, Rosenblatt transform, Kendall distribution + function, perspective and contour plots. Fitting of copula models with + potentially partly fixed parameters, including standard errors. Serial + independence tests, copula specification tests (independence, + exchangeability, radial symmetry, extreme-value dependence, + goodness-of-fit) and model selection based on cross-validation. 
Empirical + copula, smoothed versions, and non-parametric estimators of the Pickands + dependence function.""" + + homepage = "http://copula.r-forge.r-project.org/" + url = "https://cloud.r-project.org/src/contrib/copula_0.999-20.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/copula" + + version('0.999-20', sha256='7d3d47bce2dacb05b94a772f84dbf3d83c99ac2ac11e5f1b4b03d50d9d5c0fb0') + + depends_on('r@3.2.0:', type=('build', 'run')) + depends_on('r-matrix', type=('build', 'run')) + depends_on('r-lattice', type=('build', 'run')) + depends_on('r-colorspace', type=('build', 'run')) + depends_on('r-gsl', type=('build', 'run')) + depends_on('r-adgoftest', type=('build', 'run')) + depends_on('r-stabledist@0.6-4:', type=('build', 'run')) + depends_on('r-mvtnorm', type=('build', 'run')) + depends_on('r-pcapp', type=('build', 'run')) + depends_on('r-pspline', type=('build', 'run')) + depends_on('r-numderiv', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-gsl/package.py b/var/spack/repos/builtin/packages/r-gsl/package.py new file mode 100644 index 00000000000..8aa7f778590 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-gsl/package.py @@ -0,0 +1,21 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RGsl(RPackage): + """An R wrapper for some of the functionality of the Gnu Scientific + Library.""" + + homepage = "https://github.com/RobinHankin/gsl.git" + url = "https://cloud.r-project.org/src/contrib/gsl_2.1-6.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/gsl" + + version('2.1-6', sha256='f5d463239693f146617018987687db31b163653708cbae0b730b9b7bed81995c') + + depends_on('r@3.1.0:', type=('build', 'run')) + + depends_on('gsl@2.1:') diff --git a/var/spack/repos/builtin/packages/r-pspline/package.py b/var/spack/repos/builtin/packages/r-pspline/package.py new file mode 100644 index 00000000000..7a33098c153 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-pspline/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RPspline(RPackage): + """Smoothing splines with penalties on order m derivatives.""" + + homepage = "https://cloud.r-project.org/package=pspline" + url = "https://cloud.r-project.org/src/contrib/pspline_1.0-18.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/pspline" + + version('1.0-18', sha256='f71cf293bd5462e510ac5ad16c4a96eda18891a0bfa6447dd881c65845e19ac7') + + depends_on('r@2.0.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-stabledist/package.py b/var/spack/repos/builtin/packages/r-stabledist/package.py new file mode 100644 index 00000000000..c187d4771e1 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-stabledist/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RStabledist(RPackage): + """Density, Probability and Quantile functions, and random number + generation for (skew) stable distributions, using the parametrizations of + Nolan.""" + + homepage = "http://www.rmetrics.org/" + url = "https://cloud.r-project.org/src/contrib/stabledist_0.7-1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/stabledist" + + version('0.7-1', sha256='06c5704d3a3c179fa389675c537c39a006867bc6e4f23dd7e406476ed2c88a69') + + depends_on('r@3.1.0:', type=('build', 'run')) From 22df37c328855bbf5a06c03f9a034ec22f243c98 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:41:56 -0600 Subject: [PATCH 149/178] New package: r-pscbs (#14867) This PR adds the r-pscbs package along with new dependencies and updates. - new package: r-aroma-light - new package: r-r-cache - updated package: r-r-oo --- .../builtin/packages/r-aroma-light/package.py | 23 +++++++++++++++ .../repos/builtin/packages/r-pscbs/package.py | 29 +++++++++++++++++++ .../builtin/packages/r-r-cache/package.py | 29 +++++++++++++++++++ .../repos/builtin/packages/r-r-oo/package.py | 1 + 4 files changed, 82 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-aroma-light/package.py create mode 100644 var/spack/repos/builtin/packages/r-pscbs/package.py create mode 100644 var/spack/repos/builtin/packages/r-r-cache/package.py diff --git a/var/spack/repos/builtin/packages/r-aroma-light/package.py b/var/spack/repos/builtin/packages/r-aroma-light/package.py new file mode 100644 index 00000000000..a704ac72b4b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-aroma-light/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. 
+# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RAromaLight(RPackage): + """Methods for microarray analysis that take basic data types such as + matrices and lists of vectors. These methods can be used standalone, be + utilized in other packages, or be wrapped up in higher-level classes.""" + + homepage = "https://www.aroma-project.org/" + git = "https://git.bioconductor.org/packages/aroma.light" + + version('3.16.0', commit='fc16179fc4bee8954c5415d7cd13e3112b75b4fd') + + depends_on('r@2.15.2:', type=('build', 'run')) + depends_on('r-r-methodss3@1.7.1:', type=('build', 'run')) + depends_on('r-r-oo@1.22.0:', type=('build', 'run')) + depends_on('r-r-utils@2.9.0:', type=('build', 'run')) + depends_on('r-matrixstats@0.54.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-pscbs/package.py b/var/spack/repos/builtin/packages/r-pscbs/package.py new file mode 100644 index 00000000000..c3197b77955 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-pscbs/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RPscbs(RPackage): + """Segmentation of allele-specific DNA copy number data and detection of + regions with abnormal copy number within each parental chromosome. 
Both + tumor-normal paired and tumor-only analyses are supported.""" + + homepage = "https://github.com/HenrikBengtsson/PSCBS" + url = "https://cloud.r-project.org/src/contrib/PSCBS_0.65.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/PSCBS" + + version('0.65.0', sha256='3365065d5375c599eb024bfff12c5f6b10a6b1a4fe4ba6f200f7e83618dd399a') + + depends_on('r@3.2.0:', type=('build', 'run')) + depends_on('r-r-methodss3@1.7.1:', type=('build', 'run')) + depends_on('r-r-oo@1.22.1:', type=('build', 'run')) + depends_on('r-r-utils@2.8.0:', type=('build', 'run')) + depends_on('r-r-cache@0.13.0:', type=('build', 'run')) + depends_on('r-matrixstats@0.54.0:', type=('build', 'run')) + depends_on('r-aroma-light@2.4.0:', type=('build', 'run')) + depends_on('r-dnacopy@1.42.0:', type=('build', 'run')) + depends_on('r-listenv@0.7.0:', type=('build', 'run')) + depends_on('r-future@1.12.0:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-r-cache/package.py b/var/spack/repos/builtin/packages/r-r-cache/package.py new file mode 100644 index 00000000000..a27cf38bba4 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-r-cache/package.py @@ -0,0 +1,29 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RRCache(RPackage): + """Memoization can be used to speed up repetitive and computational + expensive function calls. The first time a function that implements + memoization is called the results are stored in a cache memory. The next + time the function is called with the same set of parameters, the results + are momentarily retrieved from the cache avoiding repeating the + calculations. With this package, any R object can be cached in a key-value + storage where the key can be an arbitrary set of R objects. 
The cache + memory is persistent (on the file system).""" + + homepage = "https://github.com/HenrikBengtsson/R.cache" + url = "https://cloud.r-project.org/src/contrib/R.cache_0.14.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/R.cache" + + version('0.14.0', sha256='18af4e372440b9f28b4b71346c8ed9de220232f9903730ccee2bfb3c612c16d9') + + depends_on('r@2.14.0:', type=('build', 'run')) + depends_on('r-r-methodss3@1.7.1:', type=('build', 'run')) + depends_on('r-r-oo@1.23.0:', type=('build', 'run')) + depends_on('r-r-utils@2.8.0:', type=('build', 'run')) + depends_on('r-digest@0.6.13:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-r-oo/package.py b/var/spack/repos/builtin/packages/r-r-oo/package.py index e25e515b7fb..cedaafe72a9 100644 --- a/var/spack/repos/builtin/packages/r-r-oo/package.py +++ b/var/spack/repos/builtin/packages/r-r-oo/package.py @@ -19,6 +19,7 @@ class RROo(RPackage): url = "https://cloud.r-project.org/src/contrib/R.oo_1.21.0.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/R.oo" + version('1.23.0', sha256='f5124ce3dbb0a62e8ef1bfce2de2d1dc2f776e8c48fd8cac358f7f5feb592ea1') version('1.22.0', sha256='c0862e4608fb2b8f91ec4494d46c2f3ba7bc44999f9aa3d7b9625d3792e7dd4c') version('1.21.0', sha256='645ceec2f815ed39650ca72db87fb4ece7357857875a4ec73e18bfaf647f431c') From d00a3eda6beaed99e66caa3ac429585a603abc26 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:42:32 -0600 Subject: [PATCH 150/178] New package: r-proj4 (#14871) --- .../repos/builtin/packages/r-proj4/package.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-proj4/package.py diff --git a/var/spack/repos/builtin/packages/r-proj4/package.py b/var/spack/repos/builtin/packages/r-proj4/package.py new file mode 100644 index 00000000000..ba3ad51c99b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-proj4/package.py @@ -0,0 +1,22 @@ +# Copyright 
2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RProj4(RPackage): + """A simple interface to lat/long projection and datum transformation of + the PROJ.4 cartographic projections library. It allows transformation of + geographic coordinates from one projection and/or datum to another.""" + + homepage = "http://www.rforge.net/proj4/" + url = "https://cloud.r-project.org/src/contrib/proj4_1.0-8.1.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/proj4" + + version('1.0-8.1', sha256='a3a2a8f0014fd79fa34b5957440fd38299d8e97f1a802a61a068a6c6cda10a7e') + + depends_on('r@2.0.0:', type=('build', 'run')) + + depends_on('proj@:5') From ad914c28d7e5a98b1d9548624ae0a0031c91c7e5 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:43:27 -0600 Subject: [PATCH 151/178] New package: r-proj (#14872) --- .../repos/builtin/packages/r-proj/package.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-proj/package.py diff --git a/var/spack/repos/builtin/packages/r-proj/package.py b/var/spack/repos/builtin/packages/r-proj/package.py new file mode 100644 index 00000000000..8a9cea0941b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-proj/package.py @@ -0,0 +1,27 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RProj(RPackage): + """A wrapper around the generic coordinate transformation software 'PROJ' + that transforms geospatial coordinates from one coordinate reference system + ('CRS') to another. This includes cartographic projections as well as + geodetic transformations. Version 6.0.0 or higher is required. 
The + intention is for this package to be used by user-packages such as 'reproj', + and that the older 'PROJ.4' and version 5 pathways be provided by the + legacy package. The 'PROJ' library is available from + .""" + + homepage = "https://github.com/hypertidy/PROJ" + url = "https://cloud.r-project.org/src/contrib/PROJ_0.1.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/PROJ" + + version('0.1.0', sha256='5186f221335e8092bbcd4d82bd323ee7e752c7c9cf83d3f94e4567e0b407aa6f') + + depends_on('r@2.10:', type=('build', 'run')) + + depends_on('proj@6:') From bd442e884a4ab240b8e6a96ca2096043c18a7652 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:44:00 -0600 Subject: [PATCH 152/178] New package: r-imager (#14866) This PR adds the r-imager package and dependencies. - new package: r-bmp - new package: r-readbitmap --- .../repos/builtin/packages/r-bmp/package.py | 18 +++++++++ .../builtin/packages/r-imager/package.py | 37 +++++++++++++++++++ .../builtin/packages/r-readbitmap/package.py | 28 ++++++++++++++ 3 files changed, 83 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-bmp/package.py create mode 100644 var/spack/repos/builtin/packages/r-imager/package.py create mode 100644 var/spack/repos/builtin/packages/r-readbitmap/package.py diff --git a/var/spack/repos/builtin/packages/r-bmp/package.py b/var/spack/repos/builtin/packages/r-bmp/package.py new file mode 100644 index 00000000000..6f6a37b7c33 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-bmp/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RBmp(RPackage): + """Reads Windows BMP format images. Currently limited to 8 bit greyscale + images and 24,32 bit (A)RGB images. 
Pure R implementation without external + dependencies.""" + + homepage = "https://cloud.r-project.org/package=bmp" + url = "https://cloud.r-project.org/src/contrib/bmp_0.3.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/bmp" + + version('0.3', sha256='bdf790249b932e80bc3a188a288fef079d218856cf64ffb88428d915423ea649') diff --git a/var/spack/repos/builtin/packages/r-imager/package.py b/var/spack/repos/builtin/packages/r-imager/package.py new file mode 100644 index 00000000000..cd0f8510546 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-imager/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RImager(RPackage): + """Fast image processing for images in up to 4 dimensions (two spatial + dimensions, one time/depth dimension, one colour dimension). Provides most + traditional image processing tools (filtering, morphology, transformations, + etc.) as well as various functions for easily analysing image data using R. 
+ The package wraps 'CImg', , a simple, modern C++ library + for image processing.""" + + homepage = "http://dahtah.github.io/imager" + url = "https://cloud.r-project.org/src/contrib/imager_0.41.2.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/imager" + + version('0.41.2', sha256='9be8bc8b3190d469fcb2883045a404d3b496a0380f887ee3caea11f0a07cd8a5') + + depends_on('r@2.10.0:', type=('build', 'run')) + depends_on('r-magrittr', type=('build', 'run')) + depends_on('r-rcpp@0.11.5:', type=('build', 'run')) + depends_on('r-stringr', type=('build', 'run')) + depends_on('r-png', type=('build', 'run')) + depends_on('r-jpeg', type=('build', 'run')) + depends_on('r-readbitmap', type=('build', 'run')) + depends_on('r-purrr', type=('build', 'run')) + depends_on('r-cairo', type=('build', 'run')) + depends_on('r-downloader', type=('build', 'run')) + depends_on('r-igraph', type=('build', 'run')) + depends_on('r-plyr', type=('build', 'run')) + + depends_on('fftw') + depends_on('libtiff') diff --git a/var/spack/repos/builtin/packages/r-readbitmap/package.py b/var/spack/repos/builtin/packages/r-readbitmap/package.py new file mode 100644 index 00000000000..3ade391413b --- /dev/null +++ b/var/spack/repos/builtin/packages/r-readbitmap/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RReadbitmap(RPackage): + """Identifies and reads Windows BMP, JPEG, PNG, and TIFF format bitmap + images. Identification defaults to the use of the magic number embedded in + the file rather than the file extension. Reading of JPEG and PNG image + depends on libjpg and libpng libraries. 
See file INSTALL for details if + necessary.""" + + homepage = "https://github.com/jefferis/readbitmap" + url = "https://cloud.r-project.org/src/contrib/readbitmap_0.1.5.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/readbitmap" + + version('0.1.5', sha256='737d7d585eb33de2c200da64d16781e3c9522400fe2af352e1460c6a402a0291') + + depends_on('r-bmp', type=('build', 'run')) + depends_on('r-jpeg', type=('build', 'run')) + depends_on('r-png', type=('build', 'run')) + depends_on('r-tiff', type=('build', 'run')) + + depends_on('libjpeg') + depends_on('libpng') From 1ed0efec7defdba982e76cc78e131a8a92a6876f Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:44:17 -0600 Subject: [PATCH 153/178] New package: r-suppdists (#14865) --- .../builtin/packages/r-suppdists/package.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-suppdists/package.py diff --git a/var/spack/repos/builtin/packages/r-suppdists/package.py b/var/spack/repos/builtin/packages/r-suppdists/package.py new file mode 100644 index 00000000000..f85ee2c0a85 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-suppdists/package.py @@ -0,0 +1,22 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RSuppdists(RPackage): + """Ten distributions supplementing those built into R. 
Inverse Gauss, + Kruskal-Wallis, Kendall's Tau, Friedman's chi squared, Spearman's rho, + maximum F ratio, the Pearson product moment correlation coefficient, + Johnson distributions, normal scores and generalized hypergeometric + distributions.""" + + homepage = "https://cloud.r-project.org/package=SuppDists" + url = "https://cloud.r-project.org/src/contrib/SuppDists_1.1-9.5.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/SuppDists" + + version('1.1-9.5', sha256='680b67145c07d44e200275e08e48602fe19cd99fb106c05422b3f4a244c071c4') + + depends_on('r@3.3.0:', type=('build', 'run')) From 8aa1eba2e0134473ccc858cc5a81963bbfa0f36f Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 03:49:34 -0600 Subject: [PATCH 154/178] New package: r-watermelon (#14864) This PR creates the r-watermelon package, along with dependencies. - new package: r-fdb-infiniummethylation-hg19 - new package: r-illuminahumanmethylation450kanno-ilmn12-hg19 - new package: r-lumi - new package: r-methylumi - new package: r-roc - new package: r-txdb-hsapiens-ucsc-hg19-knowngene - updated package: r-matrixstats, new version needed as a dependency --- .../r-fdb-infiniummethylation-hg19/package.py | 23 +++++++++++ .../package.py | 20 +++++++++ .../repos/builtin/packages/r-lumi/package.py | 37 +++++++++++++++++ .../builtin/packages/r-matrixstats/package.py | 1 + .../builtin/packages/r-methylumi/package.py | 41 +++++++++++++++++++ .../repos/builtin/packages/r-roc/package.py | 18 ++++++++ .../package.py | 20 +++++++++ .../builtin/packages/r-watermelon/package.py | 25 +++++++++++ 8 files changed, 185 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-fdb-infiniummethylation-hg19/package.py create mode 100644 var/spack/repos/builtin/packages/r-illuminahumanmethylation450kanno-ilmn12-hg19/package.py create mode 100644 var/spack/repos/builtin/packages/r-lumi/package.py create mode 100644 var/spack/repos/builtin/packages/r-methylumi/package.py create mode 
100644 var/spack/repos/builtin/packages/r-roc/package.py create mode 100644 var/spack/repos/builtin/packages/r-txdb-hsapiens-ucsc-hg19-knowngene/package.py create mode 100644 var/spack/repos/builtin/packages/r-watermelon/package.py diff --git a/var/spack/repos/builtin/packages/r-fdb-infiniummethylation-hg19/package.py b/var/spack/repos/builtin/packages/r-fdb-infiniummethylation-hg19/package.py new file mode 100644 index 00000000000..ae6749434a9 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-fdb-infiniummethylation-hg19/package.py @@ -0,0 +1,23 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RFdbInfiniummethylationHg19(RPackage): + """Compiled HumanMethylation27 and HumanMethylation450 annotations.""" + + # No available git repository + homepage = "https://bioconductor.org/packages/release/data/annotation/html/FDb.InfiniumMethylation.hg19.html" + url = "https://bioconductor.org/packages/release/data/annotation/src/contrib/FDb.InfiniumMethylation.hg19_2.2.0.tar.gz" + + version('2.2.0', sha256='605aa3643588a2f40a942fa760b92662060a0dfedb26b4e4cd6f1a78b703093f') + + depends_on('r@2.10:', type=('build', 'run')) + depends_on('r-genomicfeatures@1.7.22:', type=('build', 'run')) + depends_on('r-txdb-hsapiens-ucsc-hg19-knowngene', type=('build', 'run')) + depends_on('r-org-hs-eg-db', type=('build', 'run')) + depends_on('r-annotationdbi', type=('build', 'run')) + depends_on('r-biostrings', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-illuminahumanmethylation450kanno-ilmn12-hg19/package.py b/var/spack/repos/builtin/packages/r-illuminahumanmethylation450kanno-ilmn12-hg19/package.py new file mode 100644 index 00000000000..34424309640 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-illuminahumanmethylation450kanno-ilmn12-hg19/package.py @@ -0,0 +1,20 @@ 
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RIlluminahumanmethylation450kannoIlmn12Hg19(RPackage): + """Manifests and annotation for Illumina's 450k array data.""" + + # This package is available via bioconductor but there is no available git + # repository. + homepage = "https://bioconductor.org/packages/release/data/annotation/html/IlluminaHumanMethylation450kanno.ilmn12.hg19.html" + url = "https://bioconductor.org/packages/release/data/annotation/src/contrib/IlluminaHumanMethylation450kanno.ilmn12.hg19_0.6.0.tar.gz" + + version('0.6.0', sha256='249b8fd62add3c95b5047b597cff0868d26a98862a47cebd656edcd175a73b15') + + depends_on('r@3.3.0:', type=('build', 'run')) + depends_on('r-minfi@1.19.15:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-lumi/package.py b/var/spack/repos/builtin/packages/r-lumi/package.py new file mode 100644 index 00000000000..d79de8d6623 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-lumi/package.py @@ -0,0 +1,37 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RLumi(RPackage): + """The lumi package provides an integrated solution for the Illumina + microarray data analysis. It includes functions of Illumina BeadStudio + (GenomeStudio) data input, quality control, BeadArray-specific variance + stabilization, normalization and gene annotation at the probe level. 
It + also includes the functions of processing Illumina methylation microarrays, + especially Illumina Infinium methylation microarrays.""" + + homepage = "https://bioconductor.org/packages/release/bioc/html/lumi.html" + git = "https://git.bioconductor.org/packages/lumi" + + version('2.38.0', commit='321d480d44ce9a0c02ce5af1bddc1f549abdea59') + + depends_on('r@2.10:', type=('build', 'run')) + depends_on('r-biobase@2.5.5:', type=('build', 'run')) + depends_on('r-affy@1.23.4:', type=('build', 'run')) + depends_on('r-methylumi@2.3.2:', type=('build', 'run')) + depends_on('r-genomicfeatures', type=('build', 'run')) + depends_on('r-genomicranges', type=('build', 'run')) + depends_on('r-annotate', type=('build', 'run')) + depends_on('r-lattice', type=('build', 'run')) + depends_on('r-mgcv@1.4-0:', type=('build', 'run')) + depends_on('r-nleqslv', type=('build', 'run')) + depends_on('r-kernsmooth', type=('build', 'run')) + depends_on('r-preprocesscore', type=('build', 'run')) + depends_on('r-rsqlite', type=('build', 'run')) + depends_on('r-dbi', type=('build', 'run')) + depends_on('r-annotationdbi', type=('build', 'run')) + depends_on('r-mass', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-matrixstats/package.py b/var/spack/repos/builtin/packages/r-matrixstats/package.py index 9446add3183..9bebc7ad9d0 100644 --- a/var/spack/repos/builtin/packages/r-matrixstats/package.py +++ b/var/spack/repos/builtin/packages/r-matrixstats/package.py @@ -18,6 +18,7 @@ class RMatrixstats(RPackage): url = "https://cloud.r-project.org/src/contrib/matrixStats_0.52.2.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/matrixStats" + version('0.55.0', sha256='16d6bd90eee4cee8df4c15687de0f9b72730c03e56603c2998007d4533e8db19') version('0.54.0', sha256='8f0db4e181300a208b9aedbebfdf522a2626e6675d2662656efb8ba71b05a06f') version('0.52.2', sha256='39da6aa6b109f89a141dab8913d981abc4fbd3f8be9e206f92e382cc5270d2a5') diff --git 
a/var/spack/repos/builtin/packages/r-methylumi/package.py b/var/spack/repos/builtin/packages/r-methylumi/package.py new file mode 100644 index 00000000000..55f9f42a963 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-methylumi/package.py @@ -0,0 +1,41 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RMethylumi(RPackage): + """This package provides classes for holding and manipulating Illumina + methylation data. Based on eSet, it can contain MIAME information, sample + information, feature information, and multiple matrices of data. An + "intelligent" import function, methylumiR can read the Illumina text files + and create a MethyLumiSet. methylumIDAT can directly read raw IDAT files + from HumanMethylation27 and HumanMethylation450 microarrays. Normalization, + background correction, and quality control features for GoldenGate, + Infinium, and Infinium HD arrays are also included.""" + + homepage = "https://bioconductor.org/packages/release/bioc/html/methylumi.html" + git = "https://git.bioconductor.org/packages/methylumi" + + version('2.32.0', commit='e2a29c1b214c0d43c7325d176f9ce41dcf8e2f9d') + + depends_on('r@2.13:', type=('build', 'run')) + depends_on('r-biobase', type=('build', 'run')) + depends_on('r-scales', type=('build', 'run')) + depends_on('r-reshape2', type=('build', 'run')) + depends_on('r-ggplot2', type=('build', 'run')) + depends_on('r-matrixstats', type=('build', 'run')) + depends_on('r-fdb-infiniummethylation-hg19@2.2.0:', type=('build', 'run')) + depends_on('r-minfi', type=('build', 'run')) + depends_on('r-biocgenerics', type=('build', 'run')) + depends_on('r-iranges', type=('build', 'run')) + depends_on('r-genomeinfodb', type=('build', 'run')) + depends_on('r-genomicranges', type=('build', 'run')) + depends_on('r-summarizedexperiment', type=('build', 
'run')) + depends_on('r-lattice', type=('build', 'run')) + depends_on('r-annotate', type=('build', 'run')) + depends_on('r-genefilter', type=('build', 'run')) + depends_on('r-annotationdbi', type=('build', 'run')) + depends_on('r-illuminaio', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-roc/package.py b/var/spack/repos/builtin/packages/r-roc/package.py new file mode 100644 index 00000000000..90cb6bfb0d5 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-roc/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RRoc(RPackage): + """Provide utilities for ROC, with microarray focus.""" + + homepage = "https://bioconductor.org/packages/release/bioc/html/ROC.html" + git = "https://git.bioconductor.org/packages/ROC" + + version('1.62.0', commit='60250fdb091f6a938709b8a2cffe6442ee22a9a2') + + depends_on('r@1.9.0:', type=('build', 'run')) + depends_on('r-knitr', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-txdb-hsapiens-ucsc-hg19-knowngene/package.py b/var/spack/repos/builtin/packages/r-txdb-hsapiens-ucsc-hg19-knowngene/package.py new file mode 100644 index 00000000000..a0d5b6ccde5 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-txdb-hsapiens-ucsc-hg19-knowngene/package.py @@ -0,0 +1,20 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RTxdbHsapiensUcscHg19Knowngene(RPackage): + """Exposes an annotation databases generated from UCSC by exposing these as + TxDb objects.""" + + # This is a bioconductor package but there is no available git repo. 
+ homepage = "https://bioconductor.org/packages/release/data/annotation/html/TxDb.Hsapiens.UCSC.hg19.knownGene.html" + url = "https://bioconductor.org/packages/release/data/annotation/src/contrib/TxDb.Hsapiens.UCSC.hg19.knownGene_3.2.2.tar.gz" + + version('3.2.2', sha256='063de2b1174782a0b2b8ab7f04a0bdf3c43252cb67c685a9f8ef2b8e318352e9') + + depends_on('r-genomicfeatures@1.21.30:', type=('build', 'run')) + depends_on('r-annotationdbi', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-watermelon/package.py b/var/spack/repos/builtin/packages/r-watermelon/package.py new file mode 100644 index 00000000000..a1d827870c0 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-watermelon/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RWatermelon(RPackage): + """Illumina 450 methylation array normalization and metrics.""" + + homepage = "https://bioconductor.org/packages/release/bioc/html/wateRmelon.html" + git = "https://git.bioconductor.org/packages/wateRmelon" + + version('1.30.0', commit='66d7579fe49206d965832288df7937c3d43ed578') + + depends_on('r@2.10:', type=('build', 'run')) + depends_on('r-biobase', type=('build', 'run')) + depends_on('r-limma', type=('build', 'run')) + depends_on('r-matrixstats', type=('build', 'run')) + depends_on('r-methylumi', type=('build', 'run')) + depends_on('r-lumi', type=('build', 'run')) + depends_on('r-roc', type=('build', 'run')) + depends_on('r-illuminahumanmethylation450kanno-ilmn12-hg19', type=('build', 'run')) + depends_on('r-illuminaio', type=('build', 'run')) From cb3dbea198433641c806a9b1fa9a58d32fefc389 Mon Sep 17 00:00:00 2001 From: Hadrien G Date: Mon, 10 Feb 2020 11:06:56 +0100 Subject: [PATCH 155/178] acts-core: added v0.16.0 (#14839) * Adapt to ACTS v0.16 * ACTS uses the DD4hep XML 
components nowadays --- .../builtin/packages/acts-core/package.py | 23 ++++++++++++++----- 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/acts-core/package.py b/var/spack/repos/builtin/packages/acts-core/package.py index 644397102ab..1cd9fe63758 100644 --- a/var/spack/repos/builtin/packages/acts-core/package.py +++ b/var/spack/repos/builtin/packages/acts-core/package.py @@ -34,6 +34,7 @@ class ActsCore(CMakePackage): maintainers = ['HadrienG2'] version('develop', branch='master') + version('0.16.0', commit='b3d965fe0b8ae335909d79114ef261c6b996773a') version('0.15.0', commit='267c28f69c561e64369661a6235b03b5a610d6da') version('0.14.0', commit='38d678fcb205b77d60326eae913fbb1b054acea1') version('0.13.0', commit='b33f7270ddbbb33050b7ec60b4fa255dc2bfdc88') @@ -58,16 +59,18 @@ class ActsCore(CMakePackage): version('0.08.0', commit='99eedb38f305e3a1cd99d9b4473241b7cd641fa9') # Variants that affect the core ACTS library - variant('legacy', default=False, description='Build the Legacy package') + variant('benchmarks', default=False, description='Build the performance benchmarks') variant('examples', default=False, description='Build the examples') variant('tests', default=False, description='Build the unit tests') variant('integration_tests', default=False, description='Build the integration tests') # Variants the enable / disable ACTS plugins - variant('digitization', default=False, description='Build the geometric digitization plugin') variant('dd4hep', default=False, description='Build the DD4hep plugin') + variant('digitization', default=False, description='Build the geometric digitization plugin') + variant('fatras', default=False, description='Build the FAst TRAcking Simulation package') variant('identification', default=False, description='Build the Identification plugin') variant('json', default=False, description='Build the Json plugin') + variant('legacy', default=False, description='Build the Legacy package') 
variant('tgeo', default=False, description='Build the TGeo plugin') depends_on('cmake @3.11:', type='build') @@ -77,7 +80,7 @@ class ActsCore(CMakePackage): depends_on('nlohmann-json @3.2.0:', when='@0.14.0: +json') depends_on('root @6.10: cxxstd=14', when='+tgeo @:0.8.0') depends_on('root @6.10: cxxstd=17', when='+tgeo @0.8.1:') - depends_on('dd4hep @1.2:', when='+dd4hep') + depends_on('dd4hep @1.2: +xercesc', when='+dd4hep') def cmake_args(self): spec = self.spec @@ -86,15 +89,23 @@ def cmake_variant(cmake_label, spack_variant): enabled = spec.satisfies('+' + spack_variant) return "-DACTS_BUILD_{0}={1}".format(cmake_label, enabled) + integration_tests_label = "INTEGRATIONTESTS" + tests_label = "UNITTESTS" + if spec.satisfies('@:0.15.99'): + integration_tests_label = "INTEGRATION_TESTS" + tests_label = "TESTS" + args = [ - cmake_variant("LEGACY", "legacy"), + cmake_variant("BENCHMARKS", "benchmarks"), cmake_variant("EXAMPLES", "examples"), - cmake_variant("TESTS", "tests"), - cmake_variant("INTEGRATION_TESTS", "integration_tests"), + cmake_variant(tests_label, "tests"), + cmake_variant(integration_tests_label, "integration_tests"), cmake_variant("DIGITIZATION_PLUGIN", "digitization"), cmake_variant("DD4HEP_PLUGIN", "dd4hep"), + cmake_variant("FATRAS", "fatras"), cmake_variant("IDENTIFICATION_PLUGIN", "identification"), cmake_variant("JSON_PLUGIN", "json"), + cmake_variant("LEGACY", "legacy"), cmake_variant("TGEO_PLUGIN", "tgeo") ] From 745a843911aad95f900c806ebc8704132e48fa74 Mon Sep 17 00:00:00 2001 From: Glenn Johnson Date: Mon, 10 Feb 2020 04:08:04 -0600 Subject: [PATCH 156/178] New package: r-biomartr (#14812) This PR creates the r-biomartr package as well as needed new packages and updates. 
- new dependency: r-philentropy - update: r-curl --- .../builtin/packages/r-biomartr/package.py | 44 +++++++++++++++++++ .../repos/builtin/packages/r-curl/package.py | 8 ++-- .../builtin/packages/r-philentropy/package.py | 28 ++++++++++++ 3 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 var/spack/repos/builtin/packages/r-biomartr/package.py create mode 100644 var/spack/repos/builtin/packages/r-philentropy/package.py diff --git a/var/spack/repos/builtin/packages/r-biomartr/package.py b/var/spack/repos/builtin/packages/r-biomartr/package.py new file mode 100644 index 00000000000..6cb63e979ce --- /dev/null +++ b/var/spack/repos/builtin/packages/r-biomartr/package.py @@ -0,0 +1,44 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RBiomartr(RPackage): + """Perform large scale genomic data retrieval and functional annotation + retrieval. This package aims to provide users with a standardized way to + automate genome, proteome, 'RNA', coding sequence ('CDS'), 'GFF', and + metagenome retrieval from 'NCBI RefSeq', 'NCBI Genbank', 'ENSEMBL', + 'ENSEMBLGENOMES', and 'UniProt' databases. Furthermore, an interface to the + 'BioMart' database (Smedley et al. (2009) ) + allows users to retrieve functional annotation for genomic loci. In + addition, users can download entire databases such as 'NCBI RefSeq' (Pruitt + et al. (2007) ), 'NCBI nr', 'NCBI nt', 'NCBI + Genbank' (Benson et al. (2013) ), etc. 
as well as + 'ENSEMBL' and 'ENSEMBLGENOMES' with only one command.""" + + homepage = "https://docs.ropensci.org/biomartr" + url = "https://cloud.r-project.org/src/contrib/biomartr_0.9.2.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/biomartr" + + version('0.9.2', sha256='d88085696e9c5614828602254c33f2cdd3bbfeebc2f21a705eee3cb961097c89') + + depends_on('r-biomart', type=('build', 'run')) + depends_on('r-biostrings', type=('build', 'run')) + depends_on('r-curl', type=('build', 'run')) + depends_on('r-tibble', type=('build', 'run')) + depends_on('r-jsonlite', type=('build', 'run')) + depends_on('r-data-table@1.9.4:', type=('build', 'run')) + depends_on('r-dplyr@0.3.0:', type=('build', 'run')) + depends_on('r-readr@0.2.2:', type=('build', 'run')) + depends_on('r-downloader@0.3:', type=('build', 'run')) + depends_on('r-rcurl@1.95-4.5:', type=('build', 'run')) + depends_on('r-xml@3.98-1.1:', type=('build', 'run')) + depends_on('r-httr@0.6.1:', type=('build', 'run')) + depends_on('r-stringr@0.6.2:', type=('build', 'run')) + depends_on('r-purrr', type=('build', 'run')) + depends_on('r-r-utils', type=('build', 'run')) + depends_on('r-philentropy', type=('build', 'run')) + depends_on('r-fs@1.3.1:', type=('build', 'run')) diff --git a/var/spack/repos/builtin/packages/r-curl/package.py b/var/spack/repos/builtin/packages/r-curl/package.py index 25081b86f7b..f1a7a2de60a 100644 --- a/var/spack/repos/builtin/packages/r-curl/package.py +++ b/var/spack/repos/builtin/packages/r-curl/package.py @@ -21,12 +21,14 @@ class RCurl(RPackage): url = "https://cloud.r-project.org/src/contrib/curl_2.3.tar.gz" list_url = "https://cloud.r-project.org/src/contrib/Archive/curl" + version('4.3', sha256='7406d485bb50a6190e3ed201e3489063fd249b8b3b1b4f049167ac405a352edb') version('4.0', sha256='09a99c9c86666449188fbb211cb1e9fbdb5108ab56f0d09322cd0ae50e926171') version('3.3', sha256='0cb0b9a9280edc42ebed94708541ec86b4f48779e722171e45227eab8a88a5bd') version('3.0', 
sha256='7bf8e3ae7cc77802ae300277e85d925d4c0611a9b7dad5c5601e0d2cbe14a506') - version('2.3', sha256='f901dad6bb70a6875a85da75bcbb42afffdcdf4ef221909733826bcb012d7c3d') - version('1.0', sha256='f8927228754fdfb21dbf08b9e67c5f97e06764c4adf327a4126eed84b1508f3d') + version('2.3', sha256='f901dad6bb70a6875a85da75bcbb42afffdcdf4ef221909733826bcb012d7c3d') + version('1.0', sha256='f8927228754fdfb21dbf08b9e67c5f97e06764c4adf327a4126eed84b1508f3d') version('0.9.7', sha256='46e150998723fd1937da598f47f49fe47e40c1f57ec594436c6ef1e0145b44dc') depends_on('r@3.0.0:', type=('build', 'run')) - depends_on('curl') + depends_on('curl', when='@4.3:') + depends_on('curl@:7.63', when='@:4.0') diff --git a/var/spack/repos/builtin/packages/r-philentropy/package.py b/var/spack/repos/builtin/packages/r-philentropy/package.py new file mode 100644 index 00000000000..2814ebc6769 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-philentropy/package.py @@ -0,0 +1,28 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RPhilentropy(RPackage): + """Computes 46 optimized distance and similarity measures for comparing + probability functions (Drost (2018) ). These + comparisons between probability functions have their foundations in a broad + range of scientific disciplines from mathematics to ecology. 
The aim of + this package is to provide a core framework for clustering, classification, + statistical inference, goodness-of-fit, non-parametric statistics, + information theory, and machine learning tasks that are based on comparing + univariate or multivariate probability functions.""" + + homepage = "https://github.com/HajkD/philentropy" + url = "https://cloud.r-project.org/src/contrib/philentropy_0.4.0.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/philentropy" + + version('0.4.0', sha256='bfd30bf5635aab6a82716299a87d44cf96c7ab7f4ee069843869bcc85c357127') + + depends_on('r@3.1.2:', type=('build', 'run')) + depends_on('r-rcpp', type=('build', 'run')) + depends_on('r-dplyr', type=('build', 'run')) + depends_on('r-kernsmooth', type=('build', 'run')) From c77eeca61ed21ac19a757e27581f336dcd6af912 Mon Sep 17 00:00:00 2001 From: darmac Date: Mon, 10 Feb 2020 18:08:45 +0800 Subject: [PATCH 157/178] icedtea: added versions up to v3.9.0 (#14806) --- var/spack/repos/builtin/packages/icedtea/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/icedtea/package.py b/var/spack/repos/builtin/packages/icedtea/package.py index 415cd427236..7f142ae4bf5 100644 --- a/var/spack/repos/builtin/packages/icedtea/package.py +++ b/var/spack/repos/builtin/packages/icedtea/package.py @@ -16,7 +16,13 @@ class Icedtea(AutotoolsPackage): homepage = "http://icedtea.classpath.org/wiki/Main_Page" url = "http://icedtea.wildebeest.org/download/source/icedtea-3.4.0.tar.gz" - version('3.4.0', sha256='2b606bbbf4ca5bcf2c8e811ea9060da30744860f3d63e1b3149fb5550a90b92b') + version('3.9.0', sha256='84a63bc59f4e101ce8fa183060a59c7e8cbe270945310e90c92b8609a9b8bc88') + version('3.8.0', sha256='ef1a9110294d0a905833f1db30da0c8a88bd2bde8d92ddb711d72ec763cd25b0') + version('3.7.0', sha256='936302694e193791885e81cf72097eeadee5b68ba220889228b0aafbfb2cb654') + version('3.6.0', 
sha256='74a43c4e027c72bb1c324f8f73af21565404326c9998f534f234ec2a36ca1cdb') + version('3.5.1', sha256='b229f2aa5d743ff850fa695e61f65139bb6eca1a9d10af5306ad3766fcea2eb2') + version('3.5.0', sha256='2c92e18fa70edaf73517fcf91bc2a7cc2ec2aa8ffdf22bb974fa6f9bc3065f30') + version('3.4.0', sha256='2b606bbbf4ca5bcf2c8e811ea9060da30744860f3d63e1b3149fb5550a90b92b') variant('X', default=False, description="Build with GUI support.") variant('shenandoah', default=False, From 37a6b8d54f7562b561c4197af62ccf54d4a7f192 Mon Sep 17 00:00:00 2001 From: Glenn P Johnson Date: Sun, 9 Feb 2020 14:59:30 -0600 Subject: [PATCH 158/178] Fix vim build with ^ncurses+termlib This PR will set the approriate library if ncurses is built with a separate tinfo library. --- var/spack/repos/builtin/packages/vim/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/vim/package.py b/var/spack/repos/builtin/packages/vim/package.py index 394caaa1aee..a022af0218f 100644 --- a/var/spack/repos/builtin/packages/vim/package.py +++ b/var/spack/repos/builtin/packages/vim/package.py @@ -81,7 +81,10 @@ def configure_args(self): configure_args = ["--enable-fail-if-missing"] - configure_args.append("--with-tlib=ncursesw") + if '+termlib' in spec['ncurses']: + configure_args.append("--with-tlib=tinfow") + else: + configure_args.append("--with-tlib=ncursesw") configure_args.append("--with-features=" + feature_set) From 4e32505770f2ab54717f77e51bdc79ee12faba96 Mon Sep 17 00:00:00 2001 From: Peter Scheibel Date: Mon, 10 Feb 2020 10:44:52 -0800 Subject: [PATCH 159/178] libedit package: add version 3.1-20191231 (#14851) This new version of libedit can make use of an ncurses dependency built with +termlib (which moves some symbols into a separate libtinfo). 
--- var/spack/repos/builtin/packages/libedit/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/libedit/package.py b/var/spack/repos/builtin/packages/libedit/package.py index 7e12182a854..fd4bf49de41 100644 --- a/var/spack/repos/builtin/packages/libedit/package.py +++ b/var/spack/repos/builtin/packages/libedit/package.py @@ -11,6 +11,7 @@ class Libedit(AutotoolsPackage): homepage = "http://thrysoee.dk/editline/" url = "http://thrysoee.dk/editline/libedit-20170329-3.1.tar.gz" + version('3.1-20191231', sha256='dbb82cb7e116a5f8025d35ef5b4f7d4a3cdd0a3909a146a39112095a2d229071') version('3.1-20170329', sha256='91f2d90fbd2a048ff6dad7131d9a39e690fd8a8fd982a353f1333dd4017dd4be') version('3.1-20160903', sha256='0ccbd2e7d46097f136fcb1aaa0d5bc24e23bb73f57d25bee5a852a683eaa7567') version('3.1-20150325', sha256='c88a5e4af83c5f40dda8455886ac98923a9c33125699742603a88a0253fcc8c5') From 357786ce6b48749499538978b19f75b4ba92487c Mon Sep 17 00:00:00 2001 From: Massimiliano Culpo Date: Mon, 10 Feb 2020 20:22:21 +0100 Subject: [PATCH 160/178] Spack find: fix queries that specify dependencies (#14757) Fixes #10019 If multiple instances of a package were installed in a single instance of Spack, and they differed in terms of dependencies, then "spack find" would not distinguish specs based on their dependencies. For example if two instances of X were installed, one with Y and one with Z, then "spack find X ^Y" would display both instances of X. 
--- lib/spack/spack/database.py | 3 ++- lib/spack/spack/test/database.py | 20 +++++++++++++++++++ .../builtin.mock/packages/hdf5/package.py | 15 ++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 var/spack/repos/builtin.mock/packages/hdf5/package.py diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index c06d1ae5463..65b85e026ba 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -1254,7 +1254,8 @@ def _query( if not (start_date < inst_date < end_date): continue - if query_spec is any or rec.spec.satisfies(query_spec): + if (query_spec is any or + rec.spec.satisfies(query_spec, strict=True)): results.append(rec.spec) return results diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py index a2b9677ec69..1af125a7233 100644 --- a/lib/spack/spack/test/database.py +++ b/lib/spack/spack/test/database.py @@ -729,3 +729,23 @@ def test_query_unused_specs(mutable_database): unused = spack.store.db.unused_specs assert len(unused) == 1 assert unused[0].name == 'cmake' + + +@pytest.mark.regression('10019') +def test_query_spec_with_conditional_dependency(mutable_database): + # The issue is triggered by having dependencies that are + # conditional on a Boolean variant + s = spack.spec.Spec('hdf5~mpi') + s.concretize() + s.package.do_install(fake=True, explicit=True) + + results = spack.store.db.query_local('hdf5 ^mpich') + assert not results + + +@pytest.mark.regression('10019') +def test_query_spec_with_non_conditional_virtual_dependency(database): + # Ensure the same issue doesn't come up for virtual + # dependency that are not conditional on variants + results = spack.store.db.query_local('mpileaks ^mpich') + assert len(results) == 1 diff --git a/var/spack/repos/builtin.mock/packages/hdf5/package.py b/var/spack/repos/builtin.mock/packages/hdf5/package.py new file mode 100644 index 00000000000..495dbcbaa84 --- /dev/null +++ 
b/var/spack/repos/builtin.mock/packages/hdf5/package.py @@ -0,0 +1,15 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +class Hdf5(Package): + homepage = "http://www.llnl.gov" + url = "http://www.llnl.gov/hdf5-1.0.tar.gz" + + version(2.3, 'foobarbaz') + + variant('mpi', default=True, description='Debug variant') + + depends_on('mpi', when='mpi') From 490508d324dac62210077162a6b351c327ee7c66 Mon Sep 17 00:00:00 2001 From: Brian Van Essen Date: Mon, 10 Feb 2020 12:13:47 -0800 Subject: [PATCH 161/178] Hydrogen: update dependency on aluminum (#14760) * Hydrogen now depends on `aluminum +nccl` vs. `aluminum +mpi_cuda` * Hydrogen: Simplify Mac OS OpenMP-detection logic * Aluminum: Add Mac OS OpenMP-detection logic * LBANN: depend on conduit@0.4.0: instead of conduit@master --- .../repos/builtin/packages/aluminum/package.py | 10 ++++++++++ .../repos/builtin/packages/hydrogen/package.py | 16 +++++++--------- .../repos/builtin/packages/lbann/package.py | 4 ++-- 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/var/spack/repos/builtin/packages/aluminum/package.py b/var/spack/repos/builtin/packages/aluminum/package.py index 535f4df5f27..5edc4c37927 100644 --- a/var/spack/repos/builtin/packages/aluminum/package.py +++ b/var/spack/repos/builtin/packages/aluminum/package.py @@ -3,6 +3,7 @@ # # SPDX-License-Identifier: (Apache-2.0 OR MIT) +import os from spack import * @@ -45,4 +46,13 @@ def cmake_args(self): '-DALUMINUM_ENABLE_CUDA:BOOL=%s' % ('+gpu' in spec), '-DALUMINUM_ENABLE_MPI_CUDA:BOOL=%s' % ('+mpi_cuda' in spec), '-DALUMINUM_ENABLE_NCCL:BOOL=%s' % ('+nccl' in spec)] + + # Add support for OS X to find OpenMP + if (self.spec.satisfies('%clang platform=darwin')): + clang = self.compiler.cc + clang_bin = os.path.dirname(clang) + clang_root = os.path.dirname(clang_bin) + args.extend([ + 
'-DOpenMP_DIR={0}'.format(clang_root)]) + return args diff --git a/var/spack/repos/builtin/packages/hydrogen/package.py b/var/spack/repos/builtin/packages/hydrogen/package.py index 1607bc73198..625c336fff0 100644 --- a/var/spack/repos/builtin/packages/hydrogen/package.py +++ b/var/spack/repos/builtin/packages/hydrogen/package.py @@ -4,7 +4,6 @@ # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os -import sys from spack import * @@ -83,7 +82,7 @@ class Hydrogen(CMakePackage): depends_on('netlib-lapack +external-blas', when='blas=essl') depends_on('aluminum', when='+al ~cuda') - depends_on('aluminum +gpu +mpi_cuda', when='+al +cuda') + depends_on('aluminum +gpu +nccl', when='+al +cuda') # Note that this forces us to use OpenBLAS until #1712 is fixed depends_on('lapack', when='blas=openblas ~openmp_blas') @@ -135,13 +134,12 @@ def cmake_args(self): ] # Add support for OS X to find OpenMP - if (self.spec.satisfies('%clang')): - if (sys.platform == 'darwin'): - clang = self.compiler.cc - clang_bin = os.path.dirname(clang) - clang_root = os.path.dirname(clang_bin) - args.extend([ - '-DOpenMP_DIR={0}'.format(clang_root)]) + if (self.spec.satisfies('%clang platform=darwin')): + clang = self.compiler.cc + clang_bin = os.path.dirname(clang) + clang_root = os.path.dirname(clang_bin) + args.extend([ + '-DOpenMP_DIR={0}'.format(clang_root)]) if 'blas=openblas' in spec: args.extend([ diff --git a/var/spack/repos/builtin/packages/lbann/package.py b/var/spack/repos/builtin/packages/lbann/package.py index ca7d03703c3..cd8b81c3d57 100644 --- a/var/spack/repos/builtin/packages/lbann/package.py +++ b/var/spack/repos/builtin/packages/lbann/package.py @@ -100,8 +100,8 @@ class Lbann(CMakePackage): depends_on('cnpy') depends_on('nccl', when='@0.94:0.98.2 +gpu +nccl') - depends_on('conduit@master +hdf5', when='@0.94:0.99 +conduit') - depends_on('conduit@master +hdf5', when='@:0.90,0.99:') + depends_on('conduit@0.4.0: +hdf5', when='@0.94:0.99 +conduit') + depends_on('conduit@0.4.0: 
+hdf5', when='@:0.90,0.99:') depends_on('python@3: +shared', type=('build', 'run'), when='@:0.90,0.99:') extends("python") From 35df75ee7728ca925d39a25b8c65f46ad7dcd01a Mon Sep 17 00:00:00 2001 From: darmac Date: Tue, 11 Feb 2020 16:46:59 +0800 Subject: [PATCH 162/178] hepmc: added versions up to v3.2.0 (#14805) --- var/spack/repos/builtin/packages/hepmc/package.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/hepmc/package.py b/var/spack/repos/builtin/packages/hepmc/package.py index fe8502d0978..9a315bc5280 100644 --- a/var/spack/repos/builtin/packages/hepmc/package.py +++ b/var/spack/repos/builtin/packages/hepmc/package.py @@ -14,6 +14,10 @@ class Hepmc(CMakePackage): homepage = "http://hepmc.web.cern.ch/hepmc/" url = "http://hepmc.web.cern.ch/hepmc/releases/hepmc2.06.09.tgz" + version('3.2.0', sha256='b59f301e2660db3c81e4fd4aa0701cc010463691ac57424edf4301e16a5944b4') + version('3.1.2', sha256='7df91b862cead2657ee54ab043ecf0a8f46da354c8a7ea90d8db2c435904bce0') + version('3.1.1', sha256='46098a7e9717921e436c265836c914d515cb831dde898f3f1d129468df7982e7') + version('3.1.0', sha256='ac02977d6bde18398abf9d25a2e55614e22b5e819ed4b21ea55cf31aabe6df67') version('3.0.0', sha256='7ac3c939a857a5ad67bea1e77e3eb16e80d38cfdf825252ac57160634c26d9ec') version('2.06.09', sha256='e0f8fddd38472c5615210894444686ac5d72df3be682f7d151b562b236d9b422') version('2.06.08', sha256='8be6c1793e0a045f07ddb88bb64b46de7e66a52e75fb72b3f82f9a3e3ba8a8ce') @@ -30,7 +34,9 @@ def cmake_args(self): ] def url_for_version(self, version): - if version <= Version("2.06.08"): + if version > Version("3.0.0"): + url = "http://hepmc.web.cern.ch/hepmc/releases/HepMC3-{0}.tar.gz" + elif version <= Version("2.06.08"): url = "http://lcgapp.cern.ch/project/simu/HepMC/download/HepMC-{0}.tar.gz" else: url = "http://hepmc.web.cern.ch/hepmc/releases/hepmc{0}.tgz" From 1a41ec766a1d123a7663703e2708435cba139c25 Mon Sep 17 00:00:00 2001 From: Christoph Junghans 
Date: Tue, 11 Feb 2020 03:02:17 -0600 Subject: [PATCH 163/178] heffte: added v0.2 (#14881) --- var/spack/repos/builtin/packages/heffte/package.py | 1 + 1 file changed, 1 insertion(+) diff --git a/var/spack/repos/builtin/packages/heffte/package.py b/var/spack/repos/builtin/packages/heffte/package.py index e3bb7e49610..4a3fec0f339 100644 --- a/var/spack/repos/builtin/packages/heffte/package.py +++ b/var/spack/repos/builtin/packages/heffte/package.py @@ -14,6 +14,7 @@ class Heffte(CMakePackage): git = "https://bitbucket.org/icl/heffte.git" version('master', branch='master') + version('0.2', sha256='4e76ae60982b316c2e873b2e5735669b22620fefa1fc82f325cdb6989bec78d1') version('0.1', sha256='d279a03298d2dc76574b1ae1031acb4ea964348cf359273d1afa4668b5bfe748') variant('cuda', default=False, description='Builds with support for GPUs via CUDA') From 7706b76d3a6e57e0be930843aa59e34c749104d0 Mon Sep 17 00:00:00 2001 From: Matthias Diener Date: Tue, 11 Feb 2020 03:03:44 -0600 Subject: [PATCH 164/178] unifdef: added new package (#14880) --- .../repos/builtin/packages/unifdef/package.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 var/spack/repos/builtin/packages/unifdef/package.py diff --git a/var/spack/repos/builtin/packages/unifdef/package.py b/var/spack/repos/builtin/packages/unifdef/package.py new file mode 100644 index 00000000000..7b38686d931 --- /dev/null +++ b/var/spack/repos/builtin/packages/unifdef/package.py @@ -0,0 +1,25 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + + +from spack import * + + +class Unifdef(MakefilePackage): + """The unifdef utility selectively processes conditional C preprocessor #if + and #ifdef directives. 
It removes from a file both the directives and the + additional text that they delimit, while otherwise leaving the file + alone.""" + + homepage = "https://dotat.at/prog/unifdef/" + url = "https://dotat.at/prog/unifdef/unifdef-2.11.tar.xz" + + maintainers = ['matthiasdiener'] + + version('2.11', sha256='828ffc270ac262b88fe011136acef2780c05b0dc3c5435d005651740788d4537') + + def edit(self, spec, prefix): + makefile = FileFilter('Makefile') + makefile.filter(r'\$\{HOME\}', prefix) From e8b6c40b316358b01eec3e84c45bc7aab6071b64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Simonis?= Date: Tue, 11 Feb 2020 10:06:03 +0100 Subject: [PATCH 165/178] precice: add version 2.0.0 (#14842) Added python3 dependency Added support for new CMake variables --- .../repos/builtin/packages/precice/package.py | 38 +++++++++++++++---- 1 file changed, 31 insertions(+), 7 deletions(-) diff --git a/var/spack/repos/builtin/packages/precice/package.py b/var/spack/repos/builtin/packages/precice/package.py index c3f37f7d69c..22556719e44 100644 --- a/var/spack/repos/builtin/packages/precice/package.py +++ b/var/spack/repos/builtin/packages/precice/package.py @@ -19,6 +19,7 @@ class Precice(CMakePackage): maintainers = ['fsimonis', 'MakisH'] version('develop', branch='develop') + version('2.0.0', sha256='c8979d366f06e35626a8da08a1c589df77ec13972eb524a1ba99a011e245701f') version('1.6.1', sha256='7d0c54faa2c69e52304f36608d93c408629868f16f3201f663a0f9b2008f0763') version('1.6.0', sha256='c3b16376fda9eb3449adb6cc3c1e267c3dc792a5d118e37d93a32a59b5a4bc6f') version('1.5.2', sha256='051e0d7655a91f8681901e5c92812e48f33a5779309e2f104c99f5a687e1a418') @@ -44,9 +45,14 @@ class Precice(CMakePackage): depends_on('libxml2') depends_on('mpi', when='+mpi') depends_on('petsc@3.6:', when='+petsc') - depends_on('python@2.7:2.8', when='+python', type=('build', 'run')) + + # Python 3 support was added in version 2.0 + depends_on('python@2.7:2.8', when='@:1.9+python', type=('build', 'run')) + 
depends_on('python@3:', when='@2:+python', type=('build', 'run')) + # numpy 1.17+ requires Python 3 - depends_on('py-numpy@:1.16', when='+python', type=('build', 'run')) + depends_on('py-numpy@:1.16', when='@:1.9+python', type=('build', 'run')) + depends_on('py-numpy@1.17:', when='@2:+python', type=('build', 'run')) # We require C++11 compiler support as well as # library support for time manipulators (N2071, N2072) @@ -62,6 +68,17 @@ def cmake_args(self): # The xSDK installation policies were implemented after 1.5.2 xsdk_mode = spec.satisfies("@1.6:") + # Select the correct CMake variables by version + mpi_option = "MPI" + if spec.satisfies("@2:"): + mpi_option = "PRECICE_MPICommunication" + petsc_option = "PETSC" + if spec.satisfies("@2:"): + petsc_option = "PRECICE_PETScMapping" + python_option = "PYTHON" + if spec.satisfies("@2:"): + python_option = "PRECICE_PythonActions" + def variant_bool(feature, on='ON', off='OFF'): """Ternary for spec variant to ON/OFF string""" if feature in spec: @@ -70,9 +87,10 @@ def variant_bool(feature, on='ON', off='OFF'): cmake_args = [ '-DBUILD_SHARED_LIBS:BOOL=%s' % variant_bool('+shared'), - '-DMPI:BOOL=%s' % variant_bool('+mpi'), ] + cmake_args.append('-D%s:BOOL=%s' % (mpi_option, variant_bool('+mpi'))) + # Boost if xsdk_mode: cmake_args.append('-DTPL_ENABLE_BOOST=ON') @@ -95,13 +113,16 @@ def variant_bool(feature, on='ON', off='OFF'): # PETSc if '+petsc' in spec: + if xsdk_mode: + cmake_args.append('-DTPL_ENABLE_PETSC:BOOL=ON') + else: + cmake_args.append('-D%s:BOOL=ON' % petsc_option) cmake_args.extend([ - '-DTPL_ENABLE_PETSC:BOOL=ON' if xsdk_mode else '-DPETSC=ON', '-DPETSC_DIR=%s' % spec['petsc'].prefix, '-DPETSC_ARCH=.' 
]) else: - cmake_args.append('-DPETSC:BOOL=OFF') + cmake_args.append('-D%s:BOOL=OFF' % petsc_option) # Python if '+python' in spec: @@ -111,13 +132,16 @@ def variant_bool(feature, on='ON', off='OFF'): spec['py-numpy'].prefix, spec['python'].package.site_packages_dir, 'numpy', 'core', 'include') + if xsdk_mode: + cmake_args.append('-DTPL_ENABLE_PYTHON:BOOL=ON') + else: + cmake_args.append('-D%s:BOOL=ON' % python_option) cmake_args.extend([ - '-DTPL_ENABLE_PYTHON:BOOL=ON' if xsdk_mode else '-DPYTHON=ON', '-DPYTHON_INCLUDE_DIR=%s' % python_include, '-DNumPy_INCLUDE_DIR=%s' % numpy_include, '-DPYTHON_LIBRARY=%s' % python_library ]) else: - cmake_args.append('-DPYTHON:BOOL=OFF') + cmake_args.append('-D%s:BOOL=OFF' % python_option) return cmake_args From 5c33f638d7278602728c04e16ff2486f479b8575 Mon Sep 17 00:00:00 2001 From: "Seth R. Johnson" Date: Tue, 11 Feb 2020 15:00:02 -0500 Subject: [PATCH 166/178] Fix QT4 platform detection for linux clang (#14891) * Fix QT4 platform name for linux clang * Fix clang compiler flags on QT4 --- .../repos/builtin/packages/qt/package.py | 28 +++++++++++++++++-- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py index 1bc0f1df938..3753f4166c4 100644 --- a/var/spack/repos/builtin/packages/qt/package.py +++ b/var/spack/repos/builtin/packages/qt/package.py @@ -5,6 +5,8 @@ from spack import * from spack.operating_systems.mac_os import macos_version +import llnl.util.tty as tty +import itertools import os import sys @@ -18,6 +20,7 @@ class Qt(Package): url = 'http://download.qt.io/archive/qt/5.7/5.7.0/single/qt-everywhere-opensource-src-5.7.0.tar.gz' list_url = 'http://download.qt.io/archive/qt/' list_depth = 3 + maintainers = ['sethrj'] phases = ['configure', 'build', 'install'] @@ -178,7 +181,9 @@ class Qt(Package): use_xcode = True # Mapping for compilers/systems in the QT 'mkspecs' - compiler_mapping = {'intel': 'icc', 'clang': 
'clang-libc++', 'gcc': 'g++'} + compiler_mapping = {'intel': ('icc',), + 'clang': ('clang-libc++', 'clang'), + 'gcc': ('g++',)} platform_mapping = {'darwin': 'macx'} def url_for_version(self, version): @@ -235,17 +240,26 @@ def get_mkspec(self): """ spec = self.spec cname = spec.compiler.name - cname = self.compiler_mapping.get(cname, cname) pname = spec.architecture.platform + + # Transform spack compiler name to a list of possible QT compilers + cnames = self.compiler_mapping.get(cname, [cname]) + # Transform platform name to match those in QT pname = self.platform_mapping.get(pname, pname) qtplat = None mkspec_dir = 'qtbase/mkspecs' if spec.satisfies('@5:') else 'mkspecs' - for subdir in ('', 'unsupported'): + for subdir, cname in itertools.product(('', 'unsupported/'), cnames): platdirname = "".join([subdir, pname, "-", cname]) + tty.debug("Checking for platform '{0}' in {1}".format( + platdirname, mkspec_dir)) if os.path.exists(os.path.join(mkspec_dir, platdirname)): qtplat = platdirname break + else: + tty.warn("No matching QT platform was found in {0} " + "for platform '{1}' and compiler {2}".format( + mkspec_dir, pname, ",".join(cnames))) return (mkspec_dir, qtplat) @@ -341,6 +355,14 @@ def patch(self): with open(conf_file, 'a') as f: f.write("QMAKE_CXXFLAGS += -std=gnu++98\n") + @when('@4 %clang') + def patch(self): + (mkspec_dir, platform) = self.get_mkspec() + conf_file = os.path.join(mkspec_dir, platform, "qmake.conf") + + with open(conf_file, 'a') as f: + f.write("QMAKE_CXXFLAGS += -std=gnu++98\n") + @property def common_config_args(self): # incomplete list is here http://doc.qt.io/qt-5/configure-options.html From c2885990b8d6dcf4de4f19e90ec8a1c0d94f790c Mon Sep 17 00:00:00 2001 From: Scott Wittenburg Date: Tue, 11 Feb 2020 15:21:49 -0700 Subject: [PATCH 167/178] Fix intel-mkl package (#14856) * Add cpio package * intel-mkl: Depend on cli tool needed during build --- .../repos/builtin/packages/cpio/package.py | 18 ++++++++++++++++++ 
.../builtin/packages/intel-mkl/package.py | 2 ++ 2 files changed, 20 insertions(+) create mode 100644 var/spack/repos/builtin/packages/cpio/package.py diff --git a/var/spack/repos/builtin/packages/cpio/package.py b/var/spack/repos/builtin/packages/cpio/package.py new file mode 100644 index 00000000000..ae6b429ccc6 --- /dev/null +++ b/var/spack/repos/builtin/packages/cpio/package.py @@ -0,0 +1,18 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Cpio(AutotoolsPackage, GNUMirrorPackage): + """GNU cpio copies files into or out of a cpio or tar archive. The + archive can be another file on the disk, a magnetic tape, or a pipe. + """ + homepage = "https://www.gnu.org/software/cpio/" + gnu_mirror_path = "cpio/cpio-2.13.tar.gz" + + version('2.13', sha256='e87470d9c984317f658567c03bfefb6b0c829ff17dbf6b0de48d71a4c8f3db88') + + build_directory = 'spack-build' diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py index b6d8c38dcb2..423223589af 100644 --- a/var/spack/repos/builtin/packages/intel-mkl/package.py +++ b/var/spack/repos/builtin/packages/intel-mkl/package.py @@ -54,6 +54,8 @@ class IntelMkl(IntelPackage): version('11.3.2.181', sha256='bac04a07a1fe2ae4996a67d1439ee90c54f31305e8663d1ccfce043bed84fc27', url='http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8711/l_mkl_11.3.2.181.tgz') + depends_on('cpio', type='build') + variant('shared', default=True, description='Builds shared library') variant('ilp64', default=False, description='64 bit integers') variant( From 582e7ce2c83cb18e9bc49ec7cf962fae149d155b Mon Sep 17 00:00:00 2001 From: Jen Herting Date: Tue, 11 Feb 2020 15:56:48 -0500 Subject: [PATCH 168/178] [mono] add version 6.8.0.105 --- var/spack/repos/builtin/packages/mono/package.py | 2 ++ 1 file 
changed, 2 insertions(+) diff --git a/var/spack/repos/builtin/packages/mono/package.py b/var/spack/repos/builtin/packages/mono/package.py index 40a6a25fe94..73464f0fb94 100644 --- a/var/spack/repos/builtin/packages/mono/package.py +++ b/var/spack/repos/builtin/packages/mono/package.py @@ -28,6 +28,8 @@ class Mono(AutotoolsPackage): depends_on('libiconv') depends_on('perl', type=('build')) + version('6.8.0.105', sha256='578799c44c3c86a9eb5daf6dec6c60a24341940fd376371956d4a46cf8612178', + url='https://download.mono-project.com/sources/mono/mono-6.8.0.105.tar.xz') version('5.18.0.240', sha256='143e80eb00519ff496742e78ee07403a3c3629437f3a498eee539de8108da895') version('5.16.0.220', sha256='f420867232b426c062fa182256a66b29efa92992c119847359cdd1ab75af8de3') version('5.14.0.177', sha256='d4f5fa2e8188d66fbc8054f4145711e45c1faa6d070e63600efab93d1d189498') From c56c4b334d80e9cc954cac5f087dd49f3a4f1066 Mon Sep 17 00:00:00 2001 From: Todd Gamblin Date: Tue, 11 Feb 2020 14:16:40 -0800 Subject: [PATCH 169/178] bugfix: `spack -V` should use `working_dir()` instead of `git -C` - `git -C` doesn't work on git before 1.8.5 - `working_dir` gets us the same effect --- lib/spack/spack/main.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py index 4386f504353..0821b4e6997 100644 --- a/lib/spack/spack/main.py +++ b/lib/spack/spack/main.py @@ -22,6 +22,7 @@ from six import StringIO import llnl.util.cpu +import llnl.util.filesystem as fs import llnl.util.tty as tty import llnl.util.tty.color as color from llnl.util.tty.log import log_output @@ -125,8 +126,9 @@ def get_version(): if os.path.exists(git_path): git = exe.which("git") if git: - desc = git("-C", spack.paths.prefix, "describe", "--tags", - output=str, fail_on_error=False) + with fs.working_dir(spack.paths.prefix): + desc = git( + "describe", "--tags", output=str, fail_on_error=False) if git.returncode == 0: match = re.match(r"v([^-]+)-([^-]+)-g([a-f\d]+)", 
desc) From 78f16d703dba477f27c77213729a7707eafe08d8 Mon Sep 17 00:00:00 2001 From: codeandkey Date: Fri, 7 Feb 2020 14:25:08 -0600 Subject: [PATCH 170/178] r-truncdist: new package at 1.0-2 --- .../builtin/packages/r-truncdist/package.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 var/spack/repos/builtin/packages/r-truncdist/package.py diff --git a/var/spack/repos/builtin/packages/r-truncdist/package.py b/var/spack/repos/builtin/packages/r-truncdist/package.py new file mode 100644 index 00000000000..5ead0acf763 --- /dev/null +++ b/var/spack/repos/builtin/packages/r-truncdist/package.py @@ -0,0 +1,19 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class RTruncdist(RPackage): + """truncdist: Truncated Random Variables""" + + homepage = "https://cloud.r-project.org/package=truncdist" + url = "https://cloud.r-project.org/src/contrib/truncdist_1.0-2.tar.gz" + list_url = "https://cloud.r-project.org/src/contrib/Archive/truncdist" + + version('1.0-2', sha256='b848b68bdd983bd496fa7327632ffa8add8d2231229b8af5c8bc29d823e1300a') + + depends_on('r@2.0.1:', type=('build', 'run')) + depends_on('r-evd', type=('build', 'run')) From 630611a786eb92932608cdf2d2b73049323f1990 Mon Sep 17 00:00:00 2001 From: darmac Date: Wed, 12 Feb 2020 08:57:18 +0800 Subject: [PATCH 171/178] libdrm: add version 2.4.100 (#14809) --- var/spack/repos/builtin/packages/libdrm/package.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/libdrm/package.py b/var/spack/repos/builtin/packages/libdrm/package.py index c724f1cd4a5..82f55a9955f 100644 --- a/var/spack/repos/builtin/packages/libdrm/package.py +++ b/var/spack/repos/builtin/packages/libdrm/package.py @@ -12,13 +12,14 @@ class Libdrm(Package): on Linux, BSD and other systems 
supporting the ioctl interface.""" homepage = "http://dri.freedesktop.org/libdrm/" - url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz" + url = "https://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz" - version('2.4.81', sha256='64036c5e0668fdc2b820dcc0ebab712f44fd2c2147d23dc5a6e003b19f0d3e9f') - version('2.4.75', sha256='a411bff814b4336c8908dcbd045cd89fdc7afedc75b795d897d462e467cbb01d') - version('2.4.70', sha256='73615b9c1c4852e5ce045efa19c866e8df98e396b2443bf859eea05574ecb64f') - version('2.4.59', sha256='ed9d03a92c2d80e6310cc350db3430620f1659ae084a07c6824cee7bc81ae8fa') - version('2.4.33', sha256='bd2a8fecf28616f2157ca33ede691c139cc294ed2d0c4244b62ca7d22e98e5a4') + version('2.4.100', sha256='6a5337c054c0c47bc16607a21efa2b622e08030be4101ef4a241c5eb05b6619b') + version('2.4.81', sha256='64036c5e0668fdc2b820dcc0ebab712f44fd2c2147d23dc5a6e003b19f0d3e9f') + version('2.4.75', sha256='a411bff814b4336c8908dcbd045cd89fdc7afedc75b795d897d462e467cbb01d') + version('2.4.70', sha256='73615b9c1c4852e5ce045efa19c866e8df98e396b2443bf859eea05574ecb64f') + version('2.4.59', sha256='ed9d03a92c2d80e6310cc350db3430620f1659ae084a07c6824cee7bc81ae8fa') + version('2.4.33', sha256='bd2a8fecf28616f2157ca33ede691c139cc294ed2d0c4244b62ca7d22e98e5a4') depends_on('pkgconfig', type='build') depends_on('libpciaccess@0.10:', when=(sys.platform != 'darwin')) From 2c63ea49d1130a6f668c7b449f5f63adffe8d72a Mon Sep 17 00:00:00 2001 From: Sajid Ali <30510036+s-sajid-ali@users.noreply.github.com> Date: Tue, 11 Feb 2020 19:02:39 -0600 Subject: [PATCH 172/178] New package: libvips (#14794) --- .../repos/builtin/packages/libvips/package.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 var/spack/repos/builtin/packages/libvips/package.py diff --git a/var/spack/repos/builtin/packages/libvips/package.py b/var/spack/repos/builtin/packages/libvips/package.py new file mode 100644 index 00000000000..89371208ab3 --- /dev/null +++ 
b/var/spack/repos/builtin/packages/libvips/package.py @@ -0,0 +1,45 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Libvips(AutotoolsPackage): + """libvips is a demand-driven, horizontally threaded image processing + library. Compared to similar libraries, libvips runs quickly and uses + little memory.""" + + homepage = "https://libvips.github.io/libvips/" + url = "https://github.com/libvips/libvips/releases/download/v8.9.0/vips-8.9.0.tar.gz" + git = "https://github.com/libvips/libvips.git" + + version('8.9.1', sha256='45633798877839005016c9d3494e98dee065f5cb9e20f4552d3b315b8e8bce91', preferred=True) + version('8.9.0', sha256='97334a5e70aff343d2587f23cb8068fc846a58cd937c89a446142ccf00ea0349') + + variant('fftw', default=True, + description='Uses FFTW3 for fourier transforms.') + + variant('jpeg', default=False, + description='Enable JPEG support') + + variant('tiff', default=False, + description='Enable TIFF support') + + variant('png', default=False, + description='Enable pngfile support') + + variant('poppler', default=False, + description='Enable PDF rendering via poppler') + + # TODO: Add more variants! + + depends_on('glib') + depends_on('expat') + + depends_on('fftw', when='+fftw') + depends_on('libjpeg', when='+jpeg') + depends_on('libtiff', when='+tiff') + depends_on('libpng', when='+png') + depends_on('poppler', when='+poppler') From 947dabc35688a658f0be45e924d4a5cc0a0fd496 Mon Sep 17 00:00:00 2001 From: Hadrien G Date: Wed, 12 Feb 2020 02:17:06 +0100 Subject: [PATCH 173/178] [dd4hep] Make DDDigi work with current TBB releases (#14791) Add patch for DDDigi's broken TBB support. 
The issue is fixed in DD4hep master so the patch is only required for 1.11.0 --- .../repos/builtin/packages/dd4hep/package.py | 4 ++ .../packages/dd4hep/tbb-workarounds.patch | 41 +++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 var/spack/repos/builtin/packages/dd4hep/tbb-workarounds.patch diff --git a/var/spack/repos/builtin/packages/dd4hep/package.py b/var/spack/repos/builtin/packages/dd4hep/package.py index 46d64b9a822..6fd63f76c68 100644 --- a/var/spack/repos/builtin/packages/dd4hep/package.py +++ b/var/spack/repos/builtin/packages/dd4hep/package.py @@ -22,6 +22,10 @@ class Dd4hep(CMakePackage): version('1.11.0', commit='280c7d748d56a704699408ac8e57815d029b169a') version('1.10.0', commit='9835d1813c07d9d5850d1e68276c0171d1726801') + # Workarounds for various TBB issues in DD4hep v1.11 + # See https://github.com/AIDASoft/DD4hep/pull/613 . + patch('tbb-workarounds.patch', when='@1.11.0') + variant('xercesc', default=False, description="Enable 'Detector Builders' based on XercesC") variant('geant4', default=False, description="Enable the simulation part based on Geant4") variant('testing', default=False, description="Enable and build tests") diff --git a/var/spack/repos/builtin/packages/dd4hep/tbb-workarounds.patch b/var/spack/repos/builtin/packages/dd4hep/tbb-workarounds.patch new file mode 100644 index 00000000000..65923297743 --- /dev/null +++ b/var/spack/repos/builtin/packages/dd4hep/tbb-workarounds.patch @@ -0,0 +1,41 @@ +diff --git a/DDDigi/CMakeLists.txt b/DDDigi/CMakeLists.txt +index e6fb1096..88eb5c92 100644 +--- a/DDDigi/CMakeLists.txt ++++ b/DDDigi/CMakeLists.txt +@@ -34,12 +34,10 @@ target_include_directories(DDDigi + + FIND_PACKAGE(TBB QUIET) + if(TBB_FOUND) +- dd4hep_print( "|++> TBB_INCLUDE_DIR --> ${TBB_INCLUDE_DIR}") +- dd4hep_print( "|++> TBB_LIBRARY --> ${TBB_LIBRARY}") ++ dd4hep_print( "|++> TBB_IMPORTED_TARGETS --> ${TBB_IMPORTED_TARGETS}") + dd4hep_print( "|++> TBB found. 
DDDigi will run multi threaded.") + target_compile_definitions(DDDigi PUBLIC DD4HEP_USE_TBB) +- target_link_libraries(DDDigi ${TBB_LIBRARY}) +- target_include_directories(DDDigi ${TBB_INCLUDE_DIRS}) ++ target_link_libraries(DDDigi PUBLIC ${TBB_IMPORTED_TARGETS}) + else() + dd4hep_print( "|++> TBB not found. DDDigi will only work single threaded.") + endif() +diff --git a/DDDigi/src/DigiKernel.cpp b/DDDigi/src/DigiKernel.cpp +index d62c6694..f2c2e86c 100644 +--- a/DDDigi/src/DigiKernel.cpp ++++ b/DDDigi/src/DigiKernel.cpp +@@ -91,7 +91,7 @@ public: + DigiEventAction* action = 0; + Wrapper(DigiContext& c, DigiEventAction* a) + : context(c), action(a) {} +- Wrapper(Wrapper&& copy) = delete; ++ Wrapper(Wrapper&& copy) = default; + Wrapper(const Wrapper& copy) = default; + Wrapper& operator=(Wrapper&& copy) = delete; + Wrapper& operator=(const Wrapper& copy) = delete; +@@ -111,7 +111,7 @@ class DigiKernel::Processor { + DigiKernel& kernel; + public: + Processor(DigiKernel& k) : kernel(k) {} +- Processor(Processor&& l) = delete; ++ Processor(Processor&& l) = default; + Processor(const Processor& l) = default; + void operator()() const { + int todo = 1; From cf120d7441b7c7a2d7c09d27ef51bcfb5b526a9d Mon Sep 17 00:00:00 2001 From: darmac Date: Wed, 12 Feb 2020 09:34:44 +0800 Subject: [PATCH 174/178] OpenCV package: add versions 4.2.0, 4.1.2, and 4.1.1(#14814) --- var/spack/repos/builtin/packages/opencv/package.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py index fcd3cb7424d..95abf6368ae 100644 --- a/var/spack/repos/builtin/packages/opencv/package.py +++ b/var/spack/repos/builtin/packages/opencv/package.py @@ -25,8 +25,11 @@ class Opencv(CMakePackage, CudaPackage): git = 'https://github.com/opencv/opencv.git' version('master', branch='master') + version('4.2.0', sha256='9ccb2192d7e8c03c58fee07051364d94ed7599363f3b0dce1c5e6cc11c1bb0ec') + 
version('4.1.2', sha256='385dd0a9c25e67ef0dd60e022d2a2d7b17e2f36819cf3cb46aa8cdff5c5282c9') + version('4.1.1', sha256='5de5d96bdfb9dad6e6061d70f47a0a91cee96bb35afb9afb9ecb3d43e243d217') version('4.1.0-openvino', sha256='58764d2487c6fb4cd950fb46483696ae7ae28e257223d6e44e162caa22ee9e5c') - version('4.1.0', sha256='8f6e4ab393d81d72caae6e78bd0fd6956117ec9f006fba55fcdb88caf62989b7', preferred=True) + version('4.1.0', sha256='8f6e4ab393d81d72caae6e78bd0fd6956117ec9f006fba55fcdb88caf62989b7') version('4.0.1-openvino', sha256='8cbe32d12a70decad7a8327eb4fba46016a9c47ff3ba6e114d27b450f020716f') version('4.0.1', sha256='7b86a0ee804244e0c407321f895b15e4a7162e9c5c0d2efc85f1cadec4011af4') version('4.0.0-openvino', sha256='aa910078ed0b7e17bd10067e04995c131584a6ed6d0dcc9ca44a292aa8e296fc') From c8cb480eb7ac18c6c5b9c3b73841b00ecfa0ed56 Mon Sep 17 00:00:00 2001 From: Hadrien G Date: Wed, 12 Feb 2020 02:38:22 +0100 Subject: [PATCH 175/178] ROOT: Add Pythia8 support (#14790) --- var/spack/repos/builtin/packages/root/README.md | 2 -- var/spack/repos/builtin/packages/root/package.py | 7 +++++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/var/spack/repos/builtin/packages/root/README.md b/var/spack/repos/builtin/packages/root/README.md index d808498b343..5f9180dc954 100644 --- a/var/spack/repos/builtin/packages/root/README.md +++ b/var/spack/repos/builtin/packages/root/README.md @@ -30,8 +30,6 @@ Monitoring with Monalisa depends on `libapmoncpp`. 
#### `oracle` -#### `pythia8` - #### `tcmalloc` #### `veccore` diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py index a3c9202833d..598fd78dd95 100644 --- a/var/spack/repos/builtin/packages/root/package.py +++ b/var/spack/repos/builtin/packages/root/package.py @@ -117,6 +117,8 @@ class Root(CMakePackage): description='Enable postgres support') variant('pythia6', default=False, description='Enable pythia6 support') + variant('pythia8', default=False, + description='Enable pythia8 support') variant('python', default=True, description='Enable Python ROOT bindings') variant('qt4', default=False, @@ -216,6 +218,7 @@ class Root(CMakePackage): depends_on('openssl', when='+davix') # Also with davix depends_on('postgresql', when='+postgres') depends_on('pythia6+root', when='+pythia6') + depends_on('pythia8', when='+pythia8') depends_on('python@2.7:', when='+python', type=('build', 'run')) depends_on('r', when='+r', type=('build', 'run')) depends_on('r-rcpp', when='+r', type=('build', 'run')) @@ -361,8 +364,8 @@ def cmake_args(self): ['oracle', False], ['pgsql', 'postgres'], ['pythia6'], - ['pythia8', False], - ['python', self.spec.satisfies('+python')], + ['pythia8'], + ['python'], ['qt', 'qt4'], # See conflicts ['qtgsi', 'qt4'], # See conflicts ['r', 'R'], From 78ce1c7ce4dca74acf81f3332237d47cf81cdad2 Mon Sep 17 00:00:00 2001 From: victorusu Date: Wed, 12 Feb 2020 02:51:26 +0100 Subject: [PATCH 176/178] New package: ReFrame (#14737) --- .../repos/builtin/packages/reframe/package.py | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 var/spack/repos/builtin/packages/reframe/package.py diff --git a/var/spack/repos/builtin/packages/reframe/package.py b/var/spack/repos/builtin/packages/reframe/package.py new file mode 100644 index 00000000000..14a307e6e44 --- /dev/null +++ b/var/spack/repos/builtin/packages/reframe/package.py @@ -0,0 +1,34 @@ +# Copyright 2013-2020 Lawrence Livermore 
National Security, LLC and other +# Spack Project Developers. See the top-level COPYRIGHT file for details. +# +# SPDX-License-Identifier: (Apache-2.0 OR MIT) + +from spack import * + + +class Reframe(Package): + """ReFrame is a framework for writing regression tests for HPC systems. + The goal of this framework is to abstract away the complexity of the + interactions with the system, separating the logic of a regression test + from the low-level details, which pertain to the system configuration and + setup. This allows users to write easily portable regression tests, + focusing only on the functionality.""" + + homepage = 'https://reframe-hpc.readthedocs.io' + url = 'https://github.com/eth-cscs/reframe/archive/v2.21.tar.gz' + git = 'https://github.com/eth-cscs/reframe.git' + + # notify when the package is updated. + maintainers = ['victorusu', 'vkarak'] + + version('master', branch='master') + version('2.21', sha256='f35d4fda2f9672c87d3ef664d9a2d6eb0c01c88218a31772a6645c32c8934c4d') + version('2.20', sha256='310c18d705858bbe6bd9a2dc4d382b254c1f093b0671d72363f2111e8c162ba4') + version('2.17.3', sha256='dc8dfb2ccb9a966303879b7cdcd188c47063e9b7999cbd5d6255223b066bf357') + version('2.17.2', sha256='092241cdc15918040aacb922c806aecb59c5bdc3ff7db034a4f355d39aecc101') + version('2.17.1', sha256='0b0d32a892607840a7d668f5dcea6f03f7022a26b23e5042a0faf5b8c41cb146') + + depends_on('python@3.5:', type=('run')) + + def install(self, spec, prefix): + install_tree(self.stage.source_path, self.prefix) From b5a3ee636bb22a84756c4a9d212dffc87ad783e6 Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Tue, 11 Feb 2020 20:47:50 -0800 Subject: [PATCH 177/178] WarpX: master is the new develop (#14898) WarpX removed the `dev` branch in favor of a simpler, `master`-centric development model. `master` is the new development branch and there is no stable branch anymore (we use tags and release branches instead). 
--- var/spack/repos/builtin/packages/warpx/package.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/var/spack/repos/builtin/packages/warpx/package.py b/var/spack/repos/builtin/packages/warpx/package.py index 6873bdf9d18..7755beba9c5 100644 --- a/var/spack/repos/builtin/packages/warpx/package.py +++ b/var/spack/repos/builtin/packages/warpx/package.py @@ -18,8 +18,9 @@ class Warpx(MakefilePackage): homepage = "https://ecp-warpx.github.io/index.html" git = "https://github.com/ECP-WarpX/WarpX.git" + maintainers = ['ax3l', 'dpgrote', 'MaxThevenet', 'RemiLehe'] + version('master', tag='master') - version('dev', tag='dev') depends_on('mpi') @@ -40,11 +41,6 @@ class Warpx(MakefilePackage): resource(name='amrex', git='https://github.com/AMReX-Codes/amrex.git', when='@master', - tag='master') - - resource(name='amrex', - git='https://github.com/AMReX-Codes/amrex.git', - when='@dev', tag='development') resource(name='picsar', From 3b2c534e73bb8b6945c133adf0ec4983eb90d897 Mon Sep 17 00:00:00 2001 From: Axel Huebl Date: Wed, 12 Feb 2020 01:02:01 -0800 Subject: [PATCH 178/178] openPMD-api: Build Env Fix & Dev (#14904) - fix a missing `env.` prefix in the build environment. 
- rename development branch to same name as in git --- var/spack/repos/builtin/packages/openpmd-api/package.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/var/spack/repos/builtin/packages/openpmd-api/package.py b/var/spack/repos/builtin/packages/openpmd-api/package.py index f05caa68f5d..d01ca40b482 100644 --- a/var/spack/repos/builtin/packages/openpmd-api/package.py +++ b/var/spack/repos/builtin/packages/openpmd-api/package.py @@ -14,7 +14,7 @@ class OpenpmdApi(CMakePackage): maintainers = ['ax3l'] - version('develop', branch='dev') + version('dev', branch='dev') version('0.10.3', tag='0.10.3-alpha') version('0.10.2', tag='0.10.2-alpha') version('0.10.1', tag='0.10.1-alpha') @@ -114,4 +114,4 @@ def setup_dependent_build_environment(self, env, dependent_spec): # pre-load dependent CMake-PUBLIC header-only libs env.prepend_path('CMAKE_PREFIX_PATH', self.spec['mpark-variant'].prefix) - prepend_path('CPATH', self.spec['mpark-variant'].prefix.include) + env.prepend_path('CPATH', self.spec['mpark-variant'].prefix.include)